repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40) | content (stringlengths 10-680k) | apis (stringlengths 2-680k)
---|---|---|---|---|
liudonghua123/bridgedb | lib/bridgedb/email/request.py | 94dd10673f9e6650e8a00e162f348e64f7a1ecab | # -*- coding: utf-8; test-case-name: bridgedb.test.test_email_request; -*-
#_____________________________________________________________________________
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Nick Mathewson <[email protected]>
# Isis Lovecruft <[email protected]> 0xA3ADB67A2CDB8B35
# Matthew Finkel <[email protected]>
# please also see AUTHORS file
# :copyright: (c) 2007-2015, The Tor Project, Inc.
# (c) 2013-2015, Isis Lovecruft
# :license: see LICENSE for licensing information
#_____________________________________________________________________________
"""
.. py:module:: bridgedb.email.request
:synopsis: Classes for parsing and storing information about requests for
bridges which are sent to the email distributor.
bridgedb.email.request
======================
Classes for parsing and storing information about requests for bridges
which are sent to the email distributor.
::
bridgedb.email.request
| |_ determineBridgeRequestOptions - Figure out which filters to apply, or
| offer help.
|_ EmailBridgeRequest - A request for bridges which was received through
the email distributor.
..
"""
from __future__ import print_function
from __future__ import unicode_literals
import logging
import re
from bridgedb import bridgerequest
from bridgedb.Dist import EmailRequestedHelp
from bridgedb.Dist import EmailRequestedKey
#: A regular expression for matching the Pluggable Transport method TYPE in
#: emailed requests for Pluggable Transports.
TRANSPORT_REGEXP = ".*transport ([a-z][_a-z0-9]*)"
TRANSPORT_PATTERN = re.compile(TRANSPORT_REGEXP)
#: A regular expression that matches country codes in requests for unblocked
#: bridges.
UNBLOCKED_REGEXP = ".*unblocked ([a-z]{2,4})"
UNBLOCKED_PATTERN = re.compile(UNBLOCKED_REGEXP)
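# Illustrative matches (hypothetical request lines, not from real emails):
#   TRANSPORT_PATTERN.match("get transport obfs4").group(1) -> 'obfs4'
#   UNBLOCKED_PATTERN.match("get unblocked cn").group(1) -> 'cn'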
def determineBridgeRequestOptions(lines):
"""Figure out which :class:`Bridges.BridgeFilter`s to apply, or offer help.
.. note:: If any ``'transport TYPE'`` was requested, or bridges not
blocked in a specific CC (``'unblocked CC'``), then the ``TYPE``
and/or ``CC`` will *always* be stored as a *lowercase* string.
:param list lines: A list of lines from an email, including the headers.
:raises EmailRequestedHelp: if the client requested help.
:raises EmailRequestedKey: if the client requested our GnuPG key.
:rtype: :class:`EmailBridgeRequest`
:returns: A :class:`~bridgerequest.BridgeRequest` with all of the requested
parameters set. The returned ``BridgeRequest`` will have already had
its filters generated via :meth:`~EmailBridgeRequest.generateFilters`.
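A minimal usage sketch (the email lines below are illustrative, not
taken from a real request)::
request = determineBridgeRequestOptions(
['From: [email protected]', '', 'get transport obfs4'])
# request.transports would then contain ['obfs4']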
"""
request = EmailBridgeRequest()
skippedHeaders = False
for line in lines:
line = line.strip().lower()
# Ignore all lines before the first empty line:
if not line: skippedHeaders = True
if not skippedHeaders: continue
if ("help" in line) or ("halp" in line):
raise EmailRequestedHelp("Client requested help.")
if "get" in line:
request.isValid(True)
logging.debug("Email request was valid.")
if "key" in line:
request.wantsKey(True)
raise EmailRequestedKey("Email requested a copy of our GnuPG key.")
if "ipv6" in line:
request.withIPv6()
if "transport" in line:
request.withPluggableTransportType(line)
if "unblocked" in line:
request.withoutBlockInCountry(line)
logging.debug("Generating hashring filters for request.")
request.generateFilters()
return request
class EmailBridgeRequest(bridgerequest.BridgeRequestBase):
"""We received a request for bridges through the email distributor."""
def __init__(self):
"""Process a new bridge request received through the
:class:`~bridgedb.Dist.EmailBasedDistributor`.
"""
super(EmailBridgeRequest, self).__init__()
self._isValid = False
self._wantsKey = False
def isValid(self, valid=None):
"""Get or set the validity of this bridge request.
If called without parameters, this method will return the current
state, otherwise (if called with the **valid** parameter), it will set
the current state of validity for this request.
:param bool valid: If given, set the validity state of this
request. Otherwise, get the current state.
"""
if valid is not None:
self._isValid = bool(valid)
return self._isValid
def wantsKey(self, wantsKey=None):
"""Get or set whether this bridge request wanted our GnuPG key.
If called without parameters, this method will return the current
state, otherwise (if called with the **wantsKey** parameter set), it
will set the current state for whether or not this request wanted our
key.
:param bool wantsKey: If given, set whether or not this request wanted
our GnuPG key. Otherwise, get the current state.
"""
if wantsKey is not None:
self._wantsKey = bool(wantsKey)
return self._wantsKey
def withoutBlockInCountry(self, line):
"""This request was for bridges not blocked in **country**.
Add any country code found in the **line** to the list of
``notBlockedIn``. Currently, a request for unblocked bridges is recognized
if the email line contains the ``'unblocked'`` command.
:param str line: The line from the email wherein the client
requested bridges which are not blocked in a particular country.
"""
unblocked = None
logging.debug("Parsing 'unblocked' line: %r" % line)
try:
unblocked = UNBLOCKED_PATTERN.match(line).group(1)
except (TypeError, AttributeError):
pass
if unblocked:
self.notBlockedIn.append(unblocked)
logging.info("Email requested bridges not blocked in: %r"
% unblocked)
def withPluggableTransportType(self, line):
"""This request included a specific Pluggable Transport identifier.
Add any Pluggable Transport method TYPE found in the **line** to the
list of ``transports``. Currently, a request for a transport is
recognized if the email line contains the ``'transport'`` command.
:param str line: The line from the email wherein the client
requested some type of Pluggable Transport.
"""
transport = None
logging.debug("Parsing 'transport' line: %r" % line)
try:
transport = TRANSPORT_PATTERN.match(line).group(1)
except (TypeError, AttributeError):
pass
if transport:
self.transports.append(transport)
logging.info("Email requested transport type: %r" % transport)
| [((1722, 1750), 're.compile', 're.compile', (['TRANSPORT_REGEXP'], {}), '(TRANSPORT_REGEXP)\n', (1732, 1750), False, 'import re\n'), ((1907, 1935), 're.compile', 're.compile', (['UNBLOCKED_REGEXP'], {}), '(UNBLOCKED_REGEXP)\n', (1917, 1935), False, 'import re\n'), ((3628, 3685), 'logging.debug', 'logging.debug', (['"""Generating hashring filters for request."""'], {}), "('Generating hashring filters for request.')\n", (3641, 3685), False, 'import logging\n'), ((5771, 5823), 'logging.debug', 'logging.debug', (['("Parsing \'unblocked\' line: %r" % line)'], {}), '("Parsing \'unblocked\' line: %r" % line)\n', (5784, 5823), False, 'import logging\n'), ((6660, 6712), 'logging.debug', 'logging.debug', (['("Parsing \'transport\' line: %r" % line)'], {}), '("Parsing \'transport\' line: %r" % line)\n', (6673, 6712), False, 'import logging\n'), ((3099, 3143), 'bridgedb.Dist.EmailRequestedHelp', 'EmailRequestedHelp', (['"""Client requested help."""'], {}), "('Client requested help.')\n", (3117, 3143), False, 'from bridgedb.Dist import EmailRequestedHelp\n'), ((3217, 3258), 'logging.debug', 'logging.debug', (['"""Email request was valid."""'], {}), "('Email request was valid.')\n", (3230, 3258), False, 'import logging\n'), ((3338, 3399), 'bridgedb.Dist.EmailRequestedKey', 'EmailRequestedKey', (['"""Email requested a copy of our GnuPG key."""'], {}), "('Email requested a copy of our GnuPG key.')\n", (3355, 3399), False, 'from bridgedb.Dist import EmailRequestedKey\n'), ((6044, 6114), 'logging.info', 'logging.info', (["('Email requested bridges not blocked in: %r' % unblocked)"], {}), "('Email requested bridges not blocked in: %r' % unblocked)\n", (6056, 6114), False, 'import logging\n'), ((6932, 6994), 'logging.info', 'logging.info', (["('Email requested transport type: %r' % transport)"], {}), "('Email requested transport type: %r' % transport)\n", (6944, 6994), False, 'import logging\n')] |
Unknoob/buck | test/com/facebook/buck/skylark/parser/testdata/rule_with_wrong_types/attr_value_type/subdir/foo.bzl | 2dfc734354b326f2f66896dde7746a11965d5a13 | """ Module docstring """
def _impl(_ctx):
""" Function docstring """
pass
some_rule = rule(
attrs = {
"attr1": attr.int(
default = 2,
mandatory = False,
),
"attr2": 5,
},
implementation = _impl,
)
| [] |
griimx/Summer-2016 | src/printReport.py | 08bf0a68a0e12ee81318409f68448adaf75983fe | from __future__ import print_function
from connection import *  # expected to provide the `db` connection object
import MySQLdb.cursors  # explicit import for MySQLdb.cursors.DictCursor used below
from jinja2 import Environment, FileSystemLoader
import webbrowser
def print_report(id):
env = Environment(loader=FileSystemLoader('.'))
template = env.get_template("src/template.html")
cursor = db.cursor(MySQLdb.cursors.DictCursor)
sql = "SELECT e.*, b.*, d.`depName` "
sql += "FROM `employees` e, `baccounts` b, `departments` d "
sql +="WHERE e.`empID` = b.`empdb_empID` "
sql +="AND e.`depDB_depID` = d.`depID` "
sql +="AND e.`empID` = '"+ id +"'"
# print(sql)
cursor.execute(sql)
result = cursor.fetchall()
# print(result[0])
result = result[0]
print(result)
template_vars = {"empID" : result['empID'],
"firstName" : result['firstName'],
"lastName" : result['lastName'],
"address" : result['address'],
"pin" : result['pin'],
"state" : result['state'],
"adharID" : result['adharID'],
"panID" : result['panID'],
"designation" : result['designation'],
"unit" : result['unit'],
"email" : result['email'],
"mobile" : result['mobile'],
"depName" : result['depName'],
"IFSC" : result['IFSC'],
"ACNo" : result['ACNo'],
"BranchAdd" : result['BranchAdd']
}
content = template.render(template_vars)
with open('print.html', 'w') as static_file:
static_file.write(content)
webbrowser.open_new_tab('print.html')
# self.entry_text(self.entry_name, result['firstName']+" "+result['lastName'] )
# self.entry_text(self.entry_EmpID, result['empID'])
# self.entry_text(self.entry_EmpName, result['firstName']+" "+result['lastName'])
# self.entry_text(self.entry_personalno, result['empID'])
# self.entry_text(self.entry_address,result['address'] )
# self.entry_text(self.entry_pin, result['pin'])
# self.entry_text(self.entry_state, result['state'])
# self.entry_text(self.entry_adhar, result['adharID'])
# self.entry_text(self.entry_pan, result['panID'])
# self.entry_text(self.entry_designation, result['designation'])
# self.entry_text(self.entry_unit, result['unit'])
# self.entry_text(self.entry_emailid, result['email'])
# self.entry_text(self.entry_mobile, result['mobile'])
# self.entry_text(self.entry_department, result['depName'])
# self.entry_text(self.entry_ifsc, result['IFSC'])
# self.entry_text(self.enrtry_acno, result['ACNo'])
# self.entry_text(self.entry_branch, result['BranchAdd'])
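# Example usage (hypothetical employee id):
# print_report('EMP001')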
| [((1360, 1397), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['"""print.html"""'], {}), "('print.html')\n", (1383, 1397), False, 'import webbrowser\n'), ((179, 200), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['"""."""'], {}), "('.')\n", (195, 200), False, 'from jinja2 import Environment, FileSystemLoader\n')] |
avalentino/pyre | packages/pyre/schemata/Container.py | 7e1f0287eb7eba1c6d1ef385e5160079283ac363 | # -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2021 all rights reserved
#
# superclass
from .Schema import Schema
# declaration
class Container(Schema):
"""
The base class for type declarators that are sequences of other types
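A minimal subclass sketch (illustrative only; the concrete sequence
schemata are defined elsewhere in this package)::
class List(Container):
typename = 'list'  # the name of the type
container = list   # class attribute shadowing the {container} property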
"""
# constants
typename = 'container' # the name of my type
isContainer = True
@property
def container(self):
"""
The default container represented by this schema
"""
# complain that the subclass is not constructed properly
raise NotImplementedError(
"class {.__name__} must define a {container} type".format(type(self)))
# interface
def coerce(self, value, **kwds):
"""
Convert {value} into an iterable
"""
# get the worker to build an iterable, cast it into my container type and return it
return self.container(self._coerce(value=value, **kwds))
def render(self, renderer, value, workload):
"""
Render {value} using {renderer}
"""
# get my schema
schema = self.schema
# render just my name
yield renderer.trait(name=self.name, value='')
# go through the items
for item in value:
# ask my schema to render each one
entry = ','.join(schema.render(renderer=renderer, value=item,
workload=workload, incognito=True))
# and put it on a separate line
yield renderer.value(value=f"{entry},")
# all done
return
# meta-methods
def __init__(self, default=object, schema=Schema(), **kwds):
# adjust the default; carefully, so we don't all end up using the same global container
# checking for {None} is not appropriate here; the user may want {None} as the default
# value; we need a way to know that {default} was not supplied: use a TYPE (in this
# case object) as the marker
default = self.container() if default is object else default
# chain up with my default
super().__init__(default=default, **kwds)
# save my schema
self.schema = schema
# all done
return
# end of file
| [] |
nomad-coe/electronic-parsers | electronicparsers/exciting/parser.py | defb47be6ac22b2e48d4fb9204c85390a3c2f328 | #
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD.
# See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import os
import re
import logging
from nomad.units import ureg
from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser
from nomad.datamodel.metainfo.simulation.run import Run, Program
from nomad.datamodel.metainfo.simulation.method import (
Method, DFT, Electronic, Smearing, XCFunctional, Functional,
GW as GWMethod, Scf, BasisSet
)
from nomad.datamodel.metainfo.simulation.system import (
System, Atoms
)
from nomad.datamodel.metainfo.simulation.calculation import (
Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges,
Forces, ForcesEntry, ScfIteration, BandGap
)
from nomad.datamodel.metainfo.workflow import Workflow, GeometryOptimization
from .metainfo.exciting import x_exciting_section_MT_charge_atom, x_exciting_section_MT_moment_atom,\
x_exciting_section_spin, x_exciting_section_fermi_surface,\
x_exciting_section_atoms_group
re_float = r'[-+]?\d+\.\d*(?:[Ee][-+]\d+)?'
class GWInfoParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
def str_to_frequency(val_in):
val = [v.split() for v in val_in.split('\n')]
val = np.transpose(np.array([v for v in val if len(v) == 3], float))
return dict(
number=np.array(val[0], dtype=int), values=val[1] * ureg.hartree,
weights=val[2])
# TODO Read also input parameters here if input_GW.xml does not exist
self._quantities.append(
Quantity(
'frequency_data', r'frequency list:\s*\<\s*#\s*freqs\s*weight\s*>\s*([\d\.Ee\s\-]+)',
str_operation=str_to_frequency, repeats=False)
)
self._quantities.append(
Quantity(
'fermi_energy', r'\-\s*G0W0.+\-\s*\-+\s*[\s\S]*?Fermi [Ee]nergy\s*[:=](\s*-?[\d\.]+)\s',
unit=ureg.hartree, repeats=False)
)
self._quantities.append(
Quantity(
'direct_band_gap', r'\-\s*G0W0\s*\-\s*\-+\s*[\s\S]*?Direct BandGap\s*\((?P<__unit>\w+)\)\s*\:(\s*[\d\.]+)\s',
repeats=False)
)
self._quantities.append(
Quantity(
'fundamental_band_gap', r'\-\s*G0W0\s*\-\s*\-+\s*[\s\S]*?Fundamental BandGap\s*\((?P<__unit>\w+)\)\s*\:(\s*[\d\.]+)\s',
repeats=False)
)
self._quantities.append(
Quantity(
'optical_band_gap', r'\-\s*G0W0\s*\-\s*\-+\s*[\s\S]*?Optical BandGap\s*\((?P<__unit>\w+)\)\s*\:(\s*[\d\.]+)\s',
repeats=False)
)
class ExcitingEvalqpParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
def str_to_eigenvalue(val_in):
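# block layout: the first line holds the k-point coordinates (and weight),
# the second the column labels (e.g. E_GW, Sx, Sc, Vxc, Znk, as read back
# in _parse_evalqp), and the remaining lines one row of values per state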
val = val_in.strip().split('\n')
kpts = np.array(val[0].split(), dtype=float)
keys = val[1].split()
eigs = np.transpose(np.array([v.split() for v in val[2:]], dtype=float))
eigs = {keys[i]: eigs[i] for i in range(len(keys))}
return [kpts, eigs]
self._quantities.append(
Quantity(
'kpoints_eigenvalues', r'\s*k\-point \#\s*\d+:\s*([\d\s\.\-]+)([ \w\(\)]+\n)([\s\d\.\-Ee]+)',
str_operation=str_to_eigenvalue, repeats=True))
class BandstructureDatParser(DataTextParser):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._nspin = kwargs.get('nspin', None)
self._energy_unit = kwargs.get('energy_unit', None)
def init_parameters(self):
# TODO make a parent class for bandstructure dat and xml
self._nspin = None
self._nkpts_segment = None
self._neigs_segment = None
self._vertices = None
self._distances = None
self._band_energies = None
self._band_k_points = None
@property
def band_energies(self):
if self._band_energies is None:
if self.data is None:
return
data = np.transpose(self.data)
n_kpoints = int(max(data[1]))
bands = data[6:]
bands = np.reshape(bands, (
self.number_of_spin_channels, self.number_of_band_segment_eigenvalues, n_kpoints))
self._band_energies = []
start = 0
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
band_energy = np.array([np.transpose(band)[start:end] for band in bands])
if self._energy_unit:
band_energy = band_energy * self._energy_unit
self._band_energies.append(band_energy)
start = end
return self._band_energies
@property
def band_k_points(self):
if self._band_k_points is None:
data = np.transpose(self.data)
self._band_k_points = []
start = 0
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
self._band_k_points.append(
np.transpose(data[2:5])[start:end])
start = end
return self._band_k_points
@property
def distances(self):
if self._distances is None:
data = np.transpose(self.data)
self._distances = data[5][:int(max(data[1]))]
return self._distances
@property
def number_of_spin_channels(self):
if self._nspin is None:
self._nspin = np.shape(np.transpose(self.data))[0] - 6
return self._nspin
@property
def number_of_k_points_per_segment(self):
if self._nkpts_segment is None:
self._nkpts_segment = []
count = 1
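# a segment boundary shows up as two consecutive identical distance values
# (the end point of one segment coincides with the start of the next)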
for i in range(1, len(self.distances)):
if self.distances[i] == self.distances[i - 1]:
self._nkpts_segment.append(count)
count = 1
else:
count += 1
self._nkpts_segment.append(count)
return self._nkpts_segment
@property
def number_of_band_segment_eigenvalues(self):
if self._neigs_segment is None:
data = np.transpose(self.data)
self._neigs_segment = int(max(data[0]))
return self._neigs_segment
class BandOutParser(DataTextParser):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._nspin = kwargs.get('nspin', None)
self._energy_unit = kwargs.get('energy_unit', None)
def init_parameters(self):
self._nspin = None
self._distances = None
self._band_energies = None
self._neigs_segment = None
self._nkpts_segment = None
@property
def band_energies(self):
if self._band_energies is None:
data = np.transpose(self.data)
n_kpoints = np.where(data[0] == data[0][0])[0][1]
bands = data[1:]
bands = np.reshape(bands, (
self.number_of_spin_channels, self.number_of_band_segment_eigenvalues, n_kpoints))
self._band_energies = []
start = 0
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
band_energy = np.array([np.transpose(band)[start:end] for band in bands])
if self._energy_unit:
band_energy = band_energy * self._energy_unit
self._band_energies.append(band_energy)
start = end
return self._band_energies
@property
def distances(self):
if self._distances is None:
dist = np.transpose(self.data)[0]
n_k_points = np.where(dist == dist[0])[0][1]
self._distances = dist[:n_k_points]
return self._distances
@property
def number_of_spin_channels(self):
if self._nspin is None:
self._nspin = np.shape(np.transpose(self.data)[1:])[0]
return self._nspin
@property
def number_of_k_points_per_segment(self):
if self._nkpts_segment is None:
self._nkpts_segment = []
count = 1
for i in range(1, len(self.distances)):
if self.distances[i] == self.distances[i - 1]:
self._nkpts_segment.append(count)
count = 1
else:
count += 1
self._nkpts_segment.append(count)
return self._nkpts_segment
@property
def number_of_band_segment_eigenvalues(self):
if self._neigs_segment is None:
data = np.transpose(self.data)[0]
self._neigs_segment = len(np.where(data == data[0])[0])
return self._neigs_segment
class BandstructureXMLParser(XMLParser):
def __init__(self, **kwargs):
# TODO make a parent class for dos and bandstructure
super().__init__(None)
self._distance_key = 'distance'
self._coord_key = 'coord'
self._energy_key = 'eval'
self._vertex_key = 'vertex'
self._band_key = 'band'
self._atom_key = 'atom'
self._nspin = kwargs.get('nspin', None)
self._energy_unit = kwargs.get('energy_unit', None)
def init_parameters(self):
self._nspin = None
self._nkpts_segment = None
self._neigs_segment = None
self._bands = None
self._vertices = None
self._distances = None
self._species = None
@property
def distances(self):
if self._distances is None:
if not self.bands:
return
self._distances = [
point.attrib.get(self._distance_key) for point in self.bands[0][0]]
self._distances = np.array(self._distances, dtype=float)
return self._distances
@property
def bands(self):
if self._bands is None:
bands = self.root.findall('./%s' % self._band_key)
self._bands = []
if bands:
self._bands.append(bands)
# add atom-resolved
bands_atom = self.root.findall('./*/%s' % self._atom_key)
for band in bands_atom:
self._bands.append(band.findall('./%s' % self._band_key))
return self._bands
@property
def vertices(self):
if self._vertices is None:
self._vertices = self.root.findall('./%s' % self._vertex_key)
return self._vertices
@property
def number_of_spin_channels(self):
if self._nspin is None:
self._nspin = 1
return self._nspin
@property
def number_of_k_points_per_segment(self):
if self._nkpts_segment is None:
self._nkpts_segment = []
count = 1
for i in range(1, len(self.distances)):
if self.distances[i] == self.distances[i - 1]:
self._nkpts_segment.append(count)
count = 1
else:
count += 1
self._nkpts_segment.append(count)
return self._nkpts_segment
@property
def number_of_band_segment_eigenvalues(self):
if self._neigs_segment is None:
self._neigs_segment = len(self.bands[0]) // self.number_of_spin_channels
return self._neigs_segment
def parse(self, key):
if self._results is None:
self._results = dict()
if not self.bands:
return
if key == 'band_energies':
# TODO I am not certain about the format for the spin polarized case
# I cannot find an example bandstructure file
# atom-resolved bandstructure are added as separate section_k_band
res = []
for n in range(len(self.bands)):
res_n = []
start = 0
band_energies = np.zeros((
self.number_of_spin_channels, self.number_of_band_segment_eigenvalues,
len(self.distances)), dtype=float)
for i in range(len(self.bands[n])):
band_energies[i % self.number_of_spin_channels][i] = np.array(
[e.attrib.get(self._energy_key) for e in self.bands[n][i]])
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
band_energy = np.array([
np.transpose(energy)[start:end] for energy in band_energies])
if self._energy_unit is not None:
band_energy = band_energy * self._energy_unit
res_n.append(band_energy)
start = end
res.append(res_n)
elif key == 'band_k_points':
res = []
for i in range(len(self.number_of_k_points_per_segment)):
start = np.array(
self.vertices[i].attrib.get(self._coord_key).split(), dtype=float)
end = np.array(
self.vertices[i + 1].attrib.get(self._coord_key).split(), dtype=float)
res.append(np.linspace(start, end, self.number_of_k_points_per_segment[i]))
elif key == 'band_segm_labels':
res = []
for i in range(len(self.vertices) - 1):
start = self.vertices[i].attrib.get('label')
end = self.vertices[i + 1].attrib.get('label')
res.append([
'\u0393' if start.lower() == 'gamma' else start,
'\u0393' if end.lower() == 'gamma' else end])
elif key == 'band_segm_start_end':
res = []
for i in range(len(self.number_of_k_points_per_segment)):
start = self.vertices[i].attrib.get(self._coord_key).split()
end = self.vertices[i + 1].attrib.get(self._coord_key).split()
res.append([start, end])
else:
res = None
self._results[key] = res
class DOSXMLParser(XMLParser):
def __init__(self, **kwargs):
super().__init__(None)
self._nspin_key = 'nspin'
self._totaldos_key = 'totaldos'
self._partialdos_key = 'partialdos'
self._diagram_key = 'diagram'
self._l_key = 'l'
self._m_key = 'm'
self._energy_key = 'e'
self._dos_key = 'dos'
self._unit_key = 'unit'
self._energy_unit = kwargs.get('energy_unit', None)
self._units_mapping = dict(hartree=ureg.hartree)
def init_parameters(self):
self._ndos = None
self._natoms = None
self._nspin = None
self._nlm = None
self._energies = None
self._total_dos = None
self._partial_dos = None
@property
def energy_unit(self):
if self._energy_unit is None:
axis = self.root.find('./axis')
if axis is None:
return
self._energy_unit = self._units_mapping.get(axis.attrib.get(self._unit_key).lower(), 1)
return self._energy_unit
@property
def number_of_spin_channels(self):
if self._nspin is None:
if not self.total_dos:
return
self._nspin = len(self.total_dos)
return self._nspin
@property
def number_of_atoms(self):
if self._natoms is None:
partial_dos = self.root.findall('./%s' % self._partialdos_key)
self._natoms = len(partial_dos)
return self._natoms
@property
def number_of_dos(self):
if self._ndos is None:
total_dos = self.root.find('./%s/%s' % (self._totaldos_key, self._diagram_key))
self._ndos = len(total_dos)
return self._ndos
@property
def number_of_lm(self):
if self._nlm is None:
if self.partial_dos is None:
return
self._nlm = 0
l_list = set([int(e.attrib.get(self._l_key)) for e in self.partial_dos])
for li in l_list:
self._nlm += 2 * li + 1
return self._nlm
@property
def total_dos(self):
if self._total_dos is None:
self._total_dos = self.root.findall('./%s/%s' % (self._totaldos_key, self._diagram_key))
return self._total_dos
@property
def partial_dos(self):
if self._partial_dos is None:
self._partial_dos = self.root.findall('./%s/%s' % (self._partialdos_key, self._diagram_key))
return self._partial_dos
@property
def energies(self):
if self._energies is None:
if self.total_dos is None:
return
self._energies = np.array(
[float(point.attrib.get(self._energy_key)) for point in self.total_dos[0]])
if self.energy_unit is not None:
self._energies = self._energies * self.energy_unit
return self._energies
def _get_dos(self, diagram):
dos = np.array(
[point.attrib.get(self._dos_key) for point in diagram], dtype=float)
return dos
def parse(self, key):
if self._results is None:
self._results = dict()
if 'total' in key:
if not self.total_dos:
return
res = np.zeros((self.number_of_spin_channels, self.number_of_dos))
for i in range(len(self.total_dos)):
spin = self.total_dos[i].attrib.get(self._nspin_key, i)
res[i] = self._get_dos(self._total_dos[i])
if self.energy_unit is not None:
res = res * (1 / self.energy_unit)
elif 'partial' in key:
if not self.partial_dos:
return
res = np.zeros((
self.number_of_lm, self.number_of_spin_channels, self.number_of_atoms, self.number_of_dos))
for i in range(len(self.partial_dos)):
spin = self.partial_dos[i].attrib.get(self._nspin_key, None)
if spin is None:
spin = (i % (self.number_of_spin_channels * self.number_of_lm)) // self.number_of_lm
else:
spin = int(spin) - 1
val_l = self.partial_dos[i].attrib.get(self._l_key, None)
val_m = self.partial_dos[i].attrib.get(self._m_key, None)
if val_l is None or val_m is None:
lm = i % self.number_of_lm
else:
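# flat (l, m) index: l**2 + l + m, so m runs from -l to +l within each l block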
lm = int(val_l) ** 2 + int(val_m) + int(val_l)
atom = i // (self.number_of_lm * self.number_of_spin_channels)
res[lm][spin][atom] = self._get_dos(self.partial_dos[i])
if self.energy_unit is not None:
res = res * (1 / self.energy_unit)
elif key == 'energies':
return self.energies
else:
res = None
self._results[key] = res
class ExcitingFermiSurfaceBxsfParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
self._quantities.append(
Quantity(
'fermi_energy', r'Fermi Energy:\s*([\d\.]+)\s*', unit=ureg.hartree, repeats=False))
def str_to_band_parameters(val_in):
val = val_in.strip().split('\n')
nbands = int(val[0])
mesh = np.array(val[1].split(), dtype=int)
origin = np.array(val[2].split(), dtype=float)
vector = np.array([v.split() for v in val[3:6]], dtype=float)
return [nbands, mesh, origin, vector]
self._quantities.append(
Quantity(
'band_parameters', r'BANDGRID_3D_BANDS\s*([\d\.\-Ee\s]+)',
str_operation=str_to_band_parameters, repeats=False))
self._quantities.append(
Quantity(
'fermi_surface', r'BAND:\s*\d+\s*([\d\-\+\.Ee\s]+)\n *E*', unit=ureg.hartree,
repeats=True))
class ExcitingEigenvalueParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
self._quantities.append(
Quantity(
'k_points', r'\s*\d+\s*([\d\.Ee\- ]+):\s*k\-point', repeats=True))
def str_to_eigenvalues(val_in):
val = val_in[:val_in.rfind('\n \n')].strip()
val = np.array([v.split() for v in val.split('\n')], dtype=float)
val = np.transpose(val)
occs = val[-1]
eigs = val[-2]
nspin = 2 if occs[0] == 1. else 1
data = dict()
data['occupancies'] = np.reshape(occs, (nspin, len(occs) // nspin))
data['eigenvalues'] = np.reshape(eigs, (nspin, len(eigs) // nspin))
return data
self._quantities.append(
Quantity(
'eigenvalues_occupancies', r'\(state\, eigenvalue and occupancy below\)\s*([\d\.Ee\-\s]+?(?:\n *\n))',
str_operation=str_to_eigenvalues, repeats=True))
class ExcitingGWOutParser(TextParser):
def __init__(self, mainfile, logger):
super().__init__(mainfile, logger=logger)
def init_quantities(self):
self._quantities = []
class ExcitingInfoParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
re_symbol = re.compile(r'([A-Z][a-z]?)')
def str_to_array(val_in):
val = [v.split(':')[-1].split() for v in val_in.strip().split('\n')]
val = val[0] if len(val) == 1 else val
return np.array(val, dtype=float)
def str_to_atom_properties_dict(val_in):
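# parses blocks of 'key : value' lines, e.g. 'species : Fe', 'atom 1 : ...'
# or 'total charge : ...', into a {property: value} dict plus an
# 'atom_resolved' list of (species symbol, value) pairs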
unit = None
if 'charge' in val_in:
unit = ureg.elementary_charge
elif 'moment' in val_in:
unit = ureg.elementary_charge * ureg.bohr
val = val_in.strip().split('\n')
properties = dict()
atom_resolved = []
species = None
for v in val:
v = v.strip().split(':')
if len(v) < 2:
continue
elif v[0].startswith('species'):
species = re.search(re_symbol, v[-1]).group(1)
elif v[0].startswith('atom'):
v[0] = v[0].split()
v[1] = [float(vi) for vi in v[1].split()]
v[1] = v[1][0] if len(v[1]) == 1 else v[1]
if species is None:
species = v[0][2]
atom_resolved.append(((species, v[1] * unit)))
else:
vi = [float(vii) for vii in v[1].split()]
vi = vi[0] if len(vi) == 1 else vi
properties[v[0].strip()] = vi * unit
properties['atom_resolved'] = atom_resolved
return properties
def str_to_quantity_tolerances(val_in):
return val_in.strip().replace('(', '').replace(')', '').split()
def str_to_energy_dict(val_in):
val = val_in.strip().split('\n')
energies = dict()
for v in val:
v = v.split(':')
if len(v) < 2:
continue
energies[v[0].strip()] = float(v[1]) * ureg.hartree
return energies
self._quantities = [Quantity(
'program_version', r'\s*EXCITING\s*([\w\-\(\)\. ]+)\s*started', repeats=False,
dtype=str, flatten=False)]
initialization_quantities = [
Quantity(
'lattice_vectors',
r'Lattice vectors\s*[\(cartesian\)]*\s*:\s*([\-0-9\.\s]+)\n',
str_operation=str_to_array, unit=ureg.bohr, repeats=False, convert=False),
Quantity(
'lattice_vectors_reciprocal',
r'Reciprocal lattice vectors\s*[\(cartesian\)]*\s*:\s*([\-0-9\.\s]+)\n',
str_operation=str_to_array, unit=1 / ureg.bohr, repeats=False, convert=False),
]
self._system_keys_mapping = {
'x_exciting_unit_cell_volume': ('Unit cell volume', ureg.bohr ** 3),
'x_exciting_brillouin_zone_volume': ('Brillouin zone volume', 1 / ureg.bohr ** 3),
'x_exciting_number_of_atoms': ('Total number of atoms per unit cell', None),
'x_exciting_spin_treatment': ('Spin treatment', None),
'x_exciting_number_of_bravais_lattice_symmetries': ('Number of Bravais lattice symmetries', None),
'x_exciting_number_of_crystal_symmetries': ('Number of crystal symmetries', None),
'x_exciting_kpoint_grid': (r'k\-point grid', None),
'x_exciting_kpoint_offset': (r'k\-point offset', None),
'x_exciting_number_kpoints': (r'Total number of k\-points', None),
'x_exciting_rgkmax': (r'R\^MT\_min \* \|G\+k\|\_max \(rgkmax\)', None),
'x_exciting_species_rtmin': (r'Species with R\^MT\_min', None),
'x_exciting_gkmax': (r'Maximum \|G\+k\| for APW functions', 1 / ureg.bohr),
'x_exciting_gmaxvr': (r'Maximum \|G\| for potential and density', 1 / ureg.bohr),
'x_exciting_gvector_size': (r'G\-vector grid sizes', None),
'x_exciting_gvector_total': (r'Total number of G\-vectors', None),
'x_exciting_lmaxapw': (r' APW functions', None),
'x_exciting_nuclear_charge': ('Total nuclear charge', ureg.elementary_charge),
'x_exciting_electronic_charge': ('Total electronic charge', ureg.elementary_charge),
'x_exciting_core_charge_initial': ('Total core charge', ureg.elementary_charge),
'x_exciting_valence_charge_initial': ('Total valence charge', ureg.elementary_charge),
'x_exciting_wigner_radius': (r'Effective Wigner radius, r\_s', ureg.bohr),
'x_exciting_empty_states': ('Number of empty states', None),
'x_exciting_valence_states': ('Total number of valence states', None),
'x_exciting_hamiltonian_size': ('Maximum Hamiltonian size', None),
'x_exciting_pw': (r'Maximum number of plane\-waves', None),
'x_exciting_lo': (r'Total number of local\-orbitals', None)}
self._method_keys_mapping = {
'smearing_kind': ('Smearing scheme', None),
'smearing_width': ('Smearing width', None)}
for name, key_unit in self._system_keys_mapping.items():
initialization_quantities.append(
Quantity(
name, r'%s\s*:\s*([\s\S]*?)\n' % key_unit[0], unit=key_unit[1], repeats=False)
)
for name, key_unit in self._method_keys_mapping.items():
initialization_quantities.append(
Quantity(
name, r'%s\s*:\s*([\s\S]*?)\n' % key_unit[0], unit=key_unit[1], repeats=False)
)
initialization_quantities.append(Quantity(
'species',
rf'(Species : *\d+ *\(\w+\)[\s\S]+?{re_float} *{re_float} *{re_float}\n\s*\n)',
repeats=True, sub_parser=TextParser(quantities=[
Quantity('number', r'Species : *(\d+)', dtype=np.int32),
Quantity('symbol', r'\((\w+)\)'),
Quantity('file', r'parameters loaded from *: *(.+)'),
Quantity('name', r'name *: *(.+)'),
Quantity('nuclear_charge', rf'nuclear charge *: *({re_float})', dtype=np.float64, unit=ureg.elementary_charge),
Quantity('electronic_charge', rf'electronic charge *: *({re_float})', dtype=np.float64, unit=ureg.elementary_charge),
Quantity('atomic_mass', rf'atomic mass *: *({re_float})', dtype=np.float64, unit=ureg.electron_mass),
Quantity('muffin_tin_radius', rf'muffin-tin radius *: *({re_float})', dtype=np.float64, unit=ureg.bohr),
Quantity('radial_points', rf'radial points in muffin-tin *: *({re_float})', dtype=np.int32),
Quantity('positions_format', r'atomic positions \((.+?)\)', flatten=False),
Quantity(
'positions',
rf'\d+ : *({re_float}) *({re_float}) *({re_float})',
repeats=True, dtype=np.dtype(np.float64))])))
initialization_quantities.append(Quantity(
'potential_mixing', r'Using ([\w ]+) potential mixing', repeats=False, flatten=False)
)
initialization_quantities.append(Quantity(
'xc_functional', r'(Exchange-correlation type[\s\S]+?\n *\n)',
sub_parser=TextParser(quantities=[
Quantity('type', r'Exchange-correlation type +: +(\S+)'),
Quantity(
'name_reference',
r'\n *(.+?,.+)',
str_operation=lambda x: [v.strip() for v in x.split(':')]),
Quantity(
'parameters',
r'\n *(.+?:.+)', repeats=True,
str_operation=lambda x: [v.strip() for v in x.split(':')])]))
)
self._quantities.append(Quantity(
'initialization',
r'(?:All units are atomic|Starting initialization)([\s\S]+?)(?:Using|Ending initialization)', repeats=False,
sub_parser=TextParser(quantities=initialization_quantities))
)
scf_quantities = [
Quantity(
'energy_total', r'[Tt]*otal energy\s*:\s*([\-\d\.Ee]+)', repeats=False,
dtype=float, unit=ureg.hartree),
Quantity(
'energy_contributions', r'(?:Energies|_)([\+\-\s\w\.\:]+?)\n *(?:DOS|Density)',
str_operation=str_to_energy_dict, repeats=False, convert=False),
Quantity(
'x_exciting_dos_fermi',
r'DOS at Fermi energy \(states\/Ha\/cell\)\s*:\s*([\-\d\.Ee]+)',
repeats=False, dtype=float, unit=1 / ureg.hartree),
Quantity(
'charge_contributions',
r'(?:Charges|Electron charges\s*\:*\s*)([\-\s\w\.\:\(\)]+?)\n *[A-Z\+]',
str_operation=str_to_atom_properties_dict, repeats=False, convert=False),
Quantity(
'moment_contributions',
r'(?:Moments\s*\:*\s*)([\-\s\w\.\:\(\)]+?)\n *[A-Z\+]',
str_operation=str_to_atom_properties_dict, repeats=False, convert=False)]
self._miscellaneous_keys_mapping = {
'x_exciting_gap': (r'Estimated fundamental gap', ureg.hartree),
'time': (r'Wall time \(seconds\)', ureg.s)}
for name, key_unit in self._miscellaneous_keys_mapping.items():
scf_quantities.append(Quantity(
name, r'%s\s*\:*\s*([\-\d\.Ee]+)' % key_unit[0], repeats=False,
unit=key_unit[1]))
self._convergence_keys_mapping = {
'x_exciting_effective_potential_convergence': (
r'RMS change in effective potential \(target\)', ureg.hartree),
'x_exciting_energy_convergence': (
r'Absolute change in total energy\s*\(target\)', ureg.hartree),
'x_exciting_charge_convergence': (
r'Charge distance\s*\(target\)', ureg.elementary_charge),
'x_exciting_IBS_force_convergence': (
r'Abs\. change in max\-nonIBS\-force\s*\(target\)', ureg.hartree / ureg.bohr)}
for name, key_unit in self._convergence_keys_mapping.items():
scf_quantities.append(Quantity(
name, r'%s\s*\:*\s*([\(\)\d\.\-\+Ee ]+)' % key_unit[0],
str_operation=str_to_quantity_tolerances, unit=key_unit[1], repeats=False))
module_quantities = [
Quantity(
'scf_iteration', r'(?:I| i)teration number :([\s\S]+?)(?:\n *\n\+{10}|\+\-{10})',
sub_parser=TextParser(quantities=scf_quantities), repeats=True),
Quantity(
'final',
r'(?:Convergence targets achieved\. Performing final SCF iteration|Reached self-consistent loops maximum)([\s\S]+?)(\n *\n\+{10})',
sub_parser=TextParser(quantities=scf_quantities), repeats=False),
Quantity(
'atomic_positions',
r'(Atomic positions\s*\([\s\S]+?)\n\n',
sub_parser=TextParser(quantities=[
Quantity(
'positions_format', r'Atomic positions\s*\(([a-z]+)\)'),
Quantity(
'symbols', r'atom\s*\d+\s*(\w+)', repeats=True, dtype=str),
Quantity(
'positions', r'\s*:\s*([\d\.\-]+\s*[\d\.\-]+\s*[\d\.\-]+)',
repeats=True, dtype=float)])),
Quantity(
'forces', r'Total atomic forces including IBS \(\w+\)\s*\:(\s*atom[\-\s\w\.\:]*?)\n *Atomic',
repeats=False, str_operation=str_to_array, dtype=float, unit=ureg.hartree / ureg.bohr)
]
self._quantities.append(Quantity(
'groundstate',
r'(?:Self\-consistent loop started|Groundstate module started)([\s\S]+?)Groundstate module stopped',
sub_parser=TextParser(quantities=module_quantities), repeats=False))
optimization_quantities = [
Quantity(
'atomic_positions',
r'(Atomic positions at this step\s*\([\s\S]+?)\n\n',
sub_parser=TextParser(quantities=[
Quantity(
'positions_format', r'Atomic positions at this step\s*\(([a-z]+)\)'),
Quantity(
'symbols', r'atom\s*\d+\s*(\w+)', repeats=True, dtype=str),
Quantity(
'positions', r'\s*:\s*([\d\.\-]+\s*[\d\.\-]+\s*[\d\.\-]+)',
repeats=True, dtype=float)])),
Quantity(
'forces',
r'Total atomic forces including IBS \(\w+\)\s*\:(\s*atom[\-\s\w\.\:]*?)\n *Time',
repeats=False, str_operation=str_to_array, convert=False, unit=ureg.hartree / ureg.bohr),
Quantity(
'step', r'Optimization step\s*(\d+)', repeats=False, dtype=int),
Quantity(
'method', r'method\s*=\s*(\w+)', repeats=False, dtype=str),
Quantity(
'n_scf_iterations',
r'Number of (?:total)* scf iterations\s*\:\s*(\d+)', repeats=False, dtype=int),
Quantity(
'force_convergence',
r'Maximum force magnitude\s*\(target\)\s*\:(\s*[\(\)\d\.\-\+Ee ]+)',
str_operation=str_to_quantity_tolerances, unit=ureg.hartree / ureg.bohr, repeats=False,
dtype=float),
Quantity(
'energy_total', r'Total energy at this optimization step\s*\:\s*([\-\d\.Ee]+)',
unit=ureg.hartree, repeats=False, dtype=float),
Quantity(
'time', r'Time spent in this optimization step\s*\:\s*([\-\d\.Ee]+)\s*seconds',
unit=ureg.s, repeats=False, dtype=float)
]
self._quantities.append(Quantity(
'structure_optimization',
r'Structure\-optimization module started([\s\S]+?)Structure\-optimization module stopped',
sub_parser=TextParser(quantities=[
Quantity(
'optimization_step',
r'(Optimization step\s*\d+[\s\S]+?(?:\n *\n\-{10}|Time spent in this optimization step\s*:\s*[\d\.]+ seconds))',
sub_parser=TextParser(quantities=optimization_quantities),
repeats=True),
Quantity(
'final',
r'Force convergence target achieved([\s\S]+?Opt)',
sub_parser=TextParser(quantities=scf_quantities),
repeats=False),
Quantity(
'atomic_positions',
r'(imized atomic positions\s*\([\s\S]+?)\n\n',
sub_parser=TextParser(quantities=[
Quantity(
'positions_format', r'imized atomic positions\s*\(([a-z]+)\)'),
Quantity(
'symbols', r'atom\s*\d+\s*(\w+)', repeats=True, dtype=str),
Quantity(
'positions', r'\s*:\s*([\d\.\-]+\s*[\d\.\-]+\s*[\d\.\-]+)',
repeats=True, dtype=float)])),
Quantity(
'forces',
r'Total atomic forces including IBS \(\w+\)\s*\:(\s*atom[\-\s\w\.\:]*?)\n *Atomic',
repeats=False, str_operation=str_to_array, dtype=float, unit=ureg.hartree / ureg.bohr),
]), repeats=False))
self._quantities.append(Quantity(
'hybrids',
r'Hybrids module started([\s\S]+?)Hybrids module stopped',
sub_parser=TextParser(quantities=module_quantities)
))
def get_atom_labels(self, section):
labels = section.get('symbols')
if labels is None:
# we get it by concatenating species symbols
species = self.get('initialization', {}).get('species', [])
labels = []
for specie in species:
labels += [specie.get('symbol')] * len(specie.get('positions'))
return labels
def get_positions_format(self, section):
positions_format = section.get('positions_format')
if positions_format is None:
species = self.get_initialization_parameter('species', [])
for specie in species:
positions_format = specie.get('positions_format', None)
if positions_format is not None:
break
return positions_format
def get_atom_positions(self, section={}, positions=None, positions_format=None):
positions = positions if positions is not None else section.get('positions')
if positions is None:
species = self.get_initialization_parameter('species', [])
if species:
positions = np.vstack([s.get('positions') for s in species])
if positions is None:
return
positions = np.array(positions)
positions_format = positions_format if positions_format is not None else self.get_positions_format(section)
if positions_format == 'lattice':
cell = self.get_initialization_parameter('lattice_vectors')
if cell is None:
return
positions = np.dot(positions, cell.magnitude)
return positions * ureg.bohr
def get_scf_threshold(self, name):
reference = self.get('groundstate', self.get('hybrids', {}))
return reference.get('scf_iteration', [{}])[-1].get(
name, [None, None])[-1]
def get_scf_quantity(self, name):
n_scf = len(self.get('energy_total_scf_iteration', []))
quantity = self.get('%s_scf_iteration' % name)
if quantity is None:
return
# this is really problematic if some scf steps don't have the quantity
# the only thing that we can do is to assume that the first steps are the
# ones with the missing quantity
if len(quantity) < n_scf:
quantity = [None] * (n_scf - len(quantity)) + quantity
return quantity
def get_xc_functional_name(self):
# TODO expand list to include other xcf
xc_functional_map = {
2: ['LDA_C_PZ', 'LDA_X_PZ'],
3: ['LDA_C_PW', 'LDA_X_PZ'],
4: ['LDA_C_XALPHA'],
5: ['LDA_C_VBH'],
20: ['GGA_C_PBE', 'GGA_X_PBE'],
21: ['GGA_C_PBE', 'GGA_X_PBE_R'],
22: ['GGA_C_PBE_SOL', 'GGA_X_PBE_SOL'],
26: ['GGA_C_PBE', 'GGA_X_WC'],
30: ['GGA_C_AM05', 'GGA_C_AM05'],
300: ['GGA_C_BGCP', 'GGA_X_PBE'],
406: ['HYB_GGA_XC_PBEH'],
408: ['HYB_GGA_XC_HSE03']}
xc_functional = self.get('initialization', {}).get('xc_functional', None)
if xc_functional is None:
return []
name = xc_functional_map.get(xc_functional.type, [])
return name
@property
def n_optimization_steps(self):
return len(self.get('structure_optimization', {}).get('optimization_step', []))
def get_number_of_spin_channels(self):
spin_treatment = self.get('initialization', {}).get(
'x_exciting_spin_treatment', 'spin-unpolarised')
n_spin = 1 if spin_treatment.lower() == 'spin-unpolarised' else 2
return n_spin
def get_unit_cell_volume(self):
return self.get('initialization', {}).get('x_exciting_unit_cell_volume', 1.0 * ureg.bohr ** 3)
def get_initialization_parameter(self, key, default=None):
return self.get('initialization', {}).get(key, default)
class ExcitingParser:
def __init__(self):
self.info_parser = ExcitingInfoParser()
self.dos_parser = DOSXMLParser(energy_unit=ureg.hartree)
self.bandstructure_parser = BandstructureXMLParser(energy_unit=ureg.hartree)
self.eigval_parser = ExcitingEigenvalueParser()
self.fermisurf_parser = ExcitingFermiSurfaceBxsfParser()
self.evalqp_parser = ExcitingEvalqpParser()
self.dos_out_parser = DataTextParser()
self.bandstructure_dat_parser = BandstructureDatParser(energy_unit=ureg.hartree)
self.band_out_parser = BandOutParser(energy_unit=ureg.hartree)
self.info_gw_parser = GWInfoParser()
self.input_xml_parser = XMLParser()
self.data_xs_parser = DataTextParser()
self.data_clathrate_parser = DataTextParser(dtype=str)
# different names for different versions of exciting
self._energy_keys_mapping = {
'energy_total': ['Total energy', 'total energy'],
'x_exciting_fermi_energy': ['Fermi energy', 'Fermi'],
'energy_kinetic_electronic': ['Kinetic energy', 'electronic kinetic'],
'energy_coulomb': ['Coulomb energy', 'Coulomb'],
'x_exciting_coulomb_energy': ['Coulomb energy', 'Coulomb'],
'energy_exchange': ['Exchange energy', 'exchange'],
'x_exciting_exchange_energy': ['Exchange energy', 'exchange'],
'energy_correlation': ['Correlation energy', 'correlation'],
'x_exciting_correlation_energy': ['Correlation energy', 'correlation'],
'energy_sum_eigenvalues': ['Sum of eigenvalues', 'sum of eigenvalues'],
'x_exciting_effective_potential_energy': ['Effective potential energy'],
'x_exciting_coulomb_potential_energy': ['Coulomb potential energy', 'Coulomb potential'],
'energy_xc_potential': ['xc potential energy', 'xc potential'],
'energy_electrostatic': ['Hartree energy', 'Hartree'],
'x_exciting_hartree_energy': ['Hartree energy', 'Hartree'],
'x_exciting_electron_nuclear_energy': ['Electron-nuclear energy', 'electron-nuclear '],
'x_exciting_nuclear_nuclear_energy': ['Nuclear-nuclear energy', 'nuclear-nuclear'],
'x_exciting_madelung_energy': ['Madelung energy', 'Madelung'],
'x_exciting_core_electron_kinetic_energy': ['Core-electron kinetic energy', 'core electron kinetic'],
'x_exciting_dft_d2_dispersion_correction': ['DFT-D2 dispersion correction']
}
self._electron_charge_keys_mapping = {
'x_exciting_core_charge': ['core'],
'x_exciting_core_leakage': ['core leakage'],
'x_exciting_valence_charge': ['valence'],
'x_exciting_interstitial_charge': ['interstitial'],
'x_exciting_total_MT_charge': ['total charge in muffin-tins', 'total in muffin-tins'],
'charge_total': ['total charge'],
'x_exciting_section_MT_charge_atom': ['atom_resolved']
}
self._moment_keys_mapping = {
'x_exciting_interstitial_moment': ['interstitial'],
'x_exciting_total_MT_moment': ['total moment in muffin-tins'],
'x_exciting_total_moment': ['total moment'],
'x_exciting_section_MT_moment_atom': ['atom_resolved']
}
def get_exciting_files(self, default):
mainfile = os.path.basename(self.info_parser.mainfile)
suffix = mainfile.strip('INFO.OUT')
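# e.g. a mainfile 'INFO_run.OUT' (suffix '_run') with default 'dos.xml'
# resolves to 'dos_run.xml' in the same directory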
target = default.rsplit('.', 1)
filename = '%s%s' % (target[0], suffix)
if target[1:]:
filename = '%s.%s' % (filename, target[1])
filename = os.path.join(self.info_parser.maindir, filename)
if os.path.isfile(filename):
return [filename]
filename = os.path.join(self.info_parser.maindir, default)
if not os.path.isfile(filename):
file_ext = default.split('.')[-1]
mainfile_base = mainfile.rsplit('.', 1)[0].replace('INFO', '')
options = [
f for f in os.listdir(
self.info_parser.maindir) if target[0] in f and mainfile_base in f]
options = [f for f in options if f.endswith(file_ext)]
options.sort()
filenames = [os.path.join(self.info_parser.maindir, f) for f in options]
else:
filenames = [filename]
filenames = [f for f in filenames if os.access(f, os.F_OK)]
return filenames
def file_exists(self, filename):
"""Checks if a the given filename exists and is accessible in the same
folder where the mainfile is stored.
"""
mainfile = os.path.basename(self.info_parser.mainfile)
suffix = mainfile.strip('INFO.OUT')
target = filename.rsplit('.', 1)
filepath = '%s%s' % (target[0], suffix)
if target[1:]:
filepath = '%s.%s' % (filepath, target[1])
filepath = os.path.join(self.info_parser.maindir, filepath)
if os.path.isfile(filepath) and os.access(filepath, os.F_OK):
return True
return False
def _parse_dos(self, sec_scc):
if self.dos_parser.get('totaldos', None) is None:
return
# Get fermi energy: it is used to un-shift the DOS to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
return
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
sec_dos = sec_scc.m_create(Dos, Calculation.dos_electronic)
sec_dos.n_energies = self.dos_parser.number_of_dos
sec_dos.energies = self.dos_parser.energies + energy_fermi
volume = self.info_parser.get_unit_cell_volume()
totaldos = self.dos_parser.get('totaldos') * volume.to('m**3').magnitude
for spin in range(len(totaldos)):
sec_dos_values = sec_dos.m_create(DosValues, Dos.total)
sec_dos_values.spin = spin
sec_dos_values.value = totaldos[spin]
partialdos = self.dos_parser.get('partialdos')
if partialdos is None:
return
partialdos = partialdos.to('1/joule').magnitude
lm_values = np.column_stack((np.arange(len(partialdos)), np.zeros(len(partialdos), dtype=np.int32)))
for lm in range(len(partialdos)):
for spin in range(len(partialdos[lm])):
for atom in range(len(partialdos[lm][spin])):
sec_dos_values = sec_dos.m_create(DosValues, Dos.atom_projected)
sec_dos_values.m_kind = 'spherical'
sec_dos_values.lm = lm_values[lm]
sec_dos_values.spin = spin
sec_dos_values.atom_index = atom
sec_dos_values.value = partialdos[lm][spin][atom]
def _parse_bandstructure(self, sec_scc):
# we need to set nspin again as this is overwritten when setting mainfile
self.bandstructure_parser._nspin = self.info_parser.get_number_of_spin_channels()
band_energies = self.bandstructure_parser.get('band_energies', [])
for n in range(len(band_energies)):
# Get fermi energy: it is used to un-shift the band structure to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
continue
energy_fermi = energy_fermi.to("hartree")
sec_k_band = sec_scc.m_create(BandStructure, Calculation.band_structure_electronic)
sec_k_band.energy_fermi = energy_fermi
band_k_points = self.bandstructure_parser.get('band_k_points')
nkpts_segment = self.bandstructure_parser.number_of_k_points_per_segment
band_seg_labels = self.bandstructure_parser.get('band_segm_labels')
for nb in range(len(band_energies[n])):
sec_k_band_segment = sec_k_band.m_create(BandEnergies)
sec_k_band_segment.n_kpoints = nkpts_segment[nb]
sec_k_band_segment.kpoints = band_k_points[nb]
sec_k_band_segment.endpoints_labels = band_seg_labels[nb]
sec_k_band_segment.energies = band_energies[n][nb] + energy_fermi
def _parse_eigenvalues(self, sec_scc):
if self.eigval_parser.get('eigenvalues_occupancies', None) is None:
return
nspin = self.info_parser.get_number_of_spin_channels()
def get_data(key):
data = self.eigval_parser.get('eigenvalues_occupancies')
# reshaping is not necessary as this is done in the parser; however, nspin
# is determined from the occupancies, which is sometimes problematic
res = np.hstack([np.reshape(v[key], (nspin, np.size(v[key]) // nspin)) for v in data])
res = res.reshape((len(res), len(data), len(res[0]) // len(data)))
if key == 'eigenvalues':
res = res * ureg.hartree
return res
sec_eigenvalues = sec_scc.m_create(BandEnergies)
sec_eigenvalues.kpoints = self.eigval_parser.get('k_points')
sec_eigenvalues.occupations = get_data('occupancies')
sec_eigenvalues.energies = get_data('eigenvalues')
def _parse_fermisurface(self, sec_scc):
fermi_surface = self.fermisurf_parser.get('fermi_surface', [None])[0]
if fermi_surface is None:
return
sec_fermisurface = sec_scc.m_create(x_exciting_section_fermi_surface)
band_parameters = self.fermisurf_parser.get('band_parameters', None)
if band_parameters is not None:
sec_fermisurface.x_exciting_number_of_bands_fermi_surface = band_parameters[0]
sec_fermisurface.x_exciting_number_of_mesh_points_fermi_surface = np.prod(band_parameters[1])
sec_fermisurface.x_exciting_grid_fermi_surface = band_parameters[1]
sec_fermisurface.x_exciting_origin_fermi_surface = band_parameters[2]
sec_fermisurface.x_exciting_vectors_fermi_surface = band_parameters[3]
fermi_energy = self.fermisurf_parser.get('fermi_energy', None)
if fermi_energy is not None:
sec_fermisurface.x_exciting_fermi_energy_fermi_surface = fermi_energy
sec_fermisurface.x_exciting_values_fermi_surface = fermi_surface
def _parse_evalqp(self, sec_scc):
data = self.evalqp_parser.get('kpoints_eigenvalues')
if data is None:
return
def get_data(key):
if key == 'k_points':
return np.array([d[0][:3] for d in data])
elif key == 'Znk':
return np.array([d[1].get(key, None) for d in data])
else:
energy = np.array([d[1].get(key, None) for d in data])
if None in energy:
return energy
return np.array([d[1].get(key) for d in data]) * ureg.hartree
eigs_gw = get_data('E_GW')
if eigs_gw[0] is None:
return
nspin = self.info_parser.get_number_of_spin_channels()
def reshape(data):
if data[0] is None:
return
return np.reshape(data, (nspin, len(data) // nspin, len(data[0])))
sec_gw_eigenvalues = sec_scc.m_create(BandEnergies)
sec_gw_eigenvalues.qp_linearization_prefactor = reshape(get_data('Znk'))
sec_gw_eigenvalues.n_bands = len(eigs_gw[0])
sec_gw_eigenvalues.n_kpoints = len(eigs_gw)
sec_gw_eigenvalues.kpoints = get_data('k_points')
sec_gw_eigenvalues.energies = reshape(eigs_gw)
sec_gw_eigenvalues.value_exchange = reshape(get_data('Sx'))
eigs_gw_C = reshape(get_data('Sc'))
if eigs_gw_C is None:
eigs_gw_C = reshape(get_data('Re(Sc)'))
sec_gw_eigenvalues.value_correlation = eigs_gw_C
sec_gw_eigenvalues.value_xc_potential = reshape(get_data('Vxc'))
def _parse_dos_out(self, sec_scc):
data = self.dos_out_parser.data
if data is None:
return
# Get fermi energy: it is used to un-shift the DOS to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
return
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
# TODO I am not sure about format for spin-polarized case! I assume it is
# energy dos_up dos_down
nspin = self.info_parser.get_number_of_spin_channels()
sec_dos = sec_scc.m_create(Dos, Calculation.dos_electronic)
sec_dos.n_energies = len(data) // nspin
data = np.reshape(data, (nspin, len(data) // nspin, 2))
data = np.transpose(data, axes=(2, 0, 1))
sec_dos.energies = data[0][0] * ureg.hartree + energy_fermi
volume = self.info_parser.get_unit_cell_volume()
dos = data[1] * (1 / ureg.hartree) * volume.to('m**3').magnitude
for spin in range(len(dos)):
sec_dos_values = sec_dos.m_create(DosValues, Dos.total)
sec_dos_values.spin = spin
sec_dos_values.value = dos[spin]
# TODO add PDOS
def _parse_bandstructure_dat(self, sec_scc):
self.bandstructure_dat_parser._nspin = self.info_parser.get_number_of_spin_channels()
band_energies = self.bandstructure_dat_parser.band_energies
if band_energies is None:
return
# Get fermi energy: it is used to un-shift the band structure to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
return
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
sec_k_band = sec_scc.m_create(BandStructure, Calculation.band_structure_electronic)
sec_k_band.energy_fermi = energy_fermi
band_k_points = self.bandstructure_dat_parser.band_k_points
nkpts_segment = self.bandstructure_dat_parser.number_of_k_points_per_segment
for nb in range(len(band_energies)):
sec_k_band_segment = sec_k_band.m_create(BandEnergies)
sec_k_band_segment.n_kpoints = nkpts_segment[nb]
sec_k_band_segment.kpoints = band_k_points[nb]
sec_k_band_segment.energies = band_energies[nb] + energy_fermi
def _parse_band_out(self, sec_scc):
self.band_out_parser._nspin = self.info_parser.get_number_of_spin_channels()
band_energies = self.band_out_parser.band_energies
if band_energies is None:
return
# Get fermi energy: it is used to un-shift the band structure to
# the original scale in which also other energies are reported.
energy_fermi = 0.0 * ureg.hartree
if sec_scc.energy is not None:
energy_fermi = sec_scc.energy.fermi
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
sec_k_band = sec_scc.m_create(BandStructure, Calculation.band_structure_electronic)
sec_k_band.energy_fermi = energy_fermi
nkpts_segment = self.band_out_parser.number_of_k_points_per_segment
for nb in range(len(band_energies)):
sec_k_band_segment = sec_k_band.m_create(BandEnergies)
sec_k_band_segment.n_kpoints = nkpts_segment[nb]
            sec_k_band_segment.energies = band_energies[nb] + energy_fermi
def parse_file(self, name, section):
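        """Dispatch an exciting output file to the matching parser and parse
        function based on its file name pattern."""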
# TODO add support for info.xml, wannier.out
if name.startswith('dos') and name.endswith('xml'):
parser = self.dos_parser
parser_function = self._parse_dos
elif name.startswith('bandstructure') and name.endswith('xml'):
parser = self.bandstructure_parser
parser_function = self._parse_bandstructure
elif name.startswith('EIGVAL') and name.endswith('OUT'):
parser = self.eigval_parser
parser_function = self._parse_eigenvalues
elif (name.startswith('FERMISURF') or name.startswith('FS')) and name.endswith('bxsf'):
parser = self.fermisurf_parser
parser_function = self._parse_fermisurface
elif name.startswith('EVALQP') and (name.endswith('DAT') or name.endswith('TXT')):
parser = self.evalqp_parser
parser_function = self._parse_evalqp
elif name.startswith('TDOS') and name.endswith('OUT'):
parser = self.dos_out_parser
parser_function = self._parse_dos_out
elif name.startswith('bandstructure') and name.endswith('dat'):
parser = self.bandstructure_dat_parser
parser_function = self._parse_bandstructure_dat
elif name.startswith('BAND') and name.endswith('OUT'):
parser = self.band_out_parser
parser_function = self._parse_band_out
elif name.startswith('input') and name.endswith('xml'):
parser = self.input_xml_parser
if self._calculation_type == 'gw':
parser_function = self._parse_input_gw
elif self._calculation_type == 'xs':
parser_function = self._parse_input_xs
else:
# TODO implement reading of parameters from input.xml for normal calculations
# in addition to INFO.OUT
return
else:
return
files = self.get_exciting_files(name)
if len(files) > 1:
self.logger.warn('Found multiple files. Will read all!', data=dict(file=name))
for n in range(len(files)):
parser.mainfile = files[n]
parser_function(section)
# free up memory
parser.mainfile = None
def _parse_input_xs(self, sec_method):
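        """Read excited-states (xs) input parameters from input.xml and write
        them to the given method section."""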
xstype = self.input_xml_parser.get('xs/xstype', None)
if xstype is not None:
sec_method.x_exciting_xs_xstype = xstype
sec_method.x_exciting_electronic_structure_method = xstype
sec_method.x_exciting_xs_broadening = self.input_xml_parser.get(
'xs/broad', 0.01, 'hartree')
sec_method.x_exciting_xs_gqmax = self.input_xml_parser.get(
'xs/gqmax', 0.0, '1/bohr')
sec_method.x_exciting_xs_lmaxapw = self.input_xml_parser.get('xs/lmaxapw', 10)
sec_method.x_exciting_xs_number_of_empty_states = self.input_xml_parser.get(
'xs/nempty', 5)
sec_method.x_exciting_xs_ngridq = self.input_xml_parser.get('xs/ngridq', [1, 1, 1])
sec_method.x_exciting_xs_ngridk = self.input_xml_parser.get('xs/ngridk', [1, 1, 1])
rgkmax = self.input_xml_parser.get('xs/rgkmax', None)
if rgkmax is None:
rgkmax = self.info_parser.get_initialization_parameter('x_exciting_rgkmax', 0.)
sec_method.x_exciting_xs_rgkmax = rgkmax
sec_method.x_exciting_xs_scissor = self.input_xml_parser.get('xs/scissor', 0.0)
sec_method.x_exciting_xs_vkloff = self.input_xml_parser.get('xs/vkloff', [0., 0., 0.])
# TODO I am not certain if screening/BSE are children of xs
if self.input_xml_parser.get('xs/screening') is not None:
sec_method.x_exciting_xs_screening_number_of_empty_states = self.input_xml_parser.get(
'xs/screening/nempty', 0)
sec_method.x_exciting_xs_screening_ngridk = self.input_xml_parser.get(
'xs/screening/ngridk', [0, 0, 0])
rgkmax = self.input_xml_parser.get('xs/screening/rgkmax', None)
if rgkmax is None:
rgkmax = self.info_parser.get_initialization_parameter('x_exciting_rgkmax', 0.)
sec_method.x_exciting_xs_screening_rgkmax = rgkmax
sec_method.x_exciting_xs_screening_type = self.input_xml_parser.get(
'xs/screening/screentype', 'full')
if self.input_xml_parser.get('xs/BSE') is not None:
sec_method.x_exciting_xs_bse_antiresonant = self.input_xml_parser.get(
'xs/BSE/aresbse', True)
sec_method.x_exciting_xs_bse_angular_momentum_cutoff = self.input_xml_parser.get(
'xs/BSE/lmaxdielt', 14)
rgkmax = self.input_xml_parser.get('xs/BSE/rgkmax', None)
if rgkmax is None:
rgkmax = self.info_parser.get_initialization_parameter('x_exciting_rgkmax', 0)
sec_method.x_exciting_xs_bse_rgkmax = rgkmax
sec_method.x_exciting_xs_bse_sciavbd = self.input_xml_parser.get(
'xs/BSE/sciavbd', True)
sec_method.x_exciting_xs_bse_sciavqbd = self.input_xml_parser.get(
'xs/BSE/sciavqbd', False)
sec_method.x_exciting_xs_bse_sciavqhd = self.input_xml_parser.get(
'xs/BSE/sciavqhd', False)
sec_method.x_exciting_xs_bse_sciavqwg = self.input_xml_parser.get(
'xs/BSE/sciavqwg', False)
sec_method.x_exciting_xs_bse_sciavtype = self.input_xml_parser.get(
'xs/BSE/sciavtype', 'spherical')
sec_method.x_exciting_xs_bse_xas = self.input_xml_parser.get(
'xs/BSE/xas', False)
sec_method.x_exciting_xs_bse_number_of_bands = self.input_xml_parser.get(
'xs/BSE/nstlbse', [0, 0, 0, 0])
if sec_method.x_exciting_xs_bse_xas:
sec_method.x_exciting_xs_bse_xasatom = self.input_xml_parser.get(
'xs/BSE/xasatom', 0)
sec_method.x_exciting_xs_bse_xasedge = self.input_xml_parser.get(
'xs/BSE/xasedge', 'K')
sec_method.x_exciting_xs_bse_xasspecies = self.input_xml_parser.get(
'xs/BSE/xasspecies', 0)
sec_method.x_exciting_xs_bse_xas_number_of_bands = self.input_xml_parser.get(
'xs/BSE/nstlxas', [0, 0])
if self.input_xml_parser.get('xs/tddft') is not None:
sec_method.x_exciting_xs_tddft_analytic_continuation = self.input_xml_parser.get(
'xs/tddft/acont', False)
sec_method.x_exciting_xs_tddft_anomalous_Hall_conductivity = self.input_xml_parser.get(
'xs/tddft/ahc', False)
sec_method.x_exciting_xs_tddft_anti_resonant_dielectric = self.input_xml_parser.get(
'xs/tddft/aresdf', False)
sec_method.x_exciting_xs_tddft_anti_resonant_xc_kernel = self.input_xml_parser.get(
'xs/tddft/aresfxc', True)
sec_method.x_exciting_xs_tddft_drude = self.input_xml_parser.get(
'xs/tddft/drude', [0., 0.])
sec_method.x_exciting_xs_tddft_split_parameter = self.input_xml_parser.get(
'xs/tddft/fxcbsesplit', 0.00001, 'hartree')
sec_method.x_exciting_xs_tddft_xc_kernel = self.input_xml_parser.get(
'xs/tddft/fxctype', 'RPA')
sec_method.x_exciting_xs_tddft_finite_q_intraband_contribution = self.input_xml_parser.get(
'xs/tddft/intraband', False)
sec_method.x_exciting_xs_tddft_diagonal_xc_kernel = self.input_xml_parser.get(
'xs/tddft/kerndiag', False)
sec_method.x_exciting_xs_tddft_lmax_alda = self.input_xml_parser.get(
'xs/tddft/lmaxalda', 3)
sec_method.x_exciting_xs_tddft_macroscopic_dielectric_function_q_treatment = self.input_xml_parser.get(
'xs/tddft/mdfqtype', 0)
sec_method.x_exciting_xs_tddft_analytic_continuation_number_of_intervals = self.input_xml_parser.get(
'xs/tddft/nwacont', 0)
sec_method.x_exciting_xs_tetra = self.input_xml_parser.get(
'xs/tetra/tetradf', False)
def _parse_xs_bse(self):
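        """Parse BSE output files (EXCITON, EPSILON, SIGMA, LOSS) into
        calculation sections, one per BSE-type/screening-type combination."""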
sec_run = self.archive.run[-1]
# TODO read from xml file
def get_files(name):
bse_types = ['IP', 'singlet', 'triplet', 'RPA']
scr_types = ['full', 'diag', 'noinvdiag', 'longrange']
bse_files = []
for bse_type in bse_types:
for scr_type in scr_types:
files = self.get_exciting_files(
'%s_BSE%s_SCR%s.OUT' % (name, bse_type, scr_type))
bse_files.append(files)
return bse_files
def get_data(files):
data = []
for f in files:
self.data_xs_parser.mainfile = f
if self.data_xs_parser.data is None:
continue
data.append(self.data_xs_parser.data)
return data
def parse_exciton(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
sec_scc.x_exciting_xs_bse_number_of_components = n_components
n_excitons = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_number_of_excitons = n_excitons
sec_scc.x_exciting_xs_bse_exciton_energies = np.reshape(
data[1], (n_components, n_excitons)) * ureg.hartree
sec_scc.x_exciting_xs_bse_exciton_binding_energies = np.reshape(
data[2], (n_components, n_excitons)) * ureg.hartree
sec_scc.x_exciting_xs_bse_exciton_oscillator_strength = np.reshape(
data[3], (n_components, n_excitons))
sec_scc.x_exciting_xs_bse_exciton_amplitude_re = np.reshape(
data[4], (n_components, n_excitons))
sec_scc.x_exciting_xs_bse_exciton_amplitude_im = np.reshape(
data[5], (n_components, n_excitons))
def parse_epsilon(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
n_epsilon = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_number_of_energy_points = n_epsilon
sec_scc.x_exciting_xs_bse_epsilon_energies = np.reshape(
data[0], (n_components, n_epsilon)) * ureg.hartree
sec_scc.x_exciting_xs_bse_epsilon_re = np.reshape(
data[1], (n_components, n_epsilon))
sec_scc.x_exciting_xs_bse_epsilon_im = np.reshape(
data[2], (n_components, n_epsilon))
def parse_sigma(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
n_sigma = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_sigma_energies = np.reshape(
data[0], (n_components, n_sigma)) * ureg.hartree
sec_scc.x_exciting_xs_bse_sigma_re = np.reshape(
data[1], (n_components, n_sigma))
sec_scc.x_exciting_xs_bse_sigma_im = np.reshape(
data[2], (n_components, n_sigma))
def parse_loss(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
n_loss = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_loss_energies = np.reshape(
data[0], (n_components, n_loss)) * ureg.hartree
sec_scc.x_exciting_xs_bse_loss = np.reshape(
data[1], (n_components, n_loss))
        # TODO check that the file format is really correct, i.e. that the
        # columns are what they are assumed to be. What is the fourth column in
        # epsilon, which is not parsed?
sccs = []
for quantity in ['EXCITON', 'EPSILON', 'SIGMA', 'LOSS']:
files = get_files(quantity)
for i in range(len(files)):
data = get_data(files[i])
if not data:
sccs.append(None)
continue
if quantity == 'EXCITON':
sec_scc = sec_run.m_create(Calculation)
sccs.append(sec_scc)
else:
sec_scc = sccs[i]
if sec_scc is None:
# This is the case when there is a mismatch between files
self.logger.warn(
'Mismatch in EXCITON and file type', data=dict(file=quantity))
sec_scc = sec_run.m_create(Calculation)
if quantity == 'EXCITON':
parse_function = parse_exciton
elif quantity == 'EPSILON':
parse_function = parse_epsilon
elif quantity == 'SIGMA':
parse_function = parse_sigma
elif quantity == 'LOSS':
parse_function = parse_loss
else:
continue
try:
parse_function(data, sec_scc)
except Exception:
self.logger.error('Error setting xs data', data=dict(file=quantity))
def _parse_xs_tddft(self):
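        """Parse TDDFT output files (EPSILON, LOSS, SIGMA), with and without
        local-field effects, into a calculation section."""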
sec_run = self.archive.run[-1]
fxctype = self.input_xml_parser.get('xs/tddft/fxctype', 'RPA')
tetradf = self.input_xml_parser.get('xs/tetra/tetradf', None)
nwacont = self.input_xml_parser.get('xs/tddft/nwacont', None)
aresdf = self.input_xml_parser.get('xs/tddft/aresdf', True)
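        # Output file names carry suffixes encoding run options: TET
        # (tetrahedron integration), AC (analytic continuation), NAR
        # (anti-resonant part disabled).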
file_ext_list = [
'TET' if tetradf else None, 'AC' if nwacont else None, 'NAR' if not aresdf else None]
file_ext = '_'.join([e for e in file_ext_list if e])
# read q points
qpoints = self.input_xml_parser.get('xs/qpointset/qpoint')
def get_data(quantity, ext):
# all files related to quantity at all qpoints
files = self.get_exciting_files('%s_%s%s%s.OUT' % (quantity, file_ext, ext, fxctype))
data = [[], [], []]
for i in range(len(qpoints)):
data_q = []
files_q = [f for f in files if f.endswith('QMT%s.OUT' % str(i + 1).rjust(3, '0'))]
for f in files_q:
self.data_xs_parser.mainfile = f
if self.data_xs_parser.data is None:
continue
data_q.append(self.data_xs_parser.data)
if not data_q:
continue
data_q = np.transpose(data_q, axes=(2, 0, 1))
for j in range(len(data)):
data[j].append(data_q[j])
return data
for quantity in ['EPSILON', 'LOSS', 'SIGMA']:
for ext in ['FXC', 'NLF_FXC']:
data = get_data(quantity, ext)
if not data[0]:
continue
if quantity == 'EPSILON' and ext == 'FXC':
sec_scc = sec_run.m_create(Calculation)
sec_scc.x_exciting_xs_tddft_number_of_epsilon_values = len(data[0][0][0])
sec_scc.x_exciting_xs_tddft_epsilon_energies = data[0][0][0] * ureg.hartree
sec_scc.x_exciting_xs_tddft_dielectric_function_local_field = data[1:]
elif quantity == 'EPSILON' and ext == 'NLF_FXC':
sec_scc.x_exciting_xs_tddft_dielectric_function_no_local_field = data[1:3]
elif quantity == 'LOSS' and ext == 'FXC':
sec_scc.x_exciting_xs_tddft_loss_function_local_field = data[1]
elif quantity == 'LOSS' and ext == 'NLF_FXC':
sec_scc.x_exciting_xs_tddft_loss_function_no_local_field = data[1]
elif quantity == 'SIGMA' and ext == 'FXC':
sec_scc.x_exciting_xs_tddft_sigma_local_field = data[1:3]
elif quantity == 'SIGMA' and ext == 'NLF_FXC':
sec_scc.x_exciting_xs_tddft_sigma_no_local_field = data[1:3]
def parse_xs(self):
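        """Parse an excited-states (xs) calculation if INFOXS.OUT is present,
        dispatching to the BSE or TDDFT parser according to xstype."""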
sec_run = self.archive.run[-1]
xs_info_files = self.get_exciting_files('INFOXS.OUT')
if not xs_info_files:
return
self._calculation_type = 'xs'
        # NOTE: the naming convention for the xs input xml file is inconsistent
sec_method = sec_run.m_create(Method)
sec_method_ref = self.archive.run[-1].method[0]
sec_method.starting_method_ref = sec_method_ref
sec_method.methods_ref = [sec_method_ref]
self.parse_file('input.xml', sec_method)
# parse properties
input_file = self.get_exciting_files('input.xml')
if not input_file:
return
self.input_xml_parser.mainfile = input_file[0]
xstype = self.input_xml_parser.get('xs/xstype', '')
if xstype.lower() == 'bse':
self._parse_xs_bse()
elif xstype.lower() == 'tddft':
self._parse_xs_tddft()
def _parse_input_gw(self, sec_method):
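        """Read GW input parameters from input.xml and write them to a GW
        method section."""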
sec_gw = sec_method.m_create(GWMethod)
sec_gw.type = 'G0W0'
gmaxvr = self.info_parser.get_initialization_parameter('x_exciting_gmaxvr', 0)
sec_gw.core_treatment = self.input_xml_parser.get(
'gw/coreflag', 'all')
sec_gw.polarizability_number_of_empty_states = int(
self.input_xml_parser.get('gw/nempty', 0))
sec_gw.ngridq = self.input_xml_parser.get('gw/ngridq', [1, 1, 1])
sec_gw.basis_set = 'mixed'
sec_gw.qp_equation_treatment = 'linearization'
sec_gw.max_frequency = self.input_xml_parser.get(
'gw/freqgrid/freqmax', 1.0)
sec_gw.frequency_grid_type = self.input_xml_parser.get(
'gw/freqgrid/fgrid', 'gaule2')
sec_gw.number_of_frequencies = int(self.input_xml_parser.get(
'gw/freqgrid/nomeg', 16))
sec_gw.self_energy_c_number_of_poles = int(self.input_xml_parser.get(
'gw/selfenergy/npol', 0))
sec_gw.self_energy_c_number_of_empty_states = int(self.input_xml_parser.get(
'gw/selfenergy/nempty', 0))
sec_gw.self_energy_singularity_treatment = self.input_xml_parser.get(
'gw/selfenergy/singularity', 'mpd')
sec_gw.self_energy_c_analytical_continuation = self.input_xml_parser.get(
'gw/selfenergy/actype', 'pade')
sec_gw.mixed_basis_lmax = int(self.input_xml_parser.get(
'gw/mixbasis/lmaxmb', 3))
sec_gw.mixed_basis_tolerance = self.input_xml_parser.get(
'gw/mixbasis/epsmb', 0.0001)
gmb = self.input_xml_parser.get('gw/mixbasis/gmb', 1.0)
sec_gw.mixed_basis_gmax = gmb * gmaxvr
pwm = self.input_xml_parser.get('gw/barecoul/pwm', 2.0)
sec_gw.bare_coulomb_gmax = pwm * gmb * gmaxvr
sec_gw.bare_coulomb_cutofftype = self.input_xml_parser.get(
'gw/barecoul/cutofftype', 'none')
sec_gw.screened_coulomb_volume_average = self.input_xml_parser.get(
'gw/scrcoul/sciavtype', 'isotropic')
sec_gw.screened_Coulomb = self.input_xml_parser.get(
'gw/scrcoul/scrtype', 'rpa')
def parse_gw(self):
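        """Parse a G0W0 calculation if a GW info file is present, including
        quasiparticle eigenvalues, DOS and band structure."""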
sec_run = self.archive.run[-1]
# two versions of gw info files
gw_info_files = ['GW_INFO.OUT', 'GWINFO.OUT']
for f in gw_info_files:
if self.get_exciting_files(f):
self._calculation_type = 'gw'
gw_info_file = f
break
        if self._calculation_type != 'gw':
return
sec_method = sec_run.m_create(Method)
sec_method_ref = self.archive.run[-1].method[0]
sec_method.starting_method_ref = sec_method_ref
sec_method.methods_ref = [sec_method_ref]
        # parse input xml file; there seem to be two versions, input_gw.xml and input-gw.xml
for f in ['input_gw.xml', 'input-gw.xml', 'input.xml']:
self.parse_file(f, sec_method)
xc_functional_name = ' '.join(self.info_parser.get_xc_functional_name())
sec_method.gw.starting_point = xc_functional_name
sec_scc = sec_run.m_create(Calculation)
sec_scc.method_ref = sec_method
if sec_run.system:
sec_scc.system_ref = sec_run.system[-1]
sec_scc_ref = sec_run.calculation[0]
sec_scc.starting_calculation_ref = sec_scc_ref
sec_scc.calculations_ref = [sec_scc_ref]
# parse properties
gw_info_files = self.get_exciting_files(gw_info_file)
if len(gw_info_files) > 1:
self.logger.warn('Found multiple GW info files, will read only first!')
self.info_gw_parser.mainfile = gw_info_files[0]
fermi_energy = self.info_gw_parser.get('fermi_energy', None)
if fermi_energy is not None:
sec_scc.energy = Energy(fermi=fermi_energy)
gw_files = ['EVALQP.DAT', 'EVALQP.TXT', 'TDOS-QP.OUT']
# Parse GW band structure from one of the files:
bs_files = ['bandstructure-qp.dat', 'BAND-QP.OUT']
for fname in bs_files:
if self.file_exists(fname):
gw_files.append(fname)
break
for f in gw_files:
self.parse_file(f, sec_scc)
frequency_data = self.info_gw_parser.get('frequency_data', None)
if frequency_data is not None:
number = frequency_data.get('number')
sec_method.gw.number_of_frequencies = len(number)
sec_method.gw.frequency_number = number
sec_method.gw.frequency_values = frequency_data.get('values')
sec_method.gw.frequency_weights = frequency_data.get('weights')
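        # NOTE: some GW info files appear to report the fundamental gap as
        # 'Direct BandGap'; fall back to 'fundamental_band_gap' if it is absent.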
fundamental_band_gap = self.info_gw_parser.get('direct_band_gap', None)
if fundamental_band_gap is None:
fundamental_band_gap = self.info_gw_parser.get('fundamental_band_gap', None)
sec_gap = sec_scc.eigenvalues[-1].m_create(BandGap)
if fundamental_band_gap is not None:
sec_gap.value_fundamental = fundamental_band_gap
optical_band_gap = self.info_gw_parser.get('optical_band_gap', None)
if optical_band_gap is not None:
sec_gap.value_optical = optical_band_gap
def parse_miscellaneous(self):
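        """Create the workflow section, marking geometry optimizations."""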
        sec_workflow = self.archive.m_create(Workflow)
        sec_workflow.type = 'single_point'
        structure_optimization = self.info_parser.get('structure_optimization')
        if structure_optimization is not None:
            sec_workflow.type = 'geometry_optimization'
            sec_geometry_opt = sec_workflow.m_create(GeometryOptimization)
            threshold_force = structure_optimization.get(
                'optimization_step', [{}])[0].get('force_convergence', [0., 0.])[-1]
            sec_geometry_opt.input_force_maximum_tolerance = threshold_force
def parse_method(self):
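        """Populate the method section: basis set, smearing, scf thresholds and
        xc functionals read from INFO.OUT or, as fallback, input.xml."""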
sec_run = self.archive.run[-1]
sec_method = sec_run.m_create(Method)
sec_method.basis_set.append(BasisSet(type='(L)APW+lo'))
sec_dft = sec_method.m_create(DFT)
sec_electronic = sec_method.m_create(Electronic)
sec_electronic.method = 'DFT'
smearing_kind_map = {
'Gaussian': 'gaussian', 'Methfessel-Paxton': 'methfessel-paxton',
'Fermi-Dirac': 'fermi', 'Extended': 'tetrahedra'}
sec_smearing = sec_electronic.m_create(Smearing)
smearing_kind = self.info_parser.get_initialization_parameter('smearing_kind')
if smearing_kind is not None:
if not isinstance(smearing_kind, str):
smearing_kind = smearing_kind[0]
smearing_kind = smearing_kind_map[smearing_kind]
sec_smearing.kind = smearing_kind
smearing_width = self.info_parser.get_initialization_parameter('smearing_width')
if smearing_width is not None:
smearing_width = (smearing_width * ureg.hartree).to('joule')
            # TODO smearing width should have units of energy
sec_smearing.width = smearing_width.magnitude
for name in self.info_parser._convergence_keys_mapping.keys():
threshold = self.info_parser.get_scf_threshold(name)
if threshold is None:
continue
metainfo_name = 'x_exciting_scf_threshold_%s_change' % name.split('_')[-2]
setattr(sec_method, metainfo_name, threshold)
            # additionally, set the threshold in the global metainfo
if metainfo_name == 'x_exciting_scf_threshold_energy_change':
sec_method.scf = Scf(threshold_energy_change=threshold)
xc_functional_names = self.info_parser.get_xc_functional_name()
if not xc_functional_names:
# get it from input.xml
input_file = self.get_exciting_files('input.xml')
for f in input_file:
self.input_xml_parser.mainfile = f
correlation = self.input_xml_parser.get('libxc/correlation', None)
xc_functional_names.append(correlation)
exchange = self.input_xml_parser.get('libxc/exchange', None)
xc_functional_names.append(exchange)
sec_xc_functional = sec_dft.m_create(XCFunctional)
for name in xc_functional_names:
if name is None:
continue
if '_X_' in name:
sec_xc_functional.exchange.append(Functional(name=name))
elif '_C_' in name:
sec_xc_functional.correlation.append(Functional(name=name))
elif 'HYB' in name:
sec_xc_functional.hybrid.append(Functional(name=name))
else:
sec_xc_functional.contributions.append(Functional(name=name))
if not xc_functional_names:
# simply write parameters
xc_functional = self.info_parser.get('initialization', {}).get('xc_functional')
if xc_functional is not None:
sec_xc_functional.name = xc_functional.get('name_reference', [None, None])[0]
sec_xc_functional.reference = xc_functional.get('name_reference', [None, None])[1]
sec_electronic.n_spin_channels = self.info_parser.get_number_of_spin_channels()
if self._calculation_type == 'volume_optimization':
sec_method.x_exciting_volume_optimization = True
def parse_scc(self, section):
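        """Create a calculation section from a groundstate or optimization
        section, including energies, charges, moments, forces and scf
        iterations."""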
sec_run = self.archive.run[-1]
final = section if section.get('energy_total') is not None else section.get('final')
if final is None:
# get it from last scf_iteration or optimization_step
final = section.get('scf_iteration', [None])[-1]
final = section.get('optimization_step', [None])[-1] if final is None else final
if final is None:
return
sec_scc = sec_run.m_create(Calculation)
def parse_scf(iteration, msection):
energy_total = iteration.get('energy_total')
sec_energy = msection.m_create(Energy)
if energy_total is not None:
sec_energy.total = EnergyEntry(value=energy_total)
x_exciting_dos_fermi = iteration.get('x_exciting_dos_fermi')
if x_exciting_dos_fermi is not None:
setattr(msection, 'x_exciting_dos_fermi', x_exciting_dos_fermi)
# energy contributions
energy_contributions = iteration.get('energy_contributions', {})
for key, names in self._energy_keys_mapping.items():
val = None
for name in names:
val = energy_contributions.get(name, None)
if val is not None:
break
if val is None:
continue
if key.startswith('energy_'):
sec_energy.m_add_sub_section(getattr(
Energy, key.replace('energy_', '')), EnergyEntry(value=val))
else:
setattr(msection, key, val)
if key == 'x_exciting_fermi_energy':
sec_energy.fermi = val
# charge contributions
charge_contributions = iteration.get('charge_contributions', {})
for key, names in self._electron_charge_keys_mapping.items():
val = None
for name in names:
val = charge_contributions.get(name, None)
if val is not None:
break
if val is None:
continue
if key == 'x_exciting_section_MT_charge_atom':
for n in range(len(val)):
sec_mt_charge_atom = msection.m_create(x_exciting_section_MT_charge_atom)
sec_mt_charge_atom.x_exciting_MT_charge_atom_index = n + 1
sec_mt_charge_atom.x_exciting_MT_charge_atom_symbol = val[n][0]
sec_mt_charge_atom.x_exciting_MT_charge_atom_value = val[n][1]
sec_charges = msection.m_create(Charges)
sec_charges.value = [
val[n][1].magnitude for n in range(len(val))] * val[0][1].units
sec_charges.total = charge_contributions.get('total charge')
elif key == 'charge_total':
pass
else:
setattr(msection, key, val)
# moment contributions
moment_contributions = iteration.get('moment_contributions', {})
for key, names in self._moment_keys_mapping.items():
val = None
for name in names:
val = moment_contributions.get(name, None)
if val is not None:
break
if val is None:
continue
if key == 'x_exciting_section_MT_moment_atom':
for n in range(len(val)):
sec_mt_moment_atom = msection.m_create(x_exciting_section_MT_moment_atom)
sec_mt_moment_atom.x_exciting_MT_moment_atom_index = n + 1
sec_mt_moment_atom.x_exciting_MT_moment_atom_symbol = val[n][0]
sec_mt_moment_atom.x_exciting_MT_moment_atom_value = val[n][1]
else:
setattr(msection, key, val)
# convergence values
for name in self.info_parser._convergence_keys_mapping.keys():
val = iteration.get(name)
if val is None:
continue
setattr(msection, name, val)
# other metainfo
for name in self.info_parser._miscellaneous_keys_mapping.keys():
val = iteration.get(name)
if val is None:
continue
if name == 'time':
msection.time_calculation = val
else:
setattr(msection, name, val)
# energy, moment, charge contributions
parse_scf(final, sec_scc)
# forces
forces = section.get('forces')
if forces is not None:
sec_forces = sec_scc.m_create(Forces)
sec_forces.total = ForcesEntry(value=forces)
# scf iterations
scf_iterations = section.get('scf_iteration', [])
for scf_iteration in scf_iterations:
sec_scf_iteration = sec_scc.m_create(ScfIteration)
parse_scf(scf_iteration, sec_scf_iteration)
return sec_scc
def parse_system(self, section):
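        """Create a system section with positions, labels and lattice vectors
        read from INFO.OUT or, as fallback, input.xml."""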
sec_run = self.archive.run[-1]
positions = self.info_parser.get_atom_positions(section.get('atomic_positions', {}))
lattice_vectors = self.info_parser.get_initialization_parameter('lattice_vectors')
atom_labels = self.info_parser.get_atom_labels(section.get('atomic_positions', {}))
input_file = self.get_exciting_files('input.xml')
if positions is None:
# get it from input.xml
for f in input_file:
self.input_xml_parser.mainfile = f
positions = self.input_xml_parser.get('structure/species/atom/coord')
lattice_vectors = self.input_xml_parser.get(
'structure/crystal/basevect', np.eye(3))
species = self.input_xml_parser.get('structure/species/speciesfile')
if positions is None or lattice_vectors is None or species is None:
continue
lattice_vectors = np.array(lattice_vectors, dtype=float)
lattice_vectors *= self.input_xml_parser.get('structure/crystal/scale', 1.0)
positions = np.dot(positions, lattice_vectors) * ureg.bohr
lattice_vectors = lattice_vectors * ureg.bohr
atoms = self.input_xml_parser.get('structure/species/atom')
atom_labels = []
for n in range(len(atoms)):
atom_labels.extend([species[n].split('.')[0]] * len(atoms[n]))
if positions is None or atom_labels is None:
return
sec_system = sec_run.m_create(System)
sec_atoms = sec_system.m_create(Atoms)
sec_atoms.positions = positions
sec_atoms.labels = atom_labels
sec_atoms.periodic = [True] * 3
# TODO confirm no cell optimization in exciting
sec_atoms.lattice_vectors = lattice_vectors
lattice_vectors_reciprocal = self.info_parser.get_initialization_parameter(
'lattice_vectors_reciprocal')
sec_atoms.lattice_vectors_reciprocal = lattice_vectors_reciprocal
if len(sec_run.system) > 1:
return sec_system
for name in self.info_parser._system_keys_mapping.keys():
val = self.info_parser.get_initialization_parameter(name)
if val is None:
continue
if name == 'x_exciting_spin_treatment':
sub_sec = sec_system.m_create(x_exciting_section_spin)
sub_sec.x_exciting_spin_treatment = val
elif name == 'x_exciting_species_rtmin':
setattr(sec_system, name, ' '.join([str(v) for v in val]))
else:
try:
setattr(sec_system, name, val)
except Exception:
self.logger.warn('Error setting metainfo.')
# species
species = self.info_parser.get_initialization_parameter('species', [])
for specie in species:
sec_atoms_group = sec_system.m_create(x_exciting_section_atoms_group)
sec_atoms_group.x_exciting_geometry_atom_labels = specie.get('symbol')
sec_atoms_group.x_exciting_geometry_atom_number = str(specie.get('number'))
sec_atoms_group.x_exciting_muffin_tin_points = specie.get('radial_points')
sec_atoms_group.x_exciting_muffin_tin_radius = specie.get('muffin_tin_radius')
positions_format = specie.get('positions_format')
sec_atoms_group.x_exciting_atom_position_format = positions_format
positions = specie.get('positions')
positions = self.info_parser.get_atom_positions(
positions=positions, positions_format=positions_format).to('m')
sec_atoms_group.x_exciting_geometry_atom_positions = positions.magnitude
# clathrate info
clathrate_file = self.get_exciting_files('str.out')
if clathrate_file:
sec_system.x_exciting_clathrates = True
self.data_clathrate_parser.mainfile = clathrate_file[0]
if self.data_clathrate_parser.data:
data = np.transpose(self.data_clathrate_parser.data)
sec_system.x_exciting_clathrates_atom_coordinates = np.transpose(
np.array(data[:3], dtype=float))
sec_system.x_exciting_clathrates_atom_labels = list(data[3])
else:
sec_system.x_exciting_clathrates = False
potential_mixing = self.info_parser.get_initialization_parameter('potential_mixing')
if potential_mixing is not None:
sec_system.x_exciting_potential_mixing = potential_mixing
return sec_system
def parse_configurations(self):
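        """Parse all calculations: groundstate, hybrids, structure optimization
        steps and volume optimization runs."""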
sec_run = self.archive.run[-1]
def parse_configuration(section):
if not section:
return
sec_scc = self.parse_scc(section)
if sec_scc is None:
return
sec_system = self.parse_system(section)
if sec_system is not None:
sec_scc.system_ref = sec_system
sec_scc.method_ref = sec_run.method[-1]
return sec_scc
# groundstate and hybrids calculation
for module in ['groundstate', 'hybrids']:
sec_scc = parse_configuration(self.info_parser.get(module))
if sec_scc is None:
continue
# add data to scc
# TODO add support for more output files and properties
exciting_files = ['EIGVAL.OUT', 'FERMISURF.bxsf', 'FS.bxsf']
# Parse DFT DOS from one of the files
bs_files = ['dos.xml', 'TDOS.OUT']
for fname in bs_files:
if self.file_exists(fname):
exciting_files.append(fname)
break
# Parse DFT band structure from one of the files
bs_files = ['bandstructure.xml', 'BAND.OUT', 'bandstructure.dat']
for fname in bs_files:
if self.file_exists(fname):
exciting_files.append(fname)
break
for f in exciting_files:
self.parse_file(f, sec_scc)
# structure optimization
structure_optimization = self.info_parser.get('structure_optimization', {})
for optimization_step in structure_optimization.get('optimization_step', []):
            sec_scc = parse_configuration(optimization_step)
            if sec_scc is None:
                continue
if optimization_step.get('method') is not None:
sec_scc.x_exciting_geometry_optimization_method = optimization_step.get('method')
if optimization_step.get('step') is not None:
sec_scc.x_exciting_geometry_optimization_step = optimization_step.get('step')
force_convergence = optimization_step.get('force_convergence')
if force_convergence is not None:
sec_scc.x_exciting_maximum_force_magnitude = force_convergence[0]
sec_scc.x_exciting_geometry_optimization_threshold_force = force_convergence[1]
sec_scc = parse_configuration(structure_optimization)
if sec_scc is None:
return
# volume optimizations
volume_index = 1
while True:
info_volume = self.get_exciting_files('run_dir%s/INFO.OUT' % str(volume_index).rjust(2, '0'))
if not info_volume:
break
            sec_scc.calculations_path.append(info_volume[0])
            volume_index += 1
def init_parser(self):
self.info_parser.mainfile = self.filepath
self.info_parser.logger = self.logger
self.dos_parser.logger = self.logger
self.bandstructure_parser.logger = self.logger
self.eigval_parser.logger = self.logger
self.fermisurf_parser.logger = self.logger
self.evalqp_parser.logger = self.logger
self.dos_out_parser.logger = self.logger
self.bandstructure_dat_parser.logger = self.logger
self.band_out_parser.logger = self.logger
self.info_gw_parser.logger = self.logger
self.input_xml_parser.logger = self.logger
self.data_xs_parser.logger = self.logger
self.data_clathrate_parser.logger = self.logger
def reuse_parser(self, parser):
self.info_parser.quantities = parser.info_parser.quantities
self.eigval_parser.quantities = parser.eigval_parser.quantities
self.fermisurf_parser.quantities = parser.fermisurf_parser.quantities
self.evalqp_parser.quantities = parser.evalqp_parser.quantities
self.info_gw_parser.quantities = parser.info_gw_parser.quantities
def parse(self, filepath, archive, logger):
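        """Main entry point: parse the exciting mainfile (INFO.OUT) and
        populate the archive."""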
self.filepath = filepath
self.archive = archive
self.logger = logger if logger is not None else logging
self._calculation_type = None
self.init_parser()
sec_run = self.archive.m_create(Run)
sec_run.program = Program(
name='exciting', version=self.info_parser.get('program_version', '').strip())
# method goes first since reference needed for sec_scc
self.parse_method()
self.parse_configurations()
self.parse_gw()
self.parse_xs()
self.parse_miscellaneous()
| [((22188, 22215), 're.compile', 're.compile', (['"""([A-Z][a-z]?)"""'], {}), "('([A-Z][a-z]?)')\n", (22198, 22215), False, 'import re\n'), ((39102, 39121), 'numpy.array', 'np.array', (['positions'], {}), '(positions)\n', (39110, 39121), True, 'import numpy as np\n'), ((42197, 42213), 'nomad.parsing.file_parser.DataTextParser', 'DataTextParser', ([], {}), '()\n', (42211, 42213), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((42451, 42462), 'nomad.parsing.file_parser.XMLParser', 'XMLParser', ([], {}), '()\n', (42460, 42462), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((42493, 42509), 'nomad.parsing.file_parser.DataTextParser', 'DataTextParser', ([], {}), '()\n', (42507, 42509), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((42547, 42572), 'nomad.parsing.file_parser.DataTextParser', 'DataTextParser', ([], {'dtype': 'str'}), '(dtype=str)\n', (42561, 42572), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((45150, 45193), 'os.path.basename', 'os.path.basename', (['self.info_parser.mainfile'], {}), '(self.info_parser.mainfile)\n', (45166, 45193), False, 'import os\n'), ((45423, 45471), 'os.path.join', 'os.path.join', (['self.info_parser.maindir', 'filename'], {}), '(self.info_parser.maindir, filename)\n', (45435, 45471), False, 'import os\n'), ((45484, 45508), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (45498, 45508), False, 'import os\n'), ((45560, 45607), 'os.path.join', 'os.path.join', (['self.info_parser.maindir', 'default'], {}), '(self.info_parser.maindir, default)\n', (45572, 45607), False, 'import os\n'), ((46437, 46480), 'os.path.basename', 'os.path.basename', (['self.info_parser.mainfile'], {}), '(self.info_parser.mainfile)\n', (46453, 46480), False, 'import os\n'), ((46711, 46759), 'os.path.join', 'os.path.join', (['self.info_parser.maindir', 'filepath'], {}), '(self.info_parser.maindir, filepath)\n', (46723, 46759), False, 'import os\n'), ((54545, 54579), 'numpy.transpose', 'np.transpose', (['data'], {'axes': '(2, 0, 1)'}), '(data, axes=(2, 0, 1))\n', (54557, 54579), True, 'import numpy as np\n'), ((2249, 2409), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""frequency_data"""', '"""frequency list:\\\\s*\\\\<\\\\s*#\\\\s*freqs\\\\s*weight\\\\s*>\\\\s*([\\\\d\\\\.Ee\\\\s\\\\-]+)"""'], {'str_operation': 'str_to_frequency', 'repeats': '(False)'}), "('frequency_data',\n 'frequency list:\\\\s*\\\\<\\\\s*#\\\\s*freqs\\\\s*weight\\\\s*>\\\\s*([\\\\d\\\\.Ee\\\\s\\\\-]+)'\n , str_operation=str_to_frequency, repeats=False)\n", (2257, 2409), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((2480, 2632), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""fermi_energy"""', '"""\\\\-\\\\s*G0W0.+\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Fermi [Ee]nergy\\\\s*[:=](\\\\s*-?[\\\\d\\\\.]+)\\\\s"""'], {'unit': 'ureg.hartree', 'repeats': '(False)'}), "('fermi_energy',\n '\\\\-\\\\s*G0W0.+\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Fermi [Ee]nergy\\\\s*[:=](\\\\s*-?[\\\\d\\\\.]+)\\\\s'\n , unit=ureg.hartree, repeats=False)\n", (2488, 2632), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((2701, 2861), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""direct_band_gap"""', '"""\\\\-\\\\s*G0W0\\\\s*\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Direct 
BandGap\\\\s*\\\\((?P<__unit>\\\\w+)\\\\)\\\\s*\\\\:(\\\\s*[\\\\d\\\\.]+)\\\\s"""'], {'repeats': '(False)'}), "('direct_band_gap',\n '\\\\-\\\\s*G0W0\\\\s*\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Direct BandGap\\\\s*\\\\((?P<__unit>\\\\w+)\\\\)\\\\s*\\\\:(\\\\s*[\\\\d\\\\.]+)\\\\s'\n , repeats=False)\n", (2709, 2861), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((2924, 3094), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""fundamental_band_gap"""', '"""\\\\-\\\\s*G0W0\\\\s*\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Fundamental BandGap\\\\s*\\\\((?P<__unit>\\\\w+)\\\\)\\\\s*\\\\:(\\\\s*[\\\\d\\\\.]+)\\\\s"""'], {'repeats': '(False)'}), "('fundamental_band_gap',\n '\\\\-\\\\s*G0W0\\\\s*\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Fundamental BandGap\\\\s*\\\\((?P<__unit>\\\\w+)\\\\)\\\\s*\\\\:(\\\\s*[\\\\d\\\\.]+)\\\\s'\n , repeats=False)\n", (2932, 3094), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((3157, 3319), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""optical_band_gap"""', '"""\\\\-\\\\s*G0W0\\\\s*\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Optical BandGap\\\\s*\\\\((?P<__unit>\\\\w+)\\\\)\\\\s*\\\\:(\\\\s*[\\\\d\\\\.]+)\\\\s"""'], {'repeats': '(False)'}), "('optical_band_gap',\n '\\\\-\\\\s*G0W0\\\\s*\\\\-\\\\s*\\\\-+\\\\s*[\\\\s\\\\S]*?Optical BandGap\\\\s*\\\\((?P<__unit>\\\\w+)\\\\)\\\\s*\\\\:(\\\\s*[\\\\d\\\\.]+)\\\\s'\n , repeats=False)\n", (3165, 3319), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((3898, 4073), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""kpoints_eigenvalues"""', '"""\\\\s*k\\\\-point \\\\#\\\\s*\\\\d+:\\\\s*([\\\\d\\\\s\\\\.\\\\-]+)([ \\\\w\\\\(\\\\)]+\\\\n)([\\\\s\\\\d\\\\.\\\\-Ee]+)"""'], {'str_operation': 'str_to_eigenvalue', 'repeats': '(True)'}), "('kpoints_eigenvalues',\n '\\\\s*k\\\\-point \\\\#\\\\s*\\\\d+:\\\\s*([\\\\d\\\\s\\\\.\\\\-]+)([ \\\\w\\\\(\\\\)]+\\\\n)([\\\\s\\\\d\\\\.\\\\-Ee]+)'\n , str_operation=str_to_eigenvalue, repeats=True)\n", (3906, 4073), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((4792, 4815), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (4804, 4815), True, 'import numpy as np\n'), ((4907, 5013), 'numpy.reshape', 'np.reshape', (['bands', '(self.number_of_spin_channels, self.number_of_band_segment_eigenvalues,\n n_kpoints)'], {}), '(bands, (self.number_of_spin_channels, self.\n number_of_band_segment_eigenvalues, n_kpoints))\n', (4917, 5013), True, 'import numpy as np\n'), ((5617, 5640), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (5629, 5640), True, 'import numpy as np\n'), ((6073, 6096), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (6085, 6096), True, 'import numpy as np\n'), ((6985, 7008), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (6997, 7008), True, 'import numpy as np\n'), ((7610, 7633), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (7622, 7633), True, 'import numpy as np\n'), ((7745, 7851), 'numpy.reshape', 'np.reshape', (['bands', '(self.number_of_spin_channels, self.number_of_band_segment_eigenvalues,\n n_kpoints)'], {}), '(bands, (self.number_of_spin_channels, self.\n number_of_band_segment_eigenvalues, n_kpoints))\n', (7755, 7851), True, 'import numpy as np\n'), ((10547, 10585), 'numpy.array', 'np.array', (['self._distances'], {'dtype': 'float'}), 
'(self._distances, dtype=float)\n', (10555, 10585), True, 'import numpy as np\n'), ((18088, 18148), 'numpy.zeros', 'np.zeros', (['(self.number_of_spin_channels, self.number_of_dos)'], {}), '((self.number_of_spin_channels, self.number_of_dos))\n', (18096, 18148), True, 'import numpy as np\n'), ((19927, 20026), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""fermi_energy"""', '"""Fermi Energy:\\\\s*([\\\\d\\\\.]+)\\\\s*"""'], {'unit': 'ureg.hartree', 'repeats': '(False)'}), "('fermi_energy', 'Fermi Energy:\\\\s*([\\\\d\\\\.]+)\\\\s*', unit=ureg.\n hartree, repeats=False)\n", (19935, 20026), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((20446, 20574), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""band_parameters"""', '"""BANDGRID_3D_BANDS\\\\s*([\\\\d\\\\.\\\\-Ee\\\\s]+)"""'], {'str_operation': 'str_to_band_parameters', 'repeats': '(False)'}), "('band_parameters', 'BANDGRID_3D_BANDS\\\\s*([\\\\d\\\\.\\\\-Ee\\\\s]+)',\n str_operation=str_to_band_parameters, repeats=False)\n", (20454, 20574), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((20647, 20759), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""fermi_surface"""', '"""BAND:\\\\s*\\\\d+\\\\s*([\\\\d\\\\-\\\\+\\\\.Ee\\\\s]+)\\\\n *E*"""'], {'unit': 'ureg.hartree', 'repeats': '(True)'}), "('fermi_surface', 'BAND:\\\\s*\\\\d+\\\\s*([\\\\d\\\\-\\\\+\\\\.Ee\\\\s]+)\\\\n *E*',\n unit=ureg.hartree, repeats=True)\n", (20655, 20759), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((20990, 21076), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""k_points"""', '"""\\\\s*\\\\d+\\\\s*([\\\\d\\\\.Ee\\\\- ]+):\\\\s*k\\\\-point"""'], {'repeats': '(True)'}), "('k_points', '\\\\s*\\\\d+\\\\s*([\\\\d\\\\.Ee\\\\- ]+):\\\\s*k\\\\-point', repeats\n =True)\n", (20998, 21076), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((21277, 21294), 'numpy.transpose', 'np.transpose', (['val'], {}), '(val)\n', (21289, 21294), True, 'import numpy as np\n'), ((21652, 21829), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""eigenvalues_occupancies"""', '"""\\\\(state\\\\, eigenvalue and occupancy below\\\\)\\\\s*([\\\\d\\\\.Ee\\\\-\\\\s]+?(?:\\\\n *\\\\n))"""'], {'str_operation': 'str_to_eigenvalues', 'repeats': '(True)'}), "('eigenvalues_occupancies',\n '\\\\(state\\\\, eigenvalue and occupancy below\\\\)\\\\s*([\\\\d\\\\.Ee\\\\-\\\\s]+?(?:\\\\n *\\\\n))'\n , str_operation=str_to_eigenvalues, repeats=True)\n", (21660, 21829), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((22403, 22429), 'numpy.array', 'np.array', (['val'], {'dtype': 'float'}), '(val, dtype=float)\n', (22411, 22429), True, 'import numpy as np\n'), ((24191, 24319), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""program_version"""', '"""\\\\s*EXCITING\\\\s*([\\\\w\\\\-\\\\(\\\\)\\\\. ]+)\\\\s*started"""'], {'repeats': '(False)', 'dtype': 'str', 'flatten': '(False)'}), "('program_version',\n '\\\\s*EXCITING\\\\s*([\\\\w\\\\-\\\\(\\\\)\\\\. 
]+)\\\\s*started', repeats=False,\n dtype=str, flatten=False)\n", (24199, 24319), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((24382, 24561), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""lattice_vectors"""', '"""Lattice vectors\\\\s*[\\\\(cartesian\\\\)]*\\\\s*:\\\\s*([\\\\-0-9\\\\.\\\\s]+)\\\\n"""'], {'str_operation': 'str_to_array', 'unit': 'ureg.bohr', 'repeats': '(False)', 'convert': '(False)'}), "('lattice_vectors',\n 'Lattice vectors\\\\s*[\\\\(cartesian\\\\)]*\\\\s*:\\\\s*([\\\\-0-9\\\\.\\\\s]+)\\\\n',\n str_operation=str_to_array, unit=ureg.bohr, repeats=False, convert=False)\n", (24390, 24561), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((24608, 24818), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""lattice_vectors_reciprocal"""', '"""Reciprocal lattice vectors\\\\s*[\\\\(cartesian\\\\)]*\\\\s*:\\\\s*([\\\\-0-9\\\\.\\\\s]+)\\\\n"""'], {'str_operation': 'str_to_array', 'unit': '(1 / ureg.bohr)', 'repeats': '(False)', 'convert': '(False)'}), "('lattice_vectors_reciprocal',\n 'Reciprocal lattice vectors\\\\s*[\\\\(cartesian\\\\)]*\\\\s*:\\\\s*([\\\\-0-9\\\\.\\\\s]+)\\\\n'\n , str_operation=str_to_array, unit=1 / ureg.bohr, repeats=False,\n convert=False)\n", (24616, 24818), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((29117, 29216), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""potential_mixing"""', '"""Using ([\\\\w ]+) potential mixing"""'], {'repeats': '(False)', 'flatten': '(False)'}), "('potential_mixing', 'Using ([\\\\w ]+) potential mixing', repeats=\n False, flatten=False)\n", (29125, 29216), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((30184, 30304), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""energy_total"""', '"""[Tt]*otal energy\\\\s*:\\\\s*([\\\\-\\\\d\\\\.Ee]+)"""'], {'repeats': '(False)', 'dtype': 'float', 'unit': 'ureg.hartree'}), "('energy_total', '[Tt]*otal energy\\\\s*:\\\\s*([\\\\-\\\\d\\\\.Ee]+)',\n repeats=False, dtype=float, unit=ureg.hartree)\n", (30192, 30304), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((30343, 30509), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""energy_contributions"""', '"""(?:Energies|_)([\\\\+\\\\-\\\\s\\\\w\\\\.\\\\:]+?)\\\\n *(?:DOS|Density)"""'], {'str_operation': 'str_to_energy_dict', 'repeats': '(False)', 'convert': '(False)'}), "('energy_contributions',\n '(?:Energies|_)([\\\\+\\\\-\\\\s\\\\w\\\\.\\\\:]+?)\\\\n *(?:DOS|Density)',\n str_operation=str_to_energy_dict, repeats=False, convert=False)\n", (30351, 30509), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((30542, 30706), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""x_exciting_dos_fermi"""', '"""DOS at Fermi energy \\\\(states\\\\/Ha\\\\/cell\\\\)\\\\s*:\\\\s*([\\\\-\\\\d\\\\.Ee]+)"""'], {'repeats': '(False)', 'dtype': 'float', 'unit': '(1 / ureg.hartree)'}), "('x_exciting_dos_fermi',\n 'DOS at Fermi energy \\\\(states\\\\/Ha\\\\/cell\\\\)\\\\s*:\\\\s*([\\\\-\\\\d\\\\.Ee]+)',\n repeats=False, dtype=float, unit=1 / ureg.hartree)\n", (30550, 30706), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((30753, 30951), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""charge_contributions"""', '"""(?:Charges|Electron 
charges\\\\s*\\\\:*\\\\s*)([\\\\-\\\\s\\\\w\\\\.\\\\:\\\\(\\\\)]+?)\\\\n *[A-Z\\\\+]"""'], {'str_operation': 'str_to_atom_properties_dict', 'repeats': '(False)', 'convert': '(False)'}), "('charge_contributions',\n '(?:Charges|Electron charges\\\\s*\\\\:*\\\\s*)([\\\\-\\\\s\\\\w\\\\.\\\\:\\\\(\\\\)]+?)\\\\n *[A-Z\\\\+]'\n , str_operation=str_to_atom_properties_dict, repeats=False, convert=False)\n", (30761, 30951), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((30994, 31174), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""moment_contributions"""', '"""(?:Moments\\\\s*\\\\:*\\\\s*)([\\\\-\\\\s\\\\w\\\\.\\\\:\\\\(\\\\)]+?)\\\\n *[A-Z\\\\+]"""'], {'str_operation': 'str_to_atom_properties_dict', 'repeats': '(False)', 'convert': '(False)'}), "('moment_contributions',\n '(?:Moments\\\\s*\\\\:*\\\\s*)([\\\\-\\\\s\\\\w\\\\.\\\\:\\\\(\\\\)]+?)\\\\n *[A-Z\\\\+]',\n str_operation=str_to_atom_properties_dict, repeats=False, convert=False)\n", (31002, 31174), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((33552, 33766), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""forces"""', '"""Total atomic forces including IBS \\\\(\\\\w+\\\\)\\\\s*\\\\:(\\\\s*atom[\\\\-\\\\s\\\\w\\\\.\\\\:]*?)\\\\n *Atomic"""'], {'repeats': '(False)', 'str_operation': 'str_to_array', 'dtype': 'float', 'unit': '(ureg.hartree / ureg.bohr)'}), "('forces',\n 'Total atomic forces including IBS \\\\(\\\\w+\\\\)\\\\s*\\\\:(\\\\s*atom[\\\\-\\\\s\\\\w\\\\.\\\\:]*?)\\\\n *Atomic'\n , repeats=False, str_operation=str_to_array, dtype=float, unit=ureg.\n hartree / ureg.bohr)\n", (33560, 33766), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((34683, 34897), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""forces"""', '"""Total atomic forces including IBS \\\\(\\\\w+\\\\)\\\\s*\\\\:(\\\\s*atom[\\\\-\\\\s\\\\w\\\\.\\\\:]*?)\\\\n *Time"""'], {'repeats': '(False)', 'str_operation': 'str_to_array', 'convert': '(False)', 'unit': '(ureg.hartree / ureg.bohr)'}), "('forces',\n 'Total atomic forces including IBS \\\\(\\\\w+\\\\)\\\\s*\\\\:(\\\\s*atom[\\\\-\\\\s\\\\w\\\\.\\\\:]*?)\\\\n *Time'\n , repeats=False, str_operation=str_to_array, convert=False, unit=ureg.\n hartree / ureg.bohr)\n", (34691, 34897), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((34935, 35008), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""step"""', '"""Optimization step\\\\s*(\\\\d+)"""'], {'repeats': '(False)', 'dtype': 'int'}), "('step', 'Optimization step\\\\s*(\\\\d+)', repeats=False, dtype=int)\n", (34943, 35008), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((35038, 35107), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""method"""', '"""method\\\\s*=\\\\s*(\\\\w+)"""'], {'repeats': '(False)', 'dtype': 'str'}), "('method', 'method\\\\s*=\\\\s*(\\\\w+)', repeats=False, dtype=str)\n", (35046, 35107), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((35136, 35254), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""n_scf_iterations"""', '"""Number of (?:total)* scf iterations\\\\s*\\\\:\\\\s*(\\\\d+)"""'], {'repeats': '(False)', 'dtype': 'int'}), "('n_scf_iterations',\n 'Number of (?:total)* scf iterations\\\\s*\\\\:\\\\s*(\\\\d+)', repeats=False,\n dtype=int)\n", (35144, 35254), False, 'from nomad.parsing.file_parser 
import TextParser, Quantity, XMLParser, DataTextParser\n'), ((35290, 35514), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""force_convergence"""', '"""Maximum force magnitude\\\\s*\\\\(target\\\\)\\\\s*\\\\:(\\\\s*[\\\\(\\\\)\\\\d\\\\.\\\\-\\\\+Ee ]+)"""'], {'str_operation': 'str_to_quantity_tolerances', 'unit': '(ureg.hartree / ureg.bohr)', 'repeats': '(False)', 'dtype': 'float'}), "('force_convergence',\n 'Maximum force magnitude\\\\s*\\\\(target\\\\)\\\\s*\\\\:(\\\\s*[\\\\(\\\\)\\\\d\\\\.\\\\-\\\\+Ee ]+)'\n , str_operation=str_to_quantity_tolerances, unit=ureg.hartree / ureg.\n bohr, repeats=False, dtype=float)\n", (35298, 35514), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((35568, 35716), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""energy_total"""', '"""Total energy at this optimization step\\\\s*\\\\:\\\\s*([\\\\-\\\\d\\\\.Ee]+)"""'], {'unit': 'ureg.hartree', 'repeats': '(False)', 'dtype': 'float'}), "('energy_total',\n 'Total energy at this optimization step\\\\s*\\\\:\\\\s*([\\\\-\\\\d\\\\.Ee]+)',\n unit=ureg.hartree, repeats=False, dtype=float)\n", (35576, 35716), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((35750, 35894), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""time"""', '"""Time spent in this optimization step\\\\s*\\\\:\\\\s*([\\\\-\\\\d\\\\.Ee]+)\\\\s*seconds"""'], {'unit': 'ureg.s', 'repeats': '(False)', 'dtype': 'float'}), "('time',\n 'Time spent in this optimization step\\\\s*\\\\:\\\\s*([\\\\-\\\\d\\\\.Ee]+)\\\\s*seconds'\n , unit=ureg.s, repeats=False, dtype=float)\n", (35758, 35894), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((39429, 39462), 'numpy.dot', 'np.dot', (['positions', 'cell.magnitude'], {}), '(positions, cell.magnitude)\n', (39435, 39462), True, 'import numpy as np\n'), ((45623, 45647), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (45637, 45647), False, 'import os\n'), ((46772, 46796), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (46786, 46796), False, 'import os\n'), ((46801, 46829), 'os.access', 'os.access', (['filepath', 'os.F_OK'], {}), '(filepath, os.F_OK)\n', (46810, 46829), False, 'import os\n'), ((51601, 51631), 'numpy.product', 'np.product', (['band_parameters[1]'], {}), '(band_parameters[1])\n', (51611, 51631), True, 'import numpy as np\n'), ((66982, 67029), 'numpy.reshape', 'np.reshape', (['data[3]', '(n_components, n_excitons)'], {}), '(data[3], (n_components, n_excitons))\n', (66992, 67029), True, 'import numpy as np\n'), ((67108, 67155), 'numpy.reshape', 'np.reshape', (['data[4]', '(n_components, n_excitons)'], {}), '(data[4], (n_components, n_excitons))\n', (67118, 67155), True, 'import numpy as np\n'), ((67234, 67281), 'numpy.reshape', 'np.reshape', (['data[5]', '(n_components, n_excitons)'], {}), '(data[5], (n_components, n_excitons))\n', (67244, 67281), True, 'import numpy as np\n'), ((67743, 67789), 'numpy.reshape', 'np.reshape', (['data[1]', '(n_components, n_epsilon)'], {}), '(data[1], (n_components, n_epsilon))\n', (67753, 67789), True, 'import numpy as np\n'), ((67858, 67904), 'numpy.reshape', 'np.reshape', (['data[2]', '(n_components, n_epsilon)'], {}), '(data[2], (n_components, n_epsilon))\n', (67868, 67904), True, 'import numpy as np\n'), ((68282, 68326), 'numpy.reshape', 'np.reshape', (['data[1]', '(n_components, n_sigma)'], {}), '(data[1], (n_components, n_sigma))\n', (68292, 
68326), True, 'import numpy as np\n'), ((68393, 68437), 'numpy.reshape', 'np.reshape', (['data[2]', '(n_components, n_sigma)'], {}), '(data[2], (n_components, n_sigma))\n', (68403, 68437), True, 'import numpy as np\n'), ((68807, 68850), 'numpy.reshape', 'np.reshape', (['data[1]', '(n_components, n_loss)'], {}), '(data[1], (n_components, n_loss))\n', (68817, 68850), True, 'import numpy as np\n'), ((78099, 78125), 'nomad.datamodel.metainfo.simulation.calculation.Energy', 'Energy', ([], {'fermi': 'fermi_energy'}), '(fermi=fermi_energy)\n', (78105, 78125), False, 'from nomad.datamodel.metainfo.simulation.calculation import Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges, Forces, ForcesEntry, ScfIteration, BandGap\n'), ((80248, 80274), 'nomad.datamodel.metainfo.simulation.method.BasisSet', 'BasisSet', ([], {'type': '"""(L)APW+lo"""'}), "(type='(L)APW+lo')\n", (80256, 80274), False, 'from nomad.datamodel.metainfo.simulation.method import Method, DFT, Electronic, Smearing, XCFunctional, Functional, GW as GWMethod, Scf, BasisSet\n'), ((88548, 88573), 'nomad.datamodel.metainfo.simulation.calculation.ForcesEntry', 'ForcesEntry', ([], {'value': 'forces'}), '(value=forces)\n', (88559, 88573), False, 'from nomad.datamodel.metainfo.simulation.calculation import Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges, Forces, ForcesEntry, ScfIteration, BandGap\n'), ((8447, 8470), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (8459, 8470), True, 'import numpy as np\n'), ((9409, 9432), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (9421, 9432), True, 'import numpy as np\n'), ((18538, 18644), 'numpy.zeros', 'np.zeros', (['(self.number_of_lm, self.number_of_spin_channels, self.number_of_atoms,\n self.number_of_dos)'], {}), '((self.number_of_lm, self.number_of_spin_channels, self.\n number_of_atoms, self.number_of_dos))\n', (18546, 18644), True, 'import numpy as np\n'), ((27328, 27423), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['name', "('%s\\\\s*:\\\\s*([\\\\s\\\\S]*?)\\\\n' % key_unit[0])"], {'unit': 'key_unit[1]', 'repeats': '(False)'}), "(name, '%s\\\\s*:\\\\s*([\\\\s\\\\S]*?)\\\\n' % key_unit[0], unit=key_unit[1],\n repeats=False)\n", (27336, 27423), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((27579, 27674), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['name', "('%s\\\\s*:\\\\s*([\\\\s\\\\S]*?)\\\\n' % key_unit[0])"], {'unit': 'key_unit[1]', 'repeats': '(False)'}), "(name, '%s\\\\s*:\\\\s*([\\\\s\\\\S]*?)\\\\n' % key_unit[0], unit=key_unit[1],\n repeats=False)\n", (27587, 27674), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((31491, 31591), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['name', "('%s\\\\s*\\\\:*\\\\s*([\\\\-\\\\d\\\\.Ee]+)' % key_unit[0])"], {'repeats': '(False)', 'unit': 'key_unit[1]'}), "(name, '%s\\\\s*\\\\:*\\\\s*([\\\\-\\\\d\\\\.Ee]+)' % key_unit[0], repeats=\n False, unit=key_unit[1])\n", (31499, 31591), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((32298, 32449), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['name', "('%s\\\\s*\\\\:*\\\\s*([\\\\(\\\\)\\\\d\\\\.\\\\-\\\\+Ee ]+)' % key_unit[0])"], {'str_operation': 'str_to_quantity_tolerances', 'unit': 'key_unit[1]', 'repeats': '(False)'}), "(name, '%s\\\\s*\\\\:*\\\\s*([\\\\(\\\\)\\\\d\\\\.\\\\-\\\\+Ee ]+)' % key_unit[0],\n str_operation=str_to_quantity_tolerances, unit=key_unit[1], repeats=False)\n", (32306, 32449), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((46041, 46082), 'os.path.join', 'os.path.join', (['self.info_parser.maindir', 'f'], {}), '(self.info_parser.maindir, f)\n', (46053, 46082), False, 'import os\n'), ((46196, 46217), 'os.access', 'os.access', (['f', 'os.F_OK'], {}), '(f, os.F_OK)\n', (46205, 46217), False, 'import os\n'), ((52371, 52405), 'numpy.array', 'np.array', (['[d[0][:3] for d in data]'], {}), '([d[0][:3] for d in data])\n', (52379, 52405), True, 'import numpy as np\n'), ((66416, 66431), 'numpy.vstack', 'np.vstack', (['data'], {}), '(data)\n', (66425, 66431), True, 'import numpy as np\n'), ((66689, 66736), 'numpy.reshape', 'np.reshape', (['data[1]', '(n_components, n_excitons)'], {}), '(data[1], (n_components, n_excitons))\n', (66699, 66736), True, 'import numpy as np\n'), ((66834, 66881), 'numpy.reshape', 'np.reshape', (['data[2]', '(n_components, n_excitons)'], {}), '(data[2], (n_components, n_excitons))\n', (66844, 66881), True, 'import numpy as np\n'), ((67411, 67426), 'numpy.vstack', 'np.vstack', (['data'], {}), '(data)\n', (67420, 67426), True, 'import numpy as np\n'), ((67613, 67659), 'numpy.reshape', 'np.reshape', (['data[0]', '(n_components, n_epsilon)'], {}), '(data[0], (n_components, n_epsilon))\n', (67623, 67659), True, 'import numpy as np\n'), ((68032, 68047), 'numpy.vstack', 'np.vstack', (['data'], {}), '(data)\n', (68041, 68047), True, 'import numpy as np\n'), ((68156, 68200), 'numpy.reshape', 'np.reshape', (['data[0]', '(n_components, n_sigma)'], {}), '(data[0], (n_components, n_sigma))\n', (68166, 68200), True, 'import numpy as np\n'), ((68564, 68579), 'numpy.vstack', 'np.vstack', (['data'], {}), '(data)\n', (68573, 68579), True, 'import numpy as np\n'), ((68686, 68729), 'numpy.reshape', 'np.reshape', (['data[0]', '(n_components, n_loss)'], {}), '(data[0], (n_components, n_loss))\n', (68696, 68729), True, 'import numpy as np\n'), ((71830, 71866), 'numpy.transpose', 'np.transpose', (['data_q'], {'axes': '(2, 0, 1)'}), '(data_q, axes=(2, 0, 1))\n', (71842, 71866), True, 'import numpy as np\n'), ((81826, 81864), 'nomad.datamodel.metainfo.simulation.method.Scf', 'Scf', ([], {'threshold_energy_change': 'threshold'}), '(threshold_energy_change=threshold)\n', (81829, 81864), False, 'from nomad.datamodel.metainfo.simulation.method import Method, DFT, Electronic, Smearing, XCFunctional, Functional, GW as GWMethod, Scf, BasisSet\n'), ((84341, 84372), 'nomad.datamodel.metainfo.simulation.calculation.EnergyEntry', 'EnergyEntry', ([], {'value': 'energy_total'}), '(value=energy_total)\n', (84352, 84372), False, 'from nomad.datamodel.metainfo.simulation.calculation import Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges, Forces, ForcesEntry, ScfIteration, BandGap\n'), ((89850, 89888), 'numpy.array', 'np.array', (['lattice_vectors'], {'dtype': 'float'}), '(lattice_vectors, dtype=float)\n', (89858, 89888), True, 'import numpy as np\n'), ((92984, 93029), 'numpy.transpose', 'np.transpose', (['self.data_clathrate_parser.data'], {}), '(self.data_clathrate_parser.data)\n', (92996, 93029), True, 'import numpy as np\n'), ((2033, 2060), 'numpy.array', 'np.array', (['val[0]'], {'dtype': 'int'}), '(val[0], dtype=int)\n', (2041, 2060), True, 'import numpy as np\n'), ((7658, 7689), 'numpy.where', 'np.where', (['(data[0] == data[0][0])'], {}), '(data[0] == data[0][0])\n', (7666, 7689), True, 'import numpy as np\n'), ((8499, 8524), 'numpy.where', 'np.where', (['(dist == dist[0])'], {}), '(dist == dist[0])\n', (8507, 8524), True, 'import numpy as np\n'), ((9474, 9499), 'numpy.where', 'np.where', (['(data == data[0])'], {}), '(data == data[0])\n', (9482, 9499), True, 'import numpy as np\n'), ((30084, 30132), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'initialization_quantities'}), '(quantities=initialization_quantities)\n', (30094, 30132), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((32650, 32687), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'scf_quantities'}), '(quantities=scf_quantities)\n', (32660, 32687), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((32926, 32963), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'scf_quantities'}), '(quantities=scf_quantities)\n', (32936, 32963), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((33991, 34031), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'module_quantities'}), '(quantities=module_quantities)\n', (34001, 34031), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((37777, 37817), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'module_quantities'}), '(quantities=module_quantities)\n', (37787, 37817), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((45821, 45857), 'os.listdir', 'os.listdir', (['self.info_parser.maindir'], {}), '(self.info_parser.maindir)\n', (45831, 45857), False, 'import os\n'), ((82660, 82681), 'nomad.datamodel.metainfo.simulation.method.Functional', 'Functional', ([], {'name': 'name'}), '(name=name)\n', (82670, 82681), False, 'from nomad.datamodel.metainfo.simulation.method import Method, DFT, Electronic, Smearing, XCFunctional, Functional, GW as GWMethod, Scf, BasisSet\n'), ((89606, 89615), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (89612, 89615), True, 'import numpy as np\n'), ((90010, 90044), 'numpy.dot', 'np.dot', (['positions', 'lattice_vectors'], {}), '(positions, lattice_vectors)\n', (90016, 90044), True, 'import numpy as np\n'), ((93132, 93163), 'numpy.array', 'np.array', (['data[:3]'], {'dtype': 'float'}), '(data[:3], dtype=float)\n', (93140, 93163), True, 'import numpy as np\n'), ((5878, 5901), 'numpy.transpose', 'np.transpose', (['data[2:5]'], {}), '(data[2:5])\n', (5890, 5901), True, 'import numpy as np\n'), ((6308, 6331), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (6320, 6331), True, 'import numpy as np\n'), ((8732, 8755), 'numpy.transpose', 'np.transpose', (['self.data'], {}), '(self.data)\n', (8744, 8755), True, 'import numpy as np\n'), ((13936, 13999), 'numpy.linspace', 'np.linspace', (['start', 'end', 'self.number_of_k_points_per_segment[i]'], {}), '(start, end, self.number_of_k_points_per_segment[i])\n', (13947, 13999), True, 'import numpy as np\n'), ((82768, 82789), 'nomad.datamodel.metainfo.simulation.method.Functional', 'Functional', ([], {'name': 'name'}), '(name=name)\n', (82778, 82789), False, 'from nomad.datamodel.metainfo.simulation.method import Method, DFT, Electronic, Smearing, XCFunctional, Functional, GW as GWMethod, Scf, BasisSet\n'), ((85175, 85197), 'nomad.datamodel.metainfo.simulation.calculation.EnergyEntry', 'EnergyEntry', ([], {'value': 'val'}), '(value=val)\n', (85186, 85197), False, 'from nomad.datamodel.metainfo.simulation.calculation import Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges, Forces, ForcesEntry, ScfIteration, BandGap\n'), ((5240, 5258), 'numpy.transpose', 'np.transpose', (['band'], {}), '(band)\n', (5252, 5258), True, 'import numpy as np\n'), ((8078, 8096), 'numpy.transpose', 'np.transpose', (['band'], {}), '(band)\n', (8090, 8096), True, 'import numpy as np\n'), ((82871, 82892), 'nomad.datamodel.metainfo.simulation.method.Functional', 'Functional', ([], {'name': 'name'}), '(name=name)\n', (82881, 82892), False, 'from nomad.datamodel.metainfo.simulation.method import Method, DFT, Electronic, Smearing, XCFunctional, Functional, GW as GWMethod, Scf, BasisSet\n'), ((82967, 82988), 'nomad.datamodel.metainfo.simulation.method.Functional', 'Functional', ([], {'name': 'name'}), '(name=name)\n', (82977, 82988), False, 'from nomad.datamodel.metainfo.simulation.method import Method, DFT, Electronic, Smearing, XCFunctional, Functional, GW as GWMethod, Scf, BasisSet\n'), ((13237, 13257), 'numpy.transpose', 'np.transpose', (['energy'], {}), '(energy)\n', (13249, 13257), True, 'import numpy as np\n'), ((23024, 23051), 're.search', 're.search', (['re_symbol', 'v[-1]'], {}), '(re_symbol, v[-1])\n', (23033, 23051), False, 'import re\n'), ((27946, 28001), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""number"""', '"""Species : *(\\\\d+)"""'], {'dtype': 'np.int32'}), "('number', 'Species : *(\\\\d+)', dtype=np.int32)\n", (27954, 28001), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28019, 28053), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""symbol"""', '"""\\\\((\\\\w+)\\\\)"""'], {}), "('symbol', '\\\\((\\\\w+)\\\\)')\n", (28027, 28053), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28069, 28120), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""file"""', '"""parameters loaded from *: *(.+)"""'], {}), "('file', 'parameters loaded from *: *(.+)')\n", (28077, 28120), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28139, 28172), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""name"""', '"""name *: *(.+)"""'], {}), "('name', 'name *: *(.+)')\n", (28147, 28172), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28191, 28305), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""nuclear_charge"""', 'f"""nuclear charge *: *({re_float})"""'], {'dtype': 'np.float64', 'unit': 'ureg.elementary_charge'}), "('nuclear_charge', f'nuclear charge *: *({re_float})', dtype=np.\n float64, unit=ureg.elementary_charge)\n", (28199, 28305), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28319, 28439), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""electronic_charge"""', 'f"""electronic charge *: *({re_float})"""'], {'dtype': 'np.float64', 'unit': 'ureg.elementary_charge'}), "('electronic_charge', f'electronic charge *: *({re_float})', dtype=\n np.float64, unit=ureg.elementary_charge)\n", (28327, 28439), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28453, 28556), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""atomic_mass"""', 'f"""atomic mass *: *({re_float})"""'], {'dtype': 'np.float64', 'unit': 'ureg.electron_mass'}), "('atomic_mass', f'atomic mass *: *({re_float})', dtype=np.float64,\n unit=ureg.electron_mass)\n", (28461, 28556), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28571, 28678), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""muffin_tin_radius"""', 'f"""muffin-tin radius *: *({re_float})"""'], {'dtype': 'np.float64', 'unit': 'ureg.bohr'}), "('muffin_tin_radius', f'muffin-tin radius *: *({re_float})', dtype=\n np.float64, unit=ureg.bohr)\n", (28579, 28678), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28692, 28786), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""radial_points"""', 'f"""radial points in muffin-tin *: *({re_float})"""'], {'dtype': 'np.int32'}), "('radial_points', f'radial points in muffin-tin *: *({re_float})',\n dtype=np.int32)\n", (28700, 28786), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((28801, 28876), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions_format"""', '"""atomic positions \\\\((.+?)\\\\)"""'], {'flatten': '(False)'}), "('positions_format', 'atomic positions \\\\((.+?)\\\\)', flatten=False)\n", (28809, 28876), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((29425, 29481), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""type"""', '"""Exchange-correlation type +: +(\\\\S+)"""'], {}), "('type', 'Exchange-correlation type +: +(\\\\S+)')\n", (29433, 29481), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((33166, 33232), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions_format"""', '"""Atomic positions\\\\s*\\\\(([a-z]+)\\\\)"""'], {}), "('positions_format', 'Atomic positions\\\\s*\\\\(([a-z]+)\\\\)')\n", (33174, 33232), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((33277, 33347), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""symbols"""', '"""atom\\\\s*\\\\d+\\\\s*(\\\\w+)"""'], {'repeats': '(True)', 'dtype': 'str'}), "('symbols', 'atom\\\\s*\\\\d+\\\\s*(\\\\w+)', repeats=True, dtype=str)\n", (33285, 33347), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((33391, 33506), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions"""', '"""\\\\s*:\\\\s*([\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+)"""'], {'repeats': '(True)', 'dtype': 'float'}), "('positions',\n '\\\\s*:\\\\s*([\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+)', repeats=True,\n dtype=float)\n", (33399, 33506), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((34284, 34363), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions_format"""', '"""Atomic positions at this step\\\\s*\\\\(([a-z]+)\\\\)"""'], {}), "('positions_format', 'Atomic positions at this step\\\\s*\\\\(([a-z]+)\\\\)')\n", (34292, 34363), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((34408, 34478), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""symbols"""', '"""atom\\\\s*\\\\d+\\\\s*(\\\\w+)"""'], {'repeats': '(True)', 'dtype': 'str'}), "('symbols', 'atom\\\\s*\\\\d+\\\\s*(\\\\w+)', repeats=True, dtype=str)\n", (34416, 34478), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((34522, 34637), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions"""', '"""\\\\s*:\\\\s*([\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+)"""'], {'repeats': '(True)', 'dtype': 'float'}), "('positions',\n '\\\\s*:\\\\s*([\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+)', repeats=True,\n dtype=float)\n", (34530, 34637), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((37333, 37547), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""forces"""', '"""Total atomic forces including IBS \\\\(\\\\w+\\\\)\\\\s*\\\\:(\\\\s*atom[\\\\-\\\\s\\\\w\\\\.\\\\:]*?)\\\\n *Atomic"""'], {'repeats': '(False)', 'str_operation': 'str_to_array', 'dtype': 'float', 'unit': '(ureg.hartree / ureg.bohr)'}), "('forces',\n 'Total atomic forces including IBS \\\\(\\\\w+\\\\)\\\\s*\\\\:(\\\\s*atom[\\\\-\\\\s\\\\w\\\\.\\\\:]*?)\\\\n *Atomic'\n , repeats=False, str_operation=str_to_array, dtype=float, unit=ureg.\n hartree / ureg.bohr)\n", (37341, 37547), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((50587, 50602), 'numpy.size', 'np.size', (['v[key]'], {}), '(v[key])\n', (50594, 50602), True, 'import numpy as np\n'), ((29049, 29069), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (29057, 29069), True, 'import numpy as np\n'), ((36385, 36431), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'optimization_quantities'}), '(quantities=optimization_quantities)\n', (36395, 36431), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((36625, 36662), 'nomad.parsing.file_parser.TextParser', 'TextParser', ([], {'quantities': 'scf_quantities'}), '(quantities=scf_quantities)\n', (36635, 36662), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((36912, 36985), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions_format"""', '"""imized atomic positions\\\\s*\\\\(([a-z]+)\\\\)"""'], {}), "('positions_format', 'imized atomic positions\\\\s*\\\\(([a-z]+)\\\\)')\n", (36920, 36985), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((37038, 37108), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""symbols"""', '"""atom\\\\s*\\\\d+\\\\s*(\\\\w+)"""'], {'repeats': '(True)', 'dtype': 'str'}), "('symbols', 'atom\\\\s*\\\\d+\\\\s*(\\\\w+)', repeats=True, dtype=str)\n", (37046, 37108), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n'), ((37160, 37275), 'nomad.parsing.file_parser.Quantity', 'Quantity', (['"""positions"""', '"""\\\\s*:\\\\s*([\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+)"""'], {'repeats': '(True)', 'dtype': 'float'}), "('positions',\n '\\\\s*:\\\\s*([\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+\\\\s*[\\\\d\\\\.\\\\-]+)', repeats=True,\n dtype=float)\n", (37168, 37275), False, 'from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser\n')] |
KZzizzle/osparc-simcore | services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py | 981bc8d193f3f5d507e3225f857e0308c339e163 | # coding: utf-8
"""
simcore-service-storage API
API definition for simcore-service-storage service # noqa: E501
OpenAPI spec version: 0.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from simcore_service_storage_sdk.api_client import ApiClient
class UsersApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def check_action_post(self, action, **kwargs): # noqa: E501
"""Test checkpoint to ask server to fail or echo back the transmitted data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_action_post(action, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: (required)
:param str data:
:param FakeType fake_type:
:return: FakeEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.check_action_post_with_http_info(action, **kwargs) # noqa: E501
else:
(data) = self.check_action_post_with_http_info(action, **kwargs) # noqa: E501
return data
def check_action_post_with_http_info(self, action, **kwargs): # noqa: E501
"""Test checkpoint to ask server to fail or echo back the transmitted data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_action_post_with_http_info(action, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: (required)
:param str data:
:param FakeType fake_type:
:return: FakeEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['action', 'data', 'fake_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_action_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'action' is set
if ('action' not in local_var_params or
local_var_params['action'] is None):
raise ValueError("Missing the required parameter `action` when calling `check_action_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'action' in local_var_params:
path_params['action'] = local_var_params['action'] # noqa: E501
query_params = []
if 'data' in local_var_params:
query_params.append(('data', local_var_params['data'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'fake_type' in local_var_params:
body_params = local_var_params['fake_type']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/check/{action}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FakeEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Deletes File # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.delete_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def delete_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Deletes File # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `delete_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `delete_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `delete_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def download_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns download link for requested file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.download_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.download_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def download_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns download link for requested file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method download_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `download_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `download_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `download_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PresignedLinkEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_file_metadata(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Get File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_metadata(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_file_metadata_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_file_metadata_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def get_file_metadata_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Get File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_metadata_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_file_metadata" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `get_file_metadata`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `get_file_metadata`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_file_metadata`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}/metadata', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_files_metadata(self, location_id, user_id, **kwargs): # noqa: E501
"""Get Files Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_files_metadata(location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str location_id: (required)
:param str user_id: (required)
:param str uuid_filter:
:return: FileMetaDataArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_files_metadata_with_http_info(location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_files_metadata_with_http_info(location_id, user_id, **kwargs) # noqa: E501
return data
def get_files_metadata_with_http_info(self, location_id, user_id, **kwargs): # noqa: E501
"""Get Files Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_files_metadata_with_http_info(location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str location_id: (required)
:param str user_id: (required)
:param str uuid_filter:
:return: FileMetaDataArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['location_id', 'user_id', 'uuid_filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_files_metadata" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `get_files_metadata`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_files_metadata`") # noqa: E501
collection_formats = {}
path_params = {}
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
if 'uuid_filter' in local_var_params:
query_params.append(('uuid_filter', local_var_params['uuid_filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/metadata', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataArrayEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_storage_locations(self, user_id, **kwargs): # noqa: E501
"""Get available storage locations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_storage_locations(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: (required)
:return: FileLocationArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_storage_locations_with_http_info(user_id, **kwargs) # noqa: E501
else:
(data) = self.get_storage_locations_with_http_info(user_id, **kwargs) # noqa: E501
return data
def get_storage_locations_with_http_info(self, user_id, **kwargs): # noqa: E501
"""Get available storage locations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_storage_locations_with_http_info(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: (required)
:return: FileLocationArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_storage_locations" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_storage_locations`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileLocationArrayEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def health_check(self, **kwargs): # noqa: E501
"""Service health-check endpoint # noqa: E501
Some general information on the API and state of the service behind # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.health_check(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.health_check_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.health_check_with_http_info(**kwargs) # noqa: E501
return data
def health_check_with_http_info(self, **kwargs): # noqa: E501
"""Service health-check endpoint # noqa: E501
Some general information on the API and state of the service behind # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.health_check_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method health_check" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HealthCheckEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_file_meta_data(self, file_id, location_id, **kwargs): # noqa: E501
"""Update File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_file_meta_data(file_id, location_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param FileMetaDataType file_meta_data_type:
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_file_meta_data_with_http_info(file_id, location_id, **kwargs) # noqa: E501
else:
(data) = self.update_file_meta_data_with_http_info(file_id, location_id, **kwargs) # noqa: E501
return data
def update_file_meta_data_with_http_info(self, file_id, location_id, **kwargs): # noqa: E501
"""Update File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_file_meta_data_with_http_info(file_id, location_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param FileMetaDataType file_meta_data_type:
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'file_meta_data_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_file_meta_data" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `update_file_meta_data`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `update_file_meta_data`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'file_meta_data_type' in local_var_params:
body_params = local_var_params['file_meta_data_type']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}/metadata', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def upload_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns upload link or performs copy operation to datcore # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:param str extra_location:
:param str extra_source:
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.upload_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.upload_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def upload_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns upload link or performs copy operation to datcore # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:param str extra_location:
:param str extra_source:
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id', 'extra_location', 'extra_source'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method upload_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `upload_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `upload_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `upload_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
if 'extra_location' in local_var_params:
query_params.append(('extra_location', local_var_params['extra_location'])) # noqa: E501
if 'extra_source' in local_var_params:
query_params.append(('extra_source', local_var_params['extra_source'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PresignedLinkEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
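# ---------------------------------------------------------------------------
# Illustrative usage sketch (an addition for documentation; the user/location/
# file ids below are placeholders and a running storage service is assumed):
if __name__ == "__main__":
    api = UsersApi(ApiClient())
    # List the storage locations available to a user.
    locations = api.get_storage_locations(user_id="0")
    # List file metadata at a location, then fetch a presigned download link.
    files = api.get_files_metadata(location_id="0", user_id="0")
    link = api.download_file(file_id="some-file-id", location_id="0", user_id="0")
    print(locations, files, link)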
| [((2747, 2788), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (2760, 2788), False, 'import six\n'), ((7028, 7069), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (7041, 7069), False, 'import six\n'), ((11844, 11885), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (11857, 11885), False, 'import six\n'), ((16661, 16702), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (16674, 16702), False, 'import six\n'), ((21456, 21497), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (21469, 21497), False, 'import six\n'), ((25778, 25819), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (25791, 25819), False, 'import six\n'), ((29474, 29515), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (29487, 29515), False, 'import six\n'), ((33039, 33080), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (33052, 33080), False, 'import six\n'), ((38004, 38045), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (38017, 38045), False, 'import six\n'), ((699, 710), 'simcore_service_storage_sdk.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (708, 710), False, 'from simcore_service_storage_sdk.api_client import ApiClient\n')] |
mattiolato98/reservation-ninja | reservation_management/migrations/0021_delete_greenpass.py | 0e50b218dd9d90f134868bade2ec2934283c12b5 | # Generated by Django 3.2.7 on 2021-10-22 14:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('reservation_management', '0020_greenpass'),
]
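    # Drops the GreenPass model (added in migration 0020_greenpass) and its table.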
operations = [
migrations.DeleteModel(
name='GreenPass',
),
]
| [((233, 273), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""GreenPass"""'}), "(name='GreenPass')\n", (255, 273), False, 'from django.db import migrations\n')] |
bionet/ted.python | demos/iaf_pop_demo.py | 1698a7f792db23123003ae4e2d39b4c18f25f347 | #!/usr/bin/env python
"""
Demos of encoding and decoding algorithms using populations of
IAF neurons.
"""
# Copyright (c) 2009-2015, Lev Givon
# All rights reserved.
# Distributed under the terms of the BSD license:
# http://www.opensource.org/licenses/bsd-license
import sys
import numpy as np
# Set matplotlib backend so that plots can be generated without a
# display:
import matplotlib
matplotlib.use('AGG')
from bionet.utils.misc import func_timer
import bionet.utils.band_limited as bl
import bionet.utils.plotting as pl
import bionet.ted.iaf as iaf
# For determining output plot file names:
output_name = 'iaf_pop_demo_'
output_count = 0
output_ext = '.png'
# Define algorithm parameters and input signal:
dur = 0.1
dt = 1e-6
f = 32
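# angular bandwidth in rad/s corresponding to the frequency f in Hz: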
bw = 2*np.pi*f
t = np.arange(0, dur, dt)
np.random.seed(0)
noise_power = None
if noise_power is None:
    fig_title = 'IAF Input Signal with No Noise'
else:
    fig_title = 'IAF Input Signal with %d dB of Noise' % noise_power
print(fig_title)
u = func_timer(bl.gen_band_limited)(dur, dt, f, noise_power)
pl.plot_signal(t, u, fig_title,
output_name + str(output_count) + output_ext)
# Test leaky IAF algorithms:
b1 = 3.5 # bias
d1 = 0.7 # threshold
R1 = 10.0 # resistance
C1 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b1, d1, R1, C1)
except ValueError:
    # reconstruction condition not satisfied
    sys.exit()
b2 = 3.4 # bias
d2 = 0.8 # threshold
R2 = 9.0 # resistance
C2 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b2, d2, R2, C2)
except ValueError:
    # reconstruction condition not satisfied
    sys.exit()
b_list = np.array([b1, b2])
d_list = np.array([d1, d2])
R_list = np.array([R1, R2])
C_list = np.array([C1, C2])
output_count += 1
fig_title = 'Signal Encoded Using Leaky IAF Encoder'
print(fig_title)
s_list = func_timer(iaf.iaf_encode_pop)([u, u], dt, b_list, d_list, R_list, C_list)
pl.plot_encoded(t, u, s_list[0], fig_title + ' #1',
output_name + str(output_count) + output_ext)
output_count += 1
pl.plot_encoded(t, u, s_list[1], fig_title + ' #2',
output_name + str(output_count) + output_ext)
output_count += 1
fig_title = 'Signal Decoded Using Leaky IAF Population Decoder'
print(fig_title)
u_rec = func_timer(iaf.iaf_decode_pop)(s_list, dur, dt, bw,
b_list, d_list, R_list,
C_list)
pl.plot_compare(t, u, u_rec, fig_title,
output_name + str(output_count) + output_ext)
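# Optional sanity check (an addition, not part of the original demo):
print('leaky IAF max absolute reconstruction error: %g' % np.max(np.abs(u - u_rec)))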
# Test ideal IAF algorithms:
b1 = 3.5 # bias
d1 = 0.7 # threshold
R1 = np.inf # resistance
C1 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b1, d1, R1, C1)
except ValueError:
    # reconstruction condition not satisfied
    sys.exit()
b2 = 3.4 # bias
d2 = 0.8 # threshold
R2 = np.inf # resistance
C2 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b2, d2, R2, C2)
except ValueError:
    # reconstruction condition not satisfied
    sys.exit()
b_list = [b1, b2]
d_list = [d1, d2]
R_list = [R1, R2]
C_list = [C1, C2]
output_count += 1
fig_title = 'Signal Encoded Using Ideal IAF Encoder'
print(fig_title)
s_list = func_timer(iaf.iaf_encode_pop)([u, u], dt, b_list, d_list, R_list, C_list)
pl.plot_encoded(t, u, s_list[0], fig_title + ' #1',
output_name + str(output_count) + output_ext)
output_count += 1
pl.plot_encoded(t, u, s_list[1], fig_title + ' #2',
output_name + str(output_count) + output_ext)
output_count += 1
fig_title = 'Signal Decoded Using Ideal IAF Population Decoder'
print(fig_title)
u_rec = func_timer(iaf.iaf_decode_pop)(s_list, dur, dt, bw,
b_list, d_list, R_list,
C_list)
pl.plot_compare(t, u, u_rec, fig_title,
output_name + str(output_count) + output_ext)
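# Optional sanity check (an addition, not part of the original demo):
print('ideal IAF max absolute reconstruction error: %g' % np.max(np.abs(u - u_rec)))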
| [] |
bpmbank/PythonJS | regtests/calling/function_expression.py | 591a80afd8233fb715493591db2b68f1748558d9 | """func expr"""
F = function( x,y ):
return x+y
def main():
TestError( F(1,2) == 3 )
| [] |
A425/django-xadmin-1.8 | nadmin/plugins/sortable.py | 9ab06192311b22ec654778935ce3e3c5ffd39a00 | #coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, ListAdminView
SORTBY_VAR = '_sort_by'
class SortablePlugin(BaseAdminPlugin):
sortable_fields = ['sort']
# Media
def get_media(self, media):
if self.sortable_fields and self.request.GET.get(SORTBY_VAR):
media = media + self.vendor('nadmin.plugin.sortable.js')
return media
# Block Views
def block_top_toolbar(self, context, nodes):
if self.sortable_fields:
pass
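            # The commented-out block below is unused scaffolding, seemingly
            # adapted from the refresh plugin; it is never executed.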
# current_refresh = self.request.GET.get(REFRESH_VAR)
# context.update({
# 'has_refresh': bool(current_refresh),
# 'clean_refresh_url': self.admin_view.get_query_string(remove=(REFRESH_VAR,)),
# 'current_refresh': current_refresh,
# 'refresh_times': [{
# 'time': r,
# 'url': self.admin_view.get_query_string({REFRESH_VAR: r}),
# 'selected': str(r) == current_refresh,
# } for r in self.refresh_times],
# })
# nodes.append(loader.render_to_string('nadmin/blocks/refresh.html', context_instance=context))
site.register_plugin(SortablePlugin, ListAdminView)
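# Registration exposes the plugin to every ListAdminView; it only injects its
# media when sortable_fields is non-empty and the request carries the
# _sort_by query parameter (see get_media above).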
| [((1214, 1265), 'nadmin.sites.site.register_plugin', 'site.register_plugin', (['SortablePlugin', 'ListAdminView'], {}), '(SortablePlugin, ListAdminView)\n', (1234, 1265), False, 'from nadmin.sites import site\n')] |
texastribune/donations | batch-tmp.py | 45a75e528564b5fd502319ed7d512ca91bda7f37 | import logging
from config import ACCOUNTING_MAIL_RECIPIENT, LOG_LEVEL, REDIS_URL, TIMEZONE
from datetime import datetime, timedelta
from pytz import timezone
import celery
import redis
from charges import amount_to_charge, charge, ChargeException
from npsp import Opportunity
from util import send_email
zone = timezone(TIMEZONE)
log_level = logging.getLevelName(LOG_LEVEL)
root = logging.getLogger()
root.setLevel(log_level)
class Log(object):
"""
    This encapsulates sending to the console/stdout and email all in one.
"""
def __init__(self):
self.log = list()
def it(self, string):
"""
Add something to the log.
"""
logging.debug(string)
self.log.append(string)
def send(self):
"""
Send the assembled log out as an email.
"""
body = "\n".join(self.log)
recipient = ACCOUNTING_MAIL_RECIPIENT
subject = "Batch run"
send_email(body=body, recipient=recipient, subject=subject)
class AlreadyExecuting(Exception):
"""
    Raised when more than one job of the same type is already running.
"""
pass
class Lock(object):
"""
    Claim an exclusive lock, using Redis.
"""
def __init__(self, key):
self.key = key
self.connection = redis.from_url(REDIS_URL)
def acquire(self):
if self.connection.get(self.key):
raise AlreadyExecuting
self.connection.setex(name=self.key, value="bar", time=1200)
def release(self):
self.connection.delete(self.key)
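# Illustrative use of Lock (a sketch mirroring charge_cards below; the key
# name is a placeholder):
#
#     lock = Lock(key="some-job-lock")
#     lock.acquire()  # raises AlreadyExecuting if the key is already held
#     try:
#         ...  # do the exclusive work
#     finally:
#         lock.release()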
# TODO stop sending this email and just rely on Sentry and logs?
@celery.task()
def charge_cards():
lock = Lock(key="charge-cards-lock")
lock.acquire()
log = Log()
log.it("---Starting batch job...")
    ten_days_ago = (datetime.now(tz=zone) - timedelta(days=10)).strftime("%Y-%m-%d")
    today = datetime.now(tz=zone).strftime("%Y-%m-%d")
    opportunities = Opportunity.list(begin=ten_days_ago, end=today)
log.it("---Processing charges...")
log.it(f"Found {len(opportunities)} opportunities available to process.")
for opportunity in opportunities:
if not opportunity.stripe_customer:
continue
amount = amount_to_charge(opportunity)
log.it(
f"---- Charging ${amount} to {opportunity.stripe_customer} ({opportunity.name})"
)
try:
charge(opportunity)
except ChargeException as e:
logging.info("Batch charge error")
e.send_slack_notification()
log.send()
lock.release()
if __name__ == "__main__":
charge_cards()
| [((315, 333), 'pytz.timezone', 'timezone', (['TIMEZONE'], {}), '(TIMEZONE)\n', (323, 333), False, 'from pytz import timezone\n'), ((348, 379), 'logging.getLevelName', 'logging.getLevelName', (['LOG_LEVEL'], {}), '(LOG_LEVEL)\n', (368, 379), False, 'import logging\n'), ((388, 407), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (405, 407), False, 'import logging\n'), ((1636, 1649), 'celery.task', 'celery.task', ([], {}), '()\n', (1647, 1649), False, 'import celery\n'), ((1952, 2001), 'npsp.Opportunity.list', 'Opportunity.list', ([], {'begin': 'three_days_ago', 'end': 'today'}), '(begin=three_days_ago, end=today)\n', (1968, 2001), False, 'from npsp import Opportunity\n'), ((688, 709), 'logging.debug', 'logging.debug', (['string'], {}), '(string)\n', (701, 709), False, 'import logging\n'), ((954, 1013), 'util.send_email', 'send_email', ([], {'body': 'body', 'recipient': 'recipient', 'subject': 'subject'}), '(body=body, recipient=recipient, subject=subject)\n', (964, 1013), False, 'from util import send_email\n'), ((1305, 1330), 'redis.from_url', 'redis.from_url', (['REDIS_URL'], {}), '(REDIS_URL)\n', (1319, 1330), False, 'import redis\n'), ((2242, 2271), 'charges.amount_to_charge', 'amount_to_charge', (['opportunity'], {}), '(opportunity)\n', (2258, 2271), False, 'from charges import amount_to_charge, charge, ChargeException\n'), ((1888, 1909), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'zone'}), '(tz=zone)\n', (1900, 1909), False, 'from datetime import datetime, timedelta\n'), ((2416, 2435), 'charges.charge', 'charge', (['opportunity'], {}), '(opportunity)\n', (2422, 2435), False, 'from charges import amount_to_charge, charge, ChargeException\n'), ((1811, 1832), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'zone'}), '(tz=zone)\n', (1823, 1832), False, 'from datetime import datetime, timedelta\n'), ((1835, 1853), 'datetime.timedelta', 'timedelta', ([], {'days': '(10)'}), '(days=10)\n', (1844, 1853), False, 'from datetime import datetime, timedelta\n'), ((2485, 2519), 'logging.info', 'logging.info', (['"""Batch charge error"""'], {}), "('Batch charge error')\n", (2497, 2519), False, 'import logging\n')] |
coblee/rotki | rotkehlchen/tests/integration/test_blockchain.py | d675f5c2d0df5176337b7b10038524ee74923482 | import operator
import os
from unittest.mock import patch
import pytest
import requests
from rotkehlchen.chain.ethereum.manager import NodeName
from rotkehlchen.constants.assets import A_BTC
from rotkehlchen.tests.utils.blockchain import mock_etherscan_query
from rotkehlchen.typing import SupportedBlockchain
@pytest.mark.skipif(
os.name == 'nt',
reason='Not testing running with geth in windows at the moment',
)
@pytest.mark.parametrize('have_blockchain_backend', [True])
def test_eth_connection_initial_balances(
blockchain,
inquirer, # pylint: disable=unused-argument
):
"""TODO for this test. Either:
    1. Don't use our own chain, but a normal open node for this test.
    2. If we do use our own chain, deploy the eth-scan contract there.
    But (1) probably makes more sense.
"""
msg = 'Should be connected to ethereum node'
assert blockchain.ethereum.web3_mapping.get(NodeName.OWN) is not None, msg
def test_query_btc_balances(blockchain):
blockchain.query_btc_balances()
assert 'BTC' not in blockchain.totals
account = '3BZU33iFcAiyVyu2M2GhEpLNuh81GymzJ7'
blockchain.modify_btc_account(account, 'append', operator.add)
blockchain.query_btc_balances()
assert blockchain.totals[A_BTC].usd_value is not None
assert blockchain.totals[A_BTC].amount is not None
@pytest.mark.parametrize('number_of_eth_accounts', [0])
def test_add_remove_account_assure_all_balances_not_always_queried(blockchain):
"""Due to a programming mistake at addition and removal of blockchain accounts
after the first time all balances were queried every time. That slowed
everything down (https://github.com/rotki/rotki/issues/678).
This is a regression test for that behaviour
TODO: Is this still needed? Shouldn't it just be removed?
Had to add lots of mocks to make it not be a slow test
"""
addr1 = '0xe188c6BEBB81b96A65aa20dDB9e2aef62627fa4c'
addr2 = '0x78a087fCf440315b843632cFd6FDE6E5adcCc2C2'
etherscan_patch = mock_etherscan_query(
eth_map={addr1: {'ETH': 1}, addr2: {'ETH': 2}},
etherscan=blockchain.ethereum.etherscan,
original_requests_get=requests.get,
original_queries=[],
)
ethtokens_max_chunks_patch = patch(
'rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH',
new=800,
)
with etherscan_patch, ethtokens_max_chunks_patch:
blockchain.add_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr1],
)
assert addr1 in blockchain.accounts.eth
with etherscan_patch, ethtokens_max_chunks_patch, patch.object(blockchain, 'query_balances') as mock: # noqa: E501
blockchain.remove_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr1],
)
assert addr1 not in blockchain.accounts.eth
assert mock.call_count == 0, 'blockchain.query_balances() should not have been called'
with etherscan_patch, ethtokens_max_chunks_patch, patch.object(blockchain, 'query_balances') as mock: # noqa: E501
blockchain.add_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr2],
)
| [((315, 420), 'pytest.mark.skipif', 'pytest.mark.skipif', (["(os.name == 'nt')"], {'reason': '"""Not testing running with geth in windows at the moment"""'}), "(os.name == 'nt', reason=\n 'Not testing running with geth in windows at the moment')\n", (333, 420), False, 'import pytest\n'), ((428, 486), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""have_blockchain_backend"""', '[True]'], {}), "('have_blockchain_backend', [True])\n", (451, 486), False, 'import pytest\n'), ((1339, 1393), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""number_of_eth_accounts"""', '[0]'], {}), "('number_of_eth_accounts', [0])\n", (1362, 1393), False, 'import pytest\n'), ((2013, 2188), 'rotkehlchen.tests.utils.blockchain.mock_etherscan_query', 'mock_etherscan_query', ([], {'eth_map': "{addr1: {'ETH': 1}, addr2: {'ETH': 2}}", 'etherscan': 'blockchain.ethereum.etherscan', 'original_requests_get': 'requests.get', 'original_queries': '[]'}), "(eth_map={addr1: {'ETH': 1}, addr2: {'ETH': 2}},\n etherscan=blockchain.ethereum.etherscan, original_requests_get=requests\n .get, original_queries=[])\n", (2033, 2188), False, 'from rotkehlchen.tests.utils.blockchain import mock_etherscan_query\n'), ((2252, 2340), 'unittest.mock.patch', 'patch', (['"""rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH"""'], {'new': '(800)'}), "('rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH',\n new=800)\n", (2257, 2340), False, 'from unittest.mock import patch\n'), ((2650, 2692), 'unittest.mock.patch.object', 'patch.object', (['blockchain', '"""query_balances"""'], {}), "(blockchain, 'query_balances')\n", (2662, 2692), False, 'from unittest.mock import patch\n'), ((3108, 3150), 'unittest.mock.patch.object', 'patch.object', (['blockchain', '"""query_balances"""'], {}), "(blockchain, 'query_balances')\n", (3120, 3150), False, 'from unittest.mock import patch\n')] |
LaptopBiologist/ReferenceAnalyzer | __init__.py | 109f1f58ee2b0173e7285156091ba2b11459ff85 | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: I am
#
# Created: 02/11/2017
# Copyright: (c) I am 2017
# Licence: <your licence>
#-------------------------------------------------------------------------------
def main():
pass
if __name__ == '__main__':
main()
| [] |
jimmybutton/moviedb | app/__init__.py | 61028ac4db7f58a671ab3a1c2afd3bfb53372773 | from flask import Flask
from config import Config
from sqlalchemy import MetaData
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_moment import Moment
from flask_misaka import Misaka
from flask_bootstrap import Bootstrap
import os
import logging
from logging.handlers import RotatingFileHandler
from elasticsearch import Elasticsearch
convention = {
"ix": 'ix_%(column_0_label)s',
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
}
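# With this convention a foreign key on, e.g., user.role_id referencing the
# "role" table is deterministically named "fk_user_role_id_role" (table and
# column names here are illustrative), which keeps Alembic batch migrations
# reproducible across databases such as SQLite.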
metadata = MetaData(naming_convention=convention)
db = SQLAlchemy(metadata=metadata)
migrate = Migrate()
login = LoginManager()
login.login_view = "auth.login"
moment = Moment()
md = Misaka()
bootstrap = Bootstrap()
def create_app(config_class=Config):
app = Flask(__name__)
app.config.from_object(Config)
db.init_app(app)
with app.app_context():
if db.engine.url.drivername == 'sqlite':
migrate.init_app(app, db, render_as_batch=True)
else:
migrate.init_app(app, db)
# migrate.init_app(app, db)
login.init_app(app)
moment.init_app(app)
md.init_app(app)
bootstrap.init_app(app)
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.auth import bp as auth_bp
app.register_blueprint(auth_bp, url_prefix='/auth')
from app.main import bp as main_bp
app.register_blueprint(main_bp)
from app.cli import bp as cli_bp
app.register_blueprint(cli_bp)
app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
if app.config['ELASTICSEARCH_URL'] else None
from app import models
if not app.debug and not app.testing:
if not os.path.exists("logs"):
os.mkdir("logs")
file_handler = RotatingFileHandler(
"logs/moviedb.log", maxBytes=10240, backupCount=10
)
file_handler.setFormatter(
logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]"
)
)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.setLevel(logging.INFO)
app.logger.info("Moviedb startup")
return app
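# A minimal usage sketch of the application factory (names are illustrative):
#   from app import create_app
#   app = create_app()
#   app.run(debug=True)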
| [((677, 715), 'sqlalchemy.MetaData', 'MetaData', ([], {'naming_convention': 'convention'}), '(naming_convention=convention)\n', (685, 715), False, 'from sqlalchemy import MetaData\n'), ((722, 751), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {'metadata': 'metadata'}), '(metadata=metadata)\n', (732, 751), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((762, 771), 'flask_migrate.Migrate', 'Migrate', ([], {}), '()\n', (769, 771), False, 'from flask_migrate import Migrate\n'), ((780, 794), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (792, 794), False, 'from flask_login import LoginManager\n'), ((836, 844), 'flask_moment.Moment', 'Moment', ([], {}), '()\n', (842, 844), False, 'from flask_moment import Moment\n'), ((850, 858), 'flask_misaka.Misaka', 'Misaka', ([], {}), '()\n', (856, 858), False, 'from flask_misaka import Misaka\n'), ((871, 882), 'flask_bootstrap.Bootstrap', 'Bootstrap', ([], {}), '()\n', (880, 882), False, 'from flask_bootstrap import Bootstrap\n'), ((931, 946), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (936, 946), False, 'from flask import Flask\n'), ((1675, 1723), 'elasticsearch.Elasticsearch', 'Elasticsearch', (["[app.config['ELASTICSEARCH_URL']]"], {}), "([app.config['ELASTICSEARCH_URL']])\n", (1688, 1723), False, 'from elasticsearch import Elasticsearch\n'), ((1941, 2012), 'logging.handlers.RotatingFileHandler', 'RotatingFileHandler', (['"""logs/moviedb.log"""'], {'maxBytes': '(10240)', 'backupCount': '(10)'}), "('logs/moviedb.log', maxBytes=10240, backupCount=10)\n", (1960, 2012), False, 'from logging.handlers import RotatingFileHandler\n'), ((1865, 1887), 'os.path.exists', 'os.path.exists', (['"""logs"""'], {}), "('logs')\n", (1879, 1887), False, 'import os\n'), ((1901, 1917), 'os.mkdir', 'os.mkdir', (['"""logs"""'], {}), "('logs')\n", (1909, 1917), False, 'import os\n'), ((2082, 2175), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]"""'], {}), "(\n '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')\n", (2099, 2175), False, 'import logging\n')] |
AranKomat/Sequential-Alpha-Zero | optimize.py | 21f78dc95e70b68b5fd18eb33d1ea2d5b5a853d4 | import numpy as np
import random
from time import time, sleep
import h5py
import torch
import torch.nn as nn
import torch.optim as optimizer
import glob
import os
#from scipy.stats import rankdata
from lstm import Model, initialize
from Optim import ScheduledOptim
# import _pickle as cPickle
# np.set_printoptions(threshold=np.nan)
def start(config):
model = Model(config)
model = model.to(config.device)
#optim = optimizer.SGD(model.parameters(), lr=2e-4, momentum=0.9, weight_decay=config.c)
#lr_scheduler = torch.optim.lr_scheduler.StepLR(optim, step_size=200, gamma=0.1) # 20M iters
optim = ScheduledOptim(
optimizer.Adam(
filter(lambda p: p.requires_grad, model.parameters()), lr=config.lr,
betas=(0.9, 0.98), eps=1e-09),
config.hidden_dim, 2000)
list_of_files = glob.glob(config.model_path + '/*')
latest_file = None
if list_of_files:
latest_file = max(list_of_files, key=os.path.getctime)
model_ckpt = latest_file
# model_ckpt = config.model_path + '/model-454.pth'
print(model_ckpt)
if model_ckpt:
checkpoint = torch.load(model_ckpt)
model.load_state_dict(checkpoint['state_dict'])
optim.optimizer.load_state_dict(checkpoint['optimizer'])
start_iter = model_ckpt.split('-')[-1].split('.')[0]
start_iter = int(start_iter)
else:
model.apply(initialize)
start_iter = 0
count = 0
for iter in range(start_iter, config.total_iterations):
print('iteration: %s' % iter)
#if (iter + 1) % 100000 == 0:
# lr_scheduler.step()
start_time = time()
optim.update_learning_rate(iter)
# reads the randomly sampled (s,pi,z)'s from the buffer
# ~ 0.1s
# TODO: if error, set a lock
# translate, _ = cPickle.load(open('save/vocab_cotra.pkl', 'rb'))
with h5py.File("buffer", "r") as f:
cur_row = int(f['/cur_row'][0])
s_buffer = f['/s']
pi_buffer = f['/pi']
z_buffer = f['/z']
s_tmp = []
pi_tmp = []
z_tmp = []
df = cur_row - count
'''x = np.bincount(s_buffer[:,1].astype(int)) / 500000
for i in range(len(x)):
if x[i] > 0.01:
print(i, x[i], translate[i])
break'''
if count == 0:
count = cur_row
t_inf = time()
if count != 0 and df >= 1000:
print('time required for 32 self-play games: ', 32 * (time() - t_inf) / df)
t_inf = time()
count = cur_row
if cur_row >= config.buffer_size:
r = np.sort(
np.random.choice(list(range(0, config.buffer_size)), (config.batch_size // 2), replace=False))
else:
r = np.sort(
np.random.choice(list(range(0, cur_row)), (config.batch_size // 2), replace=False))
tmp = []
# randomly sample rows 8 times for a dramatic speedup.
num_segments = 8
for i in range(num_segments):
tmp.append(
r[(config.batch_size // 2) // num_segments * i:(config.batch_size // 2) // num_segments * (i + 1)])
for i in range(num_segments):
s_tmp.append(s_buffer[tmp[i], :config.max_length])
pi_tmp.append(pi_buffer[tmp[i], :config.max_length, ...])
z_tmp.append(z_buffer[tmp[i], ...])
s = np.concatenate(s_tmp, 0)
pi = np.concatenate(pi_tmp, 0)
z = np.concatenate(z_tmp, 0)
# print('io time: ',time() - start_time)
# decompresses sampled pi's
# takes about 0.005s
new_pi = np.zeros(((config.batch_size // 2), config.max_length, config.vocab_size))
for i in range((config.batch_size // 2)):
for j in range(config.max_length):
if pi[i, j, 0] == -1: # meaning the terminal state; pi=0
new_pi[i, j, :] = 0
elif pi[i, j, 0] == -2 or sum(pi[i, j, :]) == 0: # meaning the padding; place -1 padding
new_pi[i, j, :] = -1
else:
# Beware that np.bincount's bin is [0,1,...min_length-1]
new_pi[i, j, :] = np.bincount(pi[i, j, :].astype(int),
minlength=config.vocab_size) / config.simulation_num_per_move
pi = new_pi
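        # illustration of the decompression above (made-up values): with
        # simulation_num_per_move == 4, a stored visit list pi[i, j, :] == [2, 2, 5, 7]
        # becomes np.bincount([2, 2, 5, 7], minlength=vocab_size) / 4, i.e. a
        # distribution with 0.5 at index 2 and 0.25 at indices 5 and 7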
# creating a mask for loss function and preparing a minibatch
def generate_mask(array):
new_array = np.zeros_like(array)
for i in range(len(array)):
for j in range(len(array[i])):
if j == len(array[i]) - 1:
new_array[i, :] = 1
elif array[i, j] == config.period_token:
new_array[i, :j + 1] = 1
break
elif array[i, j] == config.blank_token:
new_array[i, :j] = 1
break
return new_array
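        # illustration (token ids are made up): for a row such as
        # [t1, t2, PERIOD, PAD, PAD] the mask is [1, 1, 1, 0, 0], so loss
        # terms up to and including the sentence-ending period are counted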
def pi_mask(array):
array = array[:, 1:]
array = np.pad(array, ((0, 0), (0, 1)), 'constant')
return generate_mask(array)
# pi_tmp isn't modified here, since the mask will be modified appropriately
        # pi_mask() returns a single mask array, so no tuple unpacking here
        pi_mask = pi_mask(s)
z_mask = generate_mask(s)
z_batch = np.concatenate(
[np.ones([(config.batch_size // 2), config.max_length]) * (-1),
np.ones([(config.batch_size // 2), config.max_length])])
def convert(x):
return torch.tensor(x.astype(np.float32), device=config.device)
t2 = time()
# gradient update
model.train()
cache = []
for i in range(config.depth // config.unit_depth):
cache += [torch.zeros(config.batch_size, config.hidden_dim,device=config.device),
torch.zeros(config.batch_size, config.hidden_dim,device=config.device)]
s_batch = convert(np.array(s)).long()
policy, v, cache = model(s_batch, tuple(cache))
def loss_policy(y_true, y_pred):
return torch.sum(-y_true * torch.log(y_pred + 1.0e-8), 2)
def loss_value(y_true, y_pred):
return (y_true - y_pred) ** 2
pi_mask = convert(pi_mask)
z_mask = convert(z_mask)
z = convert(z)
pi = convert(pi)
loss = torch.mean(torch.sum(loss_policy(pi, policy) * pi_mask +
loss_value(z, v) * z_mask
, 1) / torch.sum(z_mask, 1))
loss.backward()
        gn = nn.utils.clip_grad_norm_(model.parameters(), config.clip)  # clip_grad_norm was renamed clip_grad_norm_ in PyTorch >= 0.4
print(gn)
optim.step()
optim.zero_grad()
print("grad update: %s seconds" % (time() - t2))
print("iteration: %s seconds" % (time() - start_time))
checkpoint = {'state_dict': model.state_dict(),
'optimizer': optim.optimizer.state_dict()}
sleep(config.training_sleep_time)
    torch.save(checkpoint, config.model_path + '/model' + '-' + str(iter + 1) + '.pth') | [((370, 383), 'lstm.Model', 'Model', (['config'], {}), '(config)\n', (375, 383), False, 'from lstm import Model, initialize\n'), ((842, 877), 'glob.glob', 'glob.glob', (["(config.model_path + '/*')"], {}), "(config.model_path + '/*')\n", (851, 877), False, 'import glob\n'), ((1133, 1155), 'torch.load', 'torch.load', (['model_ckpt'], {}), '(model_ckpt)\n', (1143, 1155), False, 'import torch\n'), ((1646, 1652), 'time.time', 'time', ([], {}), '()\n', (1650, 1652), False, 'from time import time, sleep\n'), ((3564, 3588), 'numpy.concatenate', 'np.concatenate', (['s_tmp', '(0)'], {}), '(s_tmp, 0)\n', (3578, 3588), True, 'import numpy as np\n'), ((3602, 3627), 'numpy.concatenate', 'np.concatenate', (['pi_tmp', '(0)'], {}), '(pi_tmp, 0)\n', (3616, 3627), True, 'import numpy as np\n'), ((3640, 3664), 'numpy.concatenate', 'np.concatenate', (['z_tmp', '(0)'], {}), '(z_tmp, 0)\n', (3654, 3664), True, 'import numpy as np\n'), ((3797, 3869), 'numpy.zeros', 'np.zeros', (['(config.batch_size // 2, config.max_length, config.vocab_size)'], {}), '((config.batch_size // 2, config.max_length, config.vocab_size))\n', (3805, 3869), True, 'import numpy as np\n'), ((5781, 5787), 'time.time', 'time', ([], {}), '()\n', (5785, 5787), False, 'from time import time, sleep\n'), ((7127, 7160), 'time.sleep', 'sleep', (['config.training_sleep_time'], {}), '(config.training_sleep_time)\n', (7132, 7160), False, 'from time import time, sleep\n'), ((1902, 1926), 'h5py.File', 'h5py.File', (['"""buffer"""', '"""r"""'], {}), "('buffer', 'r')\n", (1911, 1926), False, 'import h5py\n'), ((4665, 4685), 'numpy.zeros_like', 'np.zeros_like', (['array'], {}), '(array)\n', (4678, 4685), True, 'import numpy as np\n'), ((5250, 5293), 'numpy.pad', 'np.pad', (['array', '((0, 0), (0, 1))', '"""constant"""'], {}), "(array, ((0, 0), (0, 1)), 'constant')\n", (5256, 5293), True, 'import numpy as np\n'), ((2465, 2471), 'time.time', 'time', ([], {}), '()\n', (2469, 2471), False, 'from time import time, sleep\n'), ((2630, 2636), 'time.time', 'time', ([], {}), '()\n', (2634, 2636), False, 'from time import time, sleep\n'), ((5610, 5662), 'numpy.ones', 'np.ones', (['[config.batch_size // 2, config.max_length]'], {}), '([config.batch_size // 2, config.max_length])\n', (5617, 5662), True, 'import numpy as np\n'), ((5938, 6009), 'torch.zeros', 'torch.zeros', (['config.batch_size', 'config.hidden_dim'], {'device': 'config.device'}), '(config.batch_size, config.hidden_dim, device=config.device)\n', (5949, 6009), False, 'import torch\n'), ((6033, 6104), 'torch.zeros', 'torch.zeros', (['config.batch_size', 'config.hidden_dim'], {'device': 'config.device'}), '(config.batch_size, config.hidden_dim, device=config.device)\n', (6044, 6104), False, 'import torch\n'), ((6696, 6716), 'torch.sum', 'torch.sum', (['z_mask', '(1)'], {}), '(z_mask, 1)\n', (6705, 6716), False, 'import torch\n'), ((5534, 5586), 'numpy.ones', 'np.ones', (['[config.batch_size // 2, config.max_length]'], {}), '([config.batch_size // 2, config.max_length])\n', (5541, 5586), True, 'import numpy as np\n'), ((6131, 6142), 'numpy.array', 'np.array', (['s'], {}), '(s)\n', (6139, 6142), True, 'import numpy as np\n'), ((6288, 6313), 'torch.log', 'torch.log', (['(y_pred + 1e-08)'], {}), '(y_pred + 1e-08)\n', (6297, 6313), False, 'import torch\n'), ((6920, 6926), 'time.time', 'time', ([], {}), '()\n', (6924, 6926), False, 'from time import time, sleep\n'), ((6975, 6981), 'time.time', 'time', ([], {}), '()\n', (6979, 6981), False, 'from time import time, sleep\n'), ((2584, 2590), 'time.time', 'time', ([], {}), '()\n', (2588, 2590), False, 'from time import time, sleep\n')] |
Command-Master/MCCC | src/bin_expr.py | a49440bfd8542002aee35d41bee093dc8b51d781 | from c_int import Int
from casting import cast
from globals_consts import NAMESPACE
from temps import used_temps, get_temp, get_temp_func
def binary_expression(copy_strings, expression, target, variables_name, vtypes):
from expression import generate_expression
c1, t1, tt1 = generate_expression(None, expression.left, vtypes, variables_name, copy_strings, False)
c2, t2, tt2 = generate_expression(None, expression.right, vtypes, variables_name, copy_strings, False)
for ttt in tt1: used_temps.remove(ttt)
for ttt in tt2: used_temps.remove(ttt)
ot = cast(t1, t2)
rt = ot
if expression.op in ['<', '>', '<=', '>=', '==', '!=', '&&']:
rt = Int()
if target is None or target == []:
target = [get_temp() for _ in range(ot.size)]
used_temps.extend(target)
code = ''
if expression.op in ['&&', '||']:
if expression.op == '&&':
code += c1
code += t1.cast(ot, tt1, target)
f2 = get_temp_func()
f2h = open(f'{f2}.mcfunction', 'w')
f2h.write(c2)
f2h.write(t2.cast(ot, tt2, target))
f2h.close()
code += f'execute unless score {target[0]} {NAMESPACE} matches 0 run function {NAMESPACE}:{f2}\n'
elif expression.op == '||':
code += c1
code += t1.cast(ot, tt1, target)
f2 = get_temp_func()
f2h = open(f'{f2}.mcfunction', 'w')
f2h.write(c2)
f2h.write(t2.cast(ot, tt2, target))
f2h.close()
code += f'execute if score {target[0]} {NAMESPACE} matches 0 run function {NAMESPACE}:{f2}\n'
else:
if ot == t1:
code += c1
code += c2
code += t2.cast(ot, tt2, target)
code += ot.binary(expression.op, tt1, target, target)
else:
code += c1
code += t1.cast(ot, tt1, target)
code += c2
code += ot.binary(expression.op, target, tt2, target)
return code, rt, target | [((286, 377), 'expression.generate_expression', 'generate_expression', (['None', 'expression.left', 'vtypes', 'variables_name', 'copy_strings', '(False)'], {}), '(None, expression.left, vtypes, variables_name,\n copy_strings, False)\n', (305, 377), False, 'from expression import generate_expression\n'), ((392, 484), 'expression.generate_expression', 'generate_expression', (['None', 'expression.right', 'vtypes', 'variables_name', 'copy_strings', '(False)'], {}), '(None, expression.right, vtypes, variables_name,\n copy_strings, False)\n', (411, 484), False, 'from expression import generate_expression\n'), ((576, 588), 'casting.cast', 'cast', (['t1', 't2'], {}), '(t1, t2)\n', (580, 588), False, 'from casting import cast\n'), ((501, 523), 'temps.used_temps.remove', 'used_temps.remove', (['ttt'], {}), '(ttt)\n', (518, 523), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((544, 566), 'temps.used_temps.remove', 'used_temps.remove', (['ttt'], {}), '(ttt)\n', (561, 566), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((680, 685), 'c_int.Int', 'Int', ([], {}), '()\n', (683, 685), False, 'from c_int import Int\n'), ((787, 812), 'temps.used_temps.extend', 'used_temps.extend', (['target'], {}), '(target)\n', (804, 812), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((743, 753), 'temps.get_temp', 'get_temp', ([], {}), '()\n', (751, 753), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((984, 999), 'temps.get_temp_func', 'get_temp_func', ([], {}), '()\n', (997, 999), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((1377, 1392), 'temps.get_temp_func', 'get_temp_func', ([], {}), '()\n', (1390, 1392), False, 'from temps import used_temps, get_temp, get_temp_func\n')] |
bobmittmann/yard-ice | tools/mkcodelet.py | 3b27f94279d806d3a222de60adccf934994ed168 | #!/usr/bin/python
from struct import *
from getopt import *
import sys
import os
import re
def usage():
global progname
print >> sys.stderr, ""
print >> sys.stderr, " Usage:", progname, "[options] fname"
print >> sys.stderr, ""
print >> sys.stderr, "Options"
print >> sys.stderr, " -h, --help show this help message and exit"
    print >> sys.stderr, "  -o FILENAME, --output=FILENAME"
print >> sys.stderr, ""
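# Typical invocation (file names are illustrative); the parser below expects
# "objdump -t -s" output, i.e. a symbol table followed by .text section
# contents:
#   objdump -t -s codelet.elf > codelet.lst
#   mkcodelet.py -o codelet.c codelet.lst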
def error(msg):
print >> sys.stderr, ""
print >> sys.stderr, "#error:", msg
usage()
sys.exit(2)
def mk_codelet(in_fname, out_fname, hdr_fname):
try:
in_file = open(in_fname, mode='r')
except:
print >> sys.stderr, "#error: can't open file: '%s'" % in_fname
sys.exit(1)
try:
c_file = open(out_fname, mode='w')
except:
print >> sys.stderr, "#error: can't create file: %s" % out_fname
sys.exit(1)
try:
h_file = open(hdr_fname, mode='w')
except:
print >> sys.stderr, "#error: can't create file: %s" % hdr_fname
sys.exit(1)
i = 0
for line in in_file:
if re.match("SYMBOL TABLE:", line):
break
s_pat = re.compile("([0-9a-f]{8}) ..*[0-9a-f]{8} ([.A-Za-z_][A-Za-z_0-9]*)")
sym = {}
for line in in_file:
m = s_pat.findall(line)
if m:
addr = int(m[0][0], 16)
name = m[0][1]
sym[addr] = name
else:
break
for line in in_file:
if re.match("Contents of section .text:", line):
break
token_pat = re.compile("([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})")
c_file.write("#include <stdint.h>\n\n")
h_file.write("#include <stdint.h>\n\n")
addr = 0
i = 0
for line in in_file:
for a, b, c, d in token_pat.findall(line):
try:
sym[addr]
if (i > 0):
c_file.write("\n};\n\n")
c_file.write("const uint32_t %s[] = {" % sym[addr])
h_file.write("extern const uint32_t %s[];\n\n" % sym[addr])
i = 0
except KeyError:
pass
if ((i % 4) == 0):
if (i > 0):
c_file.write(",")
c_file.write("\n\t0x" + d + c + b + a)
else:
c_file.write(", 0x" + d + c + b + a )
i = i + 1;
addr = addr + 4
c_file.write("\n};\n")
in_file.close()
c_file.close()
h_file.close()
return
def main():
global progname
progname = sys.argv[0]
try:
opts, args = getopt(sys.argv[1:], "ho:", \
["help", "output="])
except GetoptError, err:
error(str(err))
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-o", "--output"):
out_fname = a
else:
assert False, "unhandled option"
if len(args) == 0:
error("missing fname")
if len(args) > 1:
error("too many arguments")
in_fname = args[0]
try:
out_fname
except NameError:
dirname, fname = os.path.split(in_fname)
basename, extension = os.path.splitext(fname)
out_fname = basename + '.' + 'c'
dirname, fname = os.path.split(out_fname)
basename, extension = os.path.splitext(fname)
hdr_fname = basename + '.' + 'h'
mk_codelet(in_fname, out_fname, hdr_fname)
if __name__ == "__main__":
main()
| [] |
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | utest/x3270/test_screenshot.py | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | import os
from pytest_mock import MockerFixture
from robot.api import logger
from Mainframe3270.x3270 import x3270
def test_set_screenshot_folder(under_test: x3270):
path = os.getcwd()
under_test.set_screenshot_folder(path)
assert under_test.imgfolder == os.getcwd()
def test_set_screenshot_folder_nonexistent(mocker: MockerFixture, under_test: x3270):
mocker.patch("robot.api.logger.error")
mocker.patch("robot.api.logger.warn")
path = os.path.join(os.getcwd(), "nonexistent")
under_test.set_screenshot_folder(path)
logger.error.assert_called_with('Given screenshots path "%s" does not exist' % path)
logger.warn.assert_called_with(
'Screenshots will be saved in "%s"' % under_test.imgfolder
)
def test_take_screenshot(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.save_screen")
mocker.patch("robot.api.logger.write")
mocker.patch("time.time", return_value=1.0)
under_test.take_screenshot(500, 500)
logger.write.assert_called_with(
'<iframe src="./screenshot_1000.html" height="500" width="500"></iframe>',
level="INFO",
html=True,
)
| [((181, 192), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (190, 192), False, 'import os\n'), ((559, 648), 'robot.api.logger.error.assert_called_with', 'logger.error.assert_called_with', (['(\'Given screenshots path "%s" does not exist\' % path)'], {}), '(\n \'Given screenshots path "%s" does not exist\' % path)\n', (590, 648), False, 'from robot.api import logger\n'), ((648, 742), 'robot.api.logger.warn.assert_called_with', 'logger.warn.assert_called_with', (['(\'Screenshots will be saved in "%s"\' % under_test.imgfolder)'], {}), '(\'Screenshots will be saved in "%s"\' %\n under_test.imgfolder)\n', (678, 742), False, 'from robot.api import logger\n'), ((1023, 1163), 'robot.api.logger.write.assert_called_with', 'logger.write.assert_called_with', (['"""<iframe src="./screenshot_1000.html" height="500" width="500"></iframe>"""'], {'level': '"""INFO"""', 'html': '(True)'}), '(\n \'<iframe src="./screenshot_1000.html" height="500" width="500"></iframe>\',\n level=\'INFO\', html=True)\n', (1054, 1163), False, 'from robot.api import logger\n'), ((273, 284), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (282, 284), False, 'import os\n'), ((482, 493), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (491, 493), False, 'import os\n')] |
brackham/splat | splat/photometry.py | 5ee0da82f19017e900ee83af94609dbe9f8a0ea4 | # -*- coding: utf-8 -*-
from __future__ import print_function, division
"""
.. note::
These are the spectrophotometry functions for SPLAT
"""
# imports - internal
import copy
import os
# imports - external
import numpy
from astropy import units as u # standard units
from astropy import constants as const # physical constants in SI units
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from scipy.integrate import trapz # for numerical integration
from scipy.interpolate import interp1d
# splat functions and constants
from .initialize import *
from .utilities import *
#####################################################
############### SPECTROPHOTOMETRY ###############
#####################################################
# this function has been obsoleted
def checkFilter(filt,verbose=True):
output = False
f = copy.deepcopy(filt)
f = f.replace(' ','_').upper()
for k in list(FILTERS.keys()):
if f==k.upper() or f.lower() in FILTERS[k]['altnames']:
output = k
if output == False and verbose == True:
print('\nFilter '+filt+' not currently available for SPLAT; contact '+EMAIL+'\n')
filterInfo()
return output
def filterProfile(filt,**kwargs):
'''
:Purpose: Retrieve the filter profile for a SPLAT filter. Returns two arrays: the filter wavelength and filter transmission curve.
    :param filt: String giving the name of one of the predefined filters listed in splat.FILTERS.keys() (required)
    :param filterFolder: folder containing the filter transmission files (optional, default = splat.FILTER_FOLDER)
    :Example:
    >>> import splat.photometry as spphot
    >>> fwave, ftrans = spphot.filterProfile('MKO J')
'''
# keyword parameters
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
# check that requested filter is in list
f0 = checkFilterName(filt, verbose=True)
    if f0 == False: raise ValueError('filter {} is not available in SPLAT'.format(filt))
filt = f0
# read in filter
fwave,ftrans = numpy.genfromtxt(os.path.normpath(filterFolder+FILTERS[filt]['file']), comments='#', unpack=True, missing_values = ('NaN','nan'), filling_values = (numpy.nan))
# print(type(fwave),type(ftrans),isinstance(fwave,numpy.ndarray),isinstance(ftrans,numpy.ndarray),not isinstance(fwave,numpy.ndarray) or not isinstance(ftrans,numpy.ndarray))
if not isinstance(fwave,numpy.ndarray) or not isinstance(ftrans,numpy.ndarray):
raise ValueError('\nProblem reading in {}'.format(filterFolder+FILTERS[filt]['file']))
fwave = fwave[~numpy.isnan(ftrans)]*u.micron
ftrans = ftrans[~numpy.isnan(ftrans)]
return fwave,ftrans
def filterMag(sp,filt,*args,**kwargs):
'''
:Purpose:
Determine the photometric magnitude of a source based on its
spectrum. Spectral fluxes are convolved with the filter profile specified by
the ``filter`` input. By default this filter is also
convolved with a model of Vega to extract Vega magnitudes,
but the user can also specify AB magnitudes, photon flux or energy flux.
:Required Parameters:
**sp**: Spectrum class object, which should contain wave, flux and noise array elements.
**filter**: String giving name of filter, which can either be one of the predefined filters listed in splat.FILTERS.keys() or a custom filter name
:Optional Parameters:
**custom** = None: A 2 x N vector array specifying the wavelengths and transmissions for a custom filter
**notch** = None: A 2 element array that specifies the lower and upper wavelengths for a notch filter (100% transmission within, 0% transmission without)
**vega** = True: compute Vega magnitudes (may be set by filter)
**ab** = False: compute AB magnitudes (may be set by filter)
**energy** = False: compute energy flux
**photon** = False: compute photon flux
**filterFolder** = splat.FILTER_FOLDER: folder containing the filter transmission files
**vegaFile** = 'vega_kurucz.txt': name of file containing Vega flux file, must be within ``filterFolder``
**nsamples** = 100: number of samples to use in Monte Carlo error estimation
**info** = False: List the predefined filter names available
**verbose** = True: List the predefined filter names available
:Example:
>>> import splat
>>> import splat.photometry as spphot
>>> sp = splat.getSpectrum(shortname='1507-1627')[0]
>>> sp.fluxCalibrate('2MASS J',14.5)
>>> spphot.filterMag(sp,'MKO J')
(14.345894376898123, 0.027596454828421831)
'''
# keyword parameters
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
vegaFile = kwargs.get('vegaFile',VEGAFILE)
info = kwargs.get('info',False)
custom = kwargs.get('custom',False)
notch = kwargs.get('notch',False)
vega = kwargs.get('vega',True)
ab = kwargs.get('ab',not vega)
rsr = kwargs.get('rsr',False)
nsamples = kwargs.get('nsamples',100)
verbose = kwargs.get('verbose',False)
# check that requested filter is in list
if isinstance(custom,bool) and isinstance(notch,bool):
f0 = checkFilterName(filt,verbose=True)
if f0 == False:
return numpy.nan, numpy.nan
filt = f0
# reset filter calculation methods based on filter design
if 'ab' in FILTERS[filt]['method']:
ab = kwargs.get('ab',True)
vega = not ab
if 'vega' in FILTERS[filt]['method']:
vega = kwargs.get('vega',True)
ab = not vega
rsr = FILTERS[filt]['rsr']
# other possibilities
photons = kwargs.get('photons',False)
photons = kwargs.get('photon',photons)
energy = kwargs.get('energy',False)
energy = kwargs.get('flux',energy)
if (photons or energy):
vega = False
ab = False
if photons: energy = False
if energy: photons = False
# Read in filter
if isinstance(custom,bool) and isinstance(notch,bool):
fwave,ftrans = filterProfile(filt,**kwargs)
# notch filter
elif isinstance(custom,bool) and isinstance(notch,list):
dn = (notch[1]-notch[0])/1000
fwave = numpy.arange(notch[0]-5.*dn,notch[1]+5.*dn,dn)
ftrans = numpy.zeros(len(fwave))
ftrans[numpy.where(numpy.logical_and(fwave >= notch[0],fwave <= notch[1]))] = 1.
# custom filter
else:
fwave,ftrans = custom[0],custom[1]
# units
if isinstance(fwave,u.quantity.Quantity) == True:
fwave = fwave.to(u.micron)
else:
fwave = fwave*u.micron
# check that spectrum and filter cover the same wavelength ranges
if numpy.nanmax(fwave) < numpy.nanmin(sp.wave) or numpy.nanmin(fwave) > numpy.nanmax(sp.wave):
if verbose==True: print('\nWarning: no overlap between spectrum for {} and filter {}'.format(sp.name,filt))
return numpy.nan, numpy.nan
if numpy.nanmin(fwave) < numpy.nanmin(sp.wave) or numpy.nanmax(fwave) > numpy.nanmax(sp.wave):
if verbose==True: print('\nWarning: spectrum for {} does not span full filter profile for {}'.format(sp.name,filt))
# interpolate spectrum onto filter wavelength function
wgood = numpy.where(~numpy.isnan(sp.noise))
if len(sp.wave[wgood]) > 0:
d = interp1d(sp.wave[wgood].value,sp.flux[wgood].value,bounds_error=False,fill_value=0.)
n = interp1d(sp.wave[wgood].value,sp.noise[wgood].value,bounds_error=False,fill_value=0)
# catch for models
else:
if verbose==True: print('\nWarning: data values in range of filter {} have no uncertainties'.format(filt))
d = interp1d(sp.wave.value,sp.flux.value,bounds_error=False,fill_value=0.)
n = interp1d(sp.wave.value,sp.flux.value*1.e-9,bounds_error=False,fill_value=0.)
result = []
if (vega):
# Read in Vega spectrum
vwave,vflux = numpy.genfromtxt(os.path.normpath(filterFolder+vegaFile), comments='#', unpack=True, \
missing_values = ('NaN','nan'), filling_values = (numpy.nan))
vwave = vwave[~numpy.isnan(vflux)]*u.micron
vflux = vflux[~numpy.isnan(vflux)]*(u.erg/(u.cm**2 * u.s * u.micron))
vflux.to(sp.flux_unit,equivalencies=u.spectral_density(vwave))
# interpolate Vega onto filter wavelength function
v = interp1d(vwave.value,vflux.value,bounds_error=False,fill_value=0.)
if rsr:
val = -2.5*numpy.log10(trapz(ftrans*fwave.value*d(fwave.value),fwave.value)/trapz(ftrans*fwave.value*v(fwave.value),fwave.value))
else:
val = -2.5*numpy.log10(trapz(ftrans*d(fwave.value),fwave.value)/trapz(ftrans*v(fwave.value),fwave.value))
for i in numpy.arange(nsamples):
# result.append(-2.5*numpy.log10(trapz(ftrans*numpy.random.normal(d(fwave),n(fwave))*sp.flux_unit,fwave)/trapz(ftrans*v(fwave)*sp.flux_unit,fwave)))
if rsr:
result.append(-2.5*numpy.log10(trapz(ftrans*fwave.value*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)/trapz(ftrans*fwave.value*v(fwave.value),fwave.value)))
else:
result.append(-2.5*numpy.log10(trapz(ftrans*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)/trapz(ftrans*v(fwave.value),fwave.value)))
outunit = 1.
elif (ab):
nu = sp.wave.to('Hz',equivalencies=u.spectral())
fnu = sp.flux.to('Jy',equivalencies=u.spectral_density(sp.wave))
noisenu = sp.noise.to('Jy',equivalencies=u.spectral_density(sp.wave))
filtnu = fwave.to('Hz',equivalencies=u.spectral())
fconst = 3631*u.jansky
d = interp1d(nu.value,fnu.value,bounds_error=False,fill_value=0.)
n = interp1d(nu.value,noisenu.value,bounds_error=False,fill_value=0.)
b = trapz((ftrans/filtnu.value)*fconst.value,filtnu.value)
val = -2.5*numpy.log10(trapz(ftrans*d(filtnu.value)/filtnu.value,filtnu.value)/b)
for i in numpy.arange(nsamples):
a = trapz(ftrans*(d(filtnu.value)+numpy.random.normal(0,1)*n(filtnu.value))/filtnu.value,filtnu.value)
result.append(-2.5*numpy.log10(a/b))
outunit = 1.
elif (energy):
outunit = u.erg/u.s/u.cm**2
if rsr:
a = trapz(ftrans*fwave.value*d(fwave.value),fwave.value)*sp.wave.unit*sp.flux.unit
b = trapz(ftrans*fwave.value,fwave.value)*sp.wave.unit
c = trapz(ftrans*fwave.value*fwave.value,fwave.value)*sp.wave.unit*sp.wave.unit
val = (a/b * c/b).to(outunit).value
else:
a = trapz(ftrans*d(fwave.value),fwave.value)*sp.wave.unit*sp.flux.unit
b = trapz(ftrans,fwave.value)*sp.wave.unit
c = trapz(ftrans*fwave.value,fwave.value)*sp.wave.unit*sp.wave.unit
val = (a/b * c/b).to(outunit).value
for i in numpy.arange(nsamples):
if rsr:
result.append((trapz(ftrans*fwave.value*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)*sp.wave.unit*sp.flux.unit).to(outunit).value)
else:
result.append((trapz(ftrans*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)*sp.wave.unit*sp.flux.unit).to(outunit).value)
elif (photons):
outunit = 1./u.s/u.cm**2
convert = const.h.to('erg s')*const.c.to('micron/s')
val = (trapz(ftrans*fwave.value*convert.value*d(fwave.value),fwave.value)*sp.wave.unit*sp.flux.unit*convert.unit).to(outunit).value
for i in numpy.arange(nsamples):
result.append((trapz(ftrans*fwave.value*convert.value*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)*sp.wave.unit*sp.flux.unit*convert.unit).to(outunit).value)
else:
raise NameError('\nfilterMag not given a correct physical quantity (vega, ab, energy, photons) to compute photometry\n\n')
# val = numpy.nanmean(result)*outunit
err = numpy.nanstd(result)
if len(sp.wave[wgood]) == 0:
err = 0.
return val*outunit,err*outunit
def vegaToAB(filt,vegafile=VEGAFILE,filterfolder=SPLAT_PATH+FILTER_FOLDER,custom=False,notch=False,rsr=False,**kwargs):
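    '''
    :Purpose: Computes the Vega-to-AB offset of a filter, i.e. the AB
        magnitude of Vega through that bandpass, so that (approximately)
        m(AB) = m(Vega) + vegaToAB(filt).
    :param filt: name of filter, one of the filters in splat.FILTERS.keys(),
        or supply the custom/notch keywords as in filterMag()
    '''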
# check that requested filter is in list
if isinstance(custom,bool) and isinstance(notch,bool):
f0 = checkFilterName(filt,verbose=True)
if f0 == False:
return numpy.nan, numpy.nan
filt = f0
rsr = FILTERS[filt]['rsr']
# Read in filter
if isinstance(custom,bool) and isinstance(notch,bool):
fwave,ftrans = filterProfile(filt,**kwargs)
# notch filter
elif isinstance(custom,bool) and isinstance(notch,list):
dn = (notch[1]-notch[0])/1000
fwave = numpy.arange(notch[0]-5.*dn,notch[1]+5.*dn,dn)
ftrans = numpy.zeros(len(fwave))
ftrans[numpy.where(numpy.logical_and(fwave >= notch[0],fwave <= notch[1]))] = 1.
# custom filter
else:
fwave,ftrans = custom[0],custom[1]
# Read in Vega spectrum
vwave,vflux = numpy.genfromtxt(os.path.normpath(filterfolder+vegafile), comments='#', unpack=True, \
missing_values = ('NaN','nan'), filling_values = (numpy.nan))
vwave = vwave[~numpy.isnan(vflux)]*u.micron
vflux = vflux[~numpy.isnan(vflux)]*(u.erg/(u.cm**2 * u.s * u.micron))
# trim spectrum
vflux = vflux[vwave>=numpy.nanmin(fwave)]
vwave = vwave[vwave>=numpy.nanmin(fwave)]
vflux = vflux[vwave<=numpy.nanmax(fwave)]
vwave = vwave[vwave<=numpy.nanmax(fwave)]
# convert to fnu
nu = vwave.to('Hz',equivalencies=u.spectral())
fnu = vflux.to('Jy',equivalencies=u.spectral_density(vwave))
filtnu = fwave.to('Hz',equivalencies=u.spectral())
fconst = 3631*u.jansky
d = interp1d(nu.value,fnu.value,bounds_error=False,fill_value=0.)
b = trapz((ftrans/filtnu.value)*fconst.value,filtnu.value)
return -2.5*numpy.log10(trapz(ftrans*d(filtnu.value)/filtnu.value,filtnu.value)/b)
def filterInfo(*args,**kwargs):
'''
    :Purpose: Prints out and returns summary wavelength properties (mean, pivot, central, FWHM, range) for one or more filters in the SPLAT reference library.
'''
verbose = kwargs.get('verbose',True)
if len(args) > 0:
fname = list(args)
elif kwargs.get('filter',False) != False:
fname = kwargs['filter']
else:
fname = sorted(list(FILTERS.keys()))
if isinstance(fname,list) == False:
fname = [fname]
output = {}
for k in fname:
f = checkFilterName(k)
if f != False:
output[f] = {}
output[f]['description'] = FILTERS[f]['description']
output[f]['zeropoint'] = FILTERS[f]['zeropoint']
fwave,ftrans = filterProfile(f,**kwargs)
try:
fwave = fwave.to(u.micron)
except:
fwave = fwave*u.micron
fw = fwave[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
ft = ftrans[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
fw05 = fwave[numpy.where(ftrans > 0.5*numpy.nanmax(ftrans))]
output[f]['lambda_mean'] = trapz(ft*fw,fw)/trapz(ft,fw)
output[f]['lambda_pivot'] = numpy.sqrt(trapz(fw*ft,fw)/trapz(ft/fw,fw))
output[f]['lambda_central'] = 0.5*(numpy.max(fw)+numpy.min(fw))
output[f]['lambda_fwhm'] = numpy.max(fw05)-numpy.min(fw05)
output[f]['lambda_min'] = numpy.min(fw)
output[f]['lambda_max'] = numpy.max(fw)
if verbose ==True:
print(f.replace('_',' ')+': '+output[f]['zeropoint'])
print('Zeropoint = {} Jy'.format(output[f]['zeropoint']))
print('Central wavelength: = {:.3f}'.format(output[f]['lambda_central']))
print('Mean wavelength: = {:.3f}'.format(output[f]['lambda_mean']))
print('Pivot point: = {:.3f}'.format(output[f]['lambda_pivot']))
print('FWHM = {:.3f}'.format(output[f]['lambda_fwhm']))
print('Wavelength range = {:.3f} to {:.3f}\n'.format(output[f]['lambda_min'],output[f]['lambda_max']))
else:
if verbose ==True: print(' Filter {} not in SPLAT filter list'.format(k))
kys = list(output.keys())
if len(kys) == 1: return output[kys[0]]
else: return output
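# Example usage of filterInfo() (returned values depend on the installed
# filter definitions):
#   info = filterInfo('2MASS J', verbose=False)
#   info['lambda_mean'], info['lambda_fwhm']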
def filterProperties(filt,**kwargs):
'''
:Purpose: Returns a dictionary containing key parameters for a particular filter.
:param filter: name of filter, must be one of the specifed filters given by splat.FILTERS.keys()
:type filter: required
:param verbose: print out information about filter to screen
:type verbose: optional, default = True
:Example:
>>> import splat
>>> data = splat.filterProperties('2MASS J')
Filter 2MASS J: 2MASS J-band
Zeropoint = 1594.0 Jy
Pivot point: = 1.252 micron
FWHM = 0.323 micron
Wavelength range = 1.066 to 1.442 micron
>>> data = splat.filterProperties('2MASS X')
Filter 2MASS X not among the available filters:
2MASS H: 2MASS H-band
2MASS J: 2MASS J-band
2MASS KS: 2MASS Ks-band
BESSEL I: Bessel I-band
FOURSTAR H: FOURSTAR H-band
FOURSTAR H LONG: FOURSTAR H long
FOURSTAR H SHORT: FOURSTAR H short
...
'''
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
# check that requested filter is in list
filt = checkFilterName(filt)
if filt == False: return None
report = {}
report['name'] = filt
report['description'] = FILTERS[filt]['description']
report['zeropoint'] = FILTERS[filt]['zeropoint']
report['method'] = FILTERS[filt]['method']
report['rsr'] = FILTERS[filt]['rsr']
fwave,ftrans = filterProfile(filt,**kwargs)
try:
fwave = fwave.to(u.micron)
except:
fwave = fwave*u.micron
fw = fwave[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
ft = ftrans[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
fw05 = fwave[numpy.where(ftrans > 0.5*numpy.nanmax(ftrans))]
# print(trapz(ft,fw))
# print(trapz(fw*ft,fw))
report['lambda_mean'] = trapz(ft*fw,fw)/trapz(ft,fw)
report['lambda_pivot'] = numpy.sqrt(trapz(fw*ft,fw)/trapz(ft/fw,fw))
report['lambda_central'] = 0.5*(numpy.max(fw)+numpy.min(fw))
report['lambda_fwhm'] = numpy.max(fw05)-numpy.min(fw05)
report['lambda_min'] = numpy.min(fw)
report['lambda_max'] = numpy.max(fw)
report['wave'] = fwave
report['transmission'] = ftrans
# report values out
if kwargs.get('verbose',False):
print('\nFilter '+filt+': '+report['description'])
print('Zeropoint = {} Jy'.format(report['zeropoint']))
print('Pivot point: = {:.3f}'.format(report['lambda_pivot']))
print('FWHM = {:.3f}'.format(report['lambda_fwhm']))
print('Wavelength range = {:.3f} to {:.3f}\n'.format(report['lambda_min'],report['lambda_max']))
return report
def magToFlux(mag,filt,**kwargs):
'''
:Purpose: Converts a magnitude into an energy, and vice versa.
:param mag: magnitude on whatever system is defined for the filter or provided (required)
:param filter: name of filter, must be one of the specifed filters given by splat.FILTERS.keys() (required)
:param reverse: convert energy into magnitude instead (optional, default = False)
:param ab: magnitude is on the AB system (optional, default = filter preference)
:param vega: magnitude is on the Vega system (optional, default = filter preference)
:param rsr: magnitude is on the Vega system (optional, default = filter preference)
:param units: units for energy as an astropy.units variable; if this conversion does not work, the conversion is ignored (optional, default = erg/cm2/s)
:param verbose: print out information about filter to screen (optional, default = False)
WARNING: THIS CODE IS ONLY PARTIALLY COMPLETE
'''
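    # example of the forward (magnitude -> energy flux) direction, with
    # made-up values; the result is an astropy quantity in erg / (cm2 s):
    #   flx, e_flx = magToFlux(14.5, '2MASS J', uncertainty=0.05)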
# keyword parameters
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
vegaFile = kwargs.get('vegaFile','vega_kurucz.txt')
vega = kwargs.get('vega',True)
ab = kwargs.get('ab',not vega)
rsr = kwargs.get('rsr',False)
nsamples = kwargs.get('nsamples',100)
custom = kwargs.get('custom',False)
notch = kwargs.get('notch',False)
base_unit = u.erg/(u.cm**2 * u.s)
return_unit = kwargs.get('unit',base_unit)
e_mag = kwargs.get('uncertainty',0.)
e_mag = kwargs.get('unc',e_mag)
e_mag = kwargs.get('e_mag',e_mag)
if not isinstance(mag,u.quantity.Quantity): mag=mag*u.s/u.s
if not isinstance(e_mag,u.quantity.Quantity): e_mag=e_mag*mag.unit
# check that requested filter is in list
filt = checkFilterName(filt)
if filt == False: return numpy.nan, numpy.nan
# reset filter calculation methods based on filter design
if 'ab' in FILTERS[filt]['method']:
ab = kwargs.get('ab',True)
vega = not ab
if 'vega' in FILTERS[filt]['method']:
vega = kwargs.get('vega',True)
ab = not vega
if 'rsr' in FILTERS[filt]['method']:
rsr = kwargs.get('rsr',True)
# Read in filter
if isinstance(custom,bool) and isinstance(notch,bool):
fwave,ftrans = filterProfile(filt,**kwargs)
# notch filter
elif isinstance(custom,bool) and isinstance(notch,list):
dn = (notch[1]-notch[0])/1000
fwave = numpy.arange(notch[0]-5.*dn,notch[1]+5.*dn,dn)*u.micron
ftrans = numpy.zeros(len(fwave))
ftrans[numpy.where(numpy.logical_and(fwave >= notch[0],fwave <= notch[1]))] = 1.
# custom filter
else:
fwave,ftrans = custom[0],custom[1]
if isinstance(fwave,u.quantity.Quantity) == False: fwave=fwave*u.micron
if isinstance(ftrans,u.quantity.Quantity) == True: ftrans=ftrans.value
fwave = fwave[~numpy.isnan(ftrans)]
ftrans = ftrans[~numpy.isnan(ftrans)]
result = []
err = 0.
# magnitude -> energy
if kwargs.get('reverse',False) == False:
if vega == True:
# Read in Vega spectrum
vwave,vflux = numpy.genfromtxt(os.path.normpath(filterFolder+vegaFile), comments='#', unpack=True, \
missing_values = ('NaN','nan'), filling_values = (numpy.nan))
vwave = vwave[~numpy.isnan(vflux)]*u.micron
vflux = vflux[~numpy.isnan(vflux)]*(u.erg/(u.cm**2 * u.s * u.micron))
# interpolate Vega onto filter wavelength function
v = interp1d(vwave.value,vflux.value,bounds_error=False,fill_value=0.)
if rsr: fact = trapz(ftrans*fwave.value*v(fwave.value),fwave.value)
else: fact = trapz(ftrans*v(fwave.value),fwave.value)
val = 10.**(-0.4*mag.value)*fact*u.erg/(u.cm**2 * u.s)
# calculate uncertainty
if e_mag.value > 0.:
for i in numpy.arange(nsamples): result.append(10.**(-0.4*(mag.value+numpy.random.normal(0,1.)*e_mag.value))*fact)
err = (numpy.nanstd(result))*u.erg/(u.cm**2 * u.s)
else: err = 0.*u.erg/(u.cm**2 * u.s)
elif ab == True:
fconst = 3631*u.jansky
ftrans = (ftrans*fconst).to(u.erg/(u.cm**2 * u.s * u.micron),equivalencies=u.spectral_density(fwave))
if rsr: fact = trapz(ftrans.value*fwave.value,fwave.value)
else: fact = trapz(ftrans.value,fwave.value)
val = (10.**(-0.4*mag.value)*fact)*u.erg/(u.cm**2 * u.s)
# calculate uncertainty
if e_mag.value > 0.:
for i in numpy.arange(nsamples): result.append(10.**(-0.4*(mag.value+numpy.random.normal(0,1.)*e_mag.value))*fact)
err = (numpy.nanstd(result))*u.erg/(u.cm**2 * u.s)
else: err = 0.*u.erg/(u.cm**2 * u.s)
else:
raise ValueError('\nmagToFlux needs vega or ab method specified')
        # convert to desired energy units
        try:
            val = val.to(return_unit)
            err = err.to(return_unit)
        except:
            print('\nWarning: unit {} is not an energy flux unit'.format(return_unit))
try:
val.to(base_unit)
err.to(base_unit)
except:
            print('\nWarning: cannot convert result to an energy flux unit {}'.format(base_unit))
return numpy.nan, numpy.nan
return val, err
# energy -> magnitude
# THIS NEEDS TO BE COMPLETED
else:
pass
# check that input is an energy flux
# try:
# mag.to(base_unit)
# e_mag.to(base_unit)
# except:
# raise ValueError('\nInput quantity unit {} is not a flux unit'.format(mag.unit))
def visualizeFilter(filters,verbose=True,xra=[],yra=[0,1.2],**kwargs):
'''
:Purpose: Plots a filter profile or set of filter profiles, optionally on top of a spectrum
WARNING: THIS CODE IS CURRENTLY UNDER DEVELOPMENT, BUGS MAY BE COMMON
'''
filt = copy.deepcopy(filters)
wave_unit = kwargs.get('wave_unit',DEFAULT_WAVE_UNIT)
# single filter name
if isinstance(filt,str):
filt = [filt]
if isinstance(filt,list):
# list of filter names
if isinstance(filt[0],str):
            # build a new list rather than mutating filt while iterating over it
            checked = []
            for f in filt:
                fc = checkFilterName(f)
                if fc == False:
                    if verbose==True: print('Removed filter {}: not included in SPLAT'.format(f))
                else:
                    checked.append(fc)
            filt = checked
if len(filt) == 0:
raise ValueError('Did not recognize any of the input filters {}'.format(filters))
# prep parameters
fwave,ftrans = filterProfile(f,**kwargs)
if isUnit(fwave): wave_unit = kwargs.get('wave_unit',fwave.unit)
xl = kwargs.get('xlabel','Wavelength ({})'.format(wave_unit))
yl = kwargs.get('ylabel','Transmission Curve')
legend = []
fig = plt.figure(figsize=kwargs.get('figsize',[5,4]))
for i,f in enumerate(filt):
fwave,ftrans = filterProfile(f,**kwargs)
if isUnit(fwave): fwave.to(wave_unit)
else: fwave = fwave*wave_unit
if kwargs.get('normalize',False): ftrans = ftrans/numpy.nanmax(ftrans)
plt.plot(fwave,ftrans)
if len(xra) == 0: xra = [numpy.nanmin(fwave.value),numpy.nanmax(fwave.value)]
xra = [numpy.nanmin([xra[0],numpy.nanmin(fwave.value)]),numpy.nanmax([xra[1],numpy.nanmax(fwave.value)])]
yra = [yra[0],numpy.nanmax([yra[1],numpy.nanmax(ftrans)])]
legend.append(FILTERS[f]['description'])
if FILTERS[f]['rsr'] == True: yl = kwargs.get('ylabel','Transmission Curve')
# list of notch ranges
if isinstance(filt[0],int) or isinstance(filt[0],float):
filt = [filt]
# list of notch ranges
if isinstance(filt[0],list):
xl = kwargs.get('xlabel','Wavelength ({})'.format(wave_unit))
yl = kwargs.get('ylabel','Transmission Curve')
legend = []
fig = plt.figure(figsize=kwargs.get('figsize',[5,4]))
for i,f in enumerate(filt):
fwave,ftrans = numpy.linspace(f[0],f[1],1000)*wave_unit,numpy.ones(1000)
plt.plot(fwave,ftrans)
if len(xra) == 0: xra = [numpy.nanmin(fwave.value),numpy.nanmax(fwave.value)]
xra = [numpy.nanmin([xra[0],numpy.nanmin(fwave.value)]),numpy.nanmax([xra[1],numpy.nanmax(fwave.value)])]
yra = [yra[0],numpy.nanmax([yra[1],numpy.nanmax(ftrans)])]
legend.append('Filter {}'.format(i+1))
else:
raise ValueError('Could not parse input {}'.format(filt))
# add a comparison spectrum
sp = kwargs.get('spectrum',None)
sp = kwargs.get('comparison',sp)
if isinstance(sp,splat.core.Spectrum) == True:
sp.normalize(xra)
sp.scale(numpy.nanmax(ftrans)*kwargs.get('comparison_scale',0.8))
plt.plot(sp.wave,sp.flux,color=kwargs.get('comparison_color','k'),alpha=kwargs.get('comparison_alpha',0.5))
legend.append(sp.name)
yra = [yra[0],yra[1]*1.1]
# finish up
plt.xlim(xra)
plt.ylim(yra)
plt.xlabel(xl)
plt.ylabel(yl)
plt.legend(legend)
# save if desired
file = kwargs.get('file','')
file = kwargs.get('filename',file)
file = kwargs.get('output',file)
if file != '': plt.savefig(file)
return fig
#########################################
######## SED FITTING TOOLS #########
### WARNING: THESE ARE EXPERIMENTAL!! ###
#########################################
# plan:
def modelMagnitudes(verbose=True):
'''
    Planned: calculate a set of magnitudes for a model set's SED models,
    save them to a file that can be uploaded, and pre-compute magnitudes
    for commonly used model grids.
'''
pass
def interpolateMagnitudes(verbose=True):
'''
produces an interpolated value for a grid set of model magnitudes
'''
pass
def compareMagnitudes(mags1,mags2,unc=None,unc2=None,ignore=[],verbose=True):
'''
    Compares two sets of magnitudes over their common filters, returning the
    chi-square statistic after removing the best-fit constant offset, along
    with that offset.
'''
    if unc is None: unc = {f: 1. for f in list(mags1.keys())}  # default to unit weights
    dm,em = [],[]
for f in list(mags1.keys()):
if f in list(mags2.keys()) and f in list(unc.keys()) and f not in ignore:
dm.append(mags1[f]-mags2[f])
em.append(unc[f])
# find best scale factor
dm = numpy.array(dm)
em = numpy.array(em)
    offset = numpy.sum(dm/em**2)/numpy.sum(1./em**2)
dmo = numpy.array([m-offset for m in dm])
return numpy.sum((dmo/em)**2), offset
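# A minimal usage sketch of compareMagnitudes() (values are made up):
#   mags1 = {'2MASS J': 14.2, '2MASS H': 13.6}
#   mags2 = {'2MASS J': 14.5, '2MASS H': 13.8}
#   unc = {'2MASS J': 0.05, '2MASS H': 0.05}
#   chi2, offset = compareMagnitudes(mags1, mags2, unc=unc)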
def SEDFitGrid(verbose=True):
'''
this code will compare a set of magnitudes to a grid of model magnitudes and choose the
closest match based on various statistics
'''
pass
def SEDFitMCMC(verbose=True):
'''
this code will conduct a comparison of a set of magnitudes to model magnitudes using an
MCMC wrapper, and choose best/average/distribution of parameters
'''
pass
def SEDFitAmoeba(verbose=True):
'''
this code will conduct a comparison of a set of magnitudes to model magnitudes using an
Amoeba (Nelder-Mead) wrapper, and choose the closest match
'''
pass
def SEDVisualize(verbose=True):
'''
Visualizes magnitudes on SED scale (flux = lam x F_lam), with option of also comparing to SED spectrum
'''
pass
#####################################################
############### MAGNITUDE CLASS ###############
#####################################################
class Magnitude(object):
'''
:Description:
This is a class data structure for a magnitude value
'''
def __init__(self, magnitude, filt, uncertainty=0., magtype='apparent', verbose=False,**kwargs):
self.magnitude = magnitude
self.uncertainty = uncertainty
self.type = magtype
# check filter and rename if necessary
self.knownfilter = True
fflag = checkFilterName(filt,verbose=verbose)
if fflag == False:
if verbose== True: print('filter {} is not a standard filter; some functions may not work'.format(filt))
self.knownfilter = False
else: filt = fflag
self.filter = filt
# some things that are based on presets
if self.knownfilter == True:
self.wave,self.transmission = filterProfile(self.filter)
info = filterProperties(self.filter)
for k in info.keys(): setattr(self,k,info[k])
def __copy__(self):
'''
:Purpose: Make a copy of a Magnitude object
'''
s = type(self)(self.magnitude,self.filter,uncertainty=self.uncertainty)
s.__dict__.update(self.__dict__)
return s
# backup version
def copy(self):
'''
:Purpose: Make a copy of a Magnitude object
'''
s = type(self)(self.magnitude,self.filter,uncertainty=self.uncertainty)
s.__dict__.update(self.__dict__)
return s
def __repr__(self):
'''
:Purpose: A simple representation of the Spectrum object
'''
if self.uncertainty != 0. and numpy.isfinite(self.uncertainty):
return '{} magnitude of {}+/-{}'.format(self.filter,self.magnitude,self.uncertainty)
else:
return '{} magnitude of {}'.format(self.filter,self.magnitude)
def __add__(self,other,samp=1000):
'''
:Purpose:
A representation of addition for Magnitude classes that takes into account uncertainties
:Output:
A new Magnitude object equal to the sum of values
'''
# make a copy and fill in combined magnitude
out = copy.deepcopy(self)
out.magnitude = self.magnitude+other.magnitude
out.uncertainty = self.uncertainty+other.uncertainty
# combine noises
if self.uncertainty != 0 and other.uncertainty != 0:
m1 = numpy.random.normal(self.magnitude,self.uncertainty,samp)
m2 = numpy.random.normal(other.magnitude,other.uncertainty,samp)
val = m1+m2
out.uncertainty = numpy.nanstd(val)
# check filter agreement
if self.filter != other.filter:
out.filter = '{}+{}'.format(self.filter,other.filter)
return out
def __sub__(self,other,samp=1000):
'''
:Purpose:
A representation of subtraction for Magnitude classes that takes into account uncertainties
:Output:
            A new Magnitude object equal to the difference of values
'''
# make a copy and fill in combined magnitude
out = copy.deepcopy(self)
out.magnitude = self.magnitude-other.magnitude
out.uncertainty = self.uncertainty+other.uncertainty
# combine noises
if self.uncertainty != 0 and other.uncertainty != 0:
m1 = numpy.random.normal(self.magnitude,self.uncertainty,samp)
m2 = numpy.random.normal(other.magnitude,other.uncertainty,samp)
val = m1-m2
out.uncertainty = numpy.nanstd(val)
# check filter agreement
if self.filter != other.filter:
out.filter = '{}-{}'.format(self.filter,other.filter)
return out
def flux(self,type='fnu',samp=1000):
'''
:Purpose:
Report the equivalent flux density of a magnitude
:Output:
astropy quantity in flux density units (default = erg/cm2/s/micron)
'''
pass
def addFlux(self,other,samp=1000):
'''
:Purpose:
A representation of addition for magnitudes (addition of fluxes)
:Output:
A new magnitude object equal to the equivalent sum of fluxes
'''
# check filter agreement
if self.filter != other.filter:
            raise ValueError('magnitude filters {} and {} are not the same'.format(self.filter,other.filter))
# make a copy and fill in combined magnitude
out = copy.deepcopy(self)
out.magnitude = self.magnitude-2.5*numpy.log10(1.+10.**(-0.4*(other.magnitude-self.magnitude)))
out.uncertainty = self.uncertainty+other.uncertainty
# combine noises
if self.uncertainty != 0 and other.uncertainty != 0:
m1 = numpy.random.normal(self.magnitude,self.uncertainty,samp)
m2 = numpy.random.normal(other.magnitude,other.uncertainty,samp)
val = m1-2.5*numpy.log10(1.+10.**(-0.4*(m2-m1)))
out.uncertainty = numpy.nanstd(val)
return out
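# Quick sanity check on the flux-addition formula in addFlux (illustrative
# only; '2MASS J' is assumed here to be a recognized filter name):
#   m1 = Magnitude(14.0, '2MASS J', uncertainty=0.05)
#   m2 = Magnitude(14.0, '2MASS J', uncertainty=0.05)
#   m1.addFlux(m2).magnitude  # -> 14.0 - 2.5*log10(2) ~= 13.247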
mandalorian-101/badger-system | helpers/time_utils.py | 2b0ee9bd77a2cc6f875b9b984ae4dfe713bbc55c
import datetime
ONE_MINUTE = 60
ONE_HOUR = 3600
ONE_DAY = 24 * ONE_HOUR
ONE_YEAR = 1 * 365 * ONE_DAY
def days(days):
return int(days * 86400.0)
def hours(hours):
return int(hours * 3600.0)
def minutes(minutes):
return int(minutes * 60.0)
def to_utc_date(timestamp):
return datetime.datetime.utcfromtimestamp(timestamp).strftime("%Y-%m-%dT%H:%M:%SZ")
def to_timestamp(date):
    return int(date.timestamp())
def to_minutes(duration):
return duration / ONE_MINUTE
def to_days(duration):
return duration / ONE_DAY
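# Round-trip example: days(2.5) -> 216000 seconds, and
# to_days(216000) -> 2.5 days again.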
def to_hours(duration):
    return duration / ONE_HOUR
British-Oceanographic-Data-Centre/NEMO-ENTRUST | example_scripts/profile_validation/plot_validation_gridded_data.py | 41ed278e56428404ab8ec41d74a9a3a761e308ae
"""
Plot surface or bottom (or any fixed-level) errors from a profile object
with no z_dim (vertical dimension). Provide a list of netCDF files and
adjust the options below to get the figure you want.
You can define how many rows and columns the plot will have. This script
plots the provided list of netCDF datasets from left to right and top to
bottom. A colorbar is placed to the right of the figure.
"""
import xarray as xr
import matplotlib.pyplot as plt
import numpy as np
import sys
sys.path.append("/Users/dbyrne/code/COAsT")
import coast
import pandas as pd
#%% File settings
run_name = "test"
# List of analysis output files. Profiles from each will be plotted
# on each axis of the plot
fn_list = [
"~/transfer/test_grid.nc",
"~/transfer/test_grid.nc",
]
# Filename for the output
fn_out = "/Users/dbyrne/transfer/surface_gridded_errors_{0}.png".format(run_name)
#%% General Plot Settings
var_name = "abs_diff_temperature" # Variable name in analysis file to plot
# If you used a modified variable name when making the gridded data,
# then this is where to select season etc.
save_plot = False
# Masking out grid cells that don't contain many points
min_points_in_average = 5
name_of_count_variable = "grid_N"
# Subplot axes settings
n_r = 2 # Number of subplot rows
n_c = 2 # Number of subplot columns
figsize = (10, 5) # Figure size
lonbounds = [-15, 9.5] # Longitude bounds
latbounds = [45, 64] # Latitude bounds
subplot_padding = 0.5 # Amount of vertical and horizontal padding between plots
fig_pad = (0.075, 0.075, 0.1, 0.1) # Figure padding (left, top, right, bottom)
# Leave some space on right for colorbar
# Scatter opts
marker_size = 3 # Marker size
cmap = "bwr" # Colormap for normal points
clim = (-1, 1) # Color limits for normal points
discrete_cmap = True # Discretize colormap
cmap_levels = 14
# Labels and Titles
fig_title = "SST Errors" # Whole figure title
title_fontsize = 13 # Fontsize of title
title_fontweight = "bold" # Fontweight to use for title
dataset_names = ["CO9p0", "CO9p0", "CO9p0"] # Names to use for labelling plots
subtitle_fontsize = 11 # Fontsize for dataset subtitles
subtitle_fontweight = "normal" # Fontweight for dataset subtitles
# PLOT SEASONS. Make sure n_r = 2 and n_c = 2
# If this option is true, only the first dataset will be plotted, with seasonal
# variables on each subplot. The season_suffixes will be added to var_name
# for each subplot panel.
plot_seasons = True
season_suffixes = ["DJF", "MAM", "JJA", "SON"]
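# e.g. with plot_seasons = True and var_name = "abs_diff_temperature",
# the four panels read abs_diff_temperature_DJF, _MAM, _JJA and _SON
# (and grid_N_DJF etc. for the count-based masking).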
#%% Read and plot data
# Read all datasets into list
ds_list = [xr.open_dataset(dd) for dd in fn_list]
n_ds = len(ds_list)
n_ax = n_r * n_c
# Create plot and flatten axis array
f, a = coast.plot_util.create_geo_subplots(lonbounds, latbounds, n_r, n_c, figsize=figsize)
a_flat = a.flatten()
# Discretize colormap if requested
if discrete_cmap:
cmap = plt.cm.get_cmap(cmap, cmap_levels)
# Determine if we will extend the colormap or not
extend_cbar = []
# Loop over dataset
for ii in range(n_ax):
ur_index = np.unravel_index(ii, (n_r, n_c))
# Select season if required
if plot_seasons:
ds = ds_list[0]
var_ii = var_name + "_{0}".format(season_suffixes[ii])
N_var = "{0}_{1}".format(name_of_count_variable, season_suffixes[ii])
a_flat[ii].text(0.05, 1.02, season_suffixes[ii], transform=a_flat[ii].transAxes, fontweight="bold")
else:
ds = ds_list[ii]
var_ii = var_name
a_flat[ii].set_title(dataset_names[ii], fontsize=subtitle_fontsize, fontweight=subtitle_fontweight)
N_var = name_of_count_variable
data = ds[var_ii].values
count_var = ds[N_var]
data[count_var < min_points_in_average] = np.nan
# Scatter and set title
pc = a_flat[ii].pcolormesh(
ds.longitude,
ds.latitude,
data,
cmap=cmap,
vmin=clim[0],
vmax=clim[1],
)
# Will we extend the colorbar for this dataset?
extend_cbar.append(coast.plot_util.determine_colorbar_extension(data, clim[0], clim[1]))
# Set Figure title
f.suptitle(fig_title, fontsize=title_fontsize, fontweight=title_fontweight)
# Set tight figure layout
f.tight_layout(w_pad=subplot_padding, h_pad=subplot_padding)
f.subplots_adjust(left=(fig_pad[0]), bottom=(fig_pad[1]), right=(1 - fig_pad[2]), top=(1 - fig_pad[3]))
# Handle colorbar -- will we extend it?
if "both" in extend_cbar:
extend = "both"
elif "max" in extend_cbar and "min" in extend_cbar:
extend = "both"
elif "max" in extend_cbar:
extend = "max"
elif "min" in extend_cbar:
extend = "min"
else:
extend = "neither"
cbar_ax = f.add_axes([(1 - fig_pad[2] + fig_pad[2] * 0.15), 0.15, 0.025, 0.7])
f.colorbar(pc, cax=cbar_ax, extend=extend)
# Save plot maybe
if save_plot:
f.savefig(fn_out)
jeury301/text-classifier | feature-engineering/samples/statistical_features.py | d86f658ef3368e4a3f6fd74328fa862e2881ac3b
from sklearn.feature_extraction.text import TfidfVectorizer
def compute_tf_idf(corpus):
"""Computing term frequency (tf) - inverse document frequency (idf).
:param corpus: List of documents.
:returns: tf-idf of corpus.
"""
return TfidfVectorizer().fit_transform(corpus)
if __name__ == '__main__':
sample_corpus = [
'This is sample document.',
'another random document.',
'third sample document text'
]
print(compute_tf_idf(sample_corpus))
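    # Optional extension (a sketch; assumes a recent scikit-learn where
    # get_feature_names_out() is available): fit the vectorizer once so the
    # learned vocabulary can be inspected alongside the tf-idf matrix.
    vectorizer = TfidfVectorizer().fit(sample_corpus)
    print(vectorizer.get_feature_names_out())
    print(vectorizer.transform(sample_corpus).toarray())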
hunterpaulson/fprime | Gds/src/fprime_gds/executables/tcpserver.py | 70560897b56dc3037dc966c99751b708b1cc8a05
#!/usr/bin/env python3
from __future__ import print_function
import socket
import threading
try:
import socketserver
except ImportError:
import SocketServer as socketserver
import time
import os
import signal
import sys
import struct
import errno
from fprime.constants import DATA_ENCODING
from optparse import OptionParser
__version__ = 0.1
__date__ = "2015-04-03"
__updated__ = "2016-04-07"
# Universal server id global
SERVER = None
LOCK = None
shutdown_event = threading.Event()
FSW_clients = []
GUI_clients = []
FSW_ids = []
GUI_ids = []
def signal_handler(*_):
print("Ctrl-C received, server shutting down.")
shutdown_event.set()
def now():
return time.ctime(time.time())
class ThreadedTCPRequestHandler(socketserver.StreamRequestHandler):
"""
Derived from original Stable demo during R&TD and adapted
    for use in the new FSW gse.py application.
TCP socket server for commands, log events, and telemetry data.
Later this will handle other things such as sequence files and parameters.
Handle is instanced in own thread for each client.
Registration is done by sending the string "Register <name>".
Sending a message to destination <name> is done as
"A5A5 <name> <data>" Note only <data> is sent.
    Any client that sends a "List" command makes the server display all
registered clients.
"""
socketserver.StreamRequestHandler.allow_reuse_address = True
socketserver.StreamRequestHandler.timeout = 1
def handle(self): # on each client connect
"""
The function that is invoked upon a new client. This function listens
for data on the socket. Packets for now are assumed to be separated
        by a newline. Received commands are queued and handled by processQueue.
"""
self.partial = b""
self.cmdQueue = []
self.registered = False
self.name = b""
self.id = 0
# print self.client_address, now() # show this client's address
# Read the data from the socket
data = self.recv(13)
# Connection was closed by the client
if not data:
print("Client exited.")
return
else:
# Process the data into the cmdQueue
self.getCmds(data)
# Process the cmdQueue
self.processQueue()
if self.registered:
print("Registration complete waiting for message.")
self.getNewMsg()
else:
print("Unable to register client.")
return
LOCK.acquire()
del SERVER.dest_obj[self.name]
if self.name in FSW_clients:
FSW_clients.remove(self.name)
FSW_ids.remove(self.id)
elif self.name in GUI_clients:
GUI_clients.remove(self.name)
GUI_ids.remove(self.id)
LOCK.release()
print("Closed %s connection." % self.name.decode(DATA_ENCODING))
self.registered = False
self.request.close()
def getCmds(self, inputString, end_of_command=b"\n"):
"""
Build a command from partial or full socket input
"""
commands = inputString.split(end_of_command)
if len(self.partial):
commands[0] = self.partial + commands[0]
self.partial = b""
if len(commands[-1]):
self.partial = commands[-1]
self.cmdQueue.extend(commands[:-1])
else:
self.cmdQueue.extend(commands[:-1])
def processQueue(self):
for cmd in self.cmdQueue:
self.processRegistration(cmd)
self.cmdQueue = []
def processRegistration(self, cmd):
params = cmd.split()
process_id = 0
if params[0] == b"Register":
LOCK.acquire()
name = params[1]
if b"FSW" in name:
if FSW_clients:
process_id = sorted(FSW_ids)[-1] + 1
name = params[1] + b"_" + bytes(process_id)
FSW_clients.append(name)
FSW_ids.append(process_id)
elif b"GUI" in name:
if GUI_clients:
process_id = sorted(GUI_ids)[-1] + 1
name = params[1] + b"_" + bytes(process_id)
GUI_clients.append(name)
GUI_ids.append(process_id)
SERVER.dest_obj[name] = DestObj(name, self.request)
LOCK.release()
self.registered = True
self.name = name
self.id = process_id
print("Registered client " + self.name.decode(DATA_ENCODING))
#################################################
# New Routines to process the command messages
#################################################
def getNewMsg(self):
"""
After registration wait for an incoming message
The first part must always be an "A5A5 " or a "List "
"""
# Loop while the connected client has packets to send/receive
while not shutdown_event.is_set():
# Read the header data from the socket either A5A5 or List
header = self.readHeader()
# If the received header is an empty string, connection closed, exit loop
if not header:
break
elif header == b"Quit":
LOCK.acquire()
print("Quit received!")
SERVER.dest_obj[self.name].put(struct.pack(">I", 0xA5A5A5A5))
shutdown_event.set()
time.sleep(1)
print("Quit processed!")
SERVER.shutdown()
SERVER.server_close()
LOCK.release()
break
# Got the header data so read the data of the message here...
data = self.readData(header)
# Process and send the packet of the message here...
self.processNewPkt(header, data)
def recv(self, l):
"""
Read l bytes from socket.
"""
chunk = b""
msg = b""
n = 0
while l > n:
try:
chunk = self.request.recv(l - n)
if chunk == b"":
print("read data from socket is empty!")
return b""
msg = msg + chunk
n = len(msg)
except socket.timeout:
if shutdown_event.is_set():
print("socket timed out and shutdown is requested")
return b"Quit\n"
continue
except socket.error as err:
if err.errno == errno.ECONNRESET:
print(
"Socket error "
+ str(err.errno)
+ " (Connection reset by peer) occurred on recv()."
)
else:
print("Socket error " + str(err.errno) + " occurred on recv().")
return msg
def readHeader(self):
"""
Read the 9 byte header (e.g. "A5A5 GUI " or "A5A5 FSW "),
or just read the "List\n" command.
"""
header = self.recv(5)
if len(header) == 0:
print(
"Header information is empty, client "
+ self.name.decode(DATA_ENCODING)
+ " exiting."
)
return header
if header == b"List\n":
return b"List"
elif header == b"Quit\n":
return b"Quit"
elif header[:-1] == b"A5A5":
header2 = self.recv(4)
return header + header2
else:
return
def readData(self, header):
"""
Read the data part of the message sent to either GUI or FSW.
GUI receives telemetry.
FSW receives commands of various lengths.
"""
data = b""
if header == b"List":
return b""
elif header == b"Quit":
return b""
dst = header.split(b" ")[1].strip(b" ")
if dst == b"FSW":
# Read variable length command data here...
desc = self.recv(4)
sizeb = self.recv(4)
size = struct.unpack(">I", sizeb)[0]
data = desc + sizeb + self.recv(size)
elif dst == b"GUI":
# Read telemetry data here...
tlm_packet_size = self.recv(4)
size = struct.unpack(">I", tlm_packet_size)[0]
data = tlm_packet_size + self.recv(size)
else:
raise RuntimeError("unrecognized client %s" % dst.decode(DATA_ENCODING))
return data
def processNewPkt(self, header, data):
"""
        Process a single command's header and data here.
        The command must always start with A5A5 unless it is a List.
        Once the entire header string is processed, send it on the queue.
        If something goes wrong, report it and shut down the server.
"""
dest_list = []
if header == b"List":
print("List of registered clients: ")
LOCK.acquire()
for d in list(SERVER.dest_obj.keys()):
print("\t" + SERVER.dest_obj[d].name.decode(DATA_ENCODING))
reg_client_str = b"List " + SERVER.dest_obj[d].name
l = len(reg_client_str)
reg_client_str = struct.pack("i%ds" % l, l, reg_client_str)
self.request.send(reg_client_str)
LOCK.release()
return 0
# Process data here...
head, dst = header.strip(b" ").split(b" ")
if head == b"A5A5": # Packet Header
# print "Received Packet: %s %s...\n" % (head,dst)
if data == b"":
print(" Data is empty, returning.")
if b"GUI" in dst:
dest_list = GUI_clients
elif b"FSW" in dst:
dest_list = FSW_clients
for dest_elem in dest_list:
# print "Locking TCP"
LOCK.acquire()
if dest_elem in list(SERVER.dest_obj.keys()):
# Send the message here....
# print "Sending TCP msg to ", dest_elem
SERVER.dest_obj[dest_elem].put(data)
LOCK.release()
else:
raise RuntimeError("Packet missing A5A5 header")
class ThreadedUDPRequestHandler(socketserver.BaseRequestHandler):
"""
Derived from original Stable demo during R&TD and adapted
    for use in the new FSW gse.py application.
    UDP socket server for telemetry data.
Later this will handle other things such as sequence files and parameters.
Handle is instanced in own thread for each client.
Registration is done by sending the string "Register <name>".
Sending a message to destination <name> is done as
"A5A5 <name> <data>" Note only <data> is sent.
    Any client that sends a "List" command makes the server display all
registered clients.
"""
socketserver.BaseRequestHandler.allow_reuse_address = True
def handle(self): # on each packet
"""
The function that is invoked when a packet is received. This function listens
for data on the socket. Packets for now are assumed to be separated
        by a newline. Each received packet is handled by getNewMsg.
"""
self.getNewMsg(self.request[0])
#################################################
# New Routines to process the command messages
#################################################
def getNewMsg(self, packet):
"""
After registration wait for an incoming message
The first part must always be an "A5A5 " or a "List "
"""
# Read the header data from the socket either A5A5 or List
(header, packet) = self.readHeader(packet)
# If the received header is an empty string, connection closed, exit loop
if not header:
return
# Got the header data so read the data of the message here...
data = self.readData(header, packet)
# Process and send the packet of the message here...
self.processNewPkt(header, data)
def readHeader(self, packet):
"""
Read the 9 byte header (e.g. "A5A5 GUI " or "A5A5 FSW "),
or just read the "List\n" command.
"""
header = packet[:4]
header2 = packet[4:9]
packet = packet[9:]
return (header + header2, packet)
def readData(self, header, packet):
"""
Read the data part of the message sent to either GUI or FSW.
GUI receives telemetry.
FSW receives commands of various lengths.
"""
data = ""
dst = header.split(b" ")[1].strip(b" ")
# Read telemetry data here...
tlm_packet_size = packet[:4]
size = struct.unpack(">I", tlm_packet_size)[0]
data = tlm_packet_size + packet[4 : 4 + size]
return data
def processNewPkt(self, header, data):
"""
        Process a single command's header and data here.
        The command must always start with A5A5 unless it is a List.
        Once the entire header string is processed, send it on the queue.
        If something goes wrong, report it and shut down the server.
"""
dest_list = []
# Process data here...
head, dst = header.strip(b" ").split(b" ")
if head == b"A5A5": # Packet Header
# print "Received Packet: %s %s...\n" % (head,dst)
if data == "":
print(" Data is empty, returning.")
if b"GUI" in dst:
dest_list = GUI_clients
else:
print("dest? %s" % dst.decode(DATA_ENCODING))
for dest_elem in dest_list:
LOCK.acquire()
if dest_elem in list(SERVER.dest_obj.keys()):
# Send the message here....
# print "Sending UDP msg to ", dest_elem
SERVER.dest_obj[dest_elem].put(data)
LOCK.release()
else:
raise RuntimeError("Telemetry missing A5A5 header")
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
"""
TCP Socket server.
Keep a dictionary of destination objects containing queues and
socket id's for writting to destinations.
"""
dest_obj = dict()
lock_obj = threading.Lock()
class ThreadedUDPServer(socketserver.ThreadingMixIn, socketserver.UDPServer):
"""
UDP Socket server.
"""
class DestObj:
"""
Destination object for all clients registered.
"""
def __init__(self, name, request):
"""
Constructor
"""
self.name = name
self.socket = request
self.packet = b""
def put(self, msg):
"""
Write out the message to the destination socket
"""
try:
# print "about to send data to " + self.name
self.socket.send(msg)
except socket.error as err:
print("Socket error " + str(err.errno) + " occurred on send().")
def fileno(self):
"""
"""
return self.socket
def main(argv=None):
global SERVER, LOCK
program_name = os.path.basename(sys.argv[0])
program_license = "Copyright 2015 user_name (California Institute of Technology) \
ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged."
program_version = "v0.1"
program_build_date = "%s" % __updated__
program_version_string = "%%prog %s (%s)" % (program_version, program_build_date)
program_longdesc = (
"""""" # optional - give further explanation about what the program does
)
if argv is None:
argv = sys.argv[1:]
try:
parser = OptionParser(
version=program_version_string,
epilog=program_longdesc,
description=program_license,
)
parser.add_option(
"-p",
"--port",
dest="port",
action="store",
type="int",
help="Set threaded tcp socket server port [default: %default]",
default=50007,
)
parser.add_option(
"-i",
"--host",
dest="host",
action="store",
type="string",
help="Set threaded tcp socket server ip [default: %default]",
default="127.0.0.1",
)
# process options
(opts, args) = parser.parse_args(argv)
HOST = opts.host
PORT = opts.port
server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
udp_server = ThreadedUDPServer((HOST, PORT), ThreadedUDPRequestHandler)
# Hopefully this will allow address reuse and server to restart immediately
server.allow_reuse_address = True
SERVER = server
LOCK = server.lock_obj
ip, port = server.server_address
print("TCP Socket Server listening on host addr %s, port %s" % (HOST, PORT))
# Start a thread with the server -- that thread will then start one
# more thread for each request
server_thread = threading.Thread(target=server.serve_forever)
udp_server_thread = threading.Thread(target=udp_server.serve_forever)
signal.signal(signal.SIGINT, signal_handler)
server_thread.daemon = False
server_thread.start()
udp_server_thread.daemon = False
udp_server_thread.start()
while not shutdown_event.is_set():
server_thread.join(timeout=5.0)
udp_server_thread.join(timeout=5.0)
print("shutdown from main thread")
SERVER.shutdown()
SERVER.server_close()
udp_server.shutdown()
udp_server.server_close()
time.sleep(1)
except Exception as e:
indent = len(program_name) * " "
sys.stderr.write(program_name + ": " + repr(e) + "\n")
sys.stderr.write(indent + " for help use --help\n")
return 2
if __name__ == "__main__":
sys.exit(main())
haivle/BTB-manager-telegram | btb_manager_telegram/__init__.py | c0f71c5a98a3d128ad03578930932737dc580ed1
import logging
import sched
import time
(
MENU,
EDIT_COIN_LIST,
EDIT_USER_CONFIG,
DELETE_DB,
UPDATE_TG,
UPDATE_BTB,
PANIC_BUTTON,
CUSTOM_SCRIPT,
) = range(8)
BOUGHT, BUYING, SOLD, SELLING = range(4)
logging.basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", level=logging.INFO
)
logger = logging.getLogger("btb_manager_telegram_logger")
scheduler = sched.scheduler(time.time, time.sleep)
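# Example usage of the shared scheduler (illustrative; `update_checker`
# is a hypothetical callback):
#   scheduler.enter(delay=60, priority=1, action=update_checker)
#   scheduler.run(blocking=False)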
brettcannon/modutil | tests/test_data/lazy_mod.py | a34794ffee9b6217a9ced41baddab09b4f034cbb
import modutil
mod, __getattr__ = modutil.lazy_import(__name__,
['tests.test_data.A', '.B', '.C as still_C'])
def trigger_A():
return mod.A
def trigger_B():
return mod.B
def trigger_C():
return mod.still_C
def trigger_failure():
return mod.does_not_exist
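# Accessing a name that was not listed in lazy_import() is expected to
# raise AttributeError, e.g. (hypothetical test sketch):
#   import pytest
#   with pytest.raises(AttributeError):
#       trigger_failure()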
xiaohuaibaoguigui/EllSeg | test.py | ff56b255f8e650856aec9af23792e105897eba5c
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import tqdm
import torch
import pickle
import resource
import numpy as np
import matplotlib.pyplot as plt
from args import parse_args
from modelSummary import model_dict
from pytorchtools import load_from_file
from torch.utils.data import DataLoader
from helperfunctions import mypause, stackall_Dict
from loss import get_seg2ptLoss
from utils import get_nparams, get_predictions
from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), os.pardir)))
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (2048*10, rlimit[1]))
#%%
if __name__ == '__main__':
args = parse_args()
    device = torch.device("cuda")
torch.cuda.manual_seed(12)
if torch.cuda.device_count() > 1:
print('Moving to a multiGPU setup.')
args.useMultiGPU = True
else:
args.useMultiGPU = False
torch.backends.cudnn.deterministic=False
if args.model not in model_dict:
print("Model not found.")
print("valid models are: {}".format(list(model_dict.keys())))
exit(1)
LOGDIR = os.path.join(os.getcwd(), 'logs', args.model, args.expname)
path2model = os.path.join(LOGDIR, 'weights')
path2checkpoint = os.path.join(LOGDIR, 'checkpoints')
path2writer = os.path.join(LOGDIR, 'TB.lock')
path2op = os.path.join(os.getcwd(), 'op', str(args.curObj))
os.makedirs(LOGDIR, exist_ok=True)
os.makedirs(path2model, exist_ok=True)
os.makedirs(path2checkpoint, exist_ok=True)
os.makedirs(path2writer, exist_ok=True)
os.makedirs(path2op, exist_ok=True)
model = model_dict[args.model]
netDict = load_from_file([args.loadfile,
os.path.join(path2checkpoint, 'checkpoint.pt')])
startEp = netDict['epoch'] if 'epoch' in netDict.keys() else 0
if 'state_dict' in netDict.keys():
model.load_state_dict(netDict['state_dict'])
print('Parameters: {}'.format(get_nparams(model)))
model = model if not args.useMultiGPU else torch.nn.DataParallel(model)
model = model.to(device).to(args.prec)
f = open(os.path.join('curObjects',
'baseline',
'cond_'+str(args.curObj)+'.pkl'), 'rb')
_, _, testObj = pickle.load(f)
testObj.path2data = os.path.join(args.path2data, 'Datasets', 'All')
testObj.augFlag = False
testloader = DataLoader(testObj,
batch_size=args.batchsize,
shuffle=False,
num_workers=args.workers,
drop_last=False)
if args.disp:
fig, axs = plt.subplots(nrows=1, ncols=1)
#%%
accLoss = 0.0
imCounter = 0
ious = []
dists_pupil_latent = []
dists_pupil_seg = []
dists_iris_latent = []
dists_iris_seg = []
model.eval()
opDict = {'id':[], 'archNum': [], 'archName': [], 'code': [],
'scores':{'iou':[], 'lat_dst':[], 'seg_dst':[]},
'pred':{'pup_latent_c':[],
'pup_seg_c':[],
'iri_latent_c':[],
'iri_seg_c':[],
'mask':[]},
'gt':{'pup_c':[], 'mask':[]}}
with torch.no_grad():
for bt, batchdata in enumerate(tqdm.tqdm(testloader)):
img, labels, spatialWeights, distMap, pupil_center, iris_center, elNorm, cond, imInfo = batchdata
out_tup = model(img.to(device).to(args.prec),
labels.to(device).long(),
pupil_center.to(device).to(args.prec),
elNorm.to(device).to(args.prec),
spatialWeights.to(device).to(args.prec),
distMap.to(device).to(args.prec),
cond.to(device).to(args.prec),
imInfo[:, 2].to(device).to(torch.long),
0.5)
output, elOut, latent, loss = out_tup
latent_pupil_center = elOut[:, 0:2].detach().cpu().numpy()
latent_iris_center = elOut[:, 5:7].detach().cpu().numpy()
_, seg_pupil_center = get_seg2ptLoss(output[:, 2, ...].cpu(), pupil_center, temperature=4)
_, seg_iris_center = get_seg2ptLoss(-output[:, 0, ...].cpu(), iris_center, temperature=4)
loss = loss if args.useMultiGPU else loss.mean()
accLoss += loss.detach().cpu().item()
predict = get_predictions(output)
iou, iou_bySample = getSeg_metrics(labels.numpy(),
predict.numpy(),
cond[:, 1].numpy())[1:]
latent_pupil_dist, latent_pupil_dist_bySample = getPoint_metric(pupil_center.numpy(),
latent_pupil_center,
cond[:,0].numpy(),
img.shape[2:],
True) # Unnormalizes the points
seg_pupil_dist, seg_pupil_dist_bySample = getPoint_metric(pupil_center.numpy(),
seg_pupil_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
latent_iris_dist, latent_iris_dist_bySample = getPoint_metric(iris_center.numpy(),
latent_iris_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
seg_iris_dist, seg_iris_dist_bySample = getPoint_metric(iris_center.numpy(),
seg_iris_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
dists_pupil_latent.append(latent_pupil_dist)
dists_iris_latent.append(latent_iris_dist)
dists_pupil_seg.append(seg_pupil_dist)
dists_iris_seg.append(seg_iris_dist)
ious.append(iou)
pup_latent_c = unnormPts(latent_pupil_center,
img.shape[2:])
pup_seg_c = unnormPts(seg_pupil_center,
img.shape[2:])
iri_latent_c = unnormPts(latent_iris_center,
img.shape[2:])
iri_seg_c = unnormPts(seg_iris_center,
img.shape[2:])
dispI = generateImageGrid(img.numpy().squeeze(),
predict.numpy(),
elOut.detach().cpu().numpy().reshape(-1, 2, 5),
pup_seg_c,
cond.numpy(),
override=True,
heatmaps=False)
for i in range(0, img.shape[0]):
archNum = testObj.imList[imCounter, 1]
opDict['id'].append(testObj.imList[imCounter, 0])
opDict['code'].append(latent[i,...].detach().cpu().numpy())
opDict['archNum'].append(archNum)
opDict['archName'].append(testObj.arch[archNum])
opDict['pred']['pup_latent_c'].append(pup_latent_c[i, :])
opDict['pred']['pup_seg_c'].append(pup_seg_c[i, :])
opDict['pred']['iri_latent_c'].append(iri_latent_c[i, :])
opDict['pred']['iri_seg_c'].append(iri_seg_c[i, :])
if args.test_save_op_masks:
opDict['pred']['mask'].append(predict[i,...].numpy().astype(np.uint8))
opDict['scores']['iou'].append(iou_bySample[i, ...])
opDict['scores']['lat_dst'].append(latent_pupil_dist_bySample[i, ...])
opDict['scores']['seg_dst'].append(seg_pupil_dist_bySample[i, ...])
opDict['gt']['pup_c'].append(pupil_center[i,...].numpy())
if args.test_save_op_masks:
opDict['gt']['mask'].append(labels[i,...].numpy().astype(np.uint8))
imCounter+=1
if args.disp:
if bt == 0:
h_im = plt.imshow(dispI.permute(1, 2, 0))
plt.pause(0.01)
else:
h_im.set_data(dispI.permute(1, 2, 0))
mypause(0.01)
opDict = stackall_Dict(opDict)
ious = np.stack(ious, axis=0)
ious = np.nanmean(ious, axis=0)
print('mIoU: {}. IoUs: {}'.format(np.mean(ious), ious))
print('Latent space PUPIL dist. Med: {}, STD: {}'.format(np.nanmedian(dists_pupil_latent),
np.nanstd(dists_pupil_latent)))
print('Segmentation PUPIL dist. Med: {}, STD: {}'.format(np.nanmedian(dists_pupil_seg),
np.nanstd(dists_pupil_seg)))
print('Latent space IRIS dist. Med: {}, STD: {}'.format(np.nanmedian(dists_iris_latent),
np.nanstd(dists_iris_latent)))
print('Segmentation IRIS dist. Med: {}, STD: {}'.format(np.nanmedian(dists_iris_seg),
np.nanstd(dists_iris_seg)))
    print('--- Saving outputs to directory ---')
    with open(os.path.join(path2op, 'opDict.pkl'), 'wb') as f:
        pickle.dump(opDict, f)
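    # To load the saved results later (sketch; path2op is whatever output
    # directory was configured above):
    #     with open(os.path.join(path2op, 'opDict.pkl'), 'rb') as f:
    #         opDict = pickle.load(f)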
meskio/tuf | tests/test_util.py | 09c3ceb993d40f7339bbbaf4eae617f95b972708 | #!/usr/bin/env python
"""
<Program Name>
test_util.py
<Author>
Konstantin Andrianov.
<Started>
February 1, 2013.
<Copyright>
See LICENSE for licensing information.
<Purpose>
Unit test for 'util.py'
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import gzip
import shutil
import logging
import tempfile
import unittest
import tuf
import tuf.log
import tuf.hash
import tuf.util
import tuf.unittest_toolbox as unittest_toolbox
import tuf._vendor.six as six
logger = logging.getLogger('tuf.test_util')
class TestUtil(unittest_toolbox.Modified_TestCase):
def setUp(self):
unittest_toolbox.Modified_TestCase.setUp(self)
self.temp_fileobj = tuf.util.TempFile()
def tearDown(self):
unittest_toolbox.Modified_TestCase.tearDown(self)
self.temp_fileobj.close_temp_file()
def test_A1_tempfile_close_temp_file(self):
# Was the temporary file closed?
self.temp_fileobj.close_temp_file()
self.assertTrue(self.temp_fileobj.temporary_file.closed)
def _extract_tempfile_directory(self, config_temp_dir=None):
"""
Takes a directory (essentially specified in the conf.py as
'temporary_directory') and substitutes tempfile.TemporaryFile() with
    tempfile.mkstemp() in order to extract the actual directory of the stored
    tempfile. Returns the config's temporary directory (or the default temp
    directory) and the actual directory.
"""
# Patching 'tuf.conf.temporary_directory'.
tuf.conf.temporary_directory = config_temp_dir
if config_temp_dir is None:
# 'config_temp_dir' needs to be set to default.
config_temp_dir = tempfile.gettempdir()
# Patching 'tempfile.TemporaryFile()' (by substituting
# temfile.TemporaryFile() with tempfile.mkstemp()) in order to get the
# directory of the stored tempfile object.
saved_tempfile_TemporaryFile = tuf.util.tempfile.NamedTemporaryFile
tuf.util.tempfile.NamedTemporaryFile = tempfile.mkstemp
_temp_fileobj = tuf.util.TempFile()
tuf.util.tempfile.NamedTemporaryFile = saved_tempfile_TemporaryFile
junk, _tempfilepath = _temp_fileobj.temporary_file
_tempfile_dir = os.path.dirname(_tempfilepath)
# In the case when 'config_temp_dir' is None or some other discrepancy,
# '_temp_fileobj' needs to be closed manually since tempfile.mkstemp()
# was used.
if os.path.exists(_tempfilepath):
os.remove(_tempfilepath)
return config_temp_dir, _tempfile_dir
def test_A2_tempfile_init(self):
# Goal: Verify that temporary files are stored in the appropriate temp
# directory. The location of the temporary files is set in 'tuf.conf.py'.
# Test: Expected input verification.
# Assumed 'tuf.conf.temporary_directory' is 'None' initially.
temp_file = tuf.util.TempFile()
temp_file_directory = os.path.dirname(temp_file.temporary_file.name)
self.assertEqual(tempfile.gettempdir(), temp_file_directory)
saved_temporary_directory = tuf.conf.temporary_directory
temp_directory = self.make_temp_directory()
tuf.conf.temporary_directory = temp_directory
temp_file = tuf.util.TempFile()
temp_file_directory = os.path.dirname(temp_file.temporary_file.name)
self.assertEqual(temp_directory, temp_file_directory)
tuf.conf.temporary_directory = saved_temporary_directory
# Test: Unexpected input handling.
config_temp_dirs = [self.random_string(), 123, ['a'], {'a':1}]
for config_temp_dir in config_temp_dirs:
config_temp_dir, actual_dir = \
self._extract_tempfile_directory(config_temp_dir)
self.assertEqual(tempfile.gettempdir(), actual_dir)
def test_A3_tempfile_read(self):
filepath = self.make_temp_data_file(data = '1234567890')
fileobj = open(filepath, 'rb')
# Patching 'temp_fileobj.temporary_file'.
self.temp_fileobj.temporary_file = fileobj
# Test: Expected input.
self.assertEqual(self.temp_fileobj.read().decode('utf-8'), '1234567890')
self.assertEqual(self.temp_fileobj.read(4).decode('utf-8'), '1234')
# Test: Unexpected input.
for bogus_arg in ['abcd', ['abcd'], {'a':'a'}, -100]:
self.assertRaises(tuf.FormatError, self.temp_fileobj.read, bogus_arg)
def test_A4_tempfile_write(self):
data = self.random_string()
self.temp_fileobj.write(data.encode('utf-8'))
self.assertEqual(data, self.temp_fileobj.read().decode('utf-8'))
self.temp_fileobj.write(data.encode('utf-8'), auto_flush=False)
self.assertEqual(data, self.temp_fileobj.read().decode('utf-8'))
def test_A5_tempfile_move(self):
# Destination directory to save the temporary file in.
dest_temp_dir = self.make_temp_directory()
dest_path = os.path.join(dest_temp_dir, self.random_string())
self.temp_fileobj.write(self.random_string().encode('utf-8'))
self.temp_fileobj.move(dest_path)
self.assertTrue(dest_path)
def _compress_existing_file(self, filepath):
"""
    [Helper] Compresses the file 'filepath' and returns the path of
    the compressed file.
"""
# NOTE: DO NOT forget to remove the newly created compressed file!
if os.path.exists(filepath):
compressed_filepath = filepath+'.gz'
f_in = open(filepath, 'rb')
f_out = gzip.open(compressed_filepath, 'wb')
f_out.writelines(f_in)
f_out.close()
f_in.close()
return compressed_filepath
else:
logger.error('Compression of '+repr(filepath)+' failed. Path does not exist.')
sys.exit(1)
def _decompress_file(self, compressed_filepath):
"""[Helper]"""
if os.path.exists(compressed_filepath):
f = gzip.open(compressed_filepath, 'rb')
file_content = f.read()
f.close()
return file_content
else:
logger.error('Decompression of '+repr(compressed_filepath)+' failed. '+\
'Path does not exist.')
sys.exit(1)
def test_A6_tempfile_decompress_temp_file_object(self):
# Setup: generate a temp file (self.make_temp_data_file()),
# compress it. Write it to self.temp_fileobj().
filepath = self.make_temp_data_file()
fileobj = open(filepath, 'rb')
compressed_filepath = self._compress_existing_file(filepath)
compressed_fileobj = open(compressed_filepath, 'rb')
self.temp_fileobj.write(compressed_fileobj.read())
os.remove(compressed_filepath)
# Try decompression using incorrect compression type i.e. compressions
# other than 'gzip'. In short feeding incorrect input.
bogus_args = ['zip', 1234, self.random_string()]
for arg in bogus_args:
self.assertRaises(tuf.Error,
self.temp_fileobj.decompress_temp_file_object, arg)
self.temp_fileobj.decompress_temp_file_object('gzip')
self.assertEqual(self.temp_fileobj.read(), fileobj.read())
# Checking the content of the TempFile's '_orig_file' instance.
check_compressed_original = self.make_temp_file()
with open(check_compressed_original, 'wb') as file_object:
file_object.write(self.temp_fileobj._orig_file.read())
data_in_orig_file = self._decompress_file(check_compressed_original)
fileobj.seek(0)
self.assertEqual(data_in_orig_file, fileobj.read())
# Try decompressing once more.
self.assertRaises(tuf.Error,
self.temp_fileobj.decompress_temp_file_object, 'gzip')
# Test decompression of invalid gzip file.
temp_file = tuf.util.TempFile()
fileobj.seek(0)
temp_file.write(fileobj.read())
temp_file.decompress_temp_file_object('gzip')
def test_B1_get_file_details(self):
# Goal: Verify proper output given certain expected/unexpected input.
# Making a temporary file.
filepath = self.make_temp_data_file()
# Computing the hash and length of the tempfile.
digest_object = tuf.hash.digest_filename(filepath, algorithm='sha256')
file_hash = {'sha256' : digest_object.hexdigest()}
file_length = os.path.getsize(filepath)
# Test: Expected input.
self.assertEqual(tuf.util.get_file_details(filepath), (file_length, file_hash))
# Test: Incorrect input.
bogus_inputs = [self.random_string(), 1234, [self.random_string()],
{'a': 'b'}, None]
for bogus_input in bogus_inputs:
if isinstance(bogus_input, six.string_types):
self.assertRaises(tuf.Error, tuf.util.get_file_details, bogus_input)
else:
self.assertRaises(tuf.FormatError, tuf.util.get_file_details, bogus_input)
def test_B2_ensure_parent_dir(self):
existing_parent_dir = self.make_temp_directory()
non_existing_parent_dir = os.path.join(existing_parent_dir, 'a', 'b')
for parent_dir in [existing_parent_dir, non_existing_parent_dir, 12, [3]]:
if isinstance(parent_dir, six.string_types):
tuf.util.ensure_parent_dir(os.path.join(parent_dir, 'a.txt'))
self.assertTrue(os.path.isdir(parent_dir))
else:
self.assertRaises(tuf.FormatError, tuf.util.ensure_parent_dir, parent_dir)
def test_B3_file_in_confined_directories(self):
# Goal: Provide invalid input for 'filepath' and 'confined_directories'.
# Include inputs like: '[1, 2, "a"]' and such...
# Reference to 'file_in_confined_directories()' to improve readability.
in_confined_directory = tuf.util.file_in_confined_directories
list_of_confined_directories = ['a', 12, {'a':'a'}, [1]]
list_of_filepaths = [12, ['a'], {'a':'a'}, 'a']
for bogus_confined_directory in list_of_confined_directories:
for filepath in list_of_filepaths:
self.assertRaises(tuf.FormatError, in_confined_directory,
filepath, bogus_confined_directory)
# Test: Inputs that evaluate to False.
confined_directories = ['a/b/', 'a/b/c/d/']
self.assertFalse(in_confined_directory('a/b/c/1.txt', confined_directories))
confined_directories = ['a/b/c/d/e/']
self.assertFalse(in_confined_directory('a', confined_directories))
self.assertFalse(in_confined_directory('a/b', confined_directories))
self.assertFalse(in_confined_directory('a/b/c', confined_directories))
self.assertFalse(in_confined_directory('a/b/c/d', confined_directories))
# Below, 'e' is a file in the 'a/b/c/d/' directory.
self.assertFalse(in_confined_directory('a/b/c/d/e', confined_directories))
# Test: Inputs that evaluate to True.
self.assertTrue(in_confined_directory('a/b/c.txt', ['']))
self.assertTrue(in_confined_directory('a/b/c.txt', ['a/b/']))
self.assertTrue(in_confined_directory('a/b/c.txt', ['x', '']))
self.assertTrue(in_confined_directory('a/b/c/..', ['a/']))
def test_B4_import_json(self):
self.assertTrue('json' in sys.modules)
def test_B5_load_json_string(self):
# Test normal case.
data = ['a', {'b': ['c', None, 30.3, 29]}]
json_string = tuf.util.json.dumps(data)
self.assertEqual(data, tuf.util.load_json_string(json_string))
# Test invalid arguments.
self.assertRaises(tuf.Error, tuf.util.load_json_string, 8)
invalid_json_string = {'a': tuf.FormatError}
self.assertRaises(tuf.Error, tuf.util.load_json_string, invalid_json_string)
def test_B6_load_json_file(self):
data = ['a', {'b': ['c', None, 30.3, 29]}]
filepath = self.make_temp_file()
fileobj = open(filepath, 'wt')
tuf.util.json.dump(data, fileobj)
fileobj.close()
self.assertEqual(data, tuf.util.load_json_file(filepath))
# Test a gzipped file.
compressed_filepath = self._compress_existing_file(filepath)
self.assertEqual(data, tuf.util.load_json_file(compressed_filepath))
Errors = (tuf.FormatError, IOError)
for bogus_arg in [b'a', 1, [b'a'], {'a':b'b'}]:
self.assertRaises(Errors, tuf.util.load_json_file, bogus_arg)
def test_C1_get_target_hash(self):
# Test normal case.
expected_target_hashes = {
'/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
'/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0',
'/warehouse/file2.txt': 'd543a573a2cec67026eff06e75702303559e64e705eba06f65799baaf0424417'
}
for filepath, target_hash in six.iteritems(expected_target_hashes):
self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
self.assertTrue(tuf.formats.HASH_SCHEMA.matches(target_hash))
self.assertEqual(tuf.util.get_target_hash(filepath), target_hash)
# Test for improperly formatted argument.
self.assertRaises(tuf.FormatError, tuf.util.get_target_hash, 8)
def test_C2_find_delegated_role(self):
# Test normal case. Create an expected role list, which is one of the
# required arguments to 'find_delegated_role()'.
role_list = [
{
"keyids": [
"a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf"
],
"name": "targets/warehouse",
"paths": [
"/file1.txt", "/README.txt", '/warehouse/'
],
"threshold": 3
},
{
"keyids": [
"a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf"
],
"name": "targets/tuf",
"paths": [
"/updater.py", "formats.py", '/tuf/'
],
"threshold": 4
}
]
self.assertTrue(tuf.formats.ROLELIST_SCHEMA.matches(role_list))
self.assertEqual(tuf.util.find_delegated_role(role_list, 'targets/tuf'), 1)
self.assertEqual(tuf.util.find_delegated_role(role_list, 'targets/warehouse'), 0)
# Test for non-existent role. 'find_delegated_role()' returns 'None'
# if the role is not found.
self.assertEqual(tuf.util.find_delegated_role(role_list, 'targets/non-existent'),
None)
# Test improperly formatted arguments.
self.assertRaises(tuf.FormatError, tuf.util.find_delegated_role, 8, role_list)
self.assertRaises(tuf.FormatError, tuf.util.find_delegated_role, 8, 'targets/tuf')
# Test duplicate roles.
role_list.append(role_list[1])
self.assertRaises(tuf.RepositoryError, tuf.util.find_delegated_role, role_list,
'targets/tuf')
# Test missing 'name' attribute (optional, but required by
    # 'find_delegated_role()').
# Delete the duplicate role, and the remaining role's 'name' attribute.
del role_list[2]
del role_list[0]['name']
self.assertRaises(tuf.RepositoryError, tuf.util.find_delegated_role, role_list,
'targets/warehouse')
def test_C3_paths_are_consistent_with_hash_prefixes(self):
# Test normal case.
path_hash_prefixes = ['e3a3', '8fae', 'd543']
list_of_targets = ['/file1.txt', '/README.txt', '/warehouse/file2.txt']
    # Ensure the paths of 'list_of_targets' each have the expected path hash
# prefix listed in 'path_hash_prefixes'.
for filepath in list_of_targets:
self.assertTrue(tuf.util.get_target_hash(filepath)[0:4] in path_hash_prefixes)
self.assertTrue(tuf.util.paths_are_consistent_with_hash_prefixes(list_of_targets,
path_hash_prefixes))
extra_invalid_prefix = ['e3a3', '8fae', 'd543', '0000']
self.assertTrue(tuf.util.paths_are_consistent_with_hash_prefixes(list_of_targets,
extra_invalid_prefix))
# Test improperly formatted arguments.
self.assertRaises(tuf.FormatError,
tuf.util.paths_are_consistent_with_hash_prefixes, 8,
path_hash_prefixes)
self.assertRaises(tuf.FormatError,
tuf.util.paths_are_consistent_with_hash_prefixes,
list_of_targets, 8)
self.assertRaises(tuf.FormatError,
tuf.util.paths_are_consistent_with_hash_prefixes,
list_of_targets, ['zza1'])
# Test invalid list of targets.
bad_target_path = '/file5.txt'
self.assertTrue(tuf.util.get_target_hash(bad_target_path)[0:4] not in
path_hash_prefixes)
self.assertFalse(tuf.util.paths_are_consistent_with_hash_prefixes([bad_target_path],
path_hash_prefixes))
# Add invalid target path to 'list_of_targets'.
list_of_targets.append(bad_target_path)
self.assertFalse(tuf.util.paths_are_consistent_with_hash_prefixes(list_of_targets,
path_hash_prefixes))
def test_C4_ensure_all_targets_allowed(self):
# Test normal case.
rolename = 'targets/warehouse'
self.assertTrue(tuf.formats.ROLENAME_SCHEMA.matches(rolename))
list_of_targets = ['/file1.txt', '/README.txt', '/warehouse/file2.txt']
self.assertTrue(tuf.formats.RELPATHS_SCHEMA.matches(list_of_targets))
parent_delegations = {"keys": {
"a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf": {
"keytype": "ed25519",
"keyval": {
"public": "3eb81026ded5af2c61fb3d4b272ac53cd1049a810ee88f4df1fc35cdaf918157"
}
}
},
"roles": [
{
"keyids": [
"a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf"
],
"name": "targets/warehouse",
"paths": [
"/file1.txt", "/README.txt", '/warehouse/'
],
"threshold": 1
}
]
}
self.assertTrue(tuf.formats.DELEGATIONS_SCHEMA.matches(parent_delegations))
tuf.util.ensure_all_targets_allowed(rolename, list_of_targets,
parent_delegations)
# The target files of 'targets' are always allowed. 'list_of_targets' and
# 'parent_delegations' are not checked in this case.
tuf.util.ensure_all_targets_allowed('targets', list_of_targets,
parent_delegations)
# Test improperly formatted arguments.
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
8, list_of_targets, parent_delegations)
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
rolename, 8, parent_delegations)
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
rolename, list_of_targets, 8)
# Test for invalid 'rolename', which has not been delegated by its parent,
# 'targets'.
self.assertRaises(tuf.RepositoryError, tuf.util.ensure_all_targets_allowed,
'targets/non-delegated_rolename', list_of_targets,
parent_delegations)
# Test for target file that is not allowed by the parent role.
self.assertRaises(tuf.ForbiddenTargetError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', ['file8.txt'], parent_delegations)
self.assertRaises(tuf.ForbiddenTargetError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', ['file1.txt', 'bad-README.txt'],
parent_delegations)
# Test for required attributes.
# Missing 'paths' attribute.
del parent_delegations['roles'][0]['paths']
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', list_of_targets, parent_delegations)
# Test 'path_hash_prefixes' attribute.
path_hash_prefixes = ['e3a3', '8fae', 'd543']
parent_delegations['roles'][0]['path_hash_prefixes'] = path_hash_prefixes
# Test normal case for 'path_hash_prefixes'.
tuf.util.ensure_all_targets_allowed('targets/warehouse', list_of_targets,
parent_delegations)
# Test target file with a path_hash_prefix that is not allowed in its
# parent role.
path_hash_prefix = tuf.util.get_target_hash('file5.txt')[0:4]
self.assertTrue(path_hash_prefix not in parent_delegations['roles'][0]
['path_hash_prefixes'])
self.assertRaises(tuf.ForbiddenTargetError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', ['file5.txt'], parent_delegations)
def test_C5_unittest_toolbox_make_temp_directory(self):
# Verify that the tearDown function does not fail when
# unittest_toolbox.make_temp_directory deletes the generated temp directory
# here.
temp_directory = self.make_temp_directory()
os.rmdir(temp_directory)
def test_c6_get_compressed_length(self):
self.temp_fileobj.write(b'hello world')
self.assertTrue(self.temp_fileobj.get_compressed_length() == 11)
temp_file = tuf.util.TempFile()
# Run unit test.
if __name__ == '__main__':
unittest.main()
BFlameSwift/AirplaneReservationSystem | background/forms.py | bbabb0e258c72eb50fcbbf7ade437e38a39e6f02 |
from django import forms
class FlightrForm(forms.Form):
flight_number = forms.CharField(max_length=30, label="航班号", widget=forms.TextInput(attrs={'class': 'form-control'}))
plane_type_choices = [
('波音', (
('1', '747'),
('2', '777'),
('3', '787'),
)
),
('空客', (
('4', 'A300'),
('5', 'A310'),
('6', 'A320'),
('7', 'A350'),
)
),
]
plane_type = forms.ChoiceField(label='飞机型号', choices=plane_type_choices,widget=forms.Select)
origination = forms.CharField(max_length=30,label="始发地", widget=forms.TextInput(attrs={'class': 'form-control'}))
destination = forms.CharField(max_length=30,label="目的地", widget=forms.TextInput(attrs={'class': 'form-control'}))
starting_time = forms.TimeField(label="始发时间",widget=forms.TimeInput(attrs={'class': 'form-control'}))
departure_airport = forms.CharField(max_length=64, label="始发机场", widget=forms.TextInput(attrs={'class': 'form-control'}))
landing_airport = forms.CharField(max_length=64, label="目的机场", widget=forms.TextInput(attrs={'class': 'form-control'}))
arrival_time = forms.TimeField(label="到达时间",widget=forms.TimeInput(attrs={'class': 'form-control'}))
first_class_price = forms.FloatField(label="头等舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
# highlevel_economy_class_price = forms.FloatField(label="高级经济舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
business_class_price = forms.FloatField(label="商务舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
economy_class_price = forms.FloatField(label="经济舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
starting_date = forms.DateField(label="始发日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
ending_date = forms.DateField(label="终止日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
class StartStopDateForm(forms.Form):
starting_date = forms.DateField(label="始发日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
ending_date = forms.DateField(label="终止日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
flight_number = forms.CharField(max_length=30, label="航班号", widget=forms.TextInput(attrs={'class': 'form-control'}))
# book_sum = forms.IntegerField(label="订票总数")
# plane_capacity = forms.IntegerField(label="飞机容量")
class flight_number_Form(forms.Form):
flight_number = forms.CharField(max_length=30, label="航班号", widget=forms.TextInput(attrs={'class': 'form-control'}))
class concrete_flight_id_Form(forms.Form):
    concrete_flight_id = forms.CharField(max_length=30, label="Flight ID", widget=forms.TextInput(attrs={'class': 'form-control'}))
boliqq07/cam3d | cams/propressing/data_rotate.py | 8b66681166a8ce0ef3304309385c1b899f1d2bb9 | from functools import lru_cache
from math import cos, sin
import scipy
from scipy.ndimage import affine_transform
import numpy as np
@lru_cache(maxsize=10)
def get_matrix(angles=(90, 90, 90), inverse=False):
"""
    Get the axis-rotation (shear + compress) matrix for the given angles.
Examples: z = 120
############################################################
---------------------- --------------------------------
-oooooooooooooooooooo- --------------------------------
-oooooooooooooooooooo- -oooooooooooooooooooo-----------
-oooooooooooooooooooo- ---oooooooooooooooooooo---------
-oooooooooooooooooooo- >>> -----oooooooooooooooooooo-------
-oooooooooooooooooooo- -------oooooooooooooooooooo-----
-oooooooooooooooooooo- ---------oooooooooooooooooooo---
-oooooooooooooooooooo- -----------oooooooooooooooooooo-
---------------------- --------------------------------
############################################################
    1. The ``matrix`` is the transform matrix that rotates the data by the given angles, always in Cartesian coordinates.
    2. The ``inverse matrix`` is the interpolation matrix used to recover the true data matrix (Cartesian
    coordinates) from the relative data matrix (non-Cartesian coordinates).
Parameters
----------
angles: tuple
3 angle of x, y, z
z angle is the intersection angle of x,y,
y angle is the intersection angle of x,z,
x angle is the intersection angle of y,z.
    inverse: bool
        If True, return the (multiplicative) inverse of the matrix.
"""
theta1, theta2, theta3 = [np.pi / 180 * angle for angle in angles]
matrix1 = np.array([[1, cos(theta3), 0],
[0, sin(theta3), 0],
[0, 0, 1]])
matrix2 = np.array([[1, 0, 0],
[0, 1, cos(theta1)],
[0, 0, sin(theta1)]])
matrix3 = np.array([[1, 0, cos(theta2)],
[0, 1, 0],
[0, 0, sin(theta2)]])
matrix = np.dot(matrix1, matrix2).dot(matrix3)
if inverse:
matrix = np.linalg.inv(matrix)
return matrix
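# Minimal usage sketch (illustrative values only; the 120-degree z angle is a
# made-up hexagonal-cell example matching the ASCII diagram in the docstring):
#     m = get_matrix(angles=(90, 90, 120))                  # relative -> Cartesian shear
#     m_inv = get_matrix(angles=(90, 90, 120), inverse=True)
#     cart_xyz = np.dot(m, np.array([0.5, 0.5, 0.5]))       # map one relative point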
def rotation_axis_by_angle(data, angles=(90, 90, 90), times=(2, 2, 2)):
"""
    Get the true data matrix (Cartesian coordinates) from the relative data matrix (non-Cartesian coordinates).
Parameters
----------
data: np.ndarray
data with shape (nx,ny,nz).
angles:tuple
3 angle of x, y, z
z angle is the intersection angle of x,y,
y angle is the intersection angle of x,z,
x angle is the intersection angle of y,z.
times: tuple
expand the multiple of the matrix.
"""
matrix = get_matrix(angles=angles, inverse=True)
return rotation_axis_by_matrix(data, matrix, times=times)
def rotation_axis_by_matrix(data, matrix, times=(2, 2, 2)):
"""
    Get the true data matrix (Cartesian coordinates) from the relative data matrix (non-Cartesian coordinates).
Parameters
----------
data: np.ndarray
data with shape (nx,ny,nz).
matrix:tuple
See Also ``get_matrix``
times: tuple
expand the multiple of the matrix.
"""
dims_old = data.shape
dims = tuple([int(i * j) for i, j in zip(dims_old, times)])
n_data = np.zeros(dims)
d0s = int((dims[0] - dims_old[0]) / 2)
d1s = int((dims[1] - dims_old[1]) / 2)
d2s = int((dims[2] - dims_old[2]) / 2)
n_data[d0s:d0s + dims_old[0], d1s:d1s + dims_old[1], d2s:d2s + dims_old[2]] = data
coords = np.meshgrid(range(dims[0]), range(dims[1]), range(dims[2]), indexing="ij")
xy_coords = np.vstack([coords[0].reshape(-1), coords[1].reshape(-1), coords[2].reshape(-1)])
# apply the transformation matrix
# please note: the coordinates are not homogeneous.
# for the 3D case, I've added code for homogeneous coordinates, you might want to look at that
# please also note: rotation is always around the origin:
# since I want the origin to be in the image center, I had to substract dim/2, rotate, then add it again
dims2 = np.array([i / 2 for i in dims])
dims2 = dims2.reshape(-1, 1)
xy_coords = np.dot(matrix, xy_coords - dims2) + dims2
#
# # undo the stacking and reshaping
x = xy_coords[0, :]
y = xy_coords[1, :]
z = xy_coords[2, :]
x = x.reshape(dims, order="A")
y = y.reshape(dims, order="A")
z = z.reshape(dims, order="A")
new_coords = [x, y, z]
# use map_coordinates to sample values for the new image
new_img = scipy.ndimage.map_coordinates(n_data, new_coords, order=2)
return new_img
def _coords(points, angles=(90, 90, 90), times=(2, 2, 2)):
"""
Parameters
----------
    points: np.ndarray
        key points with shape (n_sample, 3),
        given as fractions (percent) of the data shape.
angles:tuple
3 angle of x, y, z
z angle is the intersection angle of x,y,
y angle is the intersection angle of x,z,
x angle is the intersection angle of y,z.
times: tuple
expand the multiple of the matrix.
"""
dims_old = [1, 1, 1]
matrix = get_matrix(angles=angles)
times = np.array(list(times))
times = times.reshape((-1, 1))
dims_old = np.array(dims_old)
dims_old = dims_old.reshape(-1, 1)
dims2 = dims_old / 2
points = points.T * dims_old
xy_coords = np.dot(matrix, points - dims2) + dims2
xy_coords = xy_coords + (times / 2 - 0.5)
return xy_coords
def rote_index(points, data, angles=(90, 90, 90), times=(2, 2, 2), data_init=True, return_type="float"):
"""
Parameters
----------
    points: np.ndarray
        key points with shape (n_sample, 3),
        given as fractions (percent) of the data shape.
data: np.ndarray or tuple
data or data.shape
    data_init: bool
        Whether ``data`` is the initial (relative-coordinate) data or already the true
        Cartesian-coordinate data (see ``rotation_axis_by_angle``).
angles:tuple
3 angle of x, y, z
z angle is the intersection angle of x,y,
y angle is the intersection angle of x,z,
x angle is the intersection angle of y,z.
times: tuple
expand the multiple of the matrix.
return_type:str
"float", "int", "percent"
for "float", "int" return the new index
for "percent" return the new percent.
"""
data_shape = data.shape if isinstance(data, np.ndarray) else data
if data_init:
times_np = np.array([1,1,1])
else:
times_np = np.array(times)
dims = data_shape
dims = np.array(dims).reshape((-1, 1))
xy_coords = _coords(points, angles=angles, times=times)
if return_type == "percent":
return xy_coords
if return_type == "float":
return (dims * xy_coords/times_np).T
else:
return np.round((dims * xy_coords/times_np).T).astype(int) # for rounding off: .4 -, .5 +
def rote_value(points, data, angles=(90, 90, 90), times=(2, 2, 2), method="in", data_type="td"):
"""
Parameters
----------
    points: np.ndarray
        key points with shape (n_sample, 3),
        given as fractions (percent) of the data shape.
data: np.ndarray
data
angles:tuple
3 angle of x, y, z
z angle is the intersection angle of x,y,
y angle is the intersection angle of x,z,
x angle is the intersection angle of y,z.
times: tuple
expand the multiple of the matrix.
    data_type: str
        if "init", ``data`` is the initial data (elfcar, chgcar); see ``rotation_axis_by_angle``.
        if "td", ``data`` is the true (Cartesian) matrix data; see ``rotation_axis_by_angle``.
    method: str
        if "near", return the nearest site's value.
        otherwise, return the interpolated value.
"""
if data_type == "td":
new_data = data
else:
new_data = rotation_axis_by_angle(data, angles=angles, times=times)
if method == "near":
ind = rote_index(points, data, angles=angles, times=times, return_type="int")
new_value = np.array([new_data[tuple(i)] for i in ind.T])
return new_value
else:
ind = rote_index(points, data, angles=angles, times=times, return_type="float")
new_value = scipy.ndimage.map_coordinates(new_data, ind, order=2)
return new_value
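# End-to-end sketch (hypothetical grid; shapes, angles, and points are made up):
#     data = np.random.rand(8, 8, 8)                        # relative-axis grid
#     td = rotation_axis_by_angle(data, angles=(90, 90, 120), times=(2, 2, 2))
#     pts = np.array([[0.5, 0.5, 0.5]])                     # fractional coords
#     vals = rote_value(pts, td, angles=(90, 90, 120), times=(2, 2, 2),
#                       method="near", data_type="td")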
flange/esp | playground/conversions/parser/lola2dot.py | 78925925daf876e4936ca7af046b4f884e8a4233 | #!/usr/bin/env python
import sys
#lolafile = open("ex-small.graph", "r")
source = 0
target = 0
lowlink = 0
trans = "bla"
print("digraph {")
with open(sys.argv[1]) as lolafile:
for line in lolafile:
if len(line) == 1:
continue
linelist = line.split(" ")
if "STATE" in linelist:
source = linelist[1]
lowlink = linelist[3].rstrip()
if "->" in linelist:
trans = linelist[0]
target = linelist[2].rstrip()
print(''' {} -> {} [label="{}", lowlink="{}"];'''.format(source, target, trans, lowlink))
print("}")
takeshixx/dprkdict | engkor/views.py | 7f436eb99a855ae8037b2219fc97944f5c000f68 | import re
import urllib.parse
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, JsonResponse
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from .models import Definition
RE_HANGUL = re.compile(r'[(]*[\uAC00-\uD7AF]+[\uAC00-\uD7AF (),;]*', re.IGNORECASE)
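# Matches runs of Hangul syllables (U+AC00-U+D7AF), optionally preceded by '('
# and followed by more Hangul, spaces, or punctuation; e.g. in '사과 (apple)'
# the match is '사과 (' (illustrative).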
def index(request):
definitions = Definition.objects.all()
limit = request.GET.get('limit')
try:
limit = int(limit)
except (ValueError, TypeError):
limit = 15
paginator = Paginator(definitions, limit)
page = request.GET.get('page')
try:
show_lines = paginator.page(page)
except PageNotAnInteger:
show_lines = paginator.page(1)
except EmptyPage:
show_lines = paginator.page(paginator.num_pages)
return render(request, 'index.html', {'definitions': definitions,
'lines': show_lines})
def fix_definition_format(definition):
definition = definition.replace('{I}', '<i>') \
.replace('{/I}', '</i>') \
.replace('{B}', '<b>') \
.replace('{/B}', '</b>') \
.replace('{Pr}', '[') \
.replace('{/Pr}', ']') \
.replace('{H}', '') \
.replace('{/H}', '') \
.replace('{E}', '') \
.replace('{/E}', '') \
.replace('{J}', '') \
.replace('{/J}', '') \
.replace('{S}', '') \
.replace('{/S}', '') \
.replace('{U}', '') \
.replace('{-}', '- ')
if definition.startswith('&'):
definition = definition[1:]
word, _definition = definition.split('\n', 1)
definition = '<h4>' + word + '</h4>\n'
definition += _definition
return definition
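# e.g. the markup token pair '{B}...{/B}' becomes '<b>...</b>', and a leading
# '&' entry is reshaped so its headword is wrapped in an <h4> (illustrative).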
def generate_translate_tag(word):
out = '<a href="https://translate.google.de/#ko/en/{word_url}" '
out += 'title="Translate with Google Translate" target="'
out += '_blank">{word}</a>'
out = out.format(word_url=urllib.parse.quote_plus(word.group(0)),
word=word.group(0))
return out
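# Used as a re.sub() callback: each Hangul match is wrapped in a Google
# Translate link, e.g. '사과' -> <a href="...#ko/en/%EC%82%AC%EA%B3%BC">사과</a>
# (illustrative).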
def get_definitions(request):
if request.is_ajax():
q = request.GET.get('term', '')
definitions = Definition.objects.filter(word__icontains=q) \
.values_list('word', flat=True)[:25]
data = list(definitions)
else:
data = []
return JsonResponse(data, safe=False)
def get_definition(request, id):
definition = get_object_or_404(Definition, id=id)
data = fix_definition_format(definition.definition)
data = RE_HANGUL.sub(generate_translate_tag, data)
return HttpResponse(data)
def get_definition_word(request, word):
definition = get_object_or_404(Definition, word=word)
data = fix_definition_format(definition.definition)
data = RE_HANGUL.sub(generate_translate_tag, data)
    return HttpResponse(data)
Mithras/ha | appdaemon/apps/toggle_switch/toggle_switch.py | d37f8673eed27a85f76c97ee3e924d2ddc033ee5 | import globals
class ToggleSwitch(globals.Hass):
async def initialize(self):
config = self.args["config"]
self._input = config["input"]
self._toggle_service = config["toggle_service"]
self._toggle_payload = config["toggle_payload"]
self._power = config["power"]
self._power_on_threshold = float(config["power_on_threshold"])
self._check_interval = float(config["check_interval"])
self.ensure_state_task = await self.create_task(
self._ensure_state_async(False))
await self.listen_state(self._input_callback_async,
entity=self._input)
async def terminate(self):
# self.log("Terminate")
self.ensure_state_task.cancel()
async def _input_callback_async(self, entity, attribute, old, new, kwargs):
if old == new:
return
# self.log(f"InputChange: old = {old}, new = {new}")
self.ensure_state_task.cancel()
self.ensure_state_task = await self.create_task(self._ensure_state_async())
async def _ensure_state_async(self, immediate=True):
# self.log(f"EnsureState: immediate = {immediate}")
if immediate:
await self._toggle_async()
while True:
await self.sleep(self._check_interval)
power = float(await self.get_state(self._power))
input = await self.get_state(self._input)
# self.log(
# f"EnsureState: input = {input}, power: {power}")
if input == "on" and power < self._power_on_threshold or input == "off" and power > self._power_on_threshold:
await self._toggle_async()
async def _toggle_async(self):
# self.log("Toggle")
await self.call_service(self._toggle_service,
**self._toggle_payload)
| [] |
charlieccarey/rdoc | templates_deepdive_app_bagofwords/udf/dd_extract_features.py | 2e857f29e128f893706d042d583eec698c0bc56a | #!/usr/bin/env python
from __future__ import print_function
'''
1\taaaa~^~bbbb~^~cccc
2\tdddd~^~EEEE~^~ffff
'''
import sys
ARR_DELIM = '~^~'
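# Emit one (sentence_id, lemma) pair per line for each lemma in the delimited array.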
for row in sys.stdin:
row = row.strip()
sent_id, lemmas = row.split('\t')
lemmas = lemmas.split(ARR_DELIM)
for lemma in lemmas:
print('{}\t{}'.format(sent_id, lemma))
| [] |
vandana0608/Pharmacy-Managament | src/supplier/templates/supplier/urls.py | f99bdec11c24027a432858daa19247a21cecc092 | from django.urls import path
from . import views
urlpatterns = [
path('', views.SupplierList.as_view(), name='supplier_list'),
path('view/<int:pk>', views.SupplierView.as_view(), name='supplier_view'),
path('new', views.SupplierCreate.as_view(), name='supplier_new'),
path('edit/<int:pk>', views.SupplierUpdate.as_view(), name='supplier_edit'),
path('delete/<int:pk>', views.SupplierDelete.as_view(), name='supplier_delete'),
] | [] |
rarc41/web_scraper_pro | web_scraper/extract/common.py | f297c785617c6b1617ced8f29ad11afec31f2968 | import yaml
__config = None
def config():
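    # Lazily load config.yaml on first call and cache it for subsequent calls.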
global __config
if not __config:
with open('config.yaml', mode='r') as f:
            __config = yaml.safe_load(f)
return __config | [((155, 172), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (169, 172), False, 'import yaml\n')] |
infiniteloop98/lazies-cmd | engine/config/constant.py | 46ac58f9b31942c6fa63b7ffa8d409e3a6b4df26 | APP_PROFILE_DIRECTORY_NAME = 'lazies-cmd'
DOSKEY_FILE_NAME = 'doskey.bat'
AUTO_RUN_REGISTRY_NAME = 'AutoRun'
| [] |
fanglu01/cDNA_Cupcake | sequence/get_seqs_from_list.py | 60f56dc291661a2b84e40b64d469fba658889c34 | #!/usr/bin/env python
import os, sys
from Bio import SeqIO
def get_seqs_from_list(fastafile, listfile):
seqs = [line.strip() for line in open(listfile)]
for r in SeqIO.parse(open(fastafile), 'fasta'):
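        # Match exact IDs, the portion before the first '|', or any listed prefix.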
if r.id in seqs or r.id.split('|')[0] in seqs or any(r.id.startswith(x) for x in seqs):
print ">" + r.id
print r.seq
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser("Get sequences from a fasta file from a list")
parser.add_argument("fasta_filename", help="Input fasta filename to extract sequences from")
parser.add_argument("list_filename", help="List of sequence IDs to extract")
args = parser.parse_args()
get_seqs_from_list(args.fasta_filename, args.list_filename)
| [] |
cusma/pposdex | ppos_dex_data.py | 31b834ffcb1a43958ccc57b444c7b9337a5623c9 |
import time
import json
import base64
import msgpack
from schema import Schema, And, Optional
from datetime import datetime
from algosdk import mnemonic
from algosdk.account import address_from_private_key
from algosdk.error import *
from algosdk.future.transaction import PaymentTxn
from inequality_indexes import *
from algo_query import *
def wait_for_confirmation(algod_client, transaction_id, timeout):
"""Wait until the transaction is confirmed or rejected, or until 'timeout'
number of rounds have passed.
Args:
algod_client (AlgodClient): Algod Client
transaction_id (str): the transaction to wait for
timeout (int): maximum number of rounds to wait
Returns:
(dict): pending transaction information, or throws an error if the
transaction is not confirmed or rejected in the next timeout rounds
"""
start_round = algod_client.status()["last-round"] + 1
current_round = start_round
while current_round < start_round + timeout:
algod_client.status_after_block(current_round)
try:
pending_txn = algod_client.pending_transaction_info(transaction_id)
except Exception:
return
if pending_txn.get("confirmed-round", 0) > 0:
return pending_txn
elif pending_txn["pool-error"]:
raise Exception(
'pool error: {}'.format(pending_txn["pool-error"]))
current_round += 1
raise Exception(
'pending tx not found in timeout rounds, timeout value = : {}'.format(
timeout))
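# Hypothetical usage sketch: after algod_client.send_transaction(signed_txn)
# returns a txid, wait_for_confirmation(algod_client, txid, 4) blocks until
# the network confirms or rejects the transaction within 4 rounds.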
def post_ppos_dex_data(algod_client, indexer_client, passphrase,
algo_threshold):
private_key = mnemonic.to_private_key(passphrase)
account = {'pk': address_from_private_key(private_key),
'sk': private_key}
CONNECTION_ATTEMPT_DELAY_SEC = 3
MAX_CONNECTION_ATTEMPTS = 10
MICROALGO_TO_ALGO = 1 / 10 ** 6
MICROALGO_TOTAL_SUPPLY = 10 ** 16
attempts = 1
params = None
ledger = None
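    # Retry the Algod queries a few times before giving up.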
while attempts <= MAX_CONNECTION_ATTEMPTS:
try:
params = algod_client.suggested_params()
ledger = algod_client.ledger_supply()
break
except AlgodHTTPError:
print(f"Algod Client connection attempt "
f"{attempts}/{MAX_CONNECTION_ATTEMPTS}")
print("Trying to contact Algod Client again...")
time.sleep(CONNECTION_ATTEMPT_DELAY_SEC)
finally:
attempts += 1
if attempts > MAX_CONNECTION_ATTEMPTS:
quit("Unable to connect to Algod Client.")
attempts = 1
algo_owners = None
while attempts <= MAX_CONNECTION_ATTEMPTS:
try:
algo_owners = get_algo_owners(indexer_client, algo_threshold)
break
except IndexerHTTPError:
print(f"Indexer Client connection attempt "
f"{attempts}/{MAX_CONNECTION_ATTEMPTS}")
print("Trying to contact Indexer Client again...")
time.sleep(CONNECTION_ATTEMPT_DELAY_SEC)
finally:
attempts += 1
if attempts > MAX_CONNECTION_ATTEMPTS:
quit("Unable to connect to Indexer Client.")
stakes = [account['amount'] * MICROALGO_TO_ALGO for
account in algo_owners]
algo_hhi = herfindahl_hirschman_index(stakes)
online_stakes = [account['amount'] * MICROALGO_TO_ALGO
for account in algo_owners
if account['status'] == 'Online']
algo_dynamics = ledger['total-money'] / MICROALGO_TOTAL_SUPPLY
ppos_online_stake = ledger['online-money'] / ledger['total-money']
ppos_online_accounts = len(online_stakes) / len(algo_owners)
ppos_gini = gini_index(online_stakes)
ppos_theil_l = theil_l_index(online_stakes)
ppos_theil_t = theil_t_index(online_stakes)
ppos_hhi = herfindahl_hirschman_index(online_stakes)
ppos_dex = (algo_dynamics
* ppos_online_stake
* ppos_online_accounts
* (1 - ppos_gini))
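    # Publish the metrics as a msgpack-encoded note on a 0-Algo self-payment.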
note = {'algo_threshold': algo_threshold,
'accounts': len(algo_owners),
'algo_hhi': algo_hhi,
'algo_dynamics': algo_dynamics,
'ppos_online_stake': ppos_online_stake,
'ppos_online_accounts': ppos_online_accounts,
'ppos_gini': ppos_gini,
'ppos_theil_l': ppos_theil_l,
'ppos_theil_t': ppos_theil_t,
'ppos_hhi': ppos_hhi,
'ppos_dex': ppos_dex,
'timestamp': str(datetime.now())}
bytes_note = msgpack.packb(note)
unsigned_txn = PaymentTxn(sender=account['pk'],
sp=params,
receiver=account['pk'],
amt=0,
note=bytes_note)
signed_txn = unsigned_txn.sign(account['sk'])
txid = algod_client.send_transaction(signed_txn)
print("Publishing Algorand PPoS Dex data in txID: {}".format(txid))
try:
confirmed_txn = wait_for_confirmation(algod_client, txid, 4)
except Exception as err:
print(err)
return
print("txID: {}".format(txid), " confirmed in round: {}\n".format(
confirmed_txn.get("confirmed-round", 0)))
print("Transaction information:\n{}".format(
json.dumps(confirmed_txn, indent=4)))
def get_ppos_dex_data(indexer_client, ppos_dex_address, algo_threshold,
start_block=11476070, end_block=None):
CONNECTION_ATTEMPT_DELAY_SEC = 3
MAX_CONNECTION_ATTEMPTS = 10
attempts = 1
ppos_dex_txns_note = None
while attempts <= MAX_CONNECTION_ATTEMPTS:
try:
ppos_dex_txns_note = get_address_txns_note(
indexer_client, ppos_dex_address, start_block, end_block)
break
except IndexerHTTPError:
print(f"Indexer Client connection attempt "
f"{attempts}/{MAX_CONNECTION_ATTEMPTS}")
print("Trying to contact Indexer Client again...")
time.sleep(CONNECTION_ATTEMPT_DELAY_SEC)
finally:
attempts += 1
if attempts > MAX_CONNECTION_ATTEMPTS:
quit("Unable to connect to Indexer Client.")
    # TODO: make 'algo_hhi' and 'ppos_hhi' mandatory fields in the schema
schema = Schema({
'algo_threshold': int,
'accounts': And(int, lambda n: 0 <= n),
Optional('algo_hhi'): And(float, lambda n: 0 <= n <= 1),
'algo_dynamics': And(float, lambda n: 0 <= n),
'ppos_online_stake': And(float, lambda n: 0 <= n <= 1),
'ppos_online_accounts': And(float, lambda n: 0 <= n <= 1),
'ppos_gini': And(float, lambda n: 0 <= n <= 1),
'ppos_theil_l': And(float, lambda n: 0 <= n),
'ppos_theil_t': And(float, lambda n: 0 <= n),
Optional('ppos_hhi'): And(float, lambda n: 0 <= n <= 1),
'ppos_dex': And(float, lambda n: 0 <= n <= 1),
'timestamp': str
})
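    # Keep only notes that validate against the schema and match the requested threshold.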
ppos_dex_data = []
for txn_note in ppos_dex_txns_note:
try:
data = schema.validate(
msgpack.unpackb(base64.b64decode(txn_note))
)
if data['algo_threshold'] == algo_threshold:
ppos_dex_data += [data]
except:
pass
if not ppos_dex_data:
quit(f"Impossible to find valid PPos Dex data published by "
f"{ppos_dex_address} starting from block {start_block}.")
return ppos_dex_data
| [((1701, 1736), 'algosdk.mnemonic.to_private_key', 'mnemonic.to_private_key', (['passphrase'], {}), '(passphrase)\n', (1724, 1736), False, 'from algosdk import mnemonic\n'), ((4577, 4596), 'msgpack.packb', 'msgpack.packb', (['note'], {}), '(note)\n', (4590, 4596), False, 'import msgpack\n'), ((4617, 4712), 'algosdk.future.transaction.PaymentTxn', 'PaymentTxn', ([], {'sender': "account['pk']", 'sp': 'params', 'receiver': "account['pk']", 'amt': '(0)', 'note': 'bytes_note'}), "(sender=account['pk'], sp=params, receiver=account['pk'], amt=0,\n note=bytes_note)\n", (4627, 4712), False, 'from algosdk.future.transaction import PaymentTxn\n'), ((1759, 1796), 'algosdk.account.address_from_private_key', 'address_from_private_key', (['private_key'], {}), '(private_key)\n', (1783, 1796), False, 'from algosdk.account import address_from_private_key\n'), ((4542, 4556), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4554, 4556), False, 'from datetime import datetime\n'), ((5326, 5361), 'json.dumps', 'json.dumps', (['confirmed_txn'], {'indent': '(4)'}), '(confirmed_txn, indent=4)\n', (5336, 5361), False, 'import json\n'), ((6413, 6433), 'schema.Optional', 'Optional', (['"""algo_hhi"""'], {}), "('algo_hhi')\n", (6421, 6433), False, 'from schema import Schema, And, Optional\n'), ((6828, 6848), 'schema.Optional', 'Optional', (['"""ppos_hhi"""'], {}), "('ppos_hhi')\n", (6836, 6848), False, 'from schema import Schema, And, Optional\n'), ((6377, 6403), 'schema.And', 'And', (['int', '(lambda n: 0 <= n)'], {}), '(int, lambda n: 0 <= n)\n', (6380, 6403), False, 'from schema import Schema, And, Optional\n'), ((6435, 6468), 'schema.And', 'And', (['float', '(lambda n: 0 <= n <= 1)'], {}), '(float, lambda n: 0 <= n <= 1)\n', (6438, 6468), False, 'from schema import Schema, And, Optional\n'), ((6495, 6523), 'schema.And', 'And', (['float', '(lambda n: 0 <= n)'], {}), '(float, lambda n: 0 <= n)\n', (6498, 6523), False, 'from schema import Schema, And, Optional\n'), ((6554, 6587), 'schema.And', 'And', (['float', '(lambda n: 0 <= n <= 1)'], {}), '(float, lambda n: 0 <= n <= 1)\n', (6557, 6587), False, 'from schema import Schema, And, Optional\n'), ((6621, 6654), 'schema.And', 'And', (['float', '(lambda n: 0 <= n <= 1)'], {}), '(float, lambda n: 0 <= n <= 1)\n', (6624, 6654), False, 'from schema import Schema, And, Optional\n'), ((6677, 6710), 'schema.And', 'And', (['float', '(lambda n: 0 <= n <= 1)'], {}), '(float, lambda n: 0 <= n <= 1)\n', (6680, 6710), False, 'from schema import Schema, And, Optional\n'), ((6736, 6764), 'schema.And', 'And', (['float', '(lambda n: 0 <= n)'], {}), '(float, lambda n: 0 <= n)\n', (6739, 6764), False, 'from schema import Schema, And, Optional\n'), ((6790, 6818), 'schema.And', 'And', (['float', '(lambda n: 0 <= n)'], {}), '(float, lambda n: 0 <= n)\n', (6793, 6818), False, 'from schema import Schema, And, Optional\n'), ((6850, 6883), 'schema.And', 'And', (['float', '(lambda n: 0 <= n <= 1)'], {}), '(float, lambda n: 0 <= n <= 1)\n', (6853, 6883), False, 'from schema import Schema, And, Optional\n'), ((6905, 6938), 'schema.And', 'And', (['float', '(lambda n: 0 <= n <= 1)'], {}), '(float, lambda n: 0 <= n <= 1)\n', (6908, 6938), False, 'from schema import Schema, And, Optional\n'), ((2429, 2469), 'time.sleep', 'time.sleep', (['CONNECTION_ATTEMPT_DELAY_SEC'], {}), '(CONNECTION_ATTEMPT_DELAY_SEC)\n', (2439, 2469), False, 'import time\n'), ((3023, 3063), 'time.sleep', 'time.sleep', (['CONNECTION_ATTEMPT_DELAY_SEC'], {}), '(CONNECTION_ATTEMPT_DELAY_SEC)\n', (3033, 3063), False, 'import time\n'), ((6049, 6089), 'time.sleep', 'time.sleep', (['CONNECTION_ATTEMPT_DELAY_SEC'], {}), '(CONNECTION_ATTEMPT_DELAY_SEC)\n', (6059, 6089), False, 'import time\n'), ((7117, 7143), 'base64.b64decode', 'base64.b64decode', (['txn_note'], {}), '(txn_note)\n', (7133, 7143), False, 'import base64\n')] |
Zhengrui-Liu/FireAlarmingSysCDA | src/test/python/programmingtheiot/part01/unit/system/SystemMemUtilTaskTest.py | 26db6375a21ee9bdccba3d137e30d2e63ad6395c | #####
#
# This class is part of the Programming the Internet of Things
# project, and is available via the MIT License, which can be
# found in the LICENSE file at the top level of this repository.
#
# Copyright (c) 2020 by Andrew D. King
#
import logging
import unittest
from programmingtheiot.cda.system.SystemMemUtilTask import SystemMemUtilTask
class SystemMemUtilTaskTest(unittest.TestCase):
"""
This test case class contains very basic unit tests for
SystemMemUtilTask. It should not be considered complete,
but serve as a starting point for the student implementing
additional functionality within their Programming the IoT
environment.
"""
@classmethod
def setUpClass(self):
logging.basicConfig(format = '%(asctime)s:%(module)s:%(levelname)s:%(message)s', level = logging.DEBUG)
logging.info("Testing SystemMemUtilTask class...")
self.memUtilTask = SystemMemUtilTask()
def setUp(self):
pass
def tearDown(self):
pass
#@unittest.skip("Ignore for now.")
def testGenerateTelemetry(self):
"""Test get memory utilization from sensor data
"""
sd = self.memUtilTask.generateTelemetry()
self.assertIsNotNone(sd)
self.assertGreaterEqual(sd.getValue(), 0.0)
logging.info("Virtual memory SensorData: %s", str(sd))
def testGetTelemetryValue(self):
"""Test get memory utilization
"""
val = self.memUtilTask.getTelemetryValue()
self.assertGreaterEqual(val, 0.0)
logging.info("Virtual memory utilization: %s", str(val))
if __name__ == "__main__":
unittest.main()
| [((1513, 1528), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1526, 1528), False, 'import unittest\n'), ((702, 806), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s:%(module)s:%(levelname)s:%(message)s"""', 'level': 'logging.DEBUG'}), "(format=\n '%(asctime)s:%(module)s:%(levelname)s:%(message)s', level=logging.DEBUG)\n", (721, 806), False, 'import logging\n'), ((808, 858), 'logging.info', 'logging.info', (['"""Testing SystemMemUtilTask class..."""'], {}), "('Testing SystemMemUtilTask class...')\n", (820, 858), False, 'import logging\n'), ((880, 899), 'programmingtheiot.cda.system.SystemMemUtilTask.SystemMemUtilTask', 'SystemMemUtilTask', ([], {}), '()\n', (897, 899), False, 'from programmingtheiot.cda.system.SystemMemUtilTask import SystemMemUtilTask\n')] |
mattclark/osf.io | api/base/settings/defaults.py | 7a362ceb6af3393d3d0423aafef336ee13277303 | """
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
from urlparse import urlparse
from website import settings as osf_settings
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
DATABASES = {
'default': {
'CONN_MAX_AGE': 0,
'ENGINE': 'osf.db.backends.postgresql', # django.db.backends.postgresql
'NAME': os.environ.get('OSF_DB_NAME', 'osf'),
'USER': os.environ.get('OSF_DB_USER', 'postgres'),
'PASSWORD': os.environ.get('OSF_DB_PASSWORD', ''),
'HOST': os.environ.get('OSF_DB_HOST', '127.0.0.1'),
'PORT': os.environ.get('OSF_DB_PORT', '5432'),
'ATOMIC_REQUESTS': True,
'TEST': {
'SERIALIZE': False,
},
},
}
DATABASE_ROUTERS = ['osf.db.router.PostgreSQLFailoverRouter', ]
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
AUTH_USER_MODEL = 'osf.OSFUser'
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = osf_settings.SECRET_KEY
AUTHENTICATION_BACKENDS = (
'api.base.authentication.backends.ODMBackend',
'guardian.backends.ObjectPermissionBackend',
)
# SECURITY WARNING: don't run with debug turned on in production!
DEV_MODE = osf_settings.DEV_MODE
DEBUG = osf_settings.DEBUG_MODE
DEBUG_PROPAGATE_EXCEPTIONS = True
# session:
SESSION_COOKIE_NAME = 'api'
SESSION_COOKIE_SECURE = osf_settings.SECURE_MODE
SESSION_COOKIE_HTTPONLY = osf_settings.SESSION_COOKIE_HTTPONLY
# csrf:
CSRF_COOKIE_NAME = 'api-csrf'
CSRF_COOKIE_SECURE = osf_settings.SECURE_MODE
CSRF_COOKIE_HTTPONLY = osf_settings.SECURE_MODE
ALLOWED_HOSTS = [
'.osf.io',
]
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
# 3rd party
'django_celery_beat',
'django_celery_results',
'rest_framework',
'corsheaders',
'raven.contrib.django.raven_compat',
'django_extensions',
'guardian',
'storages',
'waffle',
'elasticsearch_metrics',
# OSF
'osf',
# Addons
'addons.osfstorage',
'addons.bitbucket',
'addons.box',
'addons.dataverse',
'addons.dropbox',
'addons.figshare',
'addons.forward',
'addons.github',
'addons.gitlab',
'addons.googledrive',
'addons.mendeley',
'addons.onedrive',
'addons.owncloud',
'addons.s3',
'addons.twofactor',
'addons.wiki',
'addons.zotero',
)
# local development using https
if osf_settings.SECURE_MODE and DEBUG:
INSTALLED_APPS += ('sslserver',)
# TODO: Are there more granular ways to configure reporting specifically related to the API?
RAVEN_CONFIG = {
'tags': {'App': 'api'},
'dsn': osf_settings.SENTRY_DSN,
'release': osf_settings.VERSION,
}
BULK_SETTINGS = {
'DEFAULT_BULK_LIMIT': 100,
}
MAX_PAGE_SIZE = 100
REST_FRAMEWORK = {
'PAGE_SIZE': 10,
'DEFAULT_RENDERER_CLASSES': (
'api.base.renderers.JSONAPIRenderer',
'api.base.renderers.JSONRendererWithESISupport',
'api.base.renderers.BrowsableAPIRendererNoForms',
),
'DEFAULT_PARSER_CLASSES': (
'api.base.parsers.JSONAPIParser',
'api.base.parsers.JSONAPIParserForRegularJSON',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser',
),
'EXCEPTION_HANDLER': 'api.base.exceptions.json_api_exception_handler',
'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'api.base.content_negotiation.JSONAPIContentNegotiation',
'DEFAULT_VERSIONING_CLASS': 'api.base.versioning.BaseVersioning',
'DEFAULT_VERSION': '2.0',
'ALLOWED_VERSIONS': (
'2.0',
'2.1',
'2.2',
'2.3',
'2.4',
'2.5',
'2.6',
'2.7',
'2.8',
'2.9',
'2.10',
'2.11',
'2.12',
'2.13',
'2.14',
'2.15',
'2.16',
'2.17',
),
'DEFAULT_FILTER_BACKENDS': ('api.base.filters.OSFOrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination',
'ORDERING_PARAM': 'sort',
'DEFAULT_AUTHENTICATION_CLASSES': (
# Custom auth classes
'api.base.authentication.drf.OSFBasicAuthentication',
'api.base.authentication.drf.OSFSessionAuthentication',
'api.base.authentication.drf.OSFCASAuthentication',
),
'DEFAULT_THROTTLE_CLASSES': (
'rest_framework.throttling.UserRateThrottle',
'api.base.throttling.NonCookieAuthThrottle',
),
'DEFAULT_THROTTLE_RATES': {
'user': '10000/day',
'non-cookie-auth': '100/hour',
'add-contributor': '10/second',
'create-guid': '1000/hour',
'root-anon-throttle': '1000/hour',
'test-user': '2/hour',
'test-anon': '1/hour',
'send-email': '2/minute',
},
}
# Settings related to CORS Headers addon: allow API to receive authenticated requests from OSF
# CORS plugin only matches based on "netloc" part of URL, so as workaround we add that to the list
CORS_ORIGIN_ALLOW_ALL = False
CORS_ORIGIN_WHITELIST = (
urlparse(osf_settings.DOMAIN).netloc,
osf_settings.DOMAIN,
)
# This needs to remain True to allow cross origin requests that are in CORS_ORIGIN_WHITELIST to
# use cookies.
CORS_ALLOW_CREDENTIALS = True
# Set dynamically on app init
ORIGINS_WHITELIST = ()
MIDDLEWARE = (
'api.base.middleware.DjangoGlobalMiddleware',
'api.base.middleware.CeleryTaskMiddleware',
'api.base.middleware.PostcommitTaskMiddleware',
# A profiling middleware. ONLY FOR DEV USE
# Uncomment and add "prof" to url params to recieve a profile for that url
# 'api.base.middleware.ProfileMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
'api.base.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
# 'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'waffle.middleware.WaffleMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
},
]
ROOT_URLCONF = 'api.base.urls'
WSGI_APPLICATION = 'api.base.wsgi.application'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# https://django-storages.readthedocs.io/en/latest/backends/gcloud.html
if os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', False):
# Required to interact with Google Cloud Storage
DEFAULT_FILE_STORAGE = 'api.base.storage.RequestlessURLGoogleCloudStorage'
GS_BUCKET_NAME = os.environ.get('GS_BUCKET_NAME', 'cos-osf-stage-cdn-us')
GS_FILE_OVERWRITE = os.environ.get('GS_FILE_OVERWRITE', False)
elif osf_settings.DEV_MODE or osf_settings.DEBUG_MODE:
DEFAULT_FILE_STORAGE = 'api.base.storage.DevFileSystemStorage'
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static/vendor')
API_BASE = 'v2/'
API_PRIVATE_BASE = '_/'
STATIC_URL = '/static/'
NODE_CATEGORY_MAP = osf_settings.NODE_CATEGORY_MAP
DEBUG_TRANSACTIONS = DEBUG
JWT_SECRET = 'osf_api_cas_login_jwt_secret_32b'
JWE_SECRET = 'osf_api_cas_login_jwe_secret_32b'
ENABLE_VARNISH = osf_settings.ENABLE_VARNISH
ENABLE_ESI = osf_settings.ENABLE_ESI
VARNISH_SERVERS = osf_settings.VARNISH_SERVERS
ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES
ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud', 'onedrive']
ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'gitlab', 'mendeley', 'zotero', 'forward']
BYPASS_THROTTLE_TOKEN = 'test-token'
OSF_SHELL_USER_IMPORTS = None
# Settings for use in the admin
OSF_URL = 'https://osf.io'
SELECT_FOR_UPDATE_ENABLED = True
# Disable anonymous user permissions in django-guardian
ANONYMOUS_USER_NAME = None
# If set to True, automated tests with extra queries will fail.
NPLUSONE_RAISE = False
# salt used for generating hashids
HASHIDS_SALT = 'pinkhimalayan'
# django-elasticsearch-metrics
ELASTICSEARCH_DSL = {
'default': {
'hosts': os.environ.get('ELASTIC6_URI', '127.0.0.1:9201'),
'retry_on_timeout': True,
},
}
# Store yearly indices for time-series metrics
ELASTICSEARCH_METRICS_DATE_FORMAT = '%Y'
WAFFLE_CACHE_NAME = 'waffle_cache'
STORAGE_USAGE_CACHE_NAME = 'storage_usage'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
STORAGE_USAGE_CACHE_NAME: {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'osf_cache_table',
},
WAFFLE_CACHE_NAME: {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
| [((7215, 7270), 'os.environ.get', 'os.environ.get', (['"""GOOGLE_APPLICATION_CREDENTIALS"""', '(False)'], {}), "('GOOGLE_APPLICATION_CREDENTIALS', False)\n", (7229, 7270), False, 'import os\n'), ((7747, 7786), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static/vendor"""'], {}), "(BASE_DIR, 'static/vendor')\n", (7759, 7786), False, 'import os\n'), ((7425, 7481), 'os.environ.get', 'os.environ.get', (['"""GS_BUCKET_NAME"""', '"""cos-osf-stage-cdn-us"""'], {}), "('GS_BUCKET_NAME', 'cos-osf-stage-cdn-us')\n", (7439, 7481), False, 'import os\n'), ((7506, 7548), 'os.environ.get', 'os.environ.get', (['"""GS_FILE_OVERWRITE"""', '(False)'], {}), "('GS_FILE_OVERWRITE', False)\n", (7520, 7548), False, 'import os\n'), ((767, 803), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_NAME"""', '"""osf"""'], {}), "('OSF_DB_NAME', 'osf')\n", (781, 803), False, 'import os\n'), ((821, 862), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_USER"""', '"""postgres"""'], {}), "('OSF_DB_USER', 'postgres')\n", (835, 862), False, 'import os\n'), ((884, 921), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_PASSWORD"""', '""""""'], {}), "('OSF_DB_PASSWORD', '')\n", (898, 921), False, 'import os\n'), ((939, 981), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_HOST"""', '"""127.0.0.1"""'], {}), "('OSF_DB_HOST', '127.0.0.1')\n", (953, 981), False, 'import os\n'), ((999, 1036), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_PORT"""', '"""5432"""'], {}), "('OSF_DB_PORT', '5432')\n", (1013, 1036), False, 'import os\n'), ((5625, 5654), 'urlparse.urlparse', 'urlparse', (['osf_settings.DOMAIN'], {}), '(osf_settings.DOMAIN)\n', (5633, 5654), False, 'from urlparse import urlparse\n'), ((8927, 8975), 'os.environ.get', 'os.environ.get', (['"""ELASTIC6_URI"""', '"""127.0.0.1:9201"""'], {}), "('ELASTIC6_URI', '127.0.0.1:9201')\n", (8941, 8975), False, 'import os\n'), ((447, 472), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (462, 472), False, 'import os\n'), ((6892, 6927), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (6904, 6927), False, 'import os\n')] |
GTedHa/gblackboard | tests/__init__.py | 61c13ca69113019b8fc691acaa1953751f517347 | # -*- coding: utf-8 -*-
"""Unit test package for gblackboard."""
| [] |
RudSmith/beluganos | src/fabricflow/fibc/api/fibcapis_pb2_grpc.py | 7a3f8524e1d9d9313d96476c783a96096180654c | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import fibcapi_pb2 as fibcapi__pb2
import fibcapis_pb2 as fibcapis__pb2
class FIBCApApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCApApi/Monitor',
request_serializer=fibcapis__pb2.ApMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.ApMonitorReply.FromString,
)
self.GetPortStats = channel.unary_stream(
'/fibcapi.FIBCApApi/GetPortStats',
request_serializer=fibcapis__pb2.ApGetPortStatsRequest.SerializeToString,
response_deserializer=fibcapi__pb2.FFPortStats.FromString,
)
self.ModPortStats = channel.unary_unary(
'/fibcapi.FIBCApApi/ModPortStats',
request_serializer=fibcapis__pb2.ApModPortStatsRequest.SerializeToString,
response_deserializer=fibcapis__pb2.ApModPortStatsReply.FromString,
)
self.GetPortEntries = channel.unary_stream(
'/fibcapi.FIBCApApi/GetPortEntries',
request_serializer=fibcapis__pb2.ApGetPortEntriesRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DbPortEntry.FromString,
)
self.GetIDEntries = channel.unary_stream(
'/fibcapi.FIBCApApi/GetIDEntries',
request_serializer=fibcapis__pb2.ApGetIdEntriesRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DbIdEntry.FromString,
)
self.GetDpEntries = channel.unary_stream(
'/fibcapi.FIBCApApi/GetDpEntries',
request_serializer=fibcapis__pb2.ApGetDpEntriesRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DbDpEntry.FromString,
)
self.AddPortEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/AddPortEntry',
request_serializer=fibcapis__pb2.DbPortEntry.SerializeToString,
response_deserializer=fibcapis__pb2.ApAddPortEntryReply.FromString,
)
self.AddIDEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/AddIDEntry',
request_serializer=fibcapis__pb2.DbIdEntry.SerializeToString,
response_deserializer=fibcapis__pb2.ApAddIdEntryReply.FromString,
)
self.DelPortEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/DelPortEntry',
request_serializer=fibcapis__pb2.DbPortKey.SerializeToString,
response_deserializer=fibcapis__pb2.ApDelPortEntryReply.FromString,
)
self.DelIDEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/DelIDEntry',
request_serializer=fibcapis__pb2.DbIdEntry.SerializeToString,
response_deserializer=fibcapis__pb2.ApDelIdEntryReply.FromString,
)
self.GetStats = channel.unary_stream(
'/fibcapi.FIBCApApi/GetStats',
request_serializer=fibcapis__pb2.ApGetStatsRequest.SerializeToString,
response_deserializer=fibcapis__pb2.StatsEntry.FromString,
)
self.RunOAM = channel.unary_unary(
'/fibcapi.FIBCApApi/RunOAM',
request_serializer=fibcapi__pb2.OAM.Request.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
class FIBCApApiServicer(object):
# missing associated documentation comment in .proto file
pass
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPortStats(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ModPortStats(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPortEntries(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIDEntries(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDpEntries(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddPortEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddIDEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DelPortEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DelIDEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetStats(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunOAM(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCApApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.ApMonitorRequest.FromString,
response_serializer=fibcapis__pb2.ApMonitorReply.SerializeToString,
),
'GetPortStats': grpc.unary_stream_rpc_method_handler(
servicer.GetPortStats,
request_deserializer=fibcapis__pb2.ApGetPortStatsRequest.FromString,
response_serializer=fibcapi__pb2.FFPortStats.SerializeToString,
),
'ModPortStats': grpc.unary_unary_rpc_method_handler(
servicer.ModPortStats,
request_deserializer=fibcapis__pb2.ApModPortStatsRequest.FromString,
response_serializer=fibcapis__pb2.ApModPortStatsReply.SerializeToString,
),
'GetPortEntries': grpc.unary_stream_rpc_method_handler(
servicer.GetPortEntries,
request_deserializer=fibcapis__pb2.ApGetPortEntriesRequest.FromString,
response_serializer=fibcapis__pb2.DbPortEntry.SerializeToString,
),
'GetIDEntries': grpc.unary_stream_rpc_method_handler(
servicer.GetIDEntries,
request_deserializer=fibcapis__pb2.ApGetIdEntriesRequest.FromString,
response_serializer=fibcapis__pb2.DbIdEntry.SerializeToString,
),
'GetDpEntries': grpc.unary_stream_rpc_method_handler(
servicer.GetDpEntries,
request_deserializer=fibcapis__pb2.ApGetDpEntriesRequest.FromString,
response_serializer=fibcapis__pb2.DbDpEntry.SerializeToString,
),
'AddPortEntry': grpc.unary_unary_rpc_method_handler(
servicer.AddPortEntry,
request_deserializer=fibcapis__pb2.DbPortEntry.FromString,
response_serializer=fibcapis__pb2.ApAddPortEntryReply.SerializeToString,
),
'AddIDEntry': grpc.unary_unary_rpc_method_handler(
servicer.AddIDEntry,
request_deserializer=fibcapis__pb2.DbIdEntry.FromString,
response_serializer=fibcapis__pb2.ApAddIdEntryReply.SerializeToString,
),
'DelPortEntry': grpc.unary_unary_rpc_method_handler(
servicer.DelPortEntry,
request_deserializer=fibcapis__pb2.DbPortKey.FromString,
response_serializer=fibcapis__pb2.ApDelPortEntryReply.SerializeToString,
),
'DelIDEntry': grpc.unary_unary_rpc_method_handler(
servicer.DelIDEntry,
request_deserializer=fibcapis__pb2.DbIdEntry.FromString,
response_serializer=fibcapis__pb2.ApDelIdEntryReply.SerializeToString,
),
'GetStats': grpc.unary_stream_rpc_method_handler(
servicer.GetStats,
request_deserializer=fibcapis__pb2.ApGetStatsRequest.FromString,
response_serializer=fibcapis__pb2.StatsEntry.SerializeToString,
),
'RunOAM': grpc.unary_unary_rpc_method_handler(
servicer.RunOAM,
request_deserializer=fibcapi__pb2.OAM.Request.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCApApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class FIBCVmApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SendHello = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendHello',
request_serializer=fibcapi__pb2.Hello.SerializeToString,
response_deserializer=fibcapis__pb2.HelloReply.FromString,
)
self.SendPortConfig = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendPortConfig',
request_serializer=fibcapi__pb2.PortConfig.SerializeToString,
response_deserializer=fibcapis__pb2.PortConfigReply.FromString,
)
self.SendFlowMod = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendFlowMod',
request_serializer=fibcapi__pb2.FlowMod.SerializeToString,
response_deserializer=fibcapis__pb2.FlowModReply.FromString,
)
self.SendGroupMod = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendGroupMod',
request_serializer=fibcapi__pb2.GroupMod.SerializeToString,
response_deserializer=fibcapis__pb2.GroupModReply.FromString,
)
self.SendOAMReply = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendOAMReply',
request_serializer=fibcapis__pb2.OAMReply.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCVmApi/Monitor',
request_serializer=fibcapis__pb2.VmMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.VmMonitorReply.FromString,
)
class FIBCVmApiServicer(object):
# missing associated documentation comment in .proto file
pass
def SendHello(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPortConfig(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendFlowMod(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendGroupMod(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendOAMReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCVmApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'SendHello': grpc.unary_unary_rpc_method_handler(
servicer.SendHello,
request_deserializer=fibcapi__pb2.Hello.FromString,
response_serializer=fibcapis__pb2.HelloReply.SerializeToString,
),
'SendPortConfig': grpc.unary_unary_rpc_method_handler(
servicer.SendPortConfig,
request_deserializer=fibcapi__pb2.PortConfig.FromString,
response_serializer=fibcapis__pb2.PortConfigReply.SerializeToString,
),
'SendFlowMod': grpc.unary_unary_rpc_method_handler(
servicer.SendFlowMod,
request_deserializer=fibcapi__pb2.FlowMod.FromString,
response_serializer=fibcapis__pb2.FlowModReply.SerializeToString,
),
'SendGroupMod': grpc.unary_unary_rpc_method_handler(
servicer.SendGroupMod,
request_deserializer=fibcapi__pb2.GroupMod.FromString,
response_serializer=fibcapis__pb2.GroupModReply.SerializeToString,
),
'SendOAMReply': grpc.unary_unary_rpc_method_handler(
servicer.SendOAMReply,
request_deserializer=fibcapis__pb2.OAMReply.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.VmMonitorRequest.FromString,
response_serializer=fibcapis__pb2.VmMonitorReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCVmApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class FIBCVsApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SendHello = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendHello',
request_serializer=fibcapi__pb2.FFHello.SerializeToString,
response_deserializer=fibcapis__pb2.FFHelloReply.FromString,
)
self.SendFFPacket = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendFFPacket',
request_serializer=fibcapi__pb2.FFPacket.SerializeToString,
response_deserializer=fibcapis__pb2.FFPacketReply.FromString,
)
self.SendPacketIn = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendPacketIn',
request_serializer=fibcapi__pb2.FFPacketIn.SerializeToString,
response_deserializer=fibcapis__pb2.FFPacketInReply.FromString,
)
self.SendOAMReply = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendOAMReply',
request_serializer=fibcapis__pb2.OAMReply.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCVsApi/Monitor',
request_serializer=fibcapis__pb2.VsMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.VsMonitorReply.FromString,
)
class FIBCVsApiServicer(object):
# missing associated documentation comment in .proto file
pass
def SendHello(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendFFPacket(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPacketIn(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendOAMReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCVsApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'SendHello': grpc.unary_unary_rpc_method_handler(
servicer.SendHello,
request_deserializer=fibcapi__pb2.FFHello.FromString,
response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString,
),
'SendFFPacket': grpc.unary_unary_rpc_method_handler(
servicer.SendFFPacket,
request_deserializer=fibcapi__pb2.FFPacket.FromString,
response_serializer=fibcapis__pb2.FFPacketReply.SerializeToString,
),
'SendPacketIn': grpc.unary_unary_rpc_method_handler(
servicer.SendPacketIn,
request_deserializer=fibcapi__pb2.FFPacketIn.FromString,
response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString,
),
'SendOAMReply': grpc.unary_unary_rpc_method_handler(
servicer.SendOAMReply,
request_deserializer=fibcapis__pb2.OAMReply.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.VsMonitorRequest.FromString,
response_serializer=fibcapis__pb2.VsMonitorReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCVsApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class FIBCDpApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SendHello = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendHello',
request_serializer=fibcapi__pb2.FFHello.SerializeToString,
response_deserializer=fibcapis__pb2.FFHelloReply.FromString,
)
self.SendPacketIn = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendPacketIn',
request_serializer=fibcapi__pb2.FFPacketIn.SerializeToString,
response_deserializer=fibcapis__pb2.FFPacketInReply.FromString,
)
self.SendPortStatus = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendPortStatus',
request_serializer=fibcapi__pb2.FFPortStatus.SerializeToString,
response_deserializer=fibcapis__pb2.FFPortStatusReply.FromString,
)
self.SendL2AddrStatus = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendL2AddrStatus',
request_serializer=fibcapi__pb2.FFL2AddrStatus.SerializeToString,
response_deserializer=fibcapis__pb2.L2AddrStatusReply.FromString,
)
self.SendMultipartReply = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendMultipartReply',
request_serializer=fibcapis__pb2.DpMultipartReply.SerializeToString,
response_deserializer=fibcapis__pb2.DpMultipartReplyAck.FromString,
)
self.SendOAMReply = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendOAMReply',
request_serializer=fibcapis__pb2.OAMReply.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCDpApi/Monitor',
request_serializer=fibcapis__pb2.DpMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DpMonitorReply.FromString,
)
class FIBCDpApiServicer(object):
# missing associated documentation comment in .proto file
pass
def SendHello(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPacketIn(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPortStatus(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendL2AddrStatus(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendMultipartReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendOAMReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCDpApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'SendHello': grpc.unary_unary_rpc_method_handler(
servicer.SendHello,
request_deserializer=fibcapi__pb2.FFHello.FromString,
response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString,
),
'SendPacketIn': grpc.unary_unary_rpc_method_handler(
servicer.SendPacketIn,
request_deserializer=fibcapi__pb2.FFPacketIn.FromString,
response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString,
),
'SendPortStatus': grpc.unary_unary_rpc_method_handler(
servicer.SendPortStatus,
request_deserializer=fibcapi__pb2.FFPortStatus.FromString,
response_serializer=fibcapis__pb2.FFPortStatusReply.SerializeToString,
),
'SendL2AddrStatus': grpc.unary_unary_rpc_method_handler(
servicer.SendL2AddrStatus,
request_deserializer=fibcapi__pb2.FFL2AddrStatus.FromString,
response_serializer=fibcapis__pb2.L2AddrStatusReply.SerializeToString,
),
'SendMultipartReply': grpc.unary_unary_rpc_method_handler(
servicer.SendMultipartReply,
request_deserializer=fibcapis__pb2.DpMultipartReply.FromString,
response_serializer=fibcapis__pb2.DpMultipartReplyAck.SerializeToString,
),
'SendOAMReply': grpc.unary_unary_rpc_method_handler(
servicer.SendOAMReply,
request_deserializer=fibcapis__pb2.OAMReply.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.DpMonitorRequest.FromString,
response_serializer=fibcapis__pb2.DpMonitorReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCDpApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
| [((9777, 9855), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCApApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCApApi', rpc_method_handlers)\n", (9813, 9855), False, 'import grpc\n'), ((14833, 14911), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCVmApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCVmApi', rpc_method_handlers)\n", (14869, 14911), False, 'import grpc\n'), ((19141, 19219), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCVsApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCVsApi', rpc_method_handlers)\n", (19177, 19219), False, 'import grpc\n'), ((25085, 25163), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCDpApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCDpApi', rpc_method_handlers)\n", (25121, 25163), False, 'import grpc\n'), ((6769, 6965), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.ApMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.ApMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.ApMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.ApMonitorReply.SerializeToString)\n', (6805, 6965), False, 'import grpc\n'), ((7018, 7218), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetPortStats'], {'request_deserializer': 'fibcapis__pb2.ApGetPortStatsRequest.FromString', 'response_serializer': 'fibcapi__pb2.FFPortStats.SerializeToString'}), '(servicer.GetPortStats,\n request_deserializer=fibcapis__pb2.ApGetPortStatsRequest.FromString,\n response_serializer=fibcapi__pb2.FFPortStats.SerializeToString)\n', (7054, 7218), False, 'import grpc\n'), ((7273, 7481), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ModPortStats'], {'request_deserializer': 'fibcapis__pb2.ApModPortStatsRequest.FromString', 'response_serializer': 'fibcapis__pb2.ApModPortStatsReply.SerializeToString'}), '(servicer.ModPortStats,\n request_deserializer=fibcapis__pb2.ApModPortStatsRequest.FromString,\n response_serializer=fibcapis__pb2.ApModPortStatsReply.SerializeToString)\n', (7308, 7481), False, 'import grpc\n'), ((7538, 7743), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetPortEntries'], {'request_deserializer': 'fibcapis__pb2.ApGetPortEntriesRequest.FromString', 'response_serializer': 'fibcapis__pb2.DbPortEntry.SerializeToString'}), '(servicer.GetPortEntries,\n request_deserializer=fibcapis__pb2.ApGetPortEntriesRequest.FromString,\n response_serializer=fibcapis__pb2.DbPortEntry.SerializeToString)\n', (7574, 7743), False, 'import grpc\n'), ((7798, 7997), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetIDEntries'], {'request_deserializer': 'fibcapis__pb2.ApGetIdEntriesRequest.FromString', 'response_serializer': 'fibcapis__pb2.DbIdEntry.SerializeToString'}), '(servicer.GetIDEntries,\n request_deserializer=fibcapis__pb2.ApGetIdEntriesRequest.FromString,\n response_serializer=fibcapis__pb2.DbIdEntry.SerializeToString)\n', (7834, 7997), False, 'import grpc\n'), ((8052, 8251), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetDpEntries'], {'request_deserializer': 'fibcapis__pb2.ApGetDpEntriesRequest.FromString', 'response_serializer': 'fibcapis__pb2.DbDpEntry.SerializeToString'}), '(servicer.GetDpEntries,\n request_deserializer=fibcapis__pb2.ApGetDpEntriesRequest.FromString,\n response_serializer=fibcapis__pb2.DbDpEntry.SerializeToString)\n', (8088, 8251), False, 'import grpc\n'), ((8306, 8504), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.AddPortEntry'], {'request_deserializer': 'fibcapis__pb2.DbPortEntry.FromString', 'response_serializer': 'fibcapis__pb2.ApAddPortEntryReply.SerializeToString'}), '(servicer.AddPortEntry,\n request_deserializer=fibcapis__pb2.DbPortEntry.FromString,\n response_serializer=fibcapis__pb2.ApAddPortEntryReply.SerializeToString)\n', (8341, 8504), False, 'import grpc\n'), ((8557, 8749), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.AddIDEntry'], {'request_deserializer': 'fibcapis__pb2.DbIdEntry.FromString', 'response_serializer': 'fibcapis__pb2.ApAddIdEntryReply.SerializeToString'}), '(servicer.AddIDEntry,\n request_deserializer=fibcapis__pb2.DbIdEntry.FromString,\n response_serializer=fibcapis__pb2.ApAddIdEntryReply.SerializeToString)\n', (8592, 8749), False, 'import grpc\n'), ((8804, 9000), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DelPortEntry'], {'request_deserializer': 'fibcapis__pb2.DbPortKey.FromString', 'response_serializer': 'fibcapis__pb2.ApDelPortEntryReply.SerializeToString'}), '(servicer.DelPortEntry,\n request_deserializer=fibcapis__pb2.DbPortKey.FromString,\n response_serializer=fibcapis__pb2.ApDelPortEntryReply.SerializeToString)\n', (8839, 9000), False, 'import grpc\n'), ((9053, 9245), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DelIDEntry'], {'request_deserializer': 'fibcapis__pb2.DbIdEntry.FromString', 'response_serializer': 'fibcapis__pb2.ApDelIdEntryReply.SerializeToString'}), '(servicer.DelIDEntry,\n request_deserializer=fibcapis__pb2.DbIdEntry.FromString,\n response_serializer=fibcapis__pb2.ApDelIdEntryReply.SerializeToString)\n', (9088, 9245), False, 'import grpc\n'), ((9296, 9488), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetStats'], {'request_deserializer': 'fibcapis__pb2.ApGetStatsRequest.FromString', 'response_serializer': 'fibcapis__pb2.StatsEntry.SerializeToString'}), '(servicer.GetStats,\n request_deserializer=fibcapis__pb2.ApGetStatsRequest.FromString,\n response_serializer=fibcapis__pb2.StatsEntry.SerializeToString)\n', (9332, 9488), False, 'import grpc\n'), ((9537, 9722), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.RunOAM'], {'request_deserializer': 'fibcapi__pb2.OAM.Request.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.RunOAM, request_deserializer=\n fibcapi__pb2.OAM.Request.FromString, response_serializer=fibcapis__pb2.\n OAMReplyAck.SerializeToString)\n', (9572, 9722), False, 'import grpc\n'), ((13378, 13558), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendHello'], {'request_deserializer': 'fibcapi__pb2.Hello.FromString', 'response_serializer': 'fibcapis__pb2.HelloReply.SerializeToString'}), '(servicer.SendHello,\n request_deserializer=fibcapi__pb2.Hello.FromString, response_serializer\n =fibcapis__pb2.HelloReply.SerializeToString)\n', (13413, 13558), False, 'import grpc\n'), ((13614, 13808), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPortConfig'], {'request_deserializer': 'fibcapi__pb2.PortConfig.FromString', 'response_serializer': 'fibcapis__pb2.PortConfigReply.SerializeToString'}), '(servicer.SendPortConfig,\n request_deserializer=fibcapi__pb2.PortConfig.FromString,\n response_serializer=fibcapis__pb2.PortConfigReply.SerializeToString)\n', (13649, 13808), False, 'import grpc\n'), ((13862, 14047), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendFlowMod'], {'request_deserializer': 'fibcapi__pb2.FlowMod.FromString', 'response_serializer': 'fibcapis__pb2.FlowModReply.SerializeToString'}), '(servicer.SendFlowMod,\n request_deserializer=fibcapi__pb2.FlowMod.FromString,\n response_serializer=fibcapis__pb2.FlowModReply.SerializeToString)\n', (13897, 14047), False, 'import grpc\n'), ((14102, 14290), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendGroupMod'], {'request_deserializer': 'fibcapi__pb2.GroupMod.FromString', 'response_serializer': 'fibcapis__pb2.GroupModReply.SerializeToString'}), '(servicer.SendGroupMod,\n request_deserializer=fibcapi__pb2.GroupMod.FromString,\n response_serializer=fibcapis__pb2.GroupModReply.SerializeToString)\n', (14137, 14290), False, 'import grpc\n'), ((14345, 14532), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendOAMReply'], {'request_deserializer': 'fibcapis__pb2.OAMReply.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.SendOAMReply,\n request_deserializer=fibcapis__pb2.OAMReply.FromString,\n response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString)\n', (14380, 14532), False, 'import grpc\n'), ((14582, 14778), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.VmMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.VmMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.VmMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.VmMonitorReply.SerializeToString)\n', (14618, 14778), False, 'import grpc\n'), ((17925, 18108), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendHello'], {'request_deserializer': 'fibcapi__pb2.FFHello.FromString', 'response_serializer': 'fibcapis__pb2.FFHelloReply.SerializeToString'}), '(servicer.SendHello,\n request_deserializer=fibcapi__pb2.FFHello.FromString,\n response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString)\n', (17960, 18108), False, 'import grpc\n'), ((18163, 18351), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendFFPacket'], {'request_deserializer': 'fibcapi__pb2.FFPacket.FromString', 'response_serializer': 'fibcapis__pb2.FFPacketReply.SerializeToString'}), '(servicer.SendFFPacket,\n request_deserializer=fibcapi__pb2.FFPacket.FromString,\n response_serializer=fibcapis__pb2.FFPacketReply.SerializeToString)\n', (18198, 18351), False, 'import grpc\n'), ((18406, 18598), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPacketIn'], {'request_deserializer': 'fibcapi__pb2.FFPacketIn.FromString', 'response_serializer': 'fibcapis__pb2.FFPacketInReply.SerializeToString'}), '(servicer.SendPacketIn,\n request_deserializer=fibcapi__pb2.FFPacketIn.FromString,\n response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString)\n', (18441, 18598), False, 'import grpc\n'), ((18653, 18840), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendOAMReply'], {'request_deserializer': 'fibcapis__pb2.OAMReply.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.SendOAMReply,\n request_deserializer=fibcapis__pb2.OAMReply.FromString,\n response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString)\n', (18688, 18840), False, 'import grpc\n'), ((18890, 19086), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.VsMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.VsMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.VsMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.VsMonitorReply.SerializeToString)\n', (18926, 19086), False, 'import grpc\n'), ((23326, 23509), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendHello'], {'request_deserializer': 'fibcapi__pb2.FFHello.FromString', 'response_serializer': 'fibcapis__pb2.FFHelloReply.SerializeToString'}), '(servicer.SendHello,\n request_deserializer=fibcapi__pb2.FFHello.FromString,\n response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString)\n', (23361, 23509), False, 'import grpc\n'), ((23564, 23756), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPacketIn'], {'request_deserializer': 'fibcapi__pb2.FFPacketIn.FromString', 'response_serializer': 'fibcapis__pb2.FFPacketInReply.SerializeToString'}), '(servicer.SendPacketIn,\n request_deserializer=fibcapi__pb2.FFPacketIn.FromString,\n response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString)\n', (23599, 23756), False, 'import grpc\n'), ((23813, 24011), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPortStatus'], {'request_deserializer': 'fibcapi__pb2.FFPortStatus.FromString', 'response_serializer': 'fibcapis__pb2.FFPortStatusReply.SerializeToString'}), '(servicer.SendPortStatus,\n request_deserializer=fibcapi__pb2.FFPortStatus.FromString,\n response_serializer=fibcapis__pb2.FFPortStatusReply.SerializeToString)\n', (23848, 24011), False, 'import grpc\n'), ((24070, 24272), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendL2AddrStatus'], {'request_deserializer': 'fibcapi__pb2.FFL2AddrStatus.FromString', 'response_serializer': 'fibcapis__pb2.L2AddrStatusReply.SerializeToString'}), '(servicer.SendL2AddrStatus,\n request_deserializer=fibcapi__pb2.FFL2AddrStatus.FromString,\n response_serializer=fibcapis__pb2.L2AddrStatusReply.SerializeToString)\n', (24105, 24272), False, 'import grpc\n'), ((24333, 24542), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendMultipartReply'], {'request_deserializer': 'fibcapis__pb2.DpMultipartReply.FromString', 'response_serializer': 'fibcapis__pb2.DpMultipartReplyAck.SerializeToString'}), '(servicer.SendMultipartReply,\n request_deserializer=fibcapis__pb2.DpMultipartReply.FromString,\n response_serializer=fibcapis__pb2.DpMultipartReplyAck.SerializeToString)\n', (24368, 24542), False, 'import grpc\n'), ((24597, 24784), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendOAMReply'], {'request_deserializer': 'fibcapis__pb2.OAMReply.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.SendOAMReply,\n request_deserializer=fibcapis__pb2.OAMReply.FromString,\n response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString)\n', (24632, 24784), False, 'import grpc\n'), ((24834, 25030), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.DpMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.DpMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.DpMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.DpMonitorReply.SerializeToString)\n', (24870, 25030), False, 'import grpc\n')] |
pmart123/security_id | cymbology/identifiers/__init__.py | 95087be9525ab8d2fd47baa93f83aaa30e76bb54 | from cymbology.identifiers.sedol import Sedol
from cymbology.identifiers.cusip import Cusip, cusip_from_isin
from cymbology.identifiers.isin import Isin
__all__ = ('Sedol', 'Cusip', 'cusip_from_isin', 'Isin')
| [] |
Noahffiliation/corpus-christi | api/src/error_report/models.py | c69ec88784de7d2e5acde3012926f307b43e38b3 | from marshmallow import Schema, fields
from marshmallow.validate import Range, Length
from sqlalchemy import Column, Integer, Boolean, DateTime
from ..db import Base
from ..shared.models import StringTypes
# ---- Error-report
class ErrorReport(Base):
__tablename__ = 'error_report'
id = Column(Integer, primary_key=True)
description = Column(StringTypes.LONG_STRING, nullable=False)
time_stamp = Column(DateTime)
status_code = Column(Integer)
endpoint = Column(StringTypes.MEDIUM_STRING)
solved = Column(Boolean, default=False)
def __repr__(self):
return f"<Error-report(id={self.id})>"
class ErrorReportSchema(Schema):
id = fields.Integer(dump_only=True, required=True, validate=Range(min=1))
description = fields.String(required=True, validate=Length(min=1))
time_stamp = fields.DateTime()
status_code = fields.Integer()
endpoint = fields.String()
solved = fields.Boolean()
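# A minimal usage sketch (names and values are made up, not from this project):
# report = ErrorReport(description='500 on /people', status_code=500, endpoint='/people')
# ErrorReportSchema().dump(report) serializes it for the API layer, while
# ErrorReportSchema().load(payload) validates incoming JSON against the fields above.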
| [((300, 333), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (306, 333), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((352, 399), 'sqlalchemy.Column', 'Column', (['StringTypes.LONG_STRING'], {'nullable': '(False)'}), '(StringTypes.LONG_STRING, nullable=False)\n', (358, 399), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((417, 433), 'sqlalchemy.Column', 'Column', (['DateTime'], {}), '(DateTime)\n', (423, 433), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((452, 467), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (458, 467), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((483, 516), 'sqlalchemy.Column', 'Column', (['StringTypes.MEDIUM_STRING'], {}), '(StringTypes.MEDIUM_STRING)\n', (489, 516), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((530, 560), 'sqlalchemy.Column', 'Column', (['Boolean'], {'default': '(False)'}), '(Boolean, default=False)\n', (536, 560), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((834, 851), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {}), '()\n', (849, 851), False, 'from marshmallow import Schema, fields\n'), ((870, 886), 'marshmallow.fields.Integer', 'fields.Integer', ([], {}), '()\n', (884, 886), False, 'from marshmallow import Schema, fields\n'), ((902, 917), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (915, 917), False, 'from marshmallow import Schema, fields\n'), ((931, 947), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {}), '()\n', (945, 947), False, 'from marshmallow import Schema, fields\n'), ((732, 744), 'marshmallow.validate.Range', 'Range', ([], {'min': '(1)'}), '(min=1)\n', (737, 744), False, 'from marshmallow.validate import Range, Length\n'), ((802, 815), 'marshmallow.validate.Length', 'Length', ([], {'min': '(1)'}), '(min=1)\n', (808, 815), False, 'from marshmallow.validate import Range, Length\n')] |
arpitran/HackerRank_solutions | Python/Vowel-Substring/solution.py | a3a77c858edd3955ea38530916db9051b1aa93f9 | #!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'findSubstring' function below.
#
# The function is expected to return a STRING.
# The function accepts following parameters:
# 1. STRING s
# 2. INTEGER k
#
def isVowel(x):
if(x=="a" or x=='e' or x=='i' or x=='o' or x=='u'):
return True
return False
def vowelcount(x):
lowercase = x.lower()
vowel_counts = {}
for vowel in "aeiou":
count = lowercase.count(vowel)
vowel_counts[vowel] = count
counts = vowel_counts.values()
total_vowels = sum(counts)
return total_vowels
def findSubstring(s, k):
    # Map every substring of length k to its vowel count, then return the first
    # substring with the maximal count (or "Not found!" if none contains a vowel).
    sub_string = {}
    res = [s[i:i + k] for i in range(len(s) - k + 1)]
    for sub in res:
        sub_string[sub] = vowelcount(sub)
    best = max(sub_string, key=sub_string.get)
    if sub_string[best] == 0:
        return "Not found!"
    else:
        return str(best)
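# Worked example (hypothetical input): findSubstring("azerdii", 5) -> "erdii",
# since the length-5 substrings "azerd", "zerdi" and "erdii" contain 2, 2 and 3
# vowels respectively; findSubstring("xyz", 2) -> "Not found!" (no vowels at all).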
| [] |
erichav/NIMA | nima/models/productos/constants.py | 6ca845047e2d1764f07af76bfbbed9f1a82bc10f | COLLECTION = 'productos' | [] |
hsjang001205/deepchem | deepchem/metrics/score_function.py | 02fce35729826b1ef12a1cfa6519b491510217be | """Evaluation metrics."""
import numpy as np
from sklearn.metrics import matthews_corrcoef # noqa
from sklearn.metrics import recall_score # noqa
from sklearn.metrics import cohen_kappa_score
from sklearn.metrics import r2_score # noqa
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import precision_score # noqa
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import auc
from sklearn.metrics import jaccard_score
from sklearn.metrics import f1_score
from sklearn.metrics import roc_auc_score # noqa
from sklearn.metrics import accuracy_score # noqa
from sklearn.metrics import balanced_accuracy_score # noqa
from scipy.stats import pearsonr
# kappa_score is an alias for `sklearn.metrics.cohen_kappa_score`
kappa_score = cohen_kappa_score
def pearson_r2_score(y: np.ndarray, y_pred: np.ndarray) -> float:
"""Computes Pearson R^2 (square of Pearson correlation).
Parameters
----------
y: np.ndarray
ground truth array
y_pred: np.ndarray
predicted array
Returns
-------
float
The Pearson-R^2 score.
"""
return pearsonr(y, y_pred)[0]**2
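# Quick sketch (made-up arrays): pearson_r2_score(np.array([1., 2., 3.]),
# np.array([1.1, 1.9, 3.2])) is close to 1.0 because the inputs are nearly
# linearly related; uncorrelated inputs give a value near 0.0.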
def jaccard_index(y: np.ndarray, y_pred: np.ndarray) -> float:
"""Computes Jaccard Index which is the Intersection Over Union metric
which is commonly used in image segmentation tasks.
  DEPRECATED: WILL BE REMOVED IN A FUTURE VERSION OF DEEPCHEM. USE `jaccard_score` instead.
Parameters
----------
y: np.ndarray
ground truth array
y_pred: np.ndarray
predicted array
Returns
-------
score: float
The jaccard index. A number between 0 and 1.
"""
return jaccard_score(y, y_pred)
def pixel_error(y: np.ndarray, y_pred: np.ndarray) -> float:
"""An error metric in case y, y_pred are images.
Defined as 1 - the maximal F-score of pixel similarity, or squared
Euclidean distance between the original and the result labels.
Parameters
----------
y: np.ndarray
ground truth array
y_pred: np.ndarray
predicted array
Returns
-------
score: float
The pixel-error. A number between 0 and 1.
"""
return 1 - f1_score(y, y_pred)
def prc_auc_score(y: np.ndarray, y_pred: np.ndarray) -> float:
"""Compute area under precision-recall curve
Parameters
----------
y: np.ndarray
    A numpy array of shape `(N, n_classes)` with one-hot true labels (column 1 is used as the positive class)
y_pred: np.ndarray
Of shape `(N, n_classes)` with class probabilities.
Returns
-------
float
The area under the precision-recall curve. A number between 0 and 1.
"""
precision, recall, _ = precision_recall_curve(y[:, 1], y_pred[:, 1])
return auc(recall, precision)
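# Shape sketch (illustrative values): both arguments are read column-wise, so a
# binary task passes one-hot labels and per-class probabilities, e.g.
# y = np.array([[1, 0], [0, 1], [1, 0]]) with
# y_pred = np.array([[0.8, 0.2], [0.3, 0.7], [0.6, 0.4]]);
# column 1 is treated as the positive class.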
def rms_score(y_true: np.ndarray, y_pred: np.ndarray) -> float:
"""Computes RMS error."""
return np.sqrt(mean_squared_error(y_true, y_pred))
def mae_score(y_true: np.ndarray, y_pred: np.ndarray) -> float:
"""Computes MAE."""
return mean_absolute_error(y_true, y_pred)
def bedroc_score(y_true: np.ndarray, y_pred: np.ndarray, alpha: float = 20.0):
"""Compute BEDROC metric.
BEDROC metric implemented according to Truchon and Bayley that modifies
the ROC score by allowing for a factor of early recognition.
Please confirm details from [1]_.
Parameters
----------
y_true: np.ndarray
Binary class labels. 1 for positive class, 0 otherwise
y_pred: np.ndarray
Predicted labels
alpha: float, default 20.0
Early recognition parameter
Returns
-------
float
Value in [0, 1] that indicates the degree of early recognition
Notes
-----
This function requires RDKit to be installed.
References
----------
.. [1] Truchon et al. "Evaluating virtual screening methods: good and bad metrics
for the “early recognition” problem." Journal of chemical information and modeling
47.2 (2007): 488-508.
"""
try:
from rdkit.ML.Scoring.Scoring import CalcBEDROC
except ModuleNotFoundError:
raise ValueError("This function requires RDKit to be installed.")
# validation
  assert len(y_true) == len(y_pred), 'Number of examples does not match'
assert np.array_equal(
np.unique(y_true).astype(int),
[0, 1]), ('Class labels must be binary: %s' % np.unique(y_true))
yt = np.asarray(y_true)
yp = np.asarray(y_pred)
yt = yt.flatten()
yp = yp[:, 1].flatten() # Index 1 because one_hot predictions
scores = list(zip(yt, yp))
scores = sorted(scores, key=lambda pair: pair[1], reverse=True)
return CalcBEDROC(scores, 0, alpha)
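# Usage sketch (assumes RDKit is installed; the arrays are made up):
# y_true = np.array([1, 0, 1, 0])
# y_pred = np.array([[0.1, 0.9], [0.8, 0.2], [0.3, 0.7], [0.6, 0.4]])
# bedroc_score(y_true, y_pred)  # approaches 1.0 when actives rank early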
| [((1665, 1689), 'sklearn.metrics.jaccard_score', 'jaccard_score', (['y', 'y_pred'], {}), '(y, y_pred)\n', (1678, 1689), False, 'from sklearn.metrics import jaccard_score\n'), ((2604, 2653), 'sklearn.metrics.precision_recall_curve', 'precision_recall_curve', (['y[:, (1)]', 'y_pred[:, (1)]'], {}), '(y[:, (1)], y_pred[:, (1)])\n', (2626, 2653), False, 'from sklearn.metrics import precision_recall_curve\n'), ((2659, 2681), 'sklearn.metrics.auc', 'auc', (['recall', 'precision'], {}), '(recall, precision)\n', (2662, 2681), False, 'from sklearn.metrics import auc\n'), ((2926, 2961), 'sklearn.metrics.mean_absolute_error', 'mean_absolute_error', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (2945, 2961), False, 'from sklearn.metrics import mean_absolute_error\n'), ((4235, 4253), 'numpy.asarray', 'np.asarray', (['y_true'], {}), '(y_true)\n', (4245, 4253), True, 'import numpy as np\n'), ((4261, 4279), 'numpy.asarray', 'np.asarray', (['y_pred'], {}), '(y_pred)\n', (4271, 4279), True, 'import numpy as np\n'), ((4472, 4500), 'rdkit.ML.Scoring.Scoring.CalcBEDROC', 'CalcBEDROC', (['scores', '(0)', 'alpha'], {}), '(scores, 0, alpha)\n', (4482, 4500), False, 'from rdkit.ML.Scoring.Scoring import CalcBEDROC\n'), ((2148, 2167), 'sklearn.metrics.f1_score', 'f1_score', (['y', 'y_pred'], {}), '(y, y_pred)\n', (2156, 2167), False, 'from sklearn.metrics import f1_score\n'), ((2793, 2827), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (2811, 2827), False, 'from sklearn.metrics import mean_squared_error\n'), ((4208, 4225), 'numpy.unique', 'np.unique', (['y_true'], {}), '(y_true)\n', (4217, 4225), True, 'import numpy as np\n'), ((1147, 1166), 'scipy.stats.pearsonr', 'pearsonr', (['y', 'y_pred'], {}), '(y, y_pred)\n', (1155, 1166), False, 'from scipy.stats import pearsonr\n'), ((4125, 4142), 'numpy.unique', 'np.unique', (['y_true'], {}), '(y_true)\n', (4134, 4142), True, 'import numpy as np\n')] |
hyperevo/py-helios-node | hvm/chains/base.py | ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8 | from __future__ import absolute_import
import operator
from collections import deque
import functools
from abc import (
ABCMeta,
abstractmethod
)
import rlp_cython as rlp
import time
import math
from uuid import UUID
from typing import ( # noqa: F401
Any,
Optional,
Callable,
cast,
Dict,
Generator,
Iterator,
Tuple,
Type,
TYPE_CHECKING,
Union,
List,
Iterable,
)
import logging
from itertools import groupby
from hvm.rlp.receipts import Receipt
from hvm.types import Timestamp
from eth_typing import (
Address,
BlockNumber,
Hash32,
)
from eth_utils import (
to_tuple,
to_set,
)
from hvm.db.backends.base import BaseDB
from hvm.db.backends.memory import MemoryDB
from hvm.db.chain import (
BaseChainDB,
ChainDB,
)
from hvm.db.journal import (
JournalDB,
)
from hvm.db.read_only import ReadOnlyDB
from hvm.constants import (
BLOCK_GAS_LIMIT,
BLANK_ROOT_HASH,
NUMBER_OF_HEAD_HASH_TO_SAVE,
TIME_BETWEEN_HEAD_HASH_SAVE,
GENESIS_PARENT_HASH,
)
from hvm.db.trie import make_trie_root_and_nodes
from hvm import constants
from hvm.estimators import (
get_gas_estimator,
)
from hvm.exceptions import (
HeaderNotFound,
TransactionNotFound,
ValidationError,
VMNotFound,
BlockOnWrongChain,
CanonicalHeadNotFound,
CannotCalculateStake,
NotEnoughTimeBetweenBlocks,
ReceivableTransactionNotFound,
TriedImportingGenesisBlock,
JournalDbNotActivated,
ReplacingBlocksNotAllowed,
UnprocessedBlockNotAllowed,
AppendHistoricalRootHashTooOld,
HistoricalNetworkTPCMissing,
HistoricalMinGasPriceError,
UnprocessedBlockChildIsProcessed,
ParentNotFound,
NoChronologicalBlocks,
RewardProofSenderBlockMissing,
InvalidHeadRootTimestamp,
RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent)
from eth_keys.exceptions import (
BadSignature,
)
from hvm.utils.blocks import reorganize_chronological_block_list_for_correct_chronological_order_at_index
from hvm.validation import (
validate_block_number,
validate_uint256,
validate_word,
validate_vm_configuration,
validate_canonical_address,
validate_is_queue_block,
validate_centisecond_timestamp,
)
from hvm.rlp.blocks import (
BaseBlock,
BaseQueueBlock,
)
from hvm.rlp.headers import (
BlockHeader,
HeaderParams,
)
from hvm.rlp.transactions import (
BaseTransaction,
BaseReceiveTransaction
)
from hvm.utils.db import (
apply_state_dict,
)
from hvm.utils.datatypes import (
Configurable,
)
from hvm.utils.headers import (
compute_gas_limit_bounds,
)
from hvm.utils.hexadecimal import (
encode_hex,
decode_hex
)
from hvm.utils.rlp import (
ensure_imported_block_unchanged,
)
from hvm.db.chain_head import ChainHeadDB
from hvm.db.consensus import ConsensusDB
from eth_keys import keys
from eth_keys.datatypes import(
BaseKey,
PublicKey,
PrivateKey
)
from hvm.utils.numeric import (
effecient_diff,
are_items_in_list_equal,
)
from sortedcontainers import (
SortedList,
SortedDict,
)
from hvm.rlp.consensus import NodeStakingScore, PeerNodeHealth
from hvm.rlp.accounts import TransactionKey
if TYPE_CHECKING:
from hvm.vm.base import BaseVM # noqa: F401
from functools import partial
import asyncio
# Mapping from address to account state.
# 'balance', 'nonce' -> int
# 'code' -> bytes
# 'storage' -> Dict[int, int]
AccountState = Dict[Address, Dict[str, Union[int, bytes, Dict[int, int]]]]
class BaseChain(Configurable, metaclass=ABCMeta):
"""
The base class for all Chain objects
"""
chain_head_db: ChainHeadDB = None
chaindb: ChainDB = None
chaindb_class = None # type: Type[BaseChainDB]
vm_configuration = None # type: Tuple[Tuple[int, Type[BaseVM]], ...]
genesis_wallet_address: Address = None
genesis_block_timestamp: Timestamp = None
min_time_between_blocks: int = None
#
# Helpers
#
@classmethod
@abstractmethod
def get_chaindb_class(cls) -> Type[BaseChainDB]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_consensus_db(self, header: BlockHeader = None, timestamp: Timestamp = None) -> ConsensusDB:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def enable_read_only_db(self) -> None:
raise NotImplementedError("Chain classes must implement this method")
#
# Chain API
#
@classmethod
@abstractmethod
def from_genesis(cls,
base_db: BaseDB,
genesis_params: Dict[str, HeaderParams],
genesis_state: AccountState=None) -> 'BaseChain':
raise NotImplementedError("Chain classes must implement this method")
@classmethod
@abstractmethod
def from_genesis_header(cls,
base_db: BaseDB,
genesis_header: BlockHeader) -> 'BaseChain':
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_chain_at_block_parent(self, block: BaseBlock) -> 'BaseChain':
raise NotImplementedError("Chain classes must implement this method")
#
# VM API
#
@classmethod
def get_vm_configuration(cls) -> Tuple[Tuple[int, Type['BaseVM']], ...]:
return cls.vm_configuration
@classmethod
def get_vm_class(cls, header: BlockHeader) -> Type['BaseVM']:
"""
        Returns the VM class for the given block header's timestamp.
"""
return cls.get_vm_class_for_block_timestamp(header.timestamp)
@abstractmethod
def get_vm(self, header: BlockHeader=None, timestamp: Timestamp = None) -> 'BaseVM':
raise NotImplementedError("Chain classes must implement this method")
@classmethod
def get_vm_class_for_block_timestamp(cls, timestamp: int = None) -> Type['BaseVM']:
"""
        Returns the VM class for the given block timestamp.
"""
if timestamp is None:
timestamp = int(time.time())
if cls.vm_configuration is None:
raise AttributeError("Chain classes must define the VMs in vm_configuration")
validate_uint256(timestamp)
for start_timestamp, vm_class in reversed(cls.vm_configuration):
if timestamp >= start_timestamp:
return vm_class
else:
raise VMNotFound("No vm available for timestamp #{0}".format(timestamp))
#
# Header API
#
@abstractmethod
def create_header_from_parent(self,
parent_header: BlockHeader,
**header_params: HeaderParams) -> BlockHeader:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeader:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_canonical_head(self):
raise NotImplementedError("Chain classes must implement this method")
#
# Block API
#
@abstractmethod
def get_ancestors(self, limit: int, header: BlockHeader=None) -> Iterator[BaseBlock]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block_by_hash(self, block_hash: Hash32) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block_by_header(self, block_header: BlockHeader) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block_by_number(self, block_number: BlockNumber, wallet_address: Address = None) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_blocks_on_chain(self, start: int, end: int, wallet_address: Address = None) -> List[BaseBlock]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_all_blocks_on_chain(self, wallet_address: Address = None) -> List[BaseBlock]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_all_blocks_on_chain_by_head_block_hash(self, chain_head_hash: Hash32) -> List[BaseBlock]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_blocks_on_chain_up_to_block_hash(self, chain_head_hash: Hash32, start_block_number: int = 0, limit: int = float('inf')) -> List[BaseBlock]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block(self) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
# @abstractmethod
# def get_canonical_block_by_number(self, block_number: BlockNumber) -> BaseBlock:
# raise NotImplementedError("Chain classes must implement this method")
# @abstractmethod
# def get_canonical_block_hash(self, block_number):
# raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_all_chronological_blocks_for_window(self, window_timestamp: Timestamp) -> List[BaseBlock]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def import_current_queue_block(self) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def import_current_queue_block_with_reward(self, node_staking_score_list: List[NodeStakingScore]) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def purge_block_and_all_children_and_set_parent_as_chain_head_by_hash(self, block_hash_to_delete: Hash32) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def purge_block_and_all_children_and_set_parent_as_chain_head(self, existing_block_header: BlockHeader):
raise NotImplementedError("Chain classes must implement this method")
#
# Chronologically consistent blockchain db API
#
@abstractmethod
def check_block_chronological_consistency(self, block: BaseBlock) -> List[Hash32]:
raise NotImplementedError("Chain classes must implement this method")
#
# Transaction API
#
@abstractmethod
def get_transaction_by_block_hash_and_index(self, block_hash: Hash32, transaction_index: int) -> Union[BaseTransaction, BaseReceiveTransaction]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def create_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_canonical_transaction(self, transaction_hash: Hash32) -> BaseTransaction:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def populate_queue_block_with_receive_tx(self) -> List[BaseReceiveTransaction]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block_receive_transactions_by_hash(
self,
block_hash: Hash32) -> List['BaseReceiveTransaction']:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_receive_tx_from_send_tx(self, tx_hash: Hash32) -> Optional['BaseReceiveTransaction']:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def create_receivable_transactions(self) -> List[BaseReceiveTransaction]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_receivable_transactions(self, address: Address) -> Tuple[List[BaseReceiveTransaction], List[TransactionKey]]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_current_queue_block_nonce(self) -> int:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def create_and_sign_transaction_for_queue_block(self, *args: Any, **kwargs: Any) -> BaseTransaction:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def create_and_sign_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction:
raise NotImplementedError("Chain classes must implement this method")
#
# Chronological Chain API
#
@abstractmethod
def try_to_rebuild_chronological_chain_from_historical_root_hashes(self, historical_root_hash_timestamp: Timestamp) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_block_hashes_that_are_new_for_this_historical_root_hash_timestamp(self, historical_root_hash_timestamp: Timestamp) -> List[Tuple[Timestamp, Hash32]]:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def initialize_historical_root_hashes_and_chronological_blocks(self) -> None:
raise NotImplementedError("Chain classes must implement this method")
#
# Execution API
#
# @abstractmethod
# def apply_transaction(self, transaction):
# raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def estimate_gas(self, transaction: BaseTransaction, at_header: BlockHeader=None) -> int:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def import_block(self, block: BaseBlock, perform_validation: bool=True) -> BaseBlock:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def import_chain(self, block_list: List[BaseBlock], perform_validation: bool=True, save_block_head_hash_timestamp: bool = True, allow_replacement: bool = True) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def import_chronological_block_window(self, block_list: List[BaseBlock], window_start_timestamp: Timestamp,
save_block_head_hash_timestamp: bool = True,
allow_unprocessed: bool = False) -> None:
raise NotImplementedError("Chain classes must implement this method")
#
# Validation API
#
@abstractmethod
def get_allowed_time_of_next_block(self, chain_address: Address = None) -> Timestamp:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def validate_block(self, block: BaseBlock) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def validate_gaslimit(self, header: BlockHeader) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def validate_block_specification(self, block) -> bool:
raise NotImplementedError("Chain classes must implement this method")
#
# Stake API
#
@abstractmethod
def get_mature_stake(self, wallet_address: Address = None, raise_canonical_head_not_found_error:bool = False) -> int:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_mature_stake_for_chronological_block_window(self, chronological_block_window_timestamp, timestamp_for_stake):
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_new_block_hash_to_test_peer_node_health(self) -> Hash32:
raise NotImplementedError("Chain classes must implement this method")
#
# Min Block Gas API used for throttling the network
#
@abstractmethod
def re_initialize_historical_minimum_gas_price_at_genesis(self) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def update_current_network_tpc_capability(self, current_network_tpc_cap: int,
update_min_gas_price: bool = True) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_local_tpc_cap(self) -> int:
raise NotImplementedError("Chain classes must implement this method")
#
# Consensus db passthrough with correct db corresponding to timestamp
#
@abstractmethod
def get_signed_peer_score(self, private_key: PrivateKey,
network_id: int,
peer_wallet_address: Address,
after_block_number: BlockNumber = None,
) -> NodeStakingScore:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_signed_peer_score_string_private_key(self,
private_key_string: bytes,
peer_wallet_address: Address,
after_block_number: BlockNumber = None,
) -> NodeStakingScore:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def validate_node_staking_score(self,
node_staking_score: NodeStakingScore,
since_block_number: BlockNumber) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def save_health_request(self, peer_wallet_address: Address, response_time_in_micros: int = float('inf')) -> None:
raise NotImplementedError("Chain classes must implement this method")
@abstractmethod
def get_current_peer_node_health(self,peer_wallet_address: Address) -> PeerNodeHealth:
raise NotImplementedError("Chain classes must implement this method")
class Chain(BaseChain):
"""
A Chain is a combination of one or more VM classes. Each VM is associated
with a range of blocks. The Chain class acts as a wrapper around these other
VM classes, delegating operations to the appropriate VM depending on the
current block number.
"""
raise_errors = False
logger = logging.getLogger("hvm.chain.chain.Chain")
header = None # type: BlockHeader
network_id = None # type: int
gas_estimator = None # type: Callable
_journaldb = None
num_journal_records_for_block_import = 0
chaindb_class = ChainDB # type: Type[BaseChainDB]
chain_head_db_class = ChainHeadDB
_queue_block: BaseQueueBlock = None
def __init__(self, base_db: BaseDB, wallet_address: Address, private_key: BaseKey=None) -> None:
if not self.vm_configuration:
raise ValueError(
"The Chain class cannot be instantiated with an empty `vm_configuration`"
)
else:
validate_vm_configuration(self.vm_configuration)
validate_canonical_address(wallet_address, "Wallet Address")
self.db = base_db
self.private_key = private_key
self.wallet_address = wallet_address
self.chaindb = self.get_chaindb_class()(self.db)
self.chain_head_db = self.get_chain_head_db_class().load_from_saved_root_hash(self.db)
try:
self.header = self.create_header_from_parent(self.get_canonical_head())
except CanonicalHeadNotFound:
            # this chain has no canonical head yet, so let's create a genesis block header
# self.logger.debug("Creating new genesis block on chain {}".format(self.wallet_address))
self.header = self.get_vm_class_for_block_timestamp().create_genesis_block(self.wallet_address).header
if self.gas_estimator is None:
self.gas_estimator = get_gas_estimator() # type: ignore
def reinitialize(self):
self.__init__(self.db, self.wallet_address, self.private_key)
def set_new_wallet_address(self, wallet_address: Address, private_key: BaseKey=None):
self.logger.debug('setting new wallet address')
self.wallet_address = wallet_address
self.private_key = private_key
self.reinitialize()
@property
def queue_block(self):
if self._queue_block is None:
self._queue_block = self.get_queue_block()
return self._queue_block
@queue_block.setter
def queue_block(self,val:BaseQueueBlock):
self._queue_block = val
@property
def min_time_between_blocks(self):
vm = self.get_vm(timestamp=Timestamp(int(time.time())))
min_allowed_time_between_blocks = vm.min_time_between_blocks
return min_allowed_time_between_blocks
# @property
# def consensus_db(self, header: BlockHeader = None, timestamp: Timestamp = None):
# # gets the consensus db corresponding to the block timestamp
#
# return self.get_vm(header, timestamp).consensus_db
def get_consensus_db(self, header: BlockHeader = None, timestamp: Timestamp = None) -> ConsensusDB:
# gets the consensus db corresponding to the block timestamp
return self.get_vm(header, timestamp).consensus_db
#
# Global Record and discard API
#
def enable_read_only_db(self) -> None:
if not isinstance(self.db, ReadOnlyDB):
self.base_db = self.db
self.db = ReadOnlyDB(self.base_db)
self.reinitialize()
def enable_journal_db(self):
if self._journaldb is None:
self.base_db = self.db
self._journaldb = JournalDB(self.base_db)
#we keep the name self.db so that all of the functions still work, but at this point it is a journaldb.
self.db = self._journaldb
#reinitialize to ensure chain and chain_head_db have the new journaldb
self.reinitialize()
def disable_journal_db(self):
if self._journaldb is not None:
self.db = self.base_db
self._journaldb = None
#reinitialize to ensure chain and chain_head_db have the new journaldb
self.reinitialize()
def record_journal(self) -> UUID:
if self._journaldb is not None:
return (self._journaldb.record())
else:
raise JournalDbNotActivated()
def discard_journal(self, changeset: UUID) -> None:
if self._journaldb is not None:
db_changeset = changeset
self._journaldb.discard(db_changeset)
else:
raise JournalDbNotActivated()
def commit_journal(self, changeset: UUID) -> None:
if self._journaldb is not None:
db_changeset = changeset
self._journaldb.commit(db_changeset)
else:
raise JournalDbNotActivated()
def persist_journal(self) -> None:
if self._journaldb is not None:
self._journaldb.persist()
else:
raise JournalDbNotActivated()
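    # Journal flow sketch (hypothetical; assumes enable_journal_db() ran first):
    # changeset = chain.record_journal()
    # ... speculatively import blocks ...
    # chain.discard_journal(changeset)   # roll everything back, or
    # chain.commit_journal(changeset)    # keep the changes, then
    # chain.persist_journal()            # flush them to the underlying db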
#
# Helpers
#
@classmethod
def get_chaindb_class(cls) -> Type[BaseChainDB]:
if cls.chaindb_class is None:
raise AttributeError("`chaindb_class` not set")
return cls.chaindb_class
@classmethod
def get_chain_head_db_class(cls) -> Type[ChainHeadDB]:
if cls.chain_head_db_class is None:
raise AttributeError("`chain_head_db class` not set")
return cls.chain_head_db_class
@classmethod
def get_genesis_wallet_address(cls) -> Address:
if cls.genesis_wallet_address is None:
raise AttributeError("`genesis_wallet_address` not set")
return cls.genesis_wallet_address
#
# Chain API
#
@classmethod
def create_genesis_header(cls,
base_db: BaseDB,
wallet_address: Address,
private_key: BaseKey,
genesis_params: Dict[str, HeaderParams],
genesis_state: AccountState=None,
) -> 'BaseChain':
genesis_vm_class = cls.get_vm_class_for_block_timestamp()
account_db = genesis_vm_class.get_state_class().get_account_db_class()(base_db)
if genesis_state is None:
genesis_state = {}
# mutation
account_db = apply_state_dict(account_db, genesis_state)
account_db.persist(save_account_hash = True, wallet_address = wallet_address)
genesis_params['account_hash'] = account_db.get_account_hash(wallet_address)
genesis_header = BlockHeader(**genesis_params)
signed_genesis_header = genesis_header.get_signed(private_key, cls.network_id)
chaindb = cls.get_chaindb_class()(base_db)
chaindb.persist_header(signed_genesis_header)
return signed_genesis_header
@classmethod
def from_genesis(cls,
base_db: BaseDB,
wallet_address: Address,
genesis_params: Dict[str, HeaderParams],
genesis_state: AccountState,
private_key: BaseKey = None
) -> 'BaseChain':
"""
Initializes the Chain from a genesis state.
"""
genesis_vm_class = cls.get_vm_class_for_block_timestamp()
account_db = genesis_vm_class.get_state_class().get_account_db_class()(
base_db
)
if genesis_state is None:
genesis_state = {}
# mutation
account_db = apply_state_dict(account_db, genesis_state)
account_db.persist(save_account_hash = True, wallet_address = cls.genesis_wallet_address)
genesis_header = BlockHeader(**genesis_params)
return cls.from_genesis_header(base_db, wallet_address = wallet_address, private_key = private_key, genesis_header = genesis_header)
@classmethod
def from_genesis_header(cls,
base_db: BaseDB,
wallet_address: Address,
genesis_header: BlockHeader,
private_key: BaseKey,
) -> 'BaseChain':
"""
Initializes the chain from the genesis header.
"""
chaindb = cls.get_chaindb_class()(base_db)
chaindb.persist_header(genesis_header)
chain_head_db = cls.get_chain_head_db_class()(base_db)
#window_for_this_block = math.ceil((genesis_header.timestamp+1)/TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
window_for_this_block = int(genesis_header.timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE + TIME_BETWEEN_HEAD_HASH_SAVE
chain_head_db.set_chain_head_hash(cls.genesis_wallet_address, genesis_header.hash)
chain_head_db.initialize_historical_root_hashes(chain_head_db.root_hash, window_for_this_block)
chain_head_db.persist(save_current_root_hash = True)
#chain_head_db.add_block_hash_to_chronological_window(genesis_header.hash, genesis_header.timestamp)
return cls(base_db, wallet_address = wallet_address, private_key=private_key)
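    # Window arithmetic sketch: with TIME_BETWEEN_HEAD_HASH_SAVE = 1000 (a made-up
    # value) and genesis_header.timestamp = 1234, window_for_this_block is
    # int(1234 / 1000) * 1000 + 1000 = 2000, i.e. the end of the enclosing window.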
def get_chain_at_block_parent(self, block: BaseBlock) -> BaseChain:
"""
Returns a `Chain` instance with the given block's parent at the chain head.
"""
try:
parent_header = self.get_block_header_by_hash(block.header.parent_hash)
except HeaderNotFound:
raise ValidationError("Parent ({0}) of block {1} not found".format(
block.header.parent_hash,
block.header.hash
))
init_header = self.create_header_from_parent(parent_header)
return type(self)(self.chaindb.db, self.wallet_address, self.private_key, init_header)
#
# VM API
#
    def get_vm(self, header: BlockHeader=None, timestamp: Timestamp = None) -> 'BaseVM':
        """
        Returns the VM instance for the given header. If a timestamp is given
        instead, returns the VM in effect at that timestamp.
        """
        if header is not None and timestamp is not None:
            raise ValueError("Cannot specify header and timestamp for get_vm(). Only one is allowed.")
        if header is None:
            header = self.header
        if timestamp is not None:
            # the guard above ensures header was None here, so this copies self.header
            header = header.copy(timestamp = timestamp)
        vm_class = self.get_vm_class_for_block_timestamp(header.timestamp)
        return vm_class(header=header,
                        chaindb=self.chaindb,
                        network_id=self.network_id)
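    # e.g. chain.get_vm() builds a VM from the chain's current header, while
    # chain.get_vm(timestamp=Timestamp(int(time.time()))) selects whichever VM
    # rules are active right now (sketch; Timestamp is the type alias imported above).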
#
# Header API
#
def create_header_from_parent(self, parent_header, **header_params):
"""
Passthrough helper to the VM class of the block descending from the
given header.
"""
return self.get_vm_class_for_block_timestamp().create_header_from_parent(parent_header, **header_params)
def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeader:
"""
Returns the requested block header as specified by block hash.
Raises BlockNotFound if there's no block header with the given hash in the db.
"""
validate_word(block_hash, title="Block Hash")
return self.chaindb.get_block_header_by_hash(block_hash)
def get_canonical_head(self, chain_address = None):
"""
Returns the block header at the canonical chain head.
Raises CanonicalHeadNotFound if there's no head defined for the canonical chain.
"""
if chain_address is not None:
return self.chaindb.get_canonical_head(chain_address)
else:
return self.chaindb.get_canonical_head(self.wallet_address)
#
# Block API
#
def get_genesis_block_hash(self) -> Hash32:
return self.chaindb.get_canonical_block_hash(block_number = BlockNumber(0),
chain_address= self.genesis_wallet_address)
@to_tuple
def get_ancestors(self, limit: int, header: BlockHeader=None) -> Iterator[BaseBlock]:
"""
Return `limit` number of ancestor blocks from the current canonical head.
"""
if header is None:
header = self.header
lower_limit = max(header.block_number - limit, 0)
for n in reversed(range(lower_limit, header.block_number)):
yield self.get_block_by_number(BlockNumber(n), header.chain_address)
def get_block_by_hash(self, block_hash: Hash32) -> BaseBlock:
block_header = self.get_block_header_by_hash(block_hash)
return self.get_block_by_header(block_header)
def get_block_by_header(self, block_header: BlockHeader) -> BaseBlock:
"""
Returns the requested block as specified by the block header.
"""
block_class = self.get_vm_class_for_block_timestamp(block_header.timestamp).get_block_class()
send_transactions = self.chaindb.get_block_transactions(block_header, block_class.transaction_class)
receive_transactions = self.chaindb.get_block_receive_transactions(block_header,block_class.receive_transaction_class)
reward_bundle = self.chaindb.get_reward_bundle(block_header.reward_hash, block_class.reward_bundle_class)
output_block = block_class(block_header, send_transactions, receive_transactions, reward_bundle)
return output_block
def get_block_by_number(self, block_number: BlockNumber, chain_address: Address = None) -> BaseBlock:
if chain_address is None:
chain_address = self.wallet_address
block_hash = self.chaindb.get_canonical_block_hash(block_number, chain_address)
return self.get_block_by_hash(block_hash)
def get_blocks_on_chain(self, start: int, end: int, chain_address: Address = None) -> List[BaseBlock]:
if chain_address is None:
chain_address = self.wallet_address
if end == 0:
canonical_head_header = self.get_canonical_head(chain_address=chain_address)
head_block_number = canonical_head_header.block_number
end = head_block_number + 1
blocks = []
for block_number in range(start, end):
try:
new_block = self.get_block_by_number(BlockNumber(block_number), chain_address)
blocks.append(new_block)
except HeaderNotFound:
break
return blocks
def get_all_blocks_on_chain(self, chain_address: Address = None) -> List[BaseBlock]:
if chain_address is None:
chain_address = self.wallet_address
canonical_head_header = self.get_canonical_head(chain_address=chain_address)
head_block_number = canonical_head_header.block_number
return self.get_blocks_on_chain(0, head_block_number + 1, chain_address=chain_address)
def get_all_blocks_on_chain_by_head_block_hash(self, chain_head_hash: Hash32) -> List[BaseBlock]:
chain_head_header = self.get_block_header_by_hash(chain_head_hash)
chain_address = chain_head_header.chain_address
return self.get_all_blocks_on_chain(chain_address)
def get_blocks_on_chain_up_to_block_hash(self, chain_head_hash: Hash32, start_block_number: int = 0, limit: int = float('inf')) -> List[BaseBlock]:
chain_head_header = self.get_block_header_by_hash(chain_head_hash)
to_block_number = chain_head_header.block_number
if to_block_number > (start_block_number + limit):
to_block_number = (start_block_number + limit)
chain_address = chain_head_header.chain_address
return self.get_blocks_on_chain(start_block_number, to_block_number + 1, chain_address)
def get_block(self) -> BaseBlock:
"""
Returns the current TIP block.
"""
return self.get_vm().block
    def get_queue_block(self) -> BaseBlock:
        """
        Returns the current queue block.
        """
        return self.get_vm().queue_block
# def get_block_by_hash(self, block_hash: Hash32) -> BaseBlock:
# """
# Returns the requested block as specified by block hash.
# """
# validate_word(block_hash, title="Block Hash")
# block_header = self.get_block_header_by_hash(block_hash)
# return self.get_block_by_header(block_header)
# def get_canonical_block_by_number(self, block_number: BlockNumber) -> BaseBlock:
# """
# Returns the block with the given number in the canonical chain.
#
# Raises BlockNotFound if there's no block with the given number in the
# canonical chain.
# """
# validate_uint256(block_number, title="Block Number")
# return self.get_block_by_hash(self.chaindb.get_canonical_block_hash(block_number))
#
# def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
# """
# Returns the block hash with the given number in the canonical chain.
#
# Raises BlockNotFound if there's no block with the given number in the
# canonical chain.
# """
# return self.chaindb.get_canonical_block_hash(block_number)
#
# Blockchain Database API
#
def save_chain_head_hash_to_trie_for_time_period(self,block_header):
timestamp = block_header.timestamp
        currently_saving_window = int(time.time()/TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE + TIME_BETWEEN_HEAD_HASH_SAVE
        if timestamp <= currently_saving_window:
            # we have to go back and put it into the correct window, then update all windows after that
            # let's only keep the past NUMBER_OF_HEAD_HASH_TO_SAVE block head root hashes
window_for_this_block = int(timestamp / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE + TIME_BETWEEN_HEAD_HASH_SAVE
#window_for_this_block = math.ceil((timestamp + 1)/TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
# if propogate_to_present:
self.chain_head_db.add_block_hash_to_timestamp(block_header.chain_address, block_header.hash, window_for_this_block)
# else:
# self.chain_head_db.add_block_hash_to_timestamp_without_propogating_to_present(self.wallet_address, block_header.hash, window_for_this_block)
#
# Queueblock API
#
def add_transaction_to_queue_block(self, transaction) -> None:
validate_is_queue_block(self.queue_block, title='self.queue_block')
if isinstance(transaction, BaseTransaction):
if not self.queue_block.contains_transaction(transaction):
self.queue_block = self.queue_block.add_transaction(transaction)
else:
self.logger.debug("found transaction in queueblock already, not adding again")
else:
if not self.queue_block.contains_receive_transaction(transaction):
self.queue_block = self.queue_block.add_receive_transaction(transaction)
else:
self.logger.debug("found receive transaction in queueblock already, not adding again")
def add_transactions_to_queue_block(self, transactions) -> None:
if not isinstance(transactions, list):
self.add_transaction_to_queue_block(transactions)
#self.logger.debug("tx_nonce after adding transaction = {}".format(self.queue_block.current_tx_nonce))
else:
for tx in transactions:
self.add_transaction_to_queue_block(tx)
def sign_queue_block(self, *args: Any, **kwargs: Any) -> BaseQueueBlock:
"""
Passthrough helper to the current VM class.
"""
return self.get_vm().sign_queue_block(*args, **kwargs)
def sign_header(self, *args: Any, **kwargs: Any) -> BlockHeader:
"""
Passthrough helper to the current VM class.
"""
return self.get_vm().sign_header(*args, **kwargs)
#
# Transaction API
#
def get_canonical_transaction(self, transaction_hash: Hash32) -> BaseTransaction:
"""
Returns the requested transaction as specified by the transaction hash
from the canonical chain.
Raises TransactionNotFound if no transaction with the specified hash is
found in the main chain.
"""
(block_hash, index, is_receive) = self.chaindb.get_transaction_index(transaction_hash)
block_header = self.get_block_header_by_hash(block_hash)
VM = self.get_vm_class_for_block_timestamp(block_header.timestamp)
if is_receive == False:
transaction = self.chaindb.get_transaction_by_index_and_block_hash(
block_hash,
index,
VM.get_transaction_class(),
)
else:
transaction = self.chaindb.get_receive_transaction_by_index_and_block_hash(
block_hash,
index,
VM.get_receive_transaction_class(),
)
if transaction.hash == transaction_hash:
return transaction
else:
raise TransactionNotFound("Found transaction {} instead of {} in block {} at {}".format(
encode_hex(transaction.hash),
encode_hex(transaction_hash),
block_hash,
index,
))
@functools.lru_cache(maxsize=32)
def get_transaction_by_block_hash_and_index(self, block_hash: Hash32, transaction_index: int) -> Union[BaseTransaction, BaseReceiveTransaction]:
num_send_transactions = self.chaindb.get_number_of_send_tx_in_block(block_hash)
header = self.chaindb.get_block_header_by_hash(block_hash)
vm = self.get_vm(header=header)
if transaction_index >= num_send_transactions:
# receive transaction
transaction_index = transaction_index - num_send_transactions
tx = self.chaindb.get_receive_transaction_by_index_and_block_hash(block_hash=block_hash,
transaction_index=transaction_index,
transaction_class=vm.get_receive_transaction_class())
else:
# send transaction
tx = self.chaindb.get_transaction_by_index_and_block_hash(block_hash=block_hash,
transaction_index=transaction_index,
transaction_class=vm.get_transaction_class())
return tx
def create_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction:
"""
Passthrough helper to the current VM class.
"""
return self.get_vm().create_transaction(*args, **kwargs)
def create_and_sign_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction:
if self.private_key is None:
raise ValueError("Cannot sign transaction because private key not provided for chain instantiation")
transaction = self.create_transaction(*args, **kwargs)
signed_transaction = transaction.get_signed(self.private_key, self.network_id)
return signed_transaction
def create_and_sign_transaction_for_queue_block(self, *args: Any, **kwargs: Any) -> BaseTransaction:
if 'nonce' not in kwargs or kwargs['nonce'] is None:
kwargs['nonce'] = self.get_current_queue_block_nonce()
transaction = self.create_and_sign_transaction(*args, **kwargs)
self.add_transactions_to_queue_block(transaction)
return transaction
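    # Typical flow sketch (keyword names are assumed to mirror the underlying VM
    # transaction class; values are made up):
    # chain.create_and_sign_transaction_for_queue_block(
    #     gas_price=1, gas=21000, to=receiver_address, value=10, data=b"")
    # chain.import_current_queue_block()  # seals and imports the queue block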
def get_current_queue_block_nonce(self) -> int:
if self.queue_block is None or self.queue_block.current_tx_nonce is None:
tx_nonce = self.get_vm().state.account_db.get_nonce(self.wallet_address)
else:
            tx_nonce = self.queue_block.current_tx_nonce
return tx_nonce
def create_receive_transaction(self, *args: Any, **kwargs: Any) -> BaseReceiveTransaction:
"""
Passthrough helper to the current VM class.
"""
return self.get_vm().create_receive_transaction(*args, **kwargs)
def get_receivable_transactions(self, address: Address) -> Tuple[List[BaseReceiveTransaction], List[TransactionKey]]:
tx_keys = self.get_vm().state.account_db.get_receivable_transactions(address)
if len(tx_keys) == 0:
return [], []
transactions = []
for tx_key in tx_keys:
tx = self.get_canonical_transaction(tx_key.transaction_hash)
transactions.append(tx)
return transactions, tx_keys
def create_receivable_transactions(self) -> List[BaseReceiveTransaction]:
tx_keys = self.get_vm().state.account_db.get_receivable_transactions(self.wallet_address)
if len(tx_keys) == 0:
return []
receive_transactions = []
for tx_key in tx_keys:
#find out if it is a receive or a refund
block_hash, index, is_receive = self.chaindb.get_transaction_index(tx_key.transaction_hash)
re_tx = self.get_vm().create_receive_transaction(
sender_block_hash = tx_key.sender_block_hash,
send_transaction_hash=tx_key.transaction_hash,
is_refund=is_receive,
)
receive_transactions.append(re_tx)
return receive_transactions
def populate_queue_block_with_receive_tx(self) -> List[BaseReceiveTransaction]:
receive_tx = self.create_receivable_transactions()
self.add_transactions_to_queue_block(receive_tx)
return receive_tx
def get_block_receive_transactions_by_hash(
self,
block_hash: Hash32) -> List['BaseReceiveTransaction']:
block_header = self.get_block_header_by_hash(block_hash)
vm = self.get_vm(header = block_header)
receive_transaction_class = vm.get_block_class().receive_transaction_class
receive_transactions = self.chaindb.get_block_receive_transactions(header = block_header, transaction_class = receive_transaction_class)
return receive_transactions
def get_receive_tx_from_send_tx(self, tx_hash: Hash32) -> Optional['BaseReceiveTransaction']:
block_hash, index, is_receive = self.chaindb.get_transaction_index(tx_hash)
if is_receive:
raise ValidationError("The provided tx hash is not for a send transaction")
send_transaction = self.get_canonical_transaction(tx_hash)
block_children = self.chaindb.get_block_children(block_hash)
if block_children is not None:
block_children_on_correct_chain = [child_hash for child_hash in block_children
if self.chaindb.get_chain_wallet_address_for_block_hash(child_hash) == send_transaction.to]
for block_hash in block_children_on_correct_chain:
receive_transactions = self.get_block_receive_transactions_by_hash(block_hash)
for receive_tx in receive_transactions:
if receive_tx.send_transaction_hash == tx_hash:
return receive_tx
return None
    def get_transaction_by_index_and_block_hash(self, block_hash: Hash32, transaction_index: int) -> Union[BaseTransaction, BaseReceiveTransaction]:
        header = self.chaindb.get_block_header_by_hash(block_hash)
        vm = self.get_vm(header=header)
        return self.chaindb.get_transaction_by_index_and_block_hash(
            block_hash,
            transaction_index,
            vm.get_transaction_class(),
        )
#
# Chronological Chain api
#
def try_to_rebuild_chronological_chain_from_historical_root_hashes(self, historical_root_hash_timestamp: Timestamp) -> None:
try:
correct_chronological_block_window = self.get_block_hashes_that_are_new_for_this_historical_root_hash_timestamp(historical_root_hash_timestamp)
self.chain_head_db.save_chronological_block_window(correct_chronological_block_window, historical_root_hash_timestamp-TIME_BETWEEN_HEAD_HASH_SAVE)
except InvalidHeadRootTimestamp:
pass
    def get_block_hashes_that_are_new_for_this_historical_root_hash_timestamp(self, historical_root_hash_timestamp: Timestamp) -> List[Tuple[Timestamp, Hash32]]:
        '''
        A time-consuming function that gets all of the blocks that are new in this
        root hash, i.e. that didn't exist in the previous window's base root hash.
        :param historical_root_hash_timestamp:
        :return:
        '''
block_window_start = historical_root_hash_timestamp - TIME_BETWEEN_HEAD_HASH_SAVE
base_root_hash = self.chain_head_db.get_historical_root_hash(block_window_start)
new_root_hash = self.chain_head_db.get_historical_root_hash(historical_root_hash_timestamp)
if base_root_hash == new_root_hash:
return None
if base_root_hash is None or new_root_hash is None:
raise InvalidHeadRootTimestamp(
"Could not load block hashes for this historical_root_hash_timestamp because we don't have a root hash for this window or the previous window.")
base_head_block_hashes = set(self.chain_head_db.get_head_block_hashes(base_root_hash))
new_head_block_hashes = set(self.chain_head_db.get_head_block_hashes(new_root_hash))
diff_head_block_hashes = new_head_block_hashes - base_head_block_hashes
chronological_block_hash_timestamps = []
# now we have to run down each chain until we get to a block that is older than block_window_start
for head_block_hash in diff_head_block_hashes:
header = self.chaindb.get_block_header_by_hash(head_block_hash)
chronological_block_hash_timestamps.append([header.timestamp, head_block_hash])
while True:
if header.parent_hash == GENESIS_PARENT_HASH:
break
try:
header = self.chaindb.get_block_header_by_hash(header.parent_hash)
except HeaderNotFound:
break
if header.timestamp < block_window_start:
break
chronological_block_hash_timestamps.append([header.timestamp, header.hash])
assert len(chronological_block_hash_timestamps) > 0
chronological_block_hash_timestamps.sort()
return chronological_block_hash_timestamps
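    # Result shape sketch (illustrative): a time-ascending list of pairs such as
    # [[1559000010, block_hash_a], [1559000042, block_hash_b]], covering every
    # block whose timestamp falls inside the queried root-hash window.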
def initialize_historical_root_hashes_and_chronological_blocks(self) -> None:
'''
This function rebuilds all historical root hashes, and chronological blocks, from the blockchain database. It starts with the saved root hash and works backwards.
This function needs to be run from chain because it requires chain_head_db and chaindb.
:return:
'''
self.chain_head_db.load_saved_root_hash()
current_window = self.chain_head_db.current_window
earliest_root_hash = self.chain_head_db.earliest_window
        # The plan:
        # 1) iterate down through the historical root hash timestamps
        # 2) for each window, walk every chain down from its head block
        # 3) save any block newer than the window timestamp to that chronological window
        # 4) when we reach a block older than the timestamp, set it as the chain head for this window
        # 5) save the resulting root hash in the chain_head_db at this timestamp
        # A chronological block window holds all of the blocks starting at its timestamp, going to timestamp + TIME_BETWEEN_HEAD_HASH_SAVE.
        # A historical root hash is the root hash at the given timestamp, so it includes all blocks earlier than that timestamp.
        self.logger.debug("Rebuilding chronological block windows")
for current_timestamp in range(current_window, earliest_root_hash-TIME_BETWEEN_HEAD_HASH_SAVE, -TIME_BETWEEN_HEAD_HASH_SAVE):
if current_timestamp < self.genesis_block_timestamp:
break
head_block_hashes = self.chain_head_db.get_head_block_hashes_list()
# iterate over all chains
for head_block_hash in head_block_hashes:
current_block_hash = head_block_hash
# now iterate over blocks in chain
while True:
current_header = self.chaindb.get_block_header_by_hash(current_block_hash)
if current_header.timestamp >= current_timestamp:
# add it to chronological block window in the real chain head db
self.chain_head_db.add_block_hash_to_chronological_window(current_header.hash, current_header.timestamp)
else:
                        # The block is older than the timestamp. Set it as the chain head block hash in the chain head db.
self.chain_head_db.set_chain_head_hash(current_header.chain_address, current_header.hash)
break
if current_header.parent_hash == GENESIS_PARENT_HASH:
# we reached the end of the chain
self.chain_head_db.delete_chain_head_hash(current_header.chain_address)
break
# set the current block to the parent so we move down the chain
current_block_hash = current_header.parent_hash
            # Now that we have gone through all chains, and excluded any blocks newer than this timestamp,
            # the root hash in the chain head db is the correct one for this historical root hash timestamp.
self.chain_head_db.save_single_historical_root_hash(self.chain_head_db.root_hash, Timestamp(current_timestamp))
self.chain_head_db.persist()
        # Finally, load the saved root hash again so we are up to date.
self.chain_head_db.load_saved_root_hash()
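    # --- Editor's note: an illustrative usage sketch; `chain` is an assumed, initialized Chain. ---
    # Rebuild all historical root hashes and chronological windows directly from the block database,
    # for example after the chain_head_db has been lost or corrupted:
    #
    #   chain.initialize_historical_root_hashes_and_chronological_blocks()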
#
# Execution API
#
def estimate_gas(self, transaction: BaseTransaction, at_header: BlockHeader=None) -> int:
"""
Returns an estimation of the amount of gas the given transaction will
use if executed on top of the block specified by the given header.
"""
if at_header is None:
at_header = self.get_canonical_head()
with self.get_vm(at_header).state_in_temp_block() as state:
return self.gas_estimator(state, transaction)
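    # --- Editor's note: an illustrative usage sketch; `chain`, `tx`, and `header` are assumptions. ---
    #
    #   gas = chain.estimate_gas(tx)                      # estimate against the canonical head
    #   gas = chain.estimate_gas(tx, at_header=header)    # estimate on top of a specific header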
    def validate_time_from_genesis_block(self, block) -> None:
        if not block.is_genesis:
            # First make sure enough time has passed since genesis. We need at least TIME_BETWEEN_HEAD_HASH_SAVE
            # seconds since genesis so that the genesis historical root hash only contains the genesis chain.
            if block.header.timestamp < (self.genesis_block_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE):
                raise NotEnoughTimeBetweenBlocks("Not enough time has passed since the genesis block. Must wait at least {} seconds after the genesis block. "
                                                 "This block timestamp is {}, genesis block timestamp is {}.".format(TIME_BETWEEN_HEAD_HASH_SAVE, block.header.timestamp, self.genesis_block_timestamp))
        return
#
# Reverting block functions
#
def delete_canonical_chain(self, wallet_address: Address, vm: 'BaseVM', save_block_head_hash_timestamp:bool = True) -> None:
self.logger.debug("delete_canonical_chain. Chain address {}".format(encode_hex(wallet_address)))
self.chain_head_db.delete_chain(wallet_address, save_block_head_hash_timestamp)
self.chaindb.delete_canonical_chain(wallet_address)
vm.state.clear_account_keep_receivable_transactions_and_persist(wallet_address)
def set_parent_as_canonical_head(self, existing_block_header: BlockHeader, vm: 'BaseVM', save_block_head_hash_timestamp:bool = True) -> None:
block_parent_header = self.chaindb.get_block_header_by_hash(existing_block_header.parent_hash)
self.logger.debug("Setting new block as canonical head after reverting blocks. Chain address {}, header hash {}".format(encode_hex(existing_block_header.chain_address), encode_hex(block_parent_header.hash)))
if save_block_head_hash_timestamp:
self.save_chain_head_hash_to_trie_for_time_period(block_parent_header)
self.chain_head_db.set_chain_head_hash(block_parent_header.chain_address, block_parent_header.hash)
self.chaindb._set_as_canonical_chain_head(block_parent_header)
vm.state.revert_account_to_hash_keep_receivable_transactions_and_persist(block_parent_header.account_hash, block_parent_header.chain_address)
def revert_block(self, descendant_block_hash: Hash32) -> None:
self.logger.debug('Reverting block with hash {}'.format(encode_hex(descendant_block_hash)))
descendant_block_header = self.chaindb.get_block_header_by_hash(descendant_block_hash)
vm = self.get_vm(descendant_block_header)
self.chain_head_db.delete_block_hash_from_chronological_window(descendant_block_hash, descendant_block_header.timestamp)
self.chaindb.remove_block_from_all_parent_child_lookups(descendant_block_header, vm.get_block_class().receive_transaction_class)
self.chaindb.delete_all_block_children_lookups(descendant_block_hash)
self.revert_block_chronological_consistency_lookups(descendant_block_hash)
        # For each block, re-add pending receive transactions for all of its receive transactions, but only if the sending block still exists.
        # Make all blocks unprocessed so that receivable transactions that came from one of the non-canonical blocks are not saved.
vm.reverse_pending_transactions(descendant_block_header)
# remove the block from the canonical chain. This must be done last because reversing the pending transactions requires that it
# is still in the canonical chain to look up transactions
self.chaindb.delete_block_from_canonical_chain(descendant_block_hash)
#self.chaindb.save_unprocessed_block_lookup(descendant_block_hash)
vm.state.account_db.persist()
def revert_block_chronological_consistency_lookups(self, block_hash: Hash32) -> None:
# check to see if there are any reward type 2 proofs. Then loop through each one to revert inconsistency lookups
block_header = self.chaindb.get_block_header_by_hash(block_hash)
block_class = self.get_vm_class_for_block_timestamp(block_header.timestamp).get_block_class()
reward_bundle = self.chaindb.get_reward_bundle(block_header.reward_hash, block_class.reward_bundle_class)
chronological_consistency_key = [block_header.timestamp, block_header.hash]
for proof in reward_bundle.reward_type_2.proof:
# timestamp, block hash of block responsible
sender_chain_header = self.chaindb.get_block_header_by_hash(proof.head_hash_of_sender_chain)
# The chronological consistency restrictions are placed on the block on top of the one giving the proof.
block_number_with_restrictions = sender_chain_header.block_number + 1
self.chaindb.delete_block_consistency_key(sender_chain_header.chain_address, block_number_with_restrictions, chronological_consistency_key)
def purge_block_and_all_children_and_set_parent_as_chain_head_by_hash(self, block_hash_to_delete: Hash32, save_block_head_hash_timestamp: bool = True) -> None:
genesis_block_hash = self.chaindb.get_canonical_block_hash(BlockNumber(0), self.genesis_wallet_address)
if block_hash_to_delete == genesis_block_hash:
raise TriedDeletingGenesisBlock("Attempted to delete genesis block. This is not allowed.")
block_header_to_delete = self.chaindb.get_block_header_by_hash(block_hash_to_delete)
self.purge_block_and_all_children_and_set_parent_as_chain_head(block_header_to_delete, save_block_head_hash_timestamp)
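    # --- Editor's note: an illustrative usage sketch; `chain` and `bad_block_hash` are assumptions. ---
    # Remove a block and every descendant, rolling each affected chain back to the parent block:
    #
    #   chain.purge_block_and_all_children_and_set_parent_as_chain_head_by_hash(bad_block_hash)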
def purge_block_and_all_children_and_set_parent_as_chain_head(self, existing_block_header: BlockHeader, save_block_head_hash_timestamp: bool = True) -> None:
# First make sure it is actually in the canonical chain. If not, then we don't have anything to do.
if self.chaindb.is_in_canonical_chain(existing_block_header.hash):
vm = self.get_vm()
if existing_block_header.block_number == 0:
self.delete_canonical_chain(existing_block_header.chain_address, vm, save_block_head_hash_timestamp)
else:
#set the parent block as the new canonical head, and handle all the data for that
self.set_parent_as_canonical_head(existing_block_header, vm, save_block_head_hash_timestamp)
#1) delete chronological transactions, delete everything from chronological root hashes, delete children lookups
all_descendant_block_hashes = self.chaindb.get_all_descendant_block_hashes(existing_block_header.hash)
#first set all of the new chain heads and all the data that goes along with them
if all_descendant_block_hashes is not None:
for descendant_block_hash in all_descendant_block_hashes:
if not self.chaindb.is_block_unprocessed(descendant_block_hash):
descendant_block_header = self.chaindb.get_block_header_by_hash(descendant_block_hash)
if descendant_block_header.parent_hash not in all_descendant_block_hashes:
#this is the new head of a chain. set it as the new head for chronological root hashes
#except for children in this chain, because it will be off by 1 block. we already set this earlier
if descendant_block_header.chain_address != existing_block_header.chain_address:
if descendant_block_header.block_number == 0:
self.delete_canonical_chain(descendant_block_header.chain_address, vm, save_block_head_hash_timestamp)
else:
self.set_parent_as_canonical_head(descendant_block_header, vm, save_block_head_hash_timestamp)
                # Must persist now because revert_block creates a new VM for each block and could overwrite changes if we wait.
vm.state.account_db.persist()
#now we know what the new heads are, so we can deal with the rest of the descendants
for descendant_block_hash in all_descendant_block_hashes:
#here, since we are already going through all children, we don't need this function to purge children as well
if self.chaindb.is_block_unprocessed(descendant_block_hash):
self.purge_unprocessed_block(descendant_block_hash, purge_children_too = False)
else:
self.revert_block(descendant_block_hash)
self.revert_block(existing_block_header.hash)
#persist changes
self.chain_head_db.persist(True)
self.reinitialize()
def purge_unprocessed_block(self, block_hash, purge_children_too = True):
'''
Deletes all unprocessed block lookups, and unprocessed children lookups for this block and all children blocks.
Todo: delete saved block header, and saved transaction tries for each block as well
'''
self.logger.debug("purging unprocessed block")
if purge_children_too:
self.logger.debug("purging unprocessed children")
if self.chaindb.has_unprocessed_children(block_hash):
self.logger.debug("HAS UNPROCESSED CHILDREN BLOCKS")
children_block_hashes = self.chaindb.get_block_children(block_hash)
                if children_block_hashes is not None:
for child_block_hash in children_block_hashes:
#this includes the child in this actual chain as well as children from send transactions.
if not self.chaindb.is_block_unprocessed(child_block_hash):
raise UnprocessedBlockChildIsProcessed("In process of deleting children of unprocessed block, and found one that is processed. This should never happen")
else:
self.purge_unprocessed_block(child_block_hash)
try:
block = self.get_block_by_hash(block_hash)
chain = encode_hex(block.header.chain_address)
self.logger.debug("deleting unprocessed child block number {} on chain {}".format(block.number, chain))
self.chaindb.remove_block_from_unprocessed(block)
except HeaderNotFound:
pass
def import_chronological_block_window(self, block_list: List[BaseBlock], window_start_timestamp: Timestamp, save_block_head_hash_timestamp:bool = True, allow_unprocessed:bool =False) -> None:
validate_uint256(window_start_timestamp, title='timestamp')
        if block_list is None:
            return
        # Note: an empty (non-None) list is still meaningful below; it deletes any local blocks in this
        # window and propagates the previous historical root hash.
#if we are given a block that is not one of the two allowed classes, try converting it.
if len(block_list) > 0 and not isinstance(block_list[0], self.get_vm(timestamp = block_list[0].header.timestamp).get_block_class()):
self.logger.debug("converting chain to correct class")
corrected_block_list = []
for block in block_list:
corrected_block = self.get_vm(timestamp = block.header.timestamp).convert_block_to_correct_class(block)
corrected_block_list.append(corrected_block)
block_list = corrected_block_list
#first we delete any blocks we have in the same window that are not in the new block list
local_chronological_timestamp_block_window = self.chain_head_db.load_chronological_block_window(window_start_timestamp)
if local_chronological_timestamp_block_window is not None:
local_block_hash_list = [x[1] for x in local_chronological_timestamp_block_window]
new_block_hash_list = [block.hash for block in block_list]
block_hashes_to_delete = effecient_diff(new_block_hash_list, local_block_hash_list)
if len(block_hashes_to_delete) > 0:
self.logger.debug("deleting existing blocks in chronological window {}".format(block_hashes_to_delete))
for block_hash_to_delete in block_hashes_to_delete:
self.purge_block_and_all_children_and_set_parent_as_chain_head_by_hash(block_hash_to_delete)
if len(block_list) > 0:
self.logger.debug("starting block import for chronological block window")
            # Import each block; if an import fails due to intra-window ordering, reorganize the window and retry.
for i in range(len(block_list)):
# Reset this after each block imports
blocks_that_have_been_reorganized = set()
wallet_address = block_list[i].header.chain_address
while True:
try:
self.import_block(block_list[i], wallet_address = wallet_address, save_block_head_hash_timestamp = save_block_head_hash_timestamp, allow_unprocessed=allow_unprocessed)
break
except (UnprocessedBlockNotAllowed, ParentNotFound) as e:
# Because of the timestamps being in seconds, there may be multiple blocks that depend on each other
# with the same timestamp, and they could be out of order. So we attempt to reorganize the blocks
# and import again. If it fails again we will raise the exception.
if block_list[i].header.hash in blocks_that_have_been_reorganized:
self.logger.debug("Already tried reorganizing this block.")
raise e
self.logger.debug("Attempting to reorganize chronological window for import")
blocks_that_have_been_reorganized.add(block_list[i].header.hash)
block_list = reorganize_chronological_block_list_for_correct_chronological_order_at_index(block_list, i, self.logger)
else:
self.logger.debug("importing an empty chronological window. going to make sure we have a saved historical root hash")
historical_root_hashes = self.chain_head_db.get_historical_root_hashes()
if historical_root_hashes is not None:
#historical_root_hashes_dict = dict(historical_root_hashes)
#if it does exist, make sure it is the same as the last one. if not, then delete all newer
try:
self.chain_head_db.propogate_previous_historical_root_hash_to_timestamp(window_start_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE)
except AppendHistoricalRootHashTooOld:
self.logger.debug("Tried to propogate the previous historical root hash but there was none. This shouldn't happen")
#self.logger.debug("historical root hashes after chronological block import {}".format(self.chain_head_db.get_historical_root_hashes()))
def import_chain(self, block_list: List[BaseBlock], perform_validation: bool=True, save_block_head_hash_timestamp: bool = True, allow_replacement: bool = True) -> None:
if len(block_list) > 0:
self.logger.debug("importing chain")
#if we are given a block that is not one of the two allowed classes, try converting it.
if not isinstance(block_list[0], self.get_vm(timestamp = block_list[0].header.timestamp).get_block_class()):
self.logger.debug("converting chain to correct class")
corrected_block_list = []
for block in block_list:
corrected_block = self.get_vm(timestamp = block.header.timestamp).convert_block_to_correct_class(block)
corrected_block_list.append(corrected_block)
block_list = corrected_block_list
wallet_address = block_list[0].header.chain_address
for block in block_list:
self.import_block(block,
perform_validation = perform_validation,
save_block_head_hash_timestamp = save_block_head_hash_timestamp,
wallet_address = wallet_address,
allow_replacement = allow_replacement)
            # If we started with a longer chain, and all of the imported blocks match ours, our chain will remain longer even after importing the new one.
            # To fix this, we need to delete any of our blocks that extend beyond the chain we are importing.
            # First make sure the whole chain imported correctly. If not, then we don't need to do anything.
try:
local_canonical_head = self.chaindb.get_canonical_head(wallet_address)
imported_canonical_head = block_list[-1].header
#self.logger.debug("imported chain head hash {}. actual chain head hash {}".format(encode_hex(imported_canonical_head.hash), encode_hex(local_canonical_head.hash)))
if imported_canonical_head.block_number < local_canonical_head.block_number:
if self.chaindb.is_in_canonical_chain(imported_canonical_head.hash):
# Our chain is the same as the imported one, but we have some extra blocks on top. In this case, we would like to prune our chain
# to match the imported one.
# We only need to purge the next block after the imported chain. The vm will automatically purge all children
self.logger.debug("After importing a chain, our local chain is identical except with additional blocks on top. We will prune the top blocks to bring"
" our chain in line with the imported one.")
block_number_to_purge = imported_canonical_head.block_number + 1
hash_to_purge = self.chaindb.get_canonical_block_hash(BlockNumber(block_number_to_purge), wallet_address)
self.purge_block_and_all_children_and_set_parent_as_chain_head_by_hash(hash_to_purge, save_block_head_hash_timestamp)
except CanonicalHeadNotFound:
pass
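    # --- Editor's note: an illustrative usage sketch; `chain` and `blocks` are assumptions. ---
    # Import a whole chain of blocks belonging to one wallet address; if our local chain matches
    # but is longer, the extra local blocks on top are pruned:
    #
    #   chain.import_chain(blocks, perform_validation=True)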
from hvm.utils.profile import profile
@profile(sortby='cumulative')
def import_block_with_profiler(self, *args, **kwargs):
self.import_block(*args, **kwargs)
def import_block(self, block: BaseBlock,
perform_validation: bool=True,
save_block_head_hash_timestamp = True,
wallet_address = None,
allow_unprocessed = True,
allow_replacement = True,
ensure_block_unchanged:bool = True,
microblock_origin: bool = False) -> BaseBlock:
        # We handle replacing blocks here.
        # This includes deleting any blocks that the new one might be replacing.
        # Then we start the journal db; within _import_block the journal can be committed,
        # but we won't persist until execution gets back out here.
        # The block's chain address takes precedence over any wallet_address argument:
        wallet_address = block.header.chain_address
# we need to re-initialize the chain for the new wallet address.
if wallet_address != self.wallet_address:
self.logger.debug("Changing to chain with wallet address {}".format(encode_hex(wallet_address)))
self.set_new_wallet_address(wallet_address=wallet_address)
journal_enabled = False
        # If we are given a block that is not one of the two allowed classes, try converting it.
        # There is no reason why this should be a queueblock, because a queueblock would never come over the network;
        # it is always generated locally, and should have the correct class.
if not isinstance(block, self.get_vm(timestamp = block.header.timestamp).get_block_class()):
self.logger.debug("converting block to correct class")
block = self.get_vm(timestamp = block.header.timestamp).convert_block_to_correct_class(block)
if isinstance(block, self.get_vm(timestamp = block.header.timestamp).get_queue_block_class()):
# Set the queue block timestamp to now, when it is being imported.
block = block.copy(header=block.header.copy(timestamp=int(time.time())))
else:
if block.header.chain_address == self.genesis_wallet_address and block.header.block_number == 0:
try:
our_genesis_hash = self.chaindb.get_canonical_block_header_by_number(BlockNumber(0), self.genesis_wallet_address).hash
except HeaderNotFound:
raise NoGenesisBlockPresent("Tried importing a block, but we have no genesis block loaded. Need to load a genesis block first.")
if block.header.hash == our_genesis_hash:
return block
else:
raise ValidationError("Tried to import a new genesis block on the genesis chain. This is not allowed.")
if len(block.transactions) == 0 and len(block.receive_transactions) == 0:
# if block.reward_bundle is None:
# raise ValidationError('The block must have at least 1 transaction, or a non-zero reward bundle. Reward bundle = None')
if (block.reward_bundle.reward_type_1.amount == 0 and block.reward_bundle.reward_type_2.amount == 0):
raise RewardAmountRoundsToZero('The block has no send or receive transactions, and the reward bundle has amount = 0 for all types of rewards. This is not allowed. If this is just a reward block this usually means more time needs to pass before creating reward bundle.')
#if we are adding to the top of the chain, or beyond, we need to check for unprocessed blocks
#handle deleting any unprocessed blocks that will be replaced.
if block.number >= self.header.block_number:
existing_unprocessed_block_hash = self.chaindb.get_unprocessed_block_hash_by_block_number(self.wallet_address, block.number)
if (existing_unprocessed_block_hash != block.hash) and (existing_unprocessed_block_hash is not None):
if not allow_replacement:
raise ReplacingBlocksNotAllowed("Attempted to replace an unprocessed block.")
#check to make sure the parent matches the one we have
if block.number != 0:
# if block.number == self.header.block_number:
# existing_parent_hash = self.chaindb.get_canonical_head_hash(self.wallet_address)
# else:
existing_unprocessed_parent_hash = self.chaindb.get_unprocessed_block_hash_by_block_number(self.wallet_address, block.number-1)
if existing_unprocessed_parent_hash is not None:
if block.header.parent_hash != existing_unprocessed_parent_hash:
raise ParentNotFound("Parent is unprocessed. Parent hash = {}, this hash = {}".format(
encode_hex(existing_unprocessed_parent_hash), encode_hex(block.header.parent_hash)))
else:
try:
existing_canonical_parent_hash = self.chaindb.get_canonical_block_header_by_number(block.header.block_number-1, block.header.chain_address)
if block.header.parent_hash != existing_canonical_parent_hash:
raise ParentNotFound("Parent is canonical. Parent hash = {}, this hash = {}".format(
encode_hex(existing_canonical_parent_hash), encode_hex(block.header.parent_hash)))
except HeaderNotFound:
pass
                # Let's delete the unprocessed block and its children, then import.
self.enable_journal_db()
journal_record = self.record_journal()
journal_enabled = True
self.purge_unprocessed_block(existing_unprocessed_block_hash)
#check to see if this is the same hash that was already saved as unprocessed
if block.number > self.header.block_number:
#check that the parent hash matches what we have.
existing_parent_hash = self.chaindb.get_unprocessed_block_hash_by_block_number(self.wallet_address, block.number-1)
#we can allow this for unprocessed blocks as long as we have the parent in our database
if existing_parent_hash == block.header.parent_hash:
if block.hash == self.chaindb.get_unprocessed_block_hash_by_block_number(self.wallet_address, block.number):
#we already imported this one
return_block = block
else:
#save as unprocessed
if not allow_unprocessed:
raise UnprocessedBlockNotAllowed()
self.logger.debug("Saving block as unprocessed because parent on this chain is unprocessed")
return_block = self.save_block_as_unprocessed(block)
if journal_enabled:
                    self.logger.debug('committing journal')
self.commit_journal(journal_record)
self.persist_journal()
self.disable_journal_db()
return return_block
else:
raise ParentNotFound('Parent is unprocessed 2')
        # Now, if it is the head of the chain, let's make sure the parent hash is correct.
if block.number == self.header.block_number and block.number != 0:
if block.header.parent_hash != self.chaindb.get_canonical_head_hash(chain_address= self.wallet_address):
raise ParentNotFound("Block is at the head of the chain")
if block.number < self.header.block_number:
if not allow_replacement:
raise ReplacingBlocksNotAllowed("Attempted to replace a canonical block")
self.logger.debug("went into block replacing mode")
self.logger.debug("block.number = {}, self.header.block_number = {}".format(block.number,self.header.block_number))
self.logger.debug("this chains wallet address = {}, this block's sender = {}".format(encode_hex(self.wallet_address), encode_hex(block.sender)))
#check to see if we can load the existing canonical block
existing_block_header = self.chaindb.get_canonical_block_header_by_number(block.number, self.wallet_address)
if existing_block_header.hash == block.header.hash:
self.logger.debug("tried to import a block that has a hash that matches the local block. no import required.")
return block
else:
if not journal_enabled:
self.enable_journal_db()
journal_record = self.record_journal()
journal_enabled = True
self.purge_block_and_all_children_and_set_parent_as_chain_head(existing_block_header, save_block_head_hash_timestamp = save_block_head_hash_timestamp)
        # Check to see if this block is chronologically inconsistent - usually due to a reward block that used a proof from this chain.
block_hashes_leading_to_inconsistency = self.check_block_chronological_consistency(block)
if len(block_hashes_leading_to_inconsistency) > 0:
if not allow_replacement:
raise ReplacingBlocksNotAllowed("Attempted to import chronologically inconsistent block. Block hashes leading to inconsistency = {}.".format([encode_hex(x) for x in block_hashes_leading_to_inconsistency]))
else:
# revert all of the blocks leading to the inconsistency.
if not journal_enabled:
self.enable_journal_db()
journal_record = self.record_journal()
journal_enabled = True
for block_hash in block_hashes_leading_to_inconsistency:
self.logger.debug("Purging block {} to preserve chronological consistency".format(encode_hex(block_hash)))
block_header = self.chaindb.get_block_header_by_hash(block_hash)
                    # This should be impossible, but let's double check that none of these blocks are on the same chain as this block.
if block_header.chain_address == block.header.chain_address:
raise Exception("Tried to revert chronologically inconsistent block on this same chain. This should never happen...")
self.purge_block_and_all_children_and_set_parent_as_chain_head(block_header, save_block_head_hash_timestamp = save_block_head_hash_timestamp)
try:
return_block = self._import_block(block = block,
perform_validation = perform_validation,
save_block_head_hash_timestamp = save_block_head_hash_timestamp,
allow_unprocessed = allow_unprocessed,
ensure_block_unchanged= ensure_block_unchanged,
microblock_origin = microblock_origin)
# handle importing unprocessed blocks here because doing it recursively results in maximum recursion depth exceeded error
if not self.chaindb.is_block_unprocessed(return_block.hash):
self.logger.debug("Checking to see if block has unprocessed children")
self.import_all_unprocessed_descendants(return_block.hash,
perform_validation= True,
save_block_head_hash_timestamp = save_block_head_hash_timestamp,
allow_unprocessed = True)
except Exception as e:
if journal_enabled:
self.logger.debug('discarding journal')
self.discard_journal(journal_record)
self.disable_journal_db()
raise e
if journal_enabled:
            self.logger.debug('committing journal')
self.commit_journal(journal_record)
self.persist_journal()
self.disable_journal_db()
return return_block
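    # --- Editor's note: an illustrative usage sketch; `chain` and `block` are assumptions. ---
    #
    #   imported = chain.import_block(block)   # default: may replace blocks, queues unprocessed ones
    #   imported = chain.import_block(block, allow_unprocessed=False, allow_replacement=False)  # strict import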
def _import_block(self, block: BaseBlock,
perform_validation: bool=True,
save_block_head_hash_timestamp = True,
allow_unprocessed = True,
ensure_block_unchanged: bool = True,
microblock_origin: bool = False) -> BaseBlock:
"""
Imports a complete block.
"""
self.logger.debug("importing block {} with number {}".format(block.__repr__(), block.number))
self.validate_time_from_genesis_block(block)
if isinstance(block, self.get_vm(timestamp = block.header.timestamp).get_queue_block_class()):
# If it was a queueblock, then the header will have changed after importing
perform_validation = False
ensure_block_unchanged = False
queue_block = True
else:
queue_block = False
if not self.chaindb.is_block_unprocessed(block.header.parent_hash):
#this part checks to make sure the parent exists
try:
vm = self.get_vm(timestamp = block.header.timestamp)
self.logger.debug("importing block with vm {}".format(vm.__repr__()))
if queue_block:
imported_block = vm.import_block(block, private_key = self.private_key)
else:
imported_block = vm.import_block(block)
# Validate the imported block.
if ensure_block_unchanged:
if microblock_origin:
# this started out as a microblock. So we only ensure the microblock fields are unchanged.
self.logger.debug('ensuring block unchanged. microblock correction')
corrected_micro_block = block.copy(header = block.header.copy(
receipt_root = imported_block.header.receipt_root,
bloom = imported_block.header.bloom,
gas_limit = imported_block.header.gas_limit,
gas_used = imported_block.header.gas_used,
account_hash = imported_block.header.account_hash,
account_balance = imported_block.header.account_balance,
))
ensure_imported_block_unchanged(imported_block, corrected_micro_block)
else:
self.logger.debug('ensuring block unchanged')
ensure_imported_block_unchanged(imported_block, block)
else:
self.logger.debug('Not checking block for changes.')
if perform_validation:
self.validate_block(imported_block)
#self.chain_head_db.set_chain_head_hash(self.wallet_address, imported_block.header.hash)
if save_block_head_hash_timestamp:
self.chain_head_db.add_block_hash_to_chronological_window(imported_block.header.hash, imported_block.header.timestamp)
self.save_chain_head_hash_to_trie_for_time_period(imported_block.header)
self.chain_head_db.set_chain_head_hash(imported_block.header.chain_address, imported_block.header.hash)
self.chain_head_db.persist(True)
self.chaindb.persist_block(imported_block)
vm.state.account_db.persist(save_account_hash = True, wallet_address = self.wallet_address)
#here we must delete the unprocessed lookup before importing children
#because the children cannot be imported if their chain parent is unprocessed.
#but we cannot delete the lookup for unprocessed children yet.
self.chaindb.remove_block_from_unprocessed(imported_block)
# Add chronological consistency lookups
self.save_block_chronological_consistency_lookups(imported_block)
try:
self.header = self.create_header_from_parent(self.get_canonical_head())
except CanonicalHeadNotFound:
self.header = self.get_vm_class_for_block_timestamp().create_genesis_block(self.wallet_address).header
self.queue_block = None
self.logger.debug(
'IMPORTED_BLOCK: number %s | hash %s',
imported_block.number,
encode_hex(imported_block.hash),
)
# Make sure our wallet address hasn't magically changed
if self.wallet_address != imported_block.header.chain_address:
raise ValidationError("Attempted to import a block onto the wrong chain.")
return_block = imported_block
except ReceivableTransactionNotFound as e:
if not allow_unprocessed:
raise UnprocessedBlockNotAllowed()
self.logger.debug("Saving block as unprocessed because of ReceivableTransactionNotFound error: {}".format(e))
return_block = self.save_block_as_unprocessed(block)
if self.raise_errors:
raise e
except RewardProofSenderBlockMissing as e:
if not allow_unprocessed:
raise UnprocessedBlockNotAllowed()
self.logger.debug("Saving block as unprocessed because of RewardProofSenderBlockMissing error: {}".format(e))
return_block = self.save_block_as_unprocessed(block)
else:
if not allow_unprocessed:
raise UnprocessedBlockNotAllowed()
self.logger.debug("Saving block as unprocessed because parent on this chain is unprocessed")
return_block = self.save_block_as_unprocessed(block)
return return_block
def import_all_unprocessed_descendants(self, block_hash, *args, **kwargs):
# 1) get unprocessed children
# 2) loop through and import
# 3) if child imports, add their unprocessed children to list, and delete that block from unprocessed
# 4) if list of unprocessed children has 0 length, break
# need to step one level at a time. We use a queue to achieve this effect. It won't get to the next level
# until it finishes all of the blocks on this level. So it goes one level at a time.
if self.chaindb.has_unprocessed_children(block_hash):
self.logger.debug("HAS UNPROCESSED BLOCKS")
# try to import all children
children_block_hashes = self.chaindb.get_block_children(block_hash)
            if children_block_hashes is not None:
block_hashes_to_import = deque(children_block_hashes)
# iterate over children
while True:
# remove from right side
current_block_hash_to_import = block_hashes_to_import.pop()
if self.chaindb.is_block_unprocessed(current_block_hash_to_import):
self.logger.debug("importing child block")
try:
child_block = self.get_block_by_hash(current_block_hash_to_import)
if child_block.header.chain_address != self.wallet_address:
#self.logger.debug("Changing to chain with wallet address {}".format(encode_hex(child_block.header.chain_address)))
self.set_new_wallet_address(wallet_address=child_block.header.chain_address)
self._import_block(child_block, *args, **kwargs)
                        # If the block imported, add its children to the deque.
if not self.chaindb.is_block_unprocessed(current_block_hash_to_import):
# it imported successfully
if self.chaindb.has_unprocessed_children(current_block_hash_to_import):
children_block_hashes = self.chaindb.get_block_children(current_block_hash_to_import)
                                if children_block_hashes is not None:
block_hashes_to_import.extendleft(children_block_hashes)
# we have queued up its children to be imported. Assuming exceptions don't occur, we can remove this block from the unprocessed children lookup.
self.chaindb.delete_unprocessed_children_blocks_lookup(current_block_hash_to_import)
except Exception as e:
self.logger.error("Tried to import an unprocessed child block and got this error {}".format(e))
if len(block_hashes_to_import) == 0:
return
self.chaindb.delete_unprocessed_children_blocks_lookup(block_hash)
def save_block_chronological_consistency_lookups(self, block: BaseBlock) -> None:
'''
We need to require that the proof sender chain doesn't add a block after their claimed chain_head_hash, and the timestamp of this block being imported.
:param block:
:return:
'''
block_header = block.header
reward_bundle = self.chaindb.get_reward_bundle(block_header.reward_hash, block.reward_bundle_class)
chronological_consistency_key = [block_header.timestamp, block_header.hash]
for proof in reward_bundle.reward_type_2.proof:
# timestamp, block hash of block responsible
sender_chain_header = self.chaindb.get_block_header_by_hash(proof.head_hash_of_sender_chain)
# The chronological consistency restrictions are placed on the block on top of the one giving the proof.
block_number_with_restrictions = sender_chain_header.block_number + 1
self.logger.debug("saving chronological consistency lookup for chain {}, block {}, timestamp {}".format(encode_hex(sender_chain_header.chain_address), block_number_with_restrictions, block_header.timestamp))
self.chaindb.add_block_consistency_key(sender_chain_header.chain_address, block_number_with_restrictions, chronological_consistency_key)
def save_block_as_unprocessed(self, block):
        # If it is already saved as unprocessed, do nothing.
if self.chaindb.is_block_unprocessed(block.hash):
return block
#before adding to unprocessed blocks, make sure the receive transactions are valid
# for receive_transaction in block.receive_transactions:
# #there must be at least 1 to get this far
# receive_transaction.validate()
#now we add it to unprocessed blocks
self.chaindb.save_block_as_unprocessed(block)
#save the transactions to db
vm = self.get_vm(timestamp = block.header.timestamp)
vm.save_items_to_db_as_trie(block.transactions, block.header.transaction_root)
vm.save_items_to_db_as_trie(block.receive_transactions, block.header.receive_transaction_root)
        # We don't want to persist because that would add it to the canonical chain.
        # We just want to save it to the database so we can process it later if need be.
self.chaindb.persist_non_canonical_block(block)
#self.chaindb.persist_block(block)
try:
self.header = self.create_header_from_parent(self.get_canonical_head())
except CanonicalHeadNotFound:
self.header = self.get_vm_class_for_block_timestamp().create_genesis_block(self.wallet_address).header
self.queue_block = None
self.logger.debug(
'SAVED_BLOCK_AS_UNPROCESSED: number %s | hash %s',
block.number,
encode_hex(block.hash),
)
return block
def import_current_queue_block(self) -> BaseBlock:
return self.import_block(self.queue_block)
def import_current_queue_block_with_reward(self, node_staking_score_list: List[NodeStakingScore]) -> BaseBlock:
reward_bundle = self.get_consensus_db().create_reward_bundle_for_block(self.wallet_address, node_staking_score_list, at_timestamp=Timestamp(int(time.time())))
# #testing
# reward_bundle = reward_bundle.copy(reward_type_2 = reward_bundle.reward_type_2.copy(amount=0))
self.queue_block = self.queue_block.copy(reward_bundle = reward_bundle)
return self.import_current_queue_block()
    def get_all_chronological_blocks_for_window(self, window_timestamp: Timestamp) -> Optional[List[BaseBlock]]:
validate_uint256(window_timestamp, title='timestamp')
chronological_blocks = self.chain_head_db.load_chronological_block_window(window_timestamp)
if chronological_blocks is None:
return None
else:
list_of_blocks = []
for chronological_block in chronological_blocks:
block_hash = chronological_block[1]
new_block = self.get_block_by_hash(block_hash)
list_of_blocks.append(new_block)
return list_of_blocks
#
# Chronologically consistent blockchain db API
#
def check_block_chronological_consistency(self, block: BaseBlock) -> List[Hash32]:
'''
Checks to see if the block breaks any chronological consistency. If it does, it will return a list of blocks that need to be reverted for this block to be imported
returns list of block hashes that have to be reverted
:param block:
:return:
'''
consistency_keys = self.chaindb.get_block_chronological_consistency_keys(block.header.chain_address, block.header.block_number)
block_hashes_to_revert = list()
for consistency_key in consistency_keys:
if consistency_key[0] > block.header.timestamp:
block_hashes_to_revert.append(consistency_key[1])
return block_hashes_to_revert
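    # --- Editor's note: an illustrative usage sketch; `chain` and `block` are assumptions. ---
    #
    #   to_revert = chain.check_block_chronological_consistency(block)
    #   if to_revert:
    #       # these block hashes would have to be reverted before `block` can be imported
    #       ...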
#
# Validation API
#
def get_allowed_time_of_next_block(self, chain_address: Address = None) -> Timestamp:
if chain_address is None:
chain_address = self.wallet_address
try:
canonical_head = self.chaindb.get_canonical_head(chain_address=chain_address)
except CanonicalHeadNotFound:
return Timestamp(0)
vm = self.get_vm(timestamp=Timestamp(int(time.time())))
min_allowed_time_between_blocks = vm.min_time_between_blocks
return Timestamp(canonical_head.timestamp + min_allowed_time_between_blocks)
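    # --- Editor's note: an illustrative usage sketch; `chain` is an assumption. ---
    # Compute how long to wait before the next block may be added to this chain:
    #
    #   allowed_at = chain.get_allowed_time_of_next_block()
    #   wait_seconds = max(0, allowed_at - int(time.time()))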
def validate_block(self, block: BaseBlock) -> None:
"""
Performs validation on a block that is either being mined or imported.
        Since block validation (specifically the uncle validation) must have
        access to the ancestor blocks, this validation must occur at the Chain
level.
"""
self.validate_gaslimit(block.header)
def validate_gaslimit(self, header: BlockHeader) -> None:
"""
Validate the gas limit on the given header.
"""
#parent_header = self.get_block_header_by_hash(header.parent_hash)
#low_bound, high_bound = compute_gas_limit_bounds(parent_header)
#if header.gas_limit < low_bound:
# raise ValidationError(
# "The gas limit on block {0} is too low: {1}. It must be at least {2}".format(
# encode_hex(header.hash), header.gas_limit, low_bound))
if header.gas_limit > BLOCK_GAS_LIMIT:
raise ValidationError(
"The gas limit on block {0} is too high: {1}. It must be at most {2}".format(
encode_hex(header.hash), header.gas_limit, BLOCK_GAS_LIMIT))
def validate_block_specification(self, block) -> bool:
        '''
        This validates everything we can without looking at the blockchain database. It doesn't need to assume
        that we have the block that sent the transactions.
        Things that this can check:
            block signature
            send transaction signatures
            receive transaction signatures - don't need to check this; it doesn't add any security
            signatures of send transactions within receive transactions
            send transaction root matches transactions
            receive transaction root matches transactions
        '''
if not isinstance(block, self.get_vm(timestamp = block.header.timestamp).get_block_class()):
self.logger.debug("converting block to correct class")
block = self.get_vm(timestamp = block.header.timestamp).convert_block_to_correct_class(block)
block.header.check_signature_validity()
for transaction in block.transactions:
transaction.validate()
for transaction in block.receive_transactions:
transaction.validate()
send_tx_root_hash, _ = make_trie_root_and_nodes(block.transactions)
if block.header.transaction_root != send_tx_root_hash:
raise ValidationError("Block has invalid transaction root")
receive_tx_root_hash, _ = make_trie_root_and_nodes(block.receive_transactions)
if block.header.receive_transaction_root != receive_tx_root_hash:
raise ValidationError("Block has invalid receive transaction root")
return True
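    # --- Editor's note: an illustrative usage sketch; `chain` and `block` are assumptions. ---
    # Stateless validation only: signatures and transaction roots, no chain database lookups.
    #
    #   chain.validate_block_specification(block)   # raises ValidationError on failure, returns True otherwise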
#
# Stake API
#
def get_mature_stake(self, wallet_address: Address = None, raise_canonical_head_not_found_error:bool = False) -> int:
if wallet_address is None:
wallet_address = self.wallet_address
coin_mature_time_for_staking = self.get_vm(timestamp = Timestamp(int(time.time()))).consensus_db.coin_mature_time_for_staking
return self.chaindb.get_mature_stake(wallet_address, coin_mature_time_for_staking, raise_canonical_head_not_found_error = raise_canonical_head_not_found_error)
    # Gets the stake for the timestamp corresponding to the chronological block window, so it is all blocks for the next 1000 seconds.
def get_mature_stake_for_chronological_block_window(self, chronological_block_window_timestamp: Timestamp, timestamp_for_stake: Timestamp = None):
if timestamp_for_stake is not None and timestamp_for_stake < chronological_block_window_timestamp:
raise ValidationError("Cannot get chronological block window stake for a timestamp before the window")
if timestamp_for_stake is None:
timestamp_for_stake = int(time.time())
chronological_block_hash_timestamps = self.chain_head_db.load_chronological_block_window(chronological_block_window_timestamp)
chronological_block_hashes = [x[1] for x in chronological_block_hash_timestamps]
coin_mature_time_for_staking = self.get_vm(timestamp=timestamp_for_stake).consensus_db.coin_mature_time_for_staking
return self.chaindb.get_total_block_stake_of_block_hashes(chronological_block_hashes, coin_mature_time_for_staking, timestamp_for_stake)
def get_new_block_hash_to_test_peer_node_health(self) -> Hash32:
'''
returns one of the newest blocks we have seen.
:return:
'''
        before_this_timestamp = int(time.time()) - 60  # ask the peer for a block that we received more than 1 minute ago
current_historical_window = int(time.time() / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
for timestamp in range(current_historical_window,
current_historical_window-NUMBER_OF_HEAD_HASH_TO_SAVE*TIME_BETWEEN_HEAD_HASH_SAVE,
-1* TIME_BETWEEN_HEAD_HASH_SAVE):
chronological_window = self.chain_head_db.load_chronological_block_window(timestamp)
if chronological_window is not None:
chronological_window.sort(key=lambda x: -1*x[0])
for timestamp_hash in chronological_window:
if timestamp_hash[0] < before_this_timestamp:
return timestamp_hash[1]
#if we get to here then we don't have any blocks within all chronological block windows...
raise NoChronologicalBlocks()
#
# Min Block Gas API used for throttling the network
#
def re_initialize_historical_minimum_gas_price_at_genesis(self) -> None:
'''
        Re-initializes the system with the last saved minimum gas price and network tpc capability.
'''
hist_min_gas_price = self.chaindb.load_historical_minimum_gas_price()
hist_tpc_cap = self.chaindb.load_historical_network_tpc_capability()
hist_tx_per_centisecond = self.chaindb.load_historical_tx_per_centisecond()
if hist_min_gas_price is not None:
init_min_gas_price = hist_min_gas_price[-1][1]
else:
init_min_gas_price = 1
if hist_tpc_cap is not None:
init_tpc_cap = hist_tpc_cap[-1][1]
else:
init_tpc_cap = self.get_local_tpc_cap()
if hist_tx_per_centisecond is not None:
init_tpc = hist_tx_per_centisecond[-1][1]
else:
init_tpc = None
self.chaindb.initialize_historical_minimum_gas_price_at_genesis(init_min_gas_price, init_tpc_cap, init_tpc)
def update_current_network_tpc_capability(self, current_network_tpc_cap: int, update_min_gas_price:bool = True) -> None:
validate_uint256(current_network_tpc_cap, title="current_network_tpc_cap")
self.chaindb.save_current_historical_network_tpc_capability(current_network_tpc_cap)
if update_min_gas_price:
current_centisecond = int(time.time()/100) * 100
timestamp_min_gas_price_updated = self.update_tpc_from_chronological(update_min_gas_price = True)
if timestamp_min_gas_price_updated > current_centisecond:
self.chaindb._recalculate_historical_mimimum_gas_price(current_centisecond)
def update_tpc_from_chronological(self, update_min_gas_price: bool = True):
        # Start at the newest window. If the tpc matches what is already in the database, stop; if it differs, keep going back.
self.logger.debug("Updating tpc from chronological")
current_historical_window = int(time.time()/TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE
current_centisecond = int(time.time()/100) * 100
        # Load this once to find out if it's None. If it is None, then the node just started; only go back a limited number of windows (20 below).
#hist_tpc = self.chaindb.load_historical_tx_per_centisecond()
end_outer = current_historical_window-20*TIME_BETWEEN_HEAD_HASH_SAVE
for historical_window_timestamp in range(current_historical_window,
end_outer,
-TIME_BETWEEN_HEAD_HASH_SAVE):
tpc_sum_dict = {}
chronological_block_window = self.chain_head_db.load_chronological_block_window(historical_window_timestamp)
self.logger.debug('loading chronological block window for timestamp {}'.format(historical_window_timestamp))
#zero the dictionary
if historical_window_timestamp+TIME_BETWEEN_HEAD_HASH_SAVE < current_centisecond:
end = historical_window_timestamp +TIME_BETWEEN_HEAD_HASH_SAVE
else:
end = current_centisecond+100
for timestamp in range(historical_window_timestamp, end, 100):
tpc_sum_dict[timestamp] = 0
if chronological_block_window is not None:
for timestamp_block_hash in chronological_block_window:
#first count up the tx in the block
                # if it is 0, set it to 1 in case the block contains only receive transactions
num_tx_in_block = self.chaindb.get_number_of_total_tx_in_block(timestamp_block_hash[1])
if num_tx_in_block == 0:
num_tx_in_block = 1
#then add them to the dict
centisecond_window_for_block = int(timestamp_block_hash[0]/100) * 100
if centisecond_window_for_block <= end:
tpc_sum_dict[centisecond_window_for_block] += num_tx_in_block
same_as_database = self._update_tpc_from_chronological(tpc_sum_dict)
            if same_as_database:
break
if update_min_gas_price:
self.chaindb._recalculate_historical_mimimum_gas_price(historical_window_timestamp + TIME_BETWEEN_HEAD_HASH_SAVE)
return historical_window_timestamp+TIME_BETWEEN_HEAD_HASH_SAVE
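    # --- Editor's note: the centisecond bucketing used above, shown with a concrete value. ---
    #
    #   ts = 1234567891                 # a block timestamp in seconds
    #   bucket = int(ts / 100) * 100    # -> 1234567800, the 100-second bucket the block's tx count lands in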
def _update_tpc_from_chronological(self, new_hist_tpc_dict):
'''
returns True if they are all the same as what we already had in the database, otherwise it returns False
'''
if not isinstance(new_hist_tpc_dict, dict):
raise ValidationError("Expected a dict. Didn't get a dict.")
hist_tpc = self.chaindb.load_historical_tx_per_centisecond()
difference_found = False
if hist_tpc is None:
hist_tpc = list(new_hist_tpc_dict.items())
else:
hist_tpc_dict = dict(hist_tpc)
for timestamp, tpc in new_hist_tpc_dict.items():
if timestamp not in hist_tpc_dict or hist_tpc_dict[timestamp] != tpc:
#if tpc != 0:
difference_found = True
hist_tpc_dict[timestamp] = tpc
hist_tpc = list(hist_tpc_dict.items())
#print(hist_tpc)
#save it to db
self.chaindb.save_historical_tx_per_centisecond(hist_tpc, de_sparse = False)
return not difference_found
def get_local_tpc_cap(self) -> int:
#base it on the time it takes to import a block
from hvm.utils.profile import profile
from hvm.db.backends.memory import MemoryDB
from hvm import MainnetChain
from hvm.chains.mainnet import (
MAINNET_TPC_CAP_TEST_GENESIS_PARAMS,
MAINNET_TPC_CAP_TEST_GENESIS_STATE,
TPC_CAP_TEST_GENESIS_PRIVATE_KEY,
MAINNET_TPC_CAP_TEST_BLOCK_TO_IMPORT,
)
db = MemoryDB()
chain = MainnetChain.from_genesis(db,
TPC_CAP_TEST_GENESIS_PRIVATE_KEY.public_key.to_canonical_address(),
MAINNET_TPC_CAP_TEST_GENESIS_PARAMS,
MAINNET_TPC_CAP_TEST_GENESIS_STATE,
private_key = TPC_CAP_TEST_GENESIS_PRIVATE_KEY)
block_to_import = chain.get_vm(timestamp = MAINNET_TPC_CAP_TEST_BLOCK_TO_IMPORT['header']['timestamp']).get_block_class().from_dict(MAINNET_TPC_CAP_TEST_BLOCK_TO_IMPORT)
chain.genesis_wallet_address = MAINNET_TPC_CAP_TEST_GENESIS_PARAMS['chain_address']
chain.genesis_block_timestamp = MAINNET_TPC_CAP_TEST_GENESIS_PARAMS['timestamp']
#@profile(sortby='cumulative')
def temp():
chain.import_block(block_to_import)
start_time = time.time()
temp()
duration = time.time()-start_time
#self.logger.debug('duration = {} seconds'.format(duration))
tx_per_centisecond = int(100/duration)
return tx_per_centisecond
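    # --- Editor's note: an illustrative usage sketch; `chain` is an assumption. ---
    # Benchmarks this node by importing a canned mainnet test block into a throwaway
    # in-memory chain, then converts the import time into transactions per centisecond:
    #
    #   local_cap = chain.get_local_tpc_cap()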
#
    # Consensus DB passthroughs that depend on block timestamp
#
def get_signed_peer_score(self, private_key: PrivateKey,
network_id: int,
peer_wallet_address: Address,
after_block_number: BlockNumber = None,
) -> NodeStakingScore:
        # This function should always use the VM for the current timestamp, so we don't need to ask for a timestamp.
return self.get_consensus_db(timestamp=Timestamp(int(time.time()))).get_signed_peer_score(private_key,
network_id,
peer_wallet_address,
after_block_number)
def get_signed_peer_score_string_private_key(self,
private_key_string: bytes,
peer_wallet_address: Address,
after_block_number: BlockNumber = None,
) -> NodeStakingScore:
network_id = self.network_id
        # This always occurs at the current time, so we use the current consensus db.
return self.get_consensus_db(timestamp=Timestamp(int(time.time()))).get_signed_peer_score_string_private_key(private_key_string,
network_id,
peer_wallet_address,
after_block_number)
def validate_node_staking_score(self,
node_staking_score: NodeStakingScore,
since_block_number: BlockNumber) -> None:
# This depends on when the staking score was created. So get the consensus db given by that timestamp
return self.get_consensus_db(timestamp = node_staking_score.timestamp).validate_node_staking_score(node_staking_score, since_block_number)
    def save_health_request(self, peer_wallet_address: Address, response_time_in_micros: float = float('inf')) -> None:
        # This always occurs at the current time, so we use the current consensus db.
return self.get_consensus_db(timestamp=Timestamp(int(time.time()))).save_health_request(peer_wallet_address,
response_time_in_micros)
def get_current_peer_node_health(self,peer_wallet_address: Address) -> PeerNodeHealth:
return self.get_consensus_db(timestamp=Timestamp(int(time.time()))).get_current_peer_node_health(peer_wallet_address)
| [((18650, 18692), 'logging.getLogger', 'logging.getLogger', (['"""hvm.chain.chain.Chain"""'], {}), "('hvm.chain.chain.Chain')\n", (18667, 18692), False, 'import logging\n'), ((39949, 39980), 'functools.lru_cache', 'functools.lru_cache', ([], {'maxsize': '(32)'}), '(maxsize=32)\n', (39968, 39980), False, 'import functools\n'), ((75607, 75635), 'hvm.utils.profile.profile', 'profile', ([], {'sortby': '"""cumulative"""'}), "(sortby='cumulative')\n", (75614, 75635), False, 'from hvm.utils.profile import profile\n'), ((6303, 6330), 'hvm.validation.validate_uint256', 'validate_uint256', (['timestamp'], {}), '(timestamp)\n', (6319, 6330), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((19373, 19433), 'hvm.validation.validate_canonical_address', 'validate_canonical_address', (['wallet_address', '"""Wallet Address"""'], {}), "(wallet_address, 'Wallet Address')\n", (19399, 19433), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((24652, 24695), 'hvm.utils.db.apply_state_dict', 'apply_state_dict', (['account_db', 'genesis_state'], {}), '(account_db, genesis_state)\n', (24668, 24695), False, 'from hvm.utils.db import apply_state_dict\n'), ((24892, 24921), 'hvm.rlp.headers.BlockHeader', 'BlockHeader', ([], {}), '(**genesis_params)\n', (24903, 24921), False, 'from hvm.rlp.headers import BlockHeader, HeaderParams\n'), ((25841, 25884), 'hvm.utils.db.apply_state_dict', 'apply_state_dict', (['account_db', 'genesis_state'], {}), '(account_db, genesis_state)\n', (25857, 25884), False, 'from hvm.utils.db import apply_state_dict\n'), ((26009, 26038), 'hvm.rlp.headers.BlockHeader', 'BlockHeader', ([], {}), '(**genesis_params)\n', (26020, 26038), False, 'from hvm.rlp.headers import BlockHeader, HeaderParams\n'), ((29785, 29830), 'hvm.validation.validate_word', 'validate_word', (['block_hash'], {'title': '"""Block Hash"""'}), "(block_hash, title='Block Hash')\n", (29798, 29830), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((37027, 37094), 'hvm.validation.validate_is_queue_block', 'validate_is_queue_block', (['self.queue_block'], {'title': '"""self.queue_block"""'}), "(self.queue_block, title='self.queue_block')\n", (37050, 37094), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((67993, 68052), 'hvm.validation.validate_uint256', 'validate_uint256', (['window_start_timestamp'], {'title': '"""timestamp"""'}), "(window_start_timestamp, title='timestamp')\n", (68009, 68052), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((100421, 100474), 'hvm.validation.validate_uint256', 'validate_uint256', (['window_timestamp'], {'title': '"""timestamp"""'}), "(window_timestamp, title='timestamp')\n", (100437, 100474), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, 
validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((102314, 102383), 'hvm.types.Timestamp', 'Timestamp', (['(canonical_head.timestamp + min_allowed_time_between_blocks)'], {}), '(canonical_head.timestamp + min_allowed_time_between_blocks)\n', (102323, 102383), False, 'from hvm.types import Timestamp\n'), ((104723, 104767), 'hvm.db.trie.make_trie_root_and_nodes', 'make_trie_root_and_nodes', (['block.transactions'], {}), '(block.transactions)\n', (104747, 104767), False, 'from hvm.db.trie import make_trie_root_and_nodes\n'), ((104939, 104991), 'hvm.db.trie.make_trie_root_and_nodes', 'make_trie_root_and_nodes', (['block.receive_transactions'], {}), '(block.receive_transactions)\n', (104963, 104991), False, 'from hvm.db.trie import make_trie_root_and_nodes\n'), ((107943, 107966), 'hvm.exceptions.NoChronologicalBlocks', 'NoChronologicalBlocks', ([], {}), '()\n', (107964, 107966), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((109155, 109229), 'hvm.validation.validate_uint256', 'validate_uint256', (['current_network_tpc_cap'], {'title': '"""current_network_tpc_cap"""'}), "(current_network_tpc_cap, title='current_network_tpc_cap')\n", (109171, 109229), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((113987, 113997), 'hvm.db.backends.memory.MemoryDB', 'MemoryDB', ([], {}), '()\n', (113995, 113997), False, 'from hvm.db.backends.memory import MemoryDB\n'), ((114892, 114903), 'time.time', 'time.time', ([], {}), '()\n', (114901, 114903), False, 'import time\n'), ((19314, 19362), 'hvm.validation.validate_vm_configuration', 'validate_vm_configuration', (['self.vm_configuration'], {}), '(self.vm_configuration)\n', (19339, 19362), False, 'from hvm.validation import validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp\n'), ((20185, 20204), 'hvm.estimators.get_gas_estimator', 'get_gas_estimator', ([], {}), '()\n', (20202, 20204), False, 'from hvm.estimators import get_gas_estimator\n'), ((21758, 21782), 'hvm.db.read_only.ReadOnlyDB', 'ReadOnlyDB', (['self.base_db'], {}), '(self.base_db)\n', (21768, 21782), False, 'from hvm.db.read_only import ReadOnlyDB\n'), ((21951, 21974), 'hvm.db.journal.JournalDB', 'JournalDB', (['self.base_db'], {}), '(self.base_db)\n', (21960, 21974), False, 'from hvm.db.journal import JournalDB\n'), ((22662, 22685), 'hvm.exceptions.JournalDbNotActivated', 'JournalDbNotActivated', ([], {}), '()\n', (22683, 22685), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, 
ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((22902, 22925), 'hvm.exceptions.JournalDbNotActivated', 'JournalDbNotActivated', ([], {}), '()\n', (22923, 22925), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((23140, 23163), 'hvm.exceptions.JournalDbNotActivated', 'JournalDbNotActivated', ([], {}), '()\n', (23161, 23163), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((23314, 23337), 'hvm.exceptions.JournalDbNotActivated', 'JournalDbNotActivated', ([], {}), '()\n', (23335, 23337), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((45106, 45175), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""The provided tx hash is not for a send transaction"""'], {}), "('The provided tx hash is not for a send transaction')\n", (45121, 45175), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((47776, 47955), 'hvm.exceptions.InvalidHeadRootTimestamp', 'InvalidHeadRootTimestamp', (['"""Could not load block hashes 
for this historical_root_hash_timestamp because we don\'t have a root hash for this window or the previous window."""'], {}), '(\n "Could not load block hashes for this historical_root_hash_timestamp because we don\'t have a root hash for this window or the previous window."\n )\n', (47800, 47955), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((54696, 54706), 'hvm.db.backends.memory.MemoryDB', 'MemoryDB', ([], {}), '()\n', (54704, 54706), False, 'from hvm.db.backends.memory import MemoryDB\n'), ((62516, 62530), 'eth_typing.BlockNumber', 'BlockNumber', (['(0)'], {}), '(0)\n', (62527, 62530), False, 'from eth_typing import Address, BlockNumber, Hash32\n'), ((62634, 62723), 'hvm.exceptions.TriedDeletingGenesisBlock', 'TriedDeletingGenesisBlock', (['"""Attempted to delete genesis block. This is not allowed."""'], {}), "(\n 'Attempted to delete genesis block. This is not allowed.')\n", (62659, 62723), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((67517, 67555), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block.header.chain_address'], {}), '(block.header.chain_address)\n', (67527, 67555), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((69236, 69294), 'hvm.utils.numeric.effecient_diff', 'effecient_diff', (['new_block_hash_list', 'local_block_hash_list'], {}), '(new_block_hash_list, local_block_hash_list)\n', (69250, 69294), False, 'from hvm.utils.numeric import effecient_diff, are_items_in_list_equal\n'), ((99607, 99629), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block.hash'], {}), '(block.hash)\n', (99617, 99629), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((104850, 104903), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""Block has invalid transaction root"""'], {}), "('Block has invalid transaction root')\n", (104865, 104903), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, 
RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((105084, 105145), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""Block has invalid receive transaction root"""'], {}), "('Block has invalid receive transaction root')\n", (105099, 105145), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((106118, 106224), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""Cannot get chronological block window stake for a timestamp before the window"""'], {}), "(\n 'Cannot get chronological block window stake for a timestamp before the window'\n )\n", (106133, 106224), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((112645, 112699), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""Expected a dict. Didn\'t get a dict."""'], {}), '("Expected a dict. 
Didn\'t get a dict.")\n', (112660, 112699), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((114086, 114152), 'hvm.chains.mainnet.TPC_CAP_TEST_GENESIS_PRIVATE_KEY.public_key.to_canonical_address', 'TPC_CAP_TEST_GENESIS_PRIVATE_KEY.public_key.to_canonical_address', ([], {}), '()\n', (114150, 114152), False, 'from hvm.chains.mainnet import MAINNET_TPC_CAP_TEST_GENESIS_PARAMS, MAINNET_TPC_CAP_TEST_GENESIS_STATE, TPC_CAP_TEST_GENESIS_PRIVATE_KEY, MAINNET_TPC_CAP_TEST_BLOCK_TO_IMPORT\n'), ((114938, 114949), 'time.time', 'time.time', ([], {}), '()\n', (114947, 114949), False, 'import time\n'), ((6151, 6162), 'time.time', 'time.time', ([], {}), '()\n', (6160, 6162), False, 'import time\n'), ((30465, 30479), 'eth_typing.BlockNumber', 'BlockNumber', (['(0)'], {}), '(0)\n', (30476, 30479), False, 'from eth_typing import Address, BlockNumber, Hash32\n'), ((56748, 56776), 'hvm.types.Timestamp', 'Timestamp', (['current_timestamp'], {}), '(current_timestamp)\n', (56757, 56776), False, 'from hvm.types import Timestamp\n'), ((58484, 58510), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['wallet_address'], {}), '(wallet_address)\n', (58494, 58510), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((59128, 59175), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['existing_block_header.chain_address'], {}), '(existing_block_header.chain_address)\n', (59138, 59175), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((59177, 59213), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block_parent_header.hash'], {}), '(block_parent_header.hash)\n', (59187, 59213), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((59805, 59838), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['descendant_block_hash'], {}), '(descendant_block_hash)\n', (59815, 59838), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((78763, 79036), 'hvm.exceptions.RewardAmountRoundsToZero', 'RewardAmountRoundsToZero', (['"""The block has no send or receive transactions, and the reward bundle has amount = 0 for all types of rewards. This is not allowed. If this is just a reward block this usually means more time needs to pass before creating reward bundle."""'], {}), "(\n 'The block has no send or receive transactions, and the reward bundle has amount = 0 for all types of rewards. This is not allowed. 
If this is just a reward block this usually means more time needs to pass before creating reward bundle.'\n )\n", (78787, 79036), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((82806, 82847), 'hvm.exceptions.ParentNotFound', 'ParentNotFound', (['"""Parent is unprocessed 2"""'], {}), "('Parent is unprocessed 2')\n", (82820, 82847), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((83153, 83204), 'hvm.exceptions.ParentNotFound', 'ParentNotFound', (['"""Block is at the head of the chain"""'], {}), "('Block is at the head of the chain')\n", (83167, 83204), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((83319, 83386), 'hvm.exceptions.ReplacingBlocksNotAllowed', 'ReplacingBlocksNotAllowed', (['"""Attempted to replace a canonical block"""'], {}), "('Attempted to replace a canonical block')\n", (83344, 83386), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((93470, 93498), 'hvm.exceptions.UnprocessedBlockNotAllowed', 'UnprocessedBlockNotAllowed', ([], {}), '()\n', (93496, 93498), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, 
TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((94563, 94591), 'collections.deque', 'deque', (['children_block_hashes'], {}), '(children_block_hashes)\n', (94568, 94591), False, 'from collections import deque\n'), ((102153, 102165), 'hvm.types.Timestamp', 'Timestamp', (['(0)'], {}), '(0)\n', (102162, 102165), False, 'from hvm.types import Timestamp\n'), ((106294, 106305), 'time.time', 'time.time', ([], {}), '()\n', (106303, 106305), False, 'import time\n'), ((107005, 107016), 'time.time', 'time.time', ([], {}), '()\n', (107014, 107016), False, 'import time\n'), ((31018, 31032), 'eth_typing.BlockNumber', 'BlockNumber', (['n'], {}), '(n)\n', (31029, 31032), False, 'from eth_typing import Address, BlockNumber, Hash32\n'), ((32878, 32903), 'eth_typing.BlockNumber', 'BlockNumber', (['block_number'], {}), '(block_number)\n', (32889, 32903), False, 'from eth_typing import Address, BlockNumber, Hash32\n'), ((39801, 39829), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['transaction.hash'], {}), '(transaction.hash)\n', (39811, 39829), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((39847, 39875), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['transaction_hash'], {}), '(transaction_hash)\n', (39857, 39875), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((76686, 76712), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['wallet_address'], {}), '(wallet_address)\n', (76696, 76712), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((78262, 78369), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""Tried to import a new genesis block on the genesis chain. This is not allowed."""'], {}), "(\n 'Tried to import a new genesis block on the genesis chain. 
This is not allowed.'\n )\n", (78277, 78369), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((79575, 79646), 'hvm.exceptions.ReplacingBlocksNotAllowed', 'ReplacingBlocksNotAllowed', (['"""Attempted to replace an unprocessed block."""'], {}), "('Attempted to replace an unprocessed block.')\n", (79600, 79646), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((83678, 83709), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['self.wallet_address'], {}), '(self.wallet_address)\n', (83688, 83709), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((83711, 83735), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block.sender'], {}), '(block.sender)\n', (83721, 83735), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((92286, 92317), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['imported_block.hash'], {}), '(imported_block.hash)\n', (92296, 92317), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((92515, 92583), 'hvm.exceptions.ValidationError', 'ValidationError', (['"""Attempted to import a block onto the wrong chain."""'], {}), "('Attempted to import a block onto the wrong chain.')\n", (92530, 92583), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((97830, 97875), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['sender_chain_header.chain_address'], {}), '(sender_chain_header.chain_address)\n', (97840, 97875), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((103495, 103518), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['header.hash'], {}), '(header.hash)\n', (103505, 103518), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((107131, 107142), 'time.time', 'time.time', ([], {}), '()\n', (107140, 107142), False, 'import time\n'), ((109972, 109983), 
'time.time', 'time.time', ([], {}), '()\n', (109981, 109983), False, 'import time\n'), ((110077, 110088), 'time.time', 'time.time', ([], {}), '()\n', (110086, 110088), False, 'import time\n'), ((20953, 20964), 'time.time', 'time.time', ([], {}), '()\n', (20962, 20964), False, 'import time\n'), ((35970, 35981), 'time.time', 'time.time', ([], {}), '()\n', (35979, 35981), False, 'import time\n'), ((77999, 78131), 'hvm.exceptions.NoGenesisBlockPresent', 'NoGenesisBlockPresent', (['"""Tried importing a block, but we have no genesis block loaded. Need to load a genesis block first."""'], {}), "(\n 'Tried importing a block, but we have no genesis block loaded. Need to load a genesis block first.'\n )\n", (78020, 78131), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((82272, 82300), 'hvm.exceptions.UnprocessedBlockNotAllowed', 'UnprocessedBlockNotAllowed', ([], {}), '()\n', (82298, 82300), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((90165, 90235), 'hvm.utils.rlp.ensure_imported_block_unchanged', 'ensure_imported_block_unchanged', (['imported_block', 'corrected_micro_block'], {}), '(imported_block, corrected_micro_block)\n', (90196, 90235), False, 'from hvm.utils.rlp import ensure_imported_block_unchanged\n'), ((90356, 90410), 'hvm.utils.rlp.ensure_imported_block_unchanged', 'ensure_imported_block_unchanged', (['imported_block', 'block'], {}), '(imported_block, block)\n', (90387, 90410), False, 'from hvm.utils.rlp import ensure_imported_block_unchanged\n'), ((92756, 92784), 'hvm.exceptions.UnprocessedBlockNotAllowed', 'UnprocessedBlockNotAllowed', ([], {}), '()\n', (92782, 92784), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((93171, 93199), 'hvm.exceptions.UnprocessedBlockNotAllowed', 'UnprocessedBlockNotAllowed', ([], {}), '()\n', (93197, 93199), False, 'from hvm.exceptions 
import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((100039, 100050), 'time.time', 'time.time', ([], {}), '()\n', (100048, 100050), False, 'import time\n'), ((102215, 102226), 'time.time', 'time.time', ([], {}), '()\n', (102224, 102226), False, 'import time\n'), ((109395, 109406), 'time.time', 'time.time', ([], {}), '()\n', (109404, 109406), False, 'import time\n'), ((67173, 67330), 'hvm.exceptions.UnprocessedBlockChildIsProcessed', 'UnprocessedBlockChildIsProcessed', (['"""In process of deleting children of unprocessed block, and found one that is processed. This should never happen"""'], {}), "(\n 'In process of deleting children of unprocessed block, and found one that is processed. This should never happen'\n )\n", (67205, 67330), False, 'from hvm.exceptions import HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent\n'), ((71211, 71320), 'hvm.utils.blocks.reorganize_chronological_block_list_for_correct_chronological_order_at_index', 'reorganize_chronological_block_list_for_correct_chronological_order_at_index', (['block_list', 'i', 'self.logger'], {}), '(\n block_list, i, self.logger)\n', (71287, 71320), False, 'from hvm.utils.blocks import reorganize_chronological_block_list_for_correct_chronological_order_at_index\n'), ((75301, 75335), 'eth_typing.BlockNumber', 'BlockNumber', (['block_number_to_purge'], {}), '(block_number_to_purge)\n', (75312, 75335), False, 'from eth_typing import Address, BlockNumber, Hash32\n'), ((77884, 77898), 'eth_typing.BlockNumber', 'BlockNumber', (['(0)'], {}), '(0)\n', (77895, 77898), False, 'from eth_typing import Address, BlockNumber, Hash32\n'), ((85009, 85022), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['x'], {}), '(x)\n', (85019, 85022), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((85527, 85549), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block_hash'], {}), '(block_hash)\n', (85537, 85549), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((77636, 77647), 'time.time', 'time.time', ([], {}), '()\n', (77645, 77647), False, 'import time\n'), ((105481, 105492), 'time.time', 'time.time', ([], {}), '()\n', (105490, 105492), False, 'import time\n'), ((115656, 115667), 'time.time', 'time.time', ([], {}), '()\n', (115665, 115667), False, 'import time\n'), ((116473, 116484), 'time.time', 'time.time', ([], {}), '()\n', (116482, 116484), False, 'import time\n'), ((117536, 117547), 'time.time', 'time.time', ([], {}), '()\n', 
(117545, 117547), False, 'import time\n'), ((117829, 117840), 'time.time', 'time.time', ([], {}), '()\n', (117838, 117840), False, 'import time\n'), ((80410, 80454), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['existing_unprocessed_parent_hash'], {}), '(existing_unprocessed_parent_hash)\n', (80420, 80454), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((80456, 80492), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block.header.parent_hash'], {}), '(block.header.parent_hash)\n', (80466, 80492), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((80963, 81005), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['existing_canonical_parent_hash'], {}), '(existing_canonical_parent_hash)\n', (80973, 81005), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n'), ((81007, 81043), 'hvm.utils.hexadecimal.encode_hex', 'encode_hex', (['block.header.parent_hash'], {}), '(block.header.parent_hash)\n', (81017, 81043), False, 'from hvm.utils.hexadecimal import encode_hex, decode_hex\n')] |
Integreat/cms-django | integreat_cms/api/v3/regions.py | ab0a89576ae901f4b30aa8e9c65ff43c44654a80 | """
This module includes functions related to the regions API endpoint.
"""
from django.http import JsonResponse
from ...cms.models import Region
from ...cms.constants import region_status
from ..decorators import json_response
def transform_region(region):
"""
    Function to create a JSON representation of a single region object, including whether the region is live (active).

:param region: The region object which should be converted
:type region: ~integreat_cms.cms.models.regions.region.Region
:return: data necessary for API
:rtype: dict
"""
return {
"id": region.id,
"name": region.full_name,
"path": region.slug,
"live": region.status == region_status.ACTIVE,
"prefix": region.prefix,
"name_without_prefix": region.name,
"plz": region.postal_code,
"extras": region.offers.exists(),
"events": region.events_enabled,
"pois": region.locations_enabled,
"push_notifications": region.push_notifications_enabled,
"longitude": region.longitude,
"latitude": region.latitude,
"bounding_box": region.bounding_box.api_representation,
"aliases": region.aliases,
"tunews": region.tunews_enabled,
}
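# Illustrative only (hypothetical fixture, not part of the original module):
# for a region such as "Stadt Augsburg", transform_region() yields a plain
# dict along the lines of
#
#     {"id": 1, "name": "Stadt Augsburg", "path": "augsburg", "live": True,
#      "prefix": "Stadt", "name_without_prefix": "Augsburg", "plz": "86150",
#      ...}
#
# which the JsonResponse calls below serialize without further processing.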
def transform_region_by_status(region):
"""
    Function to create a JSON representation of a single region object for the status-filtered endpoints, i.e. without the "live" flag.

:param region: The region object which should be converted
:type region: ~integreat_cms.cms.models.regions.region.Region
:return: data necessary for API
:rtype: dict
"""
result = transform_region(region)
# Remove status
del result["live"]
return result
@json_response
def regions(_):
"""
    List all regions that are not archived and transform the result into JSON

:return: JSON object according to APIv3 regions endpoint definition
:rtype: ~django.http.JsonResponse
"""
result = list(
map(transform_region, Region.objects.exclude(status=region_status.ARCHIVED))
)
return JsonResponse(
result, safe=False
) # Turn off Safe-Mode to allow serializing arrays
@json_response
def liveregions(_):
"""
    List all regions that are active and transform the result into JSON

:return: JSON object according to APIv3 live regions endpoint definition
:rtype: ~django.http.JsonResponse
"""
result = list(
map(
transform_region_by_status,
Region.objects.filter(status=region_status.ACTIVE),
)
)
return JsonResponse(
result, safe=False
) # Turn off Safe-Mode to allow serializing arrays
@json_response
def hiddenregions(_):
"""
    List all regions that are hidden and transform the result into JSON

:return: JSON object according to APIv3 hidden regions endpoint definition
:rtype: ~django.http.JsonResponse
"""
result = list(
map(
transform_region_by_status,
Region.objects.filter(status=region_status.HIDDEN),
)
)
return JsonResponse(
result, safe=False
) # Turn off Safe-Mode to allow serializing arrays
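# A minimal sketch (an assumption, not part of this module) of how these three
# views could be wired into a Django URLconf; the route strings are hypothetical:
#
#     from django.urls import path
#     from . import regions
#
#     urlpatterns = [
#         path("regions/", regions.regions),
#         path("regions/live/", regions.liveregions),
#         path("regions/hidden/", regions.hiddenregions),
#     ]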
| [((2012, 2044), 'django.http.JsonResponse', 'JsonResponse', (['result'], {'safe': '(False)'}), '(result, safe=False)\n', (2024, 2044), False, 'from django.http import JsonResponse\n'), ((2515, 2547), 'django.http.JsonResponse', 'JsonResponse', (['result'], {'safe': '(False)'}), '(result, safe=False)\n', (2527, 2547), False, 'from django.http import JsonResponse\n'), ((3016, 3048), 'django.http.JsonResponse', 'JsonResponse', (['result'], {'safe': '(False)'}), '(result, safe=False)\n', (3028, 3048), False, 'from django.http import JsonResponse\n')] |
romsok24/epiphany | cli/src/ansible/AnsibleVarsGenerator.py | f058984939561fc8d51288765976118ae12e6c32 | import copy
import os
from cli.src.Config import Config
from cli.src.helpers.build_io import (get_ansible_path,
get_ansible_path_for_build,
get_ansible_vault_path)
from cli.src.helpers.data_loader import (load_all_schema_objs_from_directory,
load_schema_obj, types)
from cli.src.helpers.doc_list_helpers import (ExpectedSingleResultException,
select_first, select_single)
from cli.src.helpers.naming_helpers import to_feature_name, to_role_name
from cli.src.helpers.ObjDict import ObjDict
from cli.src.helpers.yaml_helpers import dump
from cli.src.schema.DefaultMerger import DefaultMerger
from cli.src.Step import Step
from cli.version import VERSION
class AnsibleVarsGenerator(Step):
def __init__(self, inventory_creator=None, inventory_upgrade=None):
super().__init__(__name__)
self.inventory_creator = inventory_creator
self.inventory_upgrade = inventory_upgrade
self.roles_with_generated_vars = []
self.manifest_docs = []
if inventory_creator is not None and inventory_upgrade is None:
self.cluster_model = inventory_creator.cluster_model
self.config_docs = [self.cluster_model] + inventory_creator.config_docs
elif inventory_upgrade is not None and inventory_creator is None:
self.cluster_model = inventory_upgrade.cluster_model
self.config_docs = []
defaults = load_all_schema_objs_from_directory(types.DEFAULT, 'common', 'configuration')
for default in defaults:
config_doc = select_first(inventory_upgrade.config_docs, lambda x: x.kind == default.kind)
if config_doc is None:
self.config_docs.append(default)
else:
self.config_docs.append(config_doc)
self.manifest_docs = inventory_upgrade.manifest_docs
else:
raise Exception('Invalid AnsibleVarsGenerator configuration')
def __enter__(self):
super().__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def generate(self):
self.logger.info('Generate Ansible vars')
self.is_upgrade_run = self.inventory_creator is None
if self.is_upgrade_run:
ansible_dir = get_ansible_path_for_build(self.inventory_upgrade.build_dir)
else:
ansible_dir = get_ansible_path(self.cluster_model.specification.name)
cluster_config_file_path = os.path.join(ansible_dir, 'roles', 'common', 'vars', 'main.yml')
clean_cluster_model = self.get_clean_cluster_model()
with open(cluster_config_file_path, 'w') as stream:
if 'name' in clean_cluster_model:
del clean_cluster_model['name'] # reserved word in ansible!
dump(clean_cluster_model, stream)
if self.is_upgrade_run:
            # For an upgrade we always need common, repository, image_registry, node_exporter and postgresql. Common
            # is already provisioned from the cluster model constructed from the inventory. Because PostgreSQL
            # configuration changes between versions (e.g. wal_keep_segments -> wal_keep_size) and previous parameters
            # are sometimes incompatible with the new ones, defaults are used for template processing.
roles_with_defaults = [
'haproxy', 'image_registry', 'jmx_exporter', 'kafka_exporter',
'node_exporter', 'postgres_exporter', 'postgresql', 'repository'
]
            # Now let's add any external configs we want to load.
roles_with_defaults = [*roles_with_defaults, *self.inventory_upgrade.get_new_config_roles()]
            # In special cases (like haproxy), where the user specifies the majority of the config, it's easier (and
            # less awkward) to re-render config templates instead of modifying (for example with regular expressions)
            # no-longer-compatible config files.
roles_with_manifest = ['filebeat', 'postgresql', 'repository']
else:
roles_with_defaults = self.inventory_creator.get_enabled_roles()
roles_with_manifest = [] # applies only to upgrades
for role in roles_with_defaults:
kind = 'configuration/' + to_feature_name(role)
document = select_first(self.config_docs, lambda x: x.kind == kind)
if document is None:
self.logger.warn('No config document for enabled role: ' + role)
continue
document.specification['provider'] = self.cluster_model.provider
self.write_role_vars(ansible_dir, role, document)
for role in roles_with_manifest:
kind = 'configuration/' + to_feature_name(role)
self.write_role_manifest_vars(ansible_dir, role, kind)
self.populate_group_vars(ansible_dir)
def write_role_vars(self, ansible_dir, role, document, vars_file_name='main.yml'):
vars_dir = os.path.join(ansible_dir, 'roles', to_role_name(role), 'vars')
if not os.path.exists(vars_dir):
os.makedirs(vars_dir)
vars_file_path = os.path.join(vars_dir, vars_file_name)
with open(vars_file_path, 'w') as stream:
if 'name' in document:
del document['name'] # reserved word in ansible!
dump(document, stream)
if vars_file_name == 'main.yml':
self.roles_with_generated_vars.append(to_role_name(role))
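    # Illustrative note (an assumption about typical use): e.g. calling
    # write_role_vars(ansible_dir, 'postgresql', doc) renders the merged
    # configuration document to <ansible_dir>/roles/postgresql/vars/main.yml.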
def write_role_manifest_vars(self, ansible_dir, role, kind):
try:
cluster_model = select_single(self.manifest_docs, lambda x: x.kind == 'epiphany-cluster')
except ExpectedSingleResultException:
return # skip
document = select_first(self.manifest_docs, lambda x: x.kind == kind)
if document is None:
            # If there is no document provided by the user, then fall back to defaults
document = load_schema_obj(types.DEFAULT, 'common', kind)
# Inject the required "version" attribute
document['version'] = VERSION
# Copy the "provider" value from the cluster model
document['provider'] = cluster_model['provider']
# Merge the document with defaults
with DefaultMerger([document]) as doc_merger:
document = doc_merger.run()[0]
self.write_role_vars(ansible_dir, role, document, vars_file_name='manifest.yml')
def populate_group_vars(self, ansible_dir):
main_vars = ObjDict()
main_vars['admin_user'] = self.cluster_model.specification.admin_user
main_vars['validate_certs'] = Config().validate_certs
main_vars['offline_requirements'] = Config().offline_requirements
main_vars['wait_for_pods'] = Config().wait_for_pods
main_vars['is_upgrade_run'] = self.is_upgrade_run
main_vars['roles_with_generated_vars'] = sorted(self.roles_with_generated_vars)
main_vars['upgrade_components'] = Config().upgrade_components
main_vars['epiphany_version'] = VERSION
        # Consider moving this to the provider level.
if self.cluster_model.provider != 'any':
main_vars['k8s_as_cloud_service'] = self.cluster_model.specification.cloud.k8s_as_cloud_service
else:
main_vars['k8s_as_cloud_service'] = False
if self.is_upgrade_run:
shared_config_doc = self.get_shared_config_from_manifest()
else:
shared_config_doc = select_first(self.config_docs, lambda x: x.kind == 'configuration/shared-config')
        # Fall back if there is no trace at all of the shared-config doc
if shared_config_doc is None:
shared_config_doc = load_schema_obj(types.DEFAULT, 'common', 'configuration/shared-config')
self.set_vault_path(shared_config_doc)
main_vars.update(shared_config_doc.specification)
vars_dir = os.path.join(ansible_dir, 'group_vars')
if not os.path.exists(vars_dir):
os.makedirs(vars_dir)
vars_file_name = 'all.yml'
vars_file_path = os.path.join(vars_dir, vars_file_name)
with open(vars_file_path, 'a') as stream:
dump(main_vars, stream)
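        # Note (an assumption about intent): the file is opened in append mode so
        # that any group vars already written for this build are preserved.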
def set_vault_path(self, shared_config):
if shared_config.specification.vault_location == '':
shared_config.specification.vault_tmp_file_location = Config().vault_password_location
cluster_name = self.get_cluster_name()
shared_config.specification.vault_location = get_ansible_vault_path(cluster_name)
def get_cluster_name(self):
if 'name' in self.cluster_model.specification.keys():
return self.cluster_model.specification.name
elif self.inventory_upgrade is not None:
return os.path.basename(self.inventory_upgrade.build_dir)
return 'default'
def get_clean_cluster_model(self):
cluster_model = copy.copy(self.cluster_model)
self.clear_object(cluster_model, 'credentials')
return cluster_model
def get_shared_config_from_manifest(self):
# Reuse shared config from existing manifest
# Shared config contains the use_ha_control_plane flag which is required during upgrades
cluster_model = select_single(self.manifest_docs, lambda x: x.kind == 'epiphany-cluster')
try:
shared_config_doc = select_single(self.manifest_docs, lambda x: x.kind == 'configuration/shared-config')
shared_config_doc['provider'] = cluster_model['provider']
except ExpectedSingleResultException:
            # If there is no shared-config doc inside the manifest file, this is probably a v0.3 cluster.
            # Return None here (there is nothing to merge at this point) and hope that the shared-config
            # doc from defaults will be enough.
return None
        # Remove the unused supported_os list, if present, from the shared-config taken from the manifest,
        # so we avoid namedlist merging errors. This has been refactored since Epicli 1.0.x and is no longer
        # needed at this stage.
if hasattr(shared_config_doc.specification, 'supported_os'):
del shared_config_doc.specification['supported_os']
# Merge the shared config doc with defaults
with DefaultMerger([shared_config_doc]) as doc_merger:
shared_config_doc = doc_merger.run()[0]
del shared_config_doc['provider']
return shared_config_doc
    def clear_object(self, obj_to_clean, key_to_clean):
        # Recursively blank out every value stored under key_to_clean.
        for key, val in obj_to_clean.items():
            if key == key_to_clean:
                obj_to_clean[key] = ''
                continue
            if isinstance(val, ObjDict):
                self.clear_object(val, key_to_clean)
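    # Illustrative only (hypothetical values): given
    #     model = ObjDict({'credentials': ObjDict({'key': 'secret'}), 'name': 'x'})
    # calling self.clear_object(model, 'credentials') leaves
    #     model == {'credentials': '', 'name': 'x'}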
| [((2633, 2697), 'os.path.join', 'os.path.join', (['ansible_dir', '"""roles"""', '"""common"""', '"""vars"""', '"""main.yml"""'], {}), "(ansible_dir, 'roles', 'common', 'vars', 'main.yml')\n", (2645, 2697), False, 'import os\n'), ((5307, 5345), 'os.path.join', 'os.path.join', (['vars_dir', 'vars_file_name'], {}), '(vars_dir, vars_file_name)\n', (5319, 5345), False, 'import os\n'), ((5919, 5977), 'cli.src.helpers.doc_list_helpers.select_first', 'select_first', (['self.manifest_docs', '(lambda x: x.kind == kind)'], {}), '(self.manifest_docs, lambda x: x.kind == kind)\n', (5931, 5977), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n'), ((6676, 6685), 'cli.src.helpers.ObjDict.ObjDict', 'ObjDict', ([], {}), '()\n', (6683, 6685), False, 'from cli.src.helpers.ObjDict import ObjDict\n'), ((8082, 8121), 'os.path.join', 'os.path.join', (['ansible_dir', '"""group_vars"""'], {}), "(ansible_dir, 'group_vars')\n", (8094, 8121), False, 'import os\n'), ((8258, 8296), 'os.path.join', 'os.path.join', (['vars_dir', 'vars_file_name'], {}), '(vars_dir, vars_file_name)\n', (8270, 8296), False, 'import os\n'), ((9095, 9124), 'copy.copy', 'copy.copy', (['self.cluster_model'], {}), '(self.cluster_model)\n', (9104, 9124), False, 'import copy\n'), ((9433, 9506), 'cli.src.helpers.doc_list_helpers.select_single', 'select_single', (['self.manifest_docs', "(lambda x: x.kind == 'epiphany-cluster')"], {}), "(self.manifest_docs, lambda x: x.kind == 'epiphany-cluster')\n", (9446, 9506), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n'), ((2440, 2500), 'cli.src.helpers.build_io.get_ansible_path_for_build', 'get_ansible_path_for_build', (['self.inventory_upgrade.build_dir'], {}), '(self.inventory_upgrade.build_dir)\n', (2466, 2500), False, 'from cli.src.helpers.build_io import get_ansible_path, get_ansible_path_for_build, get_ansible_vault_path\n'), ((2541, 2596), 'cli.src.helpers.build_io.get_ansible_path', 'get_ansible_path', (['self.cluster_model.specification.name'], {}), '(self.cluster_model.specification.name)\n', (2557, 2596), False, 'from cli.src.helpers.build_io import get_ansible_path, get_ansible_path_for_build, get_ansible_vault_path\n'), ((2954, 2987), 'cli.src.helpers.yaml_helpers.dump', 'dump', (['clean_cluster_model', 'stream'], {}), '(clean_cluster_model, stream)\n', (2958, 2987), False, 'from cli.src.helpers.yaml_helpers import dump\n'), ((4483, 4539), 'cli.src.helpers.doc_list_helpers.select_first', 'select_first', (['self.config_docs', '(lambda x: x.kind == kind)'], {}), '(self.config_docs, lambda x: x.kind == kind)\n', (4495, 4539), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n'), ((5178, 5196), 'cli.src.helpers.naming_helpers.to_role_name', 'to_role_name', (['role'], {}), '(role)\n', (5190, 5196), False, 'from cli.src.helpers.naming_helpers import to_feature_name, to_role_name\n'), ((5221, 5245), 'os.path.exists', 'os.path.exists', (['vars_dir'], {}), '(vars_dir)\n', (5235, 5245), False, 'import os\n'), ((5259, 5280), 'os.makedirs', 'os.makedirs', (['vars_dir'], {}), '(vars_dir)\n', (5270, 5280), False, 'import os\n'), ((5510, 5532), 'cli.src.helpers.yaml_helpers.dump', 'dump', (['document', 'stream'], {}), '(document, stream)\n', (5514, 5532), False, 'from cli.src.helpers.yaml_helpers import dump\n'), ((5752, 5825), 'cli.src.helpers.doc_list_helpers.select_single', 'select_single', (['self.manifest_docs', 
"(lambda x: x.kind == 'epiphany-cluster')"], {}), "(self.manifest_docs, lambda x: x.kind == 'epiphany-cluster')\n", (5765, 5825), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n'), ((6116, 6162), 'cli.src.helpers.data_loader.load_schema_obj', 'load_schema_obj', (['types.DEFAULT', '"""common"""', 'kind'], {}), "(types.DEFAULT, 'common', kind)\n", (6131, 6162), False, 'from cli.src.helpers.data_loader import load_all_schema_objs_from_directory, load_schema_obj, types\n'), ((6433, 6458), 'cli.src.schema.DefaultMerger.DefaultMerger', 'DefaultMerger', (['[document]'], {}), '([document])\n', (6446, 6458), False, 'from cli.src.schema.DefaultMerger import DefaultMerger\n'), ((6802, 6810), 'cli.src.Config.Config', 'Config', ([], {}), '()\n', (6808, 6810), False, 'from cli.src.Config import Config\n'), ((6870, 6878), 'cli.src.Config.Config', 'Config', ([], {}), '()\n', (6876, 6878), False, 'from cli.src.Config import Config\n'), ((6937, 6945), 'cli.src.Config.Config', 'Config', ([], {}), '()\n', (6943, 6945), False, 'from cli.src.Config import Config\n'), ((7148, 7156), 'cli.src.Config.Config', 'Config', ([], {}), '()\n', (7154, 7156), False, 'from cli.src.Config import Config\n'), ((7655, 7740), 'cli.src.helpers.doc_list_helpers.select_first', 'select_first', (['self.config_docs', "(lambda x: x.kind == 'configuration/shared-config')"], {}), "(self.config_docs, lambda x: x.kind ==\n 'configuration/shared-config')\n", (7667, 7740), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n'), ((7884, 7955), 'cli.src.helpers.data_loader.load_schema_obj', 'load_schema_obj', (['types.DEFAULT', '"""common"""', '"""configuration/shared-config"""'], {}), "(types.DEFAULT, 'common', 'configuration/shared-config')\n", (7899, 7955), False, 'from cli.src.helpers.data_loader import load_all_schema_objs_from_directory, load_schema_obj, types\n'), ((8137, 8161), 'os.path.exists', 'os.path.exists', (['vars_dir'], {}), '(vars_dir)\n', (8151, 8161), False, 'import os\n'), ((8175, 8196), 'os.makedirs', 'os.makedirs', (['vars_dir'], {}), '(vars_dir)\n', (8186, 8196), False, 'import os\n'), ((8360, 8383), 'cli.src.helpers.yaml_helpers.dump', 'dump', (['main_vars', 'stream'], {}), '(main_vars, stream)\n', (8364, 8383), False, 'from cli.src.helpers.yaml_helpers import dump\n'), ((8698, 8734), 'cli.src.helpers.build_io.get_ansible_vault_path', 'get_ansible_vault_path', (['cluster_name'], {}), '(cluster_name)\n', (8720, 8734), False, 'from cli.src.helpers.build_io import get_ansible_path, get_ansible_path_for_build, get_ansible_vault_path\n'), ((9553, 9641), 'cli.src.helpers.doc_list_helpers.select_single', 'select_single', (['self.manifest_docs', "(lambda x: x.kind == 'configuration/shared-config')"], {}), "(self.manifest_docs, lambda x: x.kind ==\n 'configuration/shared-config')\n", (9566, 9641), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n'), ((10457, 10491), 'cli.src.schema.DefaultMerger.DefaultMerger', 'DefaultMerger', (['[shared_config_doc]'], {}), '([shared_config_doc])\n', (10470, 10491), False, 'from cli.src.schema.DefaultMerger import DefaultMerger\n'), ((1557, 1634), 'cli.src.helpers.data_loader.load_all_schema_objs_from_directory', 'load_all_schema_objs_from_directory', (['types.DEFAULT', '"""common"""', '"""configuration"""'], {}), "(types.DEFAULT, 'common', 'configuration')\n", (1592, 1634), False, 'from 
cli.src.helpers.data_loader import load_all_schema_objs_from_directory, load_schema_obj, types\n'), ((4437, 4458), 'cli.src.helpers.naming_helpers.to_feature_name', 'to_feature_name', (['role'], {}), '(role)\n', (4452, 4458), False, 'from cli.src.helpers.naming_helpers import to_feature_name, to_role_name\n'), ((4899, 4920), 'cli.src.helpers.naming_helpers.to_feature_name', 'to_feature_name', (['role'], {}), '(role)\n', (4914, 4920), False, 'from cli.src.helpers.naming_helpers import to_feature_name, to_role_name\n'), ((5625, 5643), 'cli.src.helpers.naming_helpers.to_role_name', 'to_role_name', (['role'], {}), '(role)\n', (5637, 5643), False, 'from cli.src.helpers.naming_helpers import to_feature_name, to_role_name\n'), ((8557, 8565), 'cli.src.Config.Config', 'Config', ([], {}), '()\n', (8563, 8565), False, 'from cli.src.Config import Config\n'), ((8955, 9005), 'os.path.basename', 'os.path.basename', (['self.inventory_upgrade.build_dir'], {}), '(self.inventory_upgrade.build_dir)\n', (8971, 9005), False, 'import os\n'), ((1701, 1778), 'cli.src.helpers.doc_list_helpers.select_first', 'select_first', (['inventory_upgrade.config_docs', '(lambda x: x.kind == default.kind)'], {}), '(inventory_upgrade.config_docs, lambda x: x.kind == default.kind)\n', (1713, 1778), False, 'from cli.src.helpers.doc_list_helpers import ExpectedSingleResultException, select_first, select_single\n')] |
newtonsspawn/codewars_challenges | Python/4 kyu/Snail/test_snail.py | 62b20d4e729c8ba79eac7cae6a179af57abd45d4 | from unittest import TestCase
from snail import snail
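
# The snail module under test is not included in this file. As an assumption
# for the reader, one minimal implementation consistent with these tests peels
# off the top row, rotates the remainder counter-clockwise, and repeats:
#
#     def snail(array):
#         result = []
#         while array:
#             result += array.pop(0)  # take the whole top row
#             # rotate what is left counter-clockwise
#             array = [list(row) for row in zip(*array)][::-1]
#         return result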
class TestSnail(TestCase):
def test_snail_001(self):
self.assertEqual(snail([[]]), [])
def test_snail_002(self):
self.assertEqual(snail([[1]]), [1])
def test_snail_003(self):
self.assertEqual(snail([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
[1, 2, 3, 6, 9, 8, 7, 4, 5])
def test_snail_004(self):
self.assertEqual(snail(
[[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15],
[16, 17, 18, 19, 20], [21, 22, 23, 24, 25]]),
[1, 2, 3, 4, 5, 10, 15, 20, 25, 24, 23, 22, 21, 16, 11,
6, 7, 8, 9, 14, 19, 18, 17, 12, 13])
def test_snail_005(self):
self.assertEqual(snail([[1, 2, 3, 4, 5, 6], [20, 21, 22, 23, 24, 7],
[19, 32, 33, 34, 25, 8],
[18, 31, 36, 35, 26, 9],
[17, 30, 29, 28, 27, 10],
[16, 15, 14, 13, 12, 11]]),
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36])
def test_snail_006(self):
self.assertEqual(snail([[350]]), [350])
def test_snail_007(self):
self.assertEqual(snail([[545, 588, 42, 119, 791, 866, 142, 699, 611,
400, 465, 373, 30, 71, 950, 813, 850, 652],
[664, 853, 1000, 561, 102, 363, 807, 553, 973,
643, 142, 433, 378, 702, 250, 641, 967, 172],
[908, 928, 776, 82, 547, 224, 730, 158, 169, 8,
111, 847, 891, 142, 906, 609, 443, 211],
[417, 35, 192, 167, 579, 885, 160, 755, 522,
360, 382, 783, 986, 474, 761, 416, 564, 561],
[506, 160, 540, 575, 374, 854, 313, 656, 546,
924, 838, 831, 23, 146, 116, 136, 47, 889],
[932, 515, 627, 982, 886, 609, 67, 966, 262,
953, 299, 246, 488, 526, 524, 855, 954, 752],
[182, 310, 522, 423, 907, 743, 440, 827, 548,
162, 397, 494, 592, 629, 856, 288, 601, 188],
[963, 862, 9, 812, 947, 721, 37, 170, 69, 326,
661, 829, 69, 341, 100, 751, 951, 844],
[252, 831, 293, 346, 830, 639, 657, 425, 294,
47, 477, 786, 852, 821, 858, 438, 251, 296],
[136, 891, 795, 298, 144, 686, 845, 781, 737,
850, 413, 88, 333, 311, 628, 962, 785, 548],
[307, 294, 706, 298, 825, 108, 508, 358, 978,
707, 26, 774, 332, 252, 407, 466, 77, 141],
[803, 134, 246, 768, 431, 724, 448, 362, 875,
983, 188, 254, 332, 249, 162, 167, 911, 639],
[266, 399, 765, 878, 433, 414, 178, 225, 507,
112, 510, 124, 88, 969, 728, 18, 813, 763],
[714, 580, 290, 935, 331, 813, 781, 114, 183,
437, 287, 870, 719, 690, 880, 319, 939, 715],
[283, 165, 518, 34, 109, 638, 327, 3, 369, 979,
696, 845, 34, 498, 736, 372, 166, 931],
[728, 490, 910, 342, 460, 955, 876, 935, 976,
887, 190, 517, 362, 15, 486, 138, 681, 996],
[585, 139, 62, 485, 628, 667, 213, 29, 910, 333,
854, 201, 613, 27, 552, 244, 251, 177],
[222, 791, 454, 246, 525, 626, 58, 512, 642,
561, 309, 674, 607, 441, 728, 782, 375, 113]]),
[545, 588, 42, 119, 791, 866, 142, 699, 611, 400, 465,
373, 30, 71, 950, 813, 850, 652, 172, 211, 561, 889,
752, 188, 844, 296, 548, 141, 639, 763, 715, 931, 996,
177, 113, 375, 782, 728, 441, 607, 674, 309, 561, 642,
512, 58, 626, 525, 246, 454, 791, 222, 585, 728, 283,
714, 266, 803, 307, 136, 252, 963, 182, 932, 506, 417,
908, 664, 853, 1000, 561, 102, 363, 807, 553, 973,
643, 142, 433, 378, 702, 250, 641, 967, 443, 564, 47,
954, 601, 951, 251, 785, 77, 911, 813, 939, 166, 681,
251, 244, 552, 27, 613, 201, 854, 333, 910, 29, 213,
667, 628, 485, 62, 139, 490, 165, 580, 399, 134, 294,
891, 831, 862, 310, 515, 160, 35, 928, 776, 82, 547,
224, 730, 158, 169, 8, 111, 847, 891, 142, 906, 609,
416, 136, 855, 288, 751, 438, 962, 466, 167, 18, 319,
372, 138, 486, 15, 362, 517, 190, 887, 976, 935, 876,
955, 460, 342, 910, 518, 290, 765, 246, 706, 795, 293,
9, 522, 627, 540, 192, 167, 579, 885, 160, 755, 522,
360, 382, 783, 986, 474, 761, 116, 524, 856, 100, 858,
628, 407, 162, 728, 880, 736, 498, 34, 845, 696, 979,
369, 3, 327, 638, 109, 34, 935, 878, 768, 298, 298,
346, 812, 423, 982, 575, 374, 854, 313, 656, 546, 924,
838, 831, 23, 146, 526, 629, 341, 821, 311, 252, 249,
969, 690, 719, 870, 287, 437, 183, 114, 781, 813, 331,
433, 431, 825, 144, 830, 947, 907, 886, 609, 67, 966,
262, 953, 299, 246, 488, 592, 69, 852, 333, 332, 332,
88, 124, 510, 112, 507, 225, 178, 414, 724, 108, 686,
639, 721, 743, 440, 827, 548, 162, 397, 494, 829, 786,
88, 774, 254, 188, 983, 875, 362, 448, 508, 845, 657,
37, 170, 69, 326, 661, 477, 413, 26, 707, 978, 358,
781, 425, 294, 47, 850, 737])
def test_snail_008(self):
self.assertEqual(snail([[844, 865, 787, 987, 255, 928, 812],
[533, 376, 869, 60, 824, 527, 355],
[238, 330, 215, 201, 335, 29, 225],
[828, 63, 172, 620, 315, 361, 758],
[14, 964, 210, 530, 997, 568, 288],
[855, 152, 486, 856, 360, 545, 564],
[549, 259, 544, 508, 793, 934, 567]]),
[844, 865, 787, 987, 255, 928, 812, 355, 225, 758, 288,
564, 567, 934, 793, 508, 544, 259, 549, 855, 14, 828,
238, 533, 376, 869, 60, 824, 527, 29, 361, 568, 545,
360, 856, 486, 152, 964, 63, 330, 215, 201, 335, 315,
997, 530, 210, 172, 620])
def test_snail_009(self):
self.assertEqual(snail([[277, 149, 76, 473, 385, 633, 41, 517, 918, 462,
769, 726, 161, 694, 26, 717, 309, 484],
[822, 156, 851, 683, 303, 638, 818, 714, 303,
509, 353, 557, 51, 592, 663, 475, 725, 40],
[40, 155, 345, 977, 600, 812, 851, 559, 152,
256, 965, 586, 591, 966, 146, 868, 262, 931],
[855, 170, 534, 89, 73, 910, 741, 195, 4, 547,
916, 887, 912, 610, 815, 619, 508, 196],
[600, 735, 378, 713, 511, 639, 703, 269, 326,
650, 223, 993, 760, 894, 430, 705, 896, 814],
[444, 223, 939, 289, 624, 837, 541, 975, 608,
446, 787, 963, 647, 660, 827, 544, 894, 634],
[643, 836, 653, 921, 77, 574, 411, 242, 52, 242,
411, 827, 875, 617, 653, 180, 85, 390],
[592, 287, 28, 699, 663, 170, 548, 812, 792, 68,
376, 733, 147, 475, 803, 513, 815, 515],
[366, 76, 557, 607, 661, 516, 434, 136, 41, 551,
670, 662, 248, 205, 485, 509, 59, 833],
[394, 608, 437, 669, 92, 194, 441, 444, 68, 269,
512, 104, 121, 176, 422, 278, 953, 69],
[187, 714, 933, 50, 576, 276, 594, 283, 258,
268, 95, 111, 353, 139, 342, 274, 141, 69],
[588, 50, 105, 400, 470, 733, 51, 342, 193, 6,
909, 690, 697, 215, 612, 27, 629, 861],
[784, 253, 98, 563, 118, 138, 610, 486, 602,
779, 153, 478, 956, 107, 460, 850, 447, 21],
[690, 48, 219, 72, 384, 261, 474, 383, 632, 868,
922, 826, 651, 612, 684, 339, 418, 743],
[955, 462, 403, 996, 131, 70, 485, 523, 407,
932, 100, 688, 240, 970, 98, 681, 356, 609],
[376, 795, 982, 482, 813, 496, 635, 618, 728,
96, 982, 884, 362, 168, 470, 919, 672, 921],
[327, 201, 195, 628, 731, 453, 778, 719, 751,
115, 429, 675, 983, 281, 389, 396, 876, 484],
[867, 449, 958, 381, 640, 749, 216, 358, 226,
155, 568, 795, 584, 220, 900, 207, 12, 440]]),
[277, 149, 76, 473, 385, 633, 41, 517, 918, 462, 769,
726, 161, 694, 26, 717, 309, 484, 40, 931, 196, 814,
634, 390, 515, 833, 69, 69, 861, 21, 743, 609, 921,
484, 440, 12, 207, 900, 220, 584, 795, 568, 155, 226,
358, 216, 749, 640, 381, 958, 449, 867, 327, 376, 955,
690, 784, 588, 187, 394, 366, 592, 643, 444, 600, 855,
40, 822, 156, 851, 683, 303, 638, 818, 714, 303, 509,
353, 557, 51, 592, 663, 475, 725, 262, 508, 896, 894,
85, 815, 59, 953, 141, 629, 447, 418, 356, 672, 876,
396, 389, 281, 983, 675, 429, 115, 751, 719, 778, 453,
731, 628, 195, 201, 795, 462, 48, 253, 50, 714, 608,
76, 287, 836, 223, 735, 170, 155, 345, 977, 600, 812,
851, 559, 152, 256, 965, 586, 591, 966, 146, 868, 619,
705, 544, 180, 513, 509, 278, 274, 27, 850, 339, 681,
919, 470, 168, 362, 884, 982, 96, 728, 618, 635, 496,
813, 482, 982, 403, 219, 98, 105, 933, 437, 557, 28,
653, 939, 378, 534, 89, 73, 910, 741, 195, 4, 547,
916, 887, 912, 610, 815, 430, 827, 653, 803, 485, 422,
342, 612, 460, 684, 98, 970, 240, 688, 100, 932, 407,
523, 485, 70, 131, 996, 72, 563, 400, 50, 669, 607,
699, 921, 289, 713, 511, 639, 703, 269, 326, 650, 223,
993, 760, 894, 660, 617, 475, 205, 176, 139, 215, 107,
612, 651, 826, 922, 868, 632, 383, 474, 261, 384, 118,
470, 576, 92, 661, 663, 77, 624, 837, 541, 975, 608,
446, 787, 963, 647, 875, 147, 248, 121, 353, 697, 956,
478, 153, 779, 602, 486, 610, 138, 733, 276, 194, 516,
170, 574, 411, 242, 52, 242, 411, 827, 733, 662, 104,
111, 690, 909, 6, 193, 342, 51, 594, 441, 434, 548,
812, 792, 68, 376, 670, 512, 95, 268, 258, 283, 444,
136, 41, 551, 269, 68])
def test_snail_010(self):
self.assertEqual(snail([[831, 609, 235, 391, 645, 469, 352, 982, 96,
596, 79, 460, 438, 280, 390],
[639, 19, 257, 411, 862, 508, 652, 265, 609,
188, 443, 425, 584, 11, 329],
[616, 731, 442, 315, 530, 954, 306, 455, 808,
921, 604, 282, 695, 778, 711],
[205, 735, 423, 803, 480, 736, 47, 13, 478, 960,
268, 844, 611, 102, 489],
[271, 314, 134, 650, 634, 984, 925, 565, 67,
651, 139, 697, 735, 616, 83],
[124, 381, 202, 355, 488, 99, 269, 486, 900,
601, 449, 777, 607, 702, 504],
[259, 357, 104, 126, 784, 649, 30, 243, 716,
436, 917, 272, 629, 864, 131],
[333, 402, 81, 766, 352, 14, 227, 796, 572, 623,
176, 196, 870, 5, 822],
[469, 67, 286, 430, 711, 336, 78, 384, 71, 783,
832, 458, 940, 511, 160],
[783, 286, 352, 679, 233, 493, 549, 83, 137,
498, 450, 214, 856, 925, 585],
[360, 663, 80, 307, 411, 97, 42, 857, 865, 954,
30, 778, 691, 880, 898],
[354, 373, 818, 619, 465, 957, 268, 876, 19, 58,
163, 138, 283, 970, 267],
[773, 79, 892, 808, 810, 35, 147, 377, 502, 400,
742, 345, 35, 120, 859],
[933, 643, 548, 241, 817, 661, 936, 837, 571,
596, 177, 296, 531, 836, 805],
[915, 268, 534, 369, 791, 90, 843, 104, 293, 92,
270, 306, 226, 797, 903]]),
[831, 609, 235, 391, 645, 469, 352, 982, 96, 596, 79,
460, 438, 280, 390, 329, 711, 489, 83, 504, 131, 822,
160, 585, 898, 267, 859, 805, 903, 797, 226, 306, 270,
92, 293, 104, 843, 90, 791, 369, 534, 268, 915, 933,
773, 354, 360, 783, 469, 333, 259, 124, 271, 205, 616,
639, 19, 257, 411, 862, 508, 652, 265, 609, 188, 443,
425, 584, 11, 778, 102, 616, 702, 864, 5, 511, 925,
880, 970, 120, 836, 531, 296, 177, 596, 571, 837, 936,
661, 817, 241, 548, 643, 79, 373, 663, 286, 67, 402,
357, 381, 314, 735, 731, 442, 315, 530, 954, 306, 455,
808, 921, 604, 282, 695, 611, 735, 607, 629, 870, 940,
856, 691, 283, 35, 345, 742, 400, 502, 377, 147, 35,
810, 808, 892, 818, 80, 352, 286, 81, 104, 202, 134,
423, 803, 480, 736, 47, 13, 478, 960, 268, 844, 697,
777, 272, 196, 458, 214, 778, 138, 163, 58, 19, 876,
268, 957, 465, 619, 307, 679, 430, 766, 126, 355, 650,
634, 984, 925, 565, 67, 651, 139, 449, 917, 176, 832,
450, 30, 954, 865, 857, 42, 97, 411, 233, 711, 352,
784, 488, 99, 269, 486, 900, 601, 436, 623, 783, 498,
137, 83, 549, 493, 336, 14, 649, 30, 243, 716, 572,
71, 384, 78, 227, 796])
def test_snail_011(self):
self.assertEqual(snail([[900, 61, 525, 325, 420, 389, 718, 967, 116,
156, 877, 301, 815],
[325, 921, 851, 66, 226, 759, 166, 754, 972,
199, 26, 673, 81],
[953, 211, 277, 170, 498, 206, 11, 766, 742,
101, 661, 674, 501],
[613, 645, 897, 883, 24, 499, 408, 404, 93, 464,
815, 546, 830],
[103, 374, 494, 259, 597, 463, 83, 658, 867,
321, 311, 942, 265],
[279, 214, 989, 896, 644, 152, 130, 439, 917,
664, 293, 835, 469],
[114, 212, 935, 146, 589, 399, 128, 61, 242,
1000, 695, 340, 119],
[67, 258, 342, 377, 207, 186, 296, 249, 902,
607, 168, 151, 890],
[331, 274, 68, 643, 694, 918, 141, 718, 26, 659,
786, 247, 685],
[760, 128, 36, 115, 509, 292, 665, 755, 426,
380, 813, 1000, 366],
[459, 285, 200, 835, 851, 925, 217, 506, 749,
313, 546, 588, 902],
[475, 556, 67, 602, 323, 842, 248, 103, 413,
276, 513, 254, 478],
[478, 749, 519, 165, 158, 393, 952, 614, 291,
781, 344, 774, 42]]),
[900, 61, 525, 325, 420, 389, 718, 967, 116, 156, 877,
301, 815, 81, 501, 830, 265, 469, 119, 890, 685, 366,
902, 478, 42, 774, 344, 781, 291, 614, 952, 393, 158,
165, 519, 749, 478, 475, 459, 760, 331, 67, 114, 279,
103, 613, 953, 325, 921, 851, 66, 226, 759, 166, 754,
972, 199, 26, 673, 674, 546, 942, 835, 340, 151, 247,
1000, 588, 254, 513, 276, 413, 103, 248, 842, 323,
602, 67, 556, 285, 128, 274, 258, 212, 214, 374, 645,
211, 277, 170, 498, 206, 11, 766, 742, 101, 661, 815,
311, 293, 695, 168, 786, 813, 546, 313, 749, 506, 217,
925, 851, 835, 200, 36, 68, 342, 935, 989, 494, 897,
883, 24, 499, 408, 404, 93, 464, 321, 664, 1000, 607,
659, 380, 426, 755, 665, 292, 509, 115, 643, 377, 146,
896, 259, 597, 463, 83, 658, 867, 917, 242, 902, 26,
718, 141, 918, 694, 207, 589, 644, 152, 130, 439, 61,
249, 296, 186, 399, 128])
def test_snail_012(self):
self.assertEqual(snail([[743, 389, 404, 786, 6, 509, 887, 481, 858, 117,
671, 344, 7, 855, 551, 838, 500, 736, 981,
342],
[823, 940, 897, 877, 616, 425, 425, 300, 769,
780, 755, 505, 48, 339, 987, 285, 118, 949,
245, 644],
[68, 37, 515, 914, 885, 247, 552, 998, 53, 782,
913, 34, 413, 744, 462, 794, 589, 405, 233,
850],
[905, 208, 712, 995, 261, 154, 768, 118, 908,
452, 706, 612, 584, 638, 480, 969, 345, 780,
435, 898],
[714, 11, 654, 957, 564, 362, 231, 41, 721, 254,
202, 137, 126, 174, 832, 661, 382, 654, 516,
300],
[218, 667, 767, 610, 339, 531, 335, 234, 53,
735, 742, 818, 233, 26, 634, 229, 316, 436,
999, 348],
[943, 451, 142, 545, 186, 542, 934, 22, 287,
166, 63, 495, 13, 433, 739, 270, 535, 305, 272,
254],
[322, 892, 751, 856, 280, 706, 632, 796, 507,
633, 52, 86, 116, 753, 489, 294, 869, 135, 565,
102],
[691, 412, 615, 389, 973, 462, 624, 172, 170,
56, 744, 558, 339, 871, 878, 495, 810, 454,
349, 261],
[545, 378, 844, 494, 172, 465, 897, 608, 755,
74, 367, 853, 407, 865, 168, 644, 477, 73, 166,
822],
[428, 985, 729, 790, 40, 290, 549, 491, 160,
429, 113, 379, 704, 432, 225, 713, 634, 879,
837, 958],
[803, 796, 762, 778, 917, 794, 792, 752, 325,
953, 986, 867, 35, 957, 623, 662, 916, 513,
324, 185],
[190, 680, 689, 189, 78, 591, 532, 174, 927,
376, 635, 12, 908, 253, 569, 267, 396, 112,
180, 22],
[797, 925, 775, 831, 921, 87, 748, 141, 33, 45,
194, 270, 661, 78, 968, 333, 132, 976, 788,
137],
[854, 147, 902, 213, 365, 342, 962, 662, 491,
86, 701, 493, 736, 705, 115, 472, 354, 815,
240, 24],
[595, 473, 899, 866, 157, 973, 725, 107, 417,
989, 205, 921, 98, 632, 234, 938, 112, 181,
604, 707],
[889, 203, 968, 727, 409, 651, 695, 854, 423,
968, 745, 680, 560, 935, 54, 862, 790, 343,
884, 627],
[135, 694, 358, 75, 237, 924, 493, 758, 998,
279, 80, 86, 174, 991, 585, 251, 99, 718, 611,
462],
[109, 360, 882, 794, 631, 587, 73, 10, 696, 582,
352, 797, 897, 58, 246, 277, 690, 511, 495,
437],
[99, 898, 540, 657, 563, 267, 39, 556, 819, 738,
888, 8, 140, 400, 619, 267, 859, 919, 301,
265]]),
[743, 389, 404, 786, 6, 509, 887, 481, 858, 117, 671,
344, 7, 855, 551, 838, 500, 736, 981, 342, 644, 850,
898, 300, 348, 254, 102, 261, 822, 958, 185, 22, 137,
24, 707, 627, 462, 437, 265, 301, 919, 859, 267, 619,
400, 140, 8, 888, 738, 819, 556, 39, 267, 563, 657,
540, 898, 99, 109, 135, 889, 595, 854, 797, 190, 803,
428, 545, 691, 322, 943, 218, 714, 905, 68, 823, 940,
897, 877, 616, 425, 425, 300, 769, 780, 755, 505, 48,
339, 987, 285, 118, 949, 245, 233, 435, 516, 999, 272,
565, 349, 166, 837, 324, 180, 788, 240, 604, 884, 611,
495, 511, 690, 277, 246, 58, 897, 797, 352, 582, 696,
10, 73, 587, 631, 794, 882, 360, 694, 203, 473, 147,
925, 680, 796, 985, 378, 412, 892, 451, 667, 11, 208,
37, 515, 914, 885, 247, 552, 998, 53, 782, 913, 34,
413, 744, 462, 794, 589, 405, 780, 654, 436, 305, 135,
454, 73, 879, 513, 112, 976, 815, 181, 343, 718, 99,
251, 585, 991, 174, 86, 80, 279, 998, 758, 493, 924,
237, 75, 358, 968, 899, 902, 775, 689, 762, 729, 844,
615, 751, 142, 767, 654, 712, 995, 261, 154, 768, 118,
908, 452, 706, 612, 584, 638, 480, 969, 345, 382, 316,
535, 869, 810, 477, 634, 916, 396, 132, 354, 112, 790,
862, 54, 935, 560, 680, 745, 968, 423, 854, 695, 651,
409, 727, 866, 213, 831, 189, 778, 790, 494, 389, 856,
545, 610, 957, 564, 362, 231, 41, 721, 254, 202, 137,
126, 174, 832, 661, 229, 270, 294, 495, 644, 713, 662,
267, 333, 472, 938, 234, 632, 98, 921, 205, 989, 417,
107, 725, 973, 157, 365, 921, 78, 917, 40, 172, 973,
280, 186, 339, 531, 335, 234, 53, 735, 742, 818, 233,
26, 634, 739, 489, 878, 168, 225, 623, 569, 968, 115,
705, 736, 493, 701, 86, 491, 662, 962, 342, 87, 591,
794, 290, 465, 462, 706, 542, 934, 22, 287, 166, 63,
495, 13, 433, 753, 871, 865, 432, 957, 253, 78, 661,
270, 194, 45, 33, 141, 748, 532, 792, 549, 897, 624,
632, 796, 507, 633, 52, 86, 116, 339, 407, 704, 35,
908, 12, 635, 376, 927, 174, 752, 491, 608, 172, 170,
56, 744, 558, 853, 379, 867, 986, 953, 325, 160, 755,
74, 367, 113, 429])
def test_snail_013(self):
self.assertEqual(snail([[567]]), [567])
def test_snail_014(self):
self.assertEqual(snail(
[[724, 455, 919, 922, 779, 711, 36, 791, 104, 490, 480],
[95, 252, 691, 12, 786, 563, 890, 446, 275, 899, 930],
[359, 844, 866, 827, 758, 81, 441, 768, 499, 983, 438],
[860, 385, 512, 34, 351, 822, 501, 130, 111, 249, 25],
[638, 30, 5, 102, 998, 888, 800, 651, 402, 40, 236],
[872, 21, 731, 902, 613, 442, 437, 581, 710, 453, 877],
[236, 661, 418, 662, 869, 936, 251, 302, 474, 578, 18],
[413, 451, 241, 737, 641, 677, 263, 617, 9, 172, 246],
[628, 390, 511, 786, 219, 833, 722, 419, 743, 695, 400],
[711, 986, 834, 181, 855, 780, 869, 238, 931, 993, 203],
[625, 896, 172, 269, 273, 722, 528, 434, 211, 278, 645]]),
[724, 455, 919, 922, 779, 711, 36, 791, 104, 490, 480,
930, 438, 25, 236, 877, 18, 246, 400, 203, 645, 278,
211, 434, 528, 722, 273, 269, 172, 896, 625, 711, 628,
413, 236, 872, 638, 860, 359, 95, 252, 691, 12, 786,
563, 890, 446, 275, 899, 983, 249, 40, 453, 578, 172,
695, 993, 931, 238, 869, 780, 855, 181, 834, 986, 390,
451, 661, 21, 30, 385, 844, 866, 827, 758, 81, 441,
768, 499, 111, 402, 710, 474, 9, 743, 419, 722, 833,
219, 786, 511, 241, 418, 731, 5, 512, 34, 351, 822,
501, 130, 651, 581, 302, 617, 263, 677, 641, 737, 662,
902, 102, 998, 888, 800, 437, 251, 936, 869, 613,
442])
def test_snail_015(self):
self.assertEqual(snail([[547, 471, 740, 487, 758, 466, 885, 554, 312,
618, 76, 890, 416, 621, 45, 33, 14],
[935, 878, 428, 721, 79, 762, 116, 797, 676,
755, 7, 142, 464, 861, 192, 943, 822],
[100, 325, 962, 434, 413, 313, 908, 842, 366,
618, 803, 480, 391, 263, 122, 148, 582],
[281, 741, 182, 236, 351, 611, 588, 857, 354,
837, 867, 258, 508, 882, 305, 396, 796],
[127, 795, 960, 196, 93, 41, 113, 949, 999, 880,
215, 844, 86, 229, 436, 746, 224],
[709, 283, 219, 254, 913, 900, 537, 617, 80, 18,
944, 372, 805, 981, 798, 380, 868],
[741, 7, 686, 690, 987, 382, 38, 560, 89, 889,
243, 684, 951, 686, 795, 711, 304],
[309, 225, 691, 223, 245, 377, 786, 669, 126,
835, 245, 62, 803, 234, 486, 580, 192],
[895, 172, 347, 645, 113, 700, 419, 573, 987,
403, 527, 893, 348, 508, 530, 558, 477],
[307, 725, 128, 975, 498, 796, 359, 268, 204,
165, 349, 942, 527, 73, 815, 372, 749],
[848, 950, 312, 560, 350, 943, 576, 873, 230, 4,
807, 561, 944, 629, 422, 342, 678],
[275, 41, 349, 925, 579, 139, 836, 777, 256,
422, 884, 587, 126, 836, 347, 692, 87],
[351, 100, 739, 316, 666, 372, 441, 858, 25,
747, 474, 234, 943, 393, 530, 336, 185],
[784, 847, 392, 698, 866, 494, 370, 12, 221,
689, 428, 491, 15, 677, 118, 496, 941],
[748, 782, 298, 359, 981, 334, 520, 809, 253,
69, 70, 909, 7, 662, 574, 128, 125],
[570, 682, 863, 589, 421, 147, 262, 647, 749,
76, 468, 740, 107, 277, 484, 905, 399],
[936, 382, 383, 764, 679, 634, 999, 371, 34,
581, 336, 178, 137, 860, 269, 341, 166]]),
[547, 471, 740, 487, 758, 466, 885, 554, 312, 618, 76,
890, 416, 621, 45, 33, 14, 822, 582, 796, 224, 868,
304, 192, 477, 749, 678, 87, 185, 941, 125, 399, 166,
341, 269, 860, 137, 178, 336, 581, 34, 371, 999, 634,
679, 764, 383, 382, 936, 570, 748, 784, 351, 275, 848,
307, 895, 309, 741, 709, 127, 281, 100, 935, 878, 428,
721, 79, 762, 116, 797, 676, 755, 7, 142, 464, 861,
192, 943, 148, 396, 746, 380, 711, 580, 558, 372, 342,
692, 336, 496, 128, 905, 484, 277, 107, 740, 468, 76,
749, 647, 262, 147, 421, 589, 863, 682, 782, 847, 100,
41, 950, 725, 172, 225, 7, 283, 795, 741, 325, 962,
434, 413, 313, 908, 842, 366, 618, 803, 480, 391, 263,
122, 305, 436, 798, 795, 486, 530, 815, 422, 347, 530,
118, 574, 662, 7, 909, 70, 69, 253, 809, 520, 334,
981, 359, 298, 392, 739, 349, 312, 128, 347, 691, 686,
219, 960, 182, 236, 351, 611, 588, 857, 354, 837, 867,
258, 508, 882, 229, 981, 686, 234, 508, 73, 629, 836,
393, 677, 15, 491, 428, 689, 221, 12, 370, 494, 866,
698, 316, 925, 560, 975, 645, 223, 690, 254, 196, 93,
41, 113, 949, 999, 880, 215, 844, 86, 805, 951, 803,
348, 527, 944, 126, 943, 234, 474, 747, 25, 858, 441,
372, 666, 579, 350, 498, 113, 245, 987, 913, 900, 537,
617, 80, 18, 944, 372, 684, 62, 893, 942, 561, 587,
884, 422, 256, 777, 836, 139, 943, 796, 700, 377, 382,
38, 560, 89, 889, 243, 245, 527, 349, 807, 4, 230,
873, 576, 359, 419, 786, 669, 126, 835, 403, 165, 204,
268, 573, 987])
def test_snail_016(self):
self.assertEqual(snail([[665, 175], [31, 103]]), [665, 175, 103, 31])
def test_snail_017(self):
self.assertEqual(snail([[755]]), [755])
def test_snail_018(self):
self.assertEqual(snail([[126]]), [126])
def test_snail_019(self):
self.assertEqual(snail([[636, 479, 441, 159, 593, 904, 31, 21, 198],
[558, 377, 166, 504, 919, 20, 495, 71, 899],
[955, 466, 168, 459, 223, 535, 369, 881, 709],
[814, 54, 762, 941, 804, 810, 498, 583, 828],
[678, 489, 88, 976, 967, 218, 494, 1000, 550],
[501, 310, 668, 403, 558, 697, 247, 393, 990],
[346, 220, 92, 707, 460, 106, 187, 606, 447],
[589, 900, 867, 818, 647, 180, 878, 809, 191],
[278, 820, 427, 859, 985, 594, 218, 851, 286]]),
[636, 479, 441, 159, 593, 904, 31, 21, 198, 899, 709,
828, 550, 990, 447, 191, 286, 851, 218, 594, 985, 859,
427, 820, 278, 589, 346, 501, 678, 814, 955, 558, 377,
166, 504, 919, 20, 495, 71, 881, 583, 1000, 393, 606,
809, 878, 180, 647, 818, 867, 900, 220, 310, 489, 54,
466, 168, 459, 223, 535, 369, 498, 494, 247, 187, 106,
460, 707, 92, 668, 88, 762, 941, 804, 810, 218, 697,
558, 403, 976, 967])
def test_snail_020(self):
self.assertEqual(snail([[34, 174, 567, 523, 884, 681, 348, 879],
[860, 127, 97, 983, 245, 516, 214, 358],
[812, 405, 787, 630, 856, 384, 973, 803],
[452, 925, 253, 481, 678, 517, 246, 855],
[471, 121, 342, 671, 92, 770, 690, 538],
[706, 207, 63, 874, 366, 336, 848, 708],
[771, 637, 708, 977, 977, 3, 562, 324],
[453, 816, 461, 143, 874, 992, 346, 923]]),
[34, 174, 567, 523, 884, 681, 348, 879, 358, 803, 855,
538, 708, 324, 923, 346, 992, 874, 143, 461, 816, 453,
771, 706, 471, 452, 812, 860, 127, 97, 983, 245, 516,
214, 973, 246, 690, 848, 562, 3, 977, 977, 708, 637,
207, 121, 925, 405, 787, 630, 856, 384, 517, 770, 336,
366, 874, 63, 342, 253, 481, 678, 92, 671])
def test_snail_021(self):
self.assertEqual(snail([[950, 222, 988, 710, 321, 798, 51],
[640, 844, 782, 506, 155, 308, 384],
[703, 52, 197, 723, 690, 468, 962],
[326, 195, 134, 216, 302, 503, 212],
[718, 323, 17, 449, 601, 380, 396],
[985, 698, 502, 864, 257, 804, 942],
[888, 418, 187, 880, 152, 432, 651]]),
[950, 222, 988, 710, 321, 798, 51, 384, 962, 212, 396,
942, 651, 432, 152, 880, 187, 418, 888, 985, 718, 326,
703, 640, 844, 782, 506, 155, 308, 468, 503, 380, 804,
257, 864, 502, 698, 323, 195, 52, 197, 723, 690, 302,
601, 449, 17, 134, 216])
def test_snail_022(self):
self.assertEqual(snail([[188, 383, 11, 265, 829, 552, 184, 587, 149,
839, 640, 638, 292, 990],
[523, 992, 378, 958, 526, 735, 753, 216, 781,
183, 273, 433, 458, 900],
[645, 764, 450, 273, 769, 871, 125, 983, 864,
318, 160, 300, 677, 990],
[245, 169, 676, 300, 81, 19, 481, 549, 922, 13,
798, 37, 785, 831],
[202, 912, 399, 946, 877, 577, 211, 149, 515, 7,
783, 194, 903, 458],
[241, 530, 605, 143, 110, 318, 450, 365, 300,
901, 863, 973, 997, 46],
[217, 471, 358, 537, 270, 529, 512, 306, 402,
11, 275, 228, 737, 751],
[231, 344, 693, 847, 723, 898, 87, 700, 558,
116, 927, 425, 220, 505],
[119, 851, 664, 891, 32, 670, 224, 37, 428, 45,
679, 170, 522, 181],
[506, 264, 274, 87, 567, 324, 203, 715, 628,
288, 836, 353, 367, 458],
[377, 859, 308, 788, 792, 211, 738, 314, 972,
557, 583, 789, 132, 271],
[483, 158, 749, 560, 743, 592, 710, 442, 650,
896, 323, 221, 309, 299],
[858, 549, 118, 588, 674, 975, 799, 910, 465,
453, 139, 448, 537, 680],
[713, 851, 964, 542, 64, 296, 923, 440, 225,
479, 744, 119, 144, 399]]),
[188, 383, 11, 265, 829, 552, 184, 587, 149, 839, 640,
638, 292, 990, 900, 990, 831, 458, 46, 751, 505, 181,
458, 271, 299, 680, 399, 144, 119, 744, 479, 225, 440,
923, 296, 64, 542, 964, 851, 713, 858, 483, 377, 506,
119, 231, 217, 241, 202, 245, 645, 523, 992, 378, 958,
526, 735, 753, 216, 781, 183, 273, 433, 458, 677, 785,
903, 997, 737, 220, 522, 367, 132, 309, 537, 448, 139,
453, 465, 910, 799, 975, 674, 588, 118, 549, 158, 859,
264, 851, 344, 471, 530, 912, 169, 764, 450, 273, 769,
871, 125, 983, 864, 318, 160, 300, 37, 194, 973, 228,
425, 170, 353, 789, 221, 323, 896, 650, 442, 710, 592,
743, 560, 749, 308, 274, 664, 693, 358, 605, 399, 676,
300, 81, 19, 481, 549, 922, 13, 798, 783, 863, 275,
927, 679, 836, 583, 557, 972, 314, 738, 211, 792, 788,
87, 891, 847, 537, 143, 946, 877, 577, 211, 149, 515,
7, 901, 11, 116, 45, 288, 628, 715, 203, 324, 567, 32,
723, 270, 110, 318, 450, 365, 300, 402, 558, 428, 37,
224, 670, 898, 529, 512, 306, 700, 87])
def test_snail_023(self):
self.assertEqual(snail([[903, 852, 365, 142, 106, 848, 913, 461, 732,
281, 800, 952, 711, 122],
[805, 299, 188, 853, 984, 79, 432, 280, 510,
925, 155, 124, 736, 567],
[793, 219, 758, 522, 833, 232, 24, 494, 164,
365, 205, 548, 145, 603],
[711, 113, 979, 976, 706, 457, 185, 895, 310,
106, 142, 270, 209, 577],
[866, 160, 28, 737, 871, 900, 799, 516, 203,
294, 45, 256, 242, 397],
[901, 606, 892, 620, 61, 398, 300, 14, 365, 616,
230, 82, 352, 98],
[441, 320, 684, 572, 254, 331, 401, 375, 970,
223, 65, 26, 167, 858],
[915, 104, 113, 774, 436, 832, 181, 939, 238,
90, 67, 227, 426, 55],
[846, 135, 332, 105, 110, 301, 794, 431, 860,
715, 201, 69, 744, 657],
[341, 691, 666, 61, 827, 814, 82, 276, 274, 888,
738, 387, 429, 69],
[706, 204, 421, 382, 258, 466, 97, 189, 893,
523, 910, 633, 510, 351],
[560, 109, 533, 541, 825, 571, 608, 542, 92,
385, 694, 762, 465, 620],
[369, 509, 928, 286, 860, 142, 4, 926, 657, 697,
743, 858, 430, 638],
[812, 243, 974, 854, 283, 573, 121, 48, 71, 536,
561, 687, 375, 884]]),
[903, 852, 365, 142, 106, 848, 913, 461, 732, 281, 800,
952, 711, 122, 567, 603, 577, 397, 98, 858, 55, 657,
69, 351, 620, 638, 884, 375, 687, 561, 536, 71, 48,
121, 573, 283, 854, 974, 243, 812, 369, 560, 706, 341,
846, 915, 441, 901, 866, 711, 793, 805, 299, 188, 853,
984, 79, 432, 280, 510, 925, 155, 124, 736, 145, 209,
242, 352, 167, 426, 744, 429, 510, 465, 430, 858, 743,
697, 657, 926, 4, 142, 860, 286, 928, 509, 109, 204,
691, 135, 104, 320, 606, 160, 113, 219, 758, 522, 833,
232, 24, 494, 164, 365, 205, 548, 270, 256, 82, 26,
227, 69, 387, 633, 762, 694, 385, 92, 542, 608, 571,
825, 541, 533, 421, 666, 332, 113, 684, 892, 28, 979,
976, 706, 457, 185, 895, 310, 106, 142, 45, 230, 65,
67, 201, 738, 910, 523, 893, 189, 97, 466, 258, 382,
61, 105, 774, 572, 620, 737, 871, 900, 799, 516, 203,
294, 616, 223, 90, 715, 888, 274, 276, 82, 814, 827,
110, 436, 254, 61, 398, 300, 14, 365, 970, 238, 860,
431, 794, 301, 832, 331, 401, 375, 939, 181])
def test_snail_024(self):
self.assertEqual(snail([[733]]), [733])
def test_snail_025(self):
self.assertEqual(snail([[776, 298, 262, 318, 957, 178, 428, 566, 345,
169, 434, 817, 494, 398, 648, 512, 314, 465],
[843, 563, 885, 994, 556, 571, 786, 143, 731,
828, 992, 701, 211, 989, 361, 904, 168, 175],
[153, 906, 802, 413, 532, 445, 864, 275, 891,
169, 899, 36, 278, 126, 691, 437, 199, 30],
[449, 454, 466, 728, 660, 493, 312, 492, 198,
771, 359, 787, 302, 121, 292, 282, 739, 958],
[798, 332, 106, 365, 874, 905, 831, 462, 88,
380, 443, 602, 925, 421, 564, 986, 446, 580],
[78, 187, 603, 551, 283, 789, 262, 542, 551,
422, 581, 100, 108, 574, 249, 473, 606, 83],
[359, 14, 876, 400, 826, 868, 779, 67, 946, 568,
826, 561, 582, 815, 72, 771, 851, 21],
[41, 860, 746, 556, 979, 831, 335, 126, 212,
701, 18, 318, 725, 944, 65, 802, 182, 433],
[746, 66, 844, 140, 842, 49, 547, 451, 436, 434,
72, 973, 2, 212, 311, 691, 546, 176],
[630, 510, 740, 7, 888, 439, 231, 788, 524, 270,
126, 558, 969, 576, 166, 393, 856, 548],
[538, 867, 432, 194, 149, 678, 379, 801, 182,
738, 209, 161, 950, 810, 869, 627, 395, 1000],
[523, 863, 18, 340, 416, 658, 734, 699, 538, 62,
740, 808, 202, 69, 895, 785, 882, 368],
[997, 453, 658, 870, 438, 799, 870, 257, 681,
887, 109, 40, 178, 475, 550, 283, 90, 167],
[243, 774, 470, 223, 518, 660, 730, 117, 885,
377, 305, 744, 622, 484, 789, 498, 464, 837],
[753, 492, 372, 529, 47, 461, 160, 259, 282,
983, 73, 192, 366, 101, 307, 257, 89, 968],
[135, 25, 644, 83, 479, 794, 845, 60, 310, 821,
239, 247, 713, 343, 405, 407, 308, 63],
[297, 590, 149, 649, 317, 843, 23, 652, 69, 819,
886, 381, 411, 781, 477, 672, 822, 185],
[642, 274, 676, 957, 888, 269, 954, 78, 8, 944,
730, 846, 83, 218, 865, 327, 705, 629]]),
[776, 298, 262, 318, 957, 178, 428, 566, 345, 169, 434,
817, 494, 398, 648, 512, 314, 465, 175, 30, 958, 580,
83, 21, 433, 176, 548, 1000, 368, 167, 837, 968, 63,
185, 629, 705, 327, 865, 218, 83, 846, 730, 944, 8,
78, 954, 269, 888, 957, 676, 274, 642, 297, 135, 753,
243, 997, 523, 538, 630, 746, 41, 359, 78, 798, 449,
153, 843, 563, 885, 994, 556, 571, 786, 143, 731, 828,
992, 701, 211, 989, 361, 904, 168, 199, 739, 446, 606,
851, 182, 546, 856, 395, 882, 90, 464, 89, 308, 822,
672, 477, 781, 411, 381, 886, 819, 69, 652, 23, 843,
317, 649, 149, 590, 25, 492, 774, 453, 863, 867, 510,
66, 860, 14, 187, 332, 454, 906, 802, 413, 532, 445,
864, 275, 891, 169, 899, 36, 278, 126, 691, 437, 282,
986, 473, 771, 802, 691, 393, 627, 785, 283, 498, 257,
407, 405, 343, 713, 247, 239, 821, 310, 60, 845, 794,
479, 83, 644, 372, 470, 658, 18, 432, 740, 844, 746,
876, 603, 106, 466, 728, 660, 493, 312, 492, 198, 771,
359, 787, 302, 121, 292, 564, 249, 72, 65, 311, 166,
869, 895, 550, 789, 307, 101, 366, 192, 73, 983, 282,
259, 160, 461, 47, 529, 223, 870, 340, 194, 7, 140,
556, 400, 551, 365, 874, 905, 831, 462, 88, 380, 443,
602, 925, 421, 574, 815, 944, 212, 576, 810, 69, 475,
484, 622, 744, 305, 377, 885, 117, 730, 660, 518, 438,
416, 149, 888, 842, 979, 826, 283, 789, 262, 542, 551,
422, 581, 100, 108, 582, 725, 2, 969, 950, 202, 178,
40, 109, 887, 681, 257, 870, 799, 658, 678, 439, 49,
831, 868, 779, 67, 946, 568, 826, 561, 318, 973, 558,
161, 808, 740, 62, 538, 699, 734, 379, 231, 547, 335,
126, 212, 701, 18, 72, 126, 209, 738, 182, 801, 788,
451, 436, 434, 270, 524])
def test_snail_026(self):
self.assertEqual(snail(
[[348, 421, 186, 172, 681, 428, 955, 583, 1000, 631, 543],
[751, 963, 968, 739, 248, 380, 307, 61, 874, 248, 908],
[803, 186, 336, 83, 196, 775, 898, 148, 43, 24, 993],
[274, 904, 695, 140, 582, 766, 810, 824, 717, 591, 136],
[632, 95, 397, 516, 457, 937, 220, 150, 971, 391, 283],
[157, 543, 946, 629, 703, 392, 816, 292, 935, 107, 289],
[794, 824, 923, 134, 486, 165, 956, 714, 775, 265, 654],
[261, 551, 238, 976, 460, 921, 501, 439, 811, 202, 916],
[817, 671, 357, 391, 181, 639, 191, 534, 945, 204, 249],
[761, 208, 763, 142, 330, 832, 998, 706, 301, 117, 615],
[977, 386, 105, 274, 166, 993, 248, 316, 340, 378, 886]]),
[348, 421, 186, 172, 681, 428, 955, 583, 1000, 631,
543, 908, 993, 136, 283, 289, 654, 916, 249, 615, 886,
378, 340, 316, 248, 993, 166, 274, 105, 386, 977, 761,
817, 261, 794, 157, 632, 274, 803, 751, 963, 968, 739,
248, 380, 307, 61, 874, 248, 24, 591, 391, 107, 265,
202, 204, 117, 301, 706, 998, 832, 330, 142, 763, 208,
671, 551, 824, 543, 95, 904, 186, 336, 83, 196, 775,
898, 148, 43, 717, 971, 935, 775, 811, 945, 534, 191,
639, 181, 391, 357, 238, 923, 946, 397, 695, 140, 582,
766, 810, 824, 150, 292, 714, 439, 501, 921, 460, 976,
134, 629, 516, 457, 937, 220, 816, 956, 165, 486, 703,
392])
def test_snail_027(self):
self.assertEqual(snail([[279, 149, 635, 162, 437, 751, 73, 382, 918,
994, 660, 832, 818, 312, 381, 306, 375, 87,
245],
[54, 599, 406, 599, 951, 888, 231, 723, 287,
692, 617, 275, 719, 445, 361, 954, 583, 951,
162],
[966, 522, 282, 502, 739, 889, 323, 635, 486,
477, 231, 502, 471, 524, 566, 189, 91, 694,
768],
[164, 463, 961, 850, 665, 898, 53, 331, 507, 69,
164, 99, 435, 418, 104, 868, 998, 186, 161],
[138, 179, 498, 106, 803, 338, 361, 631, 370,
805, 156, 583, 102, 486, 989, 468, 772, 491,
656],
[450, 129, 723, 662, 665, 9, 227, 23, 222, 199,
111, 556, 897, 4, 81, 665, 108, 906, 457],
[442, 235, 249, 838, 26, 861, 927, 55, 260, 9,
140, 495, 478, 544, 693, 849, 727, 448, 421],
[812, 736, 968, 113, 205, 680, 936, 699, 733,
830, 760, 301, 891, 701, 530, 34, 234, 764,
136],
[191, 591, 992, 189, 987, 162, 784, 566, 788,
983, 584, 919, 410, 408, 225, 778, 200, 854,
852],
[424, 5, 610, 711, 796, 952, 899, 192, 643, 399,
953, 720, 406, 324, 706, 943, 139, 87, 668],
[412, 431, 428, 777, 880, 971, 931, 966, 281,
510, 63, 1000, 115, 833, 746, 390, 333, 636,
671],
[249, 695, 992, 731, 15, 843, 567, 332, 762,
942, 804, 601, 83, 738, 165, 517, 258, 171,
227],
[976, 808, 967, 898, 78, 231, 563, 182, 696,
611, 421, 809, 6, 954, 656, 338, 422, 777,
172],
[839, 795, 83, 698, 557, 584, 452, 382, 89, 858,
886, 514, 671, 669, 827, 78, 160, 694, 784],
[1000, 249, 558, 794, 891, 668, 564, 399, 18,
452, 938, 516, 359, 2, 140, 31, 16, 876, 532],
[706, 99, 684, 613, 93, 504, 584, 599, 513, 638,
645, 334, 448, 148, 802, 805, 255, 759, 176],
[262, 671, 68, 389, 36, 561, 104, 285, 968, 896,
20, 912, 215, 161, 564, 476, 828, 815, 331],
[74, 29, 857, 758, 382, 578, 150, 745, 684, 558,
384, 439, 118, 599, 779, 378, 816, 996, 206],
[83, 545, 645, 856, 457, 736, 454, 105, 282,
587, 180, 436, 188, 477, 503, 377, 696, 918,
592]]),
[279, 149, 635, 162, 437, 751, 73, 382, 918, 994, 660,
832, 818, 312, 381, 306, 375, 87, 245, 162, 768, 161,
656, 457, 421, 136, 852, 668, 671, 227, 172, 784, 532,
176, 331, 206, 592, 918, 696, 377, 503, 477, 188, 436,
180, 587, 282, 105, 454, 736, 457, 856, 645, 545, 83,
74, 262, 706, 1000, 839, 976, 249, 412, 424, 191, 812,
442, 450, 138, 164, 966, 54, 599, 406, 599, 951, 888,
231, 723, 287, 692, 617, 275, 719, 445, 361, 954, 583,
951, 694, 186, 491, 906, 448, 764, 854, 87, 636, 171,
777, 694, 876, 759, 815, 996, 816, 378, 779, 599, 118,
439, 384, 558, 684, 745, 150, 578, 382, 758, 857, 29,
671, 99, 249, 795, 808, 695, 431, 5, 591, 736, 235,
129, 179, 463, 522, 282, 502, 739, 889, 323, 635, 486,
477, 231, 502, 471, 524, 566, 189, 91, 998, 772, 108,
727, 234, 200, 139, 333, 258, 422, 160, 16, 255, 828,
476, 564, 161, 215, 912, 20, 896, 968, 285, 104, 561,
36, 389, 68, 684, 558, 83, 967, 992, 428, 610, 992,
968, 249, 723, 498, 961, 850, 665, 898, 53, 331, 507,
69, 164, 99, 435, 418, 104, 868, 468, 665, 849, 34,
778, 943, 390, 517, 338, 78, 31, 805, 802, 148, 448,
334, 645, 638, 513, 599, 584, 504, 93, 613, 794, 698,
898, 731, 777, 711, 189, 113, 838, 662, 106, 803, 338,
361, 631, 370, 805, 156, 583, 102, 486, 989, 81, 693,
530, 225, 706, 746, 165, 656, 827, 140, 2, 359, 516,
938, 452, 18, 399, 564, 668, 891, 557, 78, 15, 880,
796, 987, 205, 26, 665, 9, 227, 23, 222, 199, 111,
556, 897, 4, 544, 701, 408, 324, 833, 738, 954, 669,
671, 514, 886, 858, 89, 382, 452, 584, 231, 843, 971,
952, 162, 680, 861, 927, 55, 260, 9, 140, 495, 478,
891, 410, 406, 115, 83, 6, 809, 421, 611, 696, 182,
563, 567, 931, 899, 784, 936, 699, 733, 830, 760, 301,
919, 720, 1000, 601, 804, 942, 762, 332, 966, 192,
566, 788, 983, 584, 953, 63, 510, 281, 643, 399])
def test_snail_028(self):
self.assertEqual(snail([[694, 584, 826, 873, 217, 367, 668, 234, 472,
306, 498, 94, 613, 797],
[712, 162, 246, 54, 330, 345, 797, 656, 949,
377, 907, 79, 246, 655],
[393, 162, 490, 233, 843, 794, 437, 391, 266,
639, 553, 518, 364, 569],
[844, 274, 883, 549, 545, 431, 169, 974, 129,
186, 605, 391, 354, 562],
[439, 363, 626, 800, 507, 849, 391, 701, 310,
374, 946, 329, 720, 188],
[110, 517, 124, 454, 546, 362, 238, 717, 444,
560, 620, 885, 732, 631],
[849, 531, 960, 464, 448, 802, 101, 755, 69,
843, 256, 543, 728, 839],
[538, 525, 681, 672, 849, 637, 688, 939, 393,
184, 675, 434, 361, 557],
[483, 832, 588, 542, 124, 605, 146, 492, 359,
465, 278, 352, 815, 884],
[837, 448, 77, 252, 291, 313, 816, 79, 919, 188,
845, 26, 918, 190],
[994, 349, 148, 613, 557, 269, 695, 471, 944,
90, 2, 167, 136, 926],
[596, 304, 727, 835, 858, 635, 727, 136, 179,
266, 171, 679, 985, 945],
[152, 294, 615, 139, 465, 165, 578, 914, 232,
953, 268, 143, 847, 663],
[355, 96, 458, 217, 834, 690, 302, 691, 470,
344, 567, 66, 479, 144]]),
[694, 584, 826, 873, 217, 367, 668, 234, 472, 306, 498,
94, 613, 797, 655, 569, 562, 188, 631, 839, 557, 884,
190, 926, 945, 663, 144, 479, 66, 567, 344, 470, 691,
302, 690, 834, 217, 458, 96, 355, 152, 596, 994, 837,
483, 538, 849, 110, 439, 844, 393, 712, 162, 246, 54,
330, 345, 797, 656, 949, 377, 907, 79, 246, 364, 354,
720, 732, 728, 361, 815, 918, 136, 985, 847, 143, 268,
953, 232, 914, 578, 165, 465, 139, 615, 294, 304, 349,
448, 832, 525, 531, 517, 363, 274, 162, 490, 233, 843,
794, 437, 391, 266, 639, 553, 518, 391, 329, 885, 543,
434, 352, 26, 167, 679, 171, 266, 179, 136, 727, 635,
858, 835, 727, 148, 77, 588, 681, 960, 124, 626, 883,
549, 545, 431, 169, 974, 129, 186, 605, 946, 620, 256,
675, 278, 845, 2, 90, 944, 471, 695, 269, 557, 613,
252, 542, 672, 464, 454, 800, 507, 849, 391, 701, 310,
374, 560, 843, 184, 465, 188, 919, 79, 816, 313, 291,
124, 849, 448, 546, 362, 238, 717, 444, 69, 393, 359,
492, 146, 605, 637, 802, 101, 755, 939, 688])
def test_snail_029(self):
self.assertEqual(snail([[823, 448, 897, 244, 584, 461, 96],
[645, 751, 213, 852, 812, 16, 617],
[341, 284, 208, 458, 28, 238, 767],
[773, 348, 159, 197, 957, 501, 818],
[932, 118, 964, 418, 423, 847, 430],
[545, 667, 931, 75, 818, 645, 45],
[923, 151, 732, 63, 520, 681, 627]]),
[823, 448, 897, 244, 584, 461, 96, 617, 767, 818, 430,
45, 627, 681, 520, 63, 732, 151, 923, 545, 932, 773,
341, 645, 751, 213, 852, 812, 16, 238, 501, 847, 645,
818, 75, 931, 667, 118, 348, 284, 208, 458, 28, 957,
423, 418, 964, 159, 197])
def test_snail_030(self):
self.assertEqual(snail([[491, 432, 751, 729, 722, 964, 386, 710, 130,
369, 227, 487, 395, 914, 468, 885, 81, 569,
868, 900],
[925, 992, 601, 188, 204, 640, 239, 6, 26, 451,
26, 630, 429, 830, 38, 905, 555, 630, 296,
840],
[401, 86, 682, 405, 960, 499, 290, 765, 513,
376, 331, 78, 471, 999, 3, 328, 896, 758, 56,
75],
[542, 905, 880, 788, 546, 879, 658, 836, 787,
912, 968, 988, 98, 461, 973, 469, 371, 178,
984, 431],
[584, 627, 404, 160, 875, 721, 409, 163, 30,
127, 499, 300, 869, 690, 69, 260, 751, 151,
288, 319],
[748, 508, 826, 682, 70, 215, 89, 186, 418, 386,
474, 42, 389, 599, 872, 534, 181, 496, 186,
21],
[546, 745, 446, 346, 449, 807, 863, 996, 605,
427, 845, 182, 932, 282, 544, 650, 123, 188,
505, 745],
[107, 963, 507, 886, 162, 321, 597, 90, 576,
101, 818, 394, 542, 276, 578, 417, 797, 89,
366, 771],
[904, 230, 474, 400, 921, 749, 277, 826, 638,
294, 520, 617, 405, 983, 437, 87, 940, 492,
561, 407],
[877, 195, 809, 714, 64, 362, 585, 4, 995, 949,
383, 172, 55, 468, 637, 229, 746, 208, 91,
708],
[663, 758, 330, 359, 996, 67, 409, 169, 660,
688, 11, 50, 191, 88, 802, 834, 559, 139, 490,
412],
[310, 464, 204, 408, 801, 352, 18, 167, 815,
753, 758, 833, 85, 731, 253, 655, 290, 493,
356, 396],
[424, 931, 222, 6, 67, 347, 450, 528, 353, 444,
283, 971, 925, 76, 208, 101, 989, 64, 209,
875],
[903, 651, 952, 356, 647, 99, 895, 868, 203,
620, 147, 200, 657, 839, 745, 260, 916, 552,
896, 209],
[721, 17, 825, 638, 691, 971, 95, 844, 75, 203,
692, 210, 618, 113, 518, 82, 493, 463, 647,
122],
[335, 97, 438, 636, 568, 329, 681, 998, 316,
679, 597, 547, 505, 283, 748, 299, 800, 828,
521, 139],
[209, 110, 325, 990, 706, 379, 897, 133, 457,
573, 653, 863, 452, 819, 801, 756, 590, 925,
583, 731],
[816, 946, 134, 587, 645, 751, 780, 140, 731,
208, 504, 939, 401, 724, 140, 1000, 575, 15,
966, 719],
[929, 121, 255, 511, 401, 94, 7, 656, 871, 52,
589, 504, 456, 524, 492, 4, 513, 673, 536,
877],
[828, 402, 44, 162, 805, 675, 391, 875, 955,
410, 385, 625, 250, 837, 153, 922, 105, 279,
91, 121]]),
[491, 432, 751, 729, 722, 964, 386, 710, 130, 369, 227,
487, 395, 914, 468, 885, 81, 569, 868, 900, 840, 75,
431, 319, 21, 745, 771, 407, 708, 412, 396, 875, 209,
122, 139, 731, 719, 877, 121, 91, 279, 105, 922, 153,
837, 250, 625, 385, 410, 955, 875, 391, 675, 805, 162,
44, 402, 828, 929, 816, 209, 335, 721, 903, 424, 310,
663, 877, 904, 107, 546, 748, 584, 542, 401, 925, 992,
601, 188, 204, 640, 239, 6, 26, 451, 26, 630, 429,
830, 38, 905, 555, 630, 296, 56, 984, 288, 186, 505,
366, 561, 91, 490, 356, 209, 896, 647, 521, 583, 966,
536, 673, 513, 4, 492, 524, 456, 504, 589, 52, 871,
656, 7, 94, 401, 511, 255, 121, 946, 110, 97, 17, 651,
931, 464, 758, 195, 230, 963, 745, 508, 627, 905, 86,
682, 405, 960, 499, 290, 765, 513, 376, 331, 78, 471,
999, 3, 328, 896, 758, 178, 151, 496, 188, 89, 492,
208, 139, 493, 64, 552, 463, 828, 925, 15, 575, 1000,
140, 724, 401, 939, 504, 208, 731, 140, 780, 751, 645,
587, 134, 325, 438, 825, 952, 222, 204, 330, 809, 474,
507, 446, 826, 404, 880, 788, 546, 879, 658, 836, 787,
912, 968, 988, 98, 461, 973, 469, 371, 751, 181, 123,
797, 940, 746, 559, 290, 989, 916, 493, 800, 590, 756,
801, 819, 452, 863, 653, 573, 457, 133, 897, 379, 706,
990, 636, 638, 356, 6, 408, 359, 714, 400, 886, 346,
682, 160, 875, 721, 409, 163, 30, 127, 499, 300, 869,
690, 69, 260, 534, 650, 417, 87, 229, 834, 655, 101,
260, 82, 299, 748, 283, 505, 547, 597, 679, 316, 998,
681, 329, 568, 691, 647, 67, 801, 996, 64, 921, 162,
449, 70, 215, 89, 186, 418, 386, 474, 42, 389, 599,
872, 544, 578, 437, 637, 802, 253, 208, 745, 518, 113,
618, 210, 692, 203, 75, 844, 95, 971, 99, 347, 352,
67, 362, 749, 321, 807, 863, 996, 605, 427, 845, 182,
932, 282, 276, 983, 468, 88, 731, 76, 839, 657, 200,
147, 620, 203, 868, 895, 450, 18, 409, 585, 277, 597,
90, 576, 101, 818, 394, 542, 405, 55, 191, 85, 925,
971, 283, 444, 353, 528, 167, 169, 4, 826, 638, 294,
520, 617, 172, 50, 833, 758, 753, 815, 660, 995, 949,
383, 11, 688])
def test_snail_031(self):
self.assertEqual(snail(
[[751, 521, 950, 82], [455, 888, 335, 526], [105, 724, 129, 53],
[380, 655, 725, 828]]),
[751, 521, 950, 82, 526, 53, 828, 725, 655, 380, 105,
455, 888, 335, 129, 724])
def test_snail_032(self):
self.assertEqual(snail([[543]]), [543])
def test_snail_033(self):
self.assertEqual(snail([[229, 998, 713, 612, 345, 412, 73, 287, 921, 44,
509, 147, 815, 84],
[202, 726, 739, 170, 976, 345, 944, 506, 848,
942, 98, 297, 75, 807],
[893, 82, 958, 458, 916, 954, 418, 436, 492, 86,
792, 226, 925, 268],
[370, 388, 588, 171, 945, 358, 281, 657, 577,
147, 44, 352, 899, 119],
[63, 834, 521, 924, 276, 174, 483, 414, 999,
932, 97, 492, 833, 363],
[983, 187, 828, 23, 387, 853, 203, 130, 187,
820, 569, 974, 494, 870],
[265, 162, 207, 733, 32, 925, 259, 761, 166,
231, 504, 503, 64, 851],
[434, 330, 43, 791, 846, 790, 566, 474, 702,
462, 693, 826, 682, 881],
[752, 68, 291, 180, 294, 674, 433, 486, 768,
743, 498, 98, 61, 154],
[52, 47, 323, 362, 247, 135, 716, 566, 713, 977,
78, 222, 300, 909],
[265, 17, 534, 221, 142, 430, 935, 948, 600, 79,
898, 229, 949, 656],
[850, 639, 989, 941, 84, 62, 850, 437, 25, 538,
670, 868, 406, 755],
[370, 978, 377, 131, 102, 929, 459, 201, 14,
981, 461, 153, 665, 352],
[374, 581, 593, 665, 922, 259, 899, 586, 405,
812, 645, 820, 321, 535]]),
[229, 998, 713, 612, 345, 412, 73, 287, 921, 44, 509,
147, 815, 84, 807, 268, 119, 363, 870, 851, 881, 154,
909, 656, 755, 352, 535, 321, 820, 645, 812, 405, 586,
899, 259, 922, 665, 593, 581, 374, 370, 850, 265, 52,
752, 434, 265, 983, 63, 370, 893, 202, 726, 739, 170,
976, 345, 944, 506, 848, 942, 98, 297, 75, 925, 899,
833, 494, 64, 682, 61, 300, 949, 406, 665, 153, 461,
981, 14, 201, 459, 929, 102, 131, 377, 978, 639, 17,
47, 68, 330, 162, 187, 834, 388, 82, 958, 458, 916,
954, 418, 436, 492, 86, 792, 226, 352, 492, 974, 503,
826, 98, 222, 229, 868, 670, 538, 25, 437, 850, 62,
84, 941, 989, 534, 323, 291, 43, 207, 828, 521, 588,
171, 945, 358, 281, 657, 577, 147, 44, 97, 569, 504,
693, 498, 78, 898, 79, 600, 948, 935, 430, 142, 221,
362, 180, 791, 733, 23, 924, 276, 174, 483, 414, 999,
932, 820, 231, 462, 743, 977, 713, 566, 716, 135, 247,
294, 846, 32, 387, 853, 203, 130, 187, 166, 702, 768,
486, 433, 674, 790, 925, 259, 761, 474, 566])
def test_snail_034(self):
self.assertEqual(snail([[543, 159, 630, 512, 408, 22, 659, 938, 716,
955, 142, 6, 273, 723],
[899, 659, 592, 655, 57, 191, 321, 795, 226,
317, 372, 190, 368, 804],
[214, 369, 514, 853, 25, 423, 744, 462, 181,
663, 863, 747, 152, 353],
[117, 9, 923, 420, 253, 550, 729, 881, 696, 208,
269, 362, 242, 177],
[625, 547, 37, 512, 130, 542, 853, 646, 551,
801, 257, 306, 206, 361],
[271, 719, 731, 679, 306, 529, 531, 846, 891,
420, 871, 537, 514, 117],
[350, 890, 866, 614, 496, 485, 88, 13, 488, 842,
197, 891, 854, 554],
[278, 713, 485, 671, 556, 687, 246, 19, 293,
906, 1000, 375, 531, 126],
[641, 531, 586, 598, 991, 366, 229, 169, 644,
562, 847, 724, 546, 904],
[859, 329, 116, 455, 986, 255, 334, 156, 188,
438, 112, 409, 283, 653],
[844, 612, 215, 684, 518, 422, 922, 741, 33,
196, 272, 51, 604, 951],
[457, 68, 327, 589, 617, 942, 5, 200, 722, 725,
971, 886, 972, 961],
[817, 172, 829, 438, 738, 639, 453, 565, 270,
683, 405, 829, 664, 749],
[347, 518, 664, 43, 591, 52, 685, 427, 716, 578,
854, 88, 673, 458]]),
[543, 159, 630, 512, 408, 22, 659, 938, 716, 955, 142,
6, 273, 723, 804, 353, 177, 361, 117, 554, 126, 904,
653, 951, 961, 749, 458, 673, 88, 854, 578, 716, 427,
685, 52, 591, 43, 664, 518, 347, 817, 457, 844, 859,
641, 278, 350, 271, 625, 117, 214, 899, 659, 592, 655,
57, 191, 321, 795, 226, 317, 372, 190, 368, 152, 242,
206, 514, 854, 531, 546, 283, 604, 972, 664, 829, 405,
683, 270, 565, 453, 639, 738, 438, 829, 172, 68, 612,
329, 531, 713, 890, 719, 547, 9, 369, 514, 853, 25,
423, 744, 462, 181, 663, 863, 747, 362, 306, 537, 891,
375, 724, 409, 51, 886, 971, 725, 722, 200, 5, 942,
617, 589, 327, 215, 116, 586, 485, 866, 731, 37, 923,
420, 253, 550, 729, 881, 696, 208, 269, 257, 871, 197,
1000, 847, 112, 272, 196, 33, 741, 922, 422, 518, 684,
455, 598, 671, 614, 679, 512, 130, 542, 853, 646, 551,
801, 420, 842, 906, 562, 438, 188, 156, 334, 255, 986,
991, 556, 496, 306, 529, 531, 846, 891, 488, 293, 644,
169, 229, 366, 687, 485, 88, 13, 19, 246])
def test_snail_035(self):
self.assertEqual(snail(
[[805, 737, 255, 944, 227, 940, 373, 877, 581, 787, 278, 332],
[64, 412, 532, 342, 955, 252, 339, 890, 26, 793, 124, 394],
[814, 764, 949, 785, 415, 832, 711, 188, 65, 623, 255, 469],
[110, 743, 29, 583, 871, 275, 878, 329, 107, 698, 107, 523],
[212, 73, 731, 628, 188, 215, 22, 479, 650, 523, 1000, 926],
[383, 241, 377, 580, 798, 363, 103, 802, 427, 943, 877, 919],
[387, 291, 796, 951, 13, 601, 617, 451, 340, 203, 336, 42],
[412, 654, 456, 885, 799, 937, 971, 608, 17, 481, 383, 748],
[39, 178, 45, 684, 995, 672, 707, 397, 999, 98, 373, 396],
[62, 984, 818, 343, 914, 165, 470, 510, 86, 545, 993, 448],
[105, 178, 404, 878, 906, 445, 706, 798, 613, 433, 492, 518],
[744, 254, 817, 85, 813, 574, 193, 588, 505, 162, 819, 636]]),
[805, 737, 255, 944, 227, 940, 373, 877, 581, 787, 278,
332, 394, 469, 523, 926, 919, 42, 748, 396, 448, 518,
636, 819, 162, 505, 588, 193, 574, 813, 85, 817, 254,
744, 105, 62, 39, 412, 387, 383, 212, 110, 814, 64,
412, 532, 342, 955, 252, 339, 890, 26, 793, 124, 255,
107, 1000, 877, 336, 383, 373, 993, 492, 433, 613,
798, 706, 445, 906, 878, 404, 178, 984, 178, 654, 291,
241, 73, 743, 764, 949, 785, 415, 832, 711, 188, 65,
623, 698, 523, 943, 203, 481, 98, 545, 86, 510, 470,
165, 914, 343, 818, 45, 456, 796, 377, 731, 29, 583,
871, 275, 878, 329, 107, 650, 427, 340, 17, 999, 397,
707, 672, 995, 684, 885, 951, 580, 628, 188, 215, 22,
479, 802, 451, 608, 971, 937, 799, 13, 798, 363, 103,
617, 601])
def test_snail_036(self):
self.assertEqual(snail(
[[997, 44, 256, 241, 586, 435, 204, 852, 283, 678],
[536, 493, 608, 713, 378, 476, 645, 242, 657, 560],
[609, 310, 407, 973, 835, 59, 771, 982, 985, 55],
[948, 389, 927, 772, 391, 672, 254, 120, 915, 655],
[993, 544, 661, 167, 875, 343, 129, 64, 471, 611],
[186, 216, 598, 814, 94, 694, 135, 7, 374, 60],
[487, 528, 461, 860, 913, 283, 276, 354, 679, 778],
[636, 627, 996, 319, 813, 600, 548, 491, 948, 178],
[995, 381, 855, 47, 403, 250, 912, 709, 322, 993],
[604, 150, 814, 285, 749, 84, 752, 680, 900, 222]]),
[997, 44, 256, 241, 586, 435, 204, 852, 283, 678, 560,
55, 655, 611, 60, 778, 178, 993, 222, 900, 680, 752,
84, 749, 285, 814, 150, 604, 995, 636, 487, 186, 993,
948, 609, 536, 493, 608, 713, 378, 476, 645, 242, 657,
985, 915, 471, 374, 679, 948, 322, 709, 912, 250, 403,
47, 855, 381, 627, 528, 216, 544, 389, 310, 407, 973,
835, 59, 771, 982, 120, 64, 7, 354, 491, 548, 600,
813, 319, 996, 461, 598, 661, 927, 772, 391, 672, 254,
129, 135, 276, 283, 913, 860, 814, 167, 875, 343, 694,
94])
def test_snail_037(self):
self.assertEqual(snail([[924, 474, 327, 244, 69, 575, 52, 587, 477, 521,
871, 701, 236],
[521, 643, 870, 149, 368, 896, 185, 164, 142,
419, 686, 209, 67],
[161, 18, 876, 414, 245, 830, 900, 985, 627,
760, 366, 872, 85],
[885, 784, 859, 378, 232, 55, 455, 716, 558, 68,
430, 331, 35],
[859, 82, 149, 721, 581, 743, 272, 68, 600, 363,
433, 350, 62],
[435, 913, 330, 343, 219, 649, 84, 442, 282,
315, 368, 567, 33],
[756, 543, 726, 158, 116, 526, 43, 351, 731,
966, 190, 494, 396],
[991, 673, 736, 193, 693, 113, 21, 298, 699,
837, 141, 997, 872],
[589, 658, 79, 77, 493, 79, 163, 484, 631, 547,
53, 991, 387],
[536, 709, 286, 817, 344, 230, 460, 648, 13, 13,
268, 604, 512],
[107, 41, 145, 882, 103, 149, 377, 919, 188,
631, 686, 965, 945],
[983, 912, 408, 29, 227, 783, 589, 629, 432,
119, 498, 481, 652],
[470, 415, 9, 285, 695, 290, 688, 88, 702, 962,
280, 589, 7]]),
[924, 474, 327, 244, 69, 575, 52, 587, 477, 521, 871,
701, 236, 67, 85, 35, 62, 33, 396, 872, 387, 512, 945,
652, 7, 589, 280, 962, 702, 88, 688, 290, 695, 285, 9,
415, 470, 983, 107, 536, 589, 991, 756, 435, 859, 885,
161, 521, 643, 870, 149, 368, 896, 185, 164, 142, 419,
686, 209, 872, 331, 350, 567, 494, 997, 991, 604, 965,
481, 498, 119, 432, 629, 589, 783, 227, 29, 408, 912,
41, 709, 658, 673, 543, 913, 82, 784, 18, 876, 414,
245, 830, 900, 985, 627, 760, 366, 430, 433, 368, 190,
141, 53, 268, 686, 631, 188, 919, 377, 149, 103, 882,
145, 286, 79, 736, 726, 330, 149, 859, 378, 232, 55,
455, 716, 558, 68, 363, 315, 966, 837, 547, 13, 13,
648, 460, 230, 344, 817, 77, 193, 158, 343, 721, 581,
743, 272, 68, 600, 282, 731, 699, 631, 484, 163, 79,
493, 693, 116, 219, 649, 84, 442, 351, 298, 21, 113,
526, 43])
def test_snail_038(self):
self.assertEqual(snail([[332, 189, 638, 117, 858, 164, 701, 784, 749,
950, 707, 293, 233, 576],
[380, 752, 798, 298, 597, 470, 623, 773, 953,
86, 251, 504, 126, 633],
[337, 241, 413, 616, 605, 278, 289, 366, 162,
83, 632, 601, 771, 812],
[814, 497, 196, 480, 388, 471, 689, 147, 436,
568, 298, 36, 503, 120],
[867, 706, 472, 178, 529, 333, 885, 252, 864,
324, 288, 246, 463, 478],
[702, 781, 720, 927, 185, 781, 841, 175, 822,
170, 77, 144, 909, 301],
[779, 325, 154, 452, 539, 389, 191, 453, 664,
920, 216, 383, 873, 917],
[859, 868, 29, 729, 640, 104, 731, 668, 816,
335, 907, 242, 563, 950],
[230, 53, 485, 405, 276, 592, 563, 860, 770,
124, 501, 431, 370, 908],
[355, 994, 912, 644, 789, 852, 140, 693, 256,
677, 136, 488, 337, 317],
[346, 323, 9, 399, 577, 991, 9, 152, 271, 188,
222, 851, 696, 985],
[481, 705, 515, 680, 129, 670, 380, 894, 951,
245, 577, 654, 109, 754],
[889, 295, 885, 544, 579, 931, 693, 95, 772,
865, 210, 62, 232, 361],
[743, 942, 729, 57, 879, 664, 20, 779, 401, 449,
973, 521, 380, 393]]),
[332, 189, 638, 117, 858, 164, 701, 784, 749, 950, 707,
293, 233, 576, 633, 812, 120, 478, 301, 917, 950, 908,
317, 985, 754, 361, 393, 380, 521, 973, 449, 401, 779,
20, 664, 879, 57, 729, 942, 743, 889, 481, 346, 355,
230, 859, 779, 702, 867, 814, 337, 380, 752, 798, 298,
597, 470, 623, 773, 953, 86, 251, 504, 126, 771, 503,
463, 909, 873, 563, 370, 337, 696, 109, 232, 62, 210,
865, 772, 95, 693, 931, 579, 544, 885, 295, 705, 323,
994, 53, 868, 325, 781, 706, 497, 241, 413, 616, 605,
278, 289, 366, 162, 83, 632, 601, 36, 246, 144, 383,
242, 431, 488, 851, 654, 577, 245, 951, 894, 380, 670,
129, 680, 515, 9, 912, 485, 29, 154, 720, 472, 196,
480, 388, 471, 689, 147, 436, 568, 298, 288, 77, 216,
907, 501, 136, 222, 188, 271, 152, 9, 991, 577, 399,
644, 405, 729, 452, 927, 178, 529, 333, 885, 252, 864,
324, 170, 920, 335, 124, 677, 256, 693, 140, 852, 789,
276, 640, 539, 185, 781, 841, 175, 822, 664, 816, 770,
860, 563, 592, 104, 389, 191, 453, 668, 731])
def test_snail_039(self):
self.assertEqual(snail([[686, 345, 940, 678, 562, 159, 206, 990, 927,
298, 539, 662, 265, 951, 400],
[528, 940, 929, 898, 770, 67, 357, 491, 61, 867,
425, 746, 957, 613, 720],
[116, 943, 331, 211, 933, 899, 736, 195, 147,
366, 181, 973, 59, 873, 379],
[161, 879, 580, 471, 865, 871, 542, 206, 816,
807, 436, 387, 893, 970, 145],
[367, 686, 933, 883, 434, 565, 652, 528, 199,
346, 378, 377, 911, 746, 288],
[844, 843, 849, 486, 101, 957, 940, 223, 930,
113, 359, 782, 652, 783, 559],
[56, 652, 242, 424, 531, 187, 16, 752, 168, 603,
702, 435, 237, 814, 398],
[10, 230, 815, 547, 434, 244, 56, 947, 758, 155,
407, 213, 366, 418, 518],
[438, 950, 214, 575, 809, 811, 370, 916, 57,
964, 918, 461, 428, 971, 456],
[190, 751, 7, 549, 101, 648, 636, 735, 371, 122,
316, 848, 463, 552, 41],
[82, 332, 595, 889, 290, 652, 211, 874, 249,
740, 352, 870, 517, 810, 422],
[248, 681, 64, 600, 6, 399, 108, 991, 123, 413,
862, 309, 28, 957, 861],
[603, 104, 908, 12, 827, 54, 796, 166, 701, 933,
180, 308, 604, 374, 950],
[495, 877, 743, 460, 546, 160, 966, 712, 708,
606, 52, 445, 957, 762, 950],
[39, 185, 527, 228, 972, 273, 584, 336, 352,
376, 681, 554, 34, 322, 125]]),
[686, 345, 940, 678, 562, 159, 206, 990, 927, 298, 539,
662, 265, 951, 400, 720, 379, 145, 288, 559, 398, 518,
456, 41, 422, 861, 950, 950, 125, 322, 34, 554, 681,
376, 352, 336, 584, 273, 972, 228, 527, 185, 39, 495,
603, 248, 82, 190, 438, 10, 56, 844, 367, 161, 116,
528, 940, 929, 898, 770, 67, 357, 491, 61, 867, 425,
746, 957, 613, 873, 970, 746, 783, 814, 418, 971, 552,
810, 957, 374, 762, 957, 445, 52, 606, 708, 712, 966,
160, 546, 460, 743, 877, 104, 681, 332, 751, 950, 230,
652, 843, 686, 879, 943, 331, 211, 933, 899, 736, 195,
147, 366, 181, 973, 59, 893, 911, 652, 237, 366, 428,
463, 517, 28, 604, 308, 180, 933, 701, 166, 796, 54,
827, 12, 908, 64, 595, 7, 214, 815, 242, 849, 933,
580, 471, 865, 871, 542, 206, 816, 807, 436, 387, 377,
782, 435, 213, 461, 848, 870, 309, 862, 413, 123, 991,
108, 399, 6, 600, 889, 549, 575, 547, 424, 486, 883,
434, 565, 652, 528, 199, 346, 378, 359, 702, 407, 918,
316, 352, 740, 249, 874, 211, 652, 290, 101, 809, 434,
531, 101, 957, 940, 223, 930, 113, 603, 155, 964, 122,
371, 735, 636, 648, 811, 244, 187, 16, 752, 168, 758,
57, 916, 370, 56, 947])
def test_snail_040(self):
self.assertEqual(snail([[234, 459, 8, 740, 18, 612, 971, 482, 105, 70],
[725, 582, 552, 166, 909, 83, 323, 842, 901,
479],
[139, 880, 685, 560, 197, 820, 458, 261, 491,
930],
[917, 677, 674, 610, 470, 744, 893, 604, 310,
818],
[826, 470, 627, 391, 222, 544, 687, 939, 544,
952],
[68, 614, 803, 517, 852, 251, 87, 88, 838, 229],
[269, 848, 520, 498, 486, 567, 575, 779, 706,
74],
[567, 438, 209, 639, 573, 640, 885, 830, 665,
130],
[183, 483, 877, 703, 75, 515, 323, 482, 901,
562],
[426, 570, 572, 144, 924, 285, 48, 976, 282,
802]]),
[234, 459, 8, 740, 18, 612, 971, 482, 105, 70, 479,
930, 818, 952, 229, 74, 130, 562, 802, 282, 976, 48,
285, 924, 144, 572, 570, 426, 183, 567, 269, 68, 826,
917, 139, 725, 582, 552, 166, 909, 83, 323, 842, 901,
491, 310, 544, 838, 706, 665, 901, 482, 323, 515, 75,
703, 877, 483, 438, 848, 614, 470, 677, 880, 685, 560,
197, 820, 458, 261, 604, 939, 88, 779, 830, 885, 640,
573, 639, 209, 520, 803, 627, 674, 610, 470, 744, 893,
687, 87, 575, 567, 486, 498, 517, 391, 222, 544, 251,
852])
def test_snail_041(self):
self.assertEqual(snail(
[[966, 770, 415, 443, 591], [733, 239, 637, 938, 246],
[567, 292, 816, 631, 702], [315, 312, 771, 408, 474],
[275, 740, 146, 719, 961]]),
[966, 770, 415, 443, 591, 246, 702, 474, 961, 719, 146,
740, 275, 315, 567, 733, 239, 637, 938, 631, 408, 771,
312, 292, 816])
def test_snail_042(self):
self.assertEqual(snail([[928, 128, 90, 593, 147, 757, 325, 206, 400,
949, 633, 558, 879],
[190, 99, 708, 968, 665, 847, 159, 388, 584,
547, 469, 788, 586],
[684, 65, 832, 834, 651, 891, 458, 712, 596,
377, 465, 789, 44],
[653, 136, 125, 990, 21, 351, 405, 771, 910,
922, 213, 998, 75],
[165, 220, 334, 367, 603, 930, 821, 232, 624,
209, 353, 156, 271],
[437, 145, 802, 747, 716, 565, 784, 364, 524,
475, 283, 81, 501],
[821, 590, 652, 948, 704, 922, 334, 102, 905,
13, 335, 462, 425],
[118, 633, 924, 637, 123, 245, 432, 807, 579,
480, 828, 79, 942],
[805, 592, 718, 356, 790, 549, 125, 844, 691,
71, 835, 150, 747],
[87, 541, 24, 922, 952, 881, 463, 192, 319, 765,
771, 368, 432],
[149, 859, 949, 368, 342, 942, 337, 598, 490,
889, 50, 794, 786],
[868, 167, 392, 93, 126, 521, 922, 941, 210,
170, 982, 94, 43],
[583, 931, 24, 750, 990, 453, 518, 9, 657, 789,
678, 676, 756]]),
[928, 128, 90, 593, 147, 757, 325, 206, 400, 949, 633,
558, 879, 586, 44, 75, 271, 501, 425, 942, 747, 432,
786, 43, 756, 676, 678, 789, 657, 9, 518, 453, 990,
750, 24, 931, 583, 868, 149, 87, 805, 118, 821, 437,
165, 653, 684, 190, 99, 708, 968, 665, 847, 159, 388,
584, 547, 469, 788, 789, 998, 156, 81, 462, 79, 150,
368, 794, 94, 982, 170, 210, 941, 922, 521, 126, 93,
392, 167, 859, 541, 592, 633, 590, 145, 220, 136, 65,
832, 834, 651, 891, 458, 712, 596, 377, 465, 213, 353,
283, 335, 828, 835, 771, 50, 889, 490, 598, 337, 942,
342, 368, 949, 24, 718, 924, 652, 802, 334, 125, 990,
21, 351, 405, 771, 910, 922, 209, 475, 13, 480, 71,
765, 319, 192, 463, 881, 952, 922, 356, 637, 948, 747,
367, 603, 930, 821, 232, 624, 524, 905, 579, 691, 844,
125, 549, 790, 123, 704, 716, 565, 784, 364, 102, 807,
432, 245, 922, 334])
def test_snail_043(self):
self.assertEqual(snail(
[[87, 462, 110, 33, 41, 613, 234, 971, 424, 490, 399],
[489, 50, 350, 304, 182, 24, 614, 707, 935, 678, 706],
[363, 94, 140, 854, 757, 467, 369, 903, 629, 342, 144],
[838, 301, 145, 18, 841, 484, 374, 723, 136, 333, 757],
[316, 713, 514, 19, 847, 337, 830, 358, 313, 138, 270],
[869, 803, 76, 126, 424, 80, 383, 117, 180, 519, 534],
[663, 709, 774, 866, 180, 59, 780, 653, 290, 958, 920],
[931, 926, 174, 65, 301, 51, 255, 19, 439, 910, 474],
[229, 84, 159, 158, 470, 597, 842, 83, 794, 285, 20],
[248, 938, 591, 246, 529, 506, 869, 146, 600, 738, 931],
[391, 267, 55, 182, 281, 344, 431, 338, 792, 443, 687]]),
[87, 462, 110, 33, 41, 613, 234, 971, 424, 490, 399,
706, 144, 757, 270, 534, 920, 474, 20, 931, 687, 443,
792, 338, 431, 344, 281, 182, 55, 267, 391, 248, 229,
931, 663, 869, 316, 838, 363, 489, 50, 350, 304, 182,
24, 614, 707, 935, 678, 342, 333, 138, 519, 958, 910,
285, 738, 600, 146, 869, 506, 529, 246, 591, 938, 84,
926, 709, 803, 713, 301, 94, 140, 854, 757, 467, 369,
903, 629, 136, 313, 180, 290, 439, 794, 83, 842, 597,
470, 158, 159, 174, 774, 76, 514, 145, 18, 841, 484,
374, 723, 358, 117, 653, 19, 255, 51, 301, 65, 866,
126, 19, 847, 337, 830, 383, 780, 59, 180, 424, 80])
def test_snail_044(self):
self.assertEqual(snail(
[[64, 644, 694, 5, 163, 760, 568, 84, 67, 517, 872],
[933, 412, 172, 162, 97, 626, 830, 969, 809, 522, 539],
[910, 121, 228, 803, 443, 4, 341, 64, 60, 438, 964],
[320, 135, 26, 700, 58, 741, 111, 944, 580, 855, 195],
[2, 802, 971, 42, 232, 432, 910, 803, 694, 46, 826],
[612, 974, 539, 639, 21, 878, 809, 246, 218, 331, 974],
[804, 448, 962, 406, 439, 556, 826, 109, 798, 609, 867],
[260, 335, 33, 122, 577, 639, 88, 887, 760, 705, 784],
[893, 908, 88, 16, 905, 923, 220, 690, 648, 747, 591],
[276, 217, 551, 996, 879, 575, 154, 724, 468, 856, 317],
[427, 269, 210, 221, 352, 980, 952, 189, 573, 520, 383]]),
[64, 644, 694, 5, 163, 760, 568, 84, 67, 517, 872, 539,
964, 195, 826, 974, 867, 784, 591, 317, 383, 520, 573,
189, 952, 980, 352, 221, 210, 269, 427, 276, 893, 260,
804, 612, 2, 320, 910, 933, 412, 172, 162, 97, 626,
830, 969, 809, 522, 438, 855, 46, 331, 609, 705, 747,
856, 468, 724, 154, 575, 879, 996, 551, 217, 908, 335,
448, 974, 802, 135, 121, 228, 803, 443, 4, 341, 64,
60, 580, 694, 218, 798, 760, 648, 690, 220, 923, 905,
16, 88, 33, 962, 539, 971, 26, 700, 58, 741, 111, 944,
803, 246, 109, 887, 88, 639, 577, 122, 406, 639, 42,
232, 432, 910, 809, 826, 556, 439, 21, 878])
def test_snail_045(self):
self.assertEqual(snail([[631, 374, 877, 595, 738, 324, 704, 280, 468,
923, 505, 471, 786],
[725, 339, 191, 381, 268, 471, 998, 922, 108,
118, 197, 889, 647],
[422, 442, 5, 197, 843, 702, 57, 58, 593, 76,
159, 773, 840],
[166, 158, 990, 841, 117, 450, 765, 455, 254,
99, 224, 624, 608],
[129, 252, 729, 219, 199, 309, 229, 855, 542,
972, 470, 850, 286],
[255, 368, 57, 890, 572, 308, 655, 779, 134,
580, 335, 387, 888],
[27, 281, 301, 15, 780, 318, 425, 931, 277, 972,
499, 622, 692],
[466, 70, 405, 84, 594, 63, 228, 946, 440, 354,
720, 480, 259],
[257, 323, 934, 503, 258, 510, 921, 254, 430,
508, 484, 353, 949],
[321, 168, 497, 248, 670, 628, 258, 877, 585,
965, 796, 567, 233],
[190, 127, 715, 552, 621, 806, 212, 367, 420,
826, 534, 428, 604],
[908, 504, 880, 691, 117, 289, 731, 232, 629,
161, 417, 942, 52],
[341, 721, 127, 728, 46, 763, 884, 431, 905,
951, 338, 775, 868]]),
[631, 374, 877, 595, 738, 324, 704, 280, 468, 923, 505,
471, 786, 647, 840, 608, 286, 888, 692, 259, 949, 233,
604, 52, 868, 775, 338, 951, 905, 431, 884, 763, 46,
728, 127, 721, 341, 908, 190, 321, 257, 466, 27, 255,
129, 166, 422, 725, 339, 191, 381, 268, 471, 998, 922,
108, 118, 197, 889, 773, 624, 850, 387, 622, 480, 353,
567, 428, 942, 417, 161, 629, 232, 731, 289, 117, 691,
880, 504, 127, 168, 323, 70, 281, 368, 252, 158, 442,
5, 197, 843, 702, 57, 58, 593, 76, 159, 224, 470, 335,
499, 720, 484, 796, 534, 826, 420, 367, 212, 806, 621,
552, 715, 497, 934, 405, 301, 57, 729, 990, 841, 117,
450, 765, 455, 254, 99, 972, 580, 972, 354, 508, 965,
585, 877, 258, 628, 670, 248, 503, 84, 15, 890, 219,
199, 309, 229, 855, 542, 134, 277, 440, 430, 254, 921,
510, 258, 594, 780, 572, 308, 655, 779, 931, 946, 228,
63, 318, 425])
def test_snail_046(self):
self.assertEqual(snail(
[[448, 727, 434, 177, 987], [288, 839, 372, 379, 326],
[266, 287, 407, 590, 327], [782, 941, 470, 580, 365],
[823, 313, 939, 776, 834]]),
[448, 727, 434, 177, 987, 326, 327, 365, 834, 776, 939,
313, 823, 782, 266, 288, 839, 372, 379, 590, 580, 470,
941, 287, 407])
def test_snail_047(self):
self.assertEqual(snail([[134, 625, 697, 457, 3, 817, 998, 303, 562, 680,
864, 613, 483, 648, 569, 37],
[328, 426, 402, 699, 409, 971, 63, 339, 238,
759, 392, 835, 574, 349, 949, 842],
[491, 104, 329, 958, 321, 561, 47, 185, 759,
121, 608, 163, 746, 268, 114, 96],
[166, 374, 830, 603, 171, 472, 891, 395, 650,
879, 219, 441, 151, 672, 331, 202],
[763, 122, 903, 770, 555, 406, 876, 126, 509,
564, 333, 937, 863, 163, 970, 818],
[736, 749, 999, 758, 110, 809, 701, 861, 153,
823, 721, 107, 944, 830, 750, 333],
[750, 454, 398, 921, 852, 451, 774, 157, 715,
578, 474, 135, 955, 838, 386, 887],
[140, 935, 474, 862, 292, 785, 433, 271, 153,
908, 426, 686, 694, 206, 251, 533],
[13, 708, 970, 604, 773, 469, 663, 311, 734,
930, 528, 284, 558, 278, 112, 796],
[737, 293, 588, 611, 94, 821, 436, 105, 464,
543, 35, 623, 3, 33, 611, 809],
[812, 394, 490, 319, 385, 300, 47, 217, 181,
839, 527, 229, 889, 212, 754, 34],
[615, 169, 501, 732, 472, 298, 728, 494, 639,
582, 167, 79, 679, 66, 116, 445],
[307, 688, 864, 469, 119, 374, 338, 182, 396,
651, 77, 319, 744, 499, 95, 599],
[684, 884, 412, 446, 154, 747, 892, 34, 875,
845, 609, 455, 551, 940, 151, 932],
[949, 14, 409, 86, 966, 430, 157, 919, 875, 783,
268, 184, 420, 162, 970, 673],
[65, 50, 700, 314, 348, 547, 655, 313, 165, 573,
789, 164, 219, 216, 353, 975]]),
[134, 625, 697, 457, 3, 817, 998, 303, 562, 680, 864,
613, 483, 648, 569, 37, 842, 96, 202, 818, 333, 887,
533, 796, 809, 34, 445, 599, 932, 673, 975, 353, 216,
219, 164, 789, 573, 165, 313, 655, 547, 348, 314, 700,
50, 65, 949, 684, 307, 615, 812, 737, 13, 140, 750,
736, 763, 166, 491, 328, 426, 402, 699, 409, 971, 63,
339, 238, 759, 392, 835, 574, 349, 949, 114, 331, 970,
750, 386, 251, 112, 611, 754, 116, 95, 151, 970, 162,
420, 184, 268, 783, 875, 919, 157, 430, 966, 86, 409,
14, 884, 688, 169, 394, 293, 708, 935, 454, 749, 122,
374, 104, 329, 958, 321, 561, 47, 185, 759, 121, 608,
163, 746, 268, 672, 163, 830, 838, 206, 278, 33, 212,
66, 499, 940, 551, 455, 609, 845, 875, 34, 892, 747,
154, 446, 412, 864, 501, 490, 588, 970, 474, 398, 999,
903, 830, 603, 171, 472, 891, 395, 650, 879, 219, 441,
151, 863, 944, 955, 694, 558, 3, 889, 679, 744, 319,
77, 651, 396, 182, 338, 374, 119, 469, 732, 319, 611,
604, 862, 921, 758, 770, 555, 406, 876, 126, 509, 564,
333, 937, 107, 135, 686, 284, 623, 229, 79, 167, 582,
639, 494, 728, 298, 472, 385, 94, 773, 292, 852, 110,
809, 701, 861, 153, 823, 721, 474, 426, 528, 35, 527,
839, 181, 217, 47, 300, 821, 469, 785, 451, 774, 157,
715, 578, 908, 930, 543, 464, 105, 436, 663, 433, 271,
153, 734, 311])
def test_snail_048(self):
self.assertEqual(snail(
[[148, 131, 809, 558, 988], [226, 872, 217, 699, 709],
[326, 703, 976, 559, 826], [749, 582, 891, 321, 58],
[773, 142, 687, 234, 325]]),
[148, 131, 809, 558, 988, 709, 826, 58, 325, 234, 687,
142, 773, 749, 326, 226, 872, 217, 699, 559, 321, 891,
582, 703, 976])
def test_snail_049(self):
self.assertEqual(snail([[705, 149, 326, 506, 792, 406, 65, 525, 996,
158, 592, 282, 643, 696, 31, 520, 757, 275,
276],
[395, 704, 227, 598, 163, 173, 844, 171, 882,
571, 228, 161, 943, 43, 278, 3, 680, 719, 746],
[871, 369, 979, 617, 840, 771, 315, 81, 751,
543, 799, 516, 452, 899, 115, 102, 262, 234,
751],
[667, 55, 885, 708, 943, 586, 330, 992, 663, 19,
180, 786, 89, 208, 486, 706, 742, 854, 883],
[103, 917, 812, 629, 301, 326, 783, 757, 747,
217, 464, 220, 562, 390, 45, 883, 755, 278,
96],
[58, 584, 52, 378, 774, 536, 631, 392, 592, 219,
897, 685, 895, 23, 749, 884, 417, 365, 463],
[664, 410, 795, 818, 377, 952, 133, 43, 696,
342, 423, 412, 93, 337, 963, 722, 730, 824,
748],
[274, 831, 339, 463, 30, 408, 981, 958, 816,
799, 771, 659, 368, 494, 854, 878, 811, 16,
433],
[55, 449, 296, 203, 66, 988, 124, 753, 17, 600,
108, 79, 710, 973, 4, 847, 137, 725, 579],
[180, 356, 532, 283, 330, 882, 65, 248, 621,
572, 310, 119, 394, 624, 463, 326, 872, 983,
335],
[125, 938, 860, 228, 485, 110, 914, 335, 985,
788, 16, 492, 118, 658, 83, 673, 310, 463,
128],
[896, 593, 150, 280, 186, 824, 408, 2, 842, 388,
750, 674, 634, 221, 435, 728, 183, 685, 119],
[478, 785, 827, 612, 727, 240, 864, 732, 557,
136, 936, 280, 12, 953, 210, 158, 70, 762, 97],
[209, 645, 785, 915, 570, 703, 602, 696, 480,
206, 360, 662, 900, 39, 967, 709, 439, 5, 933],
[441, 925, 978, 564, 488, 326, 796, 781, 197,
696, 81, 630, 144, 317, 215, 987, 154, 30,
142],
[112, 745, 428, 350, 952, 650, 589, 463, 204,
57, 648, 273, 336, 497, 280, 697, 991, 997,
700],
[731, 537, 58, 463, 528, 662, 243, 565, 934,
490, 476, 244, 23, 859, 237, 545, 623, 6, 478],
[54, 910, 609, 160, 253, 282, 264, 395, 951,
466, 832, 888, 589, 309, 698, 27, 242, 647,
506],
[954, 699, 371, 224, 674, 22, 408, 24, 461, 574,
344, 658, 634, 902, 534, 85, 9, 177, 809]]),
[705, 149, 326, 506, 792, 406, 65, 525, 996, 158, 592,
282, 643, 696, 31, 520, 757, 275, 276, 746, 751, 883,
96, 463, 748, 433, 579, 335, 128, 119, 97, 933, 142,
700, 478, 506, 809, 177, 9, 85, 534, 902, 634, 658,
344, 574, 461, 24, 408, 22, 674, 224, 371, 699, 954,
54, 731, 112, 441, 209, 478, 896, 125, 180, 55, 274,
664, 58, 103, 667, 871, 395, 704, 227, 598, 163, 173,
844, 171, 882, 571, 228, 161, 943, 43, 278, 3, 680,
719, 234, 854, 278, 365, 824, 16, 725, 983, 463, 685,
762, 5, 30, 997, 6, 647, 242, 27, 698, 309, 589, 888,
832, 466, 951, 395, 264, 282, 253, 160, 609, 910, 537,
745, 925, 645, 785, 593, 938, 356, 449, 831, 410, 584,
917, 55, 369, 979, 617, 840, 771, 315, 81, 751, 543,
799, 516, 452, 899, 115, 102, 262, 742, 755, 417, 730,
811, 137, 872, 310, 183, 70, 439, 154, 991, 623, 545,
237, 859, 23, 244, 476, 490, 934, 565, 243, 662, 528,
463, 58, 428, 978, 785, 827, 150, 860, 532, 296, 339,
795, 52, 812, 885, 708, 943, 586, 330, 992, 663, 19,
180, 786, 89, 208, 486, 706, 883, 884, 722, 878, 847,
326, 673, 728, 158, 709, 987, 697, 280, 497, 336, 273,
648, 57, 204, 463, 589, 650, 952, 350, 564, 915, 612,
280, 228, 283, 203, 463, 818, 378, 629, 301, 326, 783,
757, 747, 217, 464, 220, 562, 390, 45, 749, 963, 854,
4, 463, 83, 435, 210, 967, 215, 317, 144, 630, 81,
696, 197, 781, 796, 326, 488, 570, 727, 186, 485, 330,
66, 30, 377, 774, 536, 631, 392, 592, 219, 897, 685,
895, 23, 337, 494, 973, 624, 658, 221, 953, 39, 900,
662, 360, 206, 480, 696, 602, 703, 240, 824, 110, 882,
988, 408, 952, 133, 43, 696, 342, 423, 412, 93, 368,
710, 394, 118, 634, 12, 280, 936, 136, 557, 732, 864,
408, 914, 65, 124, 981, 958, 816, 799, 771, 659, 79,
119, 492, 674, 750, 388, 842, 2, 335, 248, 753, 17,
600, 108, 310, 16, 788, 985, 621, 572])
def test_snail_050(self):
self.assertEqual(snail([[772, 352, 920, 451, 295, 883, 38, 33, 562, 598,
383, 190, 999, 918, 657, 173, 310, 243, 749,
460],
[868, 561, 653, 879, 805, 379, 61, 961, 64, 493,
854, 923, 284, 452, 702, 792, 656, 695, 771,
962],
[575, 398, 964, 954, 690, 209, 562, 164, 793,
976, 475, 330, 814, 655, 710, 811, 570, 20,
373, 610],
[799, 80, 212, 607, 883, 605, 697, 849, 982,
661, 68, 1000, 250, 950, 796, 122, 601, 798,
50, 473],
[648, 224, 554, 600, 467, 884, 2, 280, 425, 105,
226, 557, 661, 814, 881, 477, 370, 872, 659,
927],
[50, 371, 90, 503, 987, 116, 255, 374, 300, 948,
323, 898, 296, 361, 455, 546, 622, 633, 987,
34],
[579, 674, 687, 200, 919, 843, 315, 736, 450,
74, 420, 214, 736, 519, 696, 801, 571, 265, 46,
237],
[563, 899, 518, 645, 484, 597, 183, 589, 888,
826, 717, 73, 576, 799, 888, 231, 304, 510,
167, 831],
[724, 228, 499, 474, 241, 346, 119, 150, 200,
443, 641, 64, 147, 137, 161, 378, 536, 46, 176,
711],
[516, 527, 767, 644, 171, 340, 400, 775, 654,
690, 324, 130, 873, 579, 34, 550, 42, 834, 905,
174],
[26, 59, 337, 14, 944, 322, 490, 974, 40, 995,
912, 636, 919, 123, 190, 482, 123, 182, 178,
289],
[730, 222, 970, 185, 610, 987, 177, 447, 885,
117, 172, 22, 795, 119, 487, 673, 245, 819,
515, 318],
[962, 540, 368, 132, 532, 594, 446, 193, 252,
326, 872, 981, 622, 416, 624, 320, 71, 891, 65,
983],
[566, 302, 822, 651, 557, 542, 962, 668, 813,
564, 474, 351, 757, 454, 748, 63, 925, 612,
389, 297],
[946, 119, 214, 810, 762, 218, 502, 3, 429, 607,
197, 465, 126, 778, 887, 847, 487, 519, 304,
84],
[442, 477, 56, 239, 116, 18, 277, 482, 595, 442,
203, 989, 474, 240, 792, 795, 57, 710, 537,
814],
[53, 528, 760, 968, 553, 920, 449, 642, 913,
164, 142, 430, 374, 756, 484, 778, 935, 732,
652, 532],
[372, 903, 178, 880, 456, 257, 221, 977, 746,
807, 729, 207, 281, 9, 208, 994, 701, 260, 811,
925],
[377, 548, 971, 375, 954, 496, 764, 731, 346,
439, 38, 33, 65, 533, 830, 448, 465, 934, 44,
687],
[213, 763, 43, 223, 196, 717, 119, 184, 247,
740, 909, 305, 646, 450, 666, 173, 25, 546,
108, 783]]),
[772, 352, 920, 451, 295, 883, 38, 33, 562, 598, 383,
190, 999, 918, 657, 173, 310, 243, 749, 460, 962, 610,
473, 927, 34, 237, 831, 711, 174, 289, 318, 983, 297,
84, 814, 532, 925, 687, 783, 108, 546, 25, 173, 666,
450, 646, 305, 909, 740, 247, 184, 119, 717, 196, 223,
43, 763, 213, 377, 372, 53, 442, 946, 566, 962, 730,
26, 516, 724, 563, 579, 50, 648, 799, 575, 868, 561,
653, 879, 805, 379, 61, 961, 64, 493, 854, 923, 284,
452, 702, 792, 656, 695, 771, 373, 50, 659, 987, 46,
167, 176, 905, 178, 515, 65, 389, 304, 537, 652, 811,
44, 934, 465, 448, 830, 533, 65, 33, 38, 439, 346,
731, 764, 496, 954, 375, 971, 548, 903, 528, 477, 119,
302, 540, 222, 59, 527, 228, 899, 674, 371, 224, 80,
398, 964, 954, 690, 209, 562, 164, 793, 976, 475, 330,
814, 655, 710, 811, 570, 20, 798, 872, 633, 265, 510,
46, 834, 182, 819, 891, 612, 519, 710, 732, 260, 701,
994, 208, 9, 281, 207, 729, 807, 746, 977, 221, 257,
456, 880, 178, 760, 56, 214, 822, 368, 970, 337, 767,
499, 518, 687, 90, 554, 212, 607, 883, 605, 697, 849,
982, 661, 68, 1000, 250, 950, 796, 122, 601, 370, 622,
571, 304, 536, 42, 123, 245, 71, 925, 487, 57, 935,
778, 484, 756, 374, 430, 142, 164, 913, 642, 449, 920,
553, 968, 239, 810, 651, 132, 185, 14, 644, 474, 645,
200, 503, 600, 467, 884, 2, 280, 425, 105, 226, 557,
661, 814, 881, 477, 546, 801, 231, 378, 550, 482, 673,
320, 63, 847, 795, 792, 240, 474, 989, 203, 442, 595,
482, 277, 18, 116, 762, 557, 532, 610, 944, 171, 241,
484, 919, 987, 116, 255, 374, 300, 948, 323, 898, 296,
361, 455, 696, 888, 161, 34, 190, 487, 624, 748, 887,
778, 126, 465, 197, 607, 429, 3, 502, 218, 542, 594,
987, 322, 340, 346, 597, 843, 315, 736, 450, 74, 420,
214, 736, 519, 799, 137, 579, 123, 119, 416, 454, 757,
351, 474, 564, 813, 668, 962, 446, 177, 490, 400, 119,
183, 589, 888, 826, 717, 73, 576, 147, 873, 919, 795,
622, 981, 872, 326, 252, 193, 447, 974, 775, 150, 200,
443, 641, 64, 130, 636, 22, 172, 117, 885, 40, 654,
690, 324, 912, 995])
def test_snail_051(self):
self.assertEqual(snail([[697, 690, 45, 97, 974, 564, 828, 482, 459, 457,
247, 709, 849, 755, 636, 252, 174],
[878, 182, 418, 18, 296, 541, 463, 226, 390,
399, 86, 57, 352, 505, 880, 822, 596],
[312, 932, 870, 982, 37, 485, 327, 970, 614,
352, 485, 832, 443, 243, 116, 468, 437],
[283, 947, 1000, 474, 878, 672, 130, 269, 601,
862, 608, 896, 683, 65, 5, 7, 854],
[103, 886, 322, 406, 644, 252, 162, 590, 859,
997, 222, 316, 188, 581, 796, 969, 58],
[229, 54, 972, 517, 133, 800, 959, 577, 62, 954,
234, 40, 491, 22, 580, 862, 428],
[853, 197, 664, 207, 581, 868, 982, 935, 2, 818,
51, 950, 425, 673, 513, 507, 992],
[917, 788, 132, 184, 895, 383, 592, 175, 810,
711, 802, 86, 43, 192, 598, 515, 822],
[59, 393, 360, 66, 673, 904, 665, 258, 264, 39,
667, 780, 679, 563, 100, 30, 272],
[150, 367, 289, 44, 24, 249, 470, 487, 212, 802,
989, 338, 650, 813, 518, 64, 465],
[523, 744, 969, 535, 138, 123, 784, 424, 16,
638, 518, 692, 26, 253, 134, 334, 279],
[563, 345, 64, 97, 67, 966, 282, 163, 530, 69,
821, 159, 70, 657, 766, 312, 667],
[102, 543, 515, 548, 410, 417, 570, 834, 78,
297, 961, 164, 375, 429, 318, 636, 506],
[358, 824, 326, 229, 271, 557, 286, 19, 74, 375,
713, 292, 984, 730, 734, 281, 275],
[9, 812, 979, 24, 319, 707, 337, 99, 454, 499,
124, 291, 400, 809, 566, 290, 151],
[815, 554, 264, 774, 823, 520, 185, 11, 860,
938, 566, 15, 367, 729, 540, 623, 14],
[13, 808, 108, 848, 278, 568, 551, 248, 3, 814,
211, 204, 808, 452, 564, 477, 744]]),
[697, 690, 45, 97, 974, 564, 828, 482, 459, 457, 247,
709, 849, 755, 636, 252, 174, 596, 437, 854, 58, 428,
992, 822, 272, 465, 279, 667, 506, 275, 151, 14, 744,
477, 564, 452, 808, 204, 211, 814, 3, 248, 551, 568,
278, 848, 108, 808, 13, 815, 9, 358, 102, 563, 523,
150, 59, 917, 853, 229, 103, 283, 312, 878, 182, 418,
18, 296, 541, 463, 226, 390, 399, 86, 57, 352, 505,
880, 822, 468, 7, 969, 862, 507, 515, 30, 64, 334,
312, 636, 281, 290, 623, 540, 729, 367, 15, 566, 938,
860, 11, 185, 520, 823, 774, 264, 554, 812, 824, 543,
345, 744, 367, 393, 788, 197, 54, 886, 947, 932, 870,
982, 37, 485, 327, 970, 614, 352, 485, 832, 443, 243,
116, 5, 796, 580, 513, 598, 100, 518, 134, 766, 318,
734, 566, 809, 400, 291, 124, 499, 454, 99, 337, 707,
319, 24, 979, 326, 515, 64, 969, 289, 360, 132, 664,
972, 322, 1000, 474, 878, 672, 130, 269, 601, 862,
608, 896, 683, 65, 581, 22, 673, 192, 563, 813, 253,
657, 429, 730, 984, 292, 713, 375, 74, 19, 286, 557,
271, 229, 548, 97, 535, 44, 66, 184, 207, 517, 406,
644, 252, 162, 590, 859, 997, 222, 316, 188, 491, 425,
43, 679, 650, 26, 70, 375, 164, 961, 297, 78, 834,
570, 417, 410, 67, 138, 24, 673, 895, 581, 133, 800,
959, 577, 62, 954, 234, 40, 950, 86, 780, 338, 692,
159, 821, 69, 530, 163, 282, 966, 123, 249, 904, 383,
868, 982, 935, 2, 818, 51, 802, 667, 989, 518, 638,
16, 424, 784, 470, 665, 592, 175, 810, 711, 39, 802,
212, 487, 258, 264])
def test_snail_052(self):
self.assertEqual(snail(
[[20, 403, 806, 88, 823], [815, 182, 755, 134, 479],
[267, 452, 774, 27, 393], [680, 645, 139, 170, 600],
[345, 733, 858, 567, 786]]),
[20, 403, 806, 88, 823, 479, 393, 600, 786, 567, 858,
733, 345, 680, 267, 815, 182, 755, 134, 27, 170, 139,
645, 452, 774])
def test_snail_053(self):
self.assertEqual(snail([[196, 838, 193, 215, 121, 793, 196, 949, 361,
294, 910, 341, 538, 137, 777],
[733, 398, 687, 983, 435, 870, 229, 107, 407,
772, 68, 915, 209, 859, 737],
[212, 594, 822, 823, 492, 867, 788, 511, 744,
679, 68, 763, 663, 708, 835],
[207, 592, 305, 579, 378, 864, 922, 874, 424,
364, 237, 930, 250, 343, 516],
[817, 144, 317, 932, 246, 346, 160, 676, 51,
860, 889, 532, 902, 60, 300],
[132, 26, 383, 247, 812, 338, 673, 679, 88, 254,
502, 553, 165, 334, 186],
[59, 683, 976, 614, 311, 493, 17, 433, 171, 254,
478, 430, 6, 238, 216],
[70, 590, 861, 521, 494, 163, 91, 792, 848, 892,
525, 313, 845, 455, 222],
[471, 326, 678, 405, 72, 724, 69, 630, 206, 767,
730, 223, 860, 290, 477],
[848, 786, 184, 788, 614, 38, 213, 908, 258,
752, 927, 756, 780, 835, 260],
[240, 604, 469, 663, 791, 671, 405, 848, 731,
335, 905, 129, 239, 679, 516],
[28, 935, 400, 783, 206, 777, 836, 627, 32, 475,
736, 206, 469, 495, 543],
[271, 429, 63, 55, 402, 237, 622, 711, 443, 603,
307, 107, 892, 627, 360],
[265, 323, 177, 700, 4, 43, 396, 551, 646, 392,
735, 686, 784, 445, 603],
[807, 589, 84, 393, 478, 843, 317, 717, 678,
341, 257, 31, 498, 454, 260]]),
[196, 838, 193, 215, 121, 793, 196, 949, 361, 294, 910,
341, 538, 137, 777, 737, 835, 516, 300, 186, 216, 222,
477, 260, 516, 543, 360, 603, 260, 454, 498, 31, 257,
341, 678, 717, 317, 843, 478, 393, 84, 589, 807, 265,
271, 28, 240, 848, 471, 70, 59, 132, 817, 207, 212,
733, 398, 687, 983, 435, 870, 229, 107, 407, 772, 68,
915, 209, 859, 708, 343, 60, 334, 238, 455, 290, 835,
679, 495, 627, 445, 784, 686, 735, 392, 646, 551, 396,
43, 4, 700, 177, 323, 429, 935, 604, 786, 326, 590,
683, 26, 144, 592, 594, 822, 823, 492, 867, 788, 511,
744, 679, 68, 763, 663, 250, 902, 165, 6, 845, 860,
780, 239, 469, 892, 107, 307, 603, 443, 711, 622, 237,
402, 55, 63, 400, 469, 184, 678, 861, 976, 383, 317,
305, 579, 378, 864, 922, 874, 424, 364, 237, 930, 532,
553, 430, 313, 223, 756, 129, 206, 736, 475, 32, 627,
836, 777, 206, 783, 663, 788, 405, 521, 614, 247, 932,
246, 346, 160, 676, 51, 860, 889, 502, 478, 525, 730,
927, 905, 335, 731, 848, 405, 671, 791, 614, 72, 494,
311, 812, 338, 673, 679, 88, 254, 254, 892, 767, 752,
258, 908, 213, 38, 724, 163, 493, 17, 433, 171, 848,
206, 630, 69, 91, 792])
def test_snail_054(self):
self.assertEqual(snail(
[[680, 28, 574, 89, 186, 359], [110, 422, 21, 950, 715, 79],
[344, 688, 686, 338, 239, 840], [320, 321, 492, 418, 905, 628],
[684, 383, 704, 429, 457, 932],
[977, 861, 351, 408, 652, 42]]),
[680, 28, 574, 89, 186, 359, 79, 840, 628, 932, 42,
652, 408, 351, 861, 977, 684, 320, 344, 110, 422, 21,
950, 715, 239, 905, 457, 429, 704, 383, 321, 688, 686,
338, 418, 492])
def test_snail_055(self):
self.assertEqual(snail([[58, 407, 6, 598, 246, 664, 722, 382, 779, 444,
939, 572, 998, 857, 973, 783, 332, 192],
[664, 600, 824, 153, 433, 187, 978, 637, 740,
427, 135, 816, 393, 522, 351, 940, 896, 65],
[126, 399, 993, 472, 941, 3, 717, 884, 803, 688,
203, 219, 414, 589, 972, 999, 730, 672],
[43, 467, 608, 228, 380, 252, 318, 177, 251,
657, 281, 509, 714, 14, 49, 909, 934, 672],
[718, 635, 676, 235, 349, 435, 914, 136, 476,
562, 653, 497, 338, 58, 63, 716, 187, 48],
[530, 480, 937, 218, 277, 678, 434, 266, 334,
95, 270, 449, 631, 192, 309, 389, 564, 924],
[279, 697, 22, 866, 170, 218, 584, 387, 992,
727, 188, 755, 564, 367, 27, 250, 250, 999],
[561, 200, 392, 765, 31, 517, 431, 463, 28, 376,
49, 428, 336, 994, 311, 814, 27, 288],
[928, 846, 706, 704, 835, 565, 752, 294, 404,
519, 269, 311, 38, 914, 216, 74, 364, 83],
[415, 30, 240, 897, 143, 567, 250, 27, 872, 101,
345, 1000, 12, 47, 485, 188, 675, 861],
[837, 586, 441, 706, 658, 312, 12, 823, 414,
485, 975, 621, 788, 912, 923, 260, 611, 863],
[299, 973, 177, 461, 147, 265, 732, 9, 521, 211,
73, 300, 919, 316, 839, 956, 164, 950],
[289, 604, 206, 623, 94, 84, 544, 200, 955, 230,
186, 194, 852, 47, 586, 687, 559, 809],
[139, 266, 610, 674, 20, 856, 866, 721, 224, 61,
754, 599, 97, 827, 934, 724, 207, 281],
[59, 42, 40, 155, 346, 392, 602, 768, 428, 104,
285, 74, 913, 885, 258, 79, 366, 114],
[205, 16, 543, 155, 384, 415, 64, 375, 841, 387,
922, 909, 489, 846, 666, 378, 933, 908],
[389, 178, 394, 265, 728, 108, 599, 398, 569,
480, 159, 635, 255, 421, 260, 230, 855, 267],
[767, 767, 591, 319, 141, 136, 915, 262, 723,
932, 887, 891, 417, 101, 415, 178, 369, 179]]),
[58, 407, 6, 598, 246, 664, 722, 382, 779, 444, 939,
572, 998, 857, 973, 783, 332, 192, 65, 672, 672, 48,
924, 999, 288, 83, 861, 863, 950, 809, 281, 114, 908,
267, 179, 369, 178, 415, 101, 417, 891, 887, 932, 723,
262, 915, 136, 141, 319, 591, 767, 767, 389, 205, 59,
139, 289, 299, 837, 415, 928, 561, 279, 530, 718, 43,
126, 664, 600, 824, 153, 433, 187, 978, 637, 740, 427,
135, 816, 393, 522, 351, 940, 896, 730, 934, 187, 564,
250, 27, 364, 675, 611, 164, 559, 207, 366, 933, 855,
230, 260, 421, 255, 635, 159, 480, 569, 398, 599, 108,
728, 265, 394, 178, 16, 42, 266, 604, 973, 586, 30,
846, 200, 697, 480, 635, 467, 399, 993, 472, 941, 3,
717, 884, 803, 688, 203, 219, 414, 589, 972, 999, 909,
716, 389, 250, 814, 74, 188, 260, 956, 687, 724, 79,
378, 666, 846, 489, 909, 922, 387, 841, 375, 64, 415,
384, 155, 543, 40, 610, 206, 177, 441, 240, 706, 392,
22, 937, 676, 608, 228, 380, 252, 318, 177, 251, 657,
281, 509, 714, 14, 49, 63, 309, 27, 311, 216, 485,
923, 839, 586, 934, 258, 885, 913, 74, 285, 104, 428,
768, 602, 392, 346, 155, 674, 623, 461, 706, 897, 704,
765, 866, 218, 235, 349, 435, 914, 136, 476, 562, 653,
497, 338, 58, 192, 367, 994, 914, 47, 912, 316, 47,
827, 97, 599, 754, 61, 224, 721, 866, 856, 20, 94,
147, 658, 143, 835, 31, 170, 277, 678, 434, 266, 334,
95, 270, 449, 631, 564, 336, 38, 12, 788, 919, 852,
194, 186, 230, 955, 200, 544, 84, 265, 312, 567, 565,
517, 218, 584, 387, 992, 727, 188, 755, 428, 311,
1000, 621, 300, 73, 211, 521, 9, 732, 12, 250, 752,
431, 463, 28, 376, 49, 269, 345, 975, 485, 414, 823,
27, 294, 404, 519, 101, 872])
def test_snail_056(self):
self.assertEqual(snail([[990, 568, 232, 648, 150, 961, 543, 323, 970,
480, 247, 655, 234, 766],
[445, 283, 695, 616, 307, 693, 516, 267, 772,
614, 375, 354, 874, 139],
[56, 886, 202, 95, 850, 689, 279, 633, 473, 300,
210, 950, 264, 392],
[656, 90, 399, 263, 200, 764, 793, 125, 644,
341, 1, 41, 315, 577],
[703, 868, 597, 797, 445, 159, 899, 961, 421,
400, 865, 37, 475, 501],
[554, 14, 369, 351, 506, 615, 921, 242, 972,
625, 402, 906, 693, 251],
[727, 518, 523, 314, 40, 458, 338, 814, 508,
135, 515, 151, 288, 433],
[456, 696, 183, 605, 963, 882, 243, 721, 924,
276, 244, 341, 592, 746],
[275, 799, 613, 400, 259, 241, 12, 991, 844, 51,
532, 893, 933, 357],
[649, 500, 240, 430, 276, 488, 583, 197, 11,
646, 285, 552, 812, 520],
[654, 829, 189, 560, 146, 26, 397, 206, 605, 64,
132, 791, 264, 469],
[823, 419, 690, 389, 997, 854, 416, 97, 267,
499, 383, 250, 856, 510],
[863, 725, 195, 653, 568, 668, 761, 598, 379,
810, 674, 535, 350, 215],
[880, 492, 584, 822, 260, 81, 726, 737, 166,
379, 347, 66, 990, 381]]),
[990, 568, 232, 648, 150, 961, 543, 323, 970, 480, 247,
655, 234, 766, 139, 392, 577, 501, 251, 433, 746, 357,
520, 469, 510, 215, 381, 990, 66, 347, 379, 166, 737,
726, 81, 260, 822, 584, 492, 880, 863, 823, 654, 649,
275, 456, 727, 554, 703, 656, 56, 445, 283, 695, 616,
307, 693, 516, 267, 772, 614, 375, 354, 874, 264, 315,
475, 693, 288, 592, 933, 812, 264, 856, 350, 535, 674,
810, 379, 598, 761, 668, 568, 653, 195, 725, 419, 829,
500, 799, 696, 518, 14, 868, 90, 886, 202, 95, 850,
689, 279, 633, 473, 300, 210, 950, 41, 37, 906, 151,
341, 893, 552, 791, 250, 383, 499, 267, 97, 416, 854,
997, 389, 690, 189, 240, 613, 183, 523, 369, 597, 399,
263, 200, 764, 793, 125, 644, 341, 1, 865, 402, 515,
244, 532, 285, 132, 64, 605, 206, 397, 26, 146, 560,
430, 400, 605, 314, 351, 797, 445, 159, 899, 961, 421,
400, 625, 135, 276, 51, 646, 11, 197, 583, 488, 276,
259, 963, 40, 506, 615, 921, 242, 972, 508, 924, 844,
991, 12, 241, 882, 458, 338, 814, 721, 243])
def test_snail_057(self):
self.assertEqual(snail([[40, 406, 36, 505, 634, 102, 702, 130],
[441, 809, 470, 914, 796, 852, 306, 978],
[919, 501, 158, 558, 536, 141, 229, 678],
[841, 688, 115, 374, 638, 735, 687, 358],
[432, 204, 983, 343, 5, 717, 999, 912],
[380, 253, 737, 263, 790, 515, 817, 270],
[298, 335, 347, 644, 356, 931, 594, 954],
[977, 832, 618, 875, 547, 995, 47, 183]]),
[40, 406, 36, 505, 634, 102, 702, 130, 978, 678, 358,
912, 270, 954, 183, 47, 995, 547, 875, 618, 832, 977,
298, 380, 432, 841, 919, 441, 809, 470, 914, 796, 852,
306, 229, 687, 999, 817, 594, 931, 356, 644, 347, 335,
253, 204, 688, 501, 158, 558, 536, 141, 735, 717, 515,
790, 263, 737, 983, 115, 374, 638, 5, 343])
def test_snail_058(self):
self.assertEqual(
snail([[935, 756, 641], [827, 444, 751], [166, 61, 775]]),
[935, 756, 641, 751, 775, 61, 166, 827, 444])
def test_snail_059(self):
self.assertEqual(snail([[21, 182], [507, 380]]), [21, 182, 380, 507])
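    # The expected lists above and below all follow a clockwise, outside-in
    # spiral ("snail") traversal of the input matrix. As documentation only,
    # here is a minimal reference sketch of that contract (an assumption
    # about what snail computes, not its actual implementation):
    #
    #     def snail_reference(matrix):
    #         result = []
    #         while matrix:
    #             result += matrix.pop(0)  # take the top row, left to right
    #             # rotate the remainder counter-clockwise so the old right
    #             # column becomes the new top row (note: mutates its input)
    #             matrix = [list(row) for row in zip(*matrix)][::-1]
    #         return result
    #
    # e.g. snail_reference([[21, 182], [507, 380]]) == [21, 182, 380, 507],
    # matching test_snail_059 above.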
def test_snail_060(self):
self.assertEqual(snail(
[[535, 230, 195, 719, 377], [95, 348, 60, 911, 645],
[654, 459, 570, 244, 205], [728, 622, 509, 484, 25],
[253, 883, 275, 80, 276]]),
[535, 230, 195, 719, 377, 645, 205, 25, 276, 80, 275,
883, 253, 728, 654, 95, 348, 60, 911, 244, 484, 509,
622, 459, 570])
def test_snail_061(self):
self.assertEqual(snail([[785, 961, 393, 614, 388, 455, 610, 908, 516,
364, 872, 655, 842, 764, 246],
[747, 313, 906, 923, 915, 201, 951, 533, 862,
575, 735, 729, 14, 730, 25],
[170, 790, 377, 815, 635, 93, 45, 31, 555, 762,
119, 935, 885, 180, 891],
[806, 414, 178, 167, 636, 597, 562, 768, 302,
74, 481, 549, 962, 118, 40],
[742, 767, 826, 738, 501, 914, 693, 644, 409,
81, 185, 361, 197, 649, 124],
[232, 878, 30, 498, 260, 724, 650, 544, 388,
384, 2, 159, 714, 198, 532],
[375, 157, 579, 641, 683, 263, 576, 500, 177,
402, 659, 489, 438, 839, 314],
[834, 355, 434, 66, 333, 91, 207, 2, 651, 340,
505, 175, 443, 446, 740],
[688, 517, 275, 811, 515, 461, 833, 811, 849,
406, 79, 631, 431, 108, 122],
[252, 712, 973, 15, 536, 36, 55, 901, 503, 701,
520, 690, 918, 759, 217],
[350, 784, 946, 63, 801, 911, 233, 411, 116,
355, 973, 352, 784, 50, 352],
[242, 23, 728, 771, 881, 975, 565, 498, 405,
694, 441, 621, 741, 586, 48],
[551, 928, 529, 150, 280, 388, 741, 717, 699,
211, 677, 512, 900, 416, 689],
[404, 225, 869, 308, 827, 93, 424, 276, 775,
180, 569, 51, 710, 847, 792],
[771, 527, 737, 805, 705, 766, 900, 757, 994,
640, 335, 733, 588, 921, 265]]),
[785, 961, 393, 614, 388, 455, 610, 908, 516, 364, 872,
655, 842, 764, 246, 25, 891, 40, 124, 532, 314, 740,
122, 217, 352, 48, 689, 792, 265, 921, 588, 733, 335,
640, 994, 757, 900, 766, 705, 805, 737, 527, 771, 404,
551, 242, 350, 252, 688, 834, 375, 232, 742, 806, 170,
747, 313, 906, 923, 915, 201, 951, 533, 862, 575, 735,
729, 14, 730, 180, 118, 649, 198, 839, 446, 108, 759,
50, 586, 416, 847, 710, 51, 569, 180, 775, 276, 424,
93, 827, 308, 869, 225, 928, 23, 784, 712, 517, 355,
157, 878, 767, 414, 790, 377, 815, 635, 93, 45, 31,
555, 762, 119, 935, 885, 962, 197, 714, 438, 443, 431,
918, 784, 741, 900, 512, 677, 211, 699, 717, 741, 388,
280, 150, 529, 728, 946, 973, 275, 434, 579, 30, 826,
178, 167, 636, 597, 562, 768, 302, 74, 481, 549, 361,
159, 489, 175, 631, 690, 352, 621, 441, 694, 405, 498,
565, 975, 881, 771, 63, 15, 811, 66, 641, 498, 738,
501, 914, 693, 644, 409, 81, 185, 2, 659, 505, 79,
520, 973, 355, 116, 411, 233, 911, 801, 536, 515, 333,
683, 260, 724, 650, 544, 388, 384, 402, 340, 406, 701,
503, 901, 55, 36, 461, 91, 263, 576, 500, 177, 651,
849, 811, 833, 207, 2])
def test_snail_062(self):
self.assertEqual(snail([[353, 85, 930, 216], [626, 576, 495, 991],
[581, 192, 891, 709], [350, 925, 349, 502]]),
[353, 85, 930, 216, 991, 709, 502, 349, 925, 350, 581,
626, 576, 495, 891, 192])
def test_snail_063(self):
self.assertEqual(snail([[540]]), [540])
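    # Degenerate case: a 1x1 matrix spirals to its single element. Every
    # generated case in this section uses a square n-by-n matrix, so
    # non-square inputs are presumably outside snail's tested contract here.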
def test_snail_064(self):
self.assertEqual(snail(
[[323, 110, 157, 740, 114, 704, 774, 106, 268, 508, 566, 474],
[399, 944, 938, 434, 715, 475, 929, 705, 940, 246, 787, 528],
[807, 311, 393, 557, 372, 756, 260, 12, 811, 4, 368, 282],
[618, 918, 279, 23, 755, 16, 141, 214, 837, 333, 916, 937],
[353, 304, 904, 659, 345, 217, 882, 563, 845, 34, 318, 763],
[420, 645, 620, 910, 271, 243, 705, 909, 841, 907, 954, 745],
[114, 445, 310, 574, 25, 779, 262, 381, 319, 231, 460, 811],
[768, 163, 698, 307, 647, 712, 617, 700, 549, 215, 645, 839],
[779, 475, 357, 508, 819, 672, 250, 228, 602, 747, 734, 598],
[217, 834, 271, 442, 745, 526, 141, 571, 331, 715, 937, 24],
[159, 231, 655, 435, 450, 532, 913, 91, 527, 105, 40, 294],
[654, 903, 196, 676, 451, 502, 602, 539, 429, 795, 646, 117]]),
[323, 110, 157, 740, 114, 704, 774, 106, 268, 508, 566,
474, 528, 282, 937, 763, 745, 811, 839, 598, 24, 294,
117, 646, 795, 429, 539, 602, 502, 451, 676, 196, 903,
654, 159, 217, 779, 768, 114, 420, 353, 618, 807, 399,
944, 938, 434, 715, 475, 929, 705, 940, 246, 787, 368,
916, 318, 954, 460, 645, 734, 937, 40, 105, 527, 91,
913, 532, 450, 435, 655, 231, 834, 475, 163, 445, 645,
304, 918, 311, 393, 557, 372, 756, 260, 12, 811, 4,
333, 34, 907, 231, 215, 747, 715, 331, 571, 141, 526,
745, 442, 271, 357, 698, 310, 620, 904, 279, 23, 755,
16, 141, 214, 837, 845, 841, 319, 549, 602, 228, 250,
672, 819, 508, 307, 574, 910, 659, 345, 217, 882, 563,
909, 381, 700, 617, 712, 647, 25, 271, 243, 705, 262,
779])
def test_snail_065(self):
self.assertEqual(snail([[986, 240, 922, 622, 119, 802, 582, 105, 664,
791, 735, 699, 470, 252, 698, 185, 108, 345,
492, 923],
[240, 476, 677, 30, 653, 350, 500, 837, 871,
723, 277, 232, 913, 969, 363, 209, 806, 50,
395, 85],
[685, 728, 491, 175, 714, 445, 721, 940, 935,
357, 321, 462, 884, 97, 210, 183, 804, 892,
424, 518],
[43, 751, 907, 556, 279, 812, 613, 69, 915, 20,
19, 446, 737, 739, 400, 713, 203, 94, 294,
335],
[48, 183, 597, 479, 293, 803, 657, 501, 358,
165, 14, 999, 153, 35, 638, 561, 25, 565, 891,
543],
[918, 781, 555, 285, 954, 969, 636, 883, 200,
883, 426, 521, 528, 495, 964, 773, 799, 545,
116, 512],
[279, 668, 405, 945, 213, 573, 712, 99, 713,
688, 492, 589, 177, 718, 651, 252, 843, 376,
657, 428],
[332, 282, 54, 321, 724, 679, 50, 698, 727, 252,
661, 306, 790, 269, 958, 673, 742, 806, 310,
568],
[785, 236, 107, 886, 498, 650, 569, 967, 185,
57, 448, 25, 101, 787, 194, 464, 508, 925, 944,
531],
[141, 283, 763, 387, 423, 348, 93, 286, 448, 71,
745, 231, 949, 228, 838, 717, 673, 24, 42,
634],
[861, 730, 300, 615, 603, 945, 225, 319, 418,
919, 514, 27, 884, 628, 229, 87, 193, 140, 692,
508],
[568, 394, 305, 601, 237, 948, 275, 480, 33,
277, 821, 38, 313, 236, 216, 27, 650, 972, 284,
554],
[613, 892, 806, 441, 975, 777, 615, 741, 534,
43, 203, 991, 405, 302, 447, 313, 800, 345, 54,
670],
[689, 699, 296, 498, 793, 199, 282, 489, 224,
839, 870, 409, 686, 935, 196, 2, 755, 257, 246,
712],
[328, 236, 205, 980, 365, 90, 865, 401, 528,
368, 802, 971, 48, 218, 30, 655, 308, 690, 285,
387],
[215, 929, 894, 328, 40, 718, 33, 112, 729, 609,
598, 956, 838, 252, 727, 798, 486, 797, 65,
758],
[162, 746, 960, 376, 695, 473, 664, 960, 948,
375, 354, 980, 614, 540, 300, 538, 822, 816,
117, 371],
[343, 801, 497, 285, 121, 244, 913, 709, 271,
252, 301, 557, 115, 678, 161, 389, 169, 38,
765, 240],
[815, 108, 350, 304, 736, 991, 769, 383, 399,
621, 397, 798, 382, 738, 344, 280, 479, 255,
398, 280],
[411, 702, 791, 603, 849, 743, 594, 468, 396,
752, 297, 515, 426, 426, 806, 385, 878, 815,
840, 50]]),
[986, 240, 922, 622, 119, 802, 582, 105, 664, 791, 735,
699, 470, 252, 698, 185, 108, 345, 492, 923, 85, 518,
335, 543, 512, 428, 568, 531, 634, 508, 554, 670, 712,
387, 758, 371, 240, 280, 50, 840, 815, 878, 385, 806,
426, 426, 515, 297, 752, 396, 468, 594, 743, 849, 603,
791, 702, 411, 815, 343, 162, 215, 328, 689, 613, 568,
861, 141, 785, 332, 279, 918, 48, 43, 685, 240, 476,
677, 30, 653, 350, 500, 837, 871, 723, 277, 232, 913,
969, 363, 209, 806, 50, 395, 424, 294, 891, 116, 657,
310, 944, 42, 692, 284, 54, 246, 285, 65, 117, 765,
398, 255, 479, 280, 344, 738, 382, 798, 397, 621, 399,
383, 769, 991, 736, 304, 350, 108, 801, 746, 929, 236,
699, 892, 394, 730, 283, 236, 282, 668, 781, 183, 751,
728, 491, 175, 714, 445, 721, 940, 935, 357, 321, 462,
884, 97, 210, 183, 804, 892, 94, 565, 545, 376, 806,
925, 24, 140, 972, 345, 257, 690, 797, 816, 38, 169,
389, 161, 678, 115, 557, 301, 252, 271, 709, 913, 244,
121, 285, 497, 960, 894, 205, 296, 806, 305, 300, 763,
107, 54, 405, 555, 597, 907, 556, 279, 812, 613, 69,
915, 20, 19, 446, 737, 739, 400, 713, 203, 25, 799,
843, 742, 508, 673, 193, 650, 800, 755, 308, 486, 822,
538, 300, 540, 614, 980, 354, 375, 948, 960, 664, 473,
695, 376, 328, 980, 498, 441, 601, 615, 387, 886, 321,
945, 285, 479, 293, 803, 657, 501, 358, 165, 14, 999,
153, 35, 638, 561, 773, 252, 673, 464, 717, 87, 27,
313, 2, 655, 798, 727, 252, 838, 956, 598, 609, 729,
112, 33, 718, 40, 365, 793, 975, 237, 603, 423, 498,
724, 213, 954, 969, 636, 883, 200, 883, 426, 521, 528,
495, 964, 651, 958, 194, 838, 229, 216, 447, 196, 30,
218, 48, 971, 802, 368, 528, 401, 865, 90, 199, 777,
948, 945, 348, 650, 679, 573, 712, 99, 713, 688, 492,
589, 177, 718, 269, 787, 228, 628, 236, 302, 935, 686,
409, 870, 839, 224, 489, 282, 615, 275, 225, 93, 569,
50, 698, 727, 252, 661, 306, 790, 101, 949, 884, 313,
405, 991, 203, 43, 534, 741, 480, 319, 286, 967, 185,
57, 448, 25, 231, 27, 38, 821, 277, 33, 418, 448, 71,
745, 514, 919])
def test_snail_066(self):
self.assertEqual(snail([[779, 390, 935, 443, 441, 932, 526, 627, 761,
633, 708, 770, 21, 872],
[754, 424, 961, 78, 264, 512, 496, 963, 781, 96,
127, 102, 443, 432],
[462, 403, 123, 808, 836, 958, 574, 126, 686,
524, 508, 557, 61, 901],
[6, 257, 831, 713, 790, 660, 2, 775, 268, 337,
75, 804, 357, 961],
[604, 802, 2, 87, 101, 475, 192, 722, 345, 173,
926, 171, 170, 293],
[12, 776, 242, 639, 641, 929, 898, 119, 5, 501,
358, 518, 440, 395],
[635, 821, 94, 345, 146, 460, 246, 555, 618,
331, 959, 907, 717, 521],
[669, 178, 275, 457, 549, 963, 216, 69, 228,
722, 444, 914, 58, 643],
[826, 947, 674, 252, 707, 10, 968, 492, 418,
191, 393, 595, 278, 540],
[797, 490, 818, 461, 131, 884, 421, 935, 299,
970, 715, 75, 516, 507],
[546, 784, 474, 248, 573, 366, 638, 696, 927,
892, 508, 311, 606, 632],
[846, 536, 776, 553, 586, 170, 327, 24, 828,
282, 927, 787, 202, 550],
[739, 628, 146, 910, 843, 244, 23, 430, 521,
810, 923, 467, 875, 938],
[222, 359, 852, 608, 514, 865, 674, 391, 344,
161, 69, 418, 188, 375]]),
[779, 390, 935, 443, 441, 932, 526, 627, 761, 633, 708,
770, 21, 872, 432, 901, 961, 293, 395, 521, 643, 540,
507, 632, 550, 938, 375, 188, 418, 69, 161, 344, 391,
674, 865, 514, 608, 852, 359, 222, 739, 846, 546, 797,
826, 669, 635, 12, 604, 6, 462, 754, 424, 961, 78,
264, 512, 496, 963, 781, 96, 127, 102, 443, 61, 357,
170, 440, 717, 58, 278, 516, 606, 202, 875, 467, 923,
810, 521, 430, 23, 244, 843, 910, 146, 628, 536, 784,
490, 947, 178, 821, 776, 802, 257, 403, 123, 808, 836,
958, 574, 126, 686, 524, 508, 557, 804, 171, 518, 907,
914, 595, 75, 311, 787, 927, 282, 828, 24, 327, 170,
586, 553, 776, 474, 818, 674, 275, 94, 242, 2, 831,
713, 790, 660, 2, 775, 268, 337, 75, 926, 358, 959,
444, 393, 715, 508, 892, 927, 696, 638, 366, 573, 248,
461, 252, 457, 345, 639, 87, 101, 475, 192, 722, 345,
173, 501, 331, 722, 191, 970, 299, 935, 421, 884, 131,
707, 549, 146, 641, 929, 898, 119, 5, 618, 228, 418,
492, 968, 10, 963, 460, 246, 555, 69, 216])
def test_snail_067(self):
self.assertEqual(snail(
[[771, 906, 164, 502, 151], [560, 297, 260, 485, 632],
[3, 884, 664, 507, 325], [639, 813, 354, 560, 226],
[274, 555, 978, 288, 756]]),
[771, 906, 164, 502, 151, 632, 325, 226, 756, 288, 978,
555, 274, 639, 3, 560, 297, 260, 485, 507, 560, 354,
813, 884, 664])
def test_snail_068(self):
self.assertEqual(snail(
[[254, 173, 160, 399, 691, 434], [849, 456, 758, 273, 917, 347],
[653, 544, 515, 483, 827, 638], [145, 862, 862, 170, 518, 727],
[702, 527, 461, 204, 727, 749], [478, 342, 652, 960, 6, 699]]),
[254, 173, 160, 399, 691, 434, 347, 638, 727, 749, 699,
6, 960, 652, 342, 478, 702, 145, 653, 849, 456, 758,
273, 917, 827, 518, 727, 204, 461, 527, 862, 544, 515,
483, 170, 862])
def test_snail_069(self):
self.assertEqual(snail(
[[226, 704, 457, 816, 131, 280], [360, 476, 612, 26, 934, 390],
[456, 641, 669, 251, 211, 954], [152, 516, 380, 865, 617, 824],
[887, 422, 509, 185, 322, 688],
[593, 21, 364, 475, 965, 533]]),
[226, 704, 457, 816, 131, 280, 390, 954, 824, 688, 533,
965, 475, 364, 21, 593, 887, 152, 456, 360, 476, 612,
26, 934, 211, 617, 322, 185, 509, 422, 516, 641, 669,
251, 865, 380])
def test_snail_070(self):
self.assertEqual(snail([[721, 438], [320, 489]]), [721, 438, 489, 320])
def test_snail_071(self):
self.assertEqual(snail(
[[27, 894, 555, 256, 430, 208], [554, 476, 381, 291, 303, 274],
[129, 801, 899, 605, 365, 252], [764, 675, 459, 554, 426, 85],
[816, 231, 149, 674, 303, 499],
[305, 116, 340, 865, 168, 954]]),
[27, 894, 555, 256, 430, 208, 274, 252, 85, 499, 954,
168, 865, 340, 116, 305, 816, 764, 129, 554, 476, 381,
291, 303, 365, 426, 303, 674, 149, 231, 675, 801, 899,
605, 554, 459])
def test_snail_072(self):
self.assertEqual(snail([[572, 40, 328, 370, 500, 359, 678, 378, 538,
858, 934, 597, 558, 719, 33, 895, 744, 664,
144, 942],
[866, 855, 310, 833, 63, 797, 898, 803, 651,
882, 732, 735, 675, 3, 262, 223, 173, 342, 85,
611],
[899, 287, 100, 560, 479, 542, 148, 688, 447,
575, 738, 640, 312, 25, 231, 757, 683, 260,
858, 346],
[1000, 625, 581, 457, 792, 537, 711, 735, 189,
665, 68, 774, 132, 208, 510, 10, 797, 727, 525,
799],
[465, 650, 818, 258, 110, 531, 816, 811, 259,
429, 56, 497, 701, 350, 938, 112, 318, 260, 88,
597],
[505, 112, 776, 421, 332, 521, 824, 55, 871,
114, 715, 725, 882, 579, 481, 425, 59, 382,
959, 807],
[258, 37, 320, 581, 567, 950, 77, 948, 540, 28,
560, 911, 307, 508, 163, 679, 687, 37, 246,
838],
[782, 59, 179, 588, 996, 675, 37, 425, 607, 688,
629, 34, 975, 885, 188, 852, 343, 841, 952,
103],
[432, 454, 425, 198, 425, 305, 909, 997, 263,
813, 666, 922, 619, 942, 262, 386, 730, 197,
664, 643],
[536, 668, 164, 476, 477, 667, 875, 990, 655,
985, 824, 684, 263, 111, 82, 828, 657, 131,
819, 210],
[943, 974, 501, 727, 825, 510, 913, 133, 947,
301, 117, 283, 952, 643, 787, 24, 345, 104,
323, 525],
[461, 589, 200, 794, 521, 39, 167, 52, 836, 477,
437, 507, 264, 717, 663, 347, 623, 669, 262,
34],
[370, 556, 920, 122, 82, 952, 628, 124, 245, 87,
213, 238, 792, 388, 47, 531, 918, 634, 368,
312],
[635, 472, 7, 883, 622, 910, 757, 959, 318, 933,
887, 877, 242, 418, 571, 610, 671, 745, 303,
14],
[128, 96, 532, 485, 66, 665, 373, 829, 848, 850,
124, 732, 618, 724, 34, 686, 851, 832, 407,
75],
[38, 836, 222, 635, 388, 936, 793, 187, 803,
227, 561, 481, 635, 9, 437, 922, 86, 272, 439,
452],
[303, 667, 784, 818, 908, 142, 768, 342, 350,
959, 210, 494, 592, 918, 494, 108, 795, 617,
169, 142],
[344, 618, 79, 320, 667, 726, 960, 900, 525,
776, 549, 292, 938, 390, 975, 423, 555, 963,
965, 440],
[220, 497, 705, 449, 161, 225, 73, 164, 796,
438, 978, 623, 304, 917, 584, 118, 700, 222,
476, 825],
[896, 392, 14, 489, 226, 742, 932, 303, 767,
487, 859, 637, 327, 399, 804, 304, 922, 119,
687, 755]]),
[572, 40, 328, 370, 500, 359, 678, 378, 538, 858, 934,
597, 558, 719, 33, 895, 744, 664, 144, 942, 611, 346,
799, 597, 807, 838, 103, 643, 210, 525, 34, 312, 14,
75, 452, 142, 440, 825, 755, 687, 119, 922, 304, 804,
399, 327, 637, 859, 487, 767, 303, 932, 742, 226, 489,
14, 392, 896, 220, 344, 303, 38, 128, 635, 370, 461,
943, 536, 432, 782, 258, 505, 465, 1000, 899, 866,
855, 310, 833, 63, 797, 898, 803, 651, 882, 732, 735,
675, 3, 262, 223, 173, 342, 85, 858, 525, 88, 959,
246, 952, 664, 819, 323, 262, 368, 303, 407, 439, 169,
965, 476, 222, 700, 118, 584, 917, 304, 623, 978, 438,
796, 164, 73, 225, 161, 449, 705, 497, 618, 667, 836,
96, 472, 556, 589, 974, 668, 454, 59, 37, 112, 650,
625, 287, 100, 560, 479, 542, 148, 688, 447, 575, 738,
640, 312, 25, 231, 757, 683, 260, 727, 260, 382, 37,
841, 197, 131, 104, 669, 634, 745, 832, 272, 617, 963,
555, 423, 975, 390, 938, 292, 549, 776, 525, 900, 960,
726, 667, 320, 79, 784, 222, 532, 7, 920, 200, 501,
164, 425, 179, 320, 776, 818, 581, 457, 792, 537, 711,
735, 189, 665, 68, 774, 132, 208, 510, 10, 797, 318,
59, 687, 343, 730, 657, 345, 623, 918, 671, 851, 86,
795, 108, 494, 918, 592, 494, 210, 959, 350, 342, 768,
142, 908, 818, 635, 485, 883, 122, 794, 727, 476, 198,
588, 581, 421, 258, 110, 531, 816, 811, 259, 429, 56,
497, 701, 350, 938, 112, 425, 679, 852, 386, 828, 24,
347, 531, 610, 686, 922, 437, 9, 635, 481, 561, 227,
803, 187, 793, 936, 388, 66, 622, 82, 521, 825, 477,
425, 996, 567, 332, 521, 824, 55, 871, 114, 715, 725,
882, 579, 481, 163, 188, 262, 82, 787, 663, 47, 571,
34, 724, 618, 732, 124, 850, 848, 829, 373, 665, 910,
952, 39, 510, 667, 305, 675, 950, 77, 948, 540, 28,
560, 911, 307, 508, 885, 942, 111, 643, 717, 388, 418,
242, 877, 887, 933, 318, 959, 757, 628, 167, 913, 875,
909, 37, 425, 607, 688, 629, 34, 975, 619, 263, 952,
264, 792, 238, 213, 87, 245, 124, 52, 133, 990, 997,
263, 813, 666, 922, 684, 283, 507, 437, 477, 836, 947,
655, 985, 824, 117, 301])
def test_snail_073(self):
self.assertEqual(snail([[785, 373, 215, 440], [948, 869, 882, 65],
[236, 227, 508, 450], [46, 69, 45, 237]]),
[785, 373, 215, 440, 65, 450, 237, 45, 69, 46, 236,
948, 869, 882, 508, 227])
def test_snail_074(self):
self.assertEqual(snail([[319, 115, 440, 26, 579, 418, 402, 165, 517,
784, 878, 694, 93, 128, 44],
[852, 607, 878, 871, 517, 532, 992, 374, 11, 98,
518, 711, 147, 227, 506],
[201, 469, 258, 872, 604, 990, 830, 450, 143,
19, 552, 694, 210, 758, 103],
[716, 320, 227, 464, 249, 476, 868, 589, 739,
445, 2, 718, 961, 95, 220],
[928, 536, 957, 213, 258, 403, 998, 925, 940,
860, 860, 119, 145, 74, 928],
[516, 421, 697, 192, 26, 251, 294, 643, 476,
959, 442, 826, 31, 582, 629],
[542, 446, 841, 808, 696, 30, 179, 795, 269,
917, 643, 306, 284, 20, 840],
[513, 218, 830, 912, 862, 388, 741, 525, 630,
405, 631, 383, 531, 318, 426],
[434, 565, 697, 621, 308, 675, 252, 683, 842,
26, 133, 402, 692, 674, 531],
[351, 597, 455, 57, 498, 523, 349, 688, 114,
881, 103, 692, 829, 40, 375],
[630, 400, 244, 600, 467, 618, 505, 435, 821,
670, 896, 248, 743, 83, 784],
[349, 703, 796, 713, 477, 203, 15, 468, 921,
837, 517, 134, 641, 899, 504],
[690, 699, 610, 990, 139, 296, 914, 196, 333,
876, 29, 979, 869, 355, 472],
[187, 787, 932, 687, 662, 625, 759, 371, 438,
893, 838, 876, 442, 442, 697],
[454, 871, 70, 541, 598, 597, 402, 472, 327,
160, 913, 735, 518, 770, 635]]),
[319, 115, 440, 26, 579, 418, 402, 165, 517, 784, 878,
694, 93, 128, 44, 506, 103, 220, 928, 629, 840, 426,
531, 375, 784, 504, 472, 697, 635, 770, 518, 735, 913,
160, 327, 472, 402, 597, 598, 541, 70, 871, 454, 187,
690, 349, 630, 351, 434, 513, 542, 516, 928, 716, 201,
852, 607, 878, 871, 517, 532, 992, 374, 11, 98, 518,
711, 147, 227, 758, 95, 74, 582, 20, 318, 674, 40, 83,
899, 355, 442, 442, 876, 838, 893, 438, 371, 759, 625,
662, 687, 932, 787, 699, 703, 400, 597, 565, 218, 446,
421, 536, 320, 469, 258, 872, 604, 990, 830, 450, 143,
19, 552, 694, 210, 961, 145, 31, 284, 531, 692, 829,
743, 641, 869, 979, 29, 876, 333, 196, 914, 296, 139,
990, 610, 796, 244, 455, 697, 830, 841, 697, 957, 227,
464, 249, 476, 868, 589, 739, 445, 2, 718, 119, 826,
306, 383, 402, 692, 248, 134, 517, 837, 921, 468, 15,
203, 477, 713, 600, 57, 621, 912, 808, 192, 213, 258,
403, 998, 925, 940, 860, 860, 442, 643, 631, 133, 103,
896, 670, 821, 435, 505, 618, 467, 498, 308, 862, 696,
26, 251, 294, 643, 476, 959, 917, 405, 26, 881, 114,
688, 349, 523, 675, 388, 30, 179, 795, 269, 630, 842,
683, 252, 741, 525])
def test_snail_075(self):
self.assertEqual(snail([[117, 708, 570, 27, 409, 596, 355, 42, 480],
[874, 320, 499, 489, 767, 179, 912, 813, 855],
[929, 737, 403, 431, 219, 710, 107, 450, 61],
[860, 446, 119, 88, 448, 553, 833, 293, 803],
[868, 141, 930, 398, 882, 135, 585, 348, 890],
[506, 859, 833, 31, 808, 663, 384, 341, 457],
[864, 183, 143, 954, 427, 680, 940, 411, 585],
[995, 374, 784, 568, 200, 777, 468, 69, 902],
[206, 588, 712, 813, 721, 746, 11, 284, 45]]),
[117, 708, 570, 27, 409, 596, 355, 42, 480, 855, 61,
803, 890, 457, 585, 902, 45, 284, 11, 746, 721, 813,
712, 588, 206, 995, 864, 506, 868, 860, 929, 874, 320,
499, 489, 767, 179, 912, 813, 450, 293, 348, 341, 411,
69, 468, 777, 200, 568, 784, 374, 183, 859, 141, 446,
737, 403, 431, 219, 710, 107, 833, 585, 384, 940, 680,
427, 954, 143, 833, 930, 119, 88, 448, 553, 135, 663,
808, 31, 398, 882])
def test_snail_076(self):
self.assertEqual(snail([[385, 928, 460, 539, 984, 516, 609, 769, 825,
857, 819, 422, 989, 319, 60, 450, 495, 64,
624],
[604, 59, 272, 470, 997, 980, 563, 632, 353,
366, 750, 740, 395, 978, 995, 848, 72, 820,
410],
[703, 427, 351, 469, 685, 297, 362, 947, 998,
434, 896, 773, 441, 562, 785, 704, 529, 471,
798],
[564, 846, 756, 916, 435, 184, 785, 930, 349,
161, 253, 365, 82, 976, 499, 461, 398, 278,
331],
[529, 354, 643, 338, 772, 629, 726, 296, 672,
282, 268, 741, 330, 272, 217, 188, 754, 875,
58],
[869, 349, 328, 585, 442, 812, 645, 854, 317,
437, 314, 343, 571, 202, 534, 22, 307, 874,
859],
[161, 579, 865, 703, 276, 889, 374, 792, 123,
668, 970, 737, 846, 416, 704, 204, 660, 223,
509],
[243, 645, 359, 427, 636, 193, 663, 857, 712,
510, 367, 862, 352, 715, 811, 986, 292, 391,
475],
[645, 767, 117, 907, 321, 906, 592, 508, 647,
289, 307, 519, 425, 659, 219, 459, 537, 505,
328],
[743, 74, 374, 226, 356, 28, 5, 215, 459, 232,
18, 123, 308, 277, 490, 345, 68, 763, 93],
[21, 927, 770, 760, 75, 751, 387, 686, 366, 108,
327, 196, 603, 676, 337, 59, 799, 41, 699],
[777, 779, 755, 647, 718, 144, 749, 35, 282,
233, 552, 936, 391, 140, 877, 874, 472, 86,
836],
[966, 63, 26, 21, 595, 325, 521, 636, 481, 485,
664, 897, 151, 132, 969, 967, 856, 953, 425],
[352, 849, 157, 520, 272, 9, 934, 441, 261, 380,
868, 260, 375, 547, 699, 924, 794, 617, 222],
[559, 901, 435, 537, 620, 779, 708, 848, 903,
701, 570, 115, 114, 342, 57, 878, 278, 697,
629],
[541, 502, 381, 168, 792, 268, 21, 59, 581, 691,
695, 906, 616, 808, 366, 804, 36, 210, 295],
[662, 142, 571, 207, 905, 913, 414, 341, 956,
602, 115, 78, 688, 67, 148, 92, 930, 68, 258],
[301, 970, 837, 91, 679, 574, 119, 324, 554,
233, 617, 382, 876, 516, 380, 584, 516, 911,
331],
[894, 637, 193, 54, 14, 503, 221, 127, 118, 565,
234, 828, 753, 97, 257, 619, 811, 803, 934]]),
[385, 928, 460, 539, 984, 516, 609, 769, 825, 857, 819,
422, 989, 319, 60, 450, 495, 64, 624, 410, 798, 331,
58, 859, 509, 475, 328, 93, 699, 836, 425, 222, 629,
295, 258, 331, 934, 803, 811, 619, 257, 97, 753, 828,
234, 565, 118, 127, 221, 503, 14, 54, 193, 637, 894,
301, 662, 541, 559, 352, 966, 777, 21, 743, 645, 243,
161, 869, 529, 564, 703, 604, 59, 272, 470, 997, 980,
563, 632, 353, 366, 750, 740, 395, 978, 995, 848, 72,
820, 471, 278, 875, 874, 223, 391, 505, 763, 41, 86,
953, 617, 697, 210, 68, 911, 516, 584, 380, 516, 876,
382, 617, 233, 554, 324, 119, 574, 679, 91, 837, 970,
142, 502, 901, 849, 63, 779, 927, 74, 767, 645, 579,
349, 354, 846, 427, 351, 469, 685, 297, 362, 947, 998,
434, 896, 773, 441, 562, 785, 704, 529, 398, 754, 307,
660, 292, 537, 68, 799, 472, 856, 794, 278, 36, 930,
92, 148, 67, 688, 78, 115, 602, 956, 341, 414, 913,
905, 207, 571, 381, 435, 157, 26, 755, 770, 374, 117,
359, 865, 328, 643, 756, 916, 435, 184, 785, 930, 349,
161, 253, 365, 82, 976, 499, 461, 188, 22, 204, 986,
459, 345, 59, 874, 967, 924, 878, 804, 366, 808, 616,
906, 695, 691, 581, 59, 21, 268, 792, 168, 537, 520,
21, 647, 760, 226, 907, 427, 703, 585, 338, 772, 629,
726, 296, 672, 282, 268, 741, 330, 272, 217, 534, 704,
811, 219, 490, 337, 877, 969, 699, 57, 342, 114, 115,
570, 701, 903, 848, 708, 779, 620, 272, 595, 718, 75,
356, 321, 636, 276, 442, 812, 645, 854, 317, 437, 314,
343, 571, 202, 416, 715, 659, 277, 676, 140, 132, 547,
375, 260, 868, 380, 261, 441, 934, 9, 325, 144, 751,
28, 906, 193, 889, 374, 792, 123, 668, 970, 737, 846,
352, 425, 308, 603, 391, 151, 897, 664, 485, 481, 636,
521, 749, 387, 5, 592, 663, 857, 712, 510, 367, 862,
519, 123, 196, 936, 552, 233, 282, 35, 686, 215, 508,
647, 289, 307, 18, 327, 108, 366, 459, 232])
def test_snail_077(self):
self.assertEqual(snail([[666, 962, 235, 436, 68, 11, 222, 412, 346, 108,
83, 505, 615, 899, 111, 149, 740, 452, 988,
476],
[546, 18, 303, 148, 420, 385, 556, 547, 944,
980, 346, 821, 402, 114, 287, 328, 884, 420,
476, 327],
[586, 711, 282, 581, 620, 649, 276, 979, 359,
916, 897, 797, 676, 359, 510, 229, 621, 782,
559, 406],
[888, 758, 801, 266, 597, 509, 541, 501, 301,
109, 298, 676, 542, 803, 434, 40, 601, 224, 72,
387],
[402, 960, 825, 515, 400, 282, 102, 787, 226,
256, 446, 116, 926, 868, 497, 885, 645, 228,
37, 263],
[589, 332, 700, 507, 657, 509, 28, 46, 60, 615,
43, 439, 545, 382, 249, 1, 511, 411, 369, 336],
[470, 14, 533, 919, 248, 40, 292, 559, 970, 850,
609, 202, 315, 100, 52, 467, 332, 666, 620,
145],
[117, 906, 282, 526, 168, 206, 689, 213, 207,
78, 270, 186, 877, 744, 191, 86, 56, 626, 47,
777],
[491, 902, 689, 519, 278, 647, 890, 903, 351,
125, 873, 92, 510, 765, 213, 298, 972, 42, 667,
61],
[689, 759, 825, 676, 249, 697, 684, 112, 347,
73, 863, 91, 150, 311, 140, 814, 984, 838, 458,
505],
[176, 115, 727, 603, 981, 695, 255, 165, 433,
82, 576, 392, 401, 736, 469, 685, 684, 473,
599, 275],
[400, 527, 489, 949, 267, 523, 711, 642, 204,
140, 298, 162, 730, 26, 745, 748, 641, 378,
187, 208],
[424, 742, 633, 608, 645, 642, 876, 276, 408,
985, 695, 3, 772, 967, 436, 422, 333, 626, 980,
279],
[363, 401, 873, 167, 355, 259, 678, 424, 558,
957, 171, 284, 664, 517, 855, 849, 112, 470,
331, 112],
[353, 257, 463, 706, 552, 957, 255, 596, 453,
950, 352, 914, 493, 798, 735, 633, 747, 552,
368, 547],
[524, 688, 975, 145, 704, 232, 190, 483, 617,
262, 882, 782, 5, 345, 285, 483, 325, 321, 866,
806],
[99, 972, 262, 332, 81, 103, 425, 156, 240, 599,
508, 755, 783, 585, 354, 515, 694, 638, 22,
815],
[789, 616, 172, 544, 827, 862, 286, 844, 376,
844, 508, 320, 675, 197, 350, 545, 505, 78,
155, 606],
[203, 167, 992, 723, 682, 83, 534, 315, 376, 89,
267, 107, 346, 924, 306, 752, 627, 496, 994,
613],
[581, 737, 393, 879, 406, 15, 265, 238, 125,
683, 505, 835, 174, 509, 284, 12, 364, 345,
395, 1]]),
[666, 962, 235, 436, 68, 11, 222, 412, 346, 108, 83,
505, 615, 899, 111, 149, 740, 452, 988, 476, 327, 406,
387, 263, 336, 145, 777, 61, 505, 275, 208, 279, 112,
547, 806, 815, 606, 613, 1, 395, 345, 364, 12, 284,
509, 174, 835, 505, 683, 125, 238, 265, 15, 406, 879,
393, 737, 581, 203, 789, 99, 524, 353, 363, 424, 400,
176, 689, 491, 117, 470, 589, 402, 888, 586, 546, 18,
303, 148, 420, 385, 556, 547, 944, 980, 346, 821, 402,
114, 287, 328, 884, 420, 476, 559, 72, 37, 369, 620,
47, 667, 458, 599, 187, 980, 331, 368, 866, 22, 155,
994, 496, 627, 752, 306, 924, 346, 107, 267, 89, 376,
315, 534, 83, 682, 723, 992, 167, 616, 972, 688, 257,
401, 742, 527, 115, 759, 902, 906, 14, 332, 960, 758,
711, 282, 581, 620, 649, 276, 979, 359, 916, 897, 797,
676, 359, 510, 229, 621, 782, 224, 228, 411, 666, 626,
42, 838, 473, 378, 626, 470, 552, 321, 638, 78, 505,
545, 350, 197, 675, 320, 508, 844, 376, 844, 286, 862,
827, 544, 172, 262, 975, 463, 873, 633, 489, 727, 825,
689, 282, 533, 700, 825, 801, 266, 597, 509, 541, 501,
301, 109, 298, 676, 542, 803, 434, 40, 601, 645, 511,
332, 56, 972, 984, 684, 641, 333, 112, 747, 325, 694,
515, 354, 585, 783, 755, 508, 599, 240, 156, 425, 103,
81, 332, 145, 706, 167, 608, 949, 603, 676, 519, 526,
919, 507, 515, 400, 282, 102, 787, 226, 256, 446, 116,
926, 868, 497, 885, 1, 467, 86, 298, 814, 685, 748,
422, 849, 633, 483, 285, 345, 5, 782, 882, 262, 617,
483, 190, 232, 704, 552, 355, 645, 267, 981, 249, 278,
168, 248, 657, 509, 28, 46, 60, 615, 43, 439, 545,
382, 249, 52, 191, 213, 140, 469, 745, 436, 855, 735,
798, 493, 914, 352, 950, 453, 596, 255, 957, 259, 642,
523, 695, 697, 647, 206, 40, 292, 559, 970, 850, 609,
202, 315, 100, 744, 765, 311, 736, 26, 967, 517, 664,
284, 171, 957, 558, 424, 678, 876, 711, 255, 684, 890,
689, 213, 207, 78, 270, 186, 877, 510, 150, 401, 730,
772, 3, 695, 985, 408, 276, 642, 165, 112, 903, 351,
125, 873, 92, 91, 392, 162, 298, 140, 204, 433, 347,
73, 863, 576, 82])
def test_snail_078(self):
self.assertEqual(snail([[46, 105, 755, 137, 836, 162, 149, 478, 258],
[379, 307, 501, 642, 573, 610, 945, 506, 956],
[896, 309, 293, 526, 429, 298, 636, 989, 80],
[880, 153, 23, 95, 765, 124, 818, 836, 15],
[242, 510, 792, 823, 494, 479, 737, 231, 317],
[982, 293, 984, 13, 276, 39, 167, 146, 236],
[431, 546, 246, 860, 207, 380, 306, 577, 405],
[905, 276, 247, 949, 479, 6, 61, 479, 257],
[106, 744, 940, 112, 474, 457, 968, 106, 834]]),
[46, 105, 755, 137, 836, 162, 149, 478, 258, 956, 80,
15, 317, 236, 405, 257, 834, 106, 968, 457, 474, 112,
940, 744, 106, 905, 431, 982, 242, 880, 896, 379, 307,
501, 642, 573, 610, 945, 506, 989, 836, 231, 146, 577,
479, 61, 6, 479, 949, 247, 276, 546, 293, 510, 153,
309, 293, 526, 429, 298, 636, 818, 737, 167, 306, 380,
207, 860, 246, 984, 792, 23, 95, 765, 124, 479, 39,
276, 13, 823, 494])
def test_snail_079(self):
self.assertEqual(snail([[601]]), [601])
def test_snail_080(self):
self.assertEqual(snail([[536, 275, 747, 8, 428, 685, 425, 412, 645, 533,
654, 886, 275, 373, 341, 70, 650],
[756, 413, 436, 934, 70, 645, 837, 399, 729,
158, 115, 212, 529, 627, 892, 58, 619],
[518, 780, 787, 240, 167, 877, 45, 186, 204, 22,
90, 292, 440, 612, 569, 934, 587],
[409, 521, 219, 974, 972, 466, 222, 367, 656,
763, 432, 42, 652, 251, 806, 486, 266],
[144, 220, 975, 993, 678, 28, 287, 838, 236,
632, 211, 508, 380, 748, 908, 232, 311],
[59, 581, 843, 66, 293, 134, 177, 377, 903, 109,
289, 797, 915, 171, 878, 695, 826],
[5, 889, 628, 878, 997, 251, 772, 414, 963, 863,
628, 454, 971, 275, 246, 243, 471],
[589, 24, 469, 290, 96, 747, 174, 213, 135, 156,
578, 50, 459, 831, 340, 571, 230],
[943, 385, 307, 46, 800, 569, 449, 634, 899,
355, 786, 960, 487, 267, 905, 890, 626],
[382, 736, 909, 496, 936, 828, 684, 105, 44,
589, 477, 225, 434, 649, 419, 273, 447],
[629, 814, 94, 606, 640, 272, 12, 589, 421, 327,
552, 249, 232, 32, 713, 179, 812],
[432, 535, 53, 10, 658, 475, 431, 61, 830, 716,
173, 797, 766, 76, 537, 4, 156],
[360, 671, 868, 928, 838, 705, 359, 128, 397,
793, 696, 194, 904, 174, 818, 833, 48],
[789, 821, 540, 10, 356, 267, 847, 783, 914,
391, 93, 977, 426, 780, 755, 35, 914],
[862, 389, 478, 904, 272, 512, 567, 469, 802,
492, 300, 137, 908, 585, 72, 928, 133],
[29, 33, 792, 193, 910, 404, 972, 524, 301, 32,
385, 813, 353, 322, 112, 606, 138],
[930, 878, 888, 703, 28, 422, 72, 939, 971, 551,
49, 363, 45, 723, 659, 553, 133]]),
[536, 275, 747, 8, 428, 685, 425, 412, 645, 533, 654,
886, 275, 373, 341, 70, 650, 619, 587, 266, 311, 826,
471, 230, 626, 447, 812, 156, 48, 914, 133, 138, 133,
553, 659, 723, 45, 363, 49, 551, 971, 939, 72, 422,
28, 703, 888, 878, 930, 29, 862, 789, 360, 432, 629,
382, 943, 589, 5, 59, 144, 409, 518, 756, 413, 436,
934, 70, 645, 837, 399, 729, 158, 115, 212, 529, 627,
892, 58, 934, 486, 232, 695, 243, 571, 890, 273, 179,
4, 833, 35, 928, 606, 112, 322, 353, 813, 385, 32,
301, 524, 972, 404, 910, 193, 792, 33, 389, 821, 671,
535, 814, 736, 385, 24, 889, 581, 220, 521, 780, 787,
240, 167, 877, 45, 186, 204, 22, 90, 292, 440, 612,
569, 806, 908, 878, 246, 340, 905, 419, 713, 537, 818,
755, 72, 585, 908, 137, 300, 492, 802, 469, 567, 512,
272, 904, 478, 540, 868, 53, 94, 909, 307, 469, 628,
843, 975, 219, 974, 972, 466, 222, 367, 656, 763, 432,
42, 652, 251, 748, 171, 275, 831, 267, 649, 32, 76,
174, 780, 426, 977, 93, 391, 914, 783, 847, 267, 356,
10, 928, 10, 606, 496, 46, 290, 878, 66, 993, 678, 28,
287, 838, 236, 632, 211, 508, 380, 915, 971, 459, 487,
434, 232, 766, 904, 194, 696, 793, 397, 128, 359, 705,
838, 658, 640, 936, 800, 96, 997, 293, 134, 177, 377,
903, 109, 289, 797, 454, 50, 960, 225, 249, 797, 173,
716, 830, 61, 431, 475, 272, 828, 569, 747, 251, 772,
414, 963, 863, 628, 578, 786, 477, 552, 327, 421, 589,
12, 684, 449, 174, 213, 135, 156, 355, 589, 44, 105,
634, 899])
def test_snail_081(self):
self.assertEqual(snail([[350, 303, 624, 66, 319, 723, 677],
[44, 616, 64, 859, 683, 425, 556],
[551, 592, 382, 678, 823, 63, 881],
[956, 8, 601, 384, 191, 811, 32],
[815, 931, 592, 1, 230, 786, 446],
[597, 948, 908, 590, 858, 850, 974],
[533, 204, 906, 134, 27, 882, 14]]),
[350, 303, 624, 66, 319, 723, 677, 556, 881, 32, 446,
974, 14, 882, 27, 134, 906, 204, 533, 597, 815, 956,
551, 44, 616, 64, 859, 683, 425, 63, 811, 786, 850,
858, 590, 908, 948, 931, 8, 592, 382, 678, 823, 191,
230, 1, 592, 601, 384])
def test_snail_082(self):
self.assertEqual(snail([[232, 617, 922, 488, 792, 289, 488, 451, 844,
714, 179, 844, 377, 576, 242],
[638, 339, 530, 807, 417, 862, 71, 352, 963, 49,
822, 978, 162, 924, 461],
[148, 636, 263, 858, 140, 630, 654, 304, 258,
412, 64, 1, 326, 311, 284],
[573, 758, 197, 133, 766, 783, 550, 324, 290,
293, 337, 479, 415, 587, 133],
[798, 60, 844, 834, 303, 77, 121, 339, 885, 691,
848, 272, 484, 758, 485],
[364, 590, 109, 407, 502, 59, 341, 166, 109,
382, 90, 226, 362, 299, 761],
[308, 771, 541, 867, 867, 886, 671, 302, 894,
517, 713, 570, 682, 603, 209],
[512, 975, 223, 313, 340, 376, 751, 76, 332, 80,
81, 581, 401, 768, 373],
[933, 400, 289, 659, 875, 869, 5, 606, 314, 989,
494, 403, 925, 341, 839],
[470, 504, 721, 26, 765, 821, 985, 750, 905,
938, 975, 950, 288, 196, 603],
[977, 989, 127, 507, 747, 679, 671, 797, 819,
775, 439, 994, 870, 970, 164],
[761, 489, 634, 539, 499, 48, 61, 515, 675, 8,
194, 621, 828, 638, 801],
[366, 575, 629, 798, 838, 201, 769, 989, 507,
142, 3, 561, 225, 282, 604],
[122, 776, 797, 161, 244, 963, 385, 715, 120,
321, 752, 489, 233, 904, 843],
[739, 637, 324, 232, 751, 507, 800, 548, 486,
781, 554, 267, 721, 845, 6]]),
[232, 617, 922, 488, 792, 289, 488, 451, 844, 714, 179,
844, 377, 576, 242, 461, 284, 133, 485, 761, 209, 373,
839, 603, 164, 801, 604, 843, 6, 845, 721, 267, 554,
781, 486, 548, 800, 507, 751, 232, 324, 637, 739, 122,
366, 761, 977, 470, 933, 512, 308, 364, 798, 573, 148,
638, 339, 530, 807, 417, 862, 71, 352, 963, 49, 822,
978, 162, 924, 311, 587, 758, 299, 603, 768, 341, 196,
970, 638, 282, 904, 233, 489, 752, 321, 120, 715, 385,
963, 244, 161, 797, 776, 575, 489, 989, 504, 400, 975,
771, 590, 60, 758, 636, 263, 858, 140, 630, 654, 304,
258, 412, 64, 1, 326, 415, 484, 362, 682, 401, 925,
288, 870, 828, 225, 561, 3, 142, 507, 989, 769, 201,
838, 798, 629, 634, 127, 721, 289, 223, 541, 109, 844,
197, 133, 766, 783, 550, 324, 290, 293, 337, 479, 272,
226, 570, 581, 403, 950, 994, 621, 194, 8, 675, 515,
61, 48, 499, 539, 507, 26, 659, 313, 867, 407, 834,
303, 77, 121, 339, 885, 691, 848, 90, 713, 81, 494,
975, 439, 775, 819, 797, 671, 679, 747, 765, 875, 340,
867, 502, 59, 341, 166, 109, 382, 517, 80, 989, 938,
905, 750, 985, 821, 869, 376, 886, 671, 302, 894, 332,
314, 606, 5, 751, 76])
def test_snail_083(self):
self.assertEqual(snail([[144, 568, 21, 727, 740, 122, 743, 378, 519,
294, 987, 449, 688, 91],
[552, 330, 129, 484, 303, 770, 794, 409, 32,
995, 764, 458, 386, 946],
[232, 385, 662, 477, 897, 597, 969, 609, 361,
529, 422, 18, 645, 653],
[819, 179, 340, 828, 667, 374, 420, 151, 671,
281, 326, 381, 172, 12],
[695, 954, 432, 746, 292, 212, 544, 792, 75,
976, 26, 534, 887, 349],
[313, 163, 954, 749, 295, 980, 883, 133, 74,
156, 703, 232, 232, 743],
[417, 501, 155, 131, 331, 691, 333, 873, 964,
12, 447, 684, 455, 434],
[135, 141, 836, 947, 767, 389, 477, 646, 470,
281, 296, 182, 898, 681],
[373, 491, 318, 425, 872, 981, 276, 414, 883,
170, 585, 494, 993, 789],
[419, 172, 127, 49, 377, 384, 279, 958, 572,
535, 777, 121, 226, 728],
[683, 986, 545, 205, 129, 816, 117, 474, 159,
577, 380, 149, 42, 360],
[317, 525, 283, 558, 762, 813, 230, 435, 944,
500, 260, 211, 728, 666],
[858, 49, 772, 565, 195, 376, 938, 653, 992, 54,
819, 552, 93, 486],
[246, 99, 795, 769, 705, 916, 422, 117, 882, 41,
23, 612, 426, 556]]),
[144, 568, 21, 727, 740, 122, 743, 378, 519, 294, 987,
449, 688, 91, 946, 653, 12, 349, 743, 434, 681, 789,
728, 360, 666, 486, 556, 426, 612, 23, 41, 882, 117,
422, 916, 705, 769, 795, 99, 246, 858, 317, 683, 419,
373, 135, 417, 313, 695, 819, 232, 552, 330, 129, 484,
303, 770, 794, 409, 32, 995, 764, 458, 386, 645, 172,
887, 232, 455, 898, 993, 226, 42, 728, 93, 552, 819,
54, 992, 653, 938, 376, 195, 565, 772, 49, 525, 986,
172, 491, 141, 501, 163, 954, 179, 385, 662, 477, 897,
597, 969, 609, 361, 529, 422, 18, 381, 534, 232, 684,
182, 494, 121, 149, 211, 260, 500, 944, 435, 230, 813,
762, 558, 283, 545, 127, 318, 836, 155, 954, 432, 340,
828, 667, 374, 420, 151, 671, 281, 326, 26, 703, 447,
296, 585, 777, 380, 577, 159, 474, 117, 816, 129, 205,
49, 425, 947, 131, 749, 746, 292, 212, 544, 792, 75,
976, 156, 12, 281, 170, 535, 572, 958, 279, 384, 377,
872, 767, 331, 295, 980, 883, 133, 74, 964, 470, 883,
414, 276, 981, 389, 691, 333, 873, 646, 477])
def test_snail_084(self):
self.assertEqual(snail([[189, 117, 130, 56, 993, 906, 843, 983, 823,
485, 420, 275, 333, 394, 68, 33],
[818, 830, 502, 978, 273, 428, 157, 621, 121,
411, 509, 279, 263, 56, 108, 82],
[632, 484, 962, 408, 95, 161, 463, 823, 500,
110, 616, 113, 355, 800, 916, 304],
[146, 217, 702, 32, 929, 794, 249, 734, 284,
757, 354, 826, 842, 992, 651, 820],
[644, 413, 389, 168, 871, 136, 95, 987, 101,
790, 634, 771, 802, 35, 528, 248],
[636, 81, 890, 390, 966, 16, 584, 150, 112, 563,
432, 522, 231, 817, 111, 490],
[572, 77, 887, 337, 985, 822, 83, 788, 986, 767,
996, 442, 328, 24, 906, 496],
[889, 781, 904, 723, 475, 507, 809, 682, 839,
436, 614, 415, 490, 892, 778, 879],
[423, 699, 788, 677, 630, 121, 568, 397, 366,
495, 850, 43, 181, 296, 671, 181],
[849, 828, 840, 490, 665, 921, 666, 346, 315,
287, 347, 527, 346, 38, 599, 743],
[196, 68, 364, 681, 321, 104, 86, 948, 393, 201,
470, 539, 459, 60, 156, 742],
[820, 525, 485, 892, 653, 694, 287, 887, 729,
75, 466, 354, 568, 850, 732, 654],
[670, 174, 472, 262, 890, 410, 362, 234, 335,
92, 451, 167, 706, 177, 955, 612],
[647, 138, 198, 265, 541, 673, 41, 818, 564,
863, 932, 552, 245, 71, 535, 289],
[626, 514, 854, 694, 783, 469, 674, 473, 537,
157, 546, 891, 615, 399, 547, 699],
[662, 868, 468, 922, 99, 268, 120, 280, 983,
586, 712, 206, 750, 43, 640, 116]]),
[189, 117, 130, 56, 993, 906, 843, 983, 823, 485, 420,
275, 333, 394, 68, 33, 82, 304, 820, 248, 490, 496,
879, 181, 743, 742, 654, 612, 289, 699, 116, 640, 43,
750, 206, 712, 586, 983, 280, 120, 268, 99, 922, 468,
868, 662, 626, 647, 670, 820, 196, 849, 423, 889, 572,
636, 644, 146, 632, 818, 830, 502, 978, 273, 428, 157,
621, 121, 411, 509, 279, 263, 56, 108, 916, 651, 528,
111, 906, 778, 671, 599, 156, 732, 955, 535, 547, 399,
615, 891, 546, 157, 537, 473, 674, 469, 783, 694, 854,
514, 138, 174, 525, 68, 828, 699, 781, 77, 81, 413,
217, 484, 962, 408, 95, 161, 463, 823, 500, 110, 616,
113, 355, 800, 992, 35, 817, 24, 892, 296, 38, 60,
850, 177, 71, 245, 552, 932, 863, 564, 818, 41, 673,
541, 265, 198, 472, 485, 364, 840, 788, 904, 887, 890,
389, 702, 32, 929, 794, 249, 734, 284, 757, 354, 826,
842, 802, 231, 328, 490, 181, 346, 459, 568, 706, 167,
451, 92, 335, 234, 362, 410, 890, 262, 892, 681, 490,
677, 723, 337, 390, 168, 871, 136, 95, 987, 101, 790,
634, 771, 522, 442, 415, 43, 527, 539, 354, 466, 75,
729, 887, 287, 694, 653, 321, 665, 630, 475, 985, 966,
16, 584, 150, 112, 563, 432, 996, 614, 850, 347, 470,
201, 393, 948, 86, 104, 921, 121, 507, 822, 83, 788,
986, 767, 436, 495, 287, 315, 346, 666, 568, 809, 682,
839, 366, 397])
def test_snail_085(self):
self.assertEqual(snail([[830, 253, 625, 973, 491, 433, 340, 950, 941],
[594, 74, 780, 39, 840, 620, 979, 117, 869],
[382, 233, 384, 673, 659, 501, 886, 415, 947],
[608, 73, 246, 530, 429, 506, 573, 552, 505],
[482, 346, 767, 910, 939, 200, 398, 831, 979],
[382, 723, 528, 936, 648, 300, 650, 564, 823],
[68, 395, 220, 265, 125, 912, 658, 384, 764],
[934, 378, 160, 882, 201, 23, 74, 287, 899],
[927, 183, 846, 677, 44, 118, 611, 685, 902]]),
[830, 253, 625, 973, 491, 433, 340, 950, 941, 869, 947,
505, 979, 823, 764, 899, 902, 685, 611, 118, 44, 677,
846, 183, 927, 934, 68, 382, 482, 608, 382, 594, 74,
780, 39, 840, 620, 979, 117, 415, 552, 831, 564, 384,
287, 74, 23, 201, 882, 160, 378, 395, 723, 346, 73,
233, 384, 673, 659, 501, 886, 573, 398, 650, 658, 912,
125, 265, 220, 528, 767, 246, 530, 429, 506, 200, 300,
648, 936, 910, 939])
def test_snail_086(self):
self.assertEqual(snail([[138, 741, 417, 159, 204, 874, 124, 662, 424,
454, 485, 437, 98, 149, 674, 523, 367],
[743, 936, 693, 93, 718, 309, 249, 664, 820,
647, 450, 8, 94, 521, 633, 704, 11],
[111, 422, 331, 297, 369, 809, 16, 991, 98, 871,
429, 887, 906, 706, 16, 576, 635],
[897, 820, 362, 595, 369, 495, 206, 231, 632,
842, 225, 550, 663, 364, 556, 586, 890],
[370, 949, 756, 447, 637, 326, 862, 636, 342,
228, 582, 876, 52, 210, 298, 922, 59],
[842, 58, 731, 756, 306, 879, 101, 280, 670,
612, 636, 200, 613, 915, 21, 756, 625],
[22, 850, 811, 476, 515, 485, 579, 707, 730,
343, 492, 324, 588, 692, 954, 339, 674],
[851, 681, 842, 320, 85, 795, 471, 779, 259,
472, 217, 985, 410, 371, 408, 401, 649],
[581, 45, 563, 964, 455, 888, 78, 345, 479, 891,
302, 874, 477, 740, 634, 339, 13],
[794, 39, 180, 191, 463, 573, 704, 333, 920,
508, 373, 622, 378, 615, 1, 778, 186],
[850, 537, 431, 4, 427, 172, 687, 344, 35, 847,
745, 818, 394, 935, 796, 428, 562],
[487, 80, 446, 506, 159, 277, 773, 958, 222,
805, 906, 369, 807, 59, 834, 866, 923],
[169, 750, 548, 544, 7, 987, 629, 344, 516, 916,
352, 303, 926, 466, 44, 635, 703],
[956, 621, 626, 932, 280, 837, 977, 529, 761,
567, 636, 162, 415, 12, 347, 336, 835],
[544, 924, 11, 155, 311, 602, 943, 30, 742, 627,
21, 905, 443, 295, 369, 462, 617],
[445, 648, 244, 728, 706, 492, 740, 402, 226,
605, 748, 201, 717, 135, 785, 306, 535],
[827, 874, 302, 890, 902, 317, 511, 214, 761,
852, 180, 653, 300, 780, 147, 744, 661]]),
[138, 741, 417, 159, 204, 874, 124, 662, 424, 454, 485,
437, 98, 149, 674, 523, 367, 11, 635, 890, 59, 625,
674, 649, 13, 186, 562, 923, 703, 835, 617, 535, 661,
744, 147, 780, 300, 653, 180, 852, 761, 214, 511, 317,
902, 890, 302, 874, 827, 445, 544, 956, 169, 487, 850,
794, 581, 851, 22, 842, 370, 897, 111, 743, 936, 693,
93, 718, 309, 249, 664, 820, 647, 450, 8, 94, 521,
633, 704, 576, 586, 922, 756, 339, 401, 339, 778, 428,
866, 635, 336, 462, 306, 785, 135, 717, 201, 748, 605,
226, 402, 740, 492, 706, 728, 244, 648, 924, 621, 750,
80, 537, 39, 45, 681, 850, 58, 949, 820, 422, 331,
297, 369, 809, 16, 991, 98, 871, 429, 887, 906, 706,
16, 556, 298, 21, 954, 408, 634, 1, 796, 834, 44, 347,
369, 295, 443, 905, 21, 627, 742, 30, 943, 602, 311,
155, 11, 626, 548, 446, 431, 180, 563, 842, 811, 731,
756, 362, 595, 369, 495, 206, 231, 632, 842, 225, 550,
663, 364, 210, 915, 692, 371, 740, 615, 935, 59, 466,
12, 415, 162, 636, 567, 761, 529, 977, 837, 280, 932,
544, 506, 4, 191, 964, 320, 476, 756, 447, 637, 326,
862, 636, 342, 228, 582, 876, 52, 613, 588, 410, 477,
378, 394, 807, 926, 303, 352, 916, 516, 344, 629, 987,
7, 159, 427, 463, 455, 85, 515, 306, 879, 101, 280,
670, 612, 636, 200, 324, 985, 874, 622, 818, 369, 906,
805, 222, 958, 773, 277, 172, 573, 888, 795, 485, 579,
707, 730, 343, 492, 217, 302, 373, 745, 847, 35, 344,
687, 704, 78, 471, 779, 259, 472, 891, 508, 920, 333,
345, 479])
def test_snail_087(self):
self.assertEqual(snail([[525, 36, 964], [45, 650, 15], [487, 52, 333]]),
[525, 36, 964, 15, 333, 52, 487, 45, 650])
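    # Not part of the original suite: a minimal reference sketch of the spiral
    # ("snail") traversal these assertions exercise. This is an assumption for
    # orientation only, not the snail() actually imported by these tests. The
    # idea: peel off the top row, then rotate the remaining rows
    # counter-clockwise (transpose, then reverse row order) and repeat.
    # For the 3x3 case above it yields [525, 36, 964, 15, 333, 52, 487, 45, 650].
    @staticmethod
    def _reference_snail(matrix):
        out = []
        while matrix and matrix[0]:
            out += list(matrix.pop(0))                      # take the top row
            matrix = [list(r) for r in zip(*matrix)][::-1]  # rotate rest CCW
        return out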
def test_snail_088(self):
self.assertEqual(snail(
[[708, 433, 591, 277, 518, 564, 794, 454, 666, 595, 254, 870],
[886, 628, 399, 222, 594, 65, 44, 567, 666, 356, 421, 594],
[612, 162, 307, 42, 245, 10, 620, 187, 413, 350, 972, 964],
[669, 577, 98, 332, 950, 236, 393, 682, 764, 432, 849, 338],
[495, 741, 186, 613, 557, 277, 861, 390, 228, 247, 535, 87],
[485, 401, 526, 512, 170, 157, 575, 284, 814, 215, 897, 797],
[447, 679, 519, 8, 329, 991, 568, 414, 967, 643, 213, 426],
[139, 739, 730, 456, 961, 236, 501, 409, 293, 812, 141, 773],
[930, 834, 528, 146, 556, 677, 991, 143, 911, 118, 301, 165],
[467, 666, 632, 210, 394, 124, 348, 615, 620, 216, 340, 549],
[116, 624, 238, 45, 716, 894, 380, 921, 708, 998, 910, 844],
[790, 25, 61, 218, 901, 200, 833, 57, 472, 464, 523, 573]]),
[708, 433, 591, 277, 518, 564, 794, 454, 666, 595, 254,
870, 594, 964, 338, 87, 797, 426, 773, 165, 549, 844,
573, 523, 464, 472, 57, 833, 200, 901, 218, 61, 25,
790, 116, 467, 930, 139, 447, 485, 495, 669, 612, 886,
628, 399, 222, 594, 65, 44, 567, 666, 356, 421, 972,
849, 535, 897, 213, 141, 301, 340, 910, 998, 708, 921,
380, 894, 716, 45, 238, 624, 666, 834, 739, 679, 401,
741, 577, 162, 307, 42, 245, 10, 620, 187, 413, 350,
432, 247, 215, 643, 812, 118, 216, 620, 615, 348, 124,
394, 210, 632, 528, 730, 519, 526, 186, 98, 332, 950,
236, 393, 682, 764, 228, 814, 967, 293, 911, 143, 991,
677, 556, 146, 456, 8, 512, 613, 557, 277, 861, 390,
284, 414, 409, 501, 236, 961, 329, 170, 157, 575, 568,
991])
def test_snail_089(self):
self.assertEqual(snail([[293, 385, 292, 757, 361, 655, 659, 966, 615,
684, 335, 393, 474],
[478, 315, 712, 147, 750, 338, 9, 707, 159, 91,
170, 85, 27],
[499, 881, 299, 789, 431, 756, 734, 872, 384,
26, 520, 489, 819],
[801, 283, 442, 398, 640, 355, 827, 403, 368,
238, 481, 404, 108],
[295, 858, 223, 425, 139, 752, 720, 679, 43,
976, 817, 77, 607],
[877, 392, 230, 626, 447, 760, 462, 860, 519,
976, 935, 473, 237],
[341, 760, 55, 653, 403, 84, 673, 64, 997, 241,
957, 851, 858],
[946, 296, 603, 287, 348, 837, 508, 741, 461,
920, 693, 131, 343],
[584, 127, 8, 16, 54, 563, 356, 193, 904, 812,
588, 915, 556],
[318, 934, 918, 261, 821, 590, 962, 870, 590,
99, 658, 259, 484],
[350, 129, 627, 13, 625, 760, 268, 552, 427,
303, 818, 860, 190],
[944, 524, 876, 51, 827, 34, 526, 269, 903, 5,
902, 246, 609],
[51, 555, 781, 892, 378, 981, 385, 744, 956,
684, 390, 384, 689]]),
[293, 385, 292, 757, 361, 655, 659, 966, 615, 684, 335,
393, 474, 27, 819, 108, 607, 237, 858, 343, 556, 484,
190, 609, 689, 384, 390, 684, 956, 744, 385, 981, 378,
892, 781, 555, 51, 944, 350, 318, 584, 946, 341, 877,
295, 801, 499, 478, 315, 712, 147, 750, 338, 9, 707,
159, 91, 170, 85, 489, 404, 77, 473, 851, 131, 915,
259, 860, 246, 902, 5, 903, 269, 526, 34, 827, 51,
876, 524, 129, 934, 127, 296, 760, 392, 858, 283, 881,
299, 789, 431, 756, 734, 872, 384, 26, 520, 481, 817,
935, 957, 693, 588, 658, 818, 303, 427, 552, 268, 760,
625, 13, 627, 918, 8, 603, 55, 230, 223, 442, 398,
640, 355, 827, 403, 368, 238, 976, 976, 241, 920, 812,
99, 590, 870, 962, 590, 821, 261, 16, 287, 653, 626,
425, 139, 752, 720, 679, 43, 519, 997, 461, 904, 193,
356, 563, 54, 348, 403, 447, 760, 462, 860, 64, 741,
508, 837, 84, 673])
def test_snail_090(self):
self.assertEqual(snail(
[[232, 704, 47, 593, 30, 394, 932, 781, 504, 760],
[319, 806, 940, 647, 412, 710, 335, 109, 34, 130],
[447, 743, 128, 497, 547, 155, 153, 676, 930, 401],
[502, 42, 815, 824, 166, 627, 893, 500, 753, 136],
[343, 384, 627, 328, 944, 841, 583, 899, 598, 558],
[260, 468, 889, 544, 526, 498, 749, 87, 741, 862],
[323, 885, 390, 955, 965, 997, 974, 210, 611, 890],
[598, 888, 146, 717, 118, 63, 858, 484, 310, 265],
[825, 98, 520, 519, 70, 602, 698, 662, 73, 902],
[53, 156, 588, 118, 981, 519, 39, 536, 147, 295]]),
[232, 704, 47, 593, 30, 394, 932, 781, 504, 760, 130,
401, 136, 558, 862, 890, 265, 902, 295, 147, 536, 39,
519, 981, 118, 588, 156, 53, 825, 598, 323, 260, 343,
502, 447, 319, 806, 940, 647, 412, 710, 335, 109, 34,
930, 753, 598, 741, 611, 310, 73, 662, 698, 602, 70,
519, 520, 98, 888, 885, 468, 384, 42, 743, 128, 497,
547, 155, 153, 676, 500, 899, 87, 210, 484, 858, 63,
118, 717, 146, 390, 889, 627, 815, 824, 166, 627, 893,
583, 749, 974, 997, 965, 955, 544, 328, 944, 841, 498,
526])
def test_snail_091(self):
self.assertEqual(snail([[143, 117, 15, 361, 949, 412, 30, 789, 293],
[517, 943, 527, 914, 119, 984, 619, 878, 694],
[548, 939, 625, 550, 787, 169, 633, 216, 84],
[649, 772, 533, 591, 101, 87, 115, 248, 263],
[650, 436, 659, 76, 395, 878, 470, 424, 7],
[218, 861, 265, 632, 294, 34, 433, 364, 370],
[164, 390, 869, 489, 226, 371, 295, 776, 503],
[424, 284, 985, 209, 70, 146, 636, 797, 176],
[951, 264, 54, 144, 844, 301, 390, 678, 639]]),
[143, 117, 15, 361, 949, 412, 30, 789, 293, 694, 84,
263, 7, 370, 503, 176, 639, 678, 390, 301, 844, 144,
54, 264, 951, 424, 164, 218, 650, 649, 548, 517, 943,
527, 914, 119, 984, 619, 878, 216, 248, 424, 364, 776,
797, 636, 146, 70, 209, 985, 284, 390, 861, 436, 772,
939, 625, 550, 787, 169, 633, 115, 470, 433, 295, 371,
226, 489, 869, 265, 659, 533, 591, 101, 87, 878, 34,
294, 632, 76, 395])
def test_snail_092(self):
self.assertEqual(snail([[746, 798, 736, 433, 822, 82, 487, 32, 500, 717,
52, 320, 238, 1, 529, 973, 97, 6],
[37, 761, 26, 63, 86, 770, 670, 899, 738, 619,
967, 959, 663, 317, 395, 302, 45, 8],
[316, 296, 58, 653, 99, 271, 767, 122, 344, 644,
604, 617, 566, 441, 699, 287, 775, 950],
[60, 610, 295, 13, 143, 480, 575, 808, 896, 416,
919, 970, 163, 38, 726, 482, 18, 940],
[62, 758, 907, 148, 994, 42, 547, 843, 296, 348,
625, 884, 823, 148, 682, 358, 627, 792],
[259, 128, 820, 772, 767, 852, 3, 331, 540, 196,
560, 312, 291, 376, 162, 666, 420, 426],
[3, 362, 144, 383, 219, 657, 301, 113, 46, 771,
132, 697, 38, 486, 299, 218, 254, 692],
[397, 76, 152, 653, 161, 199, 721, 574, 729,
810, 383, 223, 626, 96, 817, 872, 945, 220],
[961, 967, 364, 568, 958, 566, 517, 693, 841,
509, 751, 713, 888, 764, 200, 16, 501, 524],
[195, 523, 624, 652, 113, 281, 983, 472, 482,
681, 206, 733, 742, 985, 226, 207, 699, 575],
[533, 22, 231, 758, 951, 649, 867, 381, 734,
538, 733, 814, 392, 86, 322, 758, 550, 42],
[775, 777, 347, 884, 298, 126, 826, 595, 151,
560, 119, 375, 416, 94, 245, 659, 238, 229],
[562, 956, 429, 47, 922, 974, 953, 624, 583, 40,
507, 752, 446, 779, 788, 952, 354, 798],
[495, 809, 528, 500, 277, 35, 827, 486, 12, 453,
73, 91, 129, 810, 828, 698, 401, 824],
[549, 11, 22, 952, 943, 177, 372, 535, 785, 315,
879, 864, 606, 945, 720, 960, 997, 482],
[810, 804, 171, 44, 552, 317, 293, 863, 71, 416,
294, 995, 267, 152, 973, 777, 936, 790],
[264, 874, 485, 179, 373, 496, 786, 520, 437,
363, 619, 97, 895, 21, 9, 170, 757, 855],
[699, 372, 806, 630, 759, 884, 175, 149, 192,
684, 457, 237, 46, 938, 845, 207, 265, 114]]),
[746, 798, 736, 433, 822, 82, 487, 32, 500, 717, 52,
320, 238, 1, 529, 973, 97, 6, 8, 950, 940, 792, 426,
692, 220, 524, 575, 42, 229, 798, 824, 482, 790, 855,
114, 265, 207, 845, 938, 46, 237, 457, 684, 192, 149,
175, 884, 759, 630, 806, 372, 699, 264, 810, 549, 495,
562, 775, 533, 195, 961, 397, 3, 259, 62, 60, 316, 37,
761, 26, 63, 86, 770, 670, 899, 738, 619, 967, 959,
663, 317, 395, 302, 45, 775, 18, 627, 420, 254, 945,
501, 699, 550, 238, 354, 401, 997, 936, 757, 170, 9,
21, 895, 97, 619, 363, 437, 520, 786, 496, 373, 179,
485, 874, 804, 11, 809, 956, 777, 22, 523, 967, 76,
362, 128, 758, 610, 296, 58, 653, 99, 271, 767, 122,
344, 644, 604, 617, 566, 441, 699, 287, 482, 358, 666,
218, 872, 16, 207, 758, 659, 952, 698, 960, 777, 973,
152, 267, 995, 294, 416, 71, 863, 293, 317, 552, 44,
171, 22, 528, 429, 347, 231, 624, 364, 152, 144, 820,
907, 295, 13, 143, 480, 575, 808, 896, 416, 919, 970,
163, 38, 726, 682, 162, 299, 817, 200, 226, 322, 245,
788, 828, 720, 945, 606, 864, 879, 315, 785, 535, 372,
177, 943, 952, 500, 47, 884, 758, 652, 568, 653, 383,
772, 148, 994, 42, 547, 843, 296, 348, 625, 884, 823,
148, 376, 486, 96, 764, 985, 86, 94, 779, 810, 129,
91, 73, 453, 12, 486, 827, 35, 277, 922, 298, 951,
113, 958, 161, 219, 767, 852, 3, 331, 540, 196, 560,
312, 291, 38, 626, 888, 742, 392, 416, 446, 752, 507,
40, 583, 624, 953, 974, 126, 649, 281, 566, 199, 657,
301, 113, 46, 771, 132, 697, 223, 713, 733, 814, 375,
119, 560, 151, 595, 826, 867, 983, 517, 721, 574, 729,
810, 383, 751, 206, 733, 538, 734, 381, 472, 693, 841,
509, 681, 482])
def test_snail_093(self):
self.assertEqual(snail(
[[600, 705, 630, 641, 878], [29, 799, 688, 274, 954],
[642, 533, 486, 590, 446], [386, 706, 769, 884, 88],
[40, 949, 713, 263, 542]]),
[600, 705, 630, 641, 878, 954, 446, 88, 542, 263, 713,
949, 40, 386, 642, 29, 799, 688, 274, 590, 884, 769,
706, 533, 486])
def test_snail_094(self):
self.assertEqual(snail([[907, 736, 956, 378, 282, 128, 890, 360, 476,
774, 662, 76, 440, 146, 260, 503, 594, 753,
601, 758],
[154, 508, 696, 345, 591, 993, 883, 517, 744,
441, 519, 59, 241, 932, 612, 853, 681, 580,
189, 616],
[252, 776, 960, 174, 414, 719, 865, 586, 514,
147, 885, 941, 624, 902, 920, 281, 788, 666,
645, 885],
[449, 571, 489, 461, 175, 497, 36, 529, 700,
833, 843, 360, 129, 148, 858, 887, 601, 368,
850, 693],
[490, 627, 711, 829, 183, 271, 1, 656, 384, 296,
344, 478, 251, 806, 930, 50, 586, 526, 851,
77],
[567, 200, 336, 555, 849, 867, 680, 585, 750,
865, 85, 520, 229, 438, 712, 500, 846, 37, 795,
591],
[861, 605, 491, 552, 577, 316, 923, 18, 304,
237, 105, 132, 420, 458, 716, 500, 745, 488,
419, 900],
[212, 651, 759, 532, 972, 701, 178, 862, 383,
683, 647, 361, 804, 619, 203, 834, 177, 789,
648, 172],
[824, 811, 117, 519, 107, 371, 638, 841, 931,
397, 381, 19, 625, 501, 644, 332, 421, 616, 9,
451],
[626, 774, 505, 848, 925, 320, 635, 85, 491,
700, 980, 154, 778, 639, 893, 455, 491, 23,
488, 902],
[842, 984, 751, 823, 776, 261, 411, 239, 490,
828, 123, 248, 555, 308, 340, 997, 464, 901,
931, 278],
[826, 354, 702, 996, 583, 195, 939, 381, 649,
830, 991, 167, 337, 328, 237, 120, 678, 64,
943, 232],
[393, 920, 761, 889, 953, 112, 493, 891, 517,
937, 212, 549, 37, 800, 902, 200, 45, 958, 400,
609],
[190, 982, 178, 364, 881, 102, 889, 873, 976,
191, 973, 742, 680, 718, 585, 924, 638, 936,
62, 644],
[565, 513, 575, 193, 561, 750, 953, 226, 691,
562, 655, 294, 877, 651, 343, 328, 599, 277,
883, 447],
[224, 782, 630, 902, 677, 276, 35, 489, 941,
122, 950, 593, 808, 738, 901, 228, 621, 730,
567, 484],
[252, 491, 679, 882, 157, 6, 674, 542, 384, 508,
93, 981, 502, 342, 732, 265, 135, 309, 814,
377],
[609, 16, 276, 999, 676, 620, 662, 276, 598, 79,
983, 105, 959, 328, 7, 486, 112, 484, 117,
970],
[592, 391, 807, 39, 654, 757, 676, 569, 589,
920, 935, 443, 821, 220, 406, 551, 649, 605,
753, 277],
[474, 183, 917, 831, 371, 55, 70, 631, 827, 1,
526, 648, 466, 575, 916, 776, 237, 18, 671,
244]]),
[907, 736, 956, 378, 282, 128, 890, 360, 476, 774, 662,
76, 440, 146, 260, 503, 594, 753, 601, 758, 616, 885,
693, 77, 591, 900, 172, 451, 902, 278, 232, 609, 644,
447, 484, 377, 970, 277, 244, 671, 18, 237, 776, 916,
575, 466, 648, 526, 1, 827, 631, 70, 55, 371, 831,
917, 183, 474, 592, 609, 252, 224, 565, 190, 393, 826,
842, 626, 824, 212, 861, 567, 490, 449, 252, 154, 508,
696, 345, 591, 993, 883, 517, 744, 441, 519, 59, 241,
932, 612, 853, 681, 580, 189, 645, 850, 851, 795, 419,
648, 9, 488, 931, 943, 400, 62, 883, 567, 814, 117,
753, 605, 649, 551, 406, 220, 821, 443, 935, 920, 589,
569, 676, 757, 654, 39, 807, 391, 16, 491, 782, 513,
982, 920, 354, 984, 774, 811, 651, 605, 200, 627, 571,
776, 960, 174, 414, 719, 865, 586, 514, 147, 885, 941,
624, 902, 920, 281, 788, 666, 368, 526, 37, 488, 789,
616, 23, 901, 64, 958, 936, 277, 730, 309, 484, 112,
486, 7, 328, 959, 105, 983, 79, 598, 276, 662, 620,
676, 999, 276, 679, 630, 575, 178, 761, 702, 751, 505,
117, 759, 491, 336, 711, 489, 461, 175, 497, 36, 529,
700, 833, 843, 360, 129, 148, 858, 887, 601, 586, 846,
745, 177, 421, 491, 464, 678, 45, 638, 599, 621, 135,
265, 732, 342, 502, 981, 93, 508, 384, 542, 674, 6,
157, 882, 902, 193, 364, 889, 996, 823, 848, 519, 532,
552, 555, 829, 183, 271, 1, 656, 384, 296, 344, 478,
251, 806, 930, 50, 500, 500, 834, 332, 455, 997, 120,
200, 924, 328, 228, 901, 738, 808, 593, 950, 122, 941,
489, 35, 276, 677, 561, 881, 953, 583, 776, 925, 107,
972, 577, 849, 867, 680, 585, 750, 865, 85, 520, 229,
438, 712, 716, 203, 644, 893, 340, 237, 902, 585, 343,
651, 877, 294, 655, 562, 691, 226, 953, 750, 102, 112,
195, 261, 320, 371, 701, 316, 923, 18, 304, 237, 105,
132, 420, 458, 619, 501, 639, 308, 328, 800, 718, 680,
742, 973, 191, 976, 873, 889, 493, 939, 411, 635, 638,
178, 862, 383, 683, 647, 361, 804, 625, 778, 555, 337,
37, 549, 212, 937, 517, 891, 381, 239, 85, 841, 931,
397, 381, 19, 154, 248, 167, 991, 830, 649, 490, 491,
700, 980, 123, 828])
def test_snail_095(self):
self.assertEqual(snail([[247, 36, 147, 670, 85, 302, 290, 318, 625, 571,
925, 293, 329, 386, 513, 32],
[886, 355, 260, 484, 589, 633, 64, 999, 160,
927, 937, 306, 722, 480, 171, 593],
[243, 262, 207, 601, 850, 221, 834, 478, 394, 6,
926, 500, 705, 771, 947, 559],
[894, 64, 204, 221, 196, 17, 465, 978, 251, 395,
208, 623, 457, 274, 198, 982],
[826, 24, 211, 166, 285, 800, 358, 180, 336,
708, 965, 855, 607, 283, 186, 114],
[177, 887, 42, 168, 420, 708, 632, 953, 929,
246, 355, 617, 576, 783, 892, 527],
[393, 714, 22, 905, 724, 749, 226, 128, 689,
924, 203, 353, 502, 583, 363, 249],
[633, 275, 241, 730, 109, 748, 482, 465, 672,
567, 739, 772, 677, 299, 492, 832],
[701, 706, 283, 866, 551, 893, 928, 136, 822,
892, 100, 11, 686, 759, 780, 799],
[818, 515, 137, 699, 122, 187, 587, 708, 819,
842, 689, 234, 229, 763, 484, 512],
[770, 663, 833, 676, 994, 54, 207, 133, 444,
707, 541, 23, 588, 214, 752, 980],
[121, 54, 432, 672, 767, 47, 945, 497, 433, 422,
913, 688, 703, 289, 933, 736],
[80, 683, 447, 359, 245, 935, 348, 196, 118,
637, 938, 270, 532, 97, 647, 329],
[385, 201, 425, 426, 579, 166, 983, 31, 646,
810, 156, 102, 151, 13, 212, 127],
[677, 439, 224, 931, 557, 572, 31, 122, 107,
812, 796, 934, 956, 74, 372, 311],
[807, 154, 33, 598, 333, 42, 7, 937, 312, 911,
186, 918, 962, 554, 746, 436]]),
[247, 36, 147, 670, 85, 302, 290, 318, 625, 571, 925,
293, 329, 386, 513, 32, 593, 559, 982, 114, 527, 249,
832, 799, 512, 980, 736, 329, 127, 311, 436, 746, 554,
962, 918, 186, 911, 312, 937, 7, 42, 333, 598, 33,
154, 807, 677, 385, 80, 121, 770, 818, 701, 633, 393,
177, 826, 894, 243, 886, 355, 260, 484, 589, 633, 64,
999, 160, 927, 937, 306, 722, 480, 171, 947, 198, 186,
892, 363, 492, 780, 484, 752, 933, 647, 212, 372, 74,
956, 934, 796, 812, 107, 122, 31, 572, 557, 931, 224,
439, 201, 683, 54, 663, 515, 706, 275, 714, 887, 24,
64, 262, 207, 601, 850, 221, 834, 478, 394, 6, 926,
500, 705, 771, 274, 283, 783, 583, 299, 759, 763, 214,
289, 97, 13, 151, 102, 156, 810, 646, 31, 983, 166,
579, 426, 425, 447, 432, 833, 137, 283, 241, 22, 42,
211, 204, 221, 196, 17, 465, 978, 251, 395, 208, 623,
457, 607, 576, 502, 677, 686, 229, 588, 703, 532, 270,
938, 637, 118, 196, 348, 935, 245, 359, 672, 676, 699,
866, 730, 905, 168, 166, 285, 800, 358, 180, 336, 708,
965, 855, 617, 353, 772, 11, 234, 23, 688, 913, 422,
433, 497, 945, 47, 767, 994, 122, 551, 109, 724, 420,
708, 632, 953, 929, 246, 355, 203, 739, 100, 689, 541,
707, 444, 133, 207, 54, 187, 893, 748, 749, 226, 128,
689, 924, 567, 892, 842, 819, 708, 587, 928, 482, 465,
672, 822, 136])
def test_snail_096(self):
self.assertEqual(snail([[433, 873, 34, 538, 182, 479, 447, 919, 491,
799, 321, 798, 96, 351, 199, 595, 384],
[688, 520, 440, 10, 768, 283, 286, 980, 786,
632, 724, 772, 776, 791, 526, 902, 143],
[221, 380, 963, 134, 81, 12, 212, 931, 854, 929,
258, 266, 191, 692, 975, 245, 686],
[371, 60, 849, 373, 934, 222, 750, 480, 817,
384, 623, 223, 965, 716, 502, 306, 419],
[137, 668, 412, 520, 759, 695, 35, 791, 512,
272, 880, 453, 79, 2, 813, 383, 715],
[350, 505, 927, 713, 478, 969, 462, 3, 343, 237,
219, 780, 231, 486, 539, 82, 129],
[405, 363, 901, 599, 117, 102, 317, 683, 880,
226, 757, 863, 175, 434, 903, 555, 152],
[918, 331, 443, 864, 933, 126, 463, 526, 570,
243, 866, 184, 895, 478, 413, 143, 900],
[976, 855, 41, 630, 829, 195, 443, 10, 447, 401,
592, 779, 213, 162, 359, 592, 496],
[892, 131, 875, 900, 416, 266, 524, 162, 561,
14, 148, 103, 869, 412, 229, 490, 961],
[589, 282, 373, 491, 878, 25, 541, 207, 642,
380, 971, 581, 721, 500, 135, 98, 425],
[523, 846, 203, 737, 445, 213, 138, 238, 295,
272, 338, 760, 539, 354, 195, 109, 271],
[948, 521, 513, 819, 497, 73, 487, 760, 899,
687, 330, 409, 476, 725, 3, 261, 101],
[690, 406, 882, 6, 341, 931, 135, 659, 746, 960,
709, 42, 621, 741, 6, 444, 496],
[351, 159, 223, 361, 865, 142, 82, 556, 953,
789, 642, 491, 346, 912, 262, 534, 442],
[397, 421, 707, 864, 685, 406, 76, 577, 159,
210, 885, 229, 54, 617, 945, 153, 928],
[778, 175, 280, 641, 290, 911, 692, 538, 48,
480, 772, 400, 119, 691, 539, 728, 27]]),
[433, 873, 34, 538, 182, 479, 447, 919, 491, 799, 321,
798, 96, 351, 199, 595, 384, 143, 686, 419, 715, 129,
152, 900, 496, 961, 425, 271, 101, 496, 442, 928, 27,
728, 539, 691, 119, 400, 772, 480, 48, 538, 692, 911,
290, 641, 280, 175, 778, 397, 351, 690, 948, 523, 589,
892, 976, 918, 405, 350, 137, 371, 221, 688, 520, 440,
10, 768, 283, 286, 980, 786, 632, 724, 772, 776, 791,
526, 902, 245, 306, 383, 82, 555, 143, 592, 490, 98,
109, 261, 444, 534, 153, 945, 617, 54, 229, 885, 210,
159, 577, 76, 406, 685, 864, 707, 421, 159, 406, 521,
846, 282, 131, 855, 331, 363, 505, 668, 60, 380, 963,
134, 81, 12, 212, 931, 854, 929, 258, 266, 191, 692,
975, 502, 813, 539, 903, 413, 359, 229, 135, 195, 3,
6, 262, 912, 346, 491, 642, 789, 953, 556, 82, 142,
865, 361, 223, 882, 513, 203, 373, 875, 41, 443, 901,
927, 412, 849, 373, 934, 222, 750, 480, 817, 384, 623,
223, 965, 716, 2, 486, 434, 478, 162, 412, 500, 354,
725, 741, 621, 42, 709, 960, 746, 659, 135, 931, 341,
6, 819, 737, 491, 900, 630, 864, 599, 713, 520, 759,
695, 35, 791, 512, 272, 880, 453, 79, 231, 175, 895,
213, 869, 721, 539, 476, 409, 330, 687, 899, 760, 487,
73, 497, 445, 878, 416, 829, 933, 117, 478, 969, 462,
3, 343, 237, 219, 780, 863, 184, 779, 103, 581, 760,
338, 272, 295, 238, 138, 213, 25, 266, 195, 126, 102,
317, 683, 880, 226, 757, 866, 592, 148, 971, 380, 642,
207, 541, 524, 443, 463, 526, 570, 243, 401, 14, 561,
162, 10, 447])
def test_snail_097(self):
self.assertEqual(snail(
[[631, 668, 646, 712, 825], [953, 573, 100, 756, 783],
[445, 553, 384, 130, 668], [157, 805, 969, 18, 304],
[551, 676, 558, 200, 793]]),
[631, 668, 646, 712, 825, 783, 668, 304, 793, 200, 558,
676, 551, 157, 445, 953, 573, 100, 756, 130, 18, 969,
805, 553, 384])
def test_snail_098(self):
self.assertEqual(snail(
[[236, 796, 566, 79, 878, 3], [813, 495, 352, 703, 329, 840],
[874, 879, 560, 307, 997, 4], [596, 458, 407, 889, 536, 319],
[334, 151, 460, 511, 411, 855],
[144, 572, 272, 495, 545, 622]]),
[236, 796, 566, 79, 878, 3, 840, 4, 319, 855, 622, 545,
495, 272, 572, 144, 334, 596, 874, 813, 495, 352, 703,
329, 997, 536, 411, 511, 460, 151, 458, 879, 560, 307,
889, 407])
def test_snail_099(self):
self.assertEqual(snail(
[[222, 261, 661, 331, 511, 364], [571, 689, 58, 265, 565, 413],
[68, 5, 853, 541, 890, 410], [571, 71, 770, 563, 603, 521],
[42, 417, 725, 971, 15, 780], [958, 98, 870, 20, 856, 994]]),
[222, 261, 661, 331, 511, 364, 413, 410, 521, 780, 994,
856, 20, 870, 98, 958, 42, 571, 68, 571, 689, 58, 265,
565, 890, 603, 15, 971, 725, 417, 71, 5, 853, 541,
563, 770])
def test_snail_100(self):
self.assertEqual(snail(
[[641, 678, 48, 894, 850], [974, 949, 998, 825, 286],
[979, 768, 792, 384, 688], [430, 622, 694, 337, 275],
[494, 313, 309, 70, 415]]),
[641, 678, 48, 894, 850, 286, 688, 275, 415, 70, 309,
313, 494, 430, 979, 974, 949, 998, 825, 384, 337, 694,
622, 768, 792])
def test_snail_101(self):
self.assertEqual(snail(
[[600, 786, 254, 655, 13], [815, 239, 774, 325, 57],
[499, 747, 251, 914, 861], [605, 594, 499, 646, 290],
[650, 496, 385, 387, 819]]),
[600, 786, 254, 655, 13, 57, 861, 290, 819, 387, 385,
496, 650, 605, 499, 815, 239, 774, 325, 914, 646, 499,
594, 747, 251])
def test_snail_102(self):
self.assertEqual(snail(
[[903, 696, 410, 542, 956, 889], [381, 306, 48, 102, 268, 355],
[341, 279, 537, 59, 605, 18], [909, 865, 234, 770, 206, 806],
[784, 781, 945, 285, 355, 765],
[100, 748, 473, 319, 150, 998]]),
[903, 696, 410, 542, 956, 889, 355, 18, 806, 765, 998,
150, 319, 473, 748, 100, 784, 909, 341, 381, 306, 48,
102, 268, 605, 206, 355, 285, 945, 781, 865, 279, 537,
59, 770, 234])
def test_snail_103(self):
self.assertEqual(snail([[67, 123, 678, 842, 28, 690, 189, 182, 636, 645,
118, 123, 95, 723],
[119, 324, 247, 8, 860, 329, 180, 791, 92, 5,
896, 921, 157, 781],
[756, 950, 738, 573, 101, 446, 468, 594, 316,
962, 708, 168, 889, 8],
[301, 352, 920, 673, 245, 759, 242, 43, 761,
460, 76, 551, 315, 376],
[915, 367, 345, 8, 132, 840, 451, 965, 757, 558,
94, 882, 847, 82],
[949, 651, 239, 941, 544, 782, 220, 763, 384,
515, 840, 377, 809, 300],
[527, 728, 875, 620, 247, 792, 385, 44, 925,
697, 947, 541, 224, 364],
[824, 885, 723, 353, 77, 915, 880, 339, 809,
919, 931, 569, 980, 357],
[89, 100, 893, 516, 786, 59, 365, 967, 122, 103,
55, 42, 683, 101],
[281, 457, 924, 813, 624, 997, 346, 613, 116,
655, 465, 786, 936, 94],
[185, 898, 588, 272, 712, 367, 435, 660, 152,
896, 792, 670, 272, 397],
[891, 754, 108, 844, 44, 648, 684, 571, 201,
745, 440, 88, 511, 214],
[130, 632, 977, 354, 353, 918, 736, 349, 662,
185, 31, 307, 460, 17],
[910, 947, 686, 668, 857, 345, 654, 678, 27, 78,
445, 639, 130, 970]]),
[67, 123, 678, 842, 28, 690, 189, 182, 636, 645, 118,
123, 95, 723, 781, 8, 376, 82, 300, 364, 357, 101, 94,
397, 214, 17, 970, 130, 639, 445, 78, 27, 678, 654,
345, 857, 668, 686, 947, 910, 130, 891, 185, 281, 89,
824, 527, 949, 915, 301, 756, 119, 324, 247, 8, 860,
329, 180, 791, 92, 5, 896, 921, 157, 889, 315, 847,
809, 224, 980, 683, 936, 272, 511, 460, 307, 31, 185,
662, 349, 736, 918, 353, 354, 977, 632, 754, 898, 457,
100, 885, 728, 651, 367, 352, 950, 738, 573, 101, 446,
468, 594, 316, 962, 708, 168, 551, 882, 377, 541, 569,
42, 786, 670, 88, 440, 745, 201, 571, 684, 648, 44,
844, 108, 588, 924, 893, 723, 875, 239, 345, 920, 673,
245, 759, 242, 43, 761, 460, 76, 94, 840, 947, 931,
55, 465, 792, 896, 152, 660, 435, 367, 712, 272, 813,
516, 353, 620, 941, 8, 132, 840, 451, 965, 757, 558,
515, 697, 919, 103, 655, 116, 613, 346, 997, 624, 786,
77, 247, 544, 782, 220, 763, 384, 925, 809, 122, 967,
365, 59, 915, 792, 385, 44, 339, 880])
def test_snail_104(self):
self.assertEqual(snail(
[[676, 9, 91, 957, 699, 130, 983, 164, 75, 728, 784, 125],
[959, 741, 710, 154, 452, 919, 66, 444, 298, 140, 516, 760],
[969, 514, 898, 621, 984, 616, 724, 738, 410, 758, 829, 482],
[82, 491, 417, 173, 563, 494, 155, 763, 850, 915, 625, 396],
[407, 299, 773, 4, 428, 630, 822, 484, 922, 625, 114, 116],
[571, 180, 379, 284, 947, 688, 749, 312, 502, 935, 879, 166],
[641, 719, 73, 623, 940, 590, 81, 644, 997, 378, 847, 501],
[143, 843, 814, 259, 354, 459, 804, 43, 854, 1000, 382, 717],
[602, 64, 272, 149, 285, 862, 958, 662, 556, 426, 259, 197],
[341, 882, 400, 559, 227, 498, 699, 298, 354, 739, 67, 465],
[43, 85, 467, 557, 614, 63, 680, 434, 937, 472, 488, 243],
[978, 607, 559, 134, 531, 907, 4, 813, 259, 203, 384, 836]]),
[676, 9, 91, 957, 699, 130, 983, 164, 75, 728, 784,
125, 760, 482, 396, 116, 166, 501, 717, 197, 465, 243,
836, 384, 203, 259, 813, 4, 907, 531, 134, 559, 607,
978, 43, 341, 602, 143, 641, 571, 407, 82, 969, 959,
741, 710, 154, 452, 919, 66, 444, 298, 140, 516, 829,
625, 114, 879, 847, 382, 259, 67, 488, 472, 937, 434,
680, 63, 614, 557, 467, 85, 882, 64, 843, 719, 180,
299, 491, 514, 898, 621, 984, 616, 724, 738, 410, 758,
915, 625, 935, 378, 1000, 426, 739, 354, 298, 699,
498, 227, 559, 400, 272, 814, 73, 379, 773, 417, 173,
563, 494, 155, 763, 850, 922, 502, 997, 854, 556, 662,
958, 862, 285, 149, 259, 623, 284, 4, 428, 630, 822,
484, 312, 644, 43, 804, 459, 354, 940, 947, 688, 749,
81, 590])
def test_snail_105(self):
self.assertEqual(snail([[221, 977, 163, 642, 495, 250, 823, 751, 152,
681, 814, 539, 941],
[468, 2, 934, 705, 319, 208, 994, 960, 167, 267,
861, 499, 535],
[516, 31, 300, 893, 235, 842, 13, 44, 235, 236,
590, 711, 174],
[485, 83, 36, 462, 822, 458, 963, 829, 626, 699,
602, 248, 286],
[978, 561, 292, 53, 972, 119, 694, 401, 852,
589, 498, 115, 828],
[737, 896, 881, 863, 502, 217, 584, 390, 621,
373, 777, 312, 425],
[453, 827, 774, 783, 322, 350, 606, 786, 709,
627, 579, 314, 700],
[651, 203, 681, 752, 10, 116, 64, 885, 121, 445,
385, 283, 307],
[884, 35, 523, 791, 169, 338, 411, 749, 48, 662,
878, 314, 802],
[137, 755, 589, 409, 870, 857, 687, 37, 818,
206, 952, 505, 337],
[695, 928, 533, 370, 363, 71, 386, 823, 685,
859, 107, 313, 958],
[58, 267, 988, 746, 601, 767, 701, 27, 565, 434,
734, 942, 572],
[405, 442, 424, 298, 14, 428, 699, 906, 900,
928, 97, 783, 273]]),
[221, 977, 163, 642, 495, 250, 823, 751, 152, 681, 814,
539, 941, 535, 174, 286, 828, 425, 700, 307, 802, 337,
958, 572, 273, 783, 97, 928, 900, 906, 699, 428, 14,
298, 424, 442, 405, 58, 695, 137, 884, 651, 453, 737,
978, 485, 516, 468, 2, 934, 705, 319, 208, 994, 960,
167, 267, 861, 499, 711, 248, 115, 312, 314, 283, 314,
505, 313, 942, 734, 434, 565, 27, 701, 767, 601, 746,
988, 267, 928, 755, 35, 203, 827, 896, 561, 83, 31,
300, 893, 235, 842, 13, 44, 235, 236, 590, 602, 498,
777, 579, 385, 878, 952, 107, 859, 685, 823, 386, 71,
363, 370, 533, 589, 523, 681, 774, 881, 292, 36, 462,
822, 458, 963, 829, 626, 699, 589, 373, 627, 445, 662,
206, 818, 37, 687, 857, 870, 409, 791, 752, 783, 863,
53, 972, 119, 694, 401, 852, 621, 709, 121, 48, 749,
411, 338, 169, 10, 322, 502, 217, 584, 390, 786, 885,
64, 116, 350, 606])
| [((144, 155), 'snail.snail', 'snail', (['[[]]'], {}), '([[]])\n', (149, 155), False, 'from snail import snail\n'), ((221, 233), 'snail.snail', 'snail', (['[[1]]'], {}), '([[1]])\n', (226, 233), False, 'from snail import snail\n'), ((300, 340), 'snail.snail', 'snail', (['[[1, 2, 3], [4, 5, 6], [7, 8, 9]]'], {}), '([[1, 2, 3], [4, 5, 6], [7, 8, 9]])\n', (305, 340), False, 'from snail import snail\n'), ((456, 568), 'snail.snail', 'snail', (['[[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18, 19, \n 20], [21, 22, 23, 24, 25]]'], {}), '([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18,\n 19, 20], [21, 22, 23, 24, 25]])\n', (461, 568), False, 'from snail import snail\n'), ((804, 966), 'snail.snail', 'snail', (['[[1, 2, 3, 4, 5, 6], [20, 21, 22, 23, 24, 7], [19, 32, 33, 34, 25, 8], [18,\n 31, 36, 35, 26, 9], [17, 30, 29, 28, 27, 10], [16, 15, 14, 13, 12, 11]]'], {}), '([[1, 2, 3, 4, 5, 6], [20, 21, 22, 23, 24, 7], [19, 32, 33, 34, 25, 8],\n [18, 31, 36, 35, 26, 9], [17, 30, 29, 28, 27, 10], [16, 15, 14, 13, 12,\n 11]])\n', (809, 966), False, 'from snail import snail\n'), ((1362, 1376), 'snail.snail', 'snail', (['[[350]]'], {}), '([[350]])\n', (1367, 1376), False, 'from snail import snail\n'), ((1445, 3181), 'snail.snail', 'snail', (['[[545, 588, 42, 119, 791, 866, 142, 699, 611, 400, 465, 373, 30, 71, 950, \n 813, 850, 652], [664, 853, 1000, 561, 102, 363, 807, 553, 973, 643, 142,\n 433, 378, 702, 250, 641, 967, 172], [908, 928, 776, 82, 547, 224, 730, \n 158, 169, 8, 111, 847, 891, 142, 906, 609, 443, 211], [417, 35, 192, \n 167, 579, 885, 160, 755, 522, 360, 382, 783, 986, 474, 761, 416, 564, \n 561], [506, 160, 540, 575, 374, 854, 313, 656, 546, 924, 838, 831, 23, \n 146, 116, 136, 47, 889], [932, 515, 627, 982, 886, 609, 67, 966, 262, \n 953, 299, 246, 488, 526, 524, 855, 954, 752], [182, 310, 522, 423, 907,\n 743, 440, 827, 548, 162, 397, 494, 592, 629, 856, 288, 601, 188], [963,\n 862, 9, 812, 947, 721, 37, 170, 69, 326, 661, 829, 69, 341, 100, 751, \n 951, 844], [252, 831, 293, 346, 830, 639, 657, 425, 294, 47, 477, 786, \n 852, 821, 858, 438, 251, 296], [136, 891, 795, 298, 144, 686, 845, 781,\n 737, 850, 413, 88, 333, 311, 628, 962, 785, 548], [307, 294, 706, 298, \n 825, 108, 508, 358, 978, 707, 26, 774, 332, 252, 407, 466, 77, 141], [\n 803, 134, 246, 768, 431, 724, 448, 362, 875, 983, 188, 254, 332, 249, \n 162, 167, 911, 639], [266, 399, 765, 878, 433, 414, 178, 225, 507, 112,\n 510, 124, 88, 969, 728, 18, 813, 763], [714, 580, 290, 935, 331, 813, \n 781, 114, 183, 437, 287, 870, 719, 690, 880, 319, 939, 715], [283, 165,\n 518, 34, 109, 638, 327, 3, 369, 979, 696, 845, 34, 498, 736, 372, 166, \n 931], [728, 490, 910, 342, 460, 955, 876, 935, 976, 887, 190, 517, 362,\n 15, 486, 138, 681, 996], [585, 139, 62, 485, 628, 667, 213, 29, 910, \n 333, 854, 201, 613, 27, 552, 244, 251, 177], [222, 791, 454, 246, 525, \n 626, 58, 512, 642, 561, 309, 674, 607, 441, 728, 782, 375, 113]]'], {}), '([[545, 588, 42, 119, 791, 866, 142, 699, 611, 400, 465, 373, 30, 71, \n 950, 813, 850, 652], [664, 853, 1000, 561, 102, 363, 807, 553, 973, 643,\n 142, 433, 378, 702, 250, 641, 967, 172], [908, 928, 776, 82, 547, 224, \n 730, 158, 169, 8, 111, 847, 891, 142, 906, 609, 443, 211], [417, 35, \n 192, 167, 579, 885, 160, 755, 522, 360, 382, 783, 986, 474, 761, 416, \n 564, 561], [506, 160, 540, 575, 374, 854, 313, 656, 546, 924, 838, 831,\n 23, 146, 116, 136, 47, 889], [932, 515, 627, 982, 886, 609, 67, 966, \n 262, 953, 299, 246, 488, 526, 524, 855, 954, 752], 
[182, 310, 522, 423,\n 907, 743, 440, 827, 548, 162, 397, 494, 592, 629, 856, 288, 601, 188],\n [963, 862, 9, 812, 947, 721, 37, 170, 69, 326, 661, 829, 69, 341, 100, \n 751, 951, 844], [252, 831, 293, 346, 830, 639, 657, 425, 294, 47, 477, \n 786, 852, 821, 858, 438, 251, 296], [136, 891, 795, 298, 144, 686, 845,\n 781, 737, 850, 413, 88, 333, 311, 628, 962, 785, 548], [307, 294, 706, \n 298, 825, 108, 508, 358, 978, 707, 26, 774, 332, 252, 407, 466, 77, 141\n ], [803, 134, 246, 768, 431, 724, 448, 362, 875, 983, 188, 254, 332, \n 249, 162, 167, 911, 639], [266, 399, 765, 878, 433, 414, 178, 225, 507,\n 112, 510, 124, 88, 969, 728, 18, 813, 763], [714, 580, 290, 935, 331, \n 813, 781, 114, 183, 437, 287, 870, 719, 690, 880, 319, 939, 715], [283,\n 165, 518, 34, 109, 638, 327, 3, 369, 979, 696, 845, 34, 498, 736, 372, \n 166, 931], [728, 490, 910, 342, 460, 955, 876, 935, 976, 887, 190, 517,\n 362, 15, 486, 138, 681, 996], [585, 139, 62, 485, 628, 667, 213, 29, \n 910, 333, 854, 201, 613, 27, 552, 244, 251, 177], [222, 791, 454, 246, \n 525, 626, 58, 512, 642, 561, 309, 674, 607, 441, 728, 782, 375, 113]])\n', (1450, 3181), False, 'from snail import snail\n'), ((6651, 6926), 'snail.snail', 'snail', (['[[844, 865, 787, 987, 255, 928, 812], [533, 376, 869, 60, 824, 527, 355], [\n 238, 330, 215, 201, 335, 29, 225], [828, 63, 172, 620, 315, 361, 758],\n [14, 964, 210, 530, 997, 568, 288], [855, 152, 486, 856, 360, 545, 564],\n [549, 259, 544, 508, 793, 934, 567]]'], {}), '([[844, 865, 787, 987, 255, 928, 812], [533, 376, 869, 60, 824, 527, \n 355], [238, 330, 215, 201, 335, 29, 225], [828, 63, 172, 620, 315, 361,\n 758], [14, 964, 210, 530, 997, 568, 288], [855, 152, 486, 856, 360, 545,\n 564], [549, 259, 544, 508, 793, 934, 567]])\n', (6656, 6926), False, 'from snail import snail\n'), ((7539, 9266), 'snail.snail', 'snail', (['[[277, 149, 76, 473, 385, 633, 41, 517, 918, 462, 769, 726, 161, 694, 26, \n 717, 309, 484], [822, 156, 851, 683, 303, 638, 818, 714, 303, 509, 353,\n 557, 51, 592, 663, 475, 725, 40], [40, 155, 345, 977, 600, 812, 851, \n 559, 152, 256, 965, 586, 591, 966, 146, 868, 262, 931], [855, 170, 534,\n 89, 73, 910, 741, 195, 4, 547, 916, 887, 912, 610, 815, 619, 508, 196],\n [600, 735, 378, 713, 511, 639, 703, 269, 326, 650, 223, 993, 760, 894, \n 430, 705, 896, 814], [444, 223, 939, 289, 624, 837, 541, 975, 608, 446,\n 787, 963, 647, 660, 827, 544, 894, 634], [643, 836, 653, 921, 77, 574, \n 411, 242, 52, 242, 411, 827, 875, 617, 653, 180, 85, 390], [592, 287, \n 28, 699, 663, 170, 548, 812, 792, 68, 376, 733, 147, 475, 803, 513, 815,\n 515], [366, 76, 557, 607, 661, 516, 434, 136, 41, 551, 670, 662, 248, \n 205, 485, 509, 59, 833], [394, 608, 437, 669, 92, 194, 441, 444, 68, \n 269, 512, 104, 121, 176, 422, 278, 953, 69], [187, 714, 933, 50, 576, \n 276, 594, 283, 258, 268, 95, 111, 353, 139, 342, 274, 141, 69], [588, \n 50, 105, 400, 470, 733, 51, 342, 193, 6, 909, 690, 697, 215, 612, 27, \n 629, 861], [784, 253, 98, 563, 118, 138, 610, 486, 602, 779, 153, 478, \n 956, 107, 460, 850, 447, 21], [690, 48, 219, 72, 384, 261, 474, 383, \n 632, 868, 922, 826, 651, 612, 684, 339, 418, 743], [955, 462, 403, 996,\n 131, 70, 485, 523, 407, 932, 100, 688, 240, 970, 98, 681, 356, 609], [\n 376, 795, 982, 482, 813, 496, 635, 618, 728, 96, 982, 884, 362, 168, \n 470, 919, 672, 921], [327, 201, 195, 628, 731, 453, 778, 719, 751, 115,\n 429, 675, 983, 281, 389, 396, 876, 484], [867, 449, 958, 381, 640, 749,\n 216, 358, 226, 155, 568, 795, 584, 220, 900, 207, 12, 440]]'], {}), '([[277, 149, 
76, 473, 385, 633, 41, 517, 918, 462, 769, 726, 161, 694,\n 26, 717, 309, 484], [822, 156, 851, 683, 303, 638, 818, 714, 303, 509, \n 353, 557, 51, 592, 663, 475, 725, 40], [40, 155, 345, 977, 600, 812, \n 851, 559, 152, 256, 965, 586, 591, 966, 146, 868, 262, 931], [855, 170,\n 534, 89, 73, 910, 741, 195, 4, 547, 916, 887, 912, 610, 815, 619, 508, \n 196], [600, 735, 378, 713, 511, 639, 703, 269, 326, 650, 223, 993, 760,\n 894, 430, 705, 896, 814], [444, 223, 939, 289, 624, 837, 541, 975, 608,\n 446, 787, 963, 647, 660, 827, 544, 894, 634], [643, 836, 653, 921, 77, \n 574, 411, 242, 52, 242, 411, 827, 875, 617, 653, 180, 85, 390], [592, \n 287, 28, 699, 663, 170, 548, 812, 792, 68, 376, 733, 147, 475, 803, 513,\n 815, 515], [366, 76, 557, 607, 661, 516, 434, 136, 41, 551, 670, 662, \n 248, 205, 485, 509, 59, 833], [394, 608, 437, 669, 92, 194, 441, 444, \n 68, 269, 512, 104, 121, 176, 422, 278, 953, 69], [187, 714, 933, 50, \n 576, 276, 594, 283, 258, 268, 95, 111, 353, 139, 342, 274, 141, 69], [\n 588, 50, 105, 400, 470, 733, 51, 342, 193, 6, 909, 690, 697, 215, 612, \n 27, 629, 861], [784, 253, 98, 563, 118, 138, 610, 486, 602, 779, 153, \n 478, 956, 107, 460, 850, 447, 21], [690, 48, 219, 72, 384, 261, 474, \n 383, 632, 868, 922, 826, 651, 612, 684, 339, 418, 743], [955, 462, 403,\n 996, 131, 70, 485, 523, 407, 932, 100, 688, 240, 970, 98, 681, 356, 609\n ], [376, 795, 982, 482, 813, 496, 635, 618, 728, 96, 982, 884, 362, 168,\n 470, 919, 672, 921], [327, 201, 195, 628, 731, 453, 778, 719, 751, 115,\n 429, 675, 983, 281, 389, 396, 876, 484], [867, 449, 958, 381, 640, 749,\n 216, 358, 226, 155, 568, 795, 584, 220, 900, 207, 12, 440]])\n', (7544, 9266), False, 'from snail import snail\n'), ((12729, 13934), 'snail.snail', 'snail', (['[[831, 609, 235, 391, 645, 469, 352, 982, 96, 596, 79, 460, 438, 280, 390],\n [639, 19, 257, 411, 862, 508, 652, 265, 609, 188, 443, 425, 584, 11, \n 329], [616, 731, 442, 315, 530, 954, 306, 455, 808, 921, 604, 282, 695,\n 778, 711], [205, 735, 423, 803, 480, 736, 47, 13, 478, 960, 268, 844, \n 611, 102, 489], [271, 314, 134, 650, 634, 984, 925, 565, 67, 651, 139, \n 697, 735, 616, 83], [124, 381, 202, 355, 488, 99, 269, 486, 900, 601, \n 449, 777, 607, 702, 504], [259, 357, 104, 126, 784, 649, 30, 243, 716, \n 436, 917, 272, 629, 864, 131], [333, 402, 81, 766, 352, 14, 227, 796, \n 572, 623, 176, 196, 870, 5, 822], [469, 67, 286, 430, 711, 336, 78, 384,\n 71, 783, 832, 458, 940, 511, 160], [783, 286, 352, 679, 233, 493, 549, \n 83, 137, 498, 450, 214, 856, 925, 585], [360, 663, 80, 307, 411, 97, 42,\n 857, 865, 954, 30, 778, 691, 880, 898], [354, 373, 818, 619, 465, 957, \n 268, 876, 19, 58, 163, 138, 283, 970, 267], [773, 79, 892, 808, 810, 35,\n 147, 377, 502, 400, 742, 345, 35, 120, 859], [933, 643, 548, 241, 817, \n 661, 936, 837, 571, 596, 177, 296, 531, 836, 805], [915, 268, 534, 369,\n 791, 90, 843, 104, 293, 92, 270, 306, 226, 797, 903]]'], {}), '([[831, 609, 235, 391, 645, 469, 352, 982, 96, 596, 79, 460, 438, 280,\n 390], [639, 19, 257, 411, 862, 508, 652, 265, 609, 188, 443, 425, 584, \n 11, 329], [616, 731, 442, 315, 530, 954, 306, 455, 808, 921, 604, 282, \n 695, 778, 711], [205, 735, 423, 803, 480, 736, 47, 13, 478, 960, 268, \n 844, 611, 102, 489], [271, 314, 134, 650, 634, 984, 925, 565, 67, 651, \n 139, 697, 735, 616, 83], [124, 381, 202, 355, 488, 99, 269, 486, 900, \n 601, 449, 777, 607, 702, 504], [259, 357, 104, 126, 784, 649, 30, 243, \n 716, 436, 917, 272, 629, 864, 131], [333, 402, 81, 766, 352, 14, 227, \n 796, 572, 623, 176, 196, 
870, 5, 822], [469, 67, 286, 430, 711, 336, 78,\n 384, 71, 783, 832, 458, 940, 511, 160], [783, 286, 352, 679, 233, 493, \n 549, 83, 137, 498, 450, 214, 856, 925, 585], [360, 663, 80, 307, 411, \n 97, 42, 857, 865, 954, 30, 778, 691, 880, 898], [354, 373, 818, 619, \n 465, 957, 268, 876, 19, 58, 163, 138, 283, 970, 267], [773, 79, 892, \n 808, 810, 35, 147, 377, 502, 400, 742, 345, 35, 120, 859], [933, 643, \n 548, 241, 817, 661, 936, 837, 571, 596, 177, 296, 531, 836, 805], [915,\n 268, 534, 369, 791, 90, 843, 104, 293, 92, 270, 306, 226, 797, 903]])\n', (12734, 13934), False, 'from snail import snail\n'), ((16510, 17432), 'snail.snail', 'snail', (['[[900, 61, 525, 325, 420, 389, 718, 967, 116, 156, 877, 301, 815], [325, \n 921, 851, 66, 226, 759, 166, 754, 972, 199, 26, 673, 81], [953, 211, \n 277, 170, 498, 206, 11, 766, 742, 101, 661, 674, 501], [613, 645, 897, \n 883, 24, 499, 408, 404, 93, 464, 815, 546, 830], [103, 374, 494, 259, \n 597, 463, 83, 658, 867, 321, 311, 942, 265], [279, 214, 989, 896, 644, \n 152, 130, 439, 917, 664, 293, 835, 469], [114, 212, 935, 146, 589, 399,\n 128, 61, 242, 1000, 695, 340, 119], [67, 258, 342, 377, 207, 186, 296, \n 249, 902, 607, 168, 151, 890], [331, 274, 68, 643, 694, 918, 141, 718, \n 26, 659, 786, 247, 685], [760, 128, 36, 115, 509, 292, 665, 755, 426, \n 380, 813, 1000, 366], [459, 285, 200, 835, 851, 925, 217, 506, 749, 313,\n 546, 588, 902], [475, 556, 67, 602, 323, 842, 248, 103, 413, 276, 513, \n 254, 478], [478, 749, 519, 165, 158, 393, 952, 614, 291, 781, 344, 774, 42]\n ]'], {}), '([[900, 61, 525, 325, 420, 389, 718, 967, 116, 156, 877, 301, 815], [\n 325, 921, 851, 66, 226, 759, 166, 754, 972, 199, 26, 673, 81], [953, \n 211, 277, 170, 498, 206, 11, 766, 742, 101, 661, 674, 501], [613, 645, \n 897, 883, 24, 499, 408, 404, 93, 464, 815, 546, 830], [103, 374, 494, \n 259, 597, 463, 83, 658, 867, 321, 311, 942, 265], [279, 214, 989, 896, \n 644, 152, 130, 439, 917, 664, 293, 835, 469], [114, 212, 935, 146, 589,\n 399, 128, 61, 242, 1000, 695, 340, 119], [67, 258, 342, 377, 207, 186, \n 296, 249, 902, 607, 168, 151, 890], [331, 274, 68, 643, 694, 918, 141, \n 718, 26, 659, 786, 247, 685], [760, 128, 36, 115, 509, 292, 665, 755, \n 426, 380, 813, 1000, 366], [459, 285, 200, 835, 851, 925, 217, 506, 749,\n 313, 546, 588, 902], [475, 556, 67, 602, 323, 842, 248, 103, 413, 276, \n 513, 254, 478], [478, 749, 519, 165, 158, 393, 952, 614, 291, 781, 344,\n 774, 42]])\n', (16515, 17432), False, 'from snail import snail\n'), ((19499, 21625), 'snail.snail', 'snail', (['[[743, 389, 404, 786, 6, 509, 887, 481, 858, 117, 671, 344, 7, 855, 551, \n 838, 500, 736, 981, 342], [823, 940, 897, 877, 616, 425, 425, 300, 769,\n 780, 755, 505, 48, 339, 987, 285, 118, 949, 245, 644], [68, 37, 515, \n 914, 885, 247, 552, 998, 53, 782, 913, 34, 413, 744, 462, 794, 589, 405,\n 233, 850], [905, 208, 712, 995, 261, 154, 768, 118, 908, 452, 706, 612,\n 584, 638, 480, 969, 345, 780, 435, 898], [714, 11, 654, 957, 564, 362, \n 231, 41, 721, 254, 202, 137, 126, 174, 832, 661, 382, 654, 516, 300], [\n 218, 667, 767, 610, 339, 531, 335, 234, 53, 735, 742, 818, 233, 26, 634,\n 229, 316, 436, 999, 348], [943, 451, 142, 545, 186, 542, 934, 22, 287, \n 166, 63, 495, 13, 433, 739, 270, 535, 305, 272, 254], [322, 892, 751, \n 856, 280, 706, 632, 796, 507, 633, 52, 86, 116, 753, 489, 294, 869, 135,\n 565, 102], [691, 412, 615, 389, 973, 462, 624, 172, 170, 56, 744, 558, \n 339, 871, 878, 495, 810, 454, 349, 261], [545, 378, 844, 494, 172, 465,\n 897, 608, 755, 74, 367, 853, 407, 
[apis field collapsed: this span is machine-extracted call-site metadata, roughly forty recorded invocations of `snail.snail` (imported via `from snail import snail`) from the repository's test suite. Each record pairs what appear to be source character offsets with the call's single argument, a square matrix of random integers in the range 1-1000 (sizes from 1x1, e.g. snail([[567]]), up to 20x20), and stores the argument text twice, once escaped and once normalized.]
((127790, 129512), 'snail.snail', 'snail', (['[[58, 407, 6, 598, 246, 664, 722, 382, 779, 444, 939, 572, 998, 857, 973, \n 783, 332, 192], [664, 600, 824, 153, 433, 187, 978, 637, 740, 427, 135,\n 816, 393, 522, 351, 940, 896, 65], [126, 399, 993, 472, 941, 3, 717, \n 884, 803, 688, 203, 219, 414, 589, 972, 999, 730, 672], [43, 467, 608, \n 228, 380, 252, 318, 177, 251, 657, 281, 509, 714, 14, 49, 909, 934, 672\n ], [718, 635, 676, 235, 349, 435, 914, 136, 476, 562, 653, 497, 338, 58,\n 63, 716, 187, 48], [530, 480, 937, 218, 277, 678, 434, 266, 334, 95, \n 270, 449, 631, 192, 309, 389, 564, 924], [279, 697, 22, 866, 170, 218, \n 584, 387, 992, 727, 188, 755, 564, 367, 27, 250, 250, 999], [561, 200, \n 392, 765, 31, 517, 431, 463, 28, 376, 49, 428, 336, 994, 311, 814, 27, \n 288], [928, 846, 706, 704, 835, 565, 752, 294, 404, 519, 269, 311, 38, \n 914, 216, 74, 364, 83], [415, 30, 240, 897, 143, 567, 250, 27, 872, 101,\n 345, 1000, 12, 47, 485, 188, 675, 861], [837, 586, 441, 706, 658, 312, \n 12, 823, 414, 485, 975, 621, 788, 912, 923, 260, 611, 863], [299, 973, \n 177, 461, 147, 265, 732, 9, 521, 211, 73, 300, 919, 316, 839, 956, 164,\n 950], [289, 604, 206, 623, 94, 84, 544, 200, 955, 230, 186, 194, 852, \n 47, 586, 687, 559, 809], [139, 266, 610, 674, 20, 856, 866, 721, 224, \n 61, 754, 599, 97, 827, 934, 724, 207, 281], [59, 42, 40, 155, 346, 392,\n 602, 768, 428, 104, 285, 74, 913, 885, 258, 79, 366, 114], [205, 16, \n 543, 155, 384, 415, 64, 375, 841, 387, 922, 909, 489, 846, 666, 378, \n 933, 908], [389, 178, 394, 265, 728, 108, 599, 398, 569, 480, 159, 635,\n 255, 421, 260, 230, 855, 267], [767, 767, 591, 319, 141, 136, 915, 262,\n 723, 932, 887, 891, 417, 101, 415, 178, 369, 179]]'], {}), '([[58, 407, 6, 598, 246, 664, 722, 382, 779, 444, 939, 572, 998, 857, \n 973, 783, 332, 192], [664, 600, 824, 153, 433, 187, 978, 637, 740, 427,\n 135, 816, 393, 522, 351, 940, 896, 65], [126, 399, 993, 472, 941, 3, \n 717, 884, 803, 688, 203, 219, 414, 589, 972, 999, 730, 672], [43, 467, \n 608, 228, 380, 252, 318, 177, 251, 657, 281, 509, 714, 14, 49, 909, 934,\n 672], [718, 635, 676, 235, 349, 435, 914, 136, 476, 562, 653, 497, 338,\n 58, 63, 716, 187, 48], [530, 480, 937, 218, 277, 678, 434, 266, 334, 95,\n 270, 449, 631, 192, 309, 389, 564, 924], [279, 697, 22, 866, 170, 218, \n 584, 387, 992, 727, 188, 755, 564, 367, 27, 250, 250, 999], [561, 200, \n 392, 765, 31, 517, 431, 463, 28, 376, 49, 428, 336, 994, 311, 814, 27, \n 288], [928, 846, 706, 704, 835, 565, 752, 294, 404, 519, 269, 311, 38, \n 914, 216, 74, 364, 83], [415, 30, 240, 897, 143, 567, 250, 27, 872, 101,\n 345, 1000, 12, 47, 485, 188, 675, 861], [837, 586, 441, 706, 658, 312, \n 12, 823, 414, 485, 975, 621, 788, 912, 923, 260, 611, 863], [299, 973, \n 177, 461, 147, 265, 732, 9, 521, 211, 73, 300, 919, 316, 839, 956, 164,\n 950], [289, 604, 206, 623, 94, 84, 544, 200, 955, 230, 186, 194, 852, \n 47, 586, 687, 559, 809], [139, 266, 610, 674, 20, 856, 866, 721, 224, \n 61, 754, 599, 97, 827, 934, 724, 207, 281], [59, 42, 40, 155, 346, 392,\n 602, 768, 428, 104, 285, 74, 913, 885, 258, 79, 366, 114], [205, 16, \n 543, 155, 384, 415, 64, 375, 841, 387, 922, 909, 489, 846, 666, 378, \n 933, 908], [389, 178, 394, 265, 728, 108, 599, 398, 569, 480, 159, 635,\n 255, 421, 260, 230, 855, 267], [767, 767, 591, 319, 141, 136, 915, 262,\n 723, 932, 887, 891, 417, 101, 415, 178, 369, 179]])\n', (127795, 129512), False, 'from snail import snail\n'), ((132970, 134036), 'snail.snail', 'snail', (['[[990, 568, 232, 648, 150, 961, 543, 323, 970, 
480, 247, 655, 234, 766], [\n 445, 283, 695, 616, 307, 693, 516, 267, 772, 614, 375, 354, 874, 139],\n [56, 886, 202, 95, 850, 689, 279, 633, 473, 300, 210, 950, 264, 392], [\n 656, 90, 399, 263, 200, 764, 793, 125, 644, 341, 1, 41, 315, 577], [703,\n 868, 597, 797, 445, 159, 899, 961, 421, 400, 865, 37, 475, 501], [554, \n 14, 369, 351, 506, 615, 921, 242, 972, 625, 402, 906, 693, 251], [727, \n 518, 523, 314, 40, 458, 338, 814, 508, 135, 515, 151, 288, 433], [456, \n 696, 183, 605, 963, 882, 243, 721, 924, 276, 244, 341, 592, 746], [275,\n 799, 613, 400, 259, 241, 12, 991, 844, 51, 532, 893, 933, 357], [649, \n 500, 240, 430, 276, 488, 583, 197, 11, 646, 285, 552, 812, 520], [654, \n 829, 189, 560, 146, 26, 397, 206, 605, 64, 132, 791, 264, 469], [823, \n 419, 690, 389, 997, 854, 416, 97, 267, 499, 383, 250, 856, 510], [863, \n 725, 195, 653, 568, 668, 761, 598, 379, 810, 674, 535, 350, 215], [880,\n 492, 584, 822, 260, 81, 726, 737, 166, 379, 347, 66, 990, 381]]'], {}), '([[990, 568, 232, 648, 150, 961, 543, 323, 970, 480, 247, 655, 234, \n 766], [445, 283, 695, 616, 307, 693, 516, 267, 772, 614, 375, 354, 874,\n 139], [56, 886, 202, 95, 850, 689, 279, 633, 473, 300, 210, 950, 264, \n 392], [656, 90, 399, 263, 200, 764, 793, 125, 644, 341, 1, 41, 315, 577\n ], [703, 868, 597, 797, 445, 159, 899, 961, 421, 400, 865, 37, 475, 501\n ], [554, 14, 369, 351, 506, 615, 921, 242, 972, 625, 402, 906, 693, 251\n ], [727, 518, 523, 314, 40, 458, 338, 814, 508, 135, 515, 151, 288, 433\n ], [456, 696, 183, 605, 963, 882, 243, 721, 924, 276, 244, 341, 592, \n 746], [275, 799, 613, 400, 259, 241, 12, 991, 844, 51, 532, 893, 933, \n 357], [649, 500, 240, 430, 276, 488, 583, 197, 11, 646, 285, 552, 812, \n 520], [654, 829, 189, 560, 146, 26, 397, 206, 605, 64, 132, 791, 264, \n 469], [823, 419, 690, 389, 997, 854, 416, 97, 267, 499, 383, 250, 856, \n 510], [863, 725, 195, 653, 568, 668, 761, 598, 379, 810, 674, 535, 350,\n 215], [880, 492, 584, 822, 260, 81, 726, 737, 166, 379, 347, 66, 990, 381]]\n )\n', (132975, 134036), False, 'from snail import snail\n'), ((136340, 136696), 'snail.snail', 'snail', (['[[40, 406, 36, 505, 634, 102, 702, 130], [441, 809, 470, 914, 796, 852, 306,\n 978], [919, 501, 158, 558, 536, 141, 229, 678], [841, 688, 115, 374, \n 638, 735, 687, 358], [432, 204, 983, 343, 5, 717, 999, 912], [380, 253,\n 737, 263, 790, 515, 817, 270], [298, 335, 347, 644, 356, 931, 594, 954],\n [977, 832, 618, 875, 547, 995, 47, 183]]'], {}), '([[40, 406, 36, 505, 634, 102, 702, 130], [441, 809, 470, 914, 796, \n 852, 306, 978], [919, 501, 158, 558, 536, 141, 229, 678], [841, 688, \n 115, 374, 638, 735, 687, 358], [432, 204, 983, 343, 5, 717, 999, 912],\n [380, 253, 737, 263, 790, 515, 817, 270], [298, 335, 347, 644, 356, 931,\n 594, 954], [977, 832, 618, 875, 547, 995, 47, 183]])\n', (136345, 136696), False, 'from snail import snail\n'), ((137449, 137506), 'snail.snail', 'snail', (['[[935, 756, 641], [827, 444, 751], [166, 61, 775]]'], {}), '([[935, 756, 641], [827, 444, 751], [166, 61, 775]])\n', (137454, 137506), False, 'from snail import snail\n'), ((137626, 137656), 'snail.snail', 'snail', (['[[21, 182], [507, 380]]'], {}), '([[21, 182], [507, 380]])\n', (137631, 137656), False, 'from snail import snail\n'), ((137739, 137882), 'snail.snail', 'snail', (['[[535, 230, 195, 719, 377], [95, 348, 60, 911, 645], [654, 459, 570, 244, \n 205], [728, 622, 509, 484, 25], [253, 883, 275, 80, 276]]'], {}), '([[535, 230, 195, 719, 377], [95, 348, 60, 911, 645], [654, 459, 570, \n 244, 205], [728, 622, 509, 
484, 25], [253, 883, 275, 80, 276]])\n', (137744, 137882), False, 'from snail import snail\n'), ((138190, 139396), 'snail.snail', 'snail', (['[[785, 961, 393, 614, 388, 455, 610, 908, 516, 364, 872, 655, 842, 764, 246\n ], [747, 313, 906, 923, 915, 201, 951, 533, 862, 575, 735, 729, 14, 730,\n 25], [170, 790, 377, 815, 635, 93, 45, 31, 555, 762, 119, 935, 885, 180,\n 891], [806, 414, 178, 167, 636, 597, 562, 768, 302, 74, 481, 549, 962, \n 118, 40], [742, 767, 826, 738, 501, 914, 693, 644, 409, 81, 185, 361, \n 197, 649, 124], [232, 878, 30, 498, 260, 724, 650, 544, 388, 384, 2, \n 159, 714, 198, 532], [375, 157, 579, 641, 683, 263, 576, 500, 177, 402,\n 659, 489, 438, 839, 314], [834, 355, 434, 66, 333, 91, 207, 2, 651, 340,\n 505, 175, 443, 446, 740], [688, 517, 275, 811, 515, 461, 833, 811, 849,\n 406, 79, 631, 431, 108, 122], [252, 712, 973, 15, 536, 36, 55, 901, 503,\n 701, 520, 690, 918, 759, 217], [350, 784, 946, 63, 801, 911, 233, 411, \n 116, 355, 973, 352, 784, 50, 352], [242, 23, 728, 771, 881, 975, 565, \n 498, 405, 694, 441, 621, 741, 586, 48], [551, 928, 529, 150, 280, 388, \n 741, 717, 699, 211, 677, 512, 900, 416, 689], [404, 225, 869, 308, 827,\n 93, 424, 276, 775, 180, 569, 51, 710, 847, 792], [771, 527, 737, 805, \n 705, 766, 900, 757, 994, 640, 335, 733, 588, 921, 265]]'], {}), '([[785, 961, 393, 614, 388, 455, 610, 908, 516, 364, 872, 655, 842, \n 764, 246], [747, 313, 906, 923, 915, 201, 951, 533, 862, 575, 735, 729,\n 14, 730, 25], [170, 790, 377, 815, 635, 93, 45, 31, 555, 762, 119, 935,\n 885, 180, 891], [806, 414, 178, 167, 636, 597, 562, 768, 302, 74, 481, \n 549, 962, 118, 40], [742, 767, 826, 738, 501, 914, 693, 644, 409, 81, \n 185, 361, 197, 649, 124], [232, 878, 30, 498, 260, 724, 650, 544, 388, \n 384, 2, 159, 714, 198, 532], [375, 157, 579, 641, 683, 263, 576, 500, \n 177, 402, 659, 489, 438, 839, 314], [834, 355, 434, 66, 333, 91, 207, 2,\n 651, 340, 505, 175, 443, 446, 740], [688, 517, 275, 811, 515, 461, 833,\n 811, 849, 406, 79, 631, 431, 108, 122], [252, 712, 973, 15, 536, 36, 55,\n 901, 503, 701, 520, 690, 918, 759, 217], [350, 784, 946, 63, 801, 911, \n 233, 411, 116, 355, 973, 352, 784, 50, 352], [242, 23, 728, 771, 881, \n 975, 565, 498, 405, 694, 441, 621, 741, 586, 48], [551, 928, 529, 150, \n 280, 388, 741, 717, 699, 211, 677, 512, 900, 416, 689], [404, 225, 869,\n 308, 827, 93, 424, 276, 775, 180, 569, 51, 710, 847, 792], [771, 527, \n 737, 805, 705, 766, 900, 757, 994, 640, 335, 733, 588, 921, 265]])\n', (138195, 139396), False, 'from snail import snail\n'), ((141979, 142078), 'snail.snail', 'snail', (['[[353, 85, 930, 216], [626, 576, 495, 991], [581, 192, 891, 709], [350, 925,\n 349, 502]]'], {}), '([[353, 85, 930, 216], [626, 576, 495, 991], [581, 192, 891, 709], [\n 350, 925, 349, 502]])\n', (141984, 142078), False, 'from snail import snail\n'), ((142299, 142313), 'snail.snail', 'snail', (['[[540]]'], {}), '([[540]])\n', (142304, 142313), False, 'from snail import snail\n'), ((142382, 143167), 'snail.snail', 'snail', (['[[323, 110, 157, 740, 114, 704, 774, 106, 268, 508, 566, 474], [399, 944, \n 938, 434, 715, 475, 929, 705, 940, 246, 787, 528], [807, 311, 393, 557,\n 372, 756, 260, 12, 811, 4, 368, 282], [618, 918, 279, 23, 755, 16, 141,\n 214, 837, 333, 916, 937], [353, 304, 904, 659, 345, 217, 882, 563, 845,\n 34, 318, 763], [420, 645, 620, 910, 271, 243, 705, 909, 841, 907, 954, \n 745], [114, 445, 310, 574, 25, 779, 262, 381, 319, 231, 460, 811], [768,\n 163, 698, 307, 647, 712, 617, 700, 549, 215, 645, 839], [779, 475, 357,\n 508, 
819, 672, 250, 228, 602, 747, 734, 598], [217, 834, 271, 442, 745,\n 526, 141, 571, 331, 715, 937, 24], [159, 231, 655, 435, 450, 532, 913, \n 91, 527, 105, 40, 294], [654, 903, 196, 676, 451, 502, 602, 539, 429, \n 795, 646, 117]]'], {}), '([[323, 110, 157, 740, 114, 704, 774, 106, 268, 508, 566, 474], [399, \n 944, 938, 434, 715, 475, 929, 705, 940, 246, 787, 528], [807, 311, 393,\n 557, 372, 756, 260, 12, 811, 4, 368, 282], [618, 918, 279, 23, 755, 16,\n 141, 214, 837, 333, 916, 937], [353, 304, 904, 659, 345, 217, 882, 563,\n 845, 34, 318, 763], [420, 645, 620, 910, 271, 243, 705, 909, 841, 907, \n 954, 745], [114, 445, 310, 574, 25, 779, 262, 381, 319, 231, 460, 811],\n [768, 163, 698, 307, 647, 712, 617, 700, 549, 215, 645, 839], [779, 475,\n 357, 508, 819, 672, 250, 228, 602, 747, 734, 598], [217, 834, 271, 442,\n 745, 526, 141, 571, 331, 715, 937, 24], [159, 231, 655, 435, 450, 532, \n 913, 91, 527, 105, 40, 294], [654, 903, 196, 676, 451, 502, 602, 539, \n 429, 795, 646, 117]])\n', (142387, 143167), False, 'from snail import snail\n'), ((144464, 146604), 'snail.snail', 'snail', (['[[986, 240, 922, 622, 119, 802, 582, 105, 664, 791, 735, 699, 470, 252, 698,\n 185, 108, 345, 492, 923], [240, 476, 677, 30, 653, 350, 500, 837, 871, \n 723, 277, 232, 913, 969, 363, 209, 806, 50, 395, 85], [685, 728, 491, \n 175, 714, 445, 721, 940, 935, 357, 321, 462, 884, 97, 210, 183, 804, \n 892, 424, 518], [43, 751, 907, 556, 279, 812, 613, 69, 915, 20, 19, 446,\n 737, 739, 400, 713, 203, 94, 294, 335], [48, 183, 597, 479, 293, 803, \n 657, 501, 358, 165, 14, 999, 153, 35, 638, 561, 25, 565, 891, 543], [\n 918, 781, 555, 285, 954, 969, 636, 883, 200, 883, 426, 521, 528, 495, \n 964, 773, 799, 545, 116, 512], [279, 668, 405, 945, 213, 573, 712, 99, \n 713, 688, 492, 589, 177, 718, 651, 252, 843, 376, 657, 428], [332, 282,\n 54, 321, 724, 679, 50, 698, 727, 252, 661, 306, 790, 269, 958, 673, 742,\n 806, 310, 568], [785, 236, 107, 886, 498, 650, 569, 967, 185, 57, 448, \n 25, 101, 787, 194, 464, 508, 925, 944, 531], [141, 283, 763, 387, 423, \n 348, 93, 286, 448, 71, 745, 231, 949, 228, 838, 717, 673, 24, 42, 634],\n [861, 730, 300, 615, 603, 945, 225, 319, 418, 919, 514, 27, 884, 628, \n 229, 87, 193, 140, 692, 508], [568, 394, 305, 601, 237, 948, 275, 480, \n 33, 277, 821, 38, 313, 236, 216, 27, 650, 972, 284, 554], [613, 892, \n 806, 441, 975, 777, 615, 741, 534, 43, 203, 991, 405, 302, 447, 313, \n 800, 345, 54, 670], [689, 699, 296, 498, 793, 199, 282, 489, 224, 839, \n 870, 409, 686, 935, 196, 2, 755, 257, 246, 712], [328, 236, 205, 980, \n 365, 90, 865, 401, 528, 368, 802, 971, 48, 218, 30, 655, 308, 690, 285,\n 387], [215, 929, 894, 328, 40, 718, 33, 112, 729, 609, 598, 956, 838, \n 252, 727, 798, 486, 797, 65, 758], [162, 746, 960, 376, 695, 473, 664, \n 960, 948, 375, 354, 980, 614, 540, 300, 538, 822, 816, 117, 371], [343,\n 801, 497, 285, 121, 244, 913, 709, 271, 252, 301, 557, 115, 678, 161, \n 389, 169, 38, 765, 240], [815, 108, 350, 304, 736, 991, 769, 383, 399, \n 621, 397, 798, 382, 738, 344, 280, 479, 255, 398, 280], [411, 702, 791,\n 603, 849, 743, 594, 468, 396, 752, 297, 515, 426, 426, 806, 385, 878, \n 815, 840, 50]]'], {}), '([[986, 240, 922, 622, 119, 802, 582, 105, 664, 791, 735, 699, 470, \n 252, 698, 185, 108, 345, 492, 923], [240, 476, 677, 30, 653, 350, 500, \n 837, 871, 723, 277, 232, 913, 969, 363, 209, 806, 50, 395, 85], [685, \n 728, 491, 175, 714, 445, 721, 940, 935, 357, 321, 462, 884, 97, 210, \n 183, 804, 892, 424, 518], [43, 751, 907, 556, 279, 812, 613, 69, 915, 
\n 20, 19, 446, 737, 739, 400, 713, 203, 94, 294, 335], [48, 183, 597, 479,\n 293, 803, 657, 501, 358, 165, 14, 999, 153, 35, 638, 561, 25, 565, 891,\n 543], [918, 781, 555, 285, 954, 969, 636, 883, 200, 883, 426, 521, 528,\n 495, 964, 773, 799, 545, 116, 512], [279, 668, 405, 945, 213, 573, 712,\n 99, 713, 688, 492, 589, 177, 718, 651, 252, 843, 376, 657, 428], [332, \n 282, 54, 321, 724, 679, 50, 698, 727, 252, 661, 306, 790, 269, 958, 673,\n 742, 806, 310, 568], [785, 236, 107, 886, 498, 650, 569, 967, 185, 57, \n 448, 25, 101, 787, 194, 464, 508, 925, 944, 531], [141, 283, 763, 387, \n 423, 348, 93, 286, 448, 71, 745, 231, 949, 228, 838, 717, 673, 24, 42, \n 634], [861, 730, 300, 615, 603, 945, 225, 319, 418, 919, 514, 27, 884, \n 628, 229, 87, 193, 140, 692, 508], [568, 394, 305, 601, 237, 948, 275, \n 480, 33, 277, 821, 38, 313, 236, 216, 27, 650, 972, 284, 554], [613, \n 892, 806, 441, 975, 777, 615, 741, 534, 43, 203, 991, 405, 302, 447, \n 313, 800, 345, 54, 670], [689, 699, 296, 498, 793, 199, 282, 489, 224, \n 839, 870, 409, 686, 935, 196, 2, 755, 257, 246, 712], [328, 236, 205, \n 980, 365, 90, 865, 401, 528, 368, 802, 971, 48, 218, 30, 655, 308, 690,\n 285, 387], [215, 929, 894, 328, 40, 718, 33, 112, 729, 609, 598, 956, \n 838, 252, 727, 798, 486, 797, 65, 758], [162, 746, 960, 376, 695, 473, \n 664, 960, 948, 375, 354, 980, 614, 540, 300, 538, 822, 816, 117, 371],\n [343, 801, 497, 285, 121, 244, 913, 709, 271, 252, 301, 557, 115, 678, \n 161, 389, 169, 38, 765, 240], [815, 108, 350, 304, 736, 991, 769, 383, \n 399, 621, 397, 798, 382, 738, 344, 280, 479, 255, 398, 280], [411, 702,\n 791, 603, 849, 743, 594, 468, 396, 752, 297, 515, 426, 426, 806, 385, \n 878, 815, 840, 50]])\n', (144469, 146604), False, 'from snail import snail\n'), ((151386, 152438), 'snail.snail', 'snail', (['[[779, 390, 935, 443, 441, 932, 526, 627, 761, 633, 708, 770, 21, 872], [\n 754, 424, 961, 78, 264, 512, 496, 963, 781, 96, 127, 102, 443, 432], [\n 462, 403, 123, 808, 836, 958, 574, 126, 686, 524, 508, 557, 61, 901], [\n 6, 257, 831, 713, 790, 660, 2, 775, 268, 337, 75, 804, 357, 961], [604,\n 802, 2, 87, 101, 475, 192, 722, 345, 173, 926, 171, 170, 293], [12, 776,\n 242, 639, 641, 929, 898, 119, 5, 501, 358, 518, 440, 395], [635, 821, \n 94, 345, 146, 460, 246, 555, 618, 331, 959, 907, 717, 521], [669, 178, \n 275, 457, 549, 963, 216, 69, 228, 722, 444, 914, 58, 643], [826, 947, \n 674, 252, 707, 10, 968, 492, 418, 191, 393, 595, 278, 540], [797, 490, \n 818, 461, 131, 884, 421, 935, 299, 970, 715, 75, 516, 507], [546, 784, \n 474, 248, 573, 366, 638, 696, 927, 892, 508, 311, 606, 632], [846, 536,\n 776, 553, 586, 170, 327, 24, 828, 282, 927, 787, 202, 550], [739, 628, \n 146, 910, 843, 244, 23, 430, 521, 810, 923, 467, 875, 938], [222, 359, \n 852, 608, 514, 865, 674, 391, 344, 161, 69, 418, 188, 375]]'], {}), '([[779, 390, 935, 443, 441, 932, 526, 627, 761, 633, 708, 770, 21, 872\n ], [754, 424, 961, 78, 264, 512, 496, 963, 781, 96, 127, 102, 443, 432],\n [462, 403, 123, 808, 836, 958, 574, 126, 686, 524, 508, 557, 61, 901],\n [6, 257, 831, 713, 790, 660, 2, 775, 268, 337, 75, 804, 357, 961], [604,\n 802, 2, 87, 101, 475, 192, 722, 345, 173, 926, 171, 170, 293], [12, 776,\n 242, 639, 641, 929, 898, 119, 5, 501, 358, 518, 440, 395], [635, 821, \n 94, 345, 146, 460, 246, 555, 618, 331, 959, 907, 717, 521], [669, 178, \n 275, 457, 549, 963, 216, 69, 228, 722, 444, 914, 58, 643], [826, 947, \n 674, 252, 707, 10, 968, 492, 418, 191, 393, 595, 278, 540], [797, 490, \n 818, 461, 131, 884, 421, 935, 
299, 970, 715, 75, 516, 507], [546, 784, \n 474, 248, 573, 366, 638, 696, 927, 892, 508, 311, 606, 632], [846, 536,\n 776, 553, 586, 170, 327, 24, 828, 282, 927, 787, 202, 550], [739, 628, \n 146, 910, 843, 244, 23, 430, 521, 810, 923, 467, 875, 938], [222, 359, \n 852, 608, 514, 865, 674, 391, 344, 161, 69, 418, 188, 375]])\n', (151391, 152438), False, 'from snail import snail\n'), ((154744, 154889), 'snail.snail', 'snail', (['[[771, 906, 164, 502, 151], [560, 297, 260, 485, 632], [3, 884, 664, 507, \n 325], [639, 813, 354, 560, 226], [274, 555, 978, 288, 756]]'], {}), '([[771, 906, 164, 502, 151], [560, 297, 260, 485, 632], [3, 884, 664, \n 507, 325], [639, 813, 354, 560, 226], [274, 555, 978, 288, 756]])\n', (154749, 154889), False, 'from snail import snail\n'), ((155199, 155405), 'snail.snail', 'snail', (['[[254, 173, 160, 399, 691, 434], [849, 456, 758, 273, 917, 347], [653, 544,\n 515, 483, 827, 638], [145, 862, 862, 170, 518, 727], [702, 527, 461, \n 204, 727, 749], [478, 342, 652, 960, 6, 699]]'], {}), '([[254, 173, 160, 399, 691, 434], [849, 456, 758, 273, 917, 347], [653,\n 544, 515, 483, 827, 638], [145, 862, 862, 170, 518, 727], [702, 527, \n 461, 204, 727, 749], [478, 342, 652, 960, 6, 699]])\n', (155204, 155405), False, 'from snail import snail\n'), ((155792, 155998), 'snail.snail', 'snail', (['[[226, 704, 457, 816, 131, 280], [360, 476, 612, 26, 934, 390], [456, 641, \n 669, 251, 211, 954], [152, 516, 380, 865, 617, 824], [887, 422, 509, \n 185, 322, 688], [593, 21, 364, 475, 965, 533]]'], {}), '([[226, 704, 457, 816, 131, 280], [360, 476, 612, 26, 934, 390], [456,\n 641, 669, 251, 211, 954], [152, 516, 380, 865, 617, 824], [887, 422, \n 509, 185, 322, 688], [593, 21, 364, 475, 965, 533]])\n', (155797, 155998), False, 'from snail import snail\n'), ((156402, 156433), 'snail.snail', 'snail', (['[[721, 438], [320, 489]]'], {}), '([[721, 438], [320, 489]])\n', (156407, 156433), False, 'from snail import snail\n'), ((156517, 156722), 'snail.snail', 'snail', (['[[27, 894, 555, 256, 430, 208], [554, 476, 381, 291, 303, 274], [129, 801, \n 899, 605, 365, 252], [764, 675, 459, 554, 426, 85], [816, 231, 149, 674,\n 303, 499], [305, 116, 340, 865, 168, 954]]'], {}), '([[27, 894, 555, 256, 430, 208], [554, 476, 381, 291, 303, 274], [129,\n 801, 899, 605, 365, 252], [764, 675, 459, 554, 426, 85], [816, 231, 149,\n 674, 303, 499], [305, 116, 340, 865, 168, 954]])\n', (156522, 156722), False, 'from snail import snail\n'), ((157127, 159262), 'snail.snail', 'snail', (['[[572, 40, 328, 370, 500, 359, 678, 378, 538, 858, 934, 597, 558, 719, 33, \n 895, 744, 664, 144, 942], [866, 855, 310, 833, 63, 797, 898, 803, 651, \n 882, 732, 735, 675, 3, 262, 223, 173, 342, 85, 611], [899, 287, 100, \n 560, 479, 542, 148, 688, 447, 575, 738, 640, 312, 25, 231, 757, 683, \n 260, 858, 346], [1000, 625, 581, 457, 792, 537, 711, 735, 189, 665, 68,\n 774, 132, 208, 510, 10, 797, 727, 525, 799], [465, 650, 818, 258, 110, \n 531, 816, 811, 259, 429, 56, 497, 701, 350, 938, 112, 318, 260, 88, 597\n ], [505, 112, 776, 421, 332, 521, 824, 55, 871, 114, 715, 725, 882, 579,\n 481, 425, 59, 382, 959, 807], [258, 37, 320, 581, 567, 950, 77, 948, \n 540, 28, 560, 911, 307, 508, 163, 679, 687, 37, 246, 838], [782, 59, \n 179, 588, 996, 675, 37, 425, 607, 688, 629, 34, 975, 885, 188, 852, 343,\n 841, 952, 103], [432, 454, 425, 198, 425, 305, 909, 997, 263, 813, 666,\n 922, 619, 942, 262, 386, 730, 197, 664, 643], [536, 668, 164, 476, 477,\n 667, 875, 990, 655, 985, 824, 684, 263, 111, 82, 828, 657, 131, 819, \n 210], 
[943, 974, 501, 727, 825, 510, 913, 133, 947, 301, 117, 283, 952,\n 643, 787, 24, 345, 104, 323, 525], [461, 589, 200, 794, 521, 39, 167, \n 52, 836, 477, 437, 507, 264, 717, 663, 347, 623, 669, 262, 34], [370, \n 556, 920, 122, 82, 952, 628, 124, 245, 87, 213, 238, 792, 388, 47, 531,\n 918, 634, 368, 312], [635, 472, 7, 883, 622, 910, 757, 959, 318, 933, \n 887, 877, 242, 418, 571, 610, 671, 745, 303, 14], [128, 96, 532, 485, \n 66, 665, 373, 829, 848, 850, 124, 732, 618, 724, 34, 686, 851, 832, 407,\n 75], [38, 836, 222, 635, 388, 936, 793, 187, 803, 227, 561, 481, 635, 9,\n 437, 922, 86, 272, 439, 452], [303, 667, 784, 818, 908, 142, 768, 342, \n 350, 959, 210, 494, 592, 918, 494, 108, 795, 617, 169, 142], [344, 618,\n 79, 320, 667, 726, 960, 900, 525, 776, 549, 292, 938, 390, 975, 423, \n 555, 963, 965, 440], [220, 497, 705, 449, 161, 225, 73, 164, 796, 438, \n 978, 623, 304, 917, 584, 118, 700, 222, 476, 825], [896, 392, 14, 489, \n 226, 742, 932, 303, 767, 487, 859, 637, 327, 399, 804, 304, 922, 119, \n 687, 755]]'], {}), '([[572, 40, 328, 370, 500, 359, 678, 378, 538, 858, 934, 597, 558, 719,\n 33, 895, 744, 664, 144, 942], [866, 855, 310, 833, 63, 797, 898, 803, \n 651, 882, 732, 735, 675, 3, 262, 223, 173, 342, 85, 611], [899, 287, \n 100, 560, 479, 542, 148, 688, 447, 575, 738, 640, 312, 25, 231, 757, \n 683, 260, 858, 346], [1000, 625, 581, 457, 792, 537, 711, 735, 189, 665,\n 68, 774, 132, 208, 510, 10, 797, 727, 525, 799], [465, 650, 818, 258, \n 110, 531, 816, 811, 259, 429, 56, 497, 701, 350, 938, 112, 318, 260, 88,\n 597], [505, 112, 776, 421, 332, 521, 824, 55, 871, 114, 715, 725, 882, \n 579, 481, 425, 59, 382, 959, 807], [258, 37, 320, 581, 567, 950, 77, \n 948, 540, 28, 560, 911, 307, 508, 163, 679, 687, 37, 246, 838], [782, \n 59, 179, 588, 996, 675, 37, 425, 607, 688, 629, 34, 975, 885, 188, 852,\n 343, 841, 952, 103], [432, 454, 425, 198, 425, 305, 909, 997, 263, 813,\n 666, 922, 619, 942, 262, 386, 730, 197, 664, 643], [536, 668, 164, 476,\n 477, 667, 875, 990, 655, 985, 824, 684, 263, 111, 82, 828, 657, 131, \n 819, 210], [943, 974, 501, 727, 825, 510, 913, 133, 947, 301, 117, 283,\n 952, 643, 787, 24, 345, 104, 323, 525], [461, 589, 200, 794, 521, 39, \n 167, 52, 836, 477, 437, 507, 264, 717, 663, 347, 623, 669, 262, 34], [\n 370, 556, 920, 122, 82, 952, 628, 124, 245, 87, 213, 238, 792, 388, 47,\n 531, 918, 634, 368, 312], [635, 472, 7, 883, 622, 910, 757, 959, 318, \n 933, 887, 877, 242, 418, 571, 610, 671, 745, 303, 14], [128, 96, 532, \n 485, 66, 665, 373, 829, 848, 850, 124, 732, 618, 724, 34, 686, 851, 832,\n 407, 75], [38, 836, 222, 635, 388, 936, 793, 187, 803, 227, 561, 481, \n 635, 9, 437, 922, 86, 272, 439, 452], [303, 667, 784, 818, 908, 142, \n 768, 342, 350, 959, 210, 494, 592, 918, 494, 108, 795, 617, 169, 142],\n [344, 618, 79, 320, 667, 726, 960, 900, 525, 776, 549, 292, 938, 390, \n 975, 423, 555, 963, 965, 440], [220, 497, 705, 449, 161, 225, 73, 164, \n 796, 438, 978, 623, 304, 917, 584, 118, 700, 222, 476, 825], [896, 392,\n 14, 489, 226, 742, 932, 303, 767, 487, 859, 637, 327, 399, 804, 304, \n 922, 119, 687, 755]])\n', (157132, 159262), False, 'from snail import snail\n'), ((164045, 164140), 'snail.snail', 'snail', (['[[785, 373, 215, 440], [948, 869, 882, 65], [236, 227, 508, 450], [46, 69, \n 45, 237]]'], {}), '([[785, 373, 215, 440], [948, 869, 882, 65], [236, 227, 508, 450], [46,\n 69, 45, 237]])\n', (164050, 164140), False, 'from snail import snail\n'), ((164359, 165577), 'snail.snail', 'snail', (['[[319, 115, 440, 26, 579, 418, 402, 
165, 517, 784, 878, 694, 93, 128, 44],\n [852, 607, 878, 871, 517, 532, 992, 374, 11, 98, 518, 711, 147, 227, \n 506], [201, 469, 258, 872, 604, 990, 830, 450, 143, 19, 552, 694, 210, \n 758, 103], [716, 320, 227, 464, 249, 476, 868, 589, 739, 445, 2, 718, \n 961, 95, 220], [928, 536, 957, 213, 258, 403, 998, 925, 940, 860, 860, \n 119, 145, 74, 928], [516, 421, 697, 192, 26, 251, 294, 643, 476, 959, \n 442, 826, 31, 582, 629], [542, 446, 841, 808, 696, 30, 179, 795, 269, \n 917, 643, 306, 284, 20, 840], [513, 218, 830, 912, 862, 388, 741, 525, \n 630, 405, 631, 383, 531, 318, 426], [434, 565, 697, 621, 308, 675, 252,\n 683, 842, 26, 133, 402, 692, 674, 531], [351, 597, 455, 57, 498, 523, \n 349, 688, 114, 881, 103, 692, 829, 40, 375], [630, 400, 244, 600, 467, \n 618, 505, 435, 821, 670, 896, 248, 743, 83, 784], [349, 703, 796, 713, \n 477, 203, 15, 468, 921, 837, 517, 134, 641, 899, 504], [690, 699, 610, \n 990, 139, 296, 914, 196, 333, 876, 29, 979, 869, 355, 472], [187, 787, \n 932, 687, 662, 625, 759, 371, 438, 893, 838, 876, 442, 442, 697], [454,\n 871, 70, 541, 598, 597, 402, 472, 327, 160, 913, 735, 518, 770, 635]]'], {}), '([[319, 115, 440, 26, 579, 418, 402, 165, 517, 784, 878, 694, 93, 128,\n 44], [852, 607, 878, 871, 517, 532, 992, 374, 11, 98, 518, 711, 147, \n 227, 506], [201, 469, 258, 872, 604, 990, 830, 450, 143, 19, 552, 694, \n 210, 758, 103], [716, 320, 227, 464, 249, 476, 868, 589, 739, 445, 2, \n 718, 961, 95, 220], [928, 536, 957, 213, 258, 403, 998, 925, 940, 860, \n 860, 119, 145, 74, 928], [516, 421, 697, 192, 26, 251, 294, 643, 476, \n 959, 442, 826, 31, 582, 629], [542, 446, 841, 808, 696, 30, 179, 795, \n 269, 917, 643, 306, 284, 20, 840], [513, 218, 830, 912, 862, 388, 741, \n 525, 630, 405, 631, 383, 531, 318, 426], [434, 565, 697, 621, 308, 675,\n 252, 683, 842, 26, 133, 402, 692, 674, 531], [351, 597, 455, 57, 498, \n 523, 349, 688, 114, 881, 103, 692, 829, 40, 375], [630, 400, 244, 600, \n 467, 618, 505, 435, 821, 670, 896, 248, 743, 83, 784], [349, 703, 796, \n 713, 477, 203, 15, 468, 921, 837, 517, 134, 641, 899, 504], [690, 699, \n 610, 990, 139, 296, 914, 196, 333, 876, 29, 979, 869, 355, 472], [187, \n 787, 932, 687, 662, 625, 759, 371, 438, 893, 838, 876, 442, 442, 697],\n [454, 871, 70, 541, 598, 597, 402, 472, 327, 160, 913, 735, 518, 770, 635]]\n )\n', (164364, 165577), False, 'from snail import snail\n'), ((168156, 168599), 'snail.snail', 'snail', (['[[117, 708, 570, 27, 409, 596, 355, 42, 480], [874, 320, 499, 489, 767, 179,\n 912, 813, 855], [929, 737, 403, 431, 219, 710, 107, 450, 61], [860, 446,\n 119, 88, 448, 553, 833, 293, 803], [868, 141, 930, 398, 882, 135, 585, \n 348, 890], [506, 859, 833, 31, 808, 663, 384, 341, 457], [864, 183, 143,\n 954, 427, 680, 940, 411, 585], [995, 374, 784, 568, 200, 777, 468, 69, \n 902], [206, 588, 712, 813, 721, 746, 11, 284, 45]]'], {}), '([[117, 708, 570, 27, 409, 596, 355, 42, 480], [874, 320, 499, 489, \n 767, 179, 912, 813, 855], [929, 737, 403, 431, 219, 710, 107, 450, 61],\n [860, 446, 119, 88, 448, 553, 833, 293, 803], [868, 141, 930, 398, 882,\n 135, 585, 348, 890], [506, 859, 833, 31, 808, 663, 384, 341, 457], [864,\n 183, 143, 954, 427, 680, 940, 411, 585], [995, 374, 784, 568, 200, 777,\n 468, 69, 902], [206, 588, 712, 813, 721, 746, 11, 284, 45]])\n', (168161, 168599), False, 'from snail import snail\n'), ((169502, 171431), 'snail.snail', 'snail', (['[[385, 928, 460, 539, 984, 516, 609, 769, 825, 857, 819, 422, 989, 319, 60,\n 450, 495, 64, 624], [604, 59, 272, 470, 997, 980, 563, 632, 353, 
366, \n 750, 740, 395, 978, 995, 848, 72, 820, 410], [703, 427, 351, 469, 685, \n 297, 362, 947, 998, 434, 896, 773, 441, 562, 785, 704, 529, 471, 798],\n [564, 846, 756, 916, 435, 184, 785, 930, 349, 161, 253, 365, 82, 976, \n 499, 461, 398, 278, 331], [529, 354, 643, 338, 772, 629, 726, 296, 672,\n 282, 268, 741, 330, 272, 217, 188, 754, 875, 58], [869, 349, 328, 585, \n 442, 812, 645, 854, 317, 437, 314, 343, 571, 202, 534, 22, 307, 874, \n 859], [161, 579, 865, 703, 276, 889, 374, 792, 123, 668, 970, 737, 846,\n 416, 704, 204, 660, 223, 509], [243, 645, 359, 427, 636, 193, 663, 857,\n 712, 510, 367, 862, 352, 715, 811, 986, 292, 391, 475], [645, 767, 117,\n 907, 321, 906, 592, 508, 647, 289, 307, 519, 425, 659, 219, 459, 537, \n 505, 328], [743, 74, 374, 226, 356, 28, 5, 215, 459, 232, 18, 123, 308,\n 277, 490, 345, 68, 763, 93], [21, 927, 770, 760, 75, 751, 387, 686, 366,\n 108, 327, 196, 603, 676, 337, 59, 799, 41, 699], [777, 779, 755, 647, \n 718, 144, 749, 35, 282, 233, 552, 936, 391, 140, 877, 874, 472, 86, 836\n ], [966, 63, 26, 21, 595, 325, 521, 636, 481, 485, 664, 897, 151, 132, \n 969, 967, 856, 953, 425], [352, 849, 157, 520, 272, 9, 934, 441, 261, \n 380, 868, 260, 375, 547, 699, 924, 794, 617, 222], [559, 901, 435, 537,\n 620, 779, 708, 848, 903, 701, 570, 115, 114, 342, 57, 878, 278, 697, \n 629], [541, 502, 381, 168, 792, 268, 21, 59, 581, 691, 695, 906, 616, \n 808, 366, 804, 36, 210, 295], [662, 142, 571, 207, 905, 913, 414, 341, \n 956, 602, 115, 78, 688, 67, 148, 92, 930, 68, 258], [301, 970, 837, 91,\n 679, 574, 119, 324, 554, 233, 617, 382, 876, 516, 380, 584, 516, 911, \n 331], [894, 637, 193, 54, 14, 503, 221, 127, 118, 565, 234, 828, 753, \n 97, 257, 619, 811, 803, 934]]'], {}), '([[385, 928, 460, 539, 984, 516, 609, 769, 825, 857, 819, 422, 989, \n 319, 60, 450, 495, 64, 624], [604, 59, 272, 470, 997, 980, 563, 632, \n 353, 366, 750, 740, 395, 978, 995, 848, 72, 820, 410], [703, 427, 351, \n 469, 685, 297, 362, 947, 998, 434, 896, 773, 441, 562, 785, 704, 529, \n 471, 798], [564, 846, 756, 916, 435, 184, 785, 930, 349, 161, 253, 365,\n 82, 976, 499, 461, 398, 278, 331], [529, 354, 643, 338, 772, 629, 726, \n 296, 672, 282, 268, 741, 330, 272, 217, 188, 754, 875, 58], [869, 349, \n 328, 585, 442, 812, 645, 854, 317, 437, 314, 343, 571, 202, 534, 22, \n 307, 874, 859], [161, 579, 865, 703, 276, 889, 374, 792, 123, 668, 970,\n 737, 846, 416, 704, 204, 660, 223, 509], [243, 645, 359, 427, 636, 193,\n 663, 857, 712, 510, 367, 862, 352, 715, 811, 986, 292, 391, 475], [645,\n 767, 117, 907, 321, 906, 592, 508, 647, 289, 307, 519, 425, 659, 219, \n 459, 537, 505, 328], [743, 74, 374, 226, 356, 28, 5, 215, 459, 232, 18,\n 123, 308, 277, 490, 345, 68, 763, 93], [21, 927, 770, 760, 75, 751, 387,\n 686, 366, 108, 327, 196, 603, 676, 337, 59, 799, 41, 699], [777, 779, \n 755, 647, 718, 144, 749, 35, 282, 233, 552, 936, 391, 140, 877, 874, \n 472, 86, 836], [966, 63, 26, 21, 595, 325, 521, 636, 481, 485, 664, 897,\n 151, 132, 969, 967, 856, 953, 425], [352, 849, 157, 520, 272, 9, 934, \n 441, 261, 380, 868, 260, 375, 547, 699, 924, 794, 617, 222], [559, 901,\n 435, 537, 620, 779, 708, 848, 903, 701, 570, 115, 114, 342, 57, 878, \n 278, 697, 629], [541, 502, 381, 168, 792, 268, 21, 59, 581, 691, 695, \n 906, 616, 808, 366, 804, 36, 210, 295], [662, 142, 571, 207, 905, 913, \n 414, 341, 956, 602, 115, 78, 688, 67, 148, 92, 930, 68, 258], [301, 970,\n 837, 91, 679, 574, 119, 324, 554, 233, 617, 382, 876, 516, 380, 584, \n 516, 911, 331], [894, 637, 193, 54, 14, 503, 221, 127, 
118, 565, 234, \n 828, 753, 97, 257, 619, 811, 803, 934]])\n', (169507, 171431), False, 'from snail import snail\n'), ((175603, 177738), 'snail.snail', 'snail', (['[[666, 962, 235, 436, 68, 11, 222, 412, 346, 108, 83, 505, 615, 899, 111, \n 149, 740, 452, 988, 476], [546, 18, 303, 148, 420, 385, 556, 547, 944, \n 980, 346, 821, 402, 114, 287, 328, 884, 420, 476, 327], [586, 711, 282,\n 581, 620, 649, 276, 979, 359, 916, 897, 797, 676, 359, 510, 229, 621, \n 782, 559, 406], [888, 758, 801, 266, 597, 509, 541, 501, 301, 109, 298,\n 676, 542, 803, 434, 40, 601, 224, 72, 387], [402, 960, 825, 515, 400, \n 282, 102, 787, 226, 256, 446, 116, 926, 868, 497, 885, 645, 228, 37, \n 263], [589, 332, 700, 507, 657, 509, 28, 46, 60, 615, 43, 439, 545, 382,\n 249, 1, 511, 411, 369, 336], [470, 14, 533, 919, 248, 40, 292, 559, 970,\n 850, 609, 202, 315, 100, 52, 467, 332, 666, 620, 145], [117, 906, 282, \n 526, 168, 206, 689, 213, 207, 78, 270, 186, 877, 744, 191, 86, 56, 626,\n 47, 777], [491, 902, 689, 519, 278, 647, 890, 903, 351, 125, 873, 92, \n 510, 765, 213, 298, 972, 42, 667, 61], [689, 759, 825, 676, 249, 697, \n 684, 112, 347, 73, 863, 91, 150, 311, 140, 814, 984, 838, 458, 505], [\n 176, 115, 727, 603, 981, 695, 255, 165, 433, 82, 576, 392, 401, 736, \n 469, 685, 684, 473, 599, 275], [400, 527, 489, 949, 267, 523, 711, 642,\n 204, 140, 298, 162, 730, 26, 745, 748, 641, 378, 187, 208], [424, 742, \n 633, 608, 645, 642, 876, 276, 408, 985, 695, 3, 772, 967, 436, 422, 333,\n 626, 980, 279], [363, 401, 873, 167, 355, 259, 678, 424, 558, 957, 171,\n 284, 664, 517, 855, 849, 112, 470, 331, 112], [353, 257, 463, 706, 552,\n 957, 255, 596, 453, 950, 352, 914, 493, 798, 735, 633, 747, 552, 368, \n 547], [524, 688, 975, 145, 704, 232, 190, 483, 617, 262, 882, 782, 5, \n 345, 285, 483, 325, 321, 866, 806], [99, 972, 262, 332, 81, 103, 425, \n 156, 240, 599, 508, 755, 783, 585, 354, 515, 694, 638, 22, 815], [789, \n 616, 172, 544, 827, 862, 286, 844, 376, 844, 508, 320, 675, 197, 350, \n 545, 505, 78, 155, 606], [203, 167, 992, 723, 682, 83, 534, 315, 376, \n 89, 267, 107, 346, 924, 306, 752, 627, 496, 994, 613], [581, 737, 393, \n 879, 406, 15, 265, 238, 125, 683, 505, 835, 174, 509, 284, 12, 364, 345,\n 395, 1]]'], {}), '([[666, 962, 235, 436, 68, 11, 222, 412, 346, 108, 83, 505, 615, 899, \n 111, 149, 740, 452, 988, 476], [546, 18, 303, 148, 420, 385, 556, 547, \n 944, 980, 346, 821, 402, 114, 287, 328, 884, 420, 476, 327], [586, 711,\n 282, 581, 620, 649, 276, 979, 359, 916, 897, 797, 676, 359, 510, 229, \n 621, 782, 559, 406], [888, 758, 801, 266, 597, 509, 541, 501, 301, 109,\n 298, 676, 542, 803, 434, 40, 601, 224, 72, 387], [402, 960, 825, 515, \n 400, 282, 102, 787, 226, 256, 446, 116, 926, 868, 497, 885, 645, 228, \n 37, 263], [589, 332, 700, 507, 657, 509, 28, 46, 60, 615, 43, 439, 545,\n 382, 249, 1, 511, 411, 369, 336], [470, 14, 533, 919, 248, 40, 292, 559,\n 970, 850, 609, 202, 315, 100, 52, 467, 332, 666, 620, 145], [117, 906, \n 282, 526, 168, 206, 689, 213, 207, 78, 270, 186, 877, 744, 191, 86, 56,\n 626, 47, 777], [491, 902, 689, 519, 278, 647, 890, 903, 351, 125, 873, \n 92, 510, 765, 213, 298, 972, 42, 667, 61], [689, 759, 825, 676, 249, \n 697, 684, 112, 347, 73, 863, 91, 150, 311, 140, 814, 984, 838, 458, 505\n ], [176, 115, 727, 603, 981, 695, 255, 165, 433, 82, 576, 392, 401, 736,\n 469, 685, 684, 473, 599, 275], [400, 527, 489, 949, 267, 523, 711, 642,\n 204, 140, 298, 162, 730, 26, 745, 748, 641, 378, 187, 208], [424, 742, \n 633, 608, 645, 642, 876, 276, 408, 985, 695, 3, 772, 
967, 436, 422, 333,\n 626, 980, 279], [363, 401, 873, 167, 355, 259, 678, 424, 558, 957, 171,\n 284, 664, 517, 855, 849, 112, 470, 331, 112], [353, 257, 463, 706, 552,\n 957, 255, 596, 453, 950, 352, 914, 493, 798, 735, 633, 747, 552, 368, \n 547], [524, 688, 975, 145, 704, 232, 190, 483, 617, 262, 882, 782, 5, \n 345, 285, 483, 325, 321, 866, 806], [99, 972, 262, 332, 81, 103, 425, \n 156, 240, 599, 508, 755, 783, 585, 354, 515, 694, 638, 22, 815], [789, \n 616, 172, 544, 827, 862, 286, 844, 376, 844, 508, 320, 675, 197, 350, \n 545, 505, 78, 155, 606], [203, 167, 992, 723, 682, 83, 534, 315, 376, \n 89, 267, 107, 346, 924, 306, 752, 627, 496, 994, 613], [581, 737, 393, \n 879, 406, 15, 265, 238, 125, 683, 505, 835, 174, 509, 284, 12, 364, 345,\n 395, 1]])\n', (175608, 177738), False, 'from snail import snail\n'), ((182488, 182931), 'snail.snail', 'snail', (['[[46, 105, 755, 137, 836, 162, 149, 478, 258], [379, 307, 501, 642, 573, \n 610, 945, 506, 956], [896, 309, 293, 526, 429, 298, 636, 989, 80], [880,\n 153, 23, 95, 765, 124, 818, 836, 15], [242, 510, 792, 823, 494, 479, \n 737, 231, 317], [982, 293, 984, 13, 276, 39, 167, 146, 236], [431, 546,\n 246, 860, 207, 380, 306, 577, 405], [905, 276, 247, 949, 479, 6, 61, \n 479, 257], [106, 744, 940, 112, 474, 457, 968, 106, 834]]'], {}), '([[46, 105, 755, 137, 836, 162, 149, 478, 258], [379, 307, 501, 642, \n 573, 610, 945, 506, 956], [896, 309, 293, 526, 429, 298, 636, 989, 80],\n [880, 153, 23, 95, 765, 124, 818, 836, 15], [242, 510, 792, 823, 494, \n 479, 737, 231, 317], [982, 293, 984, 13, 276, 39, 167, 146, 236], [431,\n 546, 246, 860, 207, 380, 306, 577, 405], [905, 276, 247, 949, 479, 6, \n 61, 479, 257], [106, 744, 940, 112, 474, 457, 968, 106, 834]])\n', (182493, 182931), False, 'from snail import snail\n'), ((183830, 183844), 'snail.snail', 'snail', (['[[601]]'], {}), '([[601]])\n', (183835, 183844), False, 'from snail import snail\n'), ((183913, 185456), 'snail.snail', 'snail', (['[[536, 275, 747, 8, 428, 685, 425, 412, 645, 533, 654, 886, 275, 373, 341, \n 70, 650], [756, 413, 436, 934, 70, 645, 837, 399, 729, 158, 115, 212, \n 529, 627, 892, 58, 619], [518, 780, 787, 240, 167, 877, 45, 186, 204, \n 22, 90, 292, 440, 612, 569, 934, 587], [409, 521, 219, 974, 972, 466, \n 222, 367, 656, 763, 432, 42, 652, 251, 806, 486, 266], [144, 220, 975, \n 993, 678, 28, 287, 838, 236, 632, 211, 508, 380, 748, 908, 232, 311], [\n 59, 581, 843, 66, 293, 134, 177, 377, 903, 109, 289, 797, 915, 171, 878,\n 695, 826], [5, 889, 628, 878, 997, 251, 772, 414, 963, 863, 628, 454, \n 971, 275, 246, 243, 471], [589, 24, 469, 290, 96, 747, 174, 213, 135, \n 156, 578, 50, 459, 831, 340, 571, 230], [943, 385, 307, 46, 800, 569, \n 449, 634, 899, 355, 786, 960, 487, 267, 905, 890, 626], [382, 736, 909,\n 496, 936, 828, 684, 105, 44, 589, 477, 225, 434, 649, 419, 273, 447], [\n 629, 814, 94, 606, 640, 272, 12, 589, 421, 327, 552, 249, 232, 32, 713,\n 179, 812], [432, 535, 53, 10, 658, 475, 431, 61, 830, 716, 173, 797, \n 766, 76, 537, 4, 156], [360, 671, 868, 928, 838, 705, 359, 128, 397, \n 793, 696, 194, 904, 174, 818, 833, 48], [789, 821, 540, 10, 356, 267, \n 847, 783, 914, 391, 93, 977, 426, 780, 755, 35, 914], [862, 389, 478, \n 904, 272, 512, 567, 469, 802, 492, 300, 137, 908, 585, 72, 928, 133], [\n 29, 33, 792, 193, 910, 404, 972, 524, 301, 32, 385, 813, 353, 322, 112,\n 606, 138], [930, 878, 888, 703, 28, 422, 72, 939, 971, 551, 49, 363, 45,\n 723, 659, 553, 133]]'], {}), '([[536, 275, 747, 8, 428, 685, 425, 412, 645, 533, 654, 886, 275, 373,\n 
341, 70, 650], [756, 413, 436, 934, 70, 645, 837, 399, 729, 158, 115, \n 212, 529, 627, 892, 58, 619], [518, 780, 787, 240, 167, 877, 45, 186, \n 204, 22, 90, 292, 440, 612, 569, 934, 587], [409, 521, 219, 974, 972, \n 466, 222, 367, 656, 763, 432, 42, 652, 251, 806, 486, 266], [144, 220, \n 975, 993, 678, 28, 287, 838, 236, 632, 211, 508, 380, 748, 908, 232, \n 311], [59, 581, 843, 66, 293, 134, 177, 377, 903, 109, 289, 797, 915, \n 171, 878, 695, 826], [5, 889, 628, 878, 997, 251, 772, 414, 963, 863, \n 628, 454, 971, 275, 246, 243, 471], [589, 24, 469, 290, 96, 747, 174, \n 213, 135, 156, 578, 50, 459, 831, 340, 571, 230], [943, 385, 307, 46, \n 800, 569, 449, 634, 899, 355, 786, 960, 487, 267, 905, 890, 626], [382,\n 736, 909, 496, 936, 828, 684, 105, 44, 589, 477, 225, 434, 649, 419, \n 273, 447], [629, 814, 94, 606, 640, 272, 12, 589, 421, 327, 552, 249, \n 232, 32, 713, 179, 812], [432, 535, 53, 10, 658, 475, 431, 61, 830, 716,\n 173, 797, 766, 76, 537, 4, 156], [360, 671, 868, 928, 838, 705, 359, \n 128, 397, 793, 696, 194, 904, 174, 818, 833, 48], [789, 821, 540, 10, \n 356, 267, 847, 783, 914, 391, 93, 977, 426, 780, 755, 35, 914], [862, \n 389, 478, 904, 272, 512, 567, 469, 802, 492, 300, 137, 908, 585, 72, \n 928, 133], [29, 33, 792, 193, 910, 404, 972, 524, 301, 32, 385, 813, \n 353, 322, 112, 606, 138], [930, 878, 888, 703, 28, 422, 72, 939, 971, \n 551, 49, 363, 45, 723, 659, 553, 133]])\n', (183918, 185456), False, 'from snail import snail\n'), ((188602, 188870), 'snail.snail', 'snail', (['[[350, 303, 624, 66, 319, 723, 677], [44, 616, 64, 859, 683, 425, 556], [\n 551, 592, 382, 678, 823, 63, 881], [956, 8, 601, 384, 191, 811, 32], [\n 815, 931, 592, 1, 230, 786, 446], [597, 948, 908, 590, 858, 850, 974],\n [533, 204, 906, 134, 27, 882, 14]]'], {}), '([[350, 303, 624, 66, 319, 723, 677], [44, 616, 64, 859, 683, 425, 556\n ], [551, 592, 382, 678, 823, 63, 881], [956, 8, 601, 384, 191, 811, 32],\n [815, 931, 592, 1, 230, 786, 446], [597, 948, 908, 590, 858, 850, 974],\n [533, 204, 906, 134, 27, 882, 14]])\n', (188607, 188870), False, 'from snail import snail\n'), ((189476, 190685), 'snail.snail', 'snail', (['[[232, 617, 922, 488, 792, 289, 488, 451, 844, 714, 179, 844, 377, 576, 242\n ], [638, 339, 530, 807, 417, 862, 71, 352, 963, 49, 822, 978, 162, 924,\n 461], [148, 636, 263, 858, 140, 630, 654, 304, 258, 412, 64, 1, 326, \n 311, 284], [573, 758, 197, 133, 766, 783, 550, 324, 290, 293, 337, 479,\n 415, 587, 133], [798, 60, 844, 834, 303, 77, 121, 339, 885, 691, 848, \n 272, 484, 758, 485], [364, 590, 109, 407, 502, 59, 341, 166, 109, 382, \n 90, 226, 362, 299, 761], [308, 771, 541, 867, 867, 886, 671, 302, 894, \n 517, 713, 570, 682, 603, 209], [512, 975, 223, 313, 340, 376, 751, 76, \n 332, 80, 81, 581, 401, 768, 373], [933, 400, 289, 659, 875, 869, 5, 606,\n 314, 989, 494, 403, 925, 341, 839], [470, 504, 721, 26, 765, 821, 985, \n 750, 905, 938, 975, 950, 288, 196, 603], [977, 989, 127, 507, 747, 679,\n 671, 797, 819, 775, 439, 994, 870, 970, 164], [761, 489, 634, 539, 499,\n 48, 61, 515, 675, 8, 194, 621, 828, 638, 801], [366, 575, 629, 798, 838,\n 201, 769, 989, 507, 142, 3, 561, 225, 282, 604], [122, 776, 797, 161, \n 244, 963, 385, 715, 120, 321, 752, 489, 233, 904, 843], [739, 637, 324,\n 232, 751, 507, 800, 548, 486, 781, 554, 267, 721, 845, 6]]'], {}), '([[232, 617, 922, 488, 792, 289, 488, 451, 844, 714, 179, 844, 377, \n 576, 242], [638, 339, 530, 807, 417, 862, 71, 352, 963, 49, 822, 978, \n 162, 924, 461], [148, 636, 263, 858, 140, 630, 654, 304, 258, 412, 64, 
\n 1, 326, 311, 284], [573, 758, 197, 133, 766, 783, 550, 324, 290, 293, \n 337, 479, 415, 587, 133], [798, 60, 844, 834, 303, 77, 121, 339, 885, \n 691, 848, 272, 484, 758, 485], [364, 590, 109, 407, 502, 59, 341, 166, \n 109, 382, 90, 226, 362, 299, 761], [308, 771, 541, 867, 867, 886, 671, \n 302, 894, 517, 713, 570, 682, 603, 209], [512, 975, 223, 313, 340, 376,\n 751, 76, 332, 80, 81, 581, 401, 768, 373], [933, 400, 289, 659, 875, \n 869, 5, 606, 314, 989, 494, 403, 925, 341, 839], [470, 504, 721, 26, \n 765, 821, 985, 750, 905, 938, 975, 950, 288, 196, 603], [977, 989, 127,\n 507, 747, 679, 671, 797, 819, 775, 439, 994, 870, 970, 164], [761, 489,\n 634, 539, 499, 48, 61, 515, 675, 8, 194, 621, 828, 638, 801], [366, 575,\n 629, 798, 838, 201, 769, 989, 507, 142, 3, 561, 225, 282, 604], [122, \n 776, 797, 161, 244, 963, 385, 715, 120, 321, 752, 489, 233, 904, 843],\n [739, 637, 324, 232, 751, 507, 800, 548, 486, 781, 554, 267, 721, 845, 6]])\n', (189481, 190685), False, 'from snail import snail\n'), ((193269, 194323), 'snail.snail', 'snail', (['[[144, 568, 21, 727, 740, 122, 743, 378, 519, 294, 987, 449, 688, 91], [552,\n 330, 129, 484, 303, 770, 794, 409, 32, 995, 764, 458, 386, 946], [232, \n 385, 662, 477, 897, 597, 969, 609, 361, 529, 422, 18, 645, 653], [819, \n 179, 340, 828, 667, 374, 420, 151, 671, 281, 326, 381, 172, 12], [695, \n 954, 432, 746, 292, 212, 544, 792, 75, 976, 26, 534, 887, 349], [313, \n 163, 954, 749, 295, 980, 883, 133, 74, 156, 703, 232, 232, 743], [417, \n 501, 155, 131, 331, 691, 333, 873, 964, 12, 447, 684, 455, 434], [135, \n 141, 836, 947, 767, 389, 477, 646, 470, 281, 296, 182, 898, 681], [373,\n 491, 318, 425, 872, 981, 276, 414, 883, 170, 585, 494, 993, 789], [419,\n 172, 127, 49, 377, 384, 279, 958, 572, 535, 777, 121, 226, 728], [683, \n 986, 545, 205, 129, 816, 117, 474, 159, 577, 380, 149, 42, 360], [317, \n 525, 283, 558, 762, 813, 230, 435, 944, 500, 260, 211, 728, 666], [858,\n 49, 772, 565, 195, 376, 938, 653, 992, 54, 819, 552, 93, 486], [246, 99,\n 795, 769, 705, 916, 422, 117, 882, 41, 23, 612, 426, 556]]'], {}), '([[144, 568, 21, 727, 740, 122, 743, 378, 519, 294, 987, 449, 688, 91],\n [552, 330, 129, 484, 303, 770, 794, 409, 32, 995, 764, 458, 386, 946],\n [232, 385, 662, 477, 897, 597, 969, 609, 361, 529, 422, 18, 645, 653],\n [819, 179, 340, 828, 667, 374, 420, 151, 671, 281, 326, 381, 172, 12],\n [695, 954, 432, 746, 292, 212, 544, 792, 75, 976, 26, 534, 887, 349], [\n 313, 163, 954, 749, 295, 980, 883, 133, 74, 156, 703, 232, 232, 743], [\n 417, 501, 155, 131, 331, 691, 333, 873, 964, 12, 447, 684, 455, 434], [\n 135, 141, 836, 947, 767, 389, 477, 646, 470, 281, 296, 182, 898, 681],\n [373, 491, 318, 425, 872, 981, 276, 414, 883, 170, 585, 494, 993, 789],\n [419, 172, 127, 49, 377, 384, 279, 958, 572, 535, 777, 121, 226, 728],\n [683, 986, 545, 205, 129, 816, 117, 474, 159, 577, 380, 149, 42, 360],\n [317, 525, 283, 558, 762, 813, 230, 435, 944, 500, 260, 211, 728, 666],\n [858, 49, 772, 565, 195, 376, 938, 653, 992, 54, 819, 552, 93, 486], [\n 246, 99, 795, 769, 705, 916, 422, 117, 882, 41, 23, 612, 426, 556]])\n', (193274, 194323), False, 'from snail import snail\n'), ((196639, 198020), 'snail.snail', 'snail', (['[[189, 117, 130, 56, 993, 906, 843, 983, 823, 485, 420, 275, 333, 394, 68, \n 33], [818, 830, 502, 978, 273, 428, 157, 621, 121, 411, 509, 279, 263, \n 56, 108, 82], [632, 484, 962, 408, 95, 161, 463, 823, 500, 110, 616, \n 113, 355, 800, 916, 304], [146, 217, 702, 32, 929, 794, 249, 734, 284, \n 757, 354, 826, 842, 992, 
651, 820], [644, 413, 389, 168, 871, 136, 95, \n 987, 101, 790, 634, 771, 802, 35, 528, 248], [636, 81, 890, 390, 966, \n 16, 584, 150, 112, 563, 432, 522, 231, 817, 111, 490], [572, 77, 887, \n 337, 985, 822, 83, 788, 986, 767, 996, 442, 328, 24, 906, 496], [889, \n 781, 904, 723, 475, 507, 809, 682, 839, 436, 614, 415, 490, 892, 778, \n 879], [423, 699, 788, 677, 630, 121, 568, 397, 366, 495, 850, 43, 181, \n 296, 671, 181], [849, 828, 840, 490, 665, 921, 666, 346, 315, 287, 347,\n 527, 346, 38, 599, 743], [196, 68, 364, 681, 321, 104, 86, 948, 393, \n 201, 470, 539, 459, 60, 156, 742], [820, 525, 485, 892, 653, 694, 287, \n 887, 729, 75, 466, 354, 568, 850, 732, 654], [670, 174, 472, 262, 890, \n 410, 362, 234, 335, 92, 451, 167, 706, 177, 955, 612], [647, 138, 198, \n 265, 541, 673, 41, 818, 564, 863, 932, 552, 245, 71, 535, 289], [626, \n 514, 854, 694, 783, 469, 674, 473, 537, 157, 546, 891, 615, 399, 547, \n 699], [662, 868, 468, 922, 99, 268, 120, 280, 983, 586, 712, 206, 750, \n 43, 640, 116]]'], {}), '([[189, 117, 130, 56, 993, 906, 843, 983, 823, 485, 420, 275, 333, 394,\n 68, 33], [818, 830, 502, 978, 273, 428, 157, 621, 121, 411, 509, 279, \n 263, 56, 108, 82], [632, 484, 962, 408, 95, 161, 463, 823, 500, 110, \n 616, 113, 355, 800, 916, 304], [146, 217, 702, 32, 929, 794, 249, 734, \n 284, 757, 354, 826, 842, 992, 651, 820], [644, 413, 389, 168, 871, 136,\n 95, 987, 101, 790, 634, 771, 802, 35, 528, 248], [636, 81, 890, 390, \n 966, 16, 584, 150, 112, 563, 432, 522, 231, 817, 111, 490], [572, 77, \n 887, 337, 985, 822, 83, 788, 986, 767, 996, 442, 328, 24, 906, 496], [\n 889, 781, 904, 723, 475, 507, 809, 682, 839, 436, 614, 415, 490, 892, \n 778, 879], [423, 699, 788, 677, 630, 121, 568, 397, 366, 495, 850, 43, \n 181, 296, 671, 181], [849, 828, 840, 490, 665, 921, 666, 346, 315, 287,\n 347, 527, 346, 38, 599, 743], [196, 68, 364, 681, 321, 104, 86, 948, \n 393, 201, 470, 539, 459, 60, 156, 742], [820, 525, 485, 892, 653, 694, \n 287, 887, 729, 75, 466, 354, 568, 850, 732, 654], [670, 174, 472, 262, \n 890, 410, 362, 234, 335, 92, 451, 167, 706, 177, 955, 612], [647, 138, \n 198, 265, 541, 673, 41, 818, 564, 863, 932, 552, 245, 71, 535, 289], [\n 626, 514, 854, 694, 783, 469, 674, 473, 537, 157, 546, 891, 615, 399, \n 547, 699], [662, 868, 468, 922, 99, 268, 120, 280, 983, 586, 712, 206, \n 750, 43, 640, 116]])\n', (196644, 198020), False, 'from snail import snail\n'), ((200883, 201328), 'snail.snail', 'snail', (['[[830, 253, 625, 973, 491, 433, 340, 950, 941], [594, 74, 780, 39, 840, 620,\n 979, 117, 869], [382, 233, 384, 673, 659, 501, 886, 415, 947], [608, 73,\n 246, 530, 429, 506, 573, 552, 505], [482, 346, 767, 910, 939, 200, 398,\n 831, 979], [382, 723, 528, 936, 648, 300, 650, 564, 823], [68, 395, 220,\n 265, 125, 912, 658, 384, 764], [934, 378, 160, 882, 201, 23, 74, 287, \n 899], [927, 183, 846, 677, 44, 118, 611, 685, 902]]'], {}), '([[830, 253, 625, 973, 491, 433, 340, 950, 941], [594, 74, 780, 39, \n 840, 620, 979, 117, 869], [382, 233, 384, 673, 659, 501, 886, 415, 947],\n [608, 73, 246, 530, 429, 506, 573, 552, 505], [482, 346, 767, 910, 939,\n 200, 398, 831, 979], [382, 723, 528, 936, 648, 300, 650, 564, 823], [68,\n 395, 220, 265, 125, 912, 658, 384, 764], [934, 378, 160, 882, 201, 23, \n 74, 287, 899], [927, 183, 846, 677, 44, 118, 611, 685, 902]])\n', (200888, 201328), False, 'from snail import snail\n'), ((202231, 203779), 'snail.snail', 'snail', (['[[138, 741, 417, 159, 204, 874, 124, 662, 424, 454, 485, 437, 98, 149, 674,\n 523, 367], [743, 936, 693, 93, 
cuonglm/tink | python/tink/jwt/_raw_jwt.py | df5fa42e45b4d43aac6c3506ceba2956b79a62b8 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The raw JSON Web Token (JWT)."""
import copy
import datetime
import json
from typing import cast, Mapping, Set, List, Dict, Optional, Text, Union, Any
from tink import core
from tink.jwt import _jwt_error
from tink.jwt import _jwt_format
_REGISTERED_NAMES = frozenset({'iss', 'sub', 'jti', 'aud', 'exp', 'nbf', 'iat'})
_MAX_TIMESTAMP_VALUE = 253402300799 # 31 Dec 9999, 23:59:59 GMT
Claim = Union[None, bool, int, float, Text, List[Any], Dict[Text, Any]]
def _from_datetime(t: datetime.datetime) -> float:
if not t.tzinfo:
raise _jwt_error.JwtInvalidError('datetime must have tzinfo')
return t.timestamp()
def _to_datetime(timestamp: float) -> datetime.datetime:
return datetime.datetime.fromtimestamp(timestamp, datetime.timezone.utc)
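# --- Editor's note (illustrative, not in the original source): for example,
# --- _to_datetime(0) yields 1970-01-01 00:00:00+00:00, and passing that value
# --- back through _from_datetime round-trips to 0.0.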
def _validate_custom_claim_name(name: Text) -> None:
if name in _REGISTERED_NAMES:
raise _jwt_error.JwtInvalidError(
'registered name %s cannot be custom claim name' % name)
class RawJwt(object):
"""A raw JSON Web Token (JWT).
  It can be signed to obtain a compact JWT. It is also used as a parsed token
that has not yet been verified.
"""
def __new__(cls):
raise core.TinkError('RawJwt cannot be instantiated directly.')
def __init__(self, type_header: Optional[Text], payload: Dict[Text,
Any]) -> None:
# No need to copy payload, because only create and from_json_payload
# call this method.
if not isinstance(payload, Dict):
raise _jwt_error.JwtInvalidError('payload must be a dict')
self._type_header = type_header
self._payload = payload
self._validate_string_claim('iss')
self._validate_string_claim('sub')
self._validate_string_claim('jti')
self._validate_timestamp_claim('exp')
self._validate_timestamp_claim('nbf')
self._validate_timestamp_claim('iat')
self._validate_audience_claim()
def _validate_string_claim(self, name: Text):
if name in self._payload:
if not isinstance(self._payload[name], Text):
raise _jwt_error.JwtInvalidError('claim %s must be a String' % name)
def _validate_timestamp_claim(self, name: Text):
if name in self._payload:
timestamp = self._payload[name]
if not isinstance(timestamp, (int, float)):
raise _jwt_error.JwtInvalidError('claim %s must be a Number' % name)
if timestamp > _MAX_TIMESTAMP_VALUE or timestamp < 0:
raise _jwt_error.JwtInvalidError(
'timestamp of claim %s is out of range' % name)
def _validate_audience_claim(self):
if 'aud' in self._payload:
audiences = self._payload['aud']
if isinstance(audiences, Text):
self._payload['aud'] = [audiences]
return
if not isinstance(audiences, list) or not audiences:
raise _jwt_error.JwtInvalidError('audiences must be a non-empty list')
if not all(isinstance(value, Text) for value in audiences):
raise _jwt_error.JwtInvalidError('audiences must only contain Text')
# TODO(juerg): Consider adding a raw_ prefix to all access methods
def has_type_header(self) -> bool:
return self._type_header is not None
def type_header(self) -> Text:
if not self.has_type_header():
raise KeyError('type header is not set')
return self._type_header
def has_issuer(self) -> bool:
return 'iss' in self._payload
def issuer(self) -> Text:
return cast(Text, self._payload['iss'])
def has_subject(self) -> bool:
return 'sub' in self._payload
def subject(self) -> Text:
return cast(Text, self._payload['sub'])
def has_audiences(self) -> bool:
return 'aud' in self._payload
def audiences(self) -> List[Text]:
return list(self._payload['aud'])
def has_jwt_id(self) -> bool:
return 'jti' in self._payload
def jwt_id(self) -> Text:
return cast(Text, self._payload['jti'])
def has_expiration(self) -> bool:
return 'exp' in self._payload
def expiration(self) -> datetime.datetime:
return _to_datetime(self._payload['exp'])
def has_not_before(self) -> bool:
return 'nbf' in self._payload
def not_before(self) -> datetime.datetime:
return _to_datetime(self._payload['nbf'])
def has_issued_at(self) -> bool:
return 'iat' in self._payload
def issued_at(self) -> datetime.datetime:
return _to_datetime(self._payload['iat'])
def custom_claim_names(self) -> Set[Text]:
return {n for n in self._payload.keys() if n not in _REGISTERED_NAMES}
def custom_claim(self, name: Text) -> Claim:
_validate_custom_claim_name(name)
value = self._payload[name]
if isinstance(value, (list, dict)):
return copy.deepcopy(value)
else:
return value
def json_payload(self) -> Text:
"""Returns the payload encoded as JSON string."""
return _jwt_format.json_dumps(self._payload)
@classmethod
def create(cls,
*,
type_header: Optional[Text] = None,
issuer: Optional[Text] = None,
subject: Optional[Text] = None,
audiences: Optional[List[Text]] = None,
jwt_id: Optional[Text] = None,
expiration: Optional[datetime.datetime] = None,
not_before: Optional[datetime.datetime] = None,
issued_at: Optional[datetime.datetime] = None,
             custom_claims: Optional[Mapping[Text, Claim]] = None) -> 'RawJwt':
"""Create a new RawJwt instance."""
payload = {}
if issuer:
payload['iss'] = issuer
if subject:
payload['sub'] = subject
if jwt_id is not None:
payload['jti'] = jwt_id
if audiences is not None:
payload['aud'] = copy.copy(audiences)
if expiration:
payload['exp'] = _from_datetime(expiration)
if not_before:
payload['nbf'] = _from_datetime(not_before)
if issued_at:
payload['iat'] = _from_datetime(issued_at)
if custom_claims:
for name, value in custom_claims.items():
_validate_custom_claim_name(name)
if not isinstance(name, Text):
raise _jwt_error.JwtInvalidError('claim name must be Text')
if (value is None or isinstance(value, (bool, int, float, Text))):
payload[name] = value
elif isinstance(value, list):
payload[name] = json.loads(json.dumps(value))
elif isinstance(value, dict):
payload[name] = json.loads(json.dumps(value))
else:
raise _jwt_error.JwtInvalidError('claim %s has unknown type' % name)
raw_jwt = object.__new__(cls)
raw_jwt.__init__(type_header, payload)
return raw_jwt
@classmethod
def from_json(cls, type_header: Optional[Text], payload: Text) -> 'RawJwt':
"""Creates a RawJwt from payload encoded as JSON string."""
raw_jwt = object.__new__(cls)
raw_jwt.__init__(type_header, _jwt_format.json_loads(payload))
return raw_jwt
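

# --- Editor's illustrative addition, not part of the original Tink source: a
# --- minimal usage sketch of the API defined above. The concrete claim values
# --- and the __main__ guard are assumptions for demonstration only.
if __name__ == '__main__':
  example = RawJwt.create(
      type_header='JWT',
      issuer='example-issuer',
      audiences=['service-a', 'service-b'],
      expiration=datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc),
      custom_claims={'role': 'admin'})
  print(example.json_payload())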
| [((1236, 1301), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['timestamp', 'datetime.timezone.utc'], {}), '(timestamp, datetime.timezone.utc)\n', (1267, 1301), False, 'import datetime\n'), ((1089, 1144), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (['"""datetime must have tzinfo"""'], {}), "('datetime must have tzinfo')\n", (1115, 1144), False, 'from tink.jwt import _jwt_error\n'), ((1399, 1486), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (["('registered name %s cannot be custom claim name' % name)"], {}), "('registered name %s cannot be custom claim name' %\n name)\n", (1425, 1486), False, 'from tink.jwt import _jwt_error\n'), ((1698, 1755), 'tink.core.TinkError', 'core.TinkError', (['"""RawJwt cannot be instantiated directly."""'], {}), "('RawJwt cannot be instantiated directly.')\n", (1712, 1755), False, 'from tink import core\n'), ((3952, 3984), 'typing.cast', 'cast', (['Text', "self._payload['iss']"], {}), "(Text, self._payload['iss'])\n", (3956, 3984), False, 'from typing import cast, Mapping, Set, List, Dict, Optional, Text, Union, Any\n'), ((4094, 4126), 'typing.cast', 'cast', (['Text', "self._payload['sub']"], {}), "(Text, self._payload['sub'])\n", (4098, 4126), False, 'from typing import cast, Mapping, Set, List, Dict, Optional, Text, Union, Any\n'), ((4380, 4412), 'typing.cast', 'cast', (['Text', "self._payload['jti']"], {}), "(Text, self._payload['jti'])\n", (4384, 4412), False, 'from typing import cast, Mapping, Set, List, Dict, Optional, Text, Union, Any\n'), ((5342, 5379), 'tink.jwt._jwt_format.json_dumps', '_jwt_format.json_dumps', (['self._payload'], {}), '(self._payload)\n', (5364, 5379), False, 'from tink.jwt import _jwt_format\n'), ((2053, 2105), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (['"""payload must be a dict"""'], {}), "('payload must be a dict')\n", (2079, 2105), False, 'from tink.jwt import _jwt_error\n'), ((5192, 5212), 'copy.deepcopy', 'copy.deepcopy', (['value'], {}), '(value)\n', (5205, 5212), False, 'import copy\n'), ((6176, 6196), 'copy.copy', 'copy.copy', (['audiences'], {}), '(audiences)\n', (6185, 6196), False, 'import copy\n'), ((7333, 7364), 'tink.jwt._jwt_format.json_loads', '_jwt_format.json_loads', (['payload'], {}), '(payload)\n', (7355, 7364), False, 'from tink.jwt import _jwt_format\n'), ((2594, 2656), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (["('claim %s must be a String' % name)"], {}), "('claim %s must be a String' % name)\n", (2620, 2656), False, 'from tink.jwt import _jwt_error\n'), ((2841, 2903), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (["('claim %s must be a Number' % name)"], {}), "('claim %s must be a Number' % name)\n", (2867, 2903), False, 'from tink.jwt import _jwt_error\n'), ((2978, 3052), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (["('timestamp of claim %s is out of range' % name)"], {}), "('timestamp of claim %s is out of range' % name)\n", (3004, 3052), False, 'from tink.jwt import _jwt_error\n'), ((3344, 3408), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (['"""audiences must be a non-empty list"""'], {}), "('audiences must be a non-empty list')\n", (3370, 3408), False, 'from tink.jwt import _jwt_error\n'), ((3489, 3551), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (['"""audiences must only contain Text"""'], {}), "('audiences must only contain Text')\n", (3515, 3551), False, 'from tink.jwt 
import _jwt_error\n'), ((6569, 6622), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (['"""claim name must be Text"""'], {}), "('claim name must be Text')\n", (6595, 6622), False, 'from tink.jwt import _jwt_error\n'), ((6805, 6822), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (6815, 6822), False, 'import json\n'), ((6948, 7010), 'tink.jwt._jwt_error.JwtInvalidError', '_jwt_error.JwtInvalidError', (["('claim %s has unknown type' % name)"], {}), "('claim %s has unknown type' % name)\n", (6974, 7010), False, 'from tink.jwt import _jwt_error\n'), ((6899, 6916), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (6909, 6916), False, 'import json\n')] |
evernym/indy-plenum | plenum/test/view_change/test_no_instance_change_before_node_is_ready.py | dc390caa16c0b15dcc549d557ede6f64c0c1b842 | import pytest
from plenum.server.view_change.view_changer import ViewChanger
from stp_core.common.log import getlogger
from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node
logger = getlogger()
@pytest.fixture(scope="module", autouse=True)
def tconf(tconf):
old_vc_timeout = tconf.VIEW_CHANGE_TIMEOUT
tconf.VIEW_CHANGE_TIMEOUT = 10
yield tconf
tconf.VIEW_CHANGE_TIMEOUT = old_vc_timeout
def test_no_instance_change_on_primary_disconnection_for_not_ready_node(
looper, txnPoolNodeSet, tdir, tconf,
allPluginsPath, sdk_pool_handle, sdk_wallet_steward):
"""
Test steps:
    1. Create a new node, but don't add it to the pool (i.e. don't send the NODE txn), so that the node is not ready.
    2. Wait for more than VIEW_CHANGE_TIMEOUT (the timeout for the initial check for a disconnected primary).
    3. Make sure no InstanceChange is sent by the new node.
    4. Add the node to the pool (send the NODE txn) and make sure that the node is ready now.
    5. Wait for more than VIEW_CHANGE_TIMEOUT again.
    6. Make sure no InstanceChange is sent by the new node.
"""
    # 1. Create a new node, but don't add it to the pool (i.e. don't send the NODE txn), so that the node is not ready.
sigseed, bls_key, new_node, node_ha, client_ha = \
start_not_added_node(looper,
tdir, tconf, allPluginsPath,
"TestTheta")
    # 2. Wait for more than VIEW_CHANGE_TIMEOUT (the timeout for the initial check for a disconnected primary).
    looper.runFor(tconf.VIEW_CHANGE_TIMEOUT + 2)
    # 3. Make sure no InstanceChange is sent by the new node.
assert 0 == new_node.view_changer.spylog.count(ViewChanger.sendInstanceChange.__name__)
logger.info("Start added node {}".format(new_node))
    # 4. Add the node to the pool (send the NODE txn) and make sure that the node is ready now.
add_started_node(looper,
new_node,
node_ha,
client_ha,
txnPoolNodeSet,
sdk_pool_handle,
sdk_wallet_steward,
bls_key)
    # 5. Wait for more than VIEW_CHANGE_TIMEOUT again.
    looper.runFor(tconf.VIEW_CHANGE_TIMEOUT + 2)
    # 6. Make sure no InstanceChange is sent by the new node.
assert 0 == new_node.view_changer.spylog.count(ViewChanger.sendInstanceChange.__name__)
| [((219, 230), 'stp_core.common.log.getlogger', 'getlogger', ([], {}), '()\n', (228, 230), False, 'from stp_core.common.log import getlogger\n'), ((234, 278), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '(True)'}), "(scope='module', autouse=True)\n", (248, 278), False, 'import pytest\n'), ((1346, 1416), 'plenum.test.pool_transactions.helper.start_not_added_node', 'start_not_added_node', (['looper', 'tdir', 'tconf', 'allPluginsPath', '"""TestTheta"""'], {}), "(looper, tdir, tconf, allPluginsPath, 'TestTheta')\n", (1366, 1416), False, 'from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node\n'), ((1933, 2053), 'plenum.test.pool_transactions.helper.add_started_node', 'add_started_node', (['looper', 'new_node', 'node_ha', 'client_ha', 'txnPoolNodeSet', 'sdk_pool_handle', 'sdk_wallet_steward', 'bls_key'], {}), '(looper, new_node, node_ha, client_ha, txnPoolNodeSet,\n sdk_pool_handle, sdk_wallet_steward, bls_key)\n', (1949, 2053), False, 'from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node\n')] |
tomaszkingukrol/rest-api-cache-proxy | src/cache/requests_cache_abstract.py | 50738f168f36d285b9a924d9f9d106a65b5617c8 | from abc import ABC, abstractmethod
from model.response import ResponseModel
class CacheInterface(ABC):
    @classmethod
    @abstractmethod
    async def get(cls, url: str) -> ResponseModel: ...

    @classmethod
    @abstractmethod
    async def set(cls, url: str, value: ResponseModel, ttl=0): ...
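

# --- Editor's illustrative addition: a minimal in-memory implementation of
# --- CacheInterface. The dict-backed store is an assumption for demonstration;
# --- a real backend would honour ``ttl`` and use an external cache.
class InMemoryCache(CacheInterface):
    _store: dict = {}

    @classmethod
    async def get(cls, url: str) -> ResponseModel:
        # Returns None when the url has never been cached.
        return cls._store.get(url)

    @classmethod
    async def set(cls, url: str, value: ResponseModel, ttl=0):
        cls._store[url] = value  # ttl is ignored in this sketch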
| [] |
cotobadesign/cotoba-agent-oss | dialogue-engine/test/programytest/config/file/test_json.py | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import os
from programy.config.file.json_file import JSONConfigurationFile
from programy.clients.events.console.config import ConsoleConfiguration
from programy.utils.substitutions.substitues import Substitutions
from programytest.config.file.base_file_tests import ConfigurationBaseFileTests
class JSONConfigurationFileTests(ConfigurationBaseFileTests):
def test_get_methods(self):
config_data = JSONConfigurationFile()
self.assertIsNotNone(config_data)
configuration = config_data.load_from_text("""
{
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
section = config_data.get_section("brainx")
self.assertIsNone(section)
section = config_data.get_section("brain")
self.assertIsNotNone(section)
child_section = config_data.get_section("overrides", section)
self.assertIsNotNone(child_section)
keys = list(config_data.get_child_section_keys("overrides", section))
self.assertIsNotNone(keys)
self.assertEqual(3, len(keys))
self.assertTrue("allow_system_aiml" in keys)
self.assertTrue("allow_learn_aiml" in keys)
self.assertTrue("allow_learnf_aiml" in keys)
self.assertIsNone(config_data.get_child_section_keys("missing", section))
self.assertEqual(True, config_data.get_option(child_section, "allow_system_aiml"))
self.assertEqual(True, config_data.get_option(child_section, "missing", missing_value=True))
self.assertEqual(True, config_data.get_bool_option(child_section, "allow_system_aiml"))
self.assertEqual(False, config_data.get_bool_option(child_section, "other_value"))
self.assertEqual(0, config_data.get_int_option(child_section, "other_value"))
def test_load_from_file(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_file(os.path.dirname(__file__) + os.sep + "test_json.json", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assert_configuration(configuration)
def test_load_from_text_multis_one_value(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"bot": {
"brain": "bot1"
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assertEqual(1, len(configuration.client_configuration.configurations[0].configurations))
def test_load_from_text_multis_multiple_values(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot"
},
"bot": {
"brain": ["bot1", "bot2"]
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assertEqual(2, len(configuration.client_configuration.configurations[0].configurations))
def test_load_from_text(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot",
"prompt": ">>>",
"scheduler": {
"name": "Scheduler1",
"debug_level": 50,
"add_listeners": false,
"remove_all_jobs": false
},
"storage": {
"entities": {
"users": "sql",
"linked_accounts": "sql",
"links": "sql",
"properties": "file",
"conversations": "file",
"categories": "file",
"maps": "file",
"sets": "file",
"rdf": "file",
"denormal": "file",
"normal": "file",
"gender": "file",
"person": "file",
"person2": "file",
"spelling_corpus": "file",
"license_keys": "file",
"nodes": "file",
"binaries": "file",
"braintree": "file",
"preprocessors": "file",
"postprocessors": "file",
"regex_templates": "file",
"usergroups": "file",
"learnf": "file"
},
"stores": {
"sql": {
"type": "sql",
"config": {
"url": "sqlite:///:memory",
"echo": false,
"encoding": "utf-8",
"create_db": true,
"drop_all_first": true
}
},
"mongo": {
"type": "mongo",
"config": {
"url": "mongodb://localhost:27017/",
"database": "programy",
"drop_all_first": true
}
},
"redis": {
"type": "redis",
"config": {
"host": "localhost",
"port": 6379,
"password": null,
"db": 0,
"prefix": "programy",
"drop_all_first": true
}
},
"file": {
"type": "file",
"config": {
"category_storage": {
"files": "./storage/categories"
},
"conversations_storage": {
"files": "./storage/conversations"
},
"sets_storage": {
"files": "./storage/sets",
"extension": ".txt",
"directories": false
},
"maps_storage": {
"files": "./storage/maps",
"extension": ".txt",
"directories": false
},
"regex_templates": {
"files": "./storage/regex"
},
"lookups_storage": {
"files": "./storage/lookups",
"extension": ".txt",
"directories": false
},
"properties_storage": {
"file": "./storage/properties.txt"
},
"defaults_storage": {
"file": "./storage/defaults.txt"
},
"rdf_storage": {
"files": "./storage/rdfs",
"extension": ".txt",
"directories": true
},
"spelling_corpus": {
"file": "./storage/spelling/corpus.txt"
},
"license_keys": {
"file": "./storage/license.keys"
},
"nodes": {
"files": "./storage/nodes"
},
"binaries": {
"files": "./storage/binaries"
},
"braintree": {
"file": "./storage/braintree/braintree.xml",
"format": "xml"
},
"preprocessors": {
"file": "./storage/processing/preprocessors.txt"
},
"postprocessors": {
"file": "./storage/processing/postprocessing.txt"
},
"usergroups": {
"files": "./storage/security/usergroups.txt"
},
"learnf": {
"files": "./storage/categories/learnf"
}
}
}
}
},
"logger": {
"type": "logger",
"config": {
"conversation_logger": "conversation"
}
}
},
"voice": {
"license_keys": "$BOT_ROOT/config/license.keys",
"tts": "osx",
"stt": "azhang",
"osx": {
"classname": "talky.clients.voice.tts.osxsay.OSXSayTextToSpeach"
},
"pytts": {
"classname": "talky.clients.voice.tts.pyttssay.PyTTSSayTextToSpeach",
"rate_adjust": 10
},
"azhang": {
"classname": "talky.clients.voice.stt.azhang.AnthonyZhangSpeechToText",
"ambient_adjust": 3,
"service": "ibm"
}
},
"rest": {
"host": "0.0.0.0",
"port": 8989,
"debug": false,
"workers": 4,
"license_keys": "$BOT_ROOT/config/license.keys"
},
"webchat": {
"host": "0.0.0.0",
"port": 8090,
"debug": false,
"license_keys": "$BOT_ROOT/config/license.keys",
"api": "/api/web/v1.0/ask"
},
"twitter": {
"polling": true,
"polling_interval": 49,
"streaming": false,
"use_status": true,
"use_direct_message": true,
"auto_follow": true,
"storage": "file",
"welcome_message": "Thanks for following me, send me a message and I'll try and help",
"license_keys": "file"
},
"xmpp": {
"server": "talk.google.com",
"port": 5222,
"xep_0030": true,
"xep_0004": true,
"xep_0060": true,
"xep_0199": true,
"license_keys": "file"
},
"socket": {
"host": "127.0.0.1",
"port": 9999,
"queue": 5,
"debug": true,
"license_keys": "file"
},
"telegram": {
"unknown_command": "Sorry, that is not a command I have been taught yet!",
"license_keys": "file"
},
"facebook": {
"host": "127.0.0.1",
"port": 5000,
"debug": false,
"license_keys": "file"
},
"twilio": {
"host": "127.0.0.1",
"port": 5000,
"debug": false,
"license_keys": "file"
},
"slack": {
"polling_interval": 1,
"license_keys": "file"
},
"viber": {
"name": "Servusai",
"avatar": "http://viber.com/avatar.jpg",
"license_keys": "file"
},
"line": {
"host": "127.0.0.1",
"port": 8084,
"debug": false,
"license_keys": "file"
},
"kik": {
"bot_name": "servusai",
"webhook": "https://93638f7a.ngrok.io/api/kik/v1.0/ask",
"host": "127.0.0.1",
"port": 8082,
"debug": false,
"license_keys": "file"
},
"bot": {
"brain": "brain",
"initial_question": "Hi, how can I help you today?",
"initial_question_srai": "YINITIALQUESTION",
"default_response": "Sorry, I don't have an answer for that!",
"default_response_srai": "YEMPTY",
"empty_string": "YEMPTY",
"exit_response": "So long, and thanks for the fish!",
"exit_response_srai": "YEXITRESPONSE",
"override_properties": true,
"max_question_recursion": 1000,
"max_question_timeout": 60,
"max_search_depth": 100,
"max_search_timeout": 60,
"spelling": {
"load": true,
"classname": "programy.spelling.norvig.NorvigSpellingChecker",
"check_before": true,
"check_and_retry": true
},
"conversations": {
"max_histories": 100,
"restore_last_topic": false,
"initial_topic": "TOPIC1",
"empty_on_start": false
}
},
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
},
"defaults": {
"default-get": "unknown",
"default-property": "unknown",
"default-map": "unknown",
"learnf-path": "file"
},
"binaries": {
"save_binary": true,
"load_binary": true,
"load_aiml_on_binary_fail": true
},
"braintree": {
"create": true
},
"services": {
"REST": {
"classname": "programy.services.rest.GenericRESTService",
"method": "GET",
"host": "0.0.0.0",
"port": 8080
},
"Pannous": {
"classname": "programy.services.pannous.PannousService",
"url": "http://weannie.pannous.com/api"
}
},
"security": {
"authentication": {
"classname": "programy.security.authenticate.passthrough.BasicPassThroughAuthenticationService",
"denied_srai": "AUTHENTICATION_FAILED"
},
"authorisation": {
"classname": "programy.security.authorise.usergroupsauthorisor.BasicUserGroupAuthorisationService",
"denied_srai": "AUTHORISATION_FAILED",
"usergroups": {
"storage": "file"
}
}
},
"oob": {
"default": {
"classname": "programy.oob.defaults.default.DefaultOutOfBandProcessor"
},
"alarm": {
"classname": "programy.oob.defaults.alarm.AlarmOutOfBandProcessor"
},
"camera": {
"classname": "programy.oob.defaults.camera.CameraOutOfBandProcessor"
},
"clear": {
"classname": "programy.oob.defaults.clear.ClearOutOfBandProcessor"
},
"dial": {
"classname": "programy.oob.defaults.dial.DialOutOfBandProcessor"
},
"dialog": {
"classname": "programy.oob.defaults.dialog.DialogOutOfBandProcessor"
},
"email": {
"classname": "programy.oob.defaults.email.EmailOutOfBandProcessor"
},
"geomap": {
"classname": "programy.oob.defaults.map.MapOutOfBandProcessor"
},
"schedule": {
"classname": "programy.oob.defaults.schedule.ScheduleOutOfBandProcessor"
},
"search": {
"classname": "programy.oob.defaults.search.SearchOutOfBandProcessor"
},
"sms": {
"classname": "programy.oob.defaults.sms.SMSOutOfBandProcessor"
},
"url": {
"classname": "programy.oob.defaults.url.URLOutOfBandProcessor"
},
"wifi": {
"classname": "programy.oob.defaults.wifi.WifiOutOfBandProcessor"
}
},
"dynamic": {
"variables": {
"gettime": "programy.dynamic.variables.datetime.GetTime"
},
"sets": {
"numeric": "programy.dynamic.sets.numeric.IsNumeric",
"roman": "programy.dynamic.sets.roman.IsRomanNumeral"
},
"maps": {
"romantodec": "programy.dynamic.maps.roman.MapRomanToDecimal",
"dectoroman": "programy.dynamic.maps.roman.MapDecimalToRoman"
}
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assert_configuration(configuration)
def test_load_additionals(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot"
},
"bot": {
"brain": "brain"
},
"brain": {
"security": {
"authentication": {
"classname": "programy.security.authenticate.passthrough.PassThroughAuthenticationService",
"denied_srai": "ACCESS_DENIED"
}
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
auth_service = configuration.client_configuration.configurations[0].configurations[0].security.authentication
self.assertIsNotNone(auth_service)
self.assertEqual("ACCESS_DENIED", auth_service.denied_srai)
def test_load_with_subs(self):
subs = Substitutions()
subs.add_substitute("$ALLOW_SYSTEM", True)
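        # --- Editor's note (illustrative): these substitutions are never passed
        # --- to load_from_text below, so "$ALLOW_SYSTEM" is not actually
        # --- exercised -- the JSON under test uses literal booleans.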
config_data = JSONConfigurationFile()
self.assertIsNotNone(config_data)
configuration = config_data.load_from_text("""
{
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
section = config_data.get_section("brainx")
self.assertIsNone(section)
section = config_data.get_section("brain")
self.assertIsNotNone(section)
child_section = config_data.get_section("overrides", section)
self.assertIsNotNone(child_section)
self.assertEqual(True, config_data.get_option(child_section, "allow_system_aiml"))
self.assertEqual(True, config_data.get_bool_option(child_section, "allow_system_aiml"))
self.assertEqual(False, config_data.get_bool_option(child_section, "other_value"))
| [((1485, 1508), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (1506, 1508), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((3072, 3095), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (3093, 3095), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((3426, 3449), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (3447, 3449), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((3909, 3932), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (3930, 3932), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((4460, 4483), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (4481, 4483), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((14814, 14837), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (14835, 14837), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((15782, 15797), 'programy.utils.substitutions.substitues.Substitutions', 'Substitutions', ([], {}), '()\n', (15795, 15797), False, 'from programy.utils.substitutions.substitues import Substitutions\n'), ((15872, 15895), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (15893, 15895), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((1795, 1817), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (1815, 1817), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3234, 3256), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (3254, 3256), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3657, 3679), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (3677, 3679), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((4231, 4253), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (4251, 4253), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((14636, 14658), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (14656, 14658), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((15426, 15448), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (15446, 15448), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((16259, 16281), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (16279, 16281), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3179, 3204), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3194, 3204), False, 'import os\n')] |
magnusrodseth/data-structures-and-algorithms | searching/jump_search.py | 45dfdc0859683d5c76b82b87f415e2c0cdbc15e8 | import math
from typing import List
def jump_search(array: List[int], value: int) -> int:
"""
Performs a jump search on a list of integers.
:param array: is the array to search.
:param value: is the value to search.
    :return: the index of the value, or -1 if it doesn't exist.
"""
if len(array) == 0:
return -1
block_size = get_block_size(array)
# Pointers for traversing the array
start_pointer = 0
next_pointer = block_size
while (start_pointer < len(array)) and (array[next_pointer - 1] < value):
start_pointer = next_pointer
next_pointer += block_size
# Prevent next from going out of bounds
if next_pointer > len(array):
next_pointer = len(array)
# Linear search through the relevant block
for i in range(start_pointer, next_pointer):
if array[i] == value:
return i
return -1
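

# --- Editor's illustrative trace (addition): with array = [3, 5, 6, 9, 11, 18,
# --- 20, 21, 24, 30] and value = 21, block_size is 3; the loop probes
# --- array[2]=6, array[5]=18 and array[8]=24, so the final linear scan covers
# --- indices 6..8 and returns 7.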
def get_block_size(array: List[int]) -> int:
"""
Gets the block size of an array for jump search.
    The block size is the floor of the square root of the length of the array.
    This choice balances the number of block jumps against the length of the
    final linear scan, keeping both in O(sqrt(n)).
:param array: is the array to search.
:return: the block size to be used in jump search.
"""
    return math.floor(math.sqrt(len(array)))  # sqrt of a length is never negative
if __name__ == '__main__':
    # Array must be sorted in order for jump search to work
array = [3, 5, 6, 9, 11, 18, 20, 21, 24, 30]
print(array)
    index = jump_search(array, 31)  # 31 is absent, so this prints -1
print(index)
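
    # --- Editor's illustrative addition: sanity-check jump_search against
    # --- list.index over the same sorted array.
    for target in array:
        assert jump_search(array, target) == array.index(target)
    assert jump_search(array, 4) == -1  # absent value
    print("All sanity checks passed")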
| [] |
mjneff2/Pincer | pincer/objects/message/sticker.py | a11bc3e4bad319fdf927d913c58c933576ec7c99 | # Copyright Pincer 2021-Present
# Full MIT License can be found in `LICENSE` at the project root.
from __future__ import annotations
from dataclasses import dataclass
from enum import IntEnum
from typing import List, Optional, TYPE_CHECKING
from ...utils.api_object import APIObject
from ...utils.types import MISSING
if TYPE_CHECKING:
from ..user import User
from ...utils import APINullable, Snowflake
class StickerType(IntEnum):
"""
    Indicates where the sticker comes from.
:param STANDARD:
Sticker is included in the default Discord sticker pack.
:param GUILD:
        Sticker is a custom sticker from a Discord server.
"""
STANDARD = 1
GUILD = 2
class StickerFormatType(IntEnum):
"""
    The format type of the sticker.
:param PNG:
Sticker is of PNG format.
:param APNG:
Sticker is animated with APNG format.
:param LOTTIE:
        Sticker is animated with the LOTTIE format (vector based).
"""
PNG = 1
APNG = 2
LOTTIE = 3
@dataclass
class Sticker(APIObject):
"""
Represents a Discord sticker.
:param description:
description of the sticker
:param format_type:
type of sticker format
:param id:
id of the sticker
:param name:
name of the sticker
:param tags:
for guild stickers, the Discord name of a unicode emoji
representing the sticker's expression. For standard stickers,
a comma-separated list of related expressions.
:param type:
type of sticker
:param available:
whether this guild sticker can be used,
may be false due to loss of Server Boosts
:param guild_id:
id of the guild that owns this sticker
:param pack_id:
for standard stickers, id of the pack the sticker is from
:param sort_value:
the standard sticker's sort order within its pack
:param user:
the user that uploaded the guild sticker
"""
description: Optional[str]
format_type: StickerFormatType
id: Snowflake
name: str
tags: str
type: StickerType
available: APINullable[bool] = MISSING
guild_id: APINullable[Snowflake] = MISSING
pack_id: APINullable[Snowflake] = MISSING
sort_value: APINullable[int] = MISSING
user: APINullable[User] = MISSING
@dataclass
class StickerItem(APIObject):
"""
Represents the smallest amount of data required to render a sticker.
A partial sticker object.
:param id:
id of the sticker
:param name:
name of the sticker
:param format_type:
type of sticker format
"""
id: Snowflake
name: str
format_type: StickerFormatType
@dataclass
class StickerPack(APIObject):
"""
Represents a pack of standard stickers.
:param id:
id of the sticker pack
:param stickers:
the stickers in the pack
:param name:
name of the sticker pack
:param sku_id:
id of the pack's SKU
:param description:
description of the sticker pack
:param cover_sticker_id:
id of a sticker in the pack which is shown as the pack's icon
:param banner_asset_id:
id of the sticker pack's banner image
"""
id: Snowflake
stickers: List[Sticker]
name: str
sku_id: Snowflake
description: str
cover_sticker_id: APINullable[Snowflake] = MISSING
banner_asset_id: APINullable[Snowflake] = MISSING
| [] |
yayunl/llfselfhelp | app/core/utils.py | 55994003ad51e1664a733c84ed6afcb9f28b6499 | from django.views.generic import \
UpdateView as BaseUpdateView
class UpdateView(BaseUpdateView):
template_name_suffix = '_form_update'
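    # With this suffix, template resolution for a model named e.g. Article
    # looks for "<app_label>/article_form_update.html" instead of the default
    # "<app_label>/article_form.html" used by the stock UpdateView.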
| [] |
zhanwj/multi-task-pytorch | demo/test_bug_3d.py | 7d57645ec8be0ca0c258cfa99fb788e3cd37f106 | import torch
import lib.modeling.resnet as resnet
import lib.modeling.semseg_heads as snet
import torch.nn as nn
import torch.optim as optim
import utils.resnet_weights_helper as resnet_utils
from torch.autograd import Variable
from roi_data.loader import RoiDataLoader, MinibatchSampler, collate_minibatch, collate_minibatch_semseg
from datasets.roidb import combined_roidb_for_training, combined_roidb_for_training_semseg
import os
import numpy as np
import nn as mynn
import cv2
from modeling.model_builder_3DSD import Generalized_3DSD
from modeling.model_builder_PSP3D import DispSeg
from core.config import cfg, cfg_from_file, cfg_from_list, assert_and_infer_cfg
# Load the network (encoder + decoder) used in this test.
class load_net(nn.Module):
def __init__(self):
super(load_net, self).__init__()
build=snet.ModelBuilder()
fc_dim = 2048
self.encoder = build.build_encoder(
arch= 'resnet50_dilated8',
fc_dim=fc_dim)
self.decoder = build.build_decoder(
arch = 'ppm_bilinear',
num_class=19,
fc_dim=fc_dim,
use_softmax=False)
def _init_modules(self):
resnet_utils.load_pretrained_imagenet_weights(self)
def forward(self, data):
pred=self.decoder(self.encoder(data, return_feature_maps=True))
pred = nn.functional.interpolate(
pred, size=[128,128],
mode='bilinear', align_corners=False)
pred = nn.functional.log_softmax(pred, dim=1)
return pred
def dataloader(bs, gpus):
inputs = {}
inputs['data'] = Variable(torch.randn(2*bs, 3, 128, 128)).to('cuda')
inputs['semseg_label_0'] = Variable(torch.LongTensor(
np.random.randint(0, 19, (bs, 128//8, 128//8), dtype=np.long))).to('cuda')
inputs['disp_label_0'] = Variable(torch.rand(bs, 128//8, 128//8)).to('cuda')
inputs['disp_scans'] = Variable(torch.arange(0,
cfg.DISP.MAX_DISPLACEMENT).float().view(1,cfg.DISP.MAX_DISPLACEMENT,1,1).repeat(bs,1,1,1)).to('cuda')
inputs['semseg_scans'] = Variable(torch.arange(0,
cfg.MODEL.NUM_CLASSES).float().view(1, cfg.MODEL.NUM_CLASSES, 1, 1).repeat(bs,1,1,1)).to('cuda')
return inputs
cfg_file = 'e2e_segdisp-R-50_3Dpool_1x.yaml'
cfg_from_file(cfg_file)
print(cfg.SEM)
print(cfg.DISP)
#cfg_from_list(cfg_file)
#assert_and_infer_cfg()
devices_ids=[5]
os.environ["CUDA_VISIBLE_DEVICES"] = ','.join([str(ids) for ids in devices_ids])
torch.backends.cudnn.benchmark=True
#torch.cuda.set_device(3)
len_gpus = len(devices_ids)
batch_size = 2 * len_gpus
#net = mynn.DataParallel(load_net().to('cuda'), minibatch=True)
net = mynn.DataParallel(DispSeg().to('cuda'), minibatch=True)
optimizer = optim.SGD(net.parameters(), lr=0.000875, momentum=0.9)
criterion = nn.NLLLoss(ignore_index=255)
#dataloader= dataloader(batch_size, len_gpus)
for i in range(10):
#for i, inputs in zip(range(1000), dataloader):
inputs = dataloader(batch_size, len_gpus)
for key in inputs:
inputs[key] = torch.chunk(inputs[key], chunks=len_gpus, dim=0)
optimizer.zero_grad()
    loss = net(**inputs)
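    # Note: there is no backward() call here, so optimizer.step() applies no
    # gradient update; this loop only exercises the model's forward pass.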
optimizer.step()
for k in loss['losses'].keys():
        print(loss['losses'][k].item())
| [((2264, 2287), 'core.config.cfg_from_file', 'cfg_from_file', (['cfg_file'], {}), '(cfg_file)\n', (2277, 2287), False, 'from core.config import cfg, cfg_from_file, cfg_from_list, assert_and_infer_cfg\n'), ((2788, 2816), 'torch.nn.NLLLoss', 'nn.NLLLoss', ([], {'ignore_index': '(255)'}), '(ignore_index=255)\n', (2798, 2816), True, 'import torch.nn as nn\n'), ((786, 805), 'lib.modeling.semseg_heads.ModelBuilder', 'snet.ModelBuilder', ([], {}), '()\n', (803, 805), True, 'import lib.modeling.semseg_heads as snet\n'), ((1162, 1213), 'utils.resnet_weights_helper.load_pretrained_imagenet_weights', 'resnet_utils.load_pretrained_imagenet_weights', (['self'], {}), '(self)\n', (1207, 1213), True, 'import utils.resnet_weights_helper as resnet_utils\n'), ((1340, 1430), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['pred'], {'size': '[128, 128]', 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(pred, size=[128, 128], mode='bilinear',\n align_corners=False)\n", (1365, 1430), True, 'import torch.nn as nn\n'), ((1474, 1512), 'torch.nn.functional.log_softmax', 'nn.functional.log_softmax', (['pred'], {'dim': '(1)'}), '(pred, dim=1)\n', (1499, 1512), True, 'import torch.nn as nn\n'), ((3022, 3070), 'torch.chunk', 'torch.chunk', (['inputs[key]'], {'chunks': 'len_gpus', 'dim': '(0)'}), '(inputs[key], chunks=len_gpus, dim=0)\n', (3033, 3070), False, 'import torch\n'), ((2671, 2680), 'modeling.model_builder_PSP3D.DispSeg', 'DispSeg', ([], {}), '()\n', (2678, 2680), False, 'from modeling.model_builder_PSP3D import DispSeg\n'), ((1605, 1637), 'torch.randn', 'torch.randn', (['(2 * bs)', '(3)', '(128)', '(128)'], {}), '(2 * bs, 3, 128, 128)\n', (1616, 1637), False, 'import torch\n'), ((1827, 1861), 'torch.rand', 'torch.rand', (['bs', '(128 // 8)', '(128 // 8)'], {}), '(bs, 128 // 8, 128 // 8)\n', (1837, 1861), False, 'import torch\n'), ((1714, 1779), 'numpy.random.randint', 'np.random.randint', (['(0)', '(19)', '(bs, 128 // 8, 128 // 8)'], {'dtype': 'np.long'}), '(0, 19, (bs, 128 // 8, 128 // 8), dtype=np.long)\n', (1731, 1779), True, 'import numpy as np\n'), ((1907, 1949), 'torch.arange', 'torch.arange', (['(0)', 'cfg.DISP.MAX_DISPLACEMENT'], {}), '(0, cfg.DISP.MAX_DISPLACEMENT)\n', (1919, 1949), False, 'import torch\n'), ((2075, 2113), 'torch.arange', 'torch.arange', (['(0)', 'cfg.MODEL.NUM_CLASSES'], {}), '(0, cfg.MODEL.NUM_CLASSES)\n', (2087, 2113), False, 'import torch\n')] |
crijke/regenesis | regenesis/modelgen.py | e53a0c6302aa458ff9ae95f573d5594351e5434c | import json
from regenesis.queries import get_cubes, get_all_dimensions, get_dimensions
from pprint import pprint
def generate_dimensions():
dimensions = []
for dimension in get_all_dimensions():
        pprint(dimension)
if dimension.get('measure_type').startswith('W-'):
continue
attrs = ['name', 'label']
if 'ZI' in dimension.get('measure_type'):
attrs = ['text', 'from', 'until']
dim = {
'name': dimension.get('name'),
'label': dimension.get('title_de'),
'description': dimension.get('definition_de'),
'attributes': attrs
}
dimensions.append(dim)
return dimensions
def generate_cubes():
cubes = []
for cube in get_cubes():
dimensions = []
measures = []
joins = []
mappings = {}
cube_name = cube.get('cube_name')
for dim in get_dimensions(cube_name):
dn = dim.get('dim_name')
if dim.get('dim_measure_type').startswith('W-'):
measures.append(dn)
continue
dimensions.append(dn)
if dim.get('dim_measure_type').startswith('ZI-'):
mappings[dn + '.text'] = 'fact_%s.%s' % (cube_name, dn)
mappings[dn + '.from'] = 'fact_%s.%s_from' % (cube_name, dn)
mappings[dn + '.until'] = 'fact_%s.%s_until' % (cube_name, dn)
else:
tn = 'tbl_' + dn
joins.append({
'master': dn,
'detail': 'value.value_id',
'alias': tn
})
mappings[dn + '.name'] = tn + '.name'
mappings[dn + '.label'] = tn + '.title_de'
cubes.append({
'dimensions': dimensions,
'measures': measures,
'mappings': mappings,
'joins': joins,
'fact': 'fact_%s' % cube_name,
'name': cube.get('cube_name'),
'label': cube.get('statistic_title_de'),
'description': cube.get('statistic_description_de'),
})
return cubes
def generate_model():
model = {
'dimensions': generate_dimensions(),
'cubes': generate_cubes(),
'locale': 'de'
}
pprint(model)
return model
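# The structure above appears to match the model schema of the DataBrewery
# "cubes" OLAP framework (top-level "dimensions" and "cubes" entries with
# fact/mappings/joins); the generated model.json would be consumed there.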
if __name__ == '__main__':
    # json.dump writes str, so open the file in text mode.
    with open('model.json', 'w') as fh:
model = generate_model()
json.dump(model, fh, indent=2)
| [((184, 204), 'regenesis.queries.get_all_dimensions', 'get_all_dimensions', ([], {}), '()\n', (202, 204), False, 'from regenesis.queries import get_cubes, get_all_dimensions, get_dimensions\n'), ((759, 770), 'regenesis.queries.get_cubes', 'get_cubes', ([], {}), '()\n', (768, 770), False, 'from regenesis.queries import get_cubes, get_all_dimensions, get_dimensions\n'), ((2333, 2346), 'pprint.pprint', 'pprint', (['model'], {}), '(model)\n', (2339, 2346), False, 'from pprint import pprint\n'), ((214, 231), 'pprint.pprint', 'pprint', (['dimension'], {}), '(dimension)\n', (220, 231), False, 'from pprint import pprint\n'), ((920, 945), 'regenesis.queries.get_dimensions', 'get_dimensions', (['cube_name'], {}), '(cube_name)\n', (934, 945), False, 'from regenesis.queries import get_cubes, get_all_dimensions, get_dimensions\n'), ((2479, 2509), 'json.dump', 'json.dump', (['model', 'fh'], {'indent': '(2)'}), '(model, fh, indent=2)\n', (2488, 2509), False, 'import json\n')] |
liangleslie/core | tests/components/evil_genius_labs/test_light.py | cc807b4d597daaaadc92df4a93c6e30da4f570c6 | """Test Evil Genius Labs light."""
from unittest.mock import patch
import pytest
from homeassistant.components.light import (
ATTR_COLOR_MODE,
ATTR_SUPPORTED_COLOR_MODES,
ColorMode,
LightEntityFeature,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES
@pytest.mark.parametrize("platforms", [("light",)])
async def test_works(hass, setup_evil_genius_labs):
"""Test it works."""
state = hass.states.get("light.fibonacci256_23d4")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == 128
assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB
assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB]
assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT
@pytest.mark.parametrize("platforms", [("light",)])
async def test_turn_on_color(hass, setup_evil_genius_labs):
"""Test turning on with a color."""
with patch(
"pyevilgenius.EvilGeniusDevice.set_path_value"
) as mock_set_path_value, patch(
"pyevilgenius.EvilGeniusDevice.set_rgb_color"
) as mock_set_rgb_color:
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.fibonacci256_23d4",
"brightness": 100,
"rgb_color": (10, 20, 30),
},
blocking=True,
)
assert len(mock_set_path_value.mock_calls) == 2
mock_set_path_value.mock_calls[0][1] == ("brightness", 100)
mock_set_path_value.mock_calls[1][1] == ("power", 1)
assert len(mock_set_rgb_color.mock_calls) == 1
mock_set_rgb_color.mock_calls[0][1] == (10, 20, 30)
@pytest.mark.parametrize("platforms", [("light",)])
async def test_turn_on_effect(hass, setup_evil_genius_labs):
"""Test turning on with an effect."""
with patch("pyevilgenius.EvilGeniusDevice.set_path_value") as mock_set_path_value:
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.fibonacci256_23d4",
"effect": "Pride Playground",
},
blocking=True,
)
assert len(mock_set_path_value.mock_calls) == 2
mock_set_path_value.mock_calls[0][1] == ("pattern", 4)
mock_set_path_value.mock_calls[1][1] == ("power", 1)
@pytest.mark.parametrize("platforms", [("light",)])
async def test_turn_off(hass, setup_evil_genius_labs):
"""Test turning off."""
with patch("pyevilgenius.EvilGeniusDevice.set_path_value") as mock_set_path_value:
await hass.services.async_call(
"light",
"turn_off",
{
"entity_id": "light.fibonacci256_23d4",
},
blocking=True,
)
assert len(mock_set_path_value.mock_calls) == 1
mock_set_path_value.mock_calls[0][1] == ("power", 0)
| [((281, 331), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""platforms"""', "[('light',)]"], {}), "('platforms', [('light',)])\n", (304, 331), False, 'import pytest\n'), ((795, 845), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""platforms"""', "[('light',)]"], {}), "('platforms', [('light',)])\n", (818, 845), False, 'import pytest\n'), ((1706, 1756), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""platforms"""', "[('light',)]"], {}), "('platforms', [('light',)])\n", (1729, 1756), False, 'import pytest\n'), ((2371, 2421), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""platforms"""', "[('light',)]"], {}), "('platforms', [('light',)])\n", (2394, 2421), False, 'import pytest\n'), ((955, 1008), 'unittest.mock.patch', 'patch', (['"""pyevilgenius.EvilGeniusDevice.set_path_value"""'], {}), "('pyevilgenius.EvilGeniusDevice.set_path_value')\n", (960, 1008), False, 'from unittest.mock import patch\n'), ((1047, 1099), 'unittest.mock.patch', 'patch', (['"""pyevilgenius.EvilGeniusDevice.set_rgb_color"""'], {}), "('pyevilgenius.EvilGeniusDevice.set_rgb_color')\n", (1052, 1099), False, 'from unittest.mock import patch\n'), ((1869, 1922), 'unittest.mock.patch', 'patch', (['"""pyevilgenius.EvilGeniusDevice.set_path_value"""'], {}), "('pyevilgenius.EvilGeniusDevice.set_path_value')\n", (1874, 1922), False, 'from unittest.mock import patch\n'), ((2514, 2567), 'unittest.mock.patch', 'patch', (['"""pyevilgenius.EvilGeniusDevice.set_path_value"""'], {}), "('pyevilgenius.EvilGeniusDevice.set_path_value')\n", (2519, 2567), False, 'from unittest.mock import patch\n')] |
joshbode/python-on-whales | python_on_whales/download_binaries.py | 4d5b8b4c5c6dc3ac0af5713e4fe5a72788f44cda | import platform
import shutil
import tempfile
import warnings
from pathlib import Path
import requests
from tqdm import tqdm
DOCKER_VERSION = "20.10.5"
BUILDX_VERSION = "0.5.1"
CACHE_DIR = Path.home() / ".cache" / "python-on-whales"
TEMPLATE_CLI = (
"https://download.docker.com/{os}/static/stable/{arch}/docker-{version}.tgz"
)
WINDOWS_CLI_URL = "https://github.com/StefanScherer/docker-cli-builder/releases/download/{version}/docker.exe"
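# Example of a resolved URL for 64-bit Linux:
# https://download.docker.com/linux/static/stable/x86_64/docker-20.10.5.tgz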
def get_docker_binary_path_in_cache():
return CACHE_DIR / "docker-cli" / DOCKER_VERSION / "docker"
def get_docker_cli_url():
user_os = get_user_os()
if user_os == "windows":
return WINDOWS_CLI_URL.format(version=DOCKER_VERSION)
arch = get_arch_for_docker_cli_url()
return TEMPLATE_CLI.format(os=user_os, arch=arch, version=DOCKER_VERSION)
def download_docker_cli():
file_to_download = get_docker_cli_url()
extension = file_to_download.split(".")[-1]
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_dir = Path(tmp_dir)
downloaded_file_path = tmp_dir / f"docker.{extension}"
download_from_url(file_to_download, downloaded_file_path)
docker_binary_path = get_docker_binary_path_in_cache()
docker_binary_path.parent.mkdir(exist_ok=True, parents=True)
if extension == "tgz":
extract_dir = tmp_dir / "extracted"
shutil.unpack_archive(str(downloaded_file_path), str(extract_dir))
shutil.move(extract_dir / "docker" / "docker", docker_binary_path)
elif extension == "exe":
shutil.move(downloaded_file_path, docker_binary_path)
warnings.warn(
f"The docker client binary file {DOCKER_VERSION} was downloaded and put "
f"in `{docker_binary_path.absolute()}`. \n"
f"You can feel free to remove it if you wish, Python on whales will download "
f"it again if needed."
)
def download_from_url(url, dst):
try:
_download_from_url(url, dst)
except Exception as e:
raise ConnectionError(f"Error while downloading {url}") from e
def _download_from_url(url, dst):
# Streaming, so we can iterate over the response.
response = requests.get(url, stream=True)
total_size_in_bytes = int(response.headers.get("content-length", 0))
block_size = 1024
progress_bar = tqdm(total=total_size_in_bytes, unit="iB", unit_scale=True)
with open(dst, "wb") as file:
for data in response.iter_content(block_size):
progress_bar.update(len(data))
file.write(data)
progress_bar.close()
if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
raise ConnectionError(
f"Total size should be {total_size_in_bytes}, downloaded {progress_bar.n}"
)
def get_user_os():
user_os = platform.system()
if user_os == "Linux":
return "linux"
elif user_os == "Darwin":
return "mac"
elif user_os == "Windows":
return "windows"
else:
raise NotImplementedError(
f"Unknown OS: {user_os}, cannot determine which Docker CLI binary file to "
f"download. \n"
f"Please open an issue at \n"
f"https://github.com/gabrieldemarmiesse/python-on-whales/issues \n"
f"and in the meantime, install Docker manually to make python-on-whales "
f"work."
)
def get_arch_for_docker_cli_url():
arch = platform.architecture()[0]
# I don't know the exact list of possible architectures,
# so if a user reports a NotImplementedError, we can easily add
# his/her platform here.
arch_mapping = {
"NotImplementedError": "aarch64",
"NotImplementedError2": "armel",
"NotImplementedError3": "armhf",
"NotImplementedError4": "ppc64le",
"NotImplementedError5": "s390x",
"64bit": "x86_64",
}
try:
return arch_mapping[arch]
except KeyError:
raise NotImplementedError(
f"The architecture detected on your system is `{arch}`, the list of "
f"available architectures is {list(arch_mapping.values())}. \n"
f"Please open an issue at \n"
f"https://github.com/gabrieldemarmiesse/python-on-whales/issues "
f"and make sure to copy past this error message. \n"
f"In the meantime, install Docker manually on your system."
)
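if __name__ == "__main__":
    # Minimal manual check (assumes network access): downloads the pinned
    # Docker CLI into the user cache and prints the cached binary path.
    download_docker_cli()
    print(get_docker_binary_path_in_cache())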
| [((2185, 2215), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (2197, 2215), False, 'import requests\n'), ((2330, 2389), 'tqdm.tqdm', 'tqdm', ([], {'total': 'total_size_in_bytes', 'unit': '"""iB"""', 'unit_scale': '(True)'}), "(total=total_size_in_bytes, unit='iB', unit_scale=True)\n", (2334, 2389), False, 'from tqdm import tqdm\n'), ((2814, 2831), 'platform.system', 'platform.system', ([], {}), '()\n', (2829, 2831), False, 'import platform\n'), ((192, 203), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (201, 203), False, 'from pathlib import Path\n'), ((950, 979), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (977, 979), False, 'import tempfile\n'), ((1010, 1023), 'pathlib.Path', 'Path', (['tmp_dir'], {}), '(tmp_dir)\n', (1014, 1023), False, 'from pathlib import Path\n'), ((3437, 3460), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (3458, 3460), False, 'import platform\n'), ((1457, 1523), 'shutil.move', 'shutil.move', (["(extract_dir / 'docker' / 'docker')", 'docker_binary_path'], {}), "(extract_dir / 'docker' / 'docker', docker_binary_path)\n", (1468, 1523), False, 'import shutil\n'), ((1569, 1622), 'shutil.move', 'shutil.move', (['downloaded_file_path', 'docker_binary_path'], {}), '(downloaded_file_path, docker_binary_path)\n', (1580, 1622), False, 'import shutil\n')] |
chriscoombs/aws-builders-fair-projects | reinvent-2019/connected-photo-booth/lambda_code/Cerebro_GetQRCode.py | eee405931030b833fa8c51e906c73d09ce051bcd | import boto3
import json
import os
import logging
from contextlib import closing
from boto3.dynamodb.conditions import Key, Attr
from botocore.exceptions import ClientError
from random import shuffle
import time
import pyqrcode
import png
__BUCKET_NAME__ = "project-cerebro"
dynamo = boto3.client('dynamodb')
logger = None
print("In initialize fn ...")
logger = logging.getLogger()
if int(os.environ['DEBUG_MODE']):
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.info("Initialize: Just a test")
logger.debug("Initialize: debug a test")
def create_presigned_url(bucket_name, object_name, expiration=3600):
"""Generate a presigned URL to share an S3 object
:param bucket_name: string
:param object_name: string
:param expiration: Time in seconds for the presigned URL to remain valid
:return: Presigned URL as string. If error, returns None.
"""
# Generate a presigned URL for the S3 object
s3_client = boto3.client('s3')
try:
response = s3_client.generate_presigned_url('get_object',
Params={'Bucket': bucket_name,
'Key': object_name},
ExpiresIn=expiration)
except ClientError as e:
logging.error(e)
return None
# The response contains the presigned URL
return response
def respond(err, res=None):
return {
'statusCode': '400' if err else '200',
'body': err.message if err else json.dumps(res),
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*'
},
}
# input parameters are:
# 1. image ID
# output parameters are:
# 1. generated QRCode
# workflow:
# 1. first get the image_id
# 2. confirm this exists in s3
# 3. generate a presigned URL with this s3 path
# 4. create a QR Code image with this url embedded
# 5. return the QR code stored in S3 temp.
def main(event, context):
logger.info("In main ...")
start_time = int(round(time.time() * 1000))
body_params = json.loads(event["body"])
logger.debug("Body params:")
logger.debug(body_params)
response_data = {}
# 1. get the image_id
if "image_id" in body_params:
image_id = body_params["image_id"]
# prefix and check for existence
s3_prefix = "production/%s" % image_id
# 2. check for the object in s3
s3 = boto3.resource('s3')
s3_object = s3.Object(__BUCKET_NAME__, s3_prefix)
        # Object.load() fetches the object's metadata (raising a ClientError
        # if the key does not exist) but returns None, so read an attribute of
        # the loaded object for the existence check below.
        s3_object.load()
        obj_metadata = s3_object.content_length
        logger.info("metadata found, content length: %s", obj_metadata)
if obj_metadata:
response_data["s3_image"] = s3_prefix
# 3. generate the presigned url
presigned_url = create_presigned_url(bucket_name = __BUCKET_NAME__, object_name=s3_prefix, expiration=5*60)
logger.info("generated the presigned URL:")
logger.info(presigned_url)
if presigned_url:
response_data["presigned_url"] = presigned_url
logger.info("assigned presigned url")
# 4. generate the qrcode, convert to png
url = pyqrcode.create(presigned_url)
url.png('/tmp/code.png', scale=5)
logger.info("Created a png file by now!")
# 5. save to s3
target_file='/tmp/code.png'
qrcode_key = "qrcodes/current_qrcode.png"
logger.info("Now trying to put s3 object ...")
# Create an S3 client
s3 = boto3.client('s3')
response = s3.put_object(
Body=open(target_file, 'rb'),
Bucket=__BUCKET_NAME__,
Key=qrcode_key)
logger.info("Now trying to put s3 object - completed!")
response_data["qrcode_key"] = qrcode_key
else:
response_data["result"] = "Failure"
return respond(None, response_data)
end_time = int(round(time.time() * 1000))
logger.info("Time Taken: %f" % (end_time - start_time))
logger.info("Done with main!")
response_data["result"] = "Success"
response_data["time_taken"] = str(end_time - start_time)
return respond(None, response_data)
def lambda_handler(event, context):
return main(event, context)
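if __name__ == '__main__':
    # Local smoke test with a hypothetical event shape; requires AWS
    # credentials, the DEBUG_MODE environment variable, and an existing
    # "production/<image_id>" object in the project-cerebro bucket.
    test_event = {"body": json.dumps({"image_id": "example.jpg"})}
    print(lambda_handler(test_event, None))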
| [((291, 315), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {}), "('dynamodb')\n", (303, 315), False, 'import boto3\n'), ((371, 390), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (388, 390), False, 'import logging\n'), ((989, 1007), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1001, 1007), False, 'import boto3\n'), ((2172, 2197), 'json.loads', 'json.loads', (["event['body']"], {}), "(event['body'])\n", (2182, 2197), False, 'import json\n'), ((2530, 2550), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (2544, 2550), False, 'import boto3\n'), ((1358, 1374), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (1371, 1374), False, 'import logging\n'), ((1591, 1606), 'json.dumps', 'json.dumps', (['res'], {}), '(res)\n', (1601, 1606), False, 'import json\n'), ((3298, 3328), 'pyqrcode.create', 'pyqrcode.create', (['presigned_url'], {}), '(presigned_url)\n', (3313, 3328), False, 'import pyqrcode\n'), ((3663, 3681), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (3675, 3681), False, 'import boto3\n'), ((2132, 2143), 'time.time', 'time.time', ([], {}), '()\n', (2141, 2143), False, 'import time\n'), ((4087, 4098), 'time.time', 'time.time', ([], {}), '()\n', (4096, 4098), False, 'import time\n')] |
charlesmchen/typefacet | dependencies/svgwrite/tests/test_drawing.py | 8c6db26d0c599ece16f3704696811275120a4044 | #!/usr/bin/env python
#coding:utf-8
# Author: mozman --<[email protected]>
# Purpose: test drawing module
# Created: 11.09.2010
# Copyright (C) 2010, Manfred Moitzi
# License: GPLv3
from __future__ import unicode_literals
import os
import unittest
from io import StringIO
from svgwrite.drawing import Drawing
from svgwrite.container import Group
class TestDrawingFullProfile(unittest.TestCase):
def test_empty_drawing(self):
dwg = Drawing()
result = dwg.tostring()
self.assertEqual(result, '<svg baseProfile="full" height="100%" version="1.1" '\
'width="100%" xmlns="http://www.w3.org/2000/svg" '\
'xmlns:ev="http://www.w3.org/2001/xml-events" '\
'xmlns:xlink="http://www.w3.org/1999/xlink"><defs /></svg>')
def test_stylesheet(self):
dwg = Drawing()
dwg.add_stylesheet('test.css', 'Test')
f = StringIO()
dwg.write(f)
result = f.getvalue()
f.close()
self.assertEqual(result, '<?xml version="1.0" encoding="utf-8" ?>\n' \
'<?xml-stylesheet href="test.css" type="text/css" title="Test" alternate="no" media="screen"?>\n'
'<svg baseProfile="full" height="100%" version="1.1" width="100%" '\
'xmlns="http://www.w3.org/2000/svg" xmlns:ev="http://www.w3.org/2001/xml-events" '\
'xmlns:xlink="http://www.w3.org/1999/xlink"><defs /></svg>')
def test_save(self):
fn = 'test_drawing.svg'
if os.path.exists(fn):
os.remove(fn)
dwg = Drawing(fn)
dwg.save()
self.assertTrue(os.path.exists(fn))
os.remove(fn)
def test_save_as(self):
fn = 'test_drawing.svg'
if os.path.exists(fn):
os.remove(fn)
dwg = Drawing()
dwg.saveas(fn)
self.assertTrue(os.path.exists(fn))
os.remove(fn)
def test_non_us_ascii_chars(self):
dwg = Drawing()
dwg.set_desc('öäü')
f = StringIO()
dwg.write(f)
result = f.getvalue()
f.close()
self.assertEqual(result,
'<?xml version="1.0" encoding="utf-8" ?>\n' \
'<svg baseProfile="full" height="100%" version="1.1" width="100%" '\
'xmlns="http://www.w3.org/2000/svg" xmlns:ev="http://www.w3.org/2001/xml-events" '\
'xmlns:xlink="http://www.w3.org/1999/xlink">'
'<title>öäü</title><defs /></svg>')
class TestDrawingTinyProfile(unittest.TestCase):
def test_empty_drawing(self):
dwg = Drawing(profile="tiny")
result = dwg.tostring()
self.assertEqual(result, '<svg baseProfile="tiny" height="100%" version="1.2" '\
'width="100%" xmlns="http://www.w3.org/2000/svg" '\
'xmlns:ev="http://www.w3.org/2001/xml-events" '\
'xmlns:xlink="http://www.w3.org/1999/xlink"><defs /></svg>')
def test_stylesheet(self):
dwg = Drawing(profile="tiny")
dwg.add_stylesheet('test.css', 'Test')
f = StringIO()
dwg.write(f)
result = f.getvalue()
f.close()
self.assertEqual(result, '<?xml version="1.0" encoding="utf-8" ?>\n' \
'<?xml-stylesheet href="test.css" type="text/css" title="Test" alternate="no" media="screen"?>\n'
'<svg baseProfile="tiny" height="100%" version="1.2" width="100%" '\
'xmlns="http://www.w3.org/2000/svg" '\
'xmlns:ev="http://www.w3.org/2001/xml-events" '\
'xmlns:xlink="http://www.w3.org/1999/xlink"><defs /></svg>')
class TestDefs(unittest.TestCase):
def test_simple_defs(self):
dwg = Drawing()
g = dwg.defs.add(Group(id='test'))
inner_g = g.add(Group(id='innerTest'))
result = dwg.tostring()
self.assertEqual(result, '<svg baseProfile="full" height="100%" version="1.1" '\
'width="100%" xmlns="http://www.w3.org/2000/svg" '\
'xmlns:ev="http://www.w3.org/2001/xml-events" '\
'xmlns:xlink="http://www.w3.org/1999/xlink">' \
'<defs><g id="test"><g id="innerTest" /></g></defs></svg>')
if __name__ == '__main__':
unittest.main()
| [((4224, 4239), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4237, 4239), False, 'import unittest\n'), ((461, 470), 'svgwrite.drawing.Drawing', 'Drawing', ([], {}), '()\n', (468, 470), False, 'from svgwrite.drawing import Drawing\n'), ((844, 853), 'svgwrite.drawing.Drawing', 'Drawing', ([], {}), '()\n', (851, 853), False, 'from svgwrite.drawing import Drawing\n'), ((915, 925), 'io.StringIO', 'StringIO', ([], {}), '()\n', (923, 925), False, 'from io import StringIO\n'), ((1515, 1533), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (1529, 1533), False, 'import os\n'), ((1577, 1588), 'svgwrite.drawing.Drawing', 'Drawing', (['fn'], {}), '(fn)\n', (1584, 1588), False, 'from svgwrite.drawing import Drawing\n'), ((1663, 1676), 'os.remove', 'os.remove', (['fn'], {}), '(fn)\n', (1672, 1676), False, 'import os\n'), ((1753, 1771), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (1767, 1771), False, 'import os\n'), ((1815, 1824), 'svgwrite.drawing.Drawing', 'Drawing', ([], {}), '()\n', (1822, 1824), False, 'from svgwrite.drawing import Drawing\n'), ((1903, 1916), 'os.remove', 'os.remove', (['fn'], {}), '(fn)\n', (1912, 1916), False, 'import os\n'), ((1974, 1983), 'svgwrite.drawing.Drawing', 'Drawing', ([], {}), '()\n', (1981, 1983), False, 'from svgwrite.drawing import Drawing\n'), ((2026, 2036), 'io.StringIO', 'StringIO', ([], {}), '()\n', (2034, 2036), False, 'from io import StringIO\n'), ((2591, 2614), 'svgwrite.drawing.Drawing', 'Drawing', ([], {'profile': '"""tiny"""'}), "(profile='tiny')\n", (2598, 2614), False, 'from svgwrite.drawing import Drawing\n'), ((2988, 3011), 'svgwrite.drawing.Drawing', 'Drawing', ([], {'profile': '"""tiny"""'}), "(profile='tiny')\n", (2995, 3011), False, 'from svgwrite.drawing import Drawing\n'), ((3073, 3083), 'io.StringIO', 'StringIO', ([], {}), '()\n', (3081, 3083), False, 'from io import StringIO\n'), ((3703, 3712), 'svgwrite.drawing.Drawing', 'Drawing', ([], {}), '()\n', (3710, 3712), False, 'from svgwrite.drawing import Drawing\n'), ((1548, 1561), 'os.remove', 'os.remove', (['fn'], {}), '(fn)\n', (1557, 1561), False, 'import os\n'), ((1634, 1652), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (1648, 1652), False, 'import os\n'), ((1786, 1799), 'os.remove', 'os.remove', (['fn'], {}), '(fn)\n', (1795, 1799), False, 'import os\n'), ((1874, 1892), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (1888, 1892), False, 'import os\n'), ((3739, 3755), 'svgwrite.container.Group', 'Group', ([], {'id': '"""test"""'}), "(id='test')\n", (3744, 3755), False, 'from svgwrite.container import Group\n'), ((3782, 3803), 'svgwrite.container.Group', 'Group', ([], {'id': '"""innerTest"""'}), "(id='innerTest')\n", (3787, 3803), False, 'from svgwrite.container import Group\n')] |
digitalfabrik/coldaid-backend | src/cms/views/error_handler/error_handler.py | b769510570d5921e30876565263813c0362994e2 | from django.shortcuts import render
from django.utils.translation import ugettext as _
# pylint: disable=unused-argument
def handler400(request, exception):
ctx = {'code': 400, 'title': _('Bad request'),
'message': _('There was an error in your request.')}
response = render(request, 'error_handler/http_error.html', ctx)
response.status_code = 400
return response
# pylint: disable=unused-argument
def handler403(request, exception):
ctx = {'code': 403, 'title': _('Forbidden'),
'message': _("You don't have the permission to access this page.")}
response = render(request, 'error_handler/http_error.html', ctx)
response.status_code = 403
return response
# pylint: disable=unused-argument
def handler404(request, exception):
ctx = {'code': 404, 'title': _('Page not found'),
'message': _('The page you requested could not be found.')}
response = render(request, 'error_handler/http_error.html', ctx)
response.status_code = 404
return response
# pylint: disable=unused-argument
def handler500(request):
ctx = {'code': 500, 'title': _('Internal Server Error'),
'message': _('An unexpected error has occurred.')}
response = render(request, 'error_handler/http_error.html', ctx)
response.status_code = 500
return response
# pylint: disable=unused-argument
def csrf_failure(request, reason):
return render(request, 'error_handler/csrf_failure.html')
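# These views take effect only when referenced from the project configuration,
# e.g. in the ROOT_URLCONF (dotted path assumed from this file's location):
#   handler400 = 'cms.views.error_handler.error_handler.handler400'
#   handler403 = 'cms.views.error_handler.error_handler.handler403'
#   handler404 = 'cms.views.error_handler.error_handler.handler404'
#   handler500 = 'cms.views.error_handler.error_handler.handler500'
# and, for the CSRF view, in settings:
#   CSRF_FAILURE_VIEW = 'cms.views.error_handler.error_handler.csrf_failure'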
| [((289, 342), 'django.shortcuts.render', 'render', (['request', '"""error_handler/http_error.html"""', 'ctx'], {}), "(request, 'error_handler/http_error.html', ctx)\n", (295, 342), False, 'from django.shortcuts import render\n'), ((612, 665), 'django.shortcuts.render', 'render', (['request', '"""error_handler/http_error.html"""', 'ctx'], {}), "(request, 'error_handler/http_error.html', ctx)\n", (618, 665), False, 'from django.shortcuts import render\n'), ((932, 985), 'django.shortcuts.render', 'render', (['request', '"""error_handler/http_error.html"""', 'ctx'], {}), "(request, 'error_handler/http_error.html', ctx)\n", (938, 985), False, 'from django.shortcuts import render\n'), ((1239, 1292), 'django.shortcuts.render', 'render', (['request', '"""error_handler/http_error.html"""', 'ctx'], {}), "(request, 'error_handler/http_error.html', ctx)\n", (1245, 1292), False, 'from django.shortcuts import render\n'), ((1429, 1479), 'django.shortcuts.render', 'render', (['request', '"""error_handler/csrf_failure.html"""'], {}), "(request, 'error_handler/csrf_failure.html')\n", (1435, 1479), False, 'from django.shortcuts import render\n'), ((192, 208), 'django.utils.translation.ugettext', '_', (['"""Bad request"""'], {}), "('Bad request')\n", (193, 208), True, 'from django.utils.translation import ugettext as _\n'), ((232, 272), 'django.utils.translation.ugettext', '_', (['"""There was an error in your request."""'], {}), "('There was an error in your request.')\n", (233, 272), True, 'from django.utils.translation import ugettext as _\n'), ((502, 516), 'django.utils.translation.ugettext', '_', (['"""Forbidden"""'], {}), "('Forbidden')\n", (503, 516), True, 'from django.utils.translation import ugettext as _\n'), ((540, 595), 'django.utils.translation.ugettext', '_', (['"""You don\'t have the permission to access this page."""'], {}), '("You don\'t have the permission to access this page.")\n', (541, 595), True, 'from django.utils.translation import ugettext as _\n'), ((825, 844), 'django.utils.translation.ugettext', '_', (['"""Page not found"""'], {}), "('Page not found')\n", (826, 844), True, 'from django.utils.translation import ugettext as _\n'), ((868, 915), 'django.utils.translation.ugettext', '_', (['"""The page you requested could not be found."""'], {}), "('The page you requested could not be found.')\n", (869, 915), True, 'from django.utils.translation import ugettext as _\n'), ((1134, 1160), 'django.utils.translation.ugettext', '_', (['"""Internal Server Error"""'], {}), "('Internal Server Error')\n", (1135, 1160), True, 'from django.utils.translation import ugettext as _\n'), ((1184, 1222), 'django.utils.translation.ugettext', '_', (['"""An unexpected error has occurred."""'], {}), "('An unexpected error has occurred.')\n", (1185, 1222), True, 'from django.utils.translation import ugettext as _\n')] |
trym-inc/django-msg | examples/ex3/app/models.py | 0b306524515a8fb4840d1a2ef8cf20901b64bc11 | from typing import NamedTuple
from django.contrib.auth.models import AbstractUser
from django.db import models
from msg.models import Msg
class User(AbstractUser):
phone_number: 'str' = models.CharField(max_length=255,
null=True, blank=True)
class HelloSMSMessage(NamedTuple):
phone_number: 'str'
username: 'str'
def send_hello_sms(self):
if not self.phone_number:
            raise ValueError('User has to have a phone number '
                             'to send an SMS message.')
hello = self.HelloSMSMessage(
username=self.username,
phone_number=self.phone_number,
)
Msg.new(hello, dispatch_now=True)
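# Example usage (sketch; assumes django-msg is configured with a handler for
# HelloSMSMessage and that migrations for both apps have been applied):
#   user = User.objects.create(username='alice', phone_number='+15551234567')
#   user.send_hello_sms()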
| [((194, 249), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (210, 249), False, 'from django.db import models\n'), ((704, 737), 'msg.models.Msg.new', 'Msg.new', (['hello'], {'dispatch_now': '(True)'}), '(hello, dispatch_now=True)\n', (711, 737), False, 'from msg.models import Msg\n')] |
csixteen/HackerRank | Data_Structures/2d_array_ds.py | 3ef6fa48599341f481b9e266c69df2d449a7b313 | matrix = [list(map(int, input().split())) for _ in range(6)]
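# HackerRank "2D Array - DS": read a 6x6 grid and print the maximum
# "hourglass" sum: three cells in a row, the cell below the middle one, and
# the three cells below that (there are 4 * 4 = 16 hourglasses in the grid).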
max_sum = None
for i in range(4):
for j in range(4):
s = sum(matrix[i][j:j+3]) + matrix[i+1][j+1] + sum(matrix[i+2][j:j+3])
if max_sum is None or s > max_sum:
max_sum = s
print(max_sum)
| [] |
jlopezNEU/scikit-learn | sklearn/utils/_bunch.py | 593495eebc3c2f2ffdb244036adf57fab707a47d | class Bunch(dict):
"""Container object exposing keys as attributes.
Bunch objects are sometimes used as an output for functions and methods.
They extend dictionaries by enabling values to be accessed by key,
`bunch["value_key"]`, or by an attribute, `bunch.value_key`.
Examples
--------
>>> from sklearn.utils import Bunch
>>> b = Bunch(a=1, b=2)
>>> b['b']
2
>>> b.b
2
>>> b.a = 3
>>> b['a']
3
>>> b.c = 6
>>> b['c']
6
"""
def __init__(self, **kwargs):
super().__init__(kwargs)
def __setattr__(self, key, value):
self[key] = value
def __dir__(self):
return self.keys()
def __getattr__(self, key):
try:
return self[key]
except KeyError:
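            # re-raise as AttributeError so attribute protocols such as
            # hasattr() and getattr() with a default behave as expected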
raise AttributeError(key)
def __setstate__(self, state):
        # Bunch pickles generated with scikit-learn 0.16.* have a non-empty
        # __dict__. This causes a surprising behaviour when loading these
        # pickles in scikit-learn 0.17: reading bunch.key uses __dict__ but
        # assigning to bunch.key uses __setattr__ and only changes
        # bunch['key']. More details can be found at:
        # https://github.com/scikit-learn/scikit-learn/issues/6196.
# Overriding __setstate__ to be a noop has the effect of
# ignoring the pickled __dict__
pass
| [] |
pingsutw/tfx | tfx/examples/chicago_taxi_pipeline/serving/chicago_taxi_client.py | bf0d1d74e3f6ea429989fc7b80b82bea08077857 | # Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A client for the chicago_taxi demo."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import base64
import json
import os
import subprocess
import tempfile
import requests
from tensorflow_transform import coders as tft_coders
from tensorflow_transform.tf_metadata import dataset_schema
from tensorflow_transform.tf_metadata import schema_utils
from google.protobuf import text_format
from tensorflow.python.lib.io import file_io # pylint: disable=g-direct-tensorflow-import
from tensorflow.python.platform import app # pylint: disable=g-direct-tensorflow-import
from tensorflow_metadata.proto.v0 import schema_pb2
from tfx.utils import io_utils
_LOCAL_INFERENCE_TIMEOUT_SECONDS = 5.0
_LABEL_KEY = 'tips'
# Tf.Transform considers these features as "raw"
def _get_raw_feature_spec(schema):
return schema_utils.schema_as_feature_spec(schema).feature_spec
def _make_proto_coder(schema):
raw_feature_spec = _get_raw_feature_spec(schema)
raw_schema = dataset_schema.from_feature_spec(raw_feature_spec)
return tft_coders.ExampleProtoCoder(raw_schema)
def _make_csv_coder(schema, column_names):
"""Return a coder for tf.transform to read csv files."""
raw_feature_spec = _get_raw_feature_spec(schema)
parsing_schema = dataset_schema.from_feature_spec(raw_feature_spec)
return tft_coders.CsvCoder(column_names, parsing_schema)
def _read_schema(path):
"""Reads a schema from the provided location.
Args:
path: The location of the file holding a serialized Schema proto.
Returns:
An instance of Schema or None if the input argument is None
"""
result = schema_pb2.Schema()
contents = file_io.read_file_to_string(path)
text_format.Parse(contents, result)
return result
def _do_local_inference(host, port, serialized_examples):
"""Performs inference on a model hosted by the host:port server."""
json_examples = []
for serialized_example in serialized_examples:
# The encoding follows the guidelines in:
# https://www.tensorflow.org/tfx/serving/api_rest
example_bytes = base64.b64encode(serialized_example).decode('utf-8')
predict_request = '{ "b64": "%s" }' % example_bytes
json_examples.append(predict_request)
json_request = '{ "instances": [' + ','.join(map(str, json_examples)) + ']}'
server_url = 'http://' + host + ':' + port + '/v1/models/chicago_taxi:predict'
response = requests.post(
server_url, data=json_request, timeout=_LOCAL_INFERENCE_TIMEOUT_SECONDS)
response.raise_for_status()
prediction = response.json()
print(json.dumps(prediction, indent=4))
def _do_aiplatform_inference(model, version, serialized_examples):
"""Performs inference on the model:version in AI Platform."""
working_dir = tempfile.mkdtemp()
instances_file = os.path.join(working_dir, 'test.json')
json_examples = []
for serialized_example in serialized_examples:
# The encoding follows the example in:
# https://github.com/GoogleCloudPlatform/training-data-analyst/blob/master/quests/tpu/invoke_model.py
json_examples.append('{ "inputs": { "b64": "%s" } }' %
base64.b64encode(serialized_example).decode('utf-8'))
file_io.write_string_to_file(instances_file, '\n'.join(json_examples))
gcloud_command = [
'gcloud', 'ai-platform', 'predict', '--model', model, '--version',
version, '--json-instances', instances_file
]
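  # Equivalent shell invocation:
  #   gcloud ai-platform predict --model <model> --version <version> \
  #     --json-instances <instances_file>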
print(subprocess.check_output(gcloud_command))
def _do_inference(model_handle, examples_file, num_examples, schema):
"""Sends requests to the model and prints the results.
Args:
model_handle: handle to the model. This can be either
"aiplatform:model:version" or "host:port"
examples_file: path to csv file containing examples, with the first line
assumed to have the column headers
num_examples: number of requests to send to the server
schema: a Schema describing the input data
Returns:
Response from model server
"""
filtered_features = [
feature for feature in schema.feature if feature.name != _LABEL_KEY
]
del schema.feature[:]
schema.feature.extend(filtered_features)
column_names = io_utils.load_csv_column_names(examples_file)
csv_coder = _make_csv_coder(schema, column_names)
proto_coder = _make_proto_coder(schema)
input_file = open(examples_file, 'r')
input_file.readline() # skip header line
serialized_examples = []
for _ in range(num_examples):
one_line = input_file.readline()
if not one_line:
print('End of example file reached')
break
one_example = csv_coder.decode(one_line)
serialized_example = proto_coder.encode(one_example)
serialized_examples.append(serialized_example)
parsed_model_handle = model_handle.split(':')
if parsed_model_handle[0] == 'aiplatform':
_do_aiplatform_inference(
model=parsed_model_handle[1],
version=parsed_model_handle[2],
serialized_examples=serialized_examples)
else:
_do_local_inference(
host=parsed_model_handle[0],
port=parsed_model_handle[1],
serialized_examples=serialized_examples)
def main(_):
parser = argparse.ArgumentParser()
parser.add_argument(
'--num_examples',
help=('Number of examples to send to the server.'),
default=1,
type=int)
parser.add_argument(
'--server',
help=('Prediction service host:port or aiplatform:model:version'),
required=True)
parser.add_argument(
'--examples_file',
help=('Path to csv file containing examples.'),
required=True)
parser.add_argument(
'--schema_file', help='File holding the schema for the input data')
known_args, _ = parser.parse_known_args()
_do_inference(known_args.server, known_args.examples_file,
known_args.num_examples, _read_schema(known_args.schema_file))
if __name__ == '__main__':
app.run(main)
| [((1670, 1720), 'tensorflow_transform.tf_metadata.dataset_schema.from_feature_spec', 'dataset_schema.from_feature_spec', (['raw_feature_spec'], {}), '(raw_feature_spec)\n', (1702, 1720), False, 'from tensorflow_transform.tf_metadata import dataset_schema\n'), ((1730, 1770), 'tensorflow_transform.coders.ExampleProtoCoder', 'tft_coders.ExampleProtoCoder', (['raw_schema'], {}), '(raw_schema)\n', (1758, 1770), True, 'from tensorflow_transform import coders as tft_coders\n'), ((1945, 1995), 'tensorflow_transform.tf_metadata.dataset_schema.from_feature_spec', 'dataset_schema.from_feature_spec', (['raw_feature_spec'], {}), '(raw_feature_spec)\n', (1977, 1995), False, 'from tensorflow_transform.tf_metadata import dataset_schema\n'), ((2005, 2054), 'tensorflow_transform.coders.CsvCoder', 'tft_coders.CsvCoder', (['column_names', 'parsing_schema'], {}), '(column_names, parsing_schema)\n', (2024, 2054), True, 'from tensorflow_transform import coders as tft_coders\n'), ((2301, 2320), 'tensorflow_metadata.proto.v0.schema_pb2.Schema', 'schema_pb2.Schema', ([], {}), '()\n', (2318, 2320), False, 'from tensorflow_metadata.proto.v0 import schema_pb2\n'), ((2334, 2367), 'tensorflow.python.lib.io.file_io.read_file_to_string', 'file_io.read_file_to_string', (['path'], {}), '(path)\n', (2361, 2367), False, 'from tensorflow.python.lib.io import file_io\n'), ((2370, 2405), 'google.protobuf.text_format.Parse', 'text_format.Parse', (['contents', 'result'], {}), '(contents, result)\n', (2387, 2405), False, 'from google.protobuf import text_format\n'), ((3069, 3160), 'requests.post', 'requests.post', (['server_url'], {'data': 'json_request', 'timeout': '_LOCAL_INFERENCE_TIMEOUT_SECONDS'}), '(server_url, data=json_request, timeout=\n _LOCAL_INFERENCE_TIMEOUT_SECONDS)\n', (3082, 3160), False, 'import requests\n'), ((3415, 3433), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (3431, 3433), False, 'import tempfile\n'), ((3453, 3491), 'os.path.join', 'os.path.join', (['working_dir', '"""test.json"""'], {}), "(working_dir, 'test.json')\n", (3465, 3491), False, 'import os\n'), ((4822, 4867), 'tfx.utils.io_utils.load_csv_column_names', 'io_utils.load_csv_column_names', (['examples_file'], {}), '(examples_file)\n', (4852, 4867), False, 'from tfx.utils import io_utils\n'), ((5807, 5832), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5830, 5832), False, 'import argparse\n'), ((6544, 6557), 'tensorflow.python.platform.app.run', 'app.run', (['main'], {}), '(main)\n', (6551, 6557), False, 'from tensorflow.python.platform import app\n'), ((1514, 1557), 'tensorflow_transform.tf_metadata.schema_utils.schema_as_feature_spec', 'schema_utils.schema_as_feature_spec', (['schema'], {}), '(schema)\n', (1549, 1557), False, 'from tensorflow_transform.tf_metadata import schema_utils\n'), ((3232, 3264), 'json.dumps', 'json.dumps', (['prediction'], {'indent': '(4)'}), '(prediction, indent=4)\n', (3242, 3264), False, 'import json\n'), ((4078, 4117), 'subprocess.check_output', 'subprocess.check_output', (['gcloud_command'], {}), '(gcloud_command)\n', (4101, 4117), False, 'import subprocess\n'), ((2743, 2779), 'base64.b64encode', 'base64.b64encode', (['serialized_example'], {}), '(serialized_example)\n', (2759, 2779), False, 'import base64\n'), ((3795, 3831), 'base64.b64encode', 'base64.b64encode', (['serialized_example'], {}), '(serialized_example)\n', (3811, 3831), False, 'import base64\n')] |
BlackIQ/Cute | PyVideo/main.py | 5835e989d661f23b04b6e436589c6e844167522e | from PyQt5.QtCore import (pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel,
QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt,
QThread, QTime, QUrl)
from PyQt5.QtGui import QColor, qGray, QImage, QPainter, QPalette
from PyQt5.QtMultimedia import (QAbstractVideoBuffer, QMediaContent,
QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe)
from PyQt5.QtMultimediaWidgets import QVideoWidget
from PyQt5.QtWidgets import (QApplication, QComboBox, QDialog, QFileDialog,
QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton,
QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget)
class VideoWidget(QVideoWidget):
def __init__(self, parent=None):
super(VideoWidget, self).__init__(parent)
self.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
p = self.palette()
p.setColor(QPalette.Window, Qt.black)
self.setPalette(p)
self.setAttribute(Qt.WA_OpaquePaintEvent)
def keyPressEvent(self, event):
if event.key() == Qt.Key_Escape and self.isFullScreen():
self.setFullScreen(False)
event.accept()
elif event.key() == Qt.Key_Enter and event.modifiers() & Qt.Key_Alt:
self.setFullScreen(not self.isFullScreen())
event.accept()
else:
super(VideoWidget, self).keyPressEvent(event)
def mouseDoubleClickEvent(self, event):
self.setFullScreen(not self.isFullScreen())
event.accept()
class PlaylistModel(QAbstractItemModel):
Title, ColumnCount = range(2)
def __init__(self, parent=None):
super(PlaylistModel, self).__init__(parent)
        self.m_playlist = None
        self.m_data = {}  # optional per-index auxiliary data (see data())
def rowCount(self, parent=QModelIndex()):
        if self.m_playlist is not None and not parent.isValid():
            return self.m_playlist.mediaCount()
        return 0
def columnCount(self, parent=QModelIndex()):
return self.ColumnCount if not parent.isValid() else 0
def index(self, row, column, parent=QModelIndex()):
        if (self.m_playlist is not None and not parent.isValid()
                and 0 <= row < self.m_playlist.mediaCount()
                and 0 <= column < self.ColumnCount):
            return self.createIndex(row, column)
        return QModelIndex()
def parent(self, child):
return QModelIndex()
def data(self, index, role=Qt.DisplayRole):
if index.isValid() and role == Qt.DisplayRole:
if index.column() == self.Title:
location = self.m_playlist.media(index.row()).canonicalUrl()
return QFileInfo(location.path()).fileName()
            # no auxiliary per-index data is stored by default
            return self.m_data.get(index)
return None
def playlist(self):
return self.m_playlist
def setPlaylist(self, playlist):
if self.m_playlist is not None:
self.m_playlist.mediaAboutToBeInserted.disconnect(
self.beginInsertItems)
self.m_playlist.mediaInserted.disconnect(self.endInsertItems)
self.m_playlist.mediaAboutToBeRemoved.disconnect(
self.beginRemoveItems)
self.m_playlist.mediaRemoved.disconnect(self.endRemoveItems)
self.m_playlist.mediaChanged.disconnect(self.changeItems)
self.beginResetModel()
self.m_playlist = playlist
if self.m_playlist is not None:
self.m_playlist.mediaAboutToBeInserted.connect(
self.beginInsertItems)
self.m_playlist.mediaInserted.connect(self.endInsertItems)
self.m_playlist.mediaAboutToBeRemoved.connect(
self.beginRemoveItems)
self.m_playlist.mediaRemoved.connect(self.endRemoveItems)
self.m_playlist.mediaChanged.connect(self.changeItems)
self.endResetModel()
def beginInsertItems(self, start, end):
self.beginInsertRows(QModelIndex(), start, end)
def endInsertItems(self):
self.endInsertRows()
def beginRemoveItems(self, start, end):
self.beginRemoveRows(QModelIndex(), start, end)
def endRemoveItems(self):
self.endRemoveRows()
def changeItems(self, start, end):
self.dataChanged.emit(self.index(start, 0),
self.index(end, self.ColumnCount))
class PlayerControls(QWidget):
play = pyqtSignal()
pause = pyqtSignal()
stop = pyqtSignal()
next = pyqtSignal()
previous = pyqtSignal()
changeVolume = pyqtSignal(int)
changeMuting = pyqtSignal(bool)
changeRate = pyqtSignal(float)
def __init__(self, parent=None):
super(PlayerControls, self).__init__(parent)
self.playerState = QMediaPlayer.StoppedState
self.playerMuted = False
self.playButton = QToolButton(clicked=self.playClicked)
self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
self.stopButton = QToolButton(clicked=self.stop)
self.stopButton.setIcon(self.style().standardIcon(QStyle.SP_MediaStop))
self.stopButton.setEnabled(False)
self.nextButton = QToolButton(clicked=self.next)
self.nextButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaSkipForward))
self.previousButton = QToolButton(clicked=self.previous)
self.previousButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaSkipBackward))
self.muteButton = QToolButton(clicked=self.muteClicked)
self.muteButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaVolume))
self.volumeSlider = QSlider(Qt.Horizontal,
sliderMoved=self.changeVolume)
self.volumeSlider.setRange(0, 100)
self.rateBox = QComboBox(activated=self.updateRate)
self.rateBox.addItem("0.5x", 0.5)
self.rateBox.addItem("1.0x", 1.0)
self.rateBox.addItem("2.0x", 2.0)
self.rateBox.setCurrentIndex(1)
layout = QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
layout.addWidget(self.stopButton)
layout.addWidget(self.previousButton)
layout.addWidget(self.playButton)
layout.addWidget(self.nextButton)
layout.addWidget(self.muteButton)
layout.addWidget(self.volumeSlider)
layout.addWidget(self.rateBox)
self.setLayout(layout)
def state(self):
return self.playerState
    def setState(self, state):
if state != self.playerState:
self.playerState = state
if state == QMediaPlayer.StoppedState:
self.stopButton.setEnabled(False)
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPlay))
elif state == QMediaPlayer.PlayingState:
self.stopButton.setEnabled(True)
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPause))
elif state == QMediaPlayer.PausedState:
self.stopButton.setEnabled(True)
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPlay))
def volume(self):
return self.volumeSlider.value()
def setVolume(self, volume):
self.volumeSlider.setValue(volume)
def isMuted(self):
return self.playerMuted
def setMuted(self, muted):
if muted != self.playerMuted:
self.playerMuted = muted
self.muteButton.setIcon(
self.style().standardIcon(
QStyle.SP_MediaVolumeMuted if muted else QStyle.SP_MediaVolume))
def playClicked(self):
if self.playerState in (QMediaPlayer.StoppedState, QMediaPlayer.PausedState):
self.play.emit()
elif self.playerState == QMediaPlayer.PlayingState:
self.pause.emit()
def muteClicked(self):
self.changeMuting.emit(not self.playerMuted)
def playbackRate(self):
return self.rateBox.itemData(self.rateBox.currentIndex())
def setPlaybackRate(self, rate):
for i in range(self.rateBox.count()):
if qFuzzyCompare(rate, self.rateBox.itemData(i)):
self.rateBox.setCurrentIndex(i)
return
self.rateBox.addItem("%dx" % rate, rate)
self.rateBox.setCurrentIndex(self.rateBox.count() - 1)
def updateRate(self):
self.changeRate.emit(self.playbackRate())
class FrameProcessor(QObject):
histogramReady = pyqtSignal(list)
@pyqtSlot(QVideoFrame, int)
def processFrame(self, frame, levels):
histogram = [0.0] * levels
if levels and frame.map(QAbstractVideoBuffer.ReadOnly):
pixelFormat = frame.pixelFormat()
if pixelFormat == QVideoFrame.Format_YUV420P or pixelFormat == QVideoFrame.Format_NV12:
# Process YUV data.
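                # For YUV420P/NV12 the Y (luma) plane comes first, one byte
                # per pixel; (value * levels) >> 8 maps 0-255 into the bins.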
bits = frame.bits()
for idx in range(frame.height() * frame.width()):
histogram[(bits[idx] * levels) >> 8] += 1.0
else:
imageFormat = QVideoFrame.imageFormatFromPixelFormat(pixelFormat)
if imageFormat != QImage.Format_Invalid:
# Process RGB data.
image = QImage(frame.bits(), frame.width(), frame.height(), imageFormat)
for y in range(image.height()):
for x in range(image.width()):
pixel = image.pixel(x, y)
histogram[(qGray(pixel) * levels) >> 8] += 1.0
# Find the maximum value.
maxValue = 0.0
for value in histogram:
if value > maxValue:
maxValue = value
# Normalise the values between 0 and 1.
if maxValue > 0.0:
for i in range(len(histogram)):
histogram[i] /= maxValue
frame.unmap()
self.histogramReady.emit(histogram)
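# Data flow, as wired up in Player.__init__ below: a QVideoProbe taps frames from
# the media player, HistogramWidget.processFrame() queues them onto this
# FrameProcessor's worker thread, and histogramReady delivers the normalised
# buckets back to the GUI thread for painting.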
class HistogramWidget(QWidget):
def __init__(self, parent=None):
super(HistogramWidget, self).__init__(parent)
self.m_levels = 128
self.m_isBusy = False
self.m_histogram = []
self.m_processor = FrameProcessor()
self.m_processorThread = QThread()
self.m_processor.moveToThread(self.m_processorThread)
self.m_processor.histogramReady.connect(self.setHistogram)
def __del__(self):
self.m_processorThread.quit()
self.m_processorThread.wait(10000)
def setLevels(self, levels):
self.m_levels = levels
def processFrame(self, frame):
if self.m_isBusy:
return
self.m_isBusy = True
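        # Run the slot on the processor's worker thread; Qt.QueuedConnection
        # marshals the arguments across threads so the GUI thread never blocks
        # on histogram computation.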
QMetaObject.invokeMethod(self.m_processor, 'processFrame',
Qt.QueuedConnection, Q_ARG(QVideoFrame, frame),
Q_ARG(int, self.m_levels))
@pyqtSlot(list)
def setHistogram(self, histogram):
self.m_isBusy = False
self.m_histogram = list(histogram)
self.update()
def paintEvent(self, event):
painter = QPainter(self)
if len(self.m_histogram) == 0:
painter.fillRect(0, 0, self.width(), self.height(),
QColor.fromRgb(0, 0, 0))
return
barWidth = self.width() / float(len(self.m_histogram))
for i, value in enumerate(self.m_histogram):
h = value * self.height()
# Draw the level.
painter.fillRect(barWidth * i, self.height() - h,
barWidth * (i + 1), self.height(), Qt.red)
# Clear the rest of the control.
painter.fillRect(barWidth * i, 0, barWidth * (i + 1),
self.height() - h, Qt.black)
class Player(QWidget):
fullScreenChanged = pyqtSignal(bool)
def __init__(self, playlist, parent=None):
super(Player, self).__init__(parent)
self.colorDialog = None
self.trackInfo = ""
self.statusInfo = ""
self.duration = 0
self.player = QMediaPlayer()
self.playlist = QMediaPlaylist()
self.player.setPlaylist(self.playlist)
self.player.durationChanged.connect(self.durationChanged)
self.player.positionChanged.connect(self.positionChanged)
self.player.metaDataChanged.connect(self.metaDataChanged)
self.playlist.currentIndexChanged.connect(self.playlistPositionChanged)
self.player.mediaStatusChanged.connect(self.statusChanged)
self.player.bufferStatusChanged.connect(self.bufferingProgress)
self.player.videoAvailableChanged.connect(self.videoAvailableChanged)
self.player.error.connect(self.displayErrorMessage)
self.videoWidget = VideoWidget()
self.player.setVideoOutput(self.videoWidget)
self.playlistModel = PlaylistModel()
self.playlistModel.setPlaylist(self.playlist)
self.playlistView = QListView()
self.playlistView.setModel(self.playlistModel)
self.playlistView.setCurrentIndex(
self.playlistModel.index(self.playlist.currentIndex(), 0))
self.playlistView.activated.connect(self.jump)
self.slider = QSlider(Qt.Horizontal)
        self.slider.setRange(0, self.player.duration() // 1000)  # setRange() expects ints; true division would pass a float
self.labelDuration = QLabel()
self.slider.sliderMoved.connect(self.seek)
self.labelHistogram = QLabel()
self.labelHistogram.setText("Histogram:")
self.histogram = HistogramWidget()
histogramLayout = QHBoxLayout()
histogramLayout.addWidget(self.labelHistogram)
histogramLayout.addWidget(self.histogram, 1)
self.probe = QVideoProbe()
self.probe.videoFrameProbed.connect(self.histogram.processFrame)
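        # QVideoProbe.setSource() returns False when the backend cannot be probed;
        # in that case no frames are delivered and the histogram simply stays empty.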
self.probe.setSource(self.player)
openButton = QPushButton("Open", clicked=self.open)
controls = PlayerControls()
controls.setState(self.player.state())
controls.setVolume(self.player.volume())
controls.setMuted(controls.isMuted())
controls.play.connect(self.player.play)
controls.pause.connect(self.player.pause)
controls.stop.connect(self.player.stop)
controls.next.connect(self.playlist.next)
controls.previous.connect(self.previousClicked)
controls.changeVolume.connect(self.player.setVolume)
controls.changeMuting.connect(self.player.setMuted)
controls.changeRate.connect(self.player.setPlaybackRate)
controls.stop.connect(self.videoWidget.update)
self.player.stateChanged.connect(controls.setState)
self.player.volumeChanged.connect(controls.setVolume)
self.player.mutedChanged.connect(controls.setMuted)
self.fullScreenButton = QPushButton("FullScreen")
self.fullScreenButton.setCheckable(True)
self.colorButton = QPushButton("Color Options...")
self.colorButton.setEnabled(False)
self.colorButton.clicked.connect(self.showColorDialog)
displayLayout = QHBoxLayout()
displayLayout.addWidget(self.videoWidget, 2)
displayLayout.addWidget(self.playlistView)
controlLayout = QHBoxLayout()
controlLayout.setContentsMargins(0, 0, 0, 0)
controlLayout.addWidget(openButton)
controlLayout.addStretch(1)
controlLayout.addWidget(controls)
controlLayout.addStretch(1)
controlLayout.addWidget(self.fullScreenButton)
controlLayout.addWidget(self.colorButton)
layout = QVBoxLayout()
layout.addLayout(displayLayout)
hLayout = QHBoxLayout()
hLayout.addWidget(self.slider)
hLayout.addWidget(self.labelDuration)
layout.addLayout(hLayout)
layout.addLayout(controlLayout)
layout.addLayout(histogramLayout)
self.setLayout(layout)
if not self.player.isAvailable():
QMessageBox.warning(self, "Service not available",
"The QMediaPlayer object does not have a valid service.\n"
"Please check the media service plugins are installed.")
controls.setEnabled(False)
self.playlistView.setEnabled(False)
openButton.setEnabled(False)
self.colorButton.setEnabled(False)
self.fullScreenButton.setEnabled(False)
self.metaDataChanged()
self.addToPlaylist(playlist)
def open(self):
fileNames, _ = QFileDialog.getOpenFileNames(self, "Open Files")
self.addToPlaylist(fileNames)
def addToPlaylist(self, fileNames):
for name in fileNames:
fileInfo = QFileInfo(name)
if fileInfo.exists():
url = QUrl.fromLocalFile(fileInfo.absoluteFilePath())
if fileInfo.suffix().lower() == 'm3u':
self.playlist.load(url)
else:
self.playlist.addMedia(QMediaContent(url))
else:
url = QUrl(name)
if url.isValid():
self.playlist.addMedia(QMediaContent(url))
def durationChanged(self, duration):
        duration //= 1000  # work in whole seconds; setMaximum() takes an int
self.duration = duration
self.slider.setMaximum(duration)
def positionChanged(self, progress):
        progress //= 1000  # whole seconds, matching the slider range above
if not self.slider.isSliderDown():
self.slider.setValue(progress)
self.updateDurationInfo(progress)
def metaDataChanged(self):
if self.player.isMetaDataAvailable():
self.setTrackInfo("%s - %s" % (
self.player.metaData(QMediaMetaData.AlbumArtist),
self.player.metaData(QMediaMetaData.Title)))
def previousClicked(self):
# Go to the previous track if we are within the first 5 seconds of
# playback. Otherwise, seek to the beginning.
if self.player.position() <= 5000:
self.playlist.previous()
else:
self.player.setPosition(0)
def jump(self, index):
if index.isValid():
self.playlist.setCurrentIndex(index.row())
self.player.play()
def playlistPositionChanged(self, position):
self.playlistView.setCurrentIndex(
self.playlistModel.index(position, 0))
def seek(self, seconds):
self.player.setPosition(seconds * 1000)
def statusChanged(self, status):
self.handleCursor(status)
if status == QMediaPlayer.LoadingMedia:
self.setStatusInfo("Loading...")
elif status == QMediaPlayer.StalledMedia:
self.setStatusInfo("Media Stalled")
elif status == QMediaPlayer.EndOfMedia:
QApplication.alert(self)
elif status == QMediaPlayer.InvalidMedia:
self.displayErrorMessage()
else:
self.setStatusInfo("")
def handleCursor(self, status):
if status in (QMediaPlayer.LoadingMedia, QMediaPlayer.BufferingMedia, QMediaPlayer.StalledMedia):
self.setCursor(Qt.BusyCursor)
else:
self.unsetCursor()
def bufferingProgress(self, progress):
self.setStatusInfo("Buffering %d%" % progress)
def videoAvailableChanged(self, available):
if available:
self.fullScreenButton.clicked.connect(
self.videoWidget.setFullScreen)
self.videoWidget.fullScreenChanged.connect(
self.fullScreenButton.setChecked)
if self.fullScreenButton.isChecked():
self.videoWidget.setFullScreen(True)
else:
self.fullScreenButton.clicked.disconnect(
self.videoWidget.setFullScreen)
self.videoWidget.fullScreenChanged.disconnect(
self.fullScreenButton.setChecked)
self.videoWidget.setFullScreen(False)
self.colorButton.setEnabled(available)
def setTrackInfo(self, info):
self.trackInfo = info
if self.statusInfo != "":
self.setWindowTitle("%s | %s" % (self.trackInfo, self.statusInfo))
else:
self.setWindowTitle(self.trackInfo)
def setStatusInfo(self, info):
self.statusInfo = info
if self.statusInfo != "":
self.setWindowTitle("%s | %s" % (self.trackInfo, self.statusInfo))
else:
self.setWindowTitle(self.trackInfo)
def displayErrorMessage(self):
self.setStatusInfo(self.player.errorString())
def updateDurationInfo(self, currentInfo):
duration = self.duration
if currentInfo or duration:
            currentTime = QTime((currentInfo // 3600) % 60, (currentInfo // 60) % 60,
                    currentInfo % 60, (currentInfo * 1000) % 1000)
            totalTime = QTime((duration // 3600) % 60, (duration // 60) % 60,
                    duration % 60, (duration * 1000) % 1000)
            fmt = 'hh:mm:ss' if duration > 3600 else 'mm:ss'
            tStr = currentTime.toString(fmt) + " / " + totalTime.toString(fmt)
else:
tStr = ""
self.labelDuration.setText(tStr)
def showColorDialog(self):
if self.colorDialog is None:
brightnessSlider = QSlider(Qt.Horizontal)
brightnessSlider.setRange(-100, 100)
brightnessSlider.setValue(self.videoWidget.brightness())
brightnessSlider.sliderMoved.connect(
self.videoWidget.setBrightness)
self.videoWidget.brightnessChanged.connect(
brightnessSlider.setValue)
contrastSlider = QSlider(Qt.Horizontal)
contrastSlider.setRange(-100, 100)
contrastSlider.setValue(self.videoWidget.contrast())
contrastSlider.sliderMoved.connect(self.videoWidget.setContrast)
self.videoWidget.contrastChanged.connect(contrastSlider.setValue)
hueSlider = QSlider(Qt.Horizontal)
hueSlider.setRange(-100, 100)
hueSlider.setValue(self.videoWidget.hue())
hueSlider.sliderMoved.connect(self.videoWidget.setHue)
self.videoWidget.hueChanged.connect(hueSlider.setValue)
saturationSlider = QSlider(Qt.Horizontal)
saturationSlider.setRange(-100, 100)
saturationSlider.setValue(self.videoWidget.saturation())
saturationSlider.sliderMoved.connect(
self.videoWidget.setSaturation)
self.videoWidget.saturationChanged.connect(
saturationSlider.setValue)
layout = QFormLayout()
layout.addRow("Brightness", brightnessSlider)
layout.addRow("Contrast", contrastSlider)
layout.addRow("Hue", hueSlider)
layout.addRow("Saturation", saturationSlider)
button = QPushButton("Close")
layout.addRow(button)
self.colorDialog = QDialog(self)
self.colorDialog.setWindowTitle("Color Options")
self.colorDialog.setLayout(layout)
button.clicked.connect(self.colorDialog.close)
self.colorDialog.show()
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
player = Player(sys.argv[1:])
player.show()
sys.exit(app.exec_())
| [((4307, 4319), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (4317, 4319), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4332, 4344), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (4342, 4344), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4356, 4368), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (4366, 4368), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4380, 4392), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (4390, 4392), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4408, 4420), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (4418, 4420), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4440, 4455), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['int'], {}), '(int)\n', (4450, 4455), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4475, 4491), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['bool'], {}), '(bool)\n', (4485, 4491), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4509, 4526), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['float'], {}), '(float)\n', (4519, 4526), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((8464, 8480), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['list'], {}), '(list)\n', (8474, 8480), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((8487, 8513), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', (['QVideoFrame', 'int'], {}), '(QVideoFrame, int)\n', (8495, 8513), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((10851, 10865), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', (['list'], {}), '(list)\n', (10859, 10865), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((11756, 11772), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['bool'], {}), '(bool)\n', (11766, 11772), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((23053, 23075), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (23065, 23075), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((1767, 1780), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (1778, 1780), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((1924, 1937), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (1935, 1937), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((2044, 2057), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (2055, 2057), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((2323, 2336), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (2334, 2336), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4732, 4769), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {'clicked': 'self.playClicked'}), '(clicked=self.playClicked)\n', (4743, 4769), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((4877, 4907), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {'clicked': 'self.stop'}), '(clicked=self.stop)\n', (4888, 4907), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((5057, 5087), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {'clicked': 'self.next'}), '(clicked=self.next)\n', (5068, 5087), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((5223, 5257), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {'clicked': 'self.previous'}), '(clicked=self.previous)\n', (5234, 5257), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((5394, 5431), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {'clicked': 'self.muteClicked'}), '(clicked=self.muteClicked)\n', (5405, 5431), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((5560, 5613), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {'sliderMoved': 'self.changeVolume'}), '(Qt.Horizontal, sliderMoved=self.changeVolume)\n', (5567, 5613), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((5697, 5733), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {'activated': 'self.updateRate'}), '(activated=self.updateRate)\n', (5706, 5733), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((5918, 5931), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5929, 5931), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((10250, 10259), 'PyQt5.QtCore.QThread', 'QThread', ([], {}), '()\n', (10257, 10259), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((11052, 11066), 'PyQt5.QtGui.QPainter', 'QPainter', (['self'], {}), '(self)\n', (11060, 11066), False, 'from PyQt5.QtGui import QColor, qGray, QImage, QPainter, QPalette\n'), ((12005, 12019), 'PyQt5.QtMultimedia.QMediaPlayer', 'QMediaPlayer', ([], {}), '()\n', (12017, 12019), False, 'from PyQt5.QtMultimedia import QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe\n'), ((12044, 12060), 'PyQt5.QtMultimedia.QMediaPlaylist', 'QMediaPlaylist', ([], {}), '()\n', (12058, 12060), False, 'from PyQt5.QtMultimedia import QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe\n'), ((12888, 12899), 'PyQt5.QtWidgets.QListView', 'QListView', ([], {}), '()\n', (12897, 12899), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((13152, 13174), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (13159, 13174), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((13268, 13276), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (13274, 13276), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((13359, 13367), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (13365, 13367), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((13487, 13500), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (13498, 13500), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((13631, 13644), 'PyQt5.QtMultimedia.QVideoProbe', 'QVideoProbe', ([], {}), '()\n', (13642, 13644), False, 'from PyQt5.QtMultimedia import QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe\n'), ((13782, 13820), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""Open"""'], {'clicked': 'self.open'}), "('Open', clicked=self.open)\n", (13793, 13820), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((14710, 14735), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""FullScreen"""'], {}), "('FullScreen')\n", (14721, 14735), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((14813, 14844), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""Color Options..."""'], {}), "('Color Options...')\n", (14824, 14844), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((14976, 14989), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (14987, 14989), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((15119, 15132), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (15130, 15132), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((15467, 15480), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (15478, 15480), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((15539, 15552), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (15550, 15552), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((16390, 16438), 'PyQt5.QtWidgets.QFileDialog.getOpenFileNames', 'QFileDialog.getOpenFileNames', (['self', '"""Open Files"""'], {}), "(self, 'Open Files')\n", (16418, 16438), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((2264, 2277), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (2275, 2277), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((3871, 3884), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (3882, 3884), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((4032, 4045), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (4043, 4045), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((10775, 10800), 'PyQt5.QtCore.Q_ARG', 'Q_ARG', (['QVideoFrame', 'frame'], {}), '(QVideoFrame, frame)\n', (10780, 10800), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((10818, 10843), 'PyQt5.QtCore.Q_ARG', 'Q_ARG', (['int', 'self.m_levels'], {}), '(int, self.m_levels)\n', (10823, 10843), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((15841, 16016), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Service not available"""', '"""The QMediaPlayer object does not have a valid service.\nPlease check the media service plugins are installed."""'], {}), '(self, \'Service not available\',\n """The QMediaPlayer object does not have a valid service.\nPlease check the media service plugins are installed."""\n )\n', (15860, 16016), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((16572, 16587), 'PyQt5.QtCore.QFileInfo', 'QFileInfo', (['name'], {}), '(name)\n', (16581, 16587), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((20536, 20639), 'PyQt5.QtCore.QTime', 'QTime', (['(currentInfo / 3600 % 60)', '(currentInfo / 60 % 60)', '(currentInfo % 60)', '(currentInfo * 1000 % 1000)'], {}), '(currentInfo / 3600 % 60, currentInfo / 60 % 60, currentInfo % 60, \n currentInfo * 1000 % 1000)\n', (20541, 20639), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((20671, 20762), 'PyQt5.QtCore.QTime', 'QTime', (['(duration / 3600 % 60)', '(duration / 60 % 60)', '(duration % 60)', '(duration * 1000 % 1000)'], {}), '(duration / 3600 % 60, duration / 60 % 60, duration % 60, duration * \n 1000 % 1000)\n', (20676, 20762), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((21099, 21121), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (21106, 21121), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((21475, 21497), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (21482, 21497), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((21790, 21812), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (21797, 21812), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((22077, 22099), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (22084, 22099), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((22445, 22458), 'PyQt5.QtWidgets.QFormLayout', 'QFormLayout', ([], {}), '()\n', (22456, 22458), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((22695, 22715), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""Close"""'], {}), "('Close')\n", (22706, 22715), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((22782, 22795), 'PyQt5.QtWidgets.QDialog', 'QDialog', (['self'], {}), '(self)\n', (22789, 22795), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((9054, 9105), 'PyQt5.QtMultimedia.QVideoFrame.imageFormatFromPixelFormat', 'QVideoFrame.imageFormatFromPixelFormat', (['pixelFormat'], {}), '(pixelFormat)\n', (9092, 9105), False, 'from PyQt5.QtMultimedia import QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe\n'), ((11191, 11214), 'PyQt5.QtGui.QColor.fromRgb', 'QColor.fromRgb', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (11205, 11214), False, 'from PyQt5.QtGui import QColor, qGray, QImage, QPainter, QPalette\n'), ((16916, 16926), 'PyQt5.QtCore.QUrl', 'QUrl', (['name'], {}), '(name)\n', (16920, 16926), False, 'from PyQt5.QtCore import pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl\n'), ((18607, 18631), 'PyQt5.QtWidgets.QApplication.alert', 'QApplication.alert', (['self'], {}), '(self)\n', (18625, 18631), False, 'from PyQt5.QtWidgets import QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget\n'), ((16856, 16874), 'PyQt5.QtMultimedia.QMediaContent', 'QMediaContent', (['url'], {}), '(url)\n', (16869, 16874), False, 'from PyQt5.QtMultimedia import QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe\n'), ((17004, 17022), 'PyQt5.QtMultimedia.QMediaContent', 'QMediaContent', (['url'], {}), '(url)\n', (17017, 17022), False, 'from PyQt5.QtMultimedia import QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe\n'), ((9497, 9509), 'PyQt5.QtGui.qGray', 'qGray', (['pixel'], {}), '(pixel)\n', (9502, 9509), False, 'from PyQt5.QtGui import QColor, qGray, QImage, QPainter, QPalette\n')] |
Uornca/mirheo | tests/snapshot/periodic.py | 162c722ffa27c02e1f5b0d1866816e44c2393f0f | #!/usr/bin/env python
"""Test checkpoint-like periodic snapshots.
We test that there are that many folders and that the currentStep changes.
"""
import mirheo as mir
u = mir.Mirheo(nranks=(1, 1, 1), domain=(4, 6, 8), debug_level=3,
log_filename='log', no_splash=True,
checkpoint_every=10, checkpoint_mode='Incremental',
checkpoint_folder='periodic_snapshots/snapshot_', checkpoint_mechanism='Snapshot')
pv = mir.ParticleVectors.ParticleVector('pv', mass=1)
ic = mir.InitialConditions.Uniform(number_density=2)
u.registerParticleVector(pv, ic)
dpd = mir.Interactions.Pairwise('dpd', rc=1.0, kind='DPD', a=10.0, gamma=10.0, kBT=1.0, power=0.5)
lj = mir.Interactions.Pairwise('lj', rc=1.0, kind='LJ', epsilon=1.25, sigma=0.75)
u.registerInteraction(dpd)
u.registerInteraction(lj)
u.setInteraction(dpd, pv, pv)
minimize = mir.Integrators.Minimize('minimize', max_displacement=1. / 1024)
u.registerIntegrator(minimize)
u.run(45, dt=0.125)
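# With checkpoint_every=10, the 45-step run above should write snapshots at steps
# 10, 20, 30 and 40, i.e. four incremental snapshot folders (assuming snapshots
# land exactly on multiples of checkpoint_every).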
# TEST: snapshot.periodic
# cd snapshot
# rm -rf periodic_snapshots/
# mir.run --runargs "-n 2" ./periodic.py
# ls periodic_snapshots | cat > snapshot.out.txt
# grep -rH --include=*.json currentStep periodic_snapshots/ | sort >> snapshot.out.txt
| [((174, 420), 'mirheo.Mirheo', 'mir.Mirheo', ([], {'nranks': '(1, 1, 1)', 'domain': '(4, 6, 8)', 'debug_level': '(3)', 'log_filename': '"""log"""', 'no_splash': '(True)', 'checkpoint_every': '(10)', 'checkpoint_mode': '"""Incremental"""', 'checkpoint_folder': '"""periodic_snapshots/snapshot_"""', 'checkpoint_mechanism': '"""Snapshot"""'}), "(nranks=(1, 1, 1), domain=(4, 6, 8), debug_level=3, log_filename=\n 'log', no_splash=True, checkpoint_every=10, checkpoint_mode=\n 'Incremental', checkpoint_folder='periodic_snapshots/snapshot_',\n checkpoint_mechanism='Snapshot')\n", (184, 420), True, 'import mirheo as mir\n'), ((458, 506), 'mirheo.ParticleVectors.ParticleVector', 'mir.ParticleVectors.ParticleVector', (['"""pv"""'], {'mass': '(1)'}), "('pv', mass=1)\n", (492, 506), True, 'import mirheo as mir\n'), ((512, 559), 'mirheo.InitialConditions.Uniform', 'mir.InitialConditions.Uniform', ([], {'number_density': '(2)'}), '(number_density=2)\n', (541, 559), True, 'import mirheo as mir\n'), ((600, 696), 'mirheo.Interactions.Pairwise', 'mir.Interactions.Pairwise', (['"""dpd"""'], {'rc': '(1.0)', 'kind': '"""DPD"""', 'a': '(10.0)', 'gamma': '(10.0)', 'kBT': '(1.0)', 'power': '(0.5)'}), "('dpd', rc=1.0, kind='DPD', a=10.0, gamma=10.0,\n kBT=1.0, power=0.5)\n", (625, 696), True, 'import mirheo as mir\n'), ((698, 774), 'mirheo.Interactions.Pairwise', 'mir.Interactions.Pairwise', (['"""lj"""'], {'rc': '(1.0)', 'kind': '"""LJ"""', 'epsilon': '(1.25)', 'sigma': '(0.75)'}), "('lj', rc=1.0, kind='LJ', epsilon=1.25, sigma=0.75)\n", (723, 774), True, 'import mirheo as mir\n'), ((871, 936), 'mirheo.Integrators.Minimize', 'mir.Integrators.Minimize', (['"""minimize"""'], {'max_displacement': '(1.0 / 1024)'}), "('minimize', max_displacement=1.0 / 1024)\n", (895, 936), True, 'import mirheo as mir\n')] |
xzhan96/chromium.src | tools/resource_prefetch_predictor/generate_database.py | 1bd0cf3997f947746c0fc5406a2466e7b5f6159e | #!/usr/bin/python
#
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Loads a set of web pages several times on a device, and extracts the
predictor database.
"""
import argparse
import logging
import os
import sys
_SRC_PATH = os.path.abspath(os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir))
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
from devil.android import device_utils
sys.path.append(os.path.join(_SRC_PATH, 'build', 'android'))
import devil_chromium
sys.path.append(os.path.join(_SRC_PATH, 'tools', 'android', 'loading'))
import controller
from options import OPTIONS
import page_track
_PAGE_LOAD_TIMEOUT = 20
def _CreateArgumentParser():
"""Creates and returns the argument parser."""
parser = argparse.ArgumentParser(
description=('Loads a set of web pages several times on a device, and '
'extracts the predictor database.'),
parents=[OPTIONS.GetParentParser()])
parser.add_argument('--device', help='Device ID')
parser.add_argument('--urls_filename', help='File containing a list of URLs '
'(one per line). URLs can be repeated.')
parser.add_argument('--output_filename',
help='File to store the database in.')
parser.add_argument('--url_repeat',
help=('Number of times each URL in the input '
'file is loaded.'),
default=3)
return parser
def _FindDevice(device_id):
"""Returns a device matching |device_id| or the first one if None, or None."""
devices = device_utils.DeviceUtils.HealthyDevices()
if device_id is None:
return devices[0]
matching_devices = [d for d in devices if str(d) == device_id]
if not matching_devices:
return None
return matching_devices[0]
def _Setup(device):
"""Sets up a device and returns an instance of RemoteChromeController."""
chrome_controller = controller.RemoteChromeController(device)
device.ForceStop(OPTIONS.ChromePackage().package)
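  # 'learning' mode should make the resource prefetch predictor record its
  # observations into the Network Action Predictor database that _Go() pulls
  # from the device below.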
chrome_controller.AddChromeArguments(
['--speculative-resource-prefetching=learning'])
chrome_controller.ResetBrowserState()
return chrome_controller
def _Go(chrome_controller, urls_filename, output_filename, repeats):
urls = []
with open(urls_filename) as f:
urls = [line.strip() for line in f.readlines()]
with chrome_controller.Open() as connection:
for repeat in range(repeats):
logging.info('Repeat #%d', repeat)
for url in urls:
logging.info('\tLoading %s', url)
page_track.PageTrack(connection) # Registers the listeners.
connection.MonitorUrl(url, timeout_seconds=_PAGE_LOAD_TIMEOUT,
stop_delay_multiplier=1.5)
device = chrome_controller.GetDevice()
device.ForceStop(OPTIONS.ChromePackage().package)
database_filename = (
'/data/user/0/%s/app_chrome/Default/Network Action Predictor' %
OPTIONS.ChromePackage().package)
device.PullFile(database_filename, output_filename)
def main():
logging.basicConfig(level=logging.INFO)
parser = _CreateArgumentParser()
args = parser.parse_args()
OPTIONS.SetParsedArgs(args)
devil_chromium.Initialize()
device = _FindDevice(args.device)
if device is None:
logging.error('Could not find device: %s.', args.device)
sys.exit(1)
chrome_controller = _Setup(device)
_Go(chrome_controller, args.urls_filename, args.output_filename,
int(args.url_repeat))
if __name__ == '__main__':
main()
| [((446, 505), 'os.path.join', 'os.path.join', (['_SRC_PATH', '"""third_party"""', '"""catapult"""', '"""devil"""'], {}), "(_SRC_PATH, 'third_party', 'catapult', 'devil')\n", (458, 505), False, 'import os\n'), ((564, 607), 'os.path.join', 'os.path.join', (['_SRC_PATH', '"""build"""', '"""android"""'], {}), "(_SRC_PATH, 'build', 'android')\n", (576, 607), False, 'import os\n'), ((648, 702), 'os.path.join', 'os.path.join', (['_SRC_PATH', '"""tools"""', '"""android"""', '"""loading"""'], {}), "(_SRC_PATH, 'tools', 'android', 'loading')\n", (660, 702), False, 'import os\n'), ((1713, 1754), 'devil.android.device_utils.DeviceUtils.HealthyDevices', 'device_utils.DeviceUtils.HealthyDevices', ([], {}), '()\n', (1752, 1754), False, 'from devil.android import device_utils\n'), ((2058, 2099), 'controller.RemoteChromeController', 'controller.RemoteChromeController', (['device'], {}), '(device)\n', (2091, 2099), False, 'import controller\n'), ((3164, 3203), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (3183, 3203), False, 'import logging\n'), ((3270, 3297), 'options.OPTIONS.SetParsedArgs', 'OPTIONS.SetParsedArgs', (['args'], {}), '(args)\n', (3291, 3297), False, 'from options import OPTIONS\n'), ((3300, 3327), 'devil_chromium.Initialize', 'devil_chromium.Initialize', ([], {}), '()\n', (3325, 3327), False, 'import devil_chromium\n'), ((380, 405), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (395, 405), False, 'import os\n'), ((3389, 3445), 'logging.error', 'logging.error', (['"""Could not find device: %s."""', 'args.device'], {}), "('Could not find device: %s.', args.device)\n", (3402, 3445), False, 'import logging\n'), ((3450, 3461), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3458, 3461), False, 'import sys\n'), ((2119, 2142), 'options.OPTIONS.ChromePackage', 'OPTIONS.ChromePackage', ([], {}), '()\n', (2140, 2142), False, 'from options import OPTIONS\n'), ((2570, 2604), 'logging.info', 'logging.info', (['"""Repeat #%d"""', 'repeat'], {}), "('Repeat #%d', repeat)\n", (2582, 2604), False, 'import logging\n'), ((2928, 2951), 'options.OPTIONS.ChromePackage', 'OPTIONS.ChromePackage', ([], {}), '()\n', (2949, 2951), False, 'from options import OPTIONS\n'), ((3061, 3084), 'options.OPTIONS.ChromePackage', 'OPTIONS.ChromePackage', ([], {}), '()\n', (3082, 3084), False, 'from options import OPTIONS\n'), ((1059, 1084), 'options.OPTIONS.GetParentParser', 'OPTIONS.GetParentParser', ([], {}), '()\n', (1082, 1084), False, 'from options import OPTIONS\n'), ((2636, 2669), 'logging.info', 'logging.info', (['"""\tLoading %s"""', 'url'], {}), "('\\tLoading %s', url)\n", (2648, 2669), False, 'import logging\n'), ((2678, 2710), 'page_track.PageTrack', 'page_track.PageTrack', (['connection'], {}), '(connection)\n', (2698, 2710), False, 'import page_track\n')] |
madeline-scyphers/palm | palm_wrapper/job_submission/domain.py | 0ecf9eb49f66b86f284bac9506c9570159aba02b | from abc import ABC, abstractmethod
from typing import Optional
import numpy as np
import pandas as pd
from .utils import get_factors_rev
def calc_plot_size(domain_x, domain_y, plot_goal, house_goal):
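    # Scan factor pairs (x, y) of the two domain dimensions and keep the pair
    # whose area comes closest to plot_goal from below while still being large
    # enough to hold house_goal.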
f1 = sorted(get_factors_rev(domain_x))
f2 = sorted(get_factors_rev(domain_y))
plot_x, plot_y = None, None
for x in f1:
for y in f2:
if x * y - house_goal >= 0 and plot_goal - x * y >= 0:
if not plot_x and not plot_y:
plot_x, plot_y = x, y
if (plot_goal - x * y) < (plot_goal - plot_x * plot_y):
plot_x, plot_y = x, y
elif ((plot_goal - x * y) == (plot_goal - plot_x * plot_y)) and ((x - y) < (plot_x - plot_y)):
plot_x, plot_y = x, y
return plot_x, plot_y
def calc_plot_sizes(
domain_x, domain_y, plot_footprint, house_footprint, plot_ratio, dx, dy, full_domain, x_spread=None, y_spread=None
):
x_spread = x_spread if x_spread is not None else (-round(domain_x / 15), 0)
y_spread = (
y_spread if y_spread is not None else (-round(domain_y / 20), min(full_domain - domain_y, round(domain_y / 10)))
)
goal = plot_footprint / (dx * dy)
house_goal = house_footprint / (dx * dy)
dom_x = range(domain_x + x_spread[0], domain_x + x_spread[1] + 1)
dom_y = range(domain_y + y_spread[0], domain_y + y_spread[1] + 1)
plots = []
for d_x in dom_x:
for d_y in dom_y:
trimmed_d_y = int(d_y * plot_ratio)
plot_x, plot_y = calc_plot_size(d_x, trimmed_d_y, goal, house_goal)
if plot_x is not None and plot_y is not None:
plots.append((plot_x, plot_y, d_x, d_y, trimmed_d_y))
return plots
def get_best_plot_size(plots, plot_footprint, plot_ratio, dx, dy):
goal = plot_footprint / (dx * dy)
tmp = pd.DataFrame(plots, columns=["px", "py", "domx", "domy", "trimmed_dy"])
tmp["plt_area"] = tmp["px"] * tmp["py"]
tmp["goal_diff"] = goal - tmp.plt_area
tmp["domain_y_diff"] = tmp.domy * plot_ratio - tmp.trimmed_dy
tmp["trimmed_area"] = tmp["domx"] * tmp["trimmed_dy"]
tmp["full_domain"] = tmp["domx"] * tmp["domy"]
tmp["ratio_diff"] = abs((((tmp.trimmed_area + round(tmp.domain_y_diff * tmp.domx))) / tmp.full_domain - plot_ratio))
normalized_ratio_diff = (tmp.ratio_diff + plot_ratio) / plot_ratio
normalized_goal_diff = (tmp.goal_diff + goal) / goal
tmp["weighted_sorter"] = (tmp.px + tmp.py) ** (normalized_ratio_diff * normalized_goal_diff)
# tmp["ratio_diff"] = abs(((tmp.trimmed_area) / tmp.full_domain - plot_ratio))
tmp = tmp.sort_values(
by=["weighted_sorter", "goal_diff", "ratio_diff", "domain_y_diff", "trimmed_area"],
ascending=[True, True, True, True, False],
)
# tmp = tmp.sort_values(by=["goal_diff", "domain_y_diff", "trimmed_area"], ascending=[True, True, False])
tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y = tmp[["px", "py", "domx", "domy", "trimmed_dy"]].iloc[0]
return tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y
def calc_house_size(plot_x, plot_y, house_footprint, dx, dy):
goal = house_footprint / (dx * dy)
f1 = range(1, plot_x + 1)
f2 = range(1, plot_y + 1)
true_x, true_y = f1[0], f2[0]
for x in f1:
for y in f2:
padded_x, padded_y = x - 0, y - 0
nums = sorted([padded_x, padded_y])
if nums[0] * 2 < nums[1]:
continue
if abs(goal - padded_x * padded_y) < abs(goal - true_x * true_y):
true_x, true_y = padded_x, padded_y
elif (abs(goal - padded_x * padded_y) == abs(goal - true_x * true_y)) and (
abs(padded_x - padded_y) < abs(true_x - true_y)
):
true_x, true_y = padded_x, padded_y
return true_x, true_y
class BaseDomainArea(ABC):
subplot: Optional["BaseDomainArea"]
x: int
y: int
z: Optional[int]
matrix: np.ndarray
def __str__(self) -> str:
string = ""
for row in self.matrix:
string += f'{" ".join(str(int(pixel)) for pixel in row)}\n'
return string
@abstractmethod
def get_matrix(self) -> np.ndarray:
"""Get the numpy matrix representation of the domain area"""
def _validate_matrix_size(self, subplot):
for value in ["x", "y"]:
cell_val = getattr(self, value)
subplot_val = getattr(subplot, value)
if subplot_val and cell_val < subplot_val:
raise ValueError(
f"The {value} ({cell_val}) value of {self.__class__.__name__}"
f" must be larger than the house ({subplot_val}) going on it!"
)
def save_matrix(self, filename: str, matrix_name: str = None) -> None:
matrix = self.matrix if matrix_name is None else getattr(self, matrix_name)
np.savetxt(filename, matrix, delimiter=",")
class House(BaseDomainArea):
def __init__(self, x: int, y: int, z: int) -> None:
self.x = x
self.y = y
self.z = z
self.matrix = self.get_matrix()
def get_matrix(self) -> np.ndarray:
house = np.full((self.x, self.y), self.z)
return house
class Cell(BaseDomainArea):
def __init__(self, subplot: House, x: int, y: int) -> None:
self.subplot = subplot
self.x = x
self.y = y
self._validate_matrix_size(subplot=self.subplot)
self.matrix = self.get_matrix()
def get_matrix(self) -> np.ndarray:
left = (self.x - self.subplot.x) // 2
top = (self.y - self.subplot.y) // 2
plot = np.zeros((self.x, self.y), dtype=int)
plot[left : left + self.subplot.x, top : top + self.subplot.y] = self.subplot.matrix
return plot
class Domain(BaseDomainArea):
def __init__(self, subplot: Cell, tdomain_x, tdomain_y, full_x, full_y, trimmed_y, plot_ratio, stack_height) -> None:
self.subplot = subplot
self.temp_x = tdomain_x
self.temp_y = tdomain_y
self.full_x = full_x
self.full_y = full_y
self.trimmed_y = trimmed_y
self.plot_ratio = plot_ratio
self.stack_height = stack_height
# self._validate_matrix_size(subplot=self.subplot)
self.matrix, self.trees_matrix = self.get_matrix()
def print_tree_matrix(self) -> str:
string = ""
for row in self.trees_matrix:
string += f'{" ".join(str(int(pixel)) for pixel in row)}\n'
return string
def get_matrix(self) -> np.ndarray:
houses_row = np.tile(
self.subplot.matrix,
(
self.temp_x // self.subplot.x,
1,
),
)
number_of_house_rows = self.trimmed_y // self.subplot.y
number_of_full_tree_rows = self.temp_y - self.trimmed_y - 1
mixed_row_ratio = self.temp_y * self.plot_ratio - self.trimmed_y
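        # Cell encoding: -1 marks a tree, 0 is open ground, and positive values are
        # building heights; np.where() below splits these into separate maps.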
tree_row = np.full((self.temp_x, 1), -1)
mixed_row = np.array(
[-1 if i <= mixed_row_ratio * self.temp_x else 0 for i in range(1, self.temp_x + 1)]
).reshape(self.temp_x, 1)
rows = [[houses_row.copy()] for _ in range(number_of_house_rows)]
trees = [tree_row.copy() for _ in range(number_of_full_tree_rows)]
trees.insert(number_of_house_rows // 2, mixed_row)
while trees:
for row in rows:
if not trees:
break
row.append(trees.pop())
domain_with_trees = np.concatenate([np.concatenate(row, axis=1) for row in rows], axis=1)
dwtx = domain_with_trees.shape[0]
dwty = domain_with_trees.shape[1]
xs = int(np.floor((self.full_x - dwtx) / 2)), int(np.ceil((self.full_x - dwtx) / 2))
full_domain = np.pad(domain_with_trees, (xs, (self.full_y - dwty, 0)))
mid_x = self.full_x // 2
full_domain[mid_x - 2:mid_x + 2, :1] = self.stack_height # stack for surface scalar to come out of
domain = np.where(full_domain != -1, full_domain, 0)
trees = np.where(full_domain == -1, full_domain, 0)
return domain.T, trees.T
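    # NOTE: from_domain_config() appears to predate the current signatures (Cell()
    # takes no tree_domain_fraction argument and Domain() needs more than x and y);
    # setup_domain() below therefore builds instances via from_plot_size() instead.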
@classmethod
def from_domain_config(cls, house, config):
cell = Cell(house, tree_domain_fraction=config["trees"]["domain_fraction"], **config["plot_size"])
x = config["domain"]["x"]
y = config["domain"]["y"]
return cls(subplot=cell, x=x, y=y)
@classmethod
def from_plot_size(cls, house, config, tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y, plot_ratio, stack_height):
cell = Cell(house, x=tplot_x, y=tplot_y)
# x = config["domain"]["x"]
# y = config["domain"]["y"]
return cls(cell, tdomain_x, tdomain_y, config["domain"]["x"], config["domain"]["y"], trimmed_y, plot_ratio, stack_height)
def setup_domain(cfg):
domain_x, domain_y = cfg["domain"]["x"], (round(cfg["domain"]["y"] * cfg["domain"]["urban_ratio"]))
plot_footprint, plot_ratio, dx, dy = (
cfg["plot"]["plot_footprint"],
cfg["plot"]["plot_ratio"],
cfg["domain"]["dx"],
cfg["domain"]["dy"],
)
plots = calc_plot_sizes(
domain_x,
domain_y,
plot_footprint,
cfg["house"]["footprint"],
plot_ratio,
dx,
dy,
cfg["domain"]["y"],
)
tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y = get_best_plot_size(plots, plot_footprint, plot_ratio, dx, dy)
house_x, house_y = calc_house_size(tplot_x, tplot_y, cfg["house"]["footprint"], dx, dy)
house = House(house_x, house_y, cfg["house"]["height"])
return Domain.from_plot_size(house, cfg, tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y, plot_ratio, cfg["domain"]["stack_height"])
if __name__ == "__main__":
from .load_wrapper_config import get_wrapper_config
config = get_wrapper_config()
domain = setup_domain(config)
domain
| [((1874, 1945), 'pandas.DataFrame', 'pd.DataFrame', (['plots'], {'columns': "['px', 'py', 'domx', 'domy', 'trimmed_dy']"}), "(plots, columns=['px', 'py', 'domx', 'domy', 'trimmed_dy'])\n", (1886, 1945), True, 'import pandas as pd\n'), ((4922, 4965), 'numpy.savetxt', 'np.savetxt', (['filename', 'matrix'], {'delimiter': '""","""'}), "(filename, matrix, delimiter=',')\n", (4932, 4965), True, 'import numpy as np\n'), ((5207, 5240), 'numpy.full', 'np.full', (['(self.x, self.y)', 'self.z'], {}), '((self.x, self.y), self.z)\n', (5214, 5240), True, 'import numpy as np\n'), ((5669, 5706), 'numpy.zeros', 'np.zeros', (['(self.x, self.y)'], {'dtype': 'int'}), '((self.x, self.y), dtype=int)\n', (5677, 5706), True, 'import numpy as np\n'), ((6614, 6678), 'numpy.tile', 'np.tile', (['self.subplot.matrix', '(self.temp_x // self.subplot.x, 1)'], {}), '(self.subplot.matrix, (self.temp_x // self.subplot.x, 1))\n', (6621, 6678), True, 'import numpy as np\n'), ((6986, 7015), 'numpy.full', 'np.full', (['(self.temp_x, 1)', '(-1)'], {}), '((self.temp_x, 1), -1)\n', (6993, 7015), True, 'import numpy as np\n'), ((7831, 7887), 'numpy.pad', 'np.pad', (['domain_with_trees', '(xs, (self.full_y - dwty, 0))'], {}), '(domain_with_trees, (xs, (self.full_y - dwty, 0)))\n', (7837, 7887), True, 'import numpy as np\n'), ((8047, 8090), 'numpy.where', 'np.where', (['(full_domain != -1)', 'full_domain', '(0)'], {}), '(full_domain != -1, full_domain, 0)\n', (8055, 8090), True, 'import numpy as np\n'), ((8107, 8150), 'numpy.where', 'np.where', (['(full_domain == -1)', 'full_domain', '(0)'], {}), '(full_domain == -1, full_domain, 0)\n', (8115, 8150), True, 'import numpy as np\n'), ((7577, 7604), 'numpy.concatenate', 'np.concatenate', (['row'], {'axis': '(1)'}), '(row, axis=1)\n', (7591, 7604), True, 'import numpy as np\n'), ((7733, 7767), 'numpy.floor', 'np.floor', (['((self.full_x - dwtx) / 2)'], {}), '((self.full_x - dwtx) / 2)\n', (7741, 7767), True, 'import numpy as np\n'), ((7774, 7807), 'numpy.ceil', 'np.ceil', (['((self.full_x - dwtx) / 2)'], {}), '((self.full_x - dwtx) / 2)\n', (7781, 7807), True, 'import numpy as np\n')] |
Alba126/Laba21 | zad5.py | ce5735ca223d92287efa64bc3347f4356234b399 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from tkinter import *
from random import random
def on_click():
x = random()
y = random()
bt1.place(relx=x, rely=y)
root = Tk()
root['bg'] = 'white'
root.title('crown')
img = PhotoImage(file='crown.png')
bt1 = Button(image=img, command=on_click)
bt1.place(relx=0.5, rely=0.5, anchor=CENTER)
root.mainloop()
| [((122, 130), 'random.random', 'random', ([], {}), '()\n', (128, 130), False, 'from random import random\n'), ((139, 147), 'random.random', 'random', ([], {}), '()\n', (145, 147), False, 'from random import random\n')] |
HumanCellAtlas/ingest-common | tests/importer/utils/test_utils.py | 6a230f9606f64cd787b67c143854db36e012a2b7 | from openpyxl import Workbook
def create_test_workbook(*worksheet_titles, include_default_sheet=False):
workbook = Workbook()
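    # openpyxl's Workbook() always starts with one default sheet named 'Sheet';
    # it is removed below unless include_default_sheet is set.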
for title in worksheet_titles:
workbook.create_sheet(title)
if not include_default_sheet:
default_sheet = workbook['Sheet']
workbook.remove(default_sheet)
return workbook
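# Illustrative usage (hypothetical sheet titles):
#   wb = create_test_workbook('Donors', 'Specimens')
#   assert wb.sheetnames == ['Donors', 'Specimens']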
| [((121, 131), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (129, 131), False, 'from openpyxl import Workbook\n')] |
WBobby/pytorch | test/test_import_stats.py | 655960460ccca936fa5c06df6bbafd25b5582115 | import subprocess
import sys
import unittest
import pathlib
from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_IN_CI
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
try:
# Just in case PyTorch was not built in 'develop' mode
sys.path.append(str(REPO_ROOT))
from tools.stats.scribe import rds_write, register_rds_schema
except ImportError:
register_rds_schema = None
rds_write = None
# these tests could eventually be changed to fail if the import/init
# time is greater than a certain threshold, but for now we just use them
# as a way to track the duration of `import torch` in our ossci-metrics
# S3 bucket (see tools/stats/print_test_stats.py)
class TestImportTime(TestCase):
def test_time_import_torch(self):
TestCase.runWithPytorchAPIUsageStderr("import torch")
def test_time_cuda_device_count(self):
TestCase.runWithPytorchAPIUsageStderr(
"import torch; torch.cuda.device_count()",
)
@unittest.skipIf(not IS_LINUX, "Memory test is only implemented for Linux")
@unittest.skipIf(not IS_IN_CI, "Memory test only runs in CI")
def test_peak_memory(self):
def profile(module, name):
command = f"import {module}; import resource; print(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)"
result = subprocess.run(
[sys.executable, "-c", command],
stdout=subprocess.PIPE,
)
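            # Note: on Linux, getrusage() reports ru_maxrss in kilobytes, so despite
            # the 'peak_memory_bytes' field name this value is in KB, not bytes.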
max_rss = int(result.stdout.decode().strip())
return {
"test_name": name,
"peak_memory_bytes": max_rss,
}
data = profile("torch", "pytorch")
baseline = profile("sys", "baseline")
rds_write(
"import_stats", [data, baseline]
)
if __name__ == "__main__":
if register_rds_schema and IS_IN_CI:
register_rds_schema(
"import_stats",
{
"test_name": "string",
"peak_memory_bytes": "int",
"time_ms": "int",
},
)
run_tests()
| [((1010, 1084), 'unittest.skipIf', 'unittest.skipIf', (['(not IS_LINUX)', '"""Memory test is only implemented for Linux"""'], {}), "(not IS_LINUX, 'Memory test is only implemented for Linux')\n", (1025, 1084), False, 'import unittest\n'), ((1090, 1150), 'unittest.skipIf', 'unittest.skipIf', (['(not IS_IN_CI)', '"""Memory test only runs in CI"""'], {}), "(not IS_IN_CI, 'Memory test only runs in CI')\n", (1105, 1150), False, 'import unittest\n'), ((2102, 2113), 'torch.testing._internal.common_utils.run_tests', 'run_tests', ([], {}), '()\n', (2111, 2113), False, 'from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_IN_CI\n'), ((794, 847), 'torch.testing._internal.common_utils.TestCase.runWithPytorchAPIUsageStderr', 'TestCase.runWithPytorchAPIUsageStderr', (['"""import torch"""'], {}), "('import torch')\n", (831, 847), False, 'from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_IN_CI\n'), ((900, 985), 'torch.testing._internal.common_utils.TestCase.runWithPytorchAPIUsageStderr', 'TestCase.runWithPytorchAPIUsageStderr', (['"""import torch; torch.cuda.device_count()"""'], {}), "('import torch; torch.cuda.device_count()'\n )\n", (937, 985), False, 'from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_IN_CI\n'), ((1748, 1791), 'tools.stats.scribe.rds_write', 'rds_write', (['"""import_stats"""', '[data, baseline]'], {}), "('import_stats', [data, baseline])\n", (1757, 1791), False, 'from tools.stats.scribe import rds_write, register_rds_schema\n'), ((1892, 2002), 'tools.stats.scribe.register_rds_schema', 'register_rds_schema', (['"""import_stats"""', "{'test_name': 'string', 'peak_memory_bytes': 'int', 'time_ms': 'int'}"], {}), "('import_stats', {'test_name': 'string',\n 'peak_memory_bytes': 'int', 'time_ms': 'int'})\n", (1911, 2002), False, 'from tools.stats.scribe import rds_write, register_rds_schema\n'), ((1356, 1427), 'subprocess.run', 'subprocess.run', (["[sys.executable, '-c', command]"], {'stdout': 'subprocess.PIPE'}), "([sys.executable, '-c', command], stdout=subprocess.PIPE)\n", (1370, 1427), False, 'import subprocess\n'), ((164, 186), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (176, 186), False, 'import pathlib\n')] |
fasih/django-post_office | post_office/validators.py | e4086527a48bc0d1e5b8e0dfe9c27ab3a6260224 | from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist
from django.utils.encoding import force_str
def validate_email_with_name(value):
"""
Validate email address.
Both "Recipient Name <[email protected]>" and "[email protected]" are valid.
"""
value = force_str(value)
recipient = value
if '<' in value and '>' in value:
start = value.find('<') + 1
end = value.find('>')
if start < end:
recipient = value[start:end]
validate_email(recipient)
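# Illustrative behaviour (hypothetical addresses):
#   validate_email_with_name('Jane Doe <[email protected]>')  -> passes
#   validate_email_with_name('[email protected]')               -> passes
#   validate_email_with_name('Jane Doe <not-an-email>')        -> raises ValidationError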
def validate_comma_separated_emails(value):
"""
Validate every email address in a comma separated list of emails.
"""
if not isinstance(value, (tuple, list)):
raise ValidationError('Email list must be a list/tuple.')
for email in value:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('Invalid email: %s' % email, code='invalid')
def validate_template_syntax(source):
"""
    Basic Django Template syntax validation. This allows for more robust
    template authoring.
"""
try:
Template(source)
except (TemplateSyntaxError, TemplateDoesNotExist) as err:
raise ValidationError(str(err))
| [((402, 418), 'django.utils.encoding.force_str', 'force_str', (['value'], {}), '(value)\n', (411, 418), False, 'from django.utils.encoding import force_str\n'), ((616, 641), 'django.core.validators.validate_email', 'validate_email', (['recipient'], {}), '(recipient)\n', (630, 641), False, 'from django.core.validators import validate_email\n'), ((833, 884), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Email list must be a list/tuple."""'], {}), "('Email list must be a list/tuple.')\n", (848, 884), False, 'from django.core.exceptions import ValidationError\n'), ((1245, 1261), 'django.template.Template', 'Template', (['source'], {}), '(source)\n', (1253, 1261), False, 'from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist\n'), ((1017, 1077), 'django.core.exceptions.ValidationError', 'ValidationError', (["('Invalid email: %s' % email)"], {'code': '"""invalid"""'}), "('Invalid email: %s' % email, code='invalid')\n", (1032, 1077), False, 'from django.core.exceptions import ValidationError\n')] |
GiuseppeBaldini/PaperHub | paperhub/input.py | 5efdee1a0374c995a6717a4baee2106df808af12 | # Input DOI / URL
import re
import sys
# Pyperclip is not built-in, check and download if needed
try:
import pyperclip
except (ImportError, ModuleNotFoundError):
    print('Pyperclip module not found. Please install it.')
sys.exit(0)
# Regex for links
link_regex = re.compile(r'''(
http[s]?://
(?:[a-zA-Z]|
[0-9]|
[$-_@.&+]|
[!*\(\),]|
(?:%[0-9a-fA-F][0-9a-fA-F]))+
)''', re.IGNORECASE | re.VERBOSE)
# Get DOI / URL using different methods
# Method 1: argument
try:
input_link = sys.argv[1]
# Method 2: clipboard
except IndexError:
input_link = pyperclip.paste()
# Method 3: manual input
def regex_check(regex, link):
"""
Check using regex. If DOI/URL are not in the right format,
require manual input until correct or Enter to quit.
"""
while True:
match = re.match(regex, link)
        if match is None:
link = str(input('''Enter valid DOI / URL or press Enter to quit: > '''))
if link == '':
exit()
else:
continue
else:
return link
url = regex_check(link_regex, input_link) | [((277, 450), 're.compile', 're.compile', (['"""(\n http[s]?://\n (?:[a-zA-Z]|\n [0-9]|\n [$-_@.&+]|\n [!*\\\\(\\\\),]|\n (?:%[0-9a-fA-F][0-9a-fA-F]))+\n )"""', '(re.IGNORECASE | re.VERBOSE)'], {}), '(\n """(\n http[s]?://\n (?:[a-zA-Z]|\n [0-9]|\n [$-_@.&+]|\n [!*\\\\(\\\\),]|\n (?:%[0-9a-fA-F][0-9a-fA-F]))+\n )"""\n , re.IGNORECASE | re.VERBOSE)\n', (287, 450), False, 'import re\n'), ((233, 244), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (241, 244), False, 'import sys\n'), ((595, 612), 'pyperclip.paste', 'pyperclip.paste', ([], {}), '()\n', (610, 612), False, 'import pyperclip\n'), ((837, 858), 're.match', 're.match', (['regex', 'link'], {}), '(regex, link)\n', (845, 858), False, 'import re\n')] |
chillum1718/EffcientNetV2 | main.py | 4338652454185db648a6ea5df04528bcafb24ed2 | import argparse
import csv
import os
import torch
import tqdm
from torch import distributed
from torch.utils import data
from torchvision import datasets
from torchvision import transforms
from nets import nn
from utils import util
data_dir = os.path.join('..', 'Dataset', 'IMAGENET')
def batch(images, target, model, criterion=None):
images = images.cuda()
target = target.cuda()
if criterion:
with torch.cuda.amp.autocast():
loss = criterion(model(images), target)
return loss
else:
return util.accuracy(model(images), target, top_k=(1, 5))
def train(args):
epochs = 350
batch_size = 288
util.set_seeds(args.rank)
model = nn.EfficientNet().cuda()
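    # Scale the reference learning rate (0.256 for a global batch of 4096) linearly to this run's global batch size.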
lr = batch_size * torch.cuda.device_count() * 0.256 / 4096
optimizer = nn.RMSprop(util.add_weight_decay(model), lr, 0.9, 1e-3, momentum=0.9)
ema = nn.EMA(model)
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank])
else:
model = torch.nn.DataParallel(model)
criterion = nn.CrossEntropyLoss().cuda()
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
dataset = datasets.ImageFolder(os.path.join(data_dir, 'train'),
transforms.Compose([util.RandomResize(),
transforms.ColorJitter(0.4, 0.4, 0.4),
transforms.RandomHorizontalFlip(),
util.RandomAugment(),
transforms.ToTensor(), normalize]))
if args.distributed:
sampler = torch.utils.data.distributed.DistributedSampler(dataset)
else:
sampler = None
loader = data.DataLoader(dataset, batch_size, sampler=sampler, num_workers=8, pin_memory=True)
scheduler = nn.StepLR(optimizer)
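    # Gradient scaling for mixed-precision training; prevents FP16 gradients from underflowing.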
amp_scale = torch.cuda.amp.GradScaler()
with open(f'weights/{scheduler.__str__()}.csv', 'w') as f:
if args.local_rank == 0:
writer = csv.DictWriter(f, fieldnames=['epoch', 'acc@1', 'acc@5'])
writer.writeheader()
best_acc1 = 0
for epoch in range(0, epochs):
if args.distributed:
sampler.set_epoch(epoch)
if args.local_rank == 0:
print(('\n' + '%10s' * 2) % ('epoch', 'loss'))
bar = tqdm.tqdm(loader, total=len(loader))
else:
bar = loader
model.train()
for images, target in bar:
loss = batch(images, target, model, criterion)
optimizer.zero_grad()
amp_scale.scale(loss).backward()
amp_scale.step(optimizer)
amp_scale.update()
ema.update(model)
torch.cuda.synchronize()
if args.local_rank == 0:
bar.set_description(('%10s' + '%10.4g') % ('%g/%g' % (epoch + 1, epochs), loss))
scheduler.step(epoch + 1)
if args.local_rank == 0:
acc1, acc5 = test(ema.model.eval())
writer.writerow({'acc@1': str(f'{acc1:.3f}'),
'acc@5': str(f'{acc5:.3f}'),
'epoch': str(epoch + 1).zfill(3)})
util.save_checkpoint({'state_dict': ema.model.state_dict()}, acc1 > best_acc1)
best_acc1 = max(acc1, best_acc1)
if args.distributed:
torch.distributed.destroy_process_group()
torch.cuda.empty_cache()
def test(model=None):
if model is None:
model = nn.EfficientNet()
model.load_state_dict(torch.load('weights/best.pt', 'cpu')['state_dict'])
model = model.cuda()
model.eval()
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
dataset = datasets.ImageFolder(os.path.join(data_dir, 'val'),
transforms.Compose([transforms.Resize(416),
transforms.CenterCrop(384),
transforms.ToTensor(), normalize]))
loader = data.DataLoader(dataset, 48, num_workers=os.cpu_count(), pin_memory=True)
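    # Accumulate running top-1 / top-5 accuracy over the validation set.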
top1 = util.AverageMeter()
top5 = util.AverageMeter()
with torch.no_grad():
for images, target in tqdm.tqdm(loader, ('%10s' * 2) % ('acc@1', 'acc@5')):
acc1, acc5 = batch(images, target, model)
torch.cuda.synchronize()
top1.update(acc1.item(), images.size(0))
top5.update(acc5.item(), images.size(0))
acc1, acc5 = top1.avg, top5.avg
print('%10.3g' * 2 % (acc1, acc5))
if model is None:
torch.cuda.empty_cache()
else:
return acc1, acc5
def print_parameters():
model = nn.EfficientNet().eval()
_ = model(torch.zeros(1, 3, 224, 224))
params = sum(p.numel() for p in model.parameters())
print(f'Number of parameters: {int(params)}')
def benchmark():
shape = (1, 3, 384, 384)
util.torch2onnx(nn.EfficientNet().export().eval(), shape)
util.onnx2caffe()
util.print_benchmark(shape)
def main():
# python -m torch.distributed.launch --nproc_per_node=3 main.py --train
parser = argparse.ArgumentParser()
parser.add_argument("--local_rank", default=0, type=int)
parser.add_argument('--benchmark', action='store_true')
parser.add_argument('--train', action='store_true')
parser.add_argument('--test', action='store_true')
args = parser.parse_args()
args.distributed = False
args.rank = 0
if 'WORLD_SIZE' in os.environ:
args.distributed = int(os.environ['WORLD_SIZE']) > 1
if args.distributed:
torch.cuda.set_device(args.local_rank)
torch.distributed.init_process_group(backend='nccl', init_method='env://')
args.rank = torch.distributed.get_rank()
if args.local_rank == 0:
if not os.path.exists('weights'):
os.makedirs('weights')
if args.local_rank == 0:
print_parameters()
if args.benchmark:
benchmark()
if args.train:
train(args)
if args.test:
test()
if __name__ == '__main__':
main()
 | [((246, 287), 'os.path.join', 'os.path.join', (['""".."""', '"""Dataset"""', '"""IMAGENET"""'], {}), "('..', 'Dataset', 'IMAGENET')\n", (258, 287), False, 'import os\n'), ((661, 686), 'utils.util.set_seeds', 'util.set_seeds', (['args.rank'], {}), '(args.rank)\n', (675, 686), False, 'from utils import util\n'), ((883, 896), 'nets.nn.EMA', 'nn.EMA', (['model'], {}), '(model)\n', (889, 896), False, 'from nets import nn\n'), ((1134, 1209), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (1154, 1209), False, 'from torchvision import transforms\n'), ((1854, 1943), 'torch.utils.data.DataLoader', 'data.DataLoader', (['dataset', 'batch_size'], {'sampler': 'sampler', 'num_workers': '(8)', 'pin_memory': '(True)'}), '(dataset, batch_size, sampler=sampler, num_workers=8,\n pin_memory=True)\n', (1869, 1943), False, 'from torch.utils import data\n'), ((1957, 1977), 'nets.nn.StepLR', 'nn.StepLR', (['optimizer'], {}), '(optimizer)\n', (1966, 1977), False, 'from nets import nn\n'), ((1994, 2021), 'torch.cuda.amp.GradScaler', 'torch.cuda.amp.GradScaler', ([], {}), '()\n', (2019, 2021), False, 'import torch\n'), ((3624, 3648), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (3646, 3648), False, 'import torch\n'), ((3878, 3953), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (3898, 3953), False, 'from torchvision import transforms\n'), ((4373, 4392), 'utils.util.AverageMeter', 'util.AverageMeter', ([], {}), '()\n', (4390, 4392), False, 'from utils import util\n'), ((4404, 4423), 'utils.util.AverageMeter', 'util.AverageMeter', ([], {}), '()\n', (4421, 4423), False, 'from utils import util\n'), ((5231, 5248), 'utils.util.onnx2caffe', 'util.onnx2caffe', ([], {}), '()\n', (5246, 5248), False, 'from utils import util\n'), ((5253, 5280), 'utils.util.print_benchmark', 'util.print_benchmark', (['shape'], {}), '(shape)\n', (5273, 5280), False, 'from utils import util\n'), ((5384, 5409), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5407, 5409), False, 'import argparse\n'), ((814, 842), 'utils.util.add_weight_decay', 'util.add_weight_decay', (['model'], {}), '(model)\n', (835, 842), False, 'from utils import util\n'), ((939, 1017), 'torch.nn.parallel.DistributedDataParallel', 'torch.nn.parallel.DistributedDataParallel', (['model'], {'device_ids': '[args.local_rank]'}), '(model, device_ids=[args.local_rank])\n', (980, 1017), False, 'import torch\n'), ((1044, 1072), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (1065, 1072), False, 'import torch\n'), ((1246, 1277), 'os.path.join', 'os.path.join', (['data_dir', '"""train"""'], {}), "(data_dir, 'train')\n", (1258, 1277), False, 'import os\n'), ((1750, 1806), 'torch.utils.data.distributed.DistributedSampler', 'torch.utils.data.distributed.DistributedSampler', (['dataset'], {}), '(dataset)\n', (1797, 1806), False, 'import torch\n'), ((3578, 3619), 'torch.distributed.destroy_process_group', 'torch.distributed.destroy_process_group', ([], {}), '()\n', (3617, 3619), False, 'import torch\n'), ((3711, 3728), 'nets.nn.EfficientNet', 'nn.EfficientNet', ([], {}), '()\n', (3726, 3728), False, 'from nets import nn\n'), ((3990, 4019), 'os.path.join', 'os.path.join', (['data_dir', '"""val"""'], {}), "(data_dir, 'val')\n", (4002, 4019), False, 'import os\n'), ((4433, 4448), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4446, 4448), False, 'import torch\n'), ((4480, 4530), 'tqdm.tqdm', 'tqdm.tqdm', (['loader', "('%10s' * 2 % ('acc@1', 'acc@5'))"], {}), "(loader, '%10s' * 2 % ('acc@1', 'acc@5'))\n", (4489, 4530), False, 'import tqdm\n'), ((4844, 4868), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (4866, 4868), False, 'import torch\n'), ((4982, 5009), 'torch.zeros', 'torch.zeros', (['(1)', '(3)', '(224)', '(224)'], {}), '(1, 3, 224, 224)\n', (4993, 5009), False, 'import torch\n'), ((5850, 5888), 'torch.cuda.set_device', 'torch.cuda.set_device', (['args.local_rank'], {}), '(args.local_rank)\n', (5871, 5888), False, 'import torch\n'), ((5897, 5971), 'torch.distributed.init_process_group', 'torch.distributed.init_process_group', ([], {'backend': '"""nccl"""', 'init_method': '"""env://"""'}), "(backend='nccl', init_method='env://')\n", (5933, 5971), False, 'import torch\n'), ((5992, 6020), 'torch.distributed.get_rank', 'torch.distributed.get_rank', ([], {}), '()\n', (6018, 6020), False, 'import torch\n'), ((425, 450), 'torch.cuda.amp.autocast', 'torch.cuda.amp.autocast', ([], {}), '()\n', (448, 450), False, 'import torch\n'), ((699, 716), 'nets.nn.EfficientNet', 'nn.EfficientNet', ([], {}), '()\n', (714, 716), False, 'from nets import nn\n'), ((1089, 1110), 'nets.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1108, 1110), False, 'from nets import nn\n'), ((2139, 2196), 'csv.DictWriter', 'csv.DictWriter', (['f'], {'fieldnames': "['epoch', 'acc@1', 'acc@5']"}), "(f, fieldnames=['epoch', 'acc@1', 'acc@5'])\n", (2153, 2196), False, 'import csv\n'), ((4329, 4343), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (4341, 4343), False, 'import os\n'), ((4600, 4624), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (4622, 4624), False, 'import torch\n'), ((4943, 4960), 'nets.nn.EfficientNet', 'nn.EfficientNet', ([], {}), '()\n', (4958, 4960), False, 'from nets import nn\n'), ((6065, 6090), 'os.path.exists', 'os.path.exists', (['"""weights"""'], {}), "('weights')\n", (6079, 6090), False, 'import os\n'), ((6104, 6126), 'os.makedirs', 'os.makedirs', (['"""weights"""'], {}), "('weights')\n", (6115, 6126), False, 'import os\n'), ((746, 771), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (769, 771), False, 'import torch\n'), ((1334, 1353), 'utils.util.RandomResize', 'util.RandomResize', ([], {}), '()\n', (1351, 1353), False, 'from utils import util\n'), ((1410, 1447), 'torchvision.transforms.ColorJitter', 'transforms.ColorJitter', (['(0.4)', '(0.4)', '(0.4)'], {}), '(0.4, 0.4, 0.4)\n', (1432, 1447), False, 'from torchvision import transforms\n'), ((1504, 1537), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (1535, 1537), False, 'from torchvision import transforms\n'), ((1594, 1614), 'utils.util.RandomAugment', 'util.RandomAugment', ([], {}), '()\n', (1612, 1614), False, 'from utils import util\n'), ((1671, 1692), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1690, 1692), False, 'from torchvision import transforms\n'), ((2914, 2938), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (2936, 2938), False, 'import torch\n'), ((3759, 3795), 'torch.load', 'torch.load', (['"""weights/best.pt"""', '"""cpu"""'], {}), "('weights/best.pt', 'cpu')\n", (3769, 3795), False, 'import torch\n'), ((4076, 4098), 'torchvision.transforms.Resize', 'transforms.Resize', (['(416)'], {}), '(416)\n', (4093, 4098), False, 'from torchvision import transforms\n'), ((4155, 4181), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(384)'], {}), '(384)\n', (4176, 4181), False, 'from torchvision import transforms\n'), ((4238, 4259), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (4257, 4259), False, 'from torchvision import transforms\n'), ((5185, 5202), 'nets.nn.EfficientNet', 'nn.EfficientNet', ([], {}), '()\n', (5200, 5202), False, 'from nets import nn\n')] |
duartegroup/cgbind | cgbind/esp.py | 8c2369d4c49e8b008fc3951719d99e0c4f6b6b16 | import numpy as np
from time import time
from cgbind.atoms import get_atomic_number
from cgbind.log import logger
from cgbind.constants import Constants
from cgbind.exceptions import CgbindCritical
def get_esp_cube_lines(charges, atoms):
"""
    From a list of charges and a set of xyzs, create the electrostatic
    potential map, gridded uniformly from the most negative x, y, z values
    minus 5 Å to the largest x, y, z values plus 5 Å.
:param charges: (list(float))
:param atoms: (list(autode.atoms.Atom))
:return: (list(str)), (min ESP value, max ESP value)
"""
logger.info('Calculating the ESP and generating a .cube file')
start_time = time()
try:
from esp_gen import get_cube_lines
except ModuleNotFoundError:
raise CgbindCritical('esp_gen not available. cgbind must be '
'installed with the --esp_gen flag')
if charges is None:
logger.error('Could not generate an .cube file, charges were None')
return [], (None, None)
coords = np.array([atom.coord for atom in atoms])
charges = np.array(charges)
# Get the max and min points from the coordinates
max_cart_values = np.max(coords, axis=0)
min_cat_values = np.min(coords, axis=0)
# The grid needs to be slightly larger than the smallest/largest Cartesian
# coordinate
# NOTE: All distances from here are in Bohr (a0) i.e. atomic units
min_carts = Constants.ang2a0 * (min_cat_values - 5 * np.ones(3))
max_carts = Constants.ang2a0 * (max_cart_values + 5 * np.ones(3))
coords = np.array([Constants.ang2a0 * np.array(coord) for coord in coords])
# Number of voxels will be nx * ny * nz
nx, ny, nz = 50, 50, 50
vox_size = max_carts - min_carts
rx, ry, rz = vox_size[0] / nx, vox_size[1] / ny, vox_size[2] / nz
# Write the .cube file lines
cube_file_lines = ['Generated by cgbind\n', 'ESP\n']
n_atoms = len(coords)
min_x, min_y, min_z = min_carts
cube_file_lines.append(f'{n_atoms:>5d}{min_x:>12f}{min_y:>12f}{min_z:>12f}\n') # n_atoms origin(x y z)
cube_file_lines.append(f'{nx:>5d}{rx:>12f}{0.0:>12f}{0.0:>12f}\n') # Number of voxels and their size
cube_file_lines.append(f'{ny:>5d}{0.0:>12f}{ry:>12f}{0.0:>12f}\n')
cube_file_lines.append(f'{nz:>5d}{0.0:>12f}{0.0:>12f}{rz:>12f}\n')
for atom in atoms:
x, y, z = atom.coord
cube_file_lines.append(f'{get_atomic_number(atom):>5d}{0.0:>12f}'
f'{Constants.ang2a0*x:>12f}{Constants.ang2a0*y:>12f}{Constants.ang2a0*z:>12f}\n')
# Looping over x, y, z is slow in python so use Cython extension
cube_val_lines, min_val, max_val = get_cube_lines(nx, ny, nz, coords, min_carts, charges, vox_size)
cube_file_lines += cube_val_lines
logger.info(f'ESP generated in {time()-start_time:.3f} s')
return cube_file_lines, (min_val, max_val)
| [((581, 643), 'cgbind.log.logger.info', 'logger.info', (['"""Calculating the ESP and generating a .cube file"""'], {}), "('Calculating the ESP and generating a .cube file')\n", (592, 643), False, 'from cgbind.log import logger\n'), ((661, 667), 'time.time', 'time', ([], {}), '()\n', (665, 667), False, 'from time import time\n'), ((1037, 1077), 'numpy.array', 'np.array', (['[atom.coord for atom in atoms]'], {}), '([atom.coord for atom in atoms])\n', (1045, 1077), True, 'import numpy as np\n'), ((1092, 1109), 'numpy.array', 'np.array', (['charges'], {}), '(charges)\n', (1100, 1109), True, 'import numpy as np\n'), ((1187, 1209), 'numpy.max', 'np.max', (['coords'], {'axis': '(0)'}), '(coords, axis=0)\n', (1193, 1209), True, 'import numpy as np\n'), ((1231, 1253), 'numpy.min', 'np.min', (['coords'], {'axis': '(0)'}), '(coords, axis=0)\n', (1237, 1253), True, 'import numpy as np\n'), ((2683, 2747), 'esp_gen.get_cube_lines', 'get_cube_lines', (['nx', 'ny', 'nz', 'coords', 'min_carts', 'charges', 'vox_size'], {}), '(nx, ny, nz, coords, min_carts, charges, vox_size)\n', (2697, 2747), False, 'from esp_gen import get_cube_lines\n'), ((923, 990), 'cgbind.log.logger.error', 'logger.error', (['"""Could not generate an .cube file, charges were None"""'], {}), "('Could not generate an .cube file, charges were None')\n", (935, 990), False, 'from cgbind.log import logger\n'), ((768, 862), 'cgbind.exceptions.CgbindCritical', 'CgbindCritical', (['"""esp_gen not available. cgbind must be installed with the --esp_gen flag"""'], {}), "(\n 'esp_gen not available. cgbind must be installed with the --esp_gen flag')\n", (782, 862), False, 'from cgbind.exceptions import CgbindCritical\n'), ((1479, 1489), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (1486, 1489), True, 'import numpy as np\n'), ((1549, 1559), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (1556, 1559), True, 'import numpy as np\n'), ((1604, 1619), 'numpy.array', 'np.array', (['coord'], {}), '(coord)\n', (1612, 1619), True, 'import numpy as np\n'), ((2419, 2442), 'cgbind.atoms.get_atomic_number', 'get_atomic_number', (['atom'], {}), '(atom)\n', (2436, 2442), False, 'from cgbind.atoms import get_atomic_number\n'), ((2804, 2810), 'time.time', 'time', ([], {}), '()\n', (2808, 2810), False, 'from time import time\n')] |
mcdenoising/AdvMCDenoise | codes/test_specular.py | 4ba00098c2d0f50a7dfc1e345b5e50a20768d7e8 | import os
import sys
import logging
import time
import argparse
import numpy as np
from collections import OrderedDict
import scripts.options as option
import utils.util as util
from data.util import bgr2ycbcr
from data import create_dataset, create_dataloader
from models import create_model
# options
parser = argparse.ArgumentParser()
parser.add_argument('-opt', type=str, required=True, help='Path to options JSON file.')
opt = option.parse(parser.parse_args().opt, is_train=False)
util.mkdirs((path for key, path in opt['path'].items() if not key == 'pretrain_model_G'))
opt = option.dict_to_nonedict(opt)
util.setup_logger(None, opt['path']['log'], 'test.log', level=logging.INFO, screen=True)
logger = logging.getLogger('base')
logger.info(option.dict2str(opt))
# Create test dataset and dataloader
test_loaders = []
for phase, dataset_opt in sorted(opt['datasets'].items()):
test_set = create_dataset(dataset_opt)
test_loader = create_dataloader(test_set, dataset_opt)
logger.info('Number of test images in [{:s}]: {:d}'.format(dataset_opt['name'], len(test_set)))
test_loaders.append(test_loader)
# Create model
model = create_model(opt)
for test_loader in test_loaders:
test_set_name = test_loader.dataset.opt['name']
logger.info('\nTesting [{:s}]...'.format(test_set_name))
test_start_time = time.time()
dataset_dir = os.path.join(opt['path']['results_root'], test_set_name)
util.mkdir(dataset_dir)
test_results = OrderedDict()
test_results['psnr'] = []
test_results['ssim'] = []
test_results['psnr_y'] = []
test_results['ssim_y'] = []
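    # Run every test image through the model, save the outputs and accumulate PSNR/SSIM.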
for data in test_loader:
need_GT = False if test_loader.dataset.opt['dataroot_GT'] is None else True
# need_GT = True
model.feed_data_specular(data, need_GT=need_GT)
if opt["image_type"] == "exr":
y = data["x_offset"]
x = data["y_offset"]
img_path = data['NOISY_path'][0]
img_name = os.path.splitext(os.path.basename(img_path))[0]
start = time.time()
model.test() # test
end = time.time()
print("Time elapsed... %f "%(end - start))
visuals = model.get_current_visuals(need_GT=need_GT)
denoised_img = util.tensor2img(visuals['DENOISED']) # uint8
noisy_img = util.tensor2img(visuals['NOISY'])
gt_img = util.tensor2img(visuals['GT']) # uint8
# save images
suffix = opt['suffix']
        if suffix is None:
suffix = ""
save_DENOISED_img_path = os.path.join(dataset_dir, img_name + suffix + '_1denoised.png')
save_NOISY_img_path = os.path.join(dataset_dir, img_name + suffix + '_0noisy.png')
save_GT_img_path = os.path.join(dataset_dir, img_name + suffix + '_2gt.png')
# calculate PSNR and SSIM
if need_GT:
# gt_img = util.tensor2img(visuals['GT'])
gt_img = gt_img / 255.
denoised_img = denoised_img / 255.
crop_border = test_loader.dataset.opt['scale']
cropped_denoised_img = denoised_img#[crop_border:-crop_border, crop_border:-crop_border, :]
cropped_gt_img = gt_img#[crop_border:-crop_border, crop_border:-crop_border, :]
psnr = util.calculate_psnr(cropped_denoised_img * 255, cropped_gt_img * 255)
ssim = util.calculate_ssim(cropped_denoised_img * 255, cropped_gt_img * 255)
test_results['psnr'].append(psnr)
test_results['ssim'].append(ssim)
if gt_img.shape[2] == 3: # RGB image
denoised_img_y = bgr2ycbcr(denoised_img, only_y=True)
gt_img_y = bgr2ycbcr(gt_img, only_y=True)
cropped_denoised_img_y = denoised_img_y[crop_border:-crop_border, crop_border:-crop_border]
cropped_gt_img_y = gt_img_y[crop_border:-crop_border, crop_border:-crop_border]
psnr_y = util.calculate_psnr(cropped_denoised_img_y * 255, cropped_gt_img_y * 255)
ssim_y = util.calculate_ssim(cropped_denoised_img_y * 255, cropped_gt_img_y * 255)
test_results['psnr_y'].append(psnr_y)
test_results['ssim_y'].append(ssim_y)
logger.info('{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}; PSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}.'\
.format(img_name, psnr, ssim, psnr_y, ssim_y))
else:
logger.info('{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}.'.format(img_name, psnr, ssim))
else:
logger.info(img_name)
if opt["image_type"] == "exr":
denoised_exr = util.tensor2exr(visuals['DENOISED']) # uint8
noisy_exr = util.tensor2exr(visuals['NOISY'])
gt_exr = util.tensor2exr(visuals['GT']) # uint8
save_DENOISED_img_path = os.path.join(dataset_dir, img_name + suffix + '_1denoised.exr')
save_NOISY_img_path = os.path.join(dataset_dir, img_name + suffix + '_0noisy.exr')
save_GT_img_path = os.path.join(dataset_dir, img_name + suffix + '_2gt.exr')
util.saveEXRfromMatrix(save_DENOISED_img_path, denoised_exr, (x, y))
util.saveEXRfromMatrix(save_NOISY_img_path, noisy_exr, (x, y))
util.saveEXRfromMatrix(save_GT_img_path, gt_exr, (x, y))
if need_GT: # metrics
# Average PSNR/SSIM results
ave_psnr = sum(test_results['psnr']) / len(test_results['psnr'])
ave_ssim = sum(test_results['ssim']) / len(test_results['ssim'])
logger.info('----Average PSNR/SSIM results for {}----\n\tPSNR: {:.6f} dB; SSIM: {:.6f}\n'\
.format(test_set_name, ave_psnr, ave_ssim))
# if test_results['psnr_y'] and test_results['ssim_y']:
# ave_psnr_y = sum(test_results['psnr_y']) / len(test_results['psnr_y'])
# ave_ssim_y = sum(test_results['ssim_y']) / len(test_results['ssim_y'])
# logger.info('----Y channel, average PSNR/SSIM----\n\tPSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}\n'\
# .format(ave_psnr_y, ave_ssim_y))
 | [((314, 339), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (337, 339), False, 'import argparse\n'), ((584, 612), 'scripts.options.dict_to_nonedict', 'option.dict_to_nonedict', (['opt'], {}), '(opt)\n', (607, 612), True, 'import scripts.options as option\n'), ((614, 706), 'utils.util.setup_logger', 'util.setup_logger', (['None', "opt['path']['log']", '"""test.log"""'], {'level': 'logging.INFO', 'screen': '(True)'}), "(None, opt['path']['log'], 'test.log', level=logging.INFO,\n screen=True)\n", (631, 706), True, 'import utils.util as util\n'), ((712, 737), 'logging.getLogger', 'logging.getLogger', (['"""base"""'], {}), "('base')\n", (729, 737), False, 'import logging\n'), ((1149, 1166), 'models.create_model', 'create_model', (['opt'], {}), '(opt)\n', (1161, 1166), False, 'from models import create_model\n'), ((750, 770), 'scripts.options.dict2str', 'option.dict2str', (['opt'], {}), '(opt)\n', (765, 770), True, 'import scripts.options as option\n'), ((901, 928), 'data.create_dataset', 'create_dataset', (['dataset_opt'], {}), '(dataset_opt)\n', (915, 928), False, 'from data import create_dataset, create_dataloader\n'), ((947, 987), 'data.create_dataloader', 'create_dataloader', (['test_set', 'dataset_opt'], {}), '(test_set, dataset_opt)\n', (964, 987), False, 'from data import create_dataset, create_dataloader\n'), ((1336, 1347), 'time.time', 'time.time', ([], {}), '()\n', (1345, 1347), False, 'import time\n'), ((1366, 1422), 'os.path.join', 'os.path.join', (["opt['path']['results_root']", 'test_set_name'], {}), "(opt['path']['results_root'], test_set_name)\n", (1378, 1422), False, 'import os\n'), ((1427, 1450), 'utils.util.mkdir', 'util.mkdir', (['dataset_dir'], {}), '(dataset_dir)\n', (1437, 1450), True, 'import utils.util as util\n'), ((1471, 1484), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1482, 1484), False, 'from collections import OrderedDict\n'), ((2034, 2045), 'time.time', 'time.time', ([], {}), '()\n', (2043, 2045), False, 'import time\n'), ((2089, 2100), 'time.time', 'time.time', ([], {}), '()\n', (2098, 2100), False, 'import time\n'), ((2237, 2273), 'utils.util.tensor2img', 'util.tensor2img', (["visuals['DENOISED']"], {}), "(visuals['DENOISED'])\n", (2252, 2273), True, 'import utils.util as util\n'), ((2303, 2336), 'utils.util.tensor2img', 'util.tensor2img', (["visuals['NOISY']"], {}), "(visuals['NOISY'])\n", (2318, 2336), True, 'import utils.util as util\n'), ((2354, 2384), 'utils.util.tensor2img', 'util.tensor2img', (["visuals['GT']"], {}), "(visuals['GT'])\n", (2369, 2384), True, 'import utils.util as util\n'), ((2544, 2607), 'os.path.join', 'os.path.join', (['dataset_dir', "(img_name + suffix + '_1denoised.png')"], {}), "(dataset_dir, img_name + suffix + '_1denoised.png')\n", (2556, 2607), False, 'import os\n'), ((2638, 2698), 'os.path.join', 'os.path.join', (['dataset_dir', "(img_name + suffix + '_0noisy.png')"], {}), "(dataset_dir, img_name + suffix + '_0noisy.png')\n", (2650, 2698), False, 'import os\n'), ((2726, 2783), 'os.path.join', 'os.path.join', (['dataset_dir', "(img_name + suffix + '_2gt.png')"], {}), "(dataset_dir, img_name + suffix + '_2gt.png')\n", (2738, 2783), False, 'import os\n'), ((3262, 3331), 'utils.util.calculate_psnr', 'util.calculate_psnr', (['(cropped_denoised_img * 255)', '(cropped_gt_img * 255)'], {}), '(cropped_denoised_img * 255, cropped_gt_img * 255)\n', (3281, 3331), True, 'import utils.util as util\n'), ((3351, 3420), 'utils.util.calculate_ssim', 'util.calculate_ssim', (['(cropped_denoised_img * 255)', '(cropped_gt_img * 255)'], {}), '(cropped_denoised_img * 255, cropped_gt_img * 255)\n', (3370, 3420), True, 'import utils.util as util\n'), ((4609, 4645), 'utils.util.tensor2exr', 'util.tensor2exr', (["visuals['DENOISED']"], {}), "(visuals['DENOISED'])\n", (4624, 4645), True, 'import utils.util as util\n'), ((4679, 4712), 'utils.util.tensor2exr', 'util.tensor2exr', (["visuals['NOISY']"], {}), "(visuals['NOISY'])\n", (4694, 4712), True, 'import utils.util as util\n'), ((4734, 4764), 'utils.util.tensor2exr', 'util.tensor2exr', (["visuals['GT']"], {}), "(visuals['GT'])\n", (4749, 4764), True, 'import utils.util as util\n'), ((4812, 4875), 'os.path.join', 'os.path.join', (['dataset_dir', "(img_name + suffix + '_1denoised.exr')"], {}), "(dataset_dir, img_name + suffix + '_1denoised.exr')\n", (4824, 4875), False, 'import os\n'), ((4910, 4970), 'os.path.join', 'os.path.join', (['dataset_dir', "(img_name + suffix + '_0noisy.exr')"], {}), "(dataset_dir, img_name + suffix + '_0noisy.exr')\n", (4922, 4970), False, 'import os\n'), ((5002, 5059), 'os.path.join', 'os.path.join', (['dataset_dir', "(img_name + suffix + '_2gt.exr')"], {}), "(dataset_dir, img_name + suffix + '_2gt.exr')\n", (5014, 5059), False, 'import os\n'), ((5076, 5144), 'utils.util.saveEXRfromMatrix', 'util.saveEXRfromMatrix', (['save_DENOISED_img_path', 'denoised_exr', '(x, y)'], {}), '(save_DENOISED_img_path, denoised_exr, (x, y))\n', (5098, 5144), True, 'import utils.util as util\n'), ((5158, 5220), 'utils.util.saveEXRfromMatrix', 'util.saveEXRfromMatrix', (['save_NOISY_img_path', 'noisy_exr', '(x, y)'], {}), '(save_NOISY_img_path, noisy_exr, (x, y))\n', (5180, 5220), True, 'import utils.util as util\n'), ((5235, 5291), 'utils.util.saveEXRfromMatrix', 'util.saveEXRfromMatrix', (['save_GT_img_path', 'gt_exr', '(x, y)'], {}), '(save_GT_img_path, gt_exr, (x, y))\n', (5257, 5291), True, 'import utils.util as util\n'), ((1987, 2013), 'os.path.basename', 'os.path.basename', (['img_path'], {}), '(img_path)\n', (2003, 2013), False, 'import os\n'), ((3597, 3633), 'data.util.bgr2ycbcr', 'bgr2ycbcr', (['denoised_img'], {'only_y': '(True)'}), '(denoised_img, only_y=True)\n', (3606, 3633), False, 'from data.util import bgr2ycbcr\n'), ((3661, 3691), 'data.util.bgr2ycbcr', 'bgr2ycbcr', (['gt_img'], {'only_y': '(True)'}), '(gt_img, only_y=True)\n', (3670, 3691), False, 'from data.util import bgr2ycbcr\n'), ((3921, 3994), 'utils.util.calculate_psnr', 'util.calculate_psnr', (['(cropped_denoised_img_y * 255)', '(cropped_gt_img_y * 255)'], {}), '(cropped_denoised_img_y * 255, cropped_gt_img_y * 255)\n', (3940, 3994), True, 'import utils.util as util\n'), ((4020, 4093), 'utils.util.calculate_ssim', 'util.calculate_ssim', (['(cropped_denoised_img_y * 255)', '(cropped_gt_img_y * 255)'], {}), '(cropped_denoised_img_y * 255, cropped_gt_img_y * 255)\n', (4039, 4093), True, 'import utils.util as util\n')] |
zzw0929/deeplearning | neuralNetwork/layer3/nerualNet.py | d96aadd71838fa60a4c031b13fe475d4839e8a33 | # coding:utf-8
import time
import matplotlib.pyplot as plt
import numpy as np
import sklearn
import sklearn.datasets
import sklearn.linear_model
import matplotlib
matplotlib.rcParams['figure.figsize'] = (10.0, 8.0)
np.random.seed(0)
X, y = sklearn.datasets.make_moons(200, noise=0.20)
plt.scatter(X[:,0], X[:,1], s=40, c=y, cmap=plt.cm.Spectral)
# plt.show()
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X, y)
# Helper function to plot a decision boundary.
# If you don't fully understand this function don't worry, it just generates
# the contour plot below.
def plot_decision_boundary(pred_func):
# Set min and max values and give it some padding
x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
h = 0.01
# Generate a grid of points with distance h between them
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max,
h))
    # Predict the function value for the whole grid
Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# Plot the contour and training examples
plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)
plot_decision_boundary(lambda x: clf.predict(x))
plt.title("Logistic Regression")
#plt.show()
num_examples = len(X) # training set size
nn_input_dim = 2 # input layer dimensionality
nn_output_dim = 2 # output layer dimensionality
# Gradient descent parameters (I picked these by hand)
epsilon = 0.01 # learning rate for gradient descent
reg_lambda = 0.01 # regularization strength
# Helper function to evaluate the total loss on the dataset
def calculate_loss(model):
W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']
# Forward propagation to calculate our predictions
z1 = X.dot(W1) + b1
a1 = np.tanh(z1)
z2 = a1.dot(W2) + b2
exp_scores = np.exp(z2)
probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
# Calculating the loss
#print(11111111)
#print(probs)
#time.sleep(10)
corect_logprobs = -np.log(probs[range(num_examples), y])
data_loss = np.sum(corect_logprobs)
    # Add regularization term to loss (optional)
    # L2 regularization
data_loss += reg_lambda/2 * (np.sum(np.square(W1)) + np.sum(np.square(W2)))
return 1./num_examples * data_loss
def predict(model, x):
W1, b1, W2, b2 = model['W1'], model['b1'], model['W2'], model['b2']
# Forward propagation
z1 = x.dot(W1) + b1
a1 = np.tanh(z1)
z2 = a1.dot(W2) + b2
exp_scores = np.exp(z2)
probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
return np.argmax(probs, axis=1)
# This function learns parameters for the neural network and returns the model.
# - nn_hdim: Number of nodes in the hidden layer
# - num_passes: Number of passes through the training data for gradient descent
# - print_loss: If True, print the loss every 1000 iterations
def build_model(nn_hdim, num_passes=20000, print_loss=False):
# Initialize the parameters to random values. We need to learn these.
np.random.seed(0)
W1 = np.random.randn(nn_input_dim, nn_hdim) / np.sqrt(nn_input_dim)
b1 = np.zeros((1, nn_hdim))
W2 = np.random.randn(nn_hdim, nn_output_dim) / np.sqrt(nn_hdim)
b2 = np.zeros((1, nn_output_dim))
# This is what we return at the end
model = {}
# Gradient descent. For each batch...
for i in range(0, num_passes):
# Forward propagation
z1 = X.dot(W1) + b1
a1 = np.tanh(z1)
z2 = a1.dot(W2) + b2
exp_scores = np.exp(z2)
probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
# Backpropagation
delta3 = probs
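        # Gradient of cross-entropy w.r.t. z2 is the softmax output minus the one-hot labels.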
delta3[range(num_examples), y] -= 1
dW2 = (a1.T).dot(delta3)
db2 = np.sum(delta3, axis=0, keepdims=True)
delta2 = delta3.dot(W2.T) * (1 - np.power(a1, 2))
dW1 = np.dot(X.T, delta2)
db1 = np.sum(delta2, axis=0)
# Add regularization terms (b1 and b2 don't have regularization terms)
dW2 += reg_lambda * W2
dW1 += reg_lambda * W1
# Gradient descent parameter update
W1 += -epsilon * dW1
b1 += -epsilon * db1
W2 += -epsilon * dW2
b2 += -epsilon * db2
# Assign new parameters to the model
model = { 'W1': W1, 'b1': b1, 'W2': W2, 'b2': b2}
# Optionally print the loss.
# This is expensive because it uses the whole dataset, so we don't want to do it too often.
if print_loss and i % 1000 == 0:
print("Loss after iteration %i: %f" %(i, calculate_loss(model)))
return model
def test_1():
# Build a model with a 3-dimensional hidden layer
model = build_model(3, print_loss=True)
# Plot the decision boundary
plot_decision_boundary(lambda x: predict(model, x))
plt.title("Decision Boundary for hidden layer size 3")
plt.show()
def test_2():
plt.figure(figsize=(16, 32))
hidden_layer_dimensions = [1, 2, 3, 4, 5, 20, 50]
for i, nn_hdim in enumerate(hidden_layer_dimensions):
plt.subplot(5, 2, i+1)
plt.title('Hidden Layer size %d' % nn_hdim)
model = build_model(nn_hdim)
plot_decision_boundary(lambda x: predict(model, x))
plt.show()
if __name__ == '__main__':
#print(y)
#print(12121)
#print(X)
test_1()
 | [((218, 235), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (232, 235), True, 'import numpy as np\n'), ((243, 286), 'sklearn.datasets.make_moons', 'sklearn.datasets.make_moons', (['(200)'], {'noise': '(0.2)'}), '(200, noise=0.2)\n', (270, 286), False, 'import sklearn\n'), ((288, 354), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[:, (0)]', 'X[:, (1)]'], {'s': '(40)', 'c': 'y', 'cmap': 'plt.cm.Spectral'}), '(X[:, (0)], X[:, (1)], s=40, c=y, cmap=plt.cm.Spectral)\n', (299, 354), True, 'import matplotlib.pyplot as plt\n'), ((369, 412), 'sklearn.linear_model.LogisticRegressionCV', 'sklearn.linear_model.LogisticRegressionCV', ([], {}), '()\n', (410, 412), False, 'import sklearn\n'), ((1286, 1318), 'matplotlib.pyplot.title', 'plt.title', (['"""Logistic Regression"""'], {}), "('Logistic Regression')\n", (1295, 1318), True, 'import matplotlib.pyplot as plt\n'), ((1127, 1172), 'matplotlib.pyplot.contourf', 'plt.contourf', (['xx', 'yy', 'Z'], {'cmap': 'plt.cm.Spectral'}), '(xx, yy, Z, cmap=plt.cm.Spectral)\n', (1139, 1172), True, 'import matplotlib.pyplot as plt\n'), ((1177, 1237), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[:, (0)]', 'X[:, (1)]'], {'c': 'y', 'cmap': 'plt.cm.Spectral'}), '(X[:, (0)], X[:, (1)], c=y, cmap=plt.cm.Spectral)\n', (1188, 1237), True, 'import matplotlib.pyplot as plt\n'), ((1869, 1880), 'numpy.tanh', 'np.tanh', (['z1'], {}), '(z1)\n', (1876, 1880), True, 'import numpy as np\n'), ((1923, 1933), 'numpy.exp', 'np.exp', (['z2'], {}), '(z2)\n', (1929, 1933), True, 'import numpy as np\n'), ((2170, 2193), 'numpy.sum', 'np.sum', (['corect_logprobs'], {}), '(corect_logprobs)\n', (2176, 2193), True, 'import numpy as np\n'), ((2541, 2552), 'numpy.tanh', 'np.tanh', (['z1'], {}), '(z1)\n', (2548, 2552), True, 'import numpy as np\n'), ((2595, 2605), 'numpy.exp', 'np.exp', (['z2'], {}), '(z2)\n', (2601, 2605), True, 'import numpy as np\n'), ((2684, 2708), 'numpy.argmax', 'np.argmax', (['probs'], {'axis': '(1)'}), '(probs, axis=1)\n', (2693, 2708), True, 'import numpy as np\n'), ((3121, 3138), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (3135, 3138), True, 'import numpy as np\n'), ((3220, 3242), 'numpy.zeros', 'np.zeros', (['(1, nn_hdim)'], {}), '((1, nn_hdim))\n', (3228, 3242), True, 'import numpy as np\n'), ((3320, 3348), 'numpy.zeros', 'np.zeros', (['(1, nn_output_dim)'], {}), '((1, nn_output_dim))\n', (3328, 3348), True, 'import numpy as np\n'), ((4886, 4940), 'matplotlib.pyplot.title', 'plt.title', (['"""Decision Boundary for hidden layer size 3"""'], {}), "('Decision Boundary for hidden layer size 3')\n", (4895, 4940), True, 'import matplotlib.pyplot as plt\n'), ((4945, 4955), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4953, 4955), True, 'import matplotlib.pyplot as plt\n'), ((4979, 5007), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 32)'}), '(figsize=(16, 32))\n', (4989, 5007), True, 'import matplotlib.pyplot as plt\n'), ((5304, 5314), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5312, 5314), True, 'import matplotlib.pyplot as plt\n'), ((886, 912), 'numpy.arange', 'np.arange', (['x_min', 'x_max', 'h'], {}), '(x_min, x_max, h)\n', (895, 912), True, 'import numpy as np\n'), ((914, 940), 'numpy.arange', 'np.arange', (['y_min', 'y_max', 'h'], {}), '(y_min, y_max, h)\n', (923, 940), True, 'import numpy as np\n'), ((1959, 2000), 'numpy.sum', 'np.sum', (['exp_scores'], {'axis': '(1)', 'keepdims': '(True)'}), '(exp_scores, axis=1, keepdims=True)\n', (1965, 2000), True, 'import numpy as np\n'), ((2631, 2672), 'numpy.sum', 'np.sum', (['exp_scores'], {'axis': '(1)', 'keepdims': '(True)'}), '(exp_scores, axis=1, keepdims=True)\n', (2637, 2672), True, 'import numpy as np\n'), ((3148, 3186), 'numpy.random.randn', 'np.random.randn', (['nn_input_dim', 'nn_hdim'], {}), '(nn_input_dim, nn_hdim)\n', (3163, 3186), True, 'import numpy as np\n'), ((3189, 3210), 'numpy.sqrt', 'np.sqrt', (['nn_input_dim'], {}), '(nn_input_dim)\n', (3196, 3210), True, 'import numpy as np\n'), ((3252, 3291), 'numpy.random.randn', 'np.random.randn', (['nn_hdim', 'nn_output_dim'], {}), '(nn_hdim, nn_output_dim)\n', (3267, 3291), True, 'import numpy as np\n'), ((3294, 3310), 'numpy.sqrt', 'np.sqrt', (['nn_hdim'], {}), '(nn_hdim)\n', (3301, 3310), True, 'import numpy as np\n'), ((3552, 3563), 'numpy.tanh', 'np.tanh', (['z1'], {}), '(z1)\n', (3559, 3563), True, 'import numpy as np\n'), ((3614, 3624), 'numpy.exp', 'np.exp', (['z2'], {}), '(z2)\n', (3620, 3624), True, 'import numpy as np\n'), ((3836, 3873), 'numpy.sum', 'np.sum', (['delta3'], {'axis': '(0)', 'keepdims': '(True)'}), '(delta3, axis=0, keepdims=True)\n', (3842, 3873), True, 'import numpy as np\n'), ((3946, 3965), 'numpy.dot', 'np.dot', (['X.T', 'delta2'], {}), '(X.T, delta2)\n', (3952, 3965), True, 'import numpy as np\n'), ((3980, 4002), 'numpy.sum', 'np.sum', (['delta2'], {'axis': '(0)'}), '(delta2, axis=0)\n', (3986, 4002), True, 'import numpy as np\n'), ((5128, 5152), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(5)', '(2)', '(i + 1)'], {}), '(5, 2, i + 1)\n', (5139, 5152), True, 'import matplotlib.pyplot as plt\n'), ((5159, 5202), 'matplotlib.pyplot.title', 'plt.title', (["('Hidden Layer size %d' % nn_hdim)"], {}), "('Hidden Layer size %d' % nn_hdim)\n", (5168, 5202), True, 'import matplotlib.pyplot as plt\n'), ((3654, 3695), 'numpy.sum', 'np.sum', (['exp_scores'], {'axis': '(1)', 'keepdims': '(True)'}), '(exp_scores, axis=1, keepdims=True)\n', (3660, 3695), True, 'import numpy as np\n'), ((2307, 2320), 'numpy.square', 'np.square', (['W1'], {}), '(W1)\n', (2316, 2320), True, 'import numpy as np\n'), ((2331, 2344), 'numpy.square', 'np.square', (['W2'], {}), '(W2)\n', (2340, 2344), True, 'import numpy as np\n'), ((3915, 3930), 'numpy.power', 'np.power', (['a1', '(2)'], {}), '(a1, 2)\n', (3923, 3930), True, 'import numpy as np\n')] |
Lifeistrange/flaskweb | app/domain/create_db.py | 6226e9f546d96d5f0a8f11104a37849e8f16ce80 | #!/usr/bin/env python
# coding=utf-8
from manage import db
import app.domain.model
db.create_all()
| [((85, 100), 'manage.db.create_all', 'db.create_all', ([], {}), '()\n', (98, 100), False, 'from manage import db\n')] |
TimoRoth/tljh-repo2docker | tljh_repo2docker/tests/utils.py | 35e7e940266de0490990acc780b64802afe973c1 | import asyncio
import json
from aiodocker import Docker, DockerError
from jupyterhub.tests.utils import api_request
async def add_environment(
app, *, repo, ref="master", name="", memory="", cpu=""
):
"""Use the POST endpoint to add a new environment"""
r = await api_request(
app,
"environments",
method="post",
data=json.dumps(
{"repo": repo, "ref": ref, "name": name, "memory": memory, "cpu": cpu,}
),
)
return r
async def wait_for_image(*, image_name):
"""wait until an image is built"""
count, retries = 0, 60 * 10
image = None
async with Docker() as docker:
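        # Poll the Docker daemon roughly once per second until the image exists or the retries run out.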
while count < retries:
await asyncio.sleep(1)
try:
image = await docker.images.inspect(image_name)
except DockerError:
count += 1
continue
else:
break
return image
async def remove_environment(app, *, image_name):
"""Use the DELETE endpoint to remove an environment"""
r = await api_request(
app, "environments", method="delete", data=json.dumps({"name": image_name,}),
)
return r
| [((637, 645), 'aiodocker.Docker', 'Docker', ([], {}), '()\n', (643, 645), False, 'from aiodocker import Docker, DockerError\n'), ((365, 451), 'json.dumps', 'json.dumps', (["{'repo': repo, 'ref': ref, 'name': name, 'memory': memory, 'cpu': cpu}"], {}), "({'repo': repo, 'ref': ref, 'name': name, 'memory': memory, 'cpu':\n cpu})\n", (375, 451), False, 'import json\n'), ((706, 722), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (719, 722), False, 'import asyncio\n'), ((1134, 1166), 'json.dumps', 'json.dumps', (["{'name': image_name}"], {}), "({'name': image_name})\n", (1144, 1166), False, 'import json\n')] |
CrispenGari/python-flask | 05_ARIADNE_SUBSCRIPTIONS_GRAPHQL/api/resolvers/mutations/__init__.py | 3e7896f401920b8dd045d807212ec24b8353a75a |
from api import db
from uuid import uuid4
from ariadne import MutationType
from api.models import Post
from api.store import queues
mutation = MutationType()
@mutation.field("createPost")
async def create_post_resolver(obj, info, input):
try:
post = Post(postId=uuid4(), caption=input["caption"])
db.session.add(post)
db.session.commit()
for queue in queues:
queue.put(post)
return{
"error": None,
"post": post
}
except Exception as e:
return{
"error": {"message":str(e), "field": "unknown"},
"post": None
} | [((145, 159), 'ariadne.MutationType', 'MutationType', ([], {}), '()\n', (157, 159), False, 'from ariadne import MutationType\n'), ((320, 340), 'api.db.session.add', 'db.session.add', (['post'], {}), '(post)\n', (334, 340), False, 'from api import db\n'), ((349, 368), 'api.db.session.commit', 'db.session.commit', ([], {}), '()\n', (366, 368), False, 'from api import db\n'), ((277, 284), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (282, 284), False, 'from uuid import uuid4\n')] |
justengel/async_sched | async_sched/client/__init__.py | f980722d51d15025522b2265426b0188ff368418 | from async_sched.client import quit_server as module_quit
from async_sched.client import request_schedules as module_request
from async_sched.client import run_command as module_run
from async_sched.client import schedule_command as module_schedule
from async_sched.client import stop_schedule as module_stop
from async_sched.client import update_server as module_update
from .client import Client, \
quit_server_async, quit_server, update_server_async, update_server, request_schedules_async, \
request_schedules, run_command_async, run_command, schedule_command_async, schedule_command, \
stop_schedule_async, stop_schedule
# The other modules in this package exist for the "-m" python flag
# `python -m async_sched.client.request_schedules --host "12.0.0.1" --port 8000`
__all__ = ['Client',
'quit_server_async', 'quit_server', 'update_server_async', 'update_server', 'request_schedules_async',
'request_schedules', 'run_command_async', 'run_command', 'schedule_command_async', 'schedule_command',
'stop_schedule_async', 'stop_schedule',
'module_quit', 'module_request', 'module_run', 'module_schedule', 'module_stop', 'module_update']
| [] |
t4d-classes/angular_02212022 | full-stack-angular-ngrx/backend/src/core/interfaces/crud.py | 152dfa4b14ee84c1c34cef0b852349b250103e3b | import abc
from typing import TypeVar, Generic, List, Dict
T = TypeVar('T')
class CRUDInterface(Generic[T], metaclass=abc.ABCMeta):
@abc.abstractmethod
def all(self) -> List[T]:
pass
@abc.abstractmethod
def one_by_id(self, entity_id: int) -> T:
pass
@abc.abstractmethod
def append_one(self, entity: Dict) -> T:
pass
@abc.abstractmethod
def replace_one(self, entity: Dict) -> None:
pass
@abc.abstractmethod
def remove_one(self, entity_id: int) -> None:
pass
| [((64, 76), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (71, 76), False, 'from typing import TypeVar, Generic, List, Dict\n')] |
MrKriss/stonemason | package/tests/test_init_command.py | d78becc9168c2566b31b48c9a951e2823bc98362 |
from pathlib import Path
import pytest
import git
import json
from conftest import TEST_DIR
def test_init_with_project(tmpdir):
output_path = Path(tmpdir.strpath)
# Set arguments
args = f"init -o {output_path} {TEST_DIR}/example_templates/python_project"
from masonry import main
# Run from entry point
main.main(args=args)
# Check files were created
package_name = 'testpackage'
files = [
'.git/',
'.mason',
'MANIFEST.in',
'README',
'requirements.txt',
'setup.py',
'src/testpackage',
'src/testpackage/__init__.py',
'src/testpackage/main.py'
]
for f in files:
p = output_path / package_name / f
assert p.exists()
    # Check requirements were populated
target = "requests\nlogzero\n"
req_file = output_path / package_name / 'requirements.txt'
result = req_file.read_text()
assert result == target
# Check git repo was created and commits made
repo_dir = output_path / package_name
r = git.Repo(repo_dir.as_posix())
log = r.git.log(pretty='oneline').split('\n')
assert len(log) == 1
assert "Add 'package' template layer via stone mason." in log[0]
def test_init_with_project_and_template(tmpdir, no_prompts):
output_path = Path(tmpdir.strpath)
# Set arguments
args = f"init -o {output_path} {TEST_DIR}/example_templates/python_project/pytest"
from masonry import main
# Run from entry point
main.main(args=args)
# Check files were created
package_name = 'testpackage'
files = [
'.git/',
'.mason',
'MANIFEST.in',
'README',
'requirements.txt',
'setup.py',
'src/testpackage',
'src/testpackage/__init__.py',
'src/testpackage/main.py',
'tests/test_foo.py'
]
for f in files:
p = output_path / package_name / f
assert p.exists()
    # Check requirements were populated
target = "requests\nlogzero\npytest\npytest-cov\ncoverage\n"
req_file = output_path / package_name / 'requirements.txt'
result = req_file.read_text()
assert result == target
# Check MANIFEST was prefixed
target = "graft tests\ngraft src\n"
manifest_file = output_path / package_name / 'MANIFEST.in'
result = manifest_file.read_text()
assert result == target
# Check git repo was created and commits made
repo_dir = output_path / package_name
r = git.Repo(repo_dir.as_posix())
log = r.git.log(pretty='oneline').split('\n')
assert len(log) == 2
assert "Add 'pytest' template layer via stone mason." in log[0]
assert "Add 'package' template layer via stone mason." in log[1]
| [((151, 171), 'pathlib.Path', 'Path', (['tmpdir.strpath'], {}), '(tmpdir.strpath)\n', (155, 171), False, 'from pathlib import Path\n'), ((334, 354), 'masonry.main.main', 'main.main', ([], {'args': 'args'}), '(args=args)\n', (343, 354), False, 'from masonry import main\n'), ((1311, 1331), 'pathlib.Path', 'Path', (['tmpdir.strpath'], {}), '(tmpdir.strpath)\n', (1315, 1331), False, 'from pathlib import Path\n'), ((1501, 1521), 'masonry.main.main', 'main.main', ([], {'args': 'args'}), '(args=args)\n', (1510, 1521), False, 'from masonry import main\n')] |
Toure/openstack_mistral_wip | mistral/mistral/api/controllers/v2/service.py | 1c3d028cb7c918de74a3cb018c84d6c5ee42e3f1 | # Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
from pecan import rest
import six
import tooz.coordination
import wsmeext.pecan as wsme_pecan
from mistral.api import access_control as acl
from mistral.api.controllers.v2 import resources
# TODO(rakhmerov): invalid dependency, a REST controller must not depend on
# a launch script.
from mistral.cmd import launch
from mistral import context
from mistral import exceptions as exc
from mistral.service import coordination
from mistral.utils import rest_utils
LOG = logging.getLogger(__name__)
class ServicesController(rest.RestController):
@rest_utils.wrap_wsme_controller_exception
@wsme_pecan.wsexpose(resources.Services)
def get_all(self):
"""Return all services."""
acl.enforce('services:list', context.ctx())
LOG.info("Fetch services.")
if not cfg.CONF.coordination.backend_url:
raise exc.CoordinationException("Service API is not supported.")
service_coordinator = coordination.get_service_coordinator()
if not service_coordinator.is_active():
raise exc.CoordinationException(
"Failed to connect to coordination backend."
)
services_list = []
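        # Each launchable Mistral component registers its members under a "<name>_group" coordination group.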
service_group = ['%s_group' % i for i in launch.LAUNCH_OPTIONS]
try:
for group in service_group:
members = service_coordinator.get_members(group)
services_list.extend(
[resources.Service.from_dict(
{'type': group, 'name': member}) for member in members]
)
except tooz.coordination.ToozError as e:
# In the scenario of network interruption or manually shutdown
# connection shutdown, ToozError will be raised.
raise exc.CoordinationException(
"Failed to get service members from coordination backend. %s"
% six.text_type(e)
)
return resources.Services(services=services_list)
| [((1153, 1180), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1170, 1180), True, 'from oslo_log import log as logging\n'), ((1282, 1321), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['resources.Services'], {}), '(resources.Services)\n', (1301, 1321), True, 'import wsmeext.pecan as wsme_pecan\n'), ((1628, 1666), 'mistral.service.coordination.get_service_coordinator', 'coordination.get_service_coordinator', ([], {}), '()\n', (1664, 1666), False, 'from mistral.service import coordination\n'), ((2614, 2656), 'mistral.api.controllers.v2.resources.Services', 'resources.Services', ([], {'services': 'services_list'}), '(services=services_list)\n', (2632, 2656), False, 'from mistral.api.controllers.v2 import resources\n'), ((1417, 1430), 'mistral.context.ctx', 'context.ctx', ([], {}), '()\n', (1428, 1430), False, 'from mistral import context\n'), ((1538, 1596), 'mistral.exceptions.CoordinationException', 'exc.CoordinationException', (['"""Service API is not supported."""'], {}), "('Service API is not supported.')\n", (1563, 1596), True, 'from mistral import exceptions as exc\n'), ((1734, 1805), 'mistral.exceptions.CoordinationException', 'exc.CoordinationException', (['"""Failed to connect to coordination backend."""'], {}), "('Failed to connect to coordination backend.')\n", (1759, 1805), True, 'from mistral import exceptions as exc\n'), ((2114, 2174), 'mistral.api.controllers.v2.resources.Service.from_dict', 'resources.Service.from_dict', (["{'type': group, 'name': member}"], {}), "({'type': group, 'name': member})\n", (2141, 2174), False, 'from mistral.api.controllers.v2 import resources\n'), ((2567, 2583), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (2580, 2583), False, 'import six\n')] |
jtauber/greek-utils | setup.py | 1da19a5a784c4dac9d205ae1afdc5516ddcae9b4 | from setuptools import setup
setup(
name="greek-utils",
version="0.2",
description="various utilities for processing Ancient Greek",
license="MIT",
url="http://github.com/jtauber/greek-utils",
author="James Tauber",
author_email="[email protected]",
packages=["greekutils"],
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Text Processing",
"Topic :: Text Processing :: Linguistic",
"Topic :: Utilities",
],
)
| [((30, 665), 'setuptools.setup', 'setup', ([], {'name': '"""greek-utils"""', 'version': '"""0.2"""', 'description': '"""various utilities for processing Ancient Greek"""', 'license': '"""MIT"""', 'url': '"""http://github.com/jtauber/greek-utils"""', 'author': '"""James Tauber"""', 'author_email': '"""[email protected]"""', 'packages': "['greekutils']", 'classifiers': "['Development Status :: 3 - Alpha',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6', 'Topic :: Text Processing',\n 'Topic :: Text Processing :: Linguistic', 'Topic :: Utilities']"}), "(name='greek-utils', version='0.2', description=\n 'various utilities for processing Ancient Greek', license='MIT', url=\n 'http://github.com/jtauber/greek-utils', author='James Tauber',\n author_email='[email protected]', packages=['greekutils'],\n classifiers=['Development Status :: 3 - Alpha',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6', 'Topic :: Text Processing',\n 'Topic :: Text Processing :: Linguistic', 'Topic :: Utilities'])\n", (35, 665), False, 'from setuptools import setup\n')] |
jfilter/foia-bot | source/tweet.py | 11a9e31116dddfcd7bbd17730be3bdb9cec65e27 | """
tweet stuff in intervals
"""
import time
import datetime
import twitter
from markov_chains import german_text
from config import config_no, config_yes
MAX_TWEET_LENGTH = 280
greeting = ' Sehr geehrte/r Antragsteller/in.'
ending = ' MfG'
num_tweets = 3
class FoiaBot:
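    """Reply to mentions with Markov-generated text and post one status a day.

    The config dict is expected to carry the Twitter credentials, screen_name,
    model_path and hour_to_tweet keys used in __init__ below.
    """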
def __init__(self, config):
self.api = twitter.Api(consumer_key=config["consumer_key"],
consumer_secret=config["consumer_secret"],
access_token_key=config["access_token"],
access_token_secret=config["access_token_secret"], sleep_on_rate_limit=True)
self.screen_name = config["screen_name"]
self.model = german_text.setup_model(config["model_path"])
self.hour_to_tweet = config["hour_to_tweet"]
def get_favorites(self):
favorites = self.api.GetFavorites(
screen_name=self.screen_name, count=200)
print(favorites)
fav_set = set([f.id for f in favorites])
return fav_set
def get_status_to_work_on(self):
favorites = self.get_favorites()
status_list = self.api.GetMentions(count=200, trim_user=True,
contributor_details=False, include_entities=False)
for status in status_list:
print(status)
if status.id in favorites:
continue
if status.in_reply_to_status_id is not None:
continue
if not status.text.startswith('@' + self.screen_name):
continue
self.post_replies(status)
def post_replies(self, status):
tweets = self.create_tweets()
print(tweets)
success = True
reply_to_status_id = status.id
for tweet in tweets:
response = self.api.PostUpdate(tweet, in_reply_to_status_id=reply_to_status_id, auto_populate_reply_metadata=True,
exclude_reply_user_ids=False, trim_user=True, verify_status_length=False)
if response is None:
success = False
break
else:
reply_to_status_id = response.id
if success:
self.api.CreateFavorite(status=status)
def generate_sentence(self, tweet_text, chars_left, set_limit=False):
max_length = 150
if set_limit:
max_length = chars_left
new_sent = self.model.make_short_sentence(max_length, tries=100)
if new_sent is not None and len(new_sent) < chars_left:
tweet_text += ' ' + new_sent
return tweet_text
# https://stackoverflow.com/questions/7703865/going-from-twitter-date-to-python-datetime-date
def get_date_from_twitter_string(self, created_at):
x = time.strptime(created_at, '%a %b %d %H:%M:%S +0000 %Y')
return datetime.datetime.fromtimestamp(time.mktime(x))
def tweet_once_a_day(self):
now = datetime.datetime.now()
print(now.hour)
if now.hour == self.hour_to_tweet:
last_status_list = self.api.GetUserTimeline(screen_name=self.screen_name, count=1,
include_rts=False, trim_user=True, exclude_replies=True)
print(last_status_list)
if last_status_list is None:
return
if len(last_status_list) == 0:
self.post_single_tweet()
if len(last_status_list) == 1:
last_status = last_status_list[0]
created_at_date = self.get_date_from_twitter_string(
last_status.created_at)
time_diff = now - created_at_date
print('time_diff', time_diff)
time_diff_hours = time_diff.seconds / 3600 + time_diff.days * 24
print(time_diff_hours)
if time_diff_hours > 20: # something is broken with the date but whatever
self.post_single_tweet()
def post_single_tweet(self):
tweet_text = self.generate_single_tweet_text()
response = self.api.PostUpdate(tweet_text, verify_status_length=False)
def generate_single_tweet_text(self):
tweet_text = ""
while True:
chars_left = MAX_TWEET_LENGTH - len(tweet_text)
chars_left -= 1 # for the space
if chars_left < 20:
break
if chars_left < 70:
tweet_text = self.generate_sentence(
tweet_text, chars_left, True)
else:
tweet_text = self.generate_sentence(
tweet_text, chars_left)
return tweet_text
def create_tweets(self):
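        """Build a numbered thread of num_tweets tweets: the greeting is
        prepended to the first tweet and room is reserved for the ending
        on the last one."""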
tweets = []
for i in range(num_tweets):
tweet_text = f'{i + 1}/{num_tweets}'
if i == 0:
tweet_text += greeting
while True:
chars_left = MAX_TWEET_LENGTH - \
len(tweet_text) - 1 # because of space
# ensure space for the ending
if i + 1 == num_tweets:
chars_left -= len(ending)
if chars_left < 20:
# at ending
if i + 1 == num_tweets:
tweet_text += ending
break
if chars_left < 70:
tweet_text = self.generate_sentence(
tweet_text, chars_left, True)
else:
tweet_text = self.generate_sentence(
tweet_text, chars_left)
tweets.append(tweet_text)
return tweets
def run(self):
self.get_status_to_work_on()
def main():
print('main called')
no_bot = FoiaBot(config_no)
print('after setting up no bot')
yes_bot = FoiaBot(config_yes)
print('after setting up yes bot')
no_bot.run()
print('after running no bot')
yes_bot.run()
print('after running yes bot')
no_bot.tweet_once_a_day()
yes_bot.tweet_once_a_day()
print('after tweet once a day')
def lambda_handler(event, context):
print('handler called')
main()
print('handler about to finish')
# if __name__ == '__main__':
# main()
| [((330, 553), 'twitter.Api', 'twitter.Api', ([], {'consumer_key': "config['consumer_key']", 'consumer_secret': "config['consumer_secret']", 'access_token_key': "config['access_token']", 'access_token_secret': "config['access_token_secret']", 'sleep_on_rate_limit': '(True)'}), "(consumer_key=config['consumer_key'], consumer_secret=config[\n 'consumer_secret'], access_token_key=config['access_token'],\n access_token_secret=config['access_token_secret'], sleep_on_rate_limit=True\n )\n", (341, 553), False, 'import twitter\n'), ((703, 748), 'markov_chains.german_text.setup_model', 'german_text.setup_model', (["config['model_path']"], {}), "(config['model_path'])\n", (726, 748), False, 'from markov_chains import german_text\n'), ((2794, 2849), 'time.strptime', 'time.strptime', (['created_at', '"""%a %b %d %H:%M:%S +0000 %Y"""'], {}), "(created_at, '%a %b %d %H:%M:%S +0000 %Y')\n", (2807, 2849), False, 'import time\n'), ((2960, 2983), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2981, 2983), False, 'import datetime\n'), ((2897, 2911), 'time.mktime', 'time.mktime', (['x'], {}), '(x)\n', (2908, 2911), False, 'import time\n')] |
amitjoshi9627/Playong | account_processing.py | d54a8db05ae5035e122b8bc8d84c849f25483005 | from selenium.webdriver import Firefox
from selenium.webdriver.firefox.options import Options
import getpass
import time
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from utils import *
def login_user(browser, email='', password=''):
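    """Log in to the jiosaavn account, prompting for credentials when none are supplied."""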
print('Redirecting to login page..')
browser.find_element_by_xpath('//*[@id="login-btn"]').click()
    if email == '':
email, password = take_credentials()
browser.find_element_by_id("login_username").send_keys(email)
browser.find_element_by_id("login_password").send_keys(password)
complete_captcha(browser)
time.sleep(4)
browser.find_element_by_xpath('//*[@id="static-login-btn"]').click()
def logout_user(browser):
print("\nThank you for your using the program! Logging you out from jiosaavn...")
show_notificaton("Thank", "You", 0)
action = ActionChains(browser)
menu = browser.find_element_by_class_name('user-name')
action.move_to_element(menu).perform()
menu.click()
browser.find_element_by_xpath(
'/html/body/div[2]/div/div[2]/div[3]/div[3]/ol/li[4]/a').click()
time.sleep(2)
print('Logout..successful...')
def check_credentials(browser):
print('Checking credentials...Please wait..')
time.sleep(5)
try:
close_promo_ad(browser)
accept_cookies(browser)
success = True
except:
success = False
return success
def wrong_credentials_check(browser, counts=1):
    success = check_credentials(browser)
    while not success:
        print("\nWrong username/password entered. Please try again...\n")
email = input("Enter your email for jiosaavn account: ")
password = getpass.getpass(f"Enter password for {email}: ")
email_element = browser.find_element_by_id("login_username")
email_element.clear()
email_element.send_keys(email)
pswd_element = browser.find_element_by_id("login_password")
pswd_element.clear()
pswd_element.send_keys(password)
browser.find_element_by_xpath('//*[@id="static-login-btn"]').click()
success = check_credentials(browser)
counts += 1
if counts > 4:
print('Too many unsuccessful attempts done. Exiting...\n')
break
return counts
def go_without_login(browser):
return False
def take_credentials():
email = input("Enter your email for jiosaavn account: ")
password = getpass.getpass(f"Enter password for {email}: ")
return email, password
def prompt(browser):
# response = int(input("Press 1 to Log in with you account else Press 0: "))
# if response:
# login_user(browser)
# return True
# else:
# go_without_login(browser)
print("Due to some issues.. Login Option is not available currently! Sorry for the inconvenience caused.")
go_without_login(browser)
| [((645, 658), 'time.sleep', 'time.sleep', (['(4)'], {}), '(4)\n', (655, 658), False, 'import time\n'), ((899, 920), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['browser'], {}), '(browser)\n', (911, 920), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((1152, 1165), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1162, 1165), False, 'import time\n'), ((1289, 1302), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1299, 1302), False, 'import time\n'), ((2440, 2488), 'getpass.getpass', 'getpass.getpass', (['f"""Enter password for {email}: """'], {}), "(f'Enter password for {email}: ')\n", (2455, 2488), False, 'import getpass\n'), ((1688, 1736), 'getpass.getpass', 'getpass.getpass', (['f"""Enter password for {email}: """'], {}), "(f'Enter password for {email}: ')\n", (1703, 1736), False, 'import getpass\n')] |
RafaelSdm/Curso-de-Python | Mundo 3/teste.py | ae933ba80ee00ad5160bd5d05cf4b21007943fd4 | pessoas = {'nomes': "Rafael","sexo":"macho alfa","idade":19}
print(f"o {pessoas['nomes']} que se considera um {pessoas['sexo']} possui {pessoas['idade']}")
print(pessoas.keys())
print(pessoas.values())
print(pessoas.items())
for c in pessoas.keys():
print(c)
for c in pessoas.values():
print(c)
for c, j in pessoas.items():
print(f"o {c} pertence ao {j}")
del pessoas['sexo']
print(pessoas)
pessoas["sexo"] = "macho alfa"
print(pessoas)
print("outro codida daqui pra frente \n\n\n\n\n\n")
estado1 = {'estado': 'minas gerais', 'cidade':'capela nova' }
estado2 = {'estado':'rio de janeiro', 'cidade':"rossinha"}
brasil = []
brasil.append(estado1)
brasil.append(estado2)
print(brasil)
print(f"o brasil possui um estado chamado {brasil[0]['estado']} e a prorpia possui uma cidade chamada {brasil[0]['cidade']}")
print("-"*45)
es = {}
br = []
for c in range(0,3):
    es['estado'] = str(input("enter your state: "))
    es['cidade'] = str(input("enter your city: "))
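    # .copy() matters here: appending es itself would store three references
    # to the same dict, so every entry in br would end up identical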
br.append(es.copy())
for c in br:
for i,j in c.items():
print(f"o campo {i} tem valor {j}")
| [] |
mumbo-pro/cyrptography-algorithm | rsa-cipher/makeRsaKeys.py | 8e08c027c361f94c547f8b4ede723401399c93ed | # RSA Key Generator
# http://inventwithpython.com/hacking (BSD Licensed)

import random, sys, os, rabinMiller, cryptomath

The program imports the rabinMiller and cryptomath modules that we created in the last chapter, along with a few others.

Chapter 24 – Public Key Cryptography and the RSA Cipher

makeRsaKeys.py

def main():
    # create a public/private keypair with 1024 bit keys
    print('Making key files...')
    makeKeyFiles('al_sweigart', 1024)
    print('Key files made.')
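The excerpt calls makeKeyFiles() without showing its key-generation core. A minimal sketch of that core, assuming the book's rabinMiller.generateLargePrime(), cryptomath.gcd(), and cryptomath.findModInverse() helpers, might look like this:

def generateKey(keySize):
    # Step 1: create two large primes, p and q, and compute n = p * q.
    p = rabinMiller.generateLargePrime(keySize)
    q = rabinMiller.generateLargePrime(keySize)
    n = p * q
    # Step 2: pick e, relatively prime to (p - 1) * (q - 1).
    while True:
        e = random.randrange(2 ** (keySize - 1), 2 ** keySize)
        if cryptomath.gcd(e, (p - 1) * (q - 1)) == 1:
            break
    # Step 3: d, the modular inverse of e, becomes the private exponent.
    d = cryptomath.findModInverse(e, (p - 1) * (q - 1))
    return ((n, e), (n, d))  # (publicKey, privateKey)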
| [] |
Harvard-Neutrino/phys145 | atlas-outreach-data-tools-framework-1.1/Configurations/PlotConf_TTbarAnalysis.py | c3dc5788128fa2a7db0af0c796cf3afd957bf0ed | config = {
"Luminosity": 1000,
"InputDirectory": "results",
"Histograms" : {
"WtMass" : {},
"etmiss" : {},
"lep_n" : {},
"lep_pt" : {},
"lep_eta" : {},
"lep_E" : {},
"lep_phi" : {"y_margin" : 0.6},
"lep_charge" : {"y_margin" : 0.6},
"lep_type" : {"y_margin" : 0.5},
"lep_ptconerel30" : {},
"lep_etconerel20" : {},
"lep_d0" : {},
"lep_z0" : {},
"n_jets" : {},
"jet_pt" : {},
"jet_m" : {},
"jet_jvf" : {"y_margin" : 0.4},
"jet_eta" : {},
"jet_MV1" : {"y_margin" : 0.3},
"vxp_z" : {},
"pvxp_n" : {},
},
"Paintables": {
"Stack": {
"Order" : ["Diboson", "DrellYan", "W", "Z", "stop", "ttbar"],
"Processes" : {
"Diboson" : {
"Color" : "#fa7921",
"Contributions" : ["WW", "WZ", "ZZ"]},
"DrellYan": {
"Color" : "#5bc0eb",
"Contributions" : ["DYeeM08to15", "DYeeM15to40", "DYmumuM08to15", "DYmumuM15to40", "DYtautauM08to15", "DYtautauM15to40"]},
"W": {
"Color" : "#e55934",
"Contributions" : ["WenuJetsBVeto", "WenuWithB", "WenuNoJetsBVeto", "WmunuJetsBVeto", "WmunuWithB", "WmunuNoJetsBVeto", "WtaunuJetsBVeto", "WtaunuWithB", "WtaunuNoJetsBVeto"]},
"Z": {
"Color" : "#086788",
"Contributions" : ["Zee", "Zmumu", "Ztautau"]},
"stop": {
"Color" : "#fde74c",
"Contributions" : ["stop_tchan_top", "stop_tchan_antitop", "stop_schan", "stop_wtchan"]},
"ttbar": {
"Color" : "#9bc53d",
"Contributions" : ["ttbar_lep", "ttbar_had"]}
}
},
"data" : {
"Contributions": ["data_Egamma", "data_Muons"]}
},
"Depictions": {
"Order": ["Main", "Data/MC"],
"Definitions" : {
"Data/MC": {
"type" : "Agreement",
"Paintables" : ["data", "Stack"]
},
"Main": {
"type" : "Main",
"Paintables": ["Stack", "data"]
},
}
},
}
| [] |
OMGhozlan/deobshell | modules/optimizations/dead_codes.py | 701c8a09f9258442255013605185ed0a7fbac704 | # coding=utf-8
from ..logger import log_debug
from ..utils import parent_map, replace_node, is_prefixed_var, get_used_vars
def opt_unused_variable(ast):
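    """Remove assignment statements whose target variable is never read anywhere in the AST."""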
parents = parent_map(ast)
used_vars = get_used_vars(ast)
for node in ast.iter():
if node.tag in ["AssignmentStatementAst"]:
subnodes = list(node)
if subnodes[0].tag == "VariableExpressionAst":
if subnodes[0].attrib["VariablePath"].lower() not in used_vars:
if not is_prefixed_var(subnodes[0].attrib["VariablePath"]):
log_debug("Remove assignement of unused variable %s" % (subnodes[0].attrib["VariablePath"]))
parents[node].remove(node)
return True
return False
def opt_remove_uninitialised_variable_usage(ast):
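    """Replace a binary expression by its other operand when one side is a variable that is never assigned."""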
assigned = set()
for node in ast.iter():
if node.tag in ["AssignmentStatementAst"]:
subnodes = list(node)
if subnodes[0].tag == "VariableExpressionAst":
assigned.add(subnodes[0].attrib["VariablePath"].lower())
if node.tag in ["BinaryExpressionAst"]:
subnodes = list(node)
if subnodes[0].tag == "VariableExpressionAst":
variable = subnodes[0]
other = subnodes[1]
elif subnodes[1].tag == "VariableExpressionAst":
variable = subnodes[1]
other = subnodes[0]
else:
variable, other = None, None
if variable is not None and other is not None:
if variable.attrib["VariablePath"].lower() not in assigned:
if not is_prefixed_var(variable.attrib["VariablePath"]):
log_debug("Remove unassigned variable use '%s'" % (variable.attrib["VariablePath"]))
replace_node(ast, node, other)
return True
return False
| [] |
RijuDasgupta9116/LintCode | Convert Integer A to Integer B.py | 4629a3857b2c57418b86a3b3a7180ecb15e763e3 | """
Determine the number of bits required to convert integer A to integer B
Example
Given n = 31, m = 14,return 2
(31)10=(11111)2
(14)10=(01110)2
"""
__author__ = 'Danyang'
class Solution:
def bitSwapRequired(self, a, b):
"""
:param a:
:param b:
:return: int
"""
a = self.to_bin(a)
b = self.to_bin(b)
diff = len(a)-len(b)
ret = 0
if diff<0:
a, b = b, a
diff *= -1
b = "0"*diff+b
        for i in range(len(b)):
if a[i]!=b[i]:
ret += 1
return ret
def to_bin(self, n):
"""
2's complement
32-bit
:param n:
:return:
"""
"""
:param n:
:return:
"""
a = abs(n)
lst = []
        while a > 0:
            lst.append(a % 2)
            a //= 2  # integer division keeps this correct on Python 3 as well
# 2's complement
if n>=0:
lst.extend([0]*(32-len(lst)))
else:
pivot = -1
            for i in range(len(lst)):
if pivot==-1 and lst[i]==1:
pivot = i
continue
if pivot!=-1:
lst[i] ^= 1
lst.extend([1]*(32-len(lst)))
return "".join(map(str, reversed(lst)))
if __name__=="__main__":
assert Solution().bitSwapRequired(1, -1)==31
assert Solution().bitSwapRequired(31, 14)==2
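    # For comparison, a compact alternative (not the author's approach): XOR the inputs
    # and count the set bits in the low 32 bits; the mask handles negative numbers.
    assert bin((1 ^ -1) & 0xFFFFFFFF).count('1') == 31
    assert bin((31 ^ 14) & 0xFFFFFFFF).count('1') == 2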
| [] |
myriadrf/pyLMS7002M | examples/basic/findQSpark.py | b866deea1f05dba44c9ed1a1a4666352b811b66b | from pyLMS7002M import *
print("Searching for QSpark...")
try:
QSpark = QSpark()
except:
print("QSpark not found")
exit(1)
print("\QSpark info:")
QSpark.printInfo() # print the QSpark board info
# QSpark.LMS7002_Reset() # reset the LMS7002M
lms7002 = QSpark.getLMS7002() # get the LMS7002M object
ver, rev, mask = lms7002.chipInfo # get the chip info
print("\nLMS7002M info:")
print("VER : "+str(ver))
print("REV : "+str(rev))
print("MASK : "+str(mask))
| [] |
paradxum/django-macaddress | macaddress/__init__.py | c223dc8c79555d2265789c4d13667036cfbd7bd8 | from django.conf import settings
from netaddr import mac_unix, mac_eui48
import importlib
import warnings
class mac_linux(mac_unix):
"""MAC format with zero-padded all upper-case hex and colon separated"""
word_fmt = '%.2X'
def default_dialect(eui_obj=None):
    # Check whether a default dialect class has been specified in settings as a
    # 'module.dialect_cls' string, and use importlib and getattr to retrieve the dialect class.
    # 'module' is the module path and 'dialect_cls' is the class name of the custom dialect. The
    # dialect must either be defined in or imported by the module's __init__.py if the module is
    # a package.
from .fields import MACAddressField # Remove import at v1.4
if hasattr(settings, 'MACADDRESS_DEFAULT_DIALECT') and not MACAddressField.dialect:
module, dialect_cls = settings.MACADDRESS_DEFAULT_DIALECT.split('.')
dialect = getattr(importlib.import_module(module), dialect_cls, mac_linux)
return dialect
else:
if MACAddressField.dialect: # Remove this "if" statement at v1.4
warnings.warn(
"The set_dialect class method on MACAddressField has been deprecated, in favor of the default_dialect "
"utility function and settings.MACADDRESS_DEFAULT_DIALECT. See macaddress.__init__.py source or the "
"project README for more information.",
DeprecationWarning,
)
return MACAddressField.dialect
if eui_obj:
return eui_obj.dialect
else:
return mac_linux
def format_mac(eui_obj, dialect):
    # Format an EUI instance as a string using the supplied dialect class. Custom dialects may be
    # passed directly or as a 'module.dialect_cls' string, where 'module' is the module path and
    # 'dialect_cls' is the class name of the custom dialect. The dialect must either be defined in
    # or imported by the module's __init__.py if the module is a package.
if not isinstance(dialect, mac_eui48):
if isinstance(dialect, str):
module, dialect_cls = dialect.split('.')
dialect = getattr(importlib.import_module(module), dialect_cls)
eui_obj.dialect = dialect
return str(eui_obj)
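# Illustrative usage (a sketch; assumes netaddr's EUI class):
#   from netaddr import EUI
#   format_mac(EUI('00-1b-77-49-54-fd'), mac_linux)  # -> '00:1B:77:49:54:FD'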
from pkg_resources import get_distribution, DistributionNotFound
import os.path
try:
_dist = get_distribution('django-macaddress')
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
VERSION = __version__ # synonym
| [((2348, 2385), 'pkg_resources.get_distribution', 'get_distribution', (['"""django-macaddress"""'], {}), "('django-macaddress')\n", (2364, 2385), False, 'from pkg_resources import get_distribution, DistributionNotFound\n'), ((825, 871), 'django.conf.settings.MACADDRESS_DEFAULT_DIALECT.split', 'settings.MACADDRESS_DEFAULT_DIALECT.split', (['"""."""'], {}), "('.')\n", (866, 871), False, 'from django.conf import settings\n'), ((898, 929), 'importlib.import_module', 'importlib.import_module', (['module'], {}), '(module)\n', (921, 929), False, 'import importlib\n'), ((1073, 1356), 'warnings.warn', 'warnings.warn', (['"""The set_dialect class method on MACAddressField has been deprecated, in favor of the default_dialect utility function and settings.MACADDRESS_DEFAULT_DIALECT. See macaddress.__init__.py source or the project README for more information."""', 'DeprecationWarning'], {}), "(\n 'The set_dialect class method on MACAddressField has been deprecated, in favor of the default_dialect utility function and settings.MACADDRESS_DEFAULT_DIALECT. See macaddress.__init__.py source or the project README for more information.'\n , DeprecationWarning)\n", (1086, 1356), False, 'import warnings\n'), ((2148, 2179), 'importlib.import_module', 'importlib.import_module', (['module'], {}), '(module)\n', (2171, 2179), False, 'import importlib\n')] |
dfstrauss/textmagic-sms-api-python | textmagic/test/message_status_tests.py | 9ab05b461861ac53da651588bef6b0b504653ecd | import time
from textmagic.test import ONE_TEST_NUMBER
from textmagic.test import THREE_TEST_NUMBERS
from textmagic.test import TextMagicTestsBase
from textmagic.test import LiveUnsafeTests
class MessageStatusTestsBase(TextMagicTestsBase):
def sendAndCheckStatusTo(self, numbers):
message = 'sdfqwersdfgfdg'
response = self.client.send(message, numbers)
ids = response['message_id'].keys()
self.getStatus(ids, message)
return (ids, message)
def getStatus(self, ids, message):
response = self.client.message_status(ids)
self.assertKeysEqualExpectedKeys(response, ids)
statuses = []
for id in ids:
status = response[id]
expected_keys = ['status', 'text', 'reply_number', 'created_time']
if (len(status) == 4):
pass
elif (len(status) == 6):
expected_keys.append('completed_time')
expected_keys.append('credits_cost')
else:
self.fail("Unexpected number of return parameters: %s" % len(status))
self.assertKeysEqualExpectedKeys(status, expected_keys)
self.assertEquals(status['text'], message)
self.assertEquals(status['reply_number'], '447624800500')
self.assertTrue(isinstance(status['created_time'], time.struct_time))
if (len(status) == 6):
self.assertTrue(isinstance(status['completed_time'], time.struct_time))
self.assertTrue(isinstance(status['credits_cost'], float))
statuses.append(status['status'])
return statuses
class MessageStatusTests(MessageStatusTestsBase):
def testMessageStatusWhenSendingOneMessage(self):
self.sendAndCheckStatusTo(ONE_TEST_NUMBER)
def testMessageStatusWhenSendingThreeMessages(self):
self.sendAndCheckStatusTo(THREE_TEST_NUMBERS)
class LiveUnsafeMessageStatusTests(MessageStatusTestsBase, LiveUnsafeTests):
"""
    This test is live-unsafe because it sends a message to a real
    telephone number. It keeps asking for the message status until it
    receives a "delivered" response.
"""
def testMessageStatusWhenPhoneIsSwitchedOff(self):
ids, message = self.sendAndCheckStatusTo(['27991114444'])
while True:
s, = self.getStatus(ids, message)
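            # 'd' is the delivered status code; keep polling until the message arrives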
if (s == 'd'):
break
| [] |