code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
---|---|---|
import abc
from six import with_metaclass
class Loop(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def name(self):
assert False
@abc.abstractmethod
def close(self):
assert False
@abc.abstractmethod
def open(self):
assert False
@abc.abstractmethod
def isClosed(self):
assert False
@abc.abstractmethod
def performOnePass(self):
assert False
@abc.abstractmethod
def getConvergenceStepCount(self):
assert False
@abc.abstractmethod
def hasConverged(self):
assert False
class LoopException(Exception):
def __init__(self, message):
Exception.__init__(self, message)
|
[
"six.with_metaclass"
] |
[((55, 90), 'six.with_metaclass', 'with_metaclass', (['abc.ABCMeta', 'object'], {}), '(abc.ABCMeta, object)\n', (69, 90), False, 'from six import with_metaclass\n')]
|
from typing import Callable, List, Union
import pandas_flavor as pf
import pandas as pd
from janitor.utils import deprecated_alias
@pf.register_dataframe_method
@deprecated_alias(new_column="new_column_name", agg_column="agg_column_name")
def groupby_agg(
df: pd.DataFrame,
by: Union[List, Callable, str],
new_column_name: str,
agg_column_name: str,
agg: Union[Callable, str],
dropna: bool = True,
) -> pd.DataFrame:
"""Shortcut for assigning a groupby-transform to a new column.
This method does not mutate the original DataFrame.
Intended to be the method-chaining equivalent of:
```python
df = df.assign(...=df.groupby(...)[...].transform(...))
```
Example: Basic usage.
>>> import pandas as pd
>>> import janitor
>>> df = pd.DataFrame({
... "item": ["shoe", "shoe", "bag", "shoe", "bag"],
... "quantity": [100, 120, 75, 200, 25],
... })
>>> df.groupby_agg(
... by="item",
... agg="mean",
... agg_column_name="quantity",
... new_column_name="avg_quantity",
... )
item quantity avg_quantity
0 shoe 100 140.0
1 shoe 120 140.0
2 bag 75 50.0
3 shoe 200 140.0
4 bag 25 50.0
Example: Set `dropna=False` to compute the aggregation, treating the null
values in the `by` column as an isolated "group".
>>> import pandas as pd
>>> import janitor
>>> df = pd.DataFrame({
... "x": ["a", "a", None, "b"], "y": [9, 9, 9, 9],
... })
>>> df.groupby_agg(
... by="x",
... agg="count",
... agg_column_name="y",
... new_column_name="y_count",
... dropna=False,
... )
x y y_count
0 a 9 2
1 a 9 2
2 None 9 1
3 b 9 1
:param df: A pandas DataFrame.
:param by: Column(s) to groupby on, will be passed into `DataFrame.groupby`.
:param new_column_name: Name of the aggregation output column.
:param agg_column_name: Name of the column to aggregate over.
:param agg: How to aggregate.
:param dropna: Whether or not to include null values, if present in the
`by` column(s). Default is True (null values in `by` are assigned NaN in
the new column).
:returns: A pandas DataFrame.
""" # noqa: E501
return df.assign(
**{
new_column_name: df.groupby(by, dropna=dropna)[
agg_column_name
].transform(agg),
}
)
|
[
"janitor.utils.deprecated_alias"
] |
[((165, 241), 'janitor.utils.deprecated_alias', 'deprecated_alias', ([], {'new_column': '"""new_column_name"""', 'agg_column': '"""agg_column_name"""'}), "(new_column='new_column_name', agg_column='agg_column_name')\n", (181, 241), False, 'from janitor.utils import deprecated_alias\n')]
|
import pytesseract
import requests
from PIL import Image
from PIL import ImageFilter
import io
def process_image(url):
image= _get_image(url)
image.filter(ImageFilter.SHARPEN)
return pytesseract.image_to_string(image)
def _get_image(url):
pattern_string = requests.get(url).content()
return Image.open(io.StringIO(pattern_string))
|
[
"requests.get",
"io.StringIO",
"pytesseract.image_to_string"
] |
[((200, 234), 'pytesseract.image_to_string', 'pytesseract.image_to_string', (['image'], {}), '(image)\n', (227, 234), False, 'import pytesseract\n'), ((328, 355), 'io.StringIO', 'io.StringIO', (['pattern_string'], {}), '(pattern_string)\n', (339, 355), False, 'import io\n'), ((278, 295), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (290, 295), False, 'import requests\n')]
|
"""
Created on 24 Mar 2021
@author: <NAME> (<EMAIL>)
Modem
-----
modem.generic.device-identifier : 3f07553c31ce11715037ac16c24ceddcfb6f7a0b
modem.generic.manufacturer : QUALCOMM INCORPORATED
modem.generic.model : QUECTEL Mobile Broadband Module
modem.generic.revision : EC21EFAR06A01M4G
...
modem.3gpp.imei : 867962041294151
example JSON:
{"id": "3f07553c31ce11715037ac16c24ceddcfb6f7a0b", "imei": "867962041294151", "mfr": "QUALCOMM INCORPORATED",
"model": "QUECTEL Mobile Broadband Module", "rev": "EC21EFAR06A01M4G"}
ModemConnection
---------------
modem.generic.state : connected
modem.generic.state-failed-reason : --
modem.generic.signal-quality.value : 67
modem.generic.signal-quality.recent : yes
example JSON:
{"state": "connected", "signal": {"quality": 67, "recent": true}}
SIM (Subscriber Identity Module)
--------------------------------
sim.dbus-path : /org/freedesktop/ModemManager1/SIM/0
sim.properties.imsi : 234104886708667
sim.properties.iccid : 8944110068256270054
sim.properties.operator-code : 23410
sim.properties.operator-name : giffgaff
sim.properties.emergency-numbers.length : 2
sim.properties.emergency-numbers.value[1] : 999
sim.properties.emergency-numbers.value[2] : 00112
example JSON:
{"imsi": "123", "iccid": "456", "operator-code": "789 012", "operator-name": "<NAME>"}
"""
import re
from collections import OrderedDict
from scs_core.data.datum import Datum
from scs_core.data.json import JSONable
# --------------------------------------------------------------------------------------------------------------------
class ModemList(object):
"""
modem-list.value[1] : /org/freedesktop/ModemManager1/Modem/0
"""
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_mmcli(cls, lines):
modems = []
for line in lines:
match = re.match(r'modem-list.value\[[\d]+]\s+:\s+([\S]+)', line)
if match:
modems.append(match.groups()[0])
return cls(modems)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, modems):
"""
Constructor
"""
self.__modems = modems # array of string
def __len__(self):
return len(self.__modems)
# ----------------------------------------------------------------------------------------------------------------
def modem(self, index):
return self.__modems[index]
def number(self, index):
pieces = self.modem(index).split('/')
return pieces[-1]
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "ModemList:{modems:%s}" % self.__modems
# --------------------------------------------------------------------------------------------------------------------
class Modem(JSONable):
"""
modem.generic.device-identifier : 3f07553c31ce11715037ac16c24ceddcfb6f7a0b
modem.generic.manufacturer : QUALCOMM INCORPORATED
modem.generic.model : QUECTEL Mobile Broadband Module
modem.generic.revision : EC21EFAR06A01M4G
...
modem.3gpp.imei : 867962041294151
"""
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_jdict(cls, jdict):
if not jdict:
return None
id = jdict.get('id')
imei = jdict.get('imei')
mfr = jdict.get('mfr')
model = jdict.get('model')
rev = jdict.get('rev')
return cls(id, imei, mfr, model, rev)
@classmethod
def construct_from_mmcli(cls, lines):
id = None
imei = None
mfr = None
model = None
rev = None
for line in lines:
match = re.match(r'modem\.generic\.device-identifier\s+:\s+(\S+)', line)
if match:
id = match.groups()[0]
continue
match = re.match(r'.*\.imei\s+:\s+(\d+)', line)
if match:
imei = match.groups()[0]
continue
match = re.match(r'modem\.generic\.manufacturer\s+:\s+(\S.*\S)', line)
if match:
mfr = match.groups()[0]
continue
match = re.match(r'modem\.generic\.model\s+:\s+(\S.*\S)', line)
if match:
model = match.groups()[0]
continue
match = re.match(r'modem\.generic\.revision\s+:\s+(\S+)', line)
if match:
rev = match.groups()[0]
continue
return cls(id, imei, mfr, model, rev)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, id, imei, mfr, model, rev):
"""
Constructor
"""
self.__id = id # string
self.__imei = imei # string
self.__mfr = mfr # string
self.__model = model # string
self.__rev = rev # string
def __eq__(self, other):
try:
return self.id == other.id and self.imei == other.imei and self.mfr == other.mfr and \
self.model == other.model and self.rev == other.rev
except (TypeError, AttributeError):
return False
# ----------------------------------------------------------------------------------------------------------------
@property
def id(self):
return self.__id
@property
def imei(self):
return self.__imei
@property
def mfr(self):
return self.__mfr
@property
def model(self):
return self.__model
@property
def rev(self):
return self.__rev
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
jdict['id'] = self.id
jdict['imei'] = self.imei
jdict['mfr'] = self.mfr
jdict['model'] = self.model
jdict['rev'] = self.rev
return jdict
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "Modem:{id:%s, imei:%s, mfr:%s, model:%s, rev:%s}" % \
(self.id, self.imei, self.mfr, self.model, self.rev)
# --------------------------------------------------------------------------------------------------------------------
class ModemConnection(JSONable):
"""
modem.generic.state : connected
modem.generic.state-failed-reason : --
modem.generic.signal-quality.value : 67
modem.generic.signal-quality.recent : yes
"""
UNAVAILABLE_STATE = "unavailable"
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_jdict(cls, jdict):
if not jdict:
return None
state = jdict.get('state')
failure = jdict.get('failure')
signal = Signal.construct_from_jdict(jdict.get('signal'))
return cls(state, failure, signal)
@classmethod
def construct_from_mmcli(cls, lines):
state = None
failure = None
quality = None
recent = None
for line in lines:
match = re.match(r'modem\.generic\.state\s+:\s+([a-z]+)', line)
if match:
state = match.groups()[0]
continue
match = re.match(r'modem\.generic\.state-failed-reason\s+:\s+(\S.*\S)', line)
if match:
reported_failure = match.groups()[0]
failure = None if reported_failure == '--' else reported_failure
continue
match = re.match(r'modem\.generic\.signal-quality\.value\s+:\s+([\d]+)', line)
if match:
quality = match.groups()[0]
continue
match = re.match(r'modem\.generic\.signal-quality\.recent\s+:\s+([a-z]+)', line)
if match:
recent = match.groups()[0] == 'yes'
continue
return cls(state, failure, Signal(quality, recent))
@classmethod
def null_datum(cls):
return cls(cls.UNAVAILABLE_STATE, None, Signal.null_datum())
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, state, failure, signal):
"""
Constructor
"""
self.__state = state # string
self.__failure = failure # string
self.__signal = signal # Signal
# ----------------------------------------------------------------------------------------------------------------
@property
def state(self):
return self.__state
@property
def failure(self):
return self.__failure
@property
def signal(self):
return self.__signal
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
jdict['state'] = self.state
if self.failure is not None:
jdict['failure'] = self.failure
jdict['signal'] = self.signal
return jdict
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "ModemConnection:{state:%s, failure:%s, signal:%s}" % (self.state, self.failure, self.signal)
# --------------------------------------------------------------------------------------------------------------------
class Signal(JSONable):
"""
modem.generic.signal-quality.value : 67
modem.generic.signal-quality.recent : yes
"""
__SIGNIFICANT_QUALITY_DIFFERENCE = 10
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_jdict(cls, jdict):
if not jdict:
return None
quality = jdict.get('quality')
recent = jdict.get('recent')
return cls(quality, recent)
@classmethod
def null_datum(cls):
return cls(None, None)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, quality, recent):
"""
Constructor
"""
self.__quality = Datum.int(quality) # int
self.__recent = recent # bool
# ----------------------------------------------------------------------------------------------------------------
@property
def quality(self):
return self.__quality
@property
def recent(self):
return self.__recent
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
jdict['quality'] = self.quality
jdict['recent'] = self.recent
return jdict
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "Signal:{quality:%s, recent:%s}" % (self.quality, self.recent)
# --------------------------------------------------------------------------------------------------------------------
class SIMList(object):
"""
modem.generic.sim : /org/freedesktop/ModemManager1/SIM/0
"""
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_mmcli(cls, lines):
sims = []
for line in lines:
match = re.match(r'modem\.generic\.sim\s+:\s+([\S]+)', line)
if match:
sims.append(match.groups()[0])
return cls(sims)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, sims):
"""
Constructor
"""
self.__sims = sims # array of string
def __len__(self):
return len(self.__sims)
# ----------------------------------------------------------------------------------------------------------------
def sim(self, index):
return self.__sims[index]
def number(self, index):
pieces = self.sim(index).split('/')
return pieces[-1]
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "SIMList:{sims:%s}" % self.__sims
# --------------------------------------------------------------------------------------------------------------------
class SIM(JSONable):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_jdict(cls, jdict):
if not jdict:
return None
imsi = jdict.get('imsi')
iccid = jdict.get('iccid')
operator_code = jdict.get('operator-code')
operator_name = jdict.get('operator-name')
return cls(imsi, iccid, operator_code, operator_name)
@classmethod
def construct_from_mmcli(cls, lines):
imsi = None
iccid = None
operator_code = None
operator_name = None
for line in lines:
match = re.match(r'sim\.properties\.imsi\s+:\s+([\d]+)', line)
if match:
imsi = match.groups()[0]
continue
match = re.match(r'sim\.properties\.iccid\s+:\s+([\d]+)', line)
if match:
iccid = match.groups()[0]
continue
match = re.match(r'sim\.properties\.operator-code\s+:\s+([\d]+)', line)
if match:
operator_code = match.groups()[0]
continue
match = re.match(r'sim\.properties\.operator-name\s+:\s+(\S.*)', line)
if match:
reported_name = match.groups()[0].strip()
operator_name = None if reported_name == '--' else reported_name
return cls(imsi, iccid, operator_code, operator_name)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, imsi, iccid, operator_code, operator_name):
"""
Constructor
"""
self.__imsi = imsi # numeric string
self.__iccid = iccid # numeric string
self.__operator_code = operator_code # string
self.__operator_name = operator_name # string
def __eq__(self, other):
try:
return self.imsi == other.imsi and self.iccid == other.iccid and \
self.operator_code == other.operator_code and self.operator_name == other.operator_name
except (TypeError, AttributeError):
return False
# ----------------------------------------------------------------------------------------------------------------
@property
def imsi(self):
return self.__imsi
@property
def iccid(self):
return self.__iccid
@property
def operator_code(self):
return self.__operator_code
@property
def operator_name(self):
return self.__operator_name
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
jdict['imsi'] = str(self.imsi)
jdict['iccid'] = str(self.iccid)
jdict['operator-code'] = self.operator_code
jdict['operator-name'] = self.operator_name
return jdict
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "SIM:{imsi:%s, iccid:%s, operator_code:%s, operator_name:%s}" % \
(self.imsi, self.iccid, self.operator_code, self.operator_name)
|
[
"collections.OrderedDict",
"re.match",
"scs_core.data.datum.Datum.int"
] |
[((6658, 6671), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (6669, 6671), False, 'from collections import OrderedDict\n'), ((10057, 10070), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (10068, 10070), False, 'from collections import OrderedDict\n'), ((11482, 11500), 'scs_core.data.datum.Datum.int', 'Datum.int', (['quality'], {}), '(quality)\n', (11491, 11500), False, 'from scs_core.data.datum import Datum\n'), ((12007, 12020), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (12018, 12020), False, 'from collections import OrderedDict\n'), ((16844, 16857), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (16855, 16857), False, 'from collections import OrderedDict\n'), ((2209, 2270), 're.match', 're.match', (['"""modem-list.value\\\\[[\\\\d]+]\\\\s+:\\\\s+([\\\\S]+)"""', 'line'], {}), "('modem-list.value\\\\[[\\\\d]+]\\\\s+:\\\\s+([\\\\S]+)', line)\n", (2217, 2270), False, 'import re\n'), ((4414, 4482), 're.match', 're.match', (['"""modem\\\\.generic\\\\.device-identifier\\\\s+:\\\\s+(\\\\S+)"""', 'line'], {}), "('modem\\\\.generic\\\\.device-identifier\\\\s+:\\\\s+(\\\\S+)', line)\n", (4422, 4482), False, 'import re\n'), ((4586, 4628), 're.match', 're.match', (['""".*\\\\.imei\\\\s+:\\\\s+(\\\\d+)"""', 'line'], {}), "('.*\\\\.imei\\\\s+:\\\\s+(\\\\d+)', line)\n", (4594, 4628), False, 'import re\n'), ((4735, 4802), 're.match', 're.match', (['"""modem\\\\.generic\\\\.manufacturer\\\\s+:\\\\s+(\\\\S.*\\\\S)"""', 'line'], {}), "('modem\\\\.generic\\\\.manufacturer\\\\s+:\\\\s+(\\\\S.*\\\\S)', line)\n", (4743, 4802), False, 'import re\n'), ((4906, 4966), 're.match', 're.match', (['"""modem\\\\.generic\\\\.model\\\\s+:\\\\s+(\\\\S.*\\\\S)"""', 'line'], {}), "('modem\\\\.generic\\\\.model\\\\s+:\\\\s+(\\\\S.*\\\\S)', line)\n", (4914, 4966), False, 'import re\n'), ((5072, 5131), 're.match', 're.match', (['"""modem\\\\.generic\\\\.revision\\\\s+:\\\\s+(\\\\S+)"""', 'line'], {}), "('modem\\\\.generic\\\\.revision\\\\s+:\\\\s+(\\\\S+)', line)\n", (5080, 5131), False, 'import re\n'), ((8199, 8257), 're.match', 're.match', (['"""modem\\\\.generic\\\\.state\\\\s+:\\\\s+([a-z]+)"""', 'line'], {}), "('modem\\\\.generic\\\\.state\\\\s+:\\\\s+([a-z]+)', line)\n", (8207, 8257), False, 'import re\n'), ((8365, 8439), 're.match', 're.match', (['"""modem\\\\.generic\\\\.state-failed-reason\\\\s+:\\\\s+(\\\\S.*\\\\S)"""', 'line'], {}), "('modem\\\\.generic\\\\.state-failed-reason\\\\s+:\\\\s+(\\\\S.*\\\\S)', line)\n", (8373, 8439), False, 'import re\n'), ((8637, 8712), 're.match', 're.match', (['"""modem\\\\.generic\\\\.signal-quality\\\\.value\\\\s+:\\\\s+([\\\\d]+)"""', 'line'], {}), "('modem\\\\.generic\\\\.signal-quality\\\\.value\\\\s+:\\\\s+([\\\\d]+)', line)\n", (8645, 8712), False, 'import re\n'), ((8820, 8896), 're.match', 're.match', (['"""modem\\\\.generic\\\\.signal-quality\\\\.recent\\\\s+:\\\\s+([a-z]+)"""', 'line'], {}), "('modem\\\\.generic\\\\.signal-quality\\\\.recent\\\\s+:\\\\s+([a-z]+)', line)\n", (8828, 8896), False, 'import re\n'), ((12834, 12890), 're.match', 're.match', (['"""modem\\\\.generic\\\\.sim\\\\s+:\\\\s+([\\\\S]+)"""', 'line'], {}), "('modem\\\\.generic\\\\.sim\\\\s+:\\\\s+([\\\\S]+)', line)\n", (12842, 12890), False, 'import re\n'), ((14649, 14707), 're.match', 're.match', (['"""sim\\\\.properties\\\\.imsi\\\\s+:\\\\s+([\\\\d]+)"""', 'line'], {}), "('sim\\\\.properties\\\\.imsi\\\\s+:\\\\s+([\\\\d]+)', line)\n", (14657, 14707), False, 'import re\n'), ((14813, 14872), 're.match', 're.match', (['"""sim\\\\.properties\\\\.iccid\\\\s+:\\\\s+([\\\\d]+)"""', 'line'], {}), "('sim\\\\.properties\\\\.iccid\\\\s+:\\\\s+([\\\\d]+)', line)\n", (14821, 14872), False, 'import re\n'), ((14979, 15046), 're.match', 're.match', (['"""sim\\\\.properties\\\\.operator-code\\\\s+:\\\\s+([\\\\d]+)"""', 'line'], {}), "('sim\\\\.properties\\\\.operator-code\\\\s+:\\\\s+([\\\\d]+)', line)\n", (14987, 15046), False, 'import re\n'), ((15161, 15227), 're.match', 're.match', (['"""sim\\\\.properties\\\\.operator-name\\\\s+:\\\\s+(\\\\S.*)"""', 'line'], {}), "('sim\\\\.properties\\\\.operator-name\\\\s+:\\\\s+(\\\\S.*)', line)\n", (15169, 15227), False, 'import re\n')]
|
#!/home/moringaschool/Documents/django projects/insta-moringa/virtual/bin/python3.6
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
[
"django.core.management.execute_from_command_line"
] |
[((151, 189), 'django.core.management.execute_from_command_line', 'management.execute_from_command_line', ([], {}), '()\n', (187, 189), False, 'from django.core import management\n')]
|
import sys
import os
import re, getopt
key_list = [('A',), ('A#', 'Bb'), ('B', 'Cb'), ('C',), ('C#', 'Db'), ('D',),
('D#', 'Eb'), ('E',), ('F',), ('F#', 'Gb'), ('G',), ('G#', 'Ab')]
sharp_flat = ['#', 'b']
sharp_flat_preferences = {
'A': '#',
'A#': 'b',
'Bb': 'b',
'B': '#',
'C': 'b',
'C#': 'b',
'Db': 'b',
'D': '#',
'D#': 'b',
'Eb': 'b',
'E': '#',
'F': 'b',
'F#': '#',
'Gb': '#',
'G': '#',
'G#': 'b',
'Ab': 'b',
}
key_regex = re.compile(r"[ABCDEFG][#b]?")
def get_index_from_key(source_key):
"""Gets the internal index of a key
>>> get_index_from_key('Bb')
1
"""
for key_names in key_list:
if source_key in key_names:
return key_list.index(key_names)
raise Exception("Invalid key: %s" % source_key)
def get_key_from_index(index, to_key):
"""Gets the key at the given internal index.
Sharp or flat depends on the target key.
>>> get_key_from_index(1, 'Eb')
'Bb'
"""
key_names = key_list[index % len(key_list)]
if len(key_names) > 1:
sharp_or_flat = sharp_flat.index(sharp_flat_preferences[to_key])
return key_names[sharp_or_flat]
return key_names[0]
def get_transponation_steps(source_key, target_key):
"""Gets the number of half tones to transpose
>>> get_transponation_steps('D', 'C')
-2
"""
source_index = get_index_from_key(source_key)
target_index = get_index_from_key(target_key)
return target_index - source_index
def transpose_file(file_name, from_key, to_key):
"""Transposes a file from a key to another.
>>> transpose_file('example.txt', 'D', 'E')
'Rocking start, jazzy ending\\n| E | A B | Cm7#11/D# |\\n'
"""
direction = get_transponation_steps(from_key, to_key)
result = ''
try:
for line in open(file_name):
result += transpose_line(line, direction, to_key)
return result
except IOError:
print("Invalid filename!")
usage()
def transpose_line(source_line, direction, to_key):
"""Transposes a line a number of keys if it starts with a pipe. Examples:
>>> transpose_line('| A | A# | Bb | C#m7/F# |', -2, 'C')
'| G | Ab | Ab | Bm7/E |'
Different keys will be sharp or flat depending on target key.
>>> transpose_line('| A | A# | Bb | C#m7/F# |', -2, 'D')
'| G | G# | G# | Bm7/E |'
It will use the more common key if sharp/flat, for example F# instead of Gb.
>>> transpose_line('| Gb |', 0, 'Gb')
'| F# |'
Lines not starting with pipe will not be transposed
>>> transpose_line('A | Bb |', -2, 'C')
'A | Bb |'
"""
if source_line[0] != '|':
return source_line
source_chords = key_regex.findall(source_line)
return recursive_line_transpose(source_line, source_chords, direction, to_key)
def recursive_line_transpose(source_line, source_chords, direction, to_key):
if not source_chords or not source_line:
return source_line
source_chord = source_chords.pop(0)
chord_index = source_line.find(source_chord)
after_chord_index = chord_index + len(source_chord)
return source_line[:chord_index] + \
transpose(source_chord, direction, to_key) + \
recursive_line_transpose(source_line[after_chord_index:], source_chords, direction, to_key)
def transpose(source_chord, direction, to_key):
"""Transposes a chord a number of half tones.
Sharp or flat depends on target key.
>>> transpose('C', 3, 'Bb')
'Eb'
"""
source_index = get_index_from_key(source_chord)
return get_key_from_index(source_index + direction, to_key)
def usage():
print()
'Usage:'
print()
'%s --from=Eb --to=F# input_filename' % os.path.basename(__file__)
sys.exit(2)
def main():
from_key = 'C'
to_key = 'C'
file_name = None
try:
options, arguments = getopt.getopt(sys.argv[1:], 'f:t:', ['from=', 'to=', 'doctest'])
except getopt.GetoptError as err:
print(str(err), usage(), sys.exit(2))
for option, value in options:
if option in ('-f', '--from'):
from_key = value
elif option in ('-t', '--to'):
to_key = value
elif option == '--doctest':
import doctest
doctest.testmod()
exit()
else:
usage()
if arguments:
file_name = arguments[0]
else:
usage()
result = transpose_file(file_name, from_key, to_key)
print("Result (%s -> %s):" % (from_key, to_key))
print(result)
if __name__ == '__main__':
print(transpose_line('|Eb', 2, 'C'))
|
[
"getopt.getopt",
"os.path.basename",
"sys.exit",
"doctest.testmod",
"re.compile"
] |
[((510, 538), 're.compile', 're.compile', (['"""[ABCDEFG][#b]?"""'], {}), "('[ABCDEFG][#b]?')\n", (520, 538), False, 'import re, getopt\n'), ((3785, 3796), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (3793, 3796), False, 'import sys\n'), ((3754, 3780), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (3770, 3780), False, 'import os\n'), ((3906, 3970), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""f:t:"""', "['from=', 'to=', 'doctest']"], {}), "(sys.argv[1:], 'f:t:', ['from=', 'to=', 'doctest'])\n", (3919, 3970), False, 'import re, getopt\n'), ((4042, 4053), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (4050, 4053), False, 'import sys\n'), ((4298, 4315), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (4313, 4315), False, 'import doctest\n')]
|
import logging
import subprocess
from typing import Optional
from celery import Celery
from celery.utils.log import get_logger
from code_execution.code_execution import CodeExcution
from utils import generate_random_file
tmp_dir_path = '/worker/tmp'
compiled_dir_path = '/worker/tmp/compiled_files'
# Create the celery app and get the logger
celery_app = Celery('code-executions-tasks',
broker='pyamqp://guest@rabbit//', backend='amqp://guest@rabbit//')
# Add CELERY_ACKS_LATE in order to wait for infinite loop code executions
# celery_app.conf.update(
# CELERY_ACKS_LATE=True
# )
logger = get_logger(__name__)
@celery_app.task
def execute_code(language: str, code: str, submission: bool = False, timeout: Optional[float] = 10) -> dict:
"""
Task for code execution
@param language: code programming language
@param code: code to be executed
@param submission: flag which tells if the code to be executed is a submission or a normal execution
@param timeout: maximum time the code is allowed to run
@return: dict containgin execution results
"""
logger.info("Starting code execution")
in_file_path = (f"{tmp_dir_path}/in_files/{generate_random_file()}."
f"{CodeExcution.get_lang_extension(language)}")
compiled_file = f'{compiled_dir_path}/{generate_random_file()}.out'
command_to_execute_code = CodeExcution.provide_code_execution_command(
in_file_path, language, compiled_file, submission)
default_dict = {
"has_error": False,
"out_of_resources": False,
"exit_code": 0,
"out_of_time": False,
"raw_output": ""
}
try:
code_output = CodeExcution.execute_code(
command_to_execute_code, in_file_path, compiled_file, code, timeout)
logging.info(f"Code Returned, result: {code_output}")
default_dict["raw_output"] = code_output
except subprocess.CalledProcessError as cpe:
logging.debug(f"Code execution was errored: {cpe}")
default_dict["has_error"] = True
default_dict["exit_code"] = cpe.returncode
default_dict["raw_output"] = cpe.output
except subprocess.TimeoutExpired as te:
logger.debug(f"Code timeout after: {te.timeout}")
default_dict["has_error"] = True
default_dict["exit_code"] = 124
default_dict["out_of_time"] = True
default_dict["raw_output"] = "Time Limit Exceeded"
return default_dict
|
[
"utils.generate_random_file",
"celery.Celery",
"logging.debug",
"code_execution.code_execution.CodeExcution.provide_code_execution_command",
"code_execution.code_execution.CodeExcution.get_lang_extension",
"logging.info",
"celery.utils.log.get_logger",
"code_execution.code_execution.CodeExcution.execute_code"
] |
[((360, 463), 'celery.Celery', 'Celery', (['"""code-executions-tasks"""'], {'broker': '"""pyamqp://guest@rabbit//"""', 'backend': '"""amqp://guest@rabbit//"""'}), "('code-executions-tasks', broker='pyamqp://guest@rabbit//', backend=\n 'amqp://guest@rabbit//')\n", (366, 463), False, 'from celery import Celery\n'), ((622, 642), 'celery.utils.log.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (632, 642), False, 'from celery.utils.log import get_logger\n'), ((1403, 1501), 'code_execution.code_execution.CodeExcution.provide_code_execution_command', 'CodeExcution.provide_code_execution_command', (['in_file_path', 'language', 'compiled_file', 'submission'], {}), '(in_file_path, language,\n compiled_file, submission)\n', (1446, 1501), False, 'from code_execution.code_execution import CodeExcution\n'), ((1709, 1807), 'code_execution.code_execution.CodeExcution.execute_code', 'CodeExcution.execute_code', (['command_to_execute_code', 'in_file_path', 'compiled_file', 'code', 'timeout'], {}), '(command_to_execute_code, in_file_path,\n compiled_file, code, timeout)\n', (1734, 1807), False, 'from code_execution.code_execution import CodeExcution\n'), ((1825, 1878), 'logging.info', 'logging.info', (['f"""Code Returned, result: {code_output}"""'], {}), "(f'Code Returned, result: {code_output}')\n", (1837, 1878), False, 'import logging\n'), ((1205, 1227), 'utils.generate_random_file', 'generate_random_file', ([], {}), '()\n', (1225, 1227), False, 'from utils import generate_random_file\n'), ((1254, 1295), 'code_execution.code_execution.CodeExcution.get_lang_extension', 'CodeExcution.get_lang_extension', (['language'], {}), '(language)\n', (1285, 1295), False, 'from code_execution.code_execution import CodeExcution\n'), ((1343, 1365), 'utils.generate_random_file', 'generate_random_file', ([], {}), '()\n', (1363, 1365), False, 'from utils import generate_random_file\n'), ((1986, 2037), 'logging.debug', 'logging.debug', (['f"""Code execution was errored: {cpe}"""'], {}), "(f'Code execution was errored: {cpe}')\n", (1999, 2037), False, 'import logging\n')]
|
#imports
import haversine as hs
import pandas as pd
import numpy as np
import random
import time
from concurrent import futures
import grpc
import databroker_pb2_grpc
import databroker_pb2
port = 8061
class Databroker(databroker_pb2_grpc.DatabrokerServicer):
def __init__(self):
self.current_row = 0
#load required datasets
self.no2_data = pd.read_csv('./data/no2_testset.csv')
self.pm10_data = pd.read_csv('./data/pm10_testset.csv')
self.pm25_data = pd.read_csv('./data/pm25_testset.csv')
self.gps_data = pd.read_csv('./data/sensor_gps.csv')
self.sensor_gps = pd.read_csv('./data/low_cost_sensors.csv')
def get_next(self, request, context):
response = databroker_pb2.Features()
if self.current_row >= self.no2_data.shape[0]:
context.set_code(grpc.StatusCode.NOT_FOUND)
context.set_details("all data has been processed")
else:
#load 1 row from each dataset and convert to numpy
# create response format dataframe
no2 = pd.DataFrame(data=None, columns=self.no2_data.columns)
pm10 = pd.DataFrame(data=None, columns=self.pm10_data.columns)
pm25 = pd.DataFrame(data=None, columns=self.pm25_data.columns)
for sensor in range(self.sensor_gps.shape[0]):
id = self.sensor_gps.deviceID[sensor]
counter=1
for i in range(23,0,-1):
lat1 = np.rad2deg(self.sensor_gps.iloc[sensor,4])
lon1 = np.rad2deg(self.sensor_gps.iloc[sensor,5])
lat2 = self.gps_data.iloc[0,i*2+1]
lon2 = self.gps_data.iloc[0,i*2]
distance = hs.haversine((lat2, lon2), (lat1, lon1))
self.no2_data.iloc[self.current_row,counter] = distance
self.pm10_data.iloc[self.current_row,counter] = distance
self.pm25_data.iloc[self.current_row,counter] = distance
counter +=1
no2 = no2.append(self.no2_data.iloc[self.current_row,:])
pm10 = pm10.append(self.pm10_data.iloc[self.current_row,:])
pm25 = pm25.append(self.pm25_data.iloc[self.current_row,:])
no2_input= no2.iloc[:,1:].to_numpy()
pm10_input= pm10.iloc[:,1:].to_numpy()
pm25_input= pm25.iloc[:,1:].to_numpy()
no2_input = np.ndarray.tobytes(no2_input)
pm10_input = np.ndarray.tobytes(pm10_input)
pm25_input = np.ndarray.tobytes(pm25_input)
#add output to response
response.no2_data = no2_input
response.pm10_data = pm10_input
response.pm25_data = pm25_input
#add 1 to row counter(maybe we could make it cyclical with mod later)
self.current_row += 1
return response
#host server
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
databroker_pb2_grpc.add_DatabrokerServicer_to_server(Databroker(), server)
print("Starting server. Listening on port : " + str(port))
server.add_insecure_port("[::]:{}".format(port))
server.start()
try:
while True:
time.sleep(86400)
except KeyboardInterrupt:
server.stop(0)
|
[
"pandas.DataFrame",
"pandas.read_csv",
"haversine.haversine",
"time.sleep",
"numpy.rad2deg",
"databroker_pb2.Features",
"concurrent.futures.ThreadPoolExecutor",
"numpy.ndarray.tobytes"
] |
[((3015, 3057), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(10)'}), '(max_workers=10)\n', (3041, 3057), False, 'from concurrent import futures\n'), ((372, 409), 'pandas.read_csv', 'pd.read_csv', (['"""./data/no2_testset.csv"""'], {}), "('./data/no2_testset.csv')\n", (383, 409), True, 'import pandas as pd\n'), ((435, 473), 'pandas.read_csv', 'pd.read_csv', (['"""./data/pm10_testset.csv"""'], {}), "('./data/pm10_testset.csv')\n", (446, 473), True, 'import pandas as pd\n'), ((499, 537), 'pandas.read_csv', 'pd.read_csv', (['"""./data/pm25_testset.csv"""'], {}), "('./data/pm25_testset.csv')\n", (510, 537), True, 'import pandas as pd\n'), ((562, 598), 'pandas.read_csv', 'pd.read_csv', (['"""./data/sensor_gps.csv"""'], {}), "('./data/sensor_gps.csv')\n", (573, 598), True, 'import pandas as pd\n'), ((625, 667), 'pandas.read_csv', 'pd.read_csv', (['"""./data/low_cost_sensors.csv"""'], {}), "('./data/low_cost_sensors.csv')\n", (636, 667), True, 'import pandas as pd\n'), ((731, 756), 'databroker_pb2.Features', 'databroker_pb2.Features', ([], {}), '()\n', (754, 756), False, 'import databroker_pb2\n'), ((3287, 3304), 'time.sleep', 'time.sleep', (['(86400)'], {}), '(86400)\n', (3297, 3304), False, 'import time\n'), ((1073, 1127), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'None', 'columns': 'self.no2_data.columns'}), '(data=None, columns=self.no2_data.columns)\n', (1085, 1127), True, 'import pandas as pd\n'), ((1147, 1202), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'None', 'columns': 'self.pm10_data.columns'}), '(data=None, columns=self.pm10_data.columns)\n', (1159, 1202), True, 'import pandas as pd\n'), ((1222, 1277), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'None', 'columns': 'self.pm25_data.columns'}), '(data=None, columns=self.pm25_data.columns)\n', (1234, 1277), True, 'import pandas as pd\n'), ((2491, 2520), 'numpy.ndarray.tobytes', 'np.ndarray.tobytes', (['no2_input'], {}), '(no2_input)\n', (2509, 2520), True, 'import numpy as np\n'), ((2550, 2580), 'numpy.ndarray.tobytes', 'np.ndarray.tobytes', (['pm10_input'], {}), '(pm10_input)\n', (2568, 2580), True, 'import numpy as np\n'), ((2610, 2640), 'numpy.ndarray.tobytes', 'np.ndarray.tobytes', (['pm25_input'], {}), '(pm25_input)\n', (2628, 2640), True, 'import numpy as np\n'), ((1493, 1536), 'numpy.rad2deg', 'np.rad2deg', (['self.sensor_gps.iloc[sensor, 4]'], {}), '(self.sensor_gps.iloc[sensor, 4])\n', (1503, 1536), True, 'import numpy as np\n'), ((1563, 1606), 'numpy.rad2deg', 'np.rad2deg', (['self.sensor_gps.iloc[sensor, 5]'], {}), '(self.sensor_gps.iloc[sensor, 5])\n', (1573, 1606), True, 'import numpy as np\n'), ((1745, 1785), 'haversine.haversine', 'hs.haversine', (['(lat2, lon2)', '(lat1, lon1)'], {}), '((lat2, lon2), (lat1, lon1))\n', (1757, 1785), True, 'import haversine as hs\n')]
|
import smart_imports
smart_imports.all()
INFINIT_PREMIUM_DESCRIPTION = 'Вечная подписка даёт вам все бонусы подписчика на всё время игры.'
class PERMANENT_PURCHASE_TYPE(rels_django.DjangoEnum):
description = rels.Column(unique=False)
might_required = rels.Column(unique=False, single_type=False)
level_required = rels.Column(unique=False, single_type=False)
full_name = rels.Column()
records = (('INFINIT_SUBSCRIPTION', 12, 'Вечная подписка', INFINIT_PREMIUM_DESCRIPTION, None, None, 'Вечная подписка'),)
class GOODS_GROUP(rels_django.DjangoEnum):
uid = rels.Column()
uid_prefix = rels.Column(unique=False)
records = (('PREMIUM', 0, 'подписка', 'subscription', 'subscription-'),
('ENERGY', 1, 'энергия', 'energy', 'energy-'),
('CHEST', 2, 'сундук', 'random-premium-chest', 'random-premium-chest'),
('PREFERENCES', 3, 'предпочтения', 'preference', 'preference-'),
('PREFERENCES_RESET', 4, 'сброс предпочтений', 'preference-reset', 'hero-preference-reset-'),
('HABITS', 5, 'черты', 'habits', 'hero-habits-'),
('ABILITIES', 6, 'способности', 'abilities', 'hero-abilities-'),
('CLANS', 7, 'гильдии', 'clans', 'clan-'),
('CARDS', 8, 'Карты судьбы', 'cards', 'cards-'))
CARDS_MIN_PRICES = {cards_relations.RARITY.COMMON: 2,
cards_relations.RARITY.UNCOMMON: 10,
cards_relations.RARITY.RARE: 25,
cards_relations.RARITY.EPIC: 50,
cards_relations.RARITY.LEGENDARY: 100}
|
[
"smart_imports.all"
] |
[((23, 42), 'smart_imports.all', 'smart_imports.all', ([], {}), '()\n', (40, 42), False, 'import smart_imports\n')]
|
"""spasco - spaces to underscores
==============================
Command line tool for replacing/removing whitespaces or other patterns of file- and directory names.
"""
# Copyright (c) 2021, <NAME>.
# All rights reserved. Distributed under the MIT License.
import argparse
import configparser
import fnmatch
import logging
import os
import sys
from argparse import _SubParsersAction
from argparse import HelpFormatter
from typing import List
from typing import Tuple
from spasco import __src_url__
from spasco import __title__
from spasco import __version__
from spasco.term_color import fmt
from spasco.term_color import Txt
base, file = os.path.split(__file__)
settings_file = os.path.join(base, 'settings.ini')
# set up a settings file and then a logger:
config = configparser.ConfigParser()
config.read(settings_file)
# default values for log record are created:
if not config.read(settings_file):
config['VALUE-SETTINGS'] = {
'search_value': "' '",
'new_value': '_',
}
config['LOG-SETTINGS'] = {
'Logging_turned_on': "False",
'logger_filename': f'{__title__}.log',
'logger_location': os.environ['HOME'],
}
with open(settings_file, 'w') as f:
config.write(f)
def get_logger_path() -> str:
logger_location = config.get('LOG-SETTINGS', 'logger_location')
logger_filename = config.get('LOG-SETTINGS', 'logger_filename')
return f"{logger_location}/{logger_filename}"
logger_path = get_logger_path()
logging.basicConfig(
filename=logger_path,
level=logging.INFO,
format='%(levelname)s | %(asctime)s | %(message)s',
)
if (sys.platform != 'linux' and sys.platform != 'darwin'):
print(f"{__title__!r} is currently not optimized for platforms other than OS X / linux")
def main(argv: List[str]) -> int:
""" Main program.
:argument
argv: command-line arguments, such as sys.argv (including the program name
in argv[0]).
:return
Zero on successful program termination, non-zero otherwise.
"""
main_parser, config_subparser = __build_parser()
argv = argv[1:]
args = main_parser.parse_args(args=argv)
# triggering config subparser
if vars(args).get('command', None) == 'config':
execute_config(config_subparser, argv)
return 0
###########################
# 1 select and sort paths #
###########################
files_dirs = []
if isinstance(args.file_or_dir, str):
args.file_or_dir = [args.file_or_dir]
if args.file_or_dir and not args.recursive:
files_dirs.extend(args.file_or_dir)
if args.recursive:
files_dirs = recurse_dirs_and_files()
# sort paths (longest paths first) so that renaming starts with the deepest nested file/directory:
files_dirs = [x.split('/') for x in files_dirs]
sorted_paths = sorted(files_dirs, key=len, reverse=True)
files_dirs = ['/'.join(path_as_lst) for path_as_lst in sorted_paths]
########################
# 2: path filtration #
########################
SEARCH_VALUE = args.search_value if args.search_value else config.get(
'VALUE-SETTINGS', 'search_value',
)
if SEARCH_VALUE == "' '":
SEARCH_VALUE = ' '
filtered_paths = []
all_selected_files_dirs = files_dirs.copy()
# ------ no file/dir existent ----
if not files_dirs:
print('No directory or file present!')
return 1
# ------ search-value filter ------
# [files_dirs.remove(x) for x in all_selected_files_dirs if SEARCH_VALUE not in x.split('/')[-1]]
for x in all_selected_files_dirs:
if SEARCH_VALUE not in x.split('/')[-1]:
files_dirs.remove(x)
if not files_dirs:
searchval_msg = f"None of the {len(all_selected_files_dirs)} present files/directories contain the search value '{SEARCH_VALUE}'!"
print(searchval_msg)
return 1
# ------ pattern-only filter ------
# [files_dirs.remove(x) for x in files_dirs.copy() if args.pattern_only and not fnmatch.fnmatch(os.path.split(x)[1], args.pattern_only)]
for x in files_dirs.copy():
if args.pattern_only and not fnmatch.fnmatch(os.path.split(x)[1], args.pattern_only):
files_dirs.remove(x)
if not files_dirs:
print(f'None of the {len(all_selected_files_dirs)} present files/directories contain the pattern {args.pattern_only!r}!')
return 1
# ------ except-pattern filter -----
# [files_dirs.remove(x) for x in files_dirs.copy() if args.except_pattern and fnmatch.fnmatch(os.path.split(x)[-1], args.except_pattern)]
for x in files_dirs.copy():
if args.except_pattern and fnmatch.fnmatch(os.path.split(x)[-1], args.except_pattern):
files_dirs.remove(x)
if not files_dirs:
print(f'None of the exception-pattern matching files/directories contain the search-value {SEARCH_VALUE!r}.',)
return 1
# ------ dirs-only filter -----
# [files_dirs.remove(x) for x in files_dirs.copy() if args.dirs_only and not os.path.isdir(x)]
for x in files_dirs.copy():
if args.dirs_only and not os.path.isdir(x):
files_dirs.remove(x)
if not files_dirs:
print('No directory present for renaming.')
return 1
# ------ files-only filter -----
# [files_dirs.remove(x) for x in files_dirs.copy() if args.files_only and not os.path.isfile(x)]
for x in files_dirs.copy():
if args.files_only and not os.path.isfile(x):
files_dirs.remove(x)
if not files_dirs:
print('No file present for renaming.')
return 1
filtered_paths = files_dirs
################
# 3 renaming #
################
if args.new_value == '':
NEW_VALUE = ''
if args.new_value:
NEW_VALUE = args.new_value
if args.new_value is None:
NEW_VALUE = config.get('VALUE-SETTINGS', 'new_value')
if NEW_VALUE == "''" or NEW_VALUE == '""':
NEW_VALUE = ''
filecount, dircount, renamed_paths = path_renaming(
path_lst=filtered_paths,
search_value=SEARCH_VALUE,
new_value=NEW_VALUE,
)
if args.immediately:
is_proceeding = 'y'
else:
msg = f'You can rename {len(filtered_paths)} files and/or directories.' # 🔨
colored_msg = fmt(msg) # , Txt.greenblue
print(colored_msg)
print()
before_heading = fmt('Before', Txt.pink, bolded=True)
after_heading = fmt('After', Txt.blue, bolded=True)
sep_line = fmt('──', Txt.greenblue)
print(f"{before_heading} {' ' * (max([len(x) for x in filtered_paths]) - len('before') + 6)} {after_heading}",)
print(f"{sep_line * (max([len(x) for x in filtered_paths]) + 4)}")
for before, after in list(zip(filtered_paths, renamed_paths)):
before_renaming = fmt(before, Txt.pink)
after_renaming = fmt(after, Txt.blue)
print(f"'{before_renaming}'{' ' * (max([len(x) for x in filtered_paths]) - len(before))} {fmt('🡆', Txt.greenblue)} '{after_renaming}'",)
print(f"{sep_line * (max([len(x) for x in filtered_paths]) + 4)}")
print()
q = fmt(' [y/n] ', Txt.pink)
proceed_msg = fmt('OK to proceed with renaming?') # , Txt.greenblue
is_proceeding = input(proceed_msg + q)
if is_proceeding.lower() == 'y':
filecount, dircount, new_pathnames = path_renaming(
path_lst=filtered_paths,
search_value=SEARCH_VALUE,
new_value=NEW_VALUE,
renaming=True,
)
success_msg = fmt(f'All done! {filecount} files and {dircount} directories were renamed! ✨💄✨', Txt.greenblue)
print(success_msg)
return 0
else:
print(fmt("Command aborted.", textcolor=Txt.pink))
return 1
settings_msg = f"""{fmt("value settings:", Txt.greenblue)}
search_value: {config.get('VALUE-SETTINGS', 'search_value')}
new_value: {config.get('VALUE-SETTINGS', 'new_value')}
{fmt("log settings:", Txt.greenblue)}
logging_turned_on: {config.getboolean('LOG-SETTINGS', 'logging_turned_on')}
logger_filename: {config.get('LOG-SETTINGS', 'logger_filename')}
logger_location: {config.get('LOG-SETTINGS', 'logger_location')}"""
def execute_config(config_subparser: argparse.ArgumentParser, argv: List[str]) -> int:
""" Boolean logic of config subparser triggering. """
args = config_subparser.parse_args(argv[1:])
if args.show_settings:
print(settings_msg)
return 0
if args.turn_log_on:
config['LOG-SETTINGS']['logging_turned_on'] = args.turn_log_on.capitalize()
with open(settings_file, 'w') as fp:
config.write(fp)
log_state = config.getboolean('LOG-SETTINGS', 'logging_turned_on')
if log_state:
print('Logging is activated.')
else:
print('Logging is deactivated.')
return 0
if args.log_name:
old_logger_path = get_logger_path()
config['LOG-SETTINGS']['logger_filename'] = args.log_name
with open(settings_file, 'w') as fp:
config.write(fp)
new_logger_path = get_logger_path()
os.rename(old_logger_path, new_logger_path)
print(f"The new log filename is {config.get('LOG-SETTINGS', 'logger_filename')!r}.",)
return 0
if args.log_location:
old_logger_path = get_logger_path()
log_location = args.log_location
if '~' in args.log_location:
log_location = os.path.expanduser(args.log_location)
if not os.path.isdir(log_location):
print(f'The given path {args.log_location!r} is not a valid directory!')
return 1
config['LOG-SETTINGS']['logger_location'] = log_location
with open(settings_file, 'w') as fp:
config.write(fp)
new_logger_path = get_logger_path()
os.rename(old_logger_path, new_logger_path)
print(f"The new log location is {config.get('LOG-SETTINGS', 'logger_location')!r}.",)
return 0
if args.set_search_value:
if args.set_search_value == ' ':
config['VALUE-SETTINGS']['search_value'] = "' '"
with open(settings_file, 'w') as fp:
config.write(fp)
print(f"The new search-value is {config.get('VALUE-SETTINGS', 'search_value')}.",)
else:
config['VALUE-SETTINGS']['search_value'] = args.set_search_value
with open(settings_file, 'w') as fp:
config.write(fp)
print(f"The new search-value is {config.get('VALUE-SETTINGS', 'search_value')!r}.",)
return 0
if args.set_new_value == '':
config['VALUE-SETTINGS']['new_value'] = "''"
with open(settings_file, 'w') as fp:
config.write(fp)
print(f"The new 'new-value' is {config.get('VALUE-SETTINGS', 'new_value')}.")
return 0
if args.set_new_value:
config['VALUE-SETTINGS']['new_value'] = args.set_new_value
with open(settings_file, 'w') as fp:
config.write(fp)
print(f"The new 'new-value' is {config.get('VALUE-SETTINGS', 'new_value')!r}.")
return 0
config_subparser.print_help()
return 1
def path_renaming(path_lst: List[str], search_value: str, new_value: str, renaming: bool = False) -> Tuple[int, int, List[str]]:
""" List of filtered files and directories are renamed and their names
returned. Furthermore, the number fo directories/files which were renamed
are also returned.
:returns
Tuples containing the number of directories, files and the names of them after renaming
"""
renamed_paths = []
dircount, filecount = 0, 0
for old_path_name in path_lst:
path_base, file = os.path.split(old_path_name)
new_name = file.replace(search_value, new_value)
full_new = os.path.join(path_base, new_name)
renamed_paths.append(full_new)
if renaming:
os.rename(old_path_name, full_new)
if os.path.isdir(full_new):
dircount += 1
elif os.path.isfile(full_new):
filecount += 1
logging.info(f" working dir: {os.getcwd()!r} | naming: {old_path_name!r} --> {full_new!r}",)
return (filecount, dircount, renamed_paths)
def recurse_dirs_and_files() -> List[str]:
""" All files/directories within the current working directory are mapped
into a list.
:returns
List of all file/directory paths, recursively and sorted
"""
all_files_dirs = []
base_path = os.getcwd()
# collect all rel. paths in a list (rel to cwd):
for dirpath, dirnames, filenames in os.walk(base_path):
for filename in filenames:
full_filepath = dirpath + '/' + filename
rel_filepath = os.path.relpath(full_filepath, base_path)
all_files_dirs.append(rel_filepath)
for dirname in dirnames:
full_dirpath = dirpath + '/' + dirname
rel_dirpath = os.path.relpath(full_dirpath, base_path)
all_files_dirs.append(rel_dirpath)
return all_files_dirs
# hack for removing the metavar below the subparsers (config) title
class NoSubparsersMetavarFormatter(HelpFormatter):
def _format_action_invocation(self, action): # type: ignore
if isinstance(action, _SubParsersAction):
return ""
return super()._format_action_invocation(action)
class MyOwnFormatter(NoSubparsersMetavarFormatter, argparse.RawDescriptionHelpFormatter):
""" Removes metavar of config subparser and adds RawDescription """
pass
def __build_parser() -> Tuple[argparse.ArgumentParser, argparse.ArgumentParser]:
""" Constructs the main_parser for the command line arguments.
:returns
An ArgumentParser instance for the CLI.
"""
main_parser = argparse.ArgumentParser(
prog=__title__,
add_help=False,
description=f'Spasco is a glorified replace function. By default it replaces whitespaces\n'
f'of all file- and directory names within your current working directory by \n'
f'underscores.\n\nsrc: {__src_url__}',
epilog='Make your files more computer-friendly 😄',
formatter_class=lambda prog: MyOwnFormatter(
prog, max_help_position=80,
),
)
# optional arguments:
main_parser.add_argument(
"-t",
dest='file_or_dir',
metavar='file_or_dir',
action='store',
nargs='?',
default=os.listdir(),
help='Select a single file or directory for renaming.',
)
main_parser.add_argument(
'-s',
dest='search_value',
nargs='?',
action='store',
metavar='search_value',
help="Define custom search-value (default: ' ').",
)
main_parser.add_argument(
'-n',
dest='new_value',
nargs='?',
action='store',
metavar='new_value',
help="Define custom new-value (default: '_')."
)
main_parser.add_argument(
'-p',
dest='pattern_only',
nargs='?',
action='store',
metavar='pattern_only',
help='Only files/dirs containing the pattern are renamed.',
)
main_parser.add_argument(
'-e',
metavar='except_pattern',
dest='except_pattern',
nargs='?',
action='store',
help='Only files/dirs not containing the pattern are renamed.',
)
main_parser.add_argument(
'-d',
'--dirs-only',
action='store_true',
help='Only directories are renamed.',
)
main_parser.add_argument(
'-f',
'--files-only',
action='store_true',
help='Only files are renamed.',
)
main_parser.add_argument(
'-r',
'--recursive',
action='store_true',
help='Recurse into directories.',
)
main_parser.add_argument(
'-i',
'--immediately',
action='store_true',
help='Skip security question, renaming preview and execute immediately.',
)
main_parser.add_argument(
'-v',
'--version',
action='version',
help='Show version number and exit.',
version=f'%(prog)s {__version__}',
)
add_parser_help(main_parser)
# ---- configuration structured as subparser -----
config_subparsers = main_parser.add_subparsers(
title='log and renaming configuration',
)
config_subparser = add_config_subparser(config_subparsers)
return main_parser, config_subparser
def add_config_subparser(sub_parsers: argparse._SubParsersAction) -> argparse.ArgumentParser:
""" Parser for configuring spasco.
"""
config_subparser = sub_parsers.add_parser(
name='config',
description='search-value and new-value can be changed. Logging to record all '
'renaming actions as log file can be activated.',
usage=f'{__title__} config [--show-setting] [-o true/false] [-n [filename]] [-l [pathname]] [-h, --help ]',
add_help=False,
formatter_class=lambda prog: argparse.RawDescriptionHelpFormatter(
prog, max_help_position=33,
),
help=f"Sub-command to interact with {__title__}'s logging and rename settings.",
)
config_subparser.add_argument(
'--show-settings',
action='store_true',
help='Returns your current settings for logging and renaming.',
)
add_parser_help(config_subparser)
config_subparser_logging = config_subparser.add_argument_group(
'log settings',
)
config_subparser_logging.add_argument(
'-o',
nargs='?',
metavar='true/false',
dest='turn_log_on',
choices=['true', 'false'],
help="Logging is turned on/off (default: off).",
)
config_subparser_logging.add_argument(
'-f',
nargs='?',
metavar='filename',
dest='log_name',
help='Set a new filename for the logger.',
)
config_subparser_logging.add_argument(
'-l',
nargs='?',
metavar='pathname',
dest='log_location',
help='Set a new file location for the logger.',
)
config_subparser_renaming = config_subparser.add_argument_group(
'renaming settings',
)
config_subparser_renaming.add_argument(
'-s',
nargs='?',
metavar='search_value',
dest='set_search_value',
help="Set a new 'search-value' permanently.",
)
config_subparser_renaming.add_argument(
'-n',
nargs='?',
metavar='new_value',
dest='set_new_value',
help="Set a new 'new-value' permanently.",
)
config_subparser.set_defaults(command='config')
return config_subparser
def add_parser_help(parser: argparse.ArgumentParser) -> None:
""" Custom help-argument to have consistent style.
add_help=False to enable this.
"""
parser.add_argument(
'-h',
'--help',
action='help',
help="Show this help message and exit.",
)
def run_main() -> None:
try:
sys.exit(main(sys.argv))
except Exception as e:
sys.stderr.write(__title__ + ': ' + str(e) + '\n')
sys.exit(1)
if __name__ == '__main__':
run_main()
|
[
"os.path.expanduser",
"spasco.term_color.fmt",
"logging.basicConfig",
"os.getcwd",
"os.path.isdir",
"os.rename",
"os.walk",
"os.path.isfile",
"os.path.relpath",
"argparse.RawDescriptionHelpFormatter",
"configparser.ConfigParser",
"os.path.join",
"os.listdir",
"sys.exit",
"os.path.split"
] |
[((642, 665), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (655, 665), False, 'import os\n'), ((682, 716), 'os.path.join', 'os.path.join', (['base', '"""settings.ini"""'], {}), "(base, 'settings.ini')\n", (694, 716), False, 'import os\n'), ((771, 798), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (796, 798), False, 'import configparser\n'), ((1489, 1607), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': 'logger_path', 'level': 'logging.INFO', 'format': '"""%(levelname)s | %(asctime)s | %(message)s"""'}), "(filename=logger_path, level=logging.INFO, format=\n '%(levelname)s | %(asctime)s | %(message)s')\n", (1508, 1607), False, 'import logging\n'), ((12582, 12593), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (12591, 12593), False, 'import os\n'), ((12687, 12705), 'os.walk', 'os.walk', (['base_path'], {}), '(base_path)\n', (12694, 12705), False, 'import os\n'), ((6326, 6334), 'spasco.term_color.fmt', 'fmt', (['msg'], {}), '(msg)\n', (6329, 6334), False, 'from spasco.term_color import fmt\n'), ((6422, 6458), 'spasco.term_color.fmt', 'fmt', (['"""Before"""', 'Txt.pink'], {'bolded': '(True)'}), "('Before', Txt.pink, bolded=True)\n", (6425, 6458), False, 'from spasco.term_color import fmt\n'), ((6483, 6518), 'spasco.term_color.fmt', 'fmt', (['"""After"""', 'Txt.blue'], {'bolded': '(True)'}), "('After', Txt.blue, bolded=True)\n", (6486, 6518), False, 'from spasco.term_color import fmt\n'), ((6538, 6562), 'spasco.term_color.fmt', 'fmt', (['"""──"""', 'Txt.greenblue'], {}), "('──', Txt.greenblue)\n", (6541, 6562), False, 'from spasco.term_color import fmt\n'), ((7186, 7210), 'spasco.term_color.fmt', 'fmt', (['""" [y/n] """', 'Txt.pink'], {}), "(' [y/n] ', Txt.pink)\n", (7189, 7210), False, 'from spasco.term_color import fmt\n'), ((7233, 7268), 'spasco.term_color.fmt', 'fmt', (['"""OK to proceed with renaming?"""'], {}), "('OK to proceed with renaming?')\n", (7236, 7268), False, 'from spasco.term_color import fmt\n'), ((7601, 7701), 'spasco.term_color.fmt', 'fmt', (['f"""All done! {filecount} files and {dircount} directories were renamed! ✨💄✨"""', 'Txt.greenblue'], {}), "(f'All done! {filecount} files and {dircount} directories were renamed! ✨💄✨'\n , Txt.greenblue)\n", (7604, 7701), False, 'from spasco.term_color import fmt\n'), ((7849, 7886), 'spasco.term_color.fmt', 'fmt', (['"""value settings:"""', 'Txt.greenblue'], {}), "('value settings:', Txt.greenblue)\n", (7852, 7886), False, 'from spasco.term_color import fmt\n'), ((8025, 8060), 'spasco.term_color.fmt', 'fmt', (['"""log settings:"""', 'Txt.greenblue'], {}), "('log settings:', Txt.greenblue)\n", (8028, 8060), False, 'from spasco.term_color import fmt\n'), ((9190, 9233), 'os.rename', 'os.rename', (['old_logger_path', 'new_logger_path'], {}), '(old_logger_path, new_logger_path)\n', (9199, 9233), False, 'import os\n'), ((9900, 9943), 'os.rename', 'os.rename', (['old_logger_path', 'new_logger_path'], {}), '(old_logger_path, new_logger_path)\n', (9909, 9943), False, 'import os\n'), ((11775, 11803), 'os.path.split', 'os.path.split', (['old_path_name'], {}), '(old_path_name)\n', (11788, 11803), False, 'import os\n'), ((11880, 11913), 'os.path.join', 'os.path.join', (['path_base', 'new_name'], {}), '(path_base, new_name)\n', (11892, 11913), False, 'import os\n'), ((6859, 6880), 'spasco.term_color.fmt', 'fmt', (['before', 'Txt.pink'], {}), '(before, Txt.pink)\n', (6862, 6880), False, 'from spasco.term_color import fmt\n'), ((6910, 6930), 'spasco.term_color.fmt', 'fmt', (['after', 'Txt.blue'], {}), '(after, Txt.blue)\n', (6913, 6930), False, 'from spasco.term_color import fmt\n'), ((7765, 7808), 'spasco.term_color.fmt', 'fmt', (['"""Command aborted."""'], {'textcolor': 'Txt.pink'}), "('Command aborted.', textcolor=Txt.pink)\n", (7768, 7808), False, 'from spasco.term_color import fmt\n'), ((9521, 9558), 'os.path.expanduser', 'os.path.expanduser', (['args.log_location'], {}), '(args.log_location)\n', (9539, 9558), False, 'import os\n'), ((9574, 9601), 'os.path.isdir', 'os.path.isdir', (['log_location'], {}), '(log_location)\n', (9587, 9601), False, 'import os\n'), ((11986, 12020), 'os.rename', 'os.rename', (['old_path_name', 'full_new'], {}), '(old_path_name, full_new)\n', (11995, 12020), False, 'import os\n'), ((12036, 12059), 'os.path.isdir', 'os.path.isdir', (['full_new'], {}), '(full_new)\n', (12049, 12059), False, 'import os\n'), ((12822, 12863), 'os.path.relpath', 'os.path.relpath', (['full_filepath', 'base_path'], {}), '(full_filepath, base_path)\n', (12837, 12863), False, 'import os\n'), ((13022, 13062), 'os.path.relpath', 'os.path.relpath', (['full_dirpath', 'base_path'], {}), '(full_dirpath, base_path)\n', (13037, 13062), False, 'import os\n'), ((14550, 14562), 'os.listdir', 'os.listdir', ([], {}), '()\n', (14560, 14562), False, 'import os\n'), ((19300, 19311), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (19308, 19311), False, 'import sys\n'), ((5126, 5142), 'os.path.isdir', 'os.path.isdir', (['x'], {}), '(x)\n', (5139, 5142), False, 'import os\n'), ((5476, 5493), 'os.path.isfile', 'os.path.isfile', (['x'], {}), '(x)\n', (5490, 5493), False, 'import os\n'), ((12108, 12132), 'os.path.isfile', 'os.path.isfile', (['full_new'], {}), '(full_new)\n', (12122, 12132), False, 'import os\n'), ((17156, 17220), 'argparse.RawDescriptionHelpFormatter', 'argparse.RawDescriptionHelpFormatter', (['prog'], {'max_help_position': '(33)'}), '(prog, max_help_position=33)\n', (17192, 17220), False, 'import argparse\n'), ((4687, 4703), 'os.path.split', 'os.path.split', (['x'], {}), '(x)\n', (4700, 4703), False, 'import os\n'), ((4175, 4191), 'os.path.split', 'os.path.split', (['x'], {}), '(x)\n', (4188, 4191), False, 'import os\n'), ((7034, 7057), 'spasco.term_color.fmt', 'fmt', (['"""🡆"""', 'Txt.greenblue'], {}), "('🡆', Txt.greenblue)\n", (7037, 7057), False, 'from spasco.term_color import fmt\n'), ((12207, 12218), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (12216, 12218), False, 'import os\n')]
|
''' imports '''
# filesystem management
import os
# tensors and nn modules
import torch
# array handling
import numpy as np
# midi file import and parse
from mido import MidiFile
class MelodyDataset(torch.utils.data.Dataset):
''' dataset class for midi files '''
def __init__(self, dir_path: str, cache = False, ds: int = 20):
''' init dataset, import midi files '''
super().__init__()
# store downsampling factor
self.ds = ds
# get and store list midi files in directory
self.file_names = [ name for name in os.listdir(dir_path) if 'mid' in name[-4:] ]
# import and store midi files
self.midi_files = [ MidiFile(os.path.join(dir_path, file_name))
for file_name in self.file_names ]
# case filter by key
if False:
# get index for only midi with meta plus [melody, chords, bass] tracks
j = [ i for i in range(len(self.file_names))
if len(self.midi_files[i].tracks) > 3
and "key='{}'".format(key) in str(self.midi_files[i].tracks[0][2]) ]
if False:
# get index for only midi with meta plus [melody, chords, bass] tracks
j = [ i for i in range(len(self.file_names))
if len(self.midi_files[i].tracks) > 3 ]
# filter midi file and file name lists
self.midi_files = [ self.midi_files[i] for i in j ]
self.file_names = [ self.file_names[i] for i in j ]
# init store of import state
self.import_list = [ None for _ in range(len(self.midi_files)) ]
# pre-cache all data
if cache:
# iterate through midi files
for index in range(len(self.file_names)):
# import data to memory
self.import_data(index)
def import_data(self, index):
''' import midi data to memory '''
# get midi by index
midi = self.midi_files[index]
# get midi tracks
tracks = self.midi2tracks(midi)
# get note tracks matrix
matrix = self.tracks2matrix(tracks)
# get melody format from matrix
melody = self.matrix2melody(matrix)
# downsample over time
melody = melody[::self.ds]
# store matrix in import list
self.import_list[index] = melody
def midi2tracks(self, midi):
''' extract tracks from mido.MidiFile '''
# initialise tracks list
tracks = []
if len(midi.tracks) == 1:
ts = [0]
else:
ts = range(len(midi.tracks))[1:4]
# iterate over tracks in midi (excl. meta track, extra), [melody, chords, bass]
#for i in range(len(midi.tracks))[1:4]:
for i in ts:
# store track data as dict for processing
track = []
# iterate messages in track
for msg in midi.tracks[i][:]:
# ensure note data only
if msg.type in ['note_on', 'note_off']:
# init note data dict
note = {}
# store each note data
#note['type'] = msg.type
#note['channel'] = msg.channel
note['note'] = msg.note
note['time'] = msg.time
#note['velocity'] = msg.velocity
note['velocity'] = 0 if msg.type == 'note_off' else 1
# store note data
track.append(note)
# store track notes
tracks.append(track)
# return extracted midi tracks
return tracks
def tracks2matrix(self, tracks: list):
''' convert tracks to matrix '''
# initialise track matricies list
m = []
# iterate tracks
for track in tracks:
# initialise note state vector, 7-bit note depth
N = np.zeros(128, dtype = np.int16)
# initialise track note matrix (zero init column)
M = np.zeros((128, 1), dtype = np.int16)
# iterate messages in track
for msg in track:
# if time step changes, store intermediate notes
if int(msg['time']) != 0:
# extend note state vector over range time step
n = np.stack([ N for _ in range( int(msg['time']) ) ]).T
# append note state vector to track note matrix
M = np.concatenate( [M, n], axis = 1 )
# update value of note vector by index
N[int(msg['note'])] = int(msg['velocity'])
# store track note matrix
m.append(M)
# get max length track
s = max([ track.shape[1] for track in m ])
# pad tracks to max length of time axis, stack on new axis
M = np.stack([ np.pad(track, ((0, 0), (0, s - track.shape[1])))
for track in m ], axis = 2)
# return stacked tracks note matrix
return M
def matrix2melody(self, matrix):
''' extract melody from note matrix '''
# get track note matrix for melody only
M = matrix[:,:,0]
# init zero melody, default negative one
#melody = np.ones(M.shape[1])*-1
melody = np.zeros(M.shape[1])
# get index (note, time) where nonzero
j = np.where( M != 0 )
# set melody note at time by index
melody[j[1]] = j[0]
# return extracted melody
return melody
def __getitem__(self, index):
''' return tracks note matrix '''
# check for import state
if self.import_list[index] is None:
# import data to memory
self.import_data(index)
# return data if already imported
return self.import_list[index]
'''
def linear_quantize(samples, q_levels):
samples = samples.clone()
samples -= samples.min(dim=-1)[0].expand_as(samples)
samples /= samples.max(dim=-1)[0].expand_as(samples)
samples *= q_levels - EPSILON
samples += EPSILON / 2
return samples.long()
def linear_dequantize(samples, q_levels):
return samples.float() / (q_levels / 2) - 1
def q_zero(q_levels):
return q_levels // 2
'''
def __len__(self):
''' return total midi files '''
# return number of midi files
return len(self.file_names)
class MelodyDataLoader(torch.utils.data.DataLoader):
def __init__(self, dataset, batch_size, seq_len, overlap_len,
*args, **kwargs):
super().__init__(dataset, batch_size, *args, **kwargs)
self.seq_len = seq_len
self.overlap_len = overlap_len
def __iter__(self):
for batch in super().__iter__():
(batch_size, n_samples) = batch.size()
reset = True
#print(self.overlap_len, n_samples, self.seq_len)
for seq_begin in range(self.overlap_len, n_samples, self.seq_len)[:-1]:
from_index = seq_begin - self.overlap_len
to_index = seq_begin + self.seq_len
sequences = batch[:, from_index : to_index]
input_sequences = sequences[:, : -1]
#print(input_sequences.shape)
target_sequences = sequences[:, self.overlap_len :].contiguous()
yield (input_sequences, reset, target_sequences)
reset = False
def __len__(self):
raise NotImplementedError()
|
[
"numpy.pad",
"numpy.zeros",
"numpy.where",
"os.path.join",
"os.listdir",
"numpy.concatenate"
] |
[((5349, 5369), 'numpy.zeros', 'np.zeros', (['M.shape[1]'], {}), '(M.shape[1])\n', (5357, 5369), True, 'import numpy as np\n'), ((5430, 5446), 'numpy.where', 'np.where', (['(M != 0)'], {}), '(M != 0)\n', (5438, 5446), True, 'import numpy as np\n'), ((3972, 4001), 'numpy.zeros', 'np.zeros', (['(128)'], {'dtype': 'np.int16'}), '(128, dtype=np.int16)\n', (3980, 4001), True, 'import numpy as np\n'), ((4083, 4117), 'numpy.zeros', 'np.zeros', (['(128, 1)'], {'dtype': 'np.int16'}), '((128, 1), dtype=np.int16)\n', (4091, 4117), True, 'import numpy as np\n'), ((579, 599), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (589, 599), False, 'import os\n'), ((700, 733), 'os.path.join', 'os.path.join', (['dir_path', 'file_name'], {}), '(dir_path, file_name)\n', (712, 733), False, 'import os\n'), ((4926, 4974), 'numpy.pad', 'np.pad', (['track', '((0, 0), (0, s - track.shape[1]))'], {}), '(track, ((0, 0), (0, s - track.shape[1])))\n', (4932, 4974), True, 'import numpy as np\n'), ((4538, 4568), 'numpy.concatenate', 'np.concatenate', (['[M, n]'], {'axis': '(1)'}), '([M, n], axis=1)\n', (4552, 4568), True, 'import numpy as np\n')]
|
## Copyright 2015-2019 <NAME>, <NAME>
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from PyFlow.Core import NodeBase
from PyFlow.Core.PathsRegistry import PathsRegistry
from PyFlow.Core.NodeBase import NodePinsSuggestionsHelper
from PyFlow.Core.Common import *
from PyFlow.Packages.PyFlowBase.Nodes import FLOW_CONTROL_ORANGE
import threading
class forLoopBegin(NodeBase):
def __init__(self, name):
super(forLoopBegin, self).__init__(name)
self._working = False
self.currentIndex = 0
self.prevIndex = -1
self.inExec = self.createInputPin('inExec', 'ExecPin', None, self.compute)
self.firstIndex = self.createInputPin('Start', 'IntPin')
self.lastIndex = self.createInputPin('Stop', 'IntPin')
self.loopEndNode = self.createInputPin('Paired block', 'StringPin')
self.loopEndNode.setInputWidgetVariant("ObjectPathWIdget")
self.loopBody = self.createOutputPin('LoopBody', 'ExecPin')
self.index = self.createOutputPin('Index', 'IntPin')
self.headerColor = FLOW_CONTROL_ORANGE
self.setExperimental()
@staticmethod
def pinTypeHints():
helper = NodePinsSuggestionsHelper()
helper.addInputDataType('ExecPin')
helper.addInputDataType('IntPin')
helper.addOutputDataType('ExecPin')
helper.addOutputDataType('IntPin')
helper.addInputStruct(StructureType.Single)
helper.addOutputStruct(StructureType.Single)
return helper
@staticmethod
def category():
return 'FlowControl'
@staticmethod
def keywords():
return ['iter']
@staticmethod
def description():
return 'For loop begin block'
def reset(self):
self.currentIndex = 0
self.prevIndex = -1
#self._working = False
def isDone(self):
indexTo = self.lastIndex.getData()
if self.currentIndex >= indexTo:
self.reset()
#loopEndNode = PathsRegistry().getEntity(self.loopEndNode.getData())
#loopEndNode.completed.call()
self._working = False
return True
return False
def onNext(self, *args, **kwargs):
while not self.isDone():
if self.currentIndex > self.prevIndex:
self.index.setData(self.currentIndex)
self.prevIndex = self.currentIndex
self.loopBody.call()
def compute(self, *args, **kwargs):
self.reset()
endNodePath = self.loopEndNode.getData()
loopEndNode = PathsRegistry().getEntity(endNodePath)
if loopEndNode is not None:
if loopEndNode.loopBeginNode.getData() != self.path():
self.setError("Invalid pair")
return
if self.graph() is not loopEndNode.graph():
err = "block ends in different graphs"
self.setError(err)
loopEndNode.setError(err)
return
else:
self.setError("{} not found".format(endNodePath))
if not self._working:
self.thread = threading.Thread(target=self.onNext,args=(self, args, kwargs))
self.thread.start()
self._working = True
#self.onNext(*args, **kwargs)
|
[
"PyFlow.Core.PathsRegistry.PathsRegistry",
"threading.Thread",
"PyFlow.Core.NodeBase.NodePinsSuggestionsHelper"
] |
[((1672, 1699), 'PyFlow.Core.NodeBase.NodePinsSuggestionsHelper', 'NodePinsSuggestionsHelper', ([], {}), '()\n', (1697, 1699), False, 'from PyFlow.Core.NodeBase import NodePinsSuggestionsHelper\n'), ((3609, 3672), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.onNext', 'args': '(self, args, kwargs)'}), '(target=self.onNext, args=(self, args, kwargs))\n', (3625, 3672), False, 'import threading\n'), ((3055, 3070), 'PyFlow.Core.PathsRegistry.PathsRegistry', 'PathsRegistry', ([], {}), '()\n', (3068, 3070), False, 'from PyFlow.Core.PathsRegistry import PathsRegistry\n')]
|
import re
class Command:
def __init__(self, name, register, jump_addr=None):
self.name = name
self.register = register
self.jump_addr = jump_addr
class Program:
def __init__(self, commands, registers):
self.commands = commands
self.registers = registers
self.instr_ptr = 0
def exec_next_command(self):
cmd = self.commands[self.instr_ptr]
if cmd.name == "hlf":
self.registers[cmd.register] //= 2
self.instr_ptr += 1
elif cmd.name == "tpl":
self.registers[cmd.register] *= 3
self.instr_ptr += 1
elif cmd.name == "inc":
self.registers[cmd.register] += 1
self.instr_ptr += 1
elif cmd.name == "jmp":
self.instr_ptr += cmd.jump_addr
elif cmd.name == "jie":
self.instr_ptr += cmd.jump_addr if self.registers[cmd.register] % 2 == 0 else 1
elif cmd.name == "jio":
self.instr_ptr += cmd.jump_addr if self.registers[cmd.register] == 1 else 1
else:
raise ValueError("Unsupported command: ", cmd.name)
def run(self):
while self.instr_ptr < len(self.commands):
self.exec_next_command()
def solve(commands):
pgm = Program(commands, {"a": 0, "b": 0})
pgm.run()
return pgm.registers["b"]
def parse(file_name):
with open(file_name, "r") as f:
commands = []
for line in f.readlines():
if any([cmd in line for cmd in ["inc", "tpl", "hlf"]]):
_, cmd, r, _ = re.split(r"([a-z]+) ([a|b])", line)
commands.append(Command(cmd, r))
elif "jmp" in line:
_, cmd, jmp_addr, _ = re.split(r"([a-z]+) ([+|-][0-9]+)", line)
commands.append(Command(cmd, None, int(jmp_addr)))
if any([cmd in line for cmd in ["jie", "jio"]]):
_, cmd, r, jmp_addr, _ = re.split(r"([a-z]+) ([a|b]), ([+\-0-9]+)", line)
commands.append(Command(cmd, r, int(jmp_addr)))
return commands
if __name__ == '__main__':
print(solve(parse("data.txt")))
|
[
"re.split"
] |
[((1575, 1609), 're.split', 're.split', (['"""([a-z]+) ([a|b])"""', 'line'], {}), "('([a-z]+) ([a|b])', line)\n", (1583, 1609), False, 'import re\n'), ((1941, 1989), 're.split', 're.split', (['"""([a-z]+) ([a|b]), ([+\\\\-0-9]+)"""', 'line'], {}), "('([a-z]+) ([a|b]), ([+\\\\-0-9]+)', line)\n", (1949, 1989), False, 'import re\n'), ((1730, 1770), 're.split', 're.split', (['"""([a-z]+) ([+|-][0-9]+)"""', 'line'], {}), "('([a-z]+) ([+|-][0-9]+)', line)\n", (1738, 1770), False, 'import re\n')]
|
# Generated by Django 3.0.5 on 2020-04-17 21:07
import uuid
import django_fsm
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("paywall", "0001_initial"),
]
operations = [
migrations.RemoveField(model_name="paymententry", name="payment",),
migrations.AddField(
model_name="paymententry",
name="ext_id",
field=models.CharField(db_index=True, default=uuid.uuid4, max_length=100),
),
migrations.AddField(
model_name="paymententry",
name="fraud_status",
field=django_fsm.FSMField(
choices=[
("unknown", "unknown"),
("accepted", "accepted"),
("rejected", "rejected"),
("check", "needs manual verification"),
],
default="unknown",
max_length=50,
protected=True,
),
),
migrations.AddField(
model_name="paymententry",
name="payment_status",
field=django_fsm.FSMField(
choices=[
("new", "new"),
("prepared", "in progress"),
("pre-auth", "pre-authed"),
("charge_started", "charge process started"),
("partially_paid", "partially paid"),
("paid", "paid"),
("failed", "failed"),
("refund_started", "refund started"),
("refunded", "refunded"),
],
default="prepared",
max_length=50,
protected=True,
),
),
]
|
[
"django.db.migrations.RemoveField",
"django_fsm.FSMField",
"django.db.models.CharField"
] |
[((255, 320), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""paymententry"""', 'name': '"""payment"""'}), "(model_name='paymententry', name='payment')\n", (277, 320), False, 'from django.db import migrations, models\n'), ((436, 503), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'default': 'uuid.uuid4', 'max_length': '(100)'}), '(db_index=True, default=uuid.uuid4, max_length=100)\n', (452, 503), False, 'from django.db import migrations, models\n'), ((635, 842), 'django_fsm.FSMField', 'django_fsm.FSMField', ([], {'choices': "[('unknown', 'unknown'), ('accepted', 'accepted'), ('rejected', 'rejected'),\n ('check', 'needs manual verification')]", 'default': '"""unknown"""', 'max_length': '(50)', 'protected': '(True)'}), "(choices=[('unknown', 'unknown'), ('accepted',\n 'accepted'), ('rejected', 'rejected'), ('check',\n 'needs manual verification')], default='unknown', max_length=50,\n protected=True)\n", (654, 842), False, 'import django_fsm\n'), ((1142, 1500), 'django_fsm.FSMField', 'django_fsm.FSMField', ([], {'choices': "[('new', 'new'), ('prepared', 'in progress'), ('pre-auth', 'pre-authed'), (\n 'charge_started', 'charge process started'), ('partially_paid',\n 'partially paid'), ('paid', 'paid'), ('failed', 'failed'), (\n 'refund_started', 'refund started'), ('refunded', 'refunded')]", 'default': '"""prepared"""', 'max_length': '(50)', 'protected': '(True)'}), "(choices=[('new', 'new'), ('prepared', 'in progress'), (\n 'pre-auth', 'pre-authed'), ('charge_started', 'charge process started'),\n ('partially_paid', 'partially paid'), ('paid', 'paid'), ('failed',\n 'failed'), ('refund_started', 'refund started'), ('refunded',\n 'refunded')], default='prepared', max_length=50, protected=True)\n", (1161, 1500), False, 'import django_fsm\n')]
|
from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import (
PostgreSQLDataSourceCreator,
)
from tests.integration.feature_repos.integration_test_repo_config import (
IntegrationTestRepoConfig,
)
FULL_REPO_CONFIGS = [
IntegrationTestRepoConfig(
provider="local",
offline_store_creator=PostgreSQLDataSourceCreator,
online_store_creator=PostgreSQLDataSourceCreator,
),
]
|
[
"tests.integration.feature_repos.integration_test_repo_config.IntegrationTestRepoConfig"
] |
[((260, 414), 'tests.integration.feature_repos.integration_test_repo_config.IntegrationTestRepoConfig', 'IntegrationTestRepoConfig', ([], {'provider': '"""local"""', 'offline_store_creator': 'PostgreSQLDataSourceCreator', 'online_store_creator': 'PostgreSQLDataSourceCreator'}), "(provider='local', offline_store_creator=\n PostgreSQLDataSourceCreator, online_store_creator=\n PostgreSQLDataSourceCreator)\n", (285, 414), False, 'from tests.integration.feature_repos.integration_test_repo_config import IntegrationTestRepoConfig\n')]
|
import random
def utils_min_required():
responses = [
"Sorry, I need your opinion on a movie "\
"before I can give you quality recommendations.",
"Sorry, I don't have enough information yet "\
"to make a good recommendation.",
"I can't give a good recommendation yet. Please "\
"tell me about some movies you've watched first.",
"It's gonna be hard for me to give you some good "\
"recommendations if I don't know anything about your tastes.",
"I don't think I'm ready to give a recommendation yet. "\
"How about you tell me about some movies you've watched?",
"Please tell me about some movies you watched first. "\
"Then I'll be able to give you some great recommendations"
]
return random.choice(responses)
def utils_quotations():
responses = [
"Hmm seems like you messed up your quotation marks. " \
"Try again.",
"Uh oh, I don't think your quotation marks are correct. ",
"It's hard for me to understand which movie you're talking about.",
"To help me understand, please put quotation marks around the " \
"movie like this \"The Wizard of Oz\"",
"It's hard for me to understand with your quotation marks.",
"Oops, seems like your quotation marks aren't quite right.",
"Please re-check your quotation marks. There should be two "\
"in your response surrounding the movie title.",
"I'm having trouble reading your sentence because of the "\
"quotation marks. Can you please try again? ",
]
return random.choice(responses)
def utils_new_movie():
responses = [
"Interesting, I haven't heard of that movie.",
"Hmm I haven't heard of that movie.",
"Wow that movie is new to me. I don't know much about it.",
"I've actually never heard of that movie before! Unfortunately "\
"that means \nI can't give you some good recommendations based "\
"on that one.",
"That movie is actually unfamiliar to me.",
"To be honest, I haven't seen that movie before, so it'll "\
"be hard to recommend you a movie based on that one."
]
return random.choice(responses)
def utils_liked():
responses1 = [
"Great, glad you liked that one.",
"Okay got it that was a good movie.",
"Nice, sounds like that movie was right up your alley."
"Wow so you like those kinds of movies. "\
"I think you'll like my recommendations.",
"Glad you liked the movie.",
"Sounds like you enjoyed that one.",
"Good, glad you enjoyed it.",
"Okay, got it, I think I have some other ones that you'll like as well.",
"Awesome, glad you liked it."
]
responses2 = [
" Now feel free to tell me about some more movies or say "\
"'Recommendations please!' to hear my recommendations. ",
" Any more movies you've seen? ",
" You're giving me some great feedback.",
" What other movies have you seen? ",
" Any other movies you've seen? ",
" Any more movie opinions I should know?",
" Anything else you want to tell me before I give my recommendations?"
]
response1 = random.choice(responses1)
response2 = ''
if random.uniform(0, 1) < 0.3:
response2 = random.choice(responses2)
return response1 + response2
def utils_disliked():
responses1 = [
"Okay got it you didn't like that one.",
"Gotcha so that wasn't the movie for you.",
"Okay you didn't like that one.",
"Yeah I've heard other people didn't like that one as well.",
"So you didn't like that one got it.",
"That really wasn't your movie huh.",
"That movie wasn't for you then. I'll keep that in mind.",
"Okay so you did not like that one.",
]
responses2 = [
" Now feel free to tell me about some more movies or say "\
"'Recommendations please!' to hear my recommendations. ",
" Any more movies you've seen? ",
" You're giving me some great feedback.",
" What other movies have you seen?",
" Any other movies you've seen?",
" Got any more hot takes?",
" Any more movie opinions I should know?",
" Anything else you want to tell me before I give my recommendations?"
]
response1 = random.choice(responses1)
response2 = ''
if random.uniform(0, 1) < 0.3:
response2 = random.choice(responses2)
return response1 + response2
def utils_more_opinions():
responses = [
" Now feel free to tell me about some more movies or say "\
"'Recommendations please!' to hear my recommendations.",
" Any more movies you've seen? ",
" You're giving me some great feedback.",
" What other movies have you seen?",
" Any other movies you've seen?",
" Got any more opinions on movies you've seen?",
" Any more movie opinions I should know?",
" Anything else you want to tell me before I give my recommendations?"
]
return random.choice(responses)
def utils_liked_match(match):
responses = [
f"Got it! So you liked {match}.",
f"Okay so {match} was your type of movie.",
f"Gotcha so {match} was a good fit for you.",
f"Okay got it you liked {match}.",
f"Sounds like {match} was right up your alley.",
f"Okay so your tastes align with {match}, got it."
]
return random.choice(responses)
def utils_disliked_match(match):
responses = [
f"Okay sounds like {match} wasn't the " \
"movie for you.",
f"Okay got it {match} wasn't your cup of tea.",
f"So you did not like {match}. Got it.",
f"Gotcha so you didn't like {match}.",
f"Okay so {match} was the movie you didn't like.",
f"{match} wasn't the movie for you then.",
f"Got it you didn't like {match}."
]
return random.choice(responses)
def utils_low_confidence():
responses = [
"Sorry, I couldn't tell if you liked that " \
"movie or not.",
"Sorry I'm not sure if you liked that one.",
"I can't quite tell what you think about that movie.",
"I'm not quite sure if you liked that movie or not.",
"Wait.. did you like or dislike that movie?",
"I think I need some more information to tell whether you "\
"liked that movie or not.",
"Hang on, I couldn't tell if you liked that movie or not."
]
return random.choice(responses)
|
[
"random.choice",
"random.uniform"
] |
[((842, 866), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (855, 866), False, 'import random\n'), ((1715, 1739), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (1728, 1739), False, 'import random\n'), ((2359, 2383), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (2372, 2383), False, 'import random\n'), ((3442, 3467), 'random.choice', 'random.choice', (['responses1'], {}), '(responses1)\n', (3455, 3467), False, 'import random\n'), ((4656, 4681), 'random.choice', 'random.choice', (['responses1'], {}), '(responses1)\n', (4669, 4681), False, 'import random\n'), ((5413, 5437), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (5426, 5437), False, 'import random\n'), ((5835, 5859), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (5848, 5859), False, 'import random\n'), ((6358, 6382), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (6371, 6382), False, 'import random\n'), ((6987, 7011), 'random.choice', 'random.choice', (['responses'], {}), '(responses)\n', (7000, 7011), False, 'import random\n'), ((3494, 3514), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (3508, 3514), False, 'import random\n'), ((3542, 3567), 'random.choice', 'random.choice', (['responses2'], {}), '(responses2)\n', (3555, 3567), False, 'import random\n'), ((4708, 4728), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (4722, 4728), False, 'import random\n'), ((4756, 4781), 'random.choice', 'random.choice', (['responses2'], {}), '(responses2)\n', (4769, 4781), False, 'import random\n')]
|
import cv2
import math
import imutils
import numpy as np
import warnings
from sklearn.cluster import KMeans
from skimage.morphology import *
from skimage.util import *
class OD_CV:
def loadImage(self, filepath):
return cv2.imread(filepath)
def resizeImage(self, image, kar, width, height):
if kar:
return imutils.resize(image, width=width)
else:
return cv2.resize(image, (width, height))
def maskIMG(self, image, pts):
mask = np.zeros(image.shape[:2], np.uint8)
mask = cv2.drawContours(mask, [pts], -1, (255,255,255), -1)
image = cv2.bitwise_and(image.copy(), image.copy(), mask=mask)
return image
def cropIMG(self, image, coords):
return image[coords[1]:coords[1]+coords[3], coords[0]:coords[0]+coords[2]]
def dmntCOLOR(self, image):
image = cv2.resize(image, (0, 0), None, 0.5, 0.5)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
clt = KMeans(n_clusters=5, random_state=0).fit(image.reshape(-1, 3))
numLabels = np.arange(0, len(np.unique(clt.labels_)) + 1)
hist, _ = np.histogram(clt.labels_, bins=numLabels)
# normalize the histogram, such that it sums to one
hist = hist.astype("float")
hist /= hist.sum()
palette = np.zeros((40, 200, 3), dtype="uint8")
startX = 0
# loop over the percentage of each cluster and the color of
# each cluster
for percent, color in zip(hist, clt.cluster_centers_):
# plot the relative percentage of each cluster
endX = startX + (percent * 200)
cv2.rectangle(palette, (int(startX), 0), (int(endX), 40), color.astype("uint8").tolist(), -1)
startX = endX
return palette
def thinning(self, image, flag):
image = img_as_float(image)
if flag: #live streaming, faster computation
skeleton = skeletonize(image > 0)
else: # upload image mode
skeleton = skeletonize(image > 0, method='lee')
return img_as_ubyte(skeleton)
def thresholding(self, image, auto, lower, max):
if auto:
_, image = cv2.threshold(image.copy(), 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU)
else:
_, image = cv2.threshold(image.copy(), lower, max, cv2.THRESH_BINARY)
return image
def color_CVT(self, image, flag):
if flag==1:
return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
elif flag==2:
return cv2.cvtColor(image, cv2.COLOR_GRAY2BGR)
def compareIMG(self, image):
h,w = image[0].shape[:2]
bg = np.zeros((h*2+3, w*2+3, 3), np.uint8)
bg[0:h, 0:w] = image[0]
bg[0:h, w+3:w*2+3] = image[1]
bg[h+3:h*2+3, 0:w] = image[2]
bg[h+3:h*2+3, w+3:w*2+3] = image[3]
bg[0:h*2+3, w:w+3] = (255,255,255)
bg[0:h * 2 + 3, w+1:w + 2] = (0,0,0)
bg[h:h+3, 0:w*2+3] = (255,255,255)
bg[h+1:h + 2, 0:w * 2 + 3] = (0,0,0)
return bg
def Color_picker(self, color, size, wid=(10,20)):
image = np.zeros((size[0], size[1], 3), np.uint8)
image[:] = color
if wid[0]>0:
cv2.rectangle(image, (int(size[0]*.01), int(size[1]*.01)), (int(size[0]*.99), int(size[1]*.99)), (0,0,0), wid[0], cv2.LINE_AA)
if wid[1]>0:
cv2.rectangle(image, (int(size[0]*.1), int(size[1]*.1)), (int(size[0]*.9), int(size[1]*.9)), (255,255,255), wid[1], cv2.LINE_AA)
return image
def drawPrimitives(self, image, flag, points, color, thick, width=None, height=None):
if flag==1:
cv2.polylines(image, points, True, color, thick)
elif flag==2:
cv2.rectangle(image, (points[0]-10, points[1]-10), (points[0]+points[2]+10, points[1]+points[3]+10), color, thick)
elif flag==3:
x, y, w, h = points
width_Total = x+int(w*0.05)+width
if width_Total>x+w+10:
width_Total = x+w+10
cv2.rectangle(image, (x+int(w*0.05),y-10-height), (width_Total, y-10-2), color, thick)
elif flag == 4:
x, y, w, h = points
if width!=0:
w = width
cv2.rectangle(image, (x-10,y+10+h), (x+10+w, y+10+h+height), color, thick)
def drawText(self, flag, image, text, coords, fontstyle, color, thick, height=None):
font = None
if fontstyle == 0:
font = cv2.FONT_HERSHEY_COMPLEX
elif fontstyle == 1:
font = cv2.FONT_HERSHEY_COMPLEX_SMALL
elif fontstyle == 2:
font = cv2.FONT_HERSHEY_DUPLEX
elif fontstyle == 3:
font = cv2.FONT_HERSHEY_PLAIN
elif fontstyle == 4:
font = cv2.FONT_HERSHEY_SCRIPT_COMPLEX
elif fontstyle == 5:
font = cv2.FONT_HERSHEY_SCRIPT_SIMPLEX
elif fontstyle == 6:
font = cv2.FONT_HERSHEY_TRIPLEX
elif fontstyle == 7:
font = cv2.FONT_ITALIC
x, y, w, h = coords
if flag==1:
cv2.putText(image, text, (x+int(w*0.07),y-19), font, thick, color, 1)
elif flag==2:
cv2.putText(image, text, (x-10,y+10+h+height-5), font, thick, color, 1)
def canny(self, image, GK_size, GSigma, DK_size, D_i, EK_size, E_i, cAuto, cThres_L, cThres_H, isDIL, isERO, isThin=None):
imgGray = self.color_CVT(image.copy(), 1)
image = cv2.GaussianBlur(imgGray, (GK_size, GK_size), GSigma)
if cAuto:
sigma = 0.33
v = np.median(image.copy())
# apply automatic Canny edge detection using the computed median
lower = int(max(0, (1.0 - sigma) * v))
upper = int(min(255, (1.0 + sigma) * v))
else:
lower, upper = cThres_L, cThres_H
image = cv2.Canny(image, lower, upper)
if isThin:
image = self.thinning(image)
edge = image.copy()
if isDIL:
Dial_K = np.ones((DK_size, DK_size))
image = cv2.dilate(image, Dial_K, iterations=D_i)
if isERO:
Ero_K = np.ones((EK_size, EK_size))
image = cv2.erode(image, Ero_K, iterations=E_i)
return image, edge
def sobel(self, image, GK_size, GSigma, DK_size, D_i, EK_size, E_i, Ksize, isDIL, isERO, isThin, Thres_auto, Thres_L, Thres_H, isThres, live_flag):
imgGray = self.color_CVT(image.copy(), 1)
imgBlur = cv2.GaussianBlur(imgGray, (GK_size, GK_size), GSigma)
Sobel_X = cv2.Sobel(imgBlur.copy(), cv2.CV_64F, 1, 0, ksize=Ksize)
Sobel_Y = cv2.Sobel(imgBlur.copy(), cv2.CV_64F, 0, 1, ksize=Ksize)
sobel_img = cv2.bitwise_or(cv2.convertScaleAbs(Sobel_X), cv2.convertScaleAbs(Sobel_Y))
if isThres:
sobel_img = self.thresholding(sobel_img.copy(), Thres_auto, Thres_L, Thres_H)
if isThin:
sobel_img = self.thinning(sobel_img, live_flag)
image = sobel_img
edge = image.copy()
if isDIL:
Dial_K = np.ones((DK_size, DK_size))
image = cv2.dilate(image, Dial_K, iterations=D_i)
if isERO:
Ero_K = np.ones((EK_size, EK_size))
image = cv2.erode(image, Ero_K, iterations=E_i)
return image, edge
def prewitt(self, image, GK_size, GSigma, DK_size, D_i, EK_size, E_i, isDIL, isERO, isThin, Thres_auto, Thres_L, Thres_H, isThres, live_flag):
imgGray = self.color_CVT(image.copy(), 1)
imgBlur = cv2.GaussianBlur(imgGray, (GK_size, GK_size), GSigma)
kernelx = np.array([[1, 1, 1], [0, 0, 0], [-1, -1, -1]])
kernelx2 = np.array([[-1, -1, -1], [0, 0, 0], [1, 1, 1]])
kernely = np.array([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]])
kernely2 = np.array([[1, 0, -1], [1, 0, -1], [1, 0, -1]])
kernels = [kernelx, kernelx2, kernely, kernely2]
prewitt_img = np.zeros_like(imgGray)
for k in kernels:
prewitt_img = cv2.bitwise_or(prewitt_img, cv2.filter2D(imgBlur.copy(), -1, k))
if isThres:
prewitt_img = self.thresholding(prewitt_img.copy(), Thres_auto, Thres_L, Thres_H)
if isThin:
prewitt_img = self.thinning(prewitt_img, live_flag)
image = prewitt_img
edge = image.copy()
if isDIL:
Dial_K = np.ones((DK_size, DK_size))
image = cv2.dilate(image, Dial_K, iterations=D_i)
if isERO:
Ero_K = np.ones((EK_size, EK_size))
image = cv2.erode(image, Ero_K, iterations=E_i)
return image, edge
def getTarget_Contour(self, image, image_edg, minArea, shapes, circular, color, thick):
contours, _ = cv2.findContours(image_edg.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
finalCountours = []
for c in contours:
for i, shape in enumerate(shapes):
if not shape:
continue
area = cv2.contourArea(c)
if area > minArea[i]:
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.02 * peri, True)
bbox = cv2.boundingRect(approx)
rect = cv2.minAreaRect(c)
box = cv2.boxPoints(rect)
rbox = np.int0(box)
if i==0 and len(approx) == 3: #Shape >>> vertices
finalCountours.append((approx, bbox, c, i, rbox))
elif i==1 and len(approx) == 4:
finalCountours.append((approx, bbox, c, i, rbox))
elif i==2:
if len(approx) < 8:
continue
circularity = 4 * math.pi * (area / (peri*peri))
if circular[0] < circularity < circular[1]:
finalCountours.append((approx, bbox, c, i, rbox))
elif i==3:
finalCountours.append((approx, bbox, c, i, rbox))
finalCountours = sorted(finalCountours, key=lambda x:x[1], reverse=True)
if thick==0:
thick = -1
for cont in finalCountours:
cv2.drawContours(image, [cont[2]], -1, color, thick)
return finalCountours, image
def reorder(self, points):
NewPoints = np.zeros_like(points)
points = points.reshape((4,2))
add = points.sum(1)
NewPoints[0] = points[np.argmin(add)]
NewPoints[2] = points[np.argmax(add)]
d_dx = np.diff(points, axis=1)
NewPoints[1] = points[np.argmin(d_dx)]
NewPoints[3] = points[np.argmax(d_dx)]
return NewPoints
def warpImg(self, image, points, size, pad=3):
points = self.reorder(points)
# if not size:
w, h = points[1][0][0] - points[0][0][0], points[3][0][1]-points[0][0][1]
sw,sh = w/size[0], h/size[1]
# w,h = size
pts1 = np.float32(points)
pts2 = np.float32([[0,0], [w,0], [w,h], [0,h]])
matrix = cv2.getPerspectiveTransform(pts1, pts2)
imgWarp = cv2.warpPerspective(image, matrix, (w,h))
imgWarp = imgWarp[pad:imgWarp.shape[0]-pad, pad:imgWarp.shape[1]-pad] #remove boundary
return imgWarp, (sw,sh)
def findDist(self, flag, pts, scale, unit, deci):
unit_conv = 1
if unit[0]==0:
unit_conv = 1
elif unit[0]==1:
unit_conv = 10
elif unit[0]==2:
unit_conv = 1000
if unit[1]==0:
unit_conv /= 1
elif unit[1]==1:
unit_conv /= 10
elif unit[1]==2:
unit_conv /= 1000
def dist(pt1, pt2):
return ((pt2[0] // scale[0] - pt1[0] // scale[0]) ** 2 + (pt2[1] // scale[1] - pt1[1] // scale[1]) ** 2) ** 0.5
# if flag==1: # rect
pts = self.reorder(pts)
if flag==1: #rect
p1, p2, p3 = pts[0][0], pts[1][0], pts[3][0]
else:
p1, p2, p3 = pts[0], pts[1], pts[3]
if p1[1]==p2[1]:
newW = (p2[0]-p1[0])//scale[0]
else:
newW = dist(p1, p2)
if p1[0]==p3[0]:
newH = (p3[1]-p1[1])//scale[1]
else:
newH = dist(p1, p3)
newW = newW*unit_conv
newH = newH*unit_conv
return "{:.{}f}".format(newW, deci), "{:.{}f}".format(newH, deci)
def deviceList(self):
index = 0
arr, res = [], []
while True:
cap = cv2.VideoCapture(index)
if not cap.read()[0]:
break
else:
arr.append(str(index))
res.append((cap.get(cv2.CAP_PROP_FRAME_WIDTH), cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
cap.release()
index += 1
return arr, res
|
[
"cv2.GaussianBlur",
"numpy.argmax",
"cv2.getPerspectiveTransform",
"cv2.arcLength",
"cv2.approxPolyDP",
"numpy.ones",
"numpy.argmin",
"numpy.histogram",
"cv2.boxPoints",
"cv2.rectangle",
"imutils.resize",
"cv2.erode",
"cv2.minAreaRect",
"numpy.unique",
"cv2.warpPerspective",
"numpy.zeros_like",
"cv2.contourArea",
"warnings.simplefilter",
"cv2.dilate",
"cv2.cvtColor",
"sklearn.cluster.KMeans",
"warnings.catch_warnings",
"cv2.convertScaleAbs",
"cv2.drawContours",
"cv2.boundingRect",
"cv2.resize",
"cv2.Canny",
"numpy.int0",
"cv2.putText",
"cv2.polylines",
"numpy.float32",
"numpy.zeros",
"cv2.VideoCapture",
"cv2.imread",
"numpy.diff",
"numpy.array"
] |
[((245, 265), 'cv2.imread', 'cv2.imread', (['filepath'], {}), '(filepath)\n', (255, 265), False, 'import cv2\n'), ((519, 554), 'numpy.zeros', 'np.zeros', (['image.shape[:2]', 'np.uint8'], {}), '(image.shape[:2], np.uint8)\n', (527, 554), True, 'import numpy as np\n'), ((571, 625), 'cv2.drawContours', 'cv2.drawContours', (['mask', '[pts]', '(-1)', '(255, 255, 255)', '(-1)'], {}), '(mask, [pts], -1, (255, 255, 255), -1)\n', (587, 625), False, 'import cv2\n'), ((895, 936), 'cv2.resize', 'cv2.resize', (['image', '(0, 0)', 'None', '(0.5)', '(0.5)'], {}), '(image, (0, 0), None, 0.5, 0.5)\n', (905, 936), False, 'import cv2\n'), ((1191, 1232), 'numpy.histogram', 'np.histogram', (['clt.labels_'], {'bins': 'numLabels'}), '(clt.labels_, bins=numLabels)\n', (1203, 1232), True, 'import numpy as np\n'), ((1378, 1415), 'numpy.zeros', 'np.zeros', (['(40, 200, 3)'], {'dtype': '"""uint8"""'}), "((40, 200, 3), dtype='uint8')\n", (1386, 1415), True, 'import numpy as np\n'), ((2752, 2797), 'numpy.zeros', 'np.zeros', (['(h * 2 + 3, w * 2 + 3, 3)', 'np.uint8'], {}), '((h * 2 + 3, w * 2 + 3, 3), np.uint8)\n', (2760, 2797), True, 'import numpy as np\n'), ((3219, 3260), 'numpy.zeros', 'np.zeros', (['(size[0], size[1], 3)', 'np.uint8'], {}), '((size[0], size[1], 3), np.uint8)\n', (3227, 3260), True, 'import numpy as np\n'), ((5596, 5649), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['imgGray', '(GK_size, GK_size)', 'GSigma'], {}), '(imgGray, (GK_size, GK_size), GSigma)\n', (5612, 5649), False, 'import cv2\n'), ((5999, 6029), 'cv2.Canny', 'cv2.Canny', (['image', 'lower', 'upper'], {}), '(image, lower, upper)\n', (6008, 6029), False, 'import cv2\n'), ((6635, 6688), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['imgGray', '(GK_size, GK_size)', 'GSigma'], {}), '(imgGray, (GK_size, GK_size), GSigma)\n', (6651, 6688), False, 'import cv2\n'), ((7695, 7748), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['imgGray', '(GK_size, GK_size)', 'GSigma'], {}), '(imgGray, (GK_size, GK_size), GSigma)\n', (7711, 7748), False, 'import cv2\n'), ((7768, 7814), 'numpy.array', 'np.array', (['[[1, 1, 1], [0, 0, 0], [-1, -1, -1]]'], {}), '([[1, 1, 1], [0, 0, 0], [-1, -1, -1]])\n', (7776, 7814), True, 'import numpy as np\n'), ((7835, 7881), 'numpy.array', 'np.array', (['[[-1, -1, -1], [0, 0, 0], [1, 1, 1]]'], {}), '([[-1, -1, -1], [0, 0, 0], [1, 1, 1]])\n', (7843, 7881), True, 'import numpy as np\n'), ((7901, 7947), 'numpy.array', 'np.array', (['[[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]]'], {}), '([[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]])\n', (7909, 7947), True, 'import numpy as np\n'), ((7968, 8014), 'numpy.array', 'np.array', (['[[1, 0, -1], [1, 0, -1], [1, 0, -1]]'], {}), '([[1, 0, -1], [1, 0, -1], [1, 0, -1]])\n', (7976, 8014), True, 'import numpy as np\n'), ((8096, 8118), 'numpy.zeros_like', 'np.zeros_like', (['imgGray'], {}), '(imgGray)\n', (8109, 8118), True, 'import numpy as np\n'), ((10586, 10607), 'numpy.zeros_like', 'np.zeros_like', (['points'], {}), '(points)\n', (10599, 10607), True, 'import numpy as np\n'), ((10787, 10810), 'numpy.diff', 'np.diff', (['points'], {'axis': '(1)'}), '(points, axis=1)\n', (10794, 10810), True, 'import numpy as np\n'), ((11209, 11227), 'numpy.float32', 'np.float32', (['points'], {}), '(points)\n', (11219, 11227), True, 'import numpy as np\n'), ((11244, 11288), 'numpy.float32', 'np.float32', (['[[0, 0], [w, 0], [w, h], [0, h]]'], {}), '([[0, 0], [w, 0], [w, h], [0, h]])\n', (11254, 11288), True, 'import numpy as np\n'), ((11303, 11342), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['pts1', 'pts2'], {}), '(pts1, pts2)\n', (11330, 11342), False, 'import cv2\n'), ((11362, 11404), 'cv2.warpPerspective', 'cv2.warpPerspective', (['image', 'matrix', '(w, h)'], {}), '(image, matrix, (w, h))\n', (11381, 11404), False, 'import cv2\n'), ((360, 394), 'imutils.resize', 'imutils.resize', (['image'], {'width': 'width'}), '(image, width=width)\n', (374, 394), False, 'import imutils\n'), ((430, 464), 'cv2.resize', 'cv2.resize', (['image', '(width, height)'], {}), '(image, (width, height))\n', (440, 464), False, 'import cv2\n'), ((951, 976), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (974, 976), False, 'import warnings\n'), ((991, 1022), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (1012, 1022), False, 'import warnings\n'), ((2545, 2584), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (2557, 2584), False, 'import cv2\n'), ((3762, 3810), 'cv2.polylines', 'cv2.polylines', (['image', 'points', '(True)', 'color', 'thick'], {}), '(image, points, True, color, thick)\n', (3775, 3810), False, 'import cv2\n'), ((6162, 6189), 'numpy.ones', 'np.ones', (['(DK_size, DK_size)'], {}), '((DK_size, DK_size))\n', (6169, 6189), True, 'import numpy as np\n'), ((6211, 6252), 'cv2.dilate', 'cv2.dilate', (['image', 'Dial_K'], {'iterations': 'D_i'}), '(image, Dial_K, iterations=D_i)\n', (6221, 6252), False, 'import cv2\n'), ((6293, 6320), 'numpy.ones', 'np.ones', (['(EK_size, EK_size)'], {}), '((EK_size, EK_size))\n', (6300, 6320), True, 'import numpy as np\n'), ((6342, 6381), 'cv2.erode', 'cv2.erode', (['image', 'Ero_K'], {'iterations': 'E_i'}), '(image, Ero_K, iterations=E_i)\n', (6351, 6381), False, 'import cv2\n'), ((6877, 6905), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['Sobel_X'], {}), '(Sobel_X)\n', (6896, 6905), False, 'import cv2\n'), ((6907, 6935), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['Sobel_Y'], {}), '(Sobel_Y)\n', (6926, 6935), False, 'import cv2\n'), ((7227, 7254), 'numpy.ones', 'np.ones', (['(DK_size, DK_size)'], {}), '((DK_size, DK_size))\n', (7234, 7254), True, 'import numpy as np\n'), ((7276, 7317), 'cv2.dilate', 'cv2.dilate', (['image', 'Dial_K'], {'iterations': 'D_i'}), '(image, Dial_K, iterations=D_i)\n', (7286, 7317), False, 'import cv2\n'), ((7358, 7385), 'numpy.ones', 'np.ones', (['(EK_size, EK_size)'], {}), '((EK_size, EK_size))\n', (7365, 7385), True, 'import numpy as np\n'), ((7407, 7446), 'cv2.erode', 'cv2.erode', (['image', 'Ero_K'], {'iterations': 'E_i'}), '(image, Ero_K, iterations=E_i)\n', (7416, 7446), False, 'import cv2\n'), ((8538, 8565), 'numpy.ones', 'np.ones', (['(DK_size, DK_size)'], {}), '((DK_size, DK_size))\n', (8545, 8565), True, 'import numpy as np\n'), ((8587, 8628), 'cv2.dilate', 'cv2.dilate', (['image', 'Dial_K'], {'iterations': 'D_i'}), '(image, Dial_K, iterations=D_i)\n', (8597, 8628), False, 'import cv2\n'), ((8669, 8696), 'numpy.ones', 'np.ones', (['(EK_size, EK_size)'], {}), '((EK_size, EK_size))\n', (8676, 8696), True, 'import numpy as np\n'), ((8718, 8757), 'cv2.erode', 'cv2.erode', (['image', 'Ero_K'], {'iterations': 'E_i'}), '(image, Ero_K, iterations=E_i)\n', (8727, 8757), False, 'import cv2\n'), ((10440, 10492), 'cv2.drawContours', 'cv2.drawContours', (['image', '[cont[2]]', '(-1)', 'color', 'thick'], {}), '(image, [cont[2]], -1, color, thick)\n', (10456, 10492), False, 'import cv2\n'), ((10708, 10722), 'numpy.argmin', 'np.argmin', (['add'], {}), '(add)\n', (10717, 10722), True, 'import numpy as np\n'), ((10755, 10769), 'numpy.argmax', 'np.argmax', (['add'], {}), '(add)\n', (10764, 10769), True, 'import numpy as np\n'), ((10842, 10857), 'numpy.argmin', 'np.argmin', (['d_dx'], {}), '(d_dx)\n', (10851, 10857), True, 'import numpy as np\n'), ((10890, 10905), 'numpy.argmax', 'np.argmax', (['d_dx'], {}), '(d_dx)\n', (10899, 10905), True, 'import numpy as np\n'), ((12808, 12831), 'cv2.VideoCapture', 'cv2.VideoCapture', (['index'], {}), '(index)\n', (12824, 12831), False, 'import cv2\n'), ((2628, 2667), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_GRAY2BGR'], {}), '(image, cv2.COLOR_GRAY2BGR)\n', (2640, 2667), False, 'import cv2\n'), ((3847, 3978), 'cv2.rectangle', 'cv2.rectangle', (['image', '(points[0] - 10, points[1] - 10)', '(points[0] + points[2] + 10, points[1] + points[3] + 10)', 'color', 'thick'], {}), '(image, (points[0] - 10, points[1] - 10), (points[0] + points[\n 2] + 10, points[1] + points[3] + 10), color, thick)\n', (3860, 3978), False, 'import cv2\n'), ((5326, 5412), 'cv2.putText', 'cv2.putText', (['image', 'text', '(x - 10, y + 10 + h + height - 5)', 'font', 'thick', 'color', '(1)'], {}), '(image, text, (x - 10, y + 10 + h + height - 5), font, thick,\n color, 1)\n', (5337, 5412), False, 'import cv2\n'), ((9173, 9191), 'cv2.contourArea', 'cv2.contourArea', (['c'], {}), '(c)\n', (9188, 9191), False, 'import cv2\n'), ((1042, 1078), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': '(5)', 'random_state': '(0)'}), '(n_clusters=5, random_state=0)\n', (1048, 1078), False, 'from sklearn.cluster import KMeans\n'), ((1143, 1165), 'numpy.unique', 'np.unique', (['clt.labels_'], {}), '(clt.labels_)\n', (1152, 1165), True, 'import numpy as np\n'), ((9259, 9281), 'cv2.arcLength', 'cv2.arcLength', (['c', '(True)'], {}), '(c, True)\n', (9272, 9281), False, 'import cv2\n'), ((9312, 9350), 'cv2.approxPolyDP', 'cv2.approxPolyDP', (['c', '(0.02 * peri)', '(True)'], {}), '(c, 0.02 * peri, True)\n', (9328, 9350), False, 'import cv2\n'), ((9379, 9403), 'cv2.boundingRect', 'cv2.boundingRect', (['approx'], {}), '(approx)\n', (9395, 9403), False, 'import cv2\n'), ((9432, 9450), 'cv2.minAreaRect', 'cv2.minAreaRect', (['c'], {}), '(c)\n', (9447, 9450), False, 'import cv2\n'), ((9478, 9497), 'cv2.boxPoints', 'cv2.boxPoints', (['rect'], {}), '(rect)\n', (9491, 9497), False, 'import cv2\n'), ((9526, 9538), 'numpy.int0', 'np.int0', (['box'], {}), '(box)\n', (9533, 9538), True, 'import numpy as np\n'), ((4363, 4459), 'cv2.rectangle', 'cv2.rectangle', (['image', '(x - 10, y + 10 + h)', '(x + 10 + w, y + 10 + h + height)', 'color', 'thick'], {}), '(image, (x - 10, y + 10 + h), (x + 10 + w, y + 10 + h + height\n ), color, thick)\n', (4376, 4459), False, 'import cv2\n')]
|
import os
import socket
import subprocess
from vimpdb import config
from vimpdb import errors
def get_eggs_paths():
import vim_bridge
vimpdb_path = config.get_package_path(errors.ReturnCodeError())
vim_bridge_path = config.get_package_path(vim_bridge.bridged)
return (
os.path.dirname(vimpdb_path),
os.path.dirname(vim_bridge_path),
)
class Communicator(object):
def __init__(self, script, server_name):
self.script = script
self.server_name = server_name
def prepare_subprocess(self, *args):
parts = self.script.split()
parts.extend(args)
return parts
def _remote_expr(self, expr):
parts = self.prepare_subprocess('--servername',
self.server_name, "--remote-expr", expr)
p = subprocess.Popen(parts, stdout=subprocess.PIPE)
return_code = p.wait()
if return_code:
raise errors.RemoteUnavailable()
child_stdout = p.stdout
output = child_stdout.read()
return output.strip()
def _send(self, command):
# add ':<BS>' to hide last keys sent in VIM command-line
command = ''.join((command, ':<BS>'))
parts = self.prepare_subprocess('--servername',
self.server_name, "--remote-send", command)
return_code = subprocess.call(parts)
if return_code:
raise errors.RemoteUnavailable()
class ProxyToVim(object):
"""
use subprocess to launch Vim instance that use clientserver mode
to communicate with Vim instance used for debugging.
"""
def __init__(self, communicator):
self.communicator = communicator
def _send(self, command):
self.communicator._send(command)
config.logger.debug("sent: %s" % command)
def _remote_expr(self, expr):
return self.communicator._remote_expr(expr)
def setupRemote(self):
if not self.isRemoteSetup():
# source vimpdb.vim
proxy_package_path = config.get_package_path(self)
filename = os.path.join(proxy_package_path, "vimpdb.vim")
command = "<C-\><C-N>:source %s<CR>" % filename
self._send(command)
for egg_path in get_eggs_paths():
self._send(':call PDB_setup_egg(%s)<CR>' % repr(egg_path))
self._send(':call PDB_init_controller()')
def isRemoteSetup(self):
status = self._expr("exists('*PDB_setup_egg')")
return status == '1'
def showFeedback(self, feedback):
if not feedback:
return
feedback_list = feedback.splitlines()
self.setupRemote()
self._send(':call PDB_show_feedback(%s)<CR>' % repr(feedback_list))
def displayLocals(self, feedback):
if not feedback:
return
feedback_list = feedback.splitlines()
self.setupRemote()
self._send(':call PDB_reset_watch()<CR>')
for line in feedback_list:
self._send(':call PDB_append_watch([%s])<CR>' % repr(line))
def showFileAtLine(self, filename, lineno):
if os.path.exists(filename):
self._showFileAtLine(filename, lineno)
def _showFileAtLine(self, filename, lineno):
# Windows compatibility:
# Windows command-line does not play well with backslash in filename.
# So turn backslash to slash; Vim knows how to translate them back.
filename = filename.replace('\\', '/')
self.setupRemote()
self._send(':call PDB_show_file_at_line("%s", "%d")<CR>'
% (filename, lineno))
def _expr(self, expr):
config.logger.debug("expr: %s" % expr)
result = self._remote_expr(expr)
config.logger.debug("result: %s" % result)
return result
# code leftover from hacking
# def getText(self, prompt):
# self.setupRemote()
# command = self._expr('PDB_get_command("%s")' % prompt)
# return command
class ProxyFromVim(object):
BUFLEN = 512
socket_factory = socket.socket
def __init__(self, port):
self.socket_inactive = True
self.port = port
def bindSocket(self):
if self.socket_inactive:
self.socket = self.socket_factory(
socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(('', self.port))
self.socket_inactive = False
def closeSocket(self):
if not self.socket_inactive:
self.socket.close()
self.socket_inactive = True
def waitFor(self, pdb):
self.bindSocket()
(message, address) = self.socket.recvfrom(self.BUFLEN)
config.logger.debug("command: %s" % message)
return message
# code leftover from hacking
# def eat_stdin(self):
# sys.stdout.write('-- Type Ctrl-D to continue --\n')
# sys.stdout.flush()
# sys.stdin.readlines()
|
[
"subprocess.Popen",
"vimpdb.errors.ReturnCodeError",
"vimpdb.config.get_package_path",
"os.path.dirname",
"os.path.exists",
"subprocess.call",
"vimpdb.config.logger.debug",
"vimpdb.errors.RemoteUnavailable",
"os.path.join"
] |
[((231, 274), 'vimpdb.config.get_package_path', 'config.get_package_path', (['vim_bridge.bridged'], {}), '(vim_bridge.bridged)\n', (254, 274), False, 'from vimpdb import config\n'), ((183, 207), 'vimpdb.errors.ReturnCodeError', 'errors.ReturnCodeError', ([], {}), '()\n', (205, 207), False, 'from vimpdb import errors\n'), ((296, 324), 'os.path.dirname', 'os.path.dirname', (['vimpdb_path'], {}), '(vimpdb_path)\n', (311, 324), False, 'import os\n'), ((334, 366), 'os.path.dirname', 'os.path.dirname', (['vim_bridge_path'], {}), '(vim_bridge_path)\n', (349, 366), False, 'import os\n'), ((811, 858), 'subprocess.Popen', 'subprocess.Popen', (['parts'], {'stdout': 'subprocess.PIPE'}), '(parts, stdout=subprocess.PIPE)\n', (827, 858), False, 'import subprocess\n'), ((1341, 1363), 'subprocess.call', 'subprocess.call', (['parts'], {}), '(parts)\n', (1356, 1363), False, 'import subprocess\n'), ((1763, 1804), 'vimpdb.config.logger.debug', 'config.logger.debug', (["('sent: %s' % command)"], {}), "('sent: %s' % command)\n", (1782, 1804), False, 'from vimpdb import config\n'), ((3110, 3134), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (3124, 3134), False, 'import os\n'), ((3633, 3671), 'vimpdb.config.logger.debug', 'config.logger.debug', (["('expr: %s' % expr)"], {}), "('expr: %s' % expr)\n", (3652, 3671), False, 'from vimpdb import config\n'), ((3721, 3763), 'vimpdb.config.logger.debug', 'config.logger.debug', (["('result: %s' % result)"], {}), "('result: %s' % result)\n", (3740, 3763), False, 'from vimpdb import config\n'), ((4754, 4798), 'vimpdb.config.logger.debug', 'config.logger.debug', (["('command: %s' % message)"], {}), "('command: %s' % message)\n", (4773, 4798), False, 'from vimpdb import config\n'), ((932, 958), 'vimpdb.errors.RemoteUnavailable', 'errors.RemoteUnavailable', ([], {}), '()\n', (956, 958), False, 'from vimpdb import errors\n'), ((1406, 1432), 'vimpdb.errors.RemoteUnavailable', 'errors.RemoteUnavailable', ([], {}), '()\n', (1430, 1432), False, 'from vimpdb import errors\n'), ((2022, 2051), 'vimpdb.config.get_package_path', 'config.get_package_path', (['self'], {}), '(self)\n', (2045, 2051), False, 'from vimpdb import config\n'), ((2075, 2121), 'os.path.join', 'os.path.join', (['proxy_package_path', '"""vimpdb.vim"""'], {}), "(proxy_package_path, 'vimpdb.vim')\n", (2087, 2121), False, 'import os\n')]
|
from django.urls import path, include
from rest_framework import routers
from drf_file_management.views import FileAPIView
router = routers.SimpleRouter()
router.register(r'file', FileAPIView)
app_name = 'drf_file_management'
urlpatterns = router.urls
|
[
"rest_framework.routers.SimpleRouter"
] |
[((134, 156), 'rest_framework.routers.SimpleRouter', 'routers.SimpleRouter', ([], {}), '()\n', (154, 156), False, 'from rest_framework import routers\n')]
|
# Generated by Django 2.2.1 on 2020-10-08 10:05
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blog', '0004_auto_20201003_2247'),
]
operations = [
migrations.AddField(
model_name='channel',
name='updated',
field=models.DateTimeField(auto_now=True, verbose_name='修改时间'),
),
migrations.AlterField(
model_name='article',
name='created',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
),
migrations.AlterField(
model_name='article',
name='updated',
field=models.DateTimeField(auto_now=True, verbose_name='修改时间'),
),
migrations.AlterField(
model_name='attachment',
name='Created',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
),
migrations.AlterField(
model_name='attachment',
name='Updated',
field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
),
migrations.AlterField(
model_name='channel',
name='created',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
),
migrations.AlterField(
model_name='tagsmodels',
name='tags_created',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
),
migrations.AlterField(
model_name='tagsmodels',
name='tags_updated',
field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
),
]
|
[
"django.db.models.DateTimeField"
] |
[((362, 418), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""修改时间"""'}), "(auto_now=True, verbose_name='修改时间')\n", (382, 418), False, 'from django.db import migrations, models\n'), ((542, 618), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""创建时间"""'}), "(default=django.utils.timezone.now, verbose_name='创建时间')\n", (562, 618), False, 'from django.db import migrations, models\n'), ((742, 798), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""修改时间"""'}), "(auto_now=True, verbose_name='修改时间')\n", (762, 798), False, 'from django.db import migrations, models\n'), ((925, 1001), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""创建时间"""'}), "(default=django.utils.timezone.now, verbose_name='创建时间')\n", (945, 1001), False, 'from django.db import migrations, models\n'), ((1128, 1184), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""更新时间"""'}), "(auto_now=True, verbose_name='更新时间')\n", (1148, 1184), False, 'from django.db import migrations, models\n'), ((1308, 1384), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""创建时间"""'}), "(default=django.utils.timezone.now, verbose_name='创建时间')\n", (1328, 1384), False, 'from django.db import migrations, models\n'), ((1516, 1592), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""创建时间"""'}), "(default=django.utils.timezone.now, verbose_name='创建时间')\n", (1536, 1592), False, 'from django.db import migrations, models\n'), ((1724, 1780), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""更新时间"""'}), "(auto_now=True, verbose_name='更新时间')\n", (1744, 1780), False, 'from django.db import migrations, models\n')]
|
from pyop3.api import AccessMode, ArgType, IterationRegion
from pyop3.codegen.compiled import build_wrapper, get_c_function
from pyop3.obj.kernel import Kernel
from pyop3.obj.maps import IdentityMap
from pyop3.obj.sets import AbstractSet
from pyop3.utils import cached_property, debug_check_args
def filter_args(args, access_modes):
reductions = []
seen = set()
exchange = []
dirty = []
mats = []
for arg, access_mode in zip(args, access_modes):
if arg.argtype == ArgType.DAT:
dirty.append(arg.obj)
if arg.map_tuple != (IdentityMap, ) and arg.obj not in seen:
exchange.append((arg.obj, access_mode))
seen.add(arg.obj)
if arg.argtype == ArgType.GLOBAL and access_mode != AccessMode.READ:
reductions.append((arg.obj, access_mode))
if arg.argtype == ArgType.MAT:
mats.append((arg.obj, access_mode))
return tuple(exchange), tuple(dirty), tuple(reductions), tuple(mats)
def noop():
pass
class ParLoop(object):
def validator(self, kernel, iterset, *args,
iteration_region=IterationRegion.ALL,
pass_layer_arg=False):
assert isinstance(kernel, Kernel)
assert isinstance(iterset, AbstractSet)
assert len(args) == len(kernel.access_modes)
assert isinstance(iteration_region, IterationRegion)
seen = {}
for arg, access_mode in zip(args, kernel.access_modes):
assert arg.validate(iterset)
try:
assert seen[arg] == access_mode
except KeyError:
seen[arg] = access_mode
@debug_check_args(validator)
def __init__(self, kernel, iterset, *args,
iteration_region=IterationRegion.ALL,
pass_layer_arg=False):
self.args = tuple(args)
self.kernel = kernel
self.iterset = iterset
self.iteration_region = iteration_region
self.pass_layer_arg = pass_layer_arg
exchange, dirty, reductions, mats = filter_args(args, kernel.access_modes)
self.exchange = exchange
self.dirty = dirty
self.reductions = reductions
self.mats = mats
# Micro-optimisations
if not reductions or iterset.comm.size == 1:
self.reduction_begin = noop
self.reduction_end = noop
if not exchange or iterset.comm.size == 1:
self.g2lbegin = noop
self.g2lend = noop
self.l2gbegin = noop
self.l2gend = noop
if not dirty or iterset.comm.size == 1:
self.mark_dirty = noop
def g2lbegin(self):
for d, mode in self.exchange:
d.g2lbegin(mode)
def g2lend(self):
for d, mode in self.exchange:
d.g2lend(mode)
def l2gbegin(self):
for d, mode in self.exchange:
d.l2gbegin(mode)
def l2gend(self):
for d, mode in self.exchange:
d.l2gend(mode)
def reduction_begin(self):
for g, mode in self.reductions:
g.reduction_begin(mode)
def reduction_end(self):
for g, mode in self.reductions:
g.reduction_end(mode)
def mark_dirty(self):
for d in self.dirty:
d.halo_valid = False
def execute(self):
self.g2lbegin()
self.dll(0, self.iterset.core_size, *self.c_arglist)
self.g2lend()
self.dll(self.iterset.core_size, self.iterset.size, *self.c_arglist)
self.reduction_begin()
# self.l2gbegin()
self.reduction_end()
# self.l2gend()
# self.mark_dirty()
@cached_property
def _arglist_and_types(self):
arglist = self.iterset._parloop_args_
argtypes = self.iterset._parloop_argtypes_
maptypes = []
maplist = []
seen = set()
for arg in self.args:
arglist += arg._parloop_args_
argtypes += arg._parloop_argtypes_
for map_ in arg.map_tuple:
for m, t in zip(map_._parloop_args_, map_._parloop_argtypes_):
if m in seen:
continue
seen.add(m)
maplist.append(m)
maptypes.append(t)
return arglist + tuple(maplist), argtypes + tuple(maptypes)
@cached_property
def c_argtypes(self):
return self._arglist_and_types[1]
@cached_property
def c_arglist(self):
return self._arglist_and_types[0]
code_cache = {}
@cached_property
def dll(self):
key = (self.kernel, self.iterset._codegen_info_,
*(a._codegen_info_ for a in self.args),
self.iteration_region,
self.pass_layer_arg)
try:
return self.code_cache[key]
except KeyError:
wrapper = build_wrapper(*key[:-2],
iteration_region=self.iteration_region,
pass_layer_arg=self.pass_layer_arg)
dll = get_c_function(wrapper, self.c_argtypes)
return self.code_cache.setdefault(key, dll)
|
[
"pyop3.codegen.compiled.build_wrapper",
"pyop3.utils.debug_check_args",
"pyop3.codegen.compiled.get_c_function"
] |
[((1659, 1686), 'pyop3.utils.debug_check_args', 'debug_check_args', (['validator'], {}), '(validator)\n', (1675, 1686), False, 'from pyop3.utils import cached_property, debug_check_args\n'), ((4875, 4979), 'pyop3.codegen.compiled.build_wrapper', 'build_wrapper', (['*key[:-2]'], {'iteration_region': 'self.iteration_region', 'pass_layer_arg': 'self.pass_layer_arg'}), '(*key[:-2], iteration_region=self.iteration_region,\n pass_layer_arg=self.pass_layer_arg)\n', (4888, 4979), False, 'from pyop3.codegen.compiled import build_wrapper, get_c_function\n'), ((5066, 5106), 'pyop3.codegen.compiled.get_c_function', 'get_c_function', (['wrapper', 'self.c_argtypes'], {}), '(wrapper, self.c_argtypes)\n', (5080, 5106), False, 'from pyop3.codegen.compiled import build_wrapper, get_c_function\n')]
|
"""Utilities for TFRecords
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import tensorflow as tf
TFRECORDS_EXT = ".tfrecords"
def tfrecord_name_and_json_name(output):
output = normalize_tfrecords_path(output)
json_output = output[: -len(TFRECORDS_EXT)] + ".json"
return output, json_output
def normalize_tfrecords_path(output):
if not output.endswith(TFRECORDS_EXT):
output += TFRECORDS_EXT
return output
def bytes_feature(value):
if isinstance(value, type(tf.constant(0))):
value = value.numpy() # BytesList won't unpack a string from an EagerTensor.
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def dataset_to_tfrecord(dataset, output):
"""Writes a tf.data.Dataset into a TFRecord file.
Parameters
----------
dataset : ``tf.data.Dataset``
The tf.data.Dataset that you want to write into a TFRecord file.
output : str
Path to the TFRecord file. Besides this file, a .json file is also created.
This json file is needed when you want to convert the TFRecord file back into
a dataset.
Returns
-------
``tf.Operation``
A tf.Operation that, when run, writes contents of dataset to a file. When
running in eager mode, calling this function will write the file. Otherwise, you
have to call session.run() on the returned operation.
"""
output, json_output = tfrecord_name_and_json_name(output)
# dump the structure so that we can read it back
meta = {
"output_types": repr(tf.compat.v1.data.get_output_types(dataset)),
"output_shapes": repr(tf.compat.v1.data.get_output_shapes(dataset)),
}
with open(json_output, "w") as f:
json.dump(meta, f)
# create a custom map function that serializes the dataset
def serialize_example_pyfunction(*args):
feature = {}
for i, f in enumerate(args):
key = f"feature{i}"
feature[key] = bytes_feature(f)
example_proto = tf.train.Example(features=tf.train.Features(feature=feature))
return example_proto.SerializeToString()
def tf_serialize_example(*args):
args = tf.nest.flatten(args)
args = [tf.io.serialize_tensor(f) for f in args]
tf_string = tf.py_function(serialize_example_pyfunction, args, tf.string)
return tf.reshape(tf_string, ()) # The result is a scalar
dataset = dataset.map(tf_serialize_example)
writer = tf.data.experimental.TFRecordWriter(output)
return writer.write(dataset)
def dataset_from_tfrecord(tfrecord, num_parallel_reads=None):
"""Reads TFRecords and returns a dataset.
The TFRecord file must have been created using the :any:`dataset_to_tfrecord`
function.
Parameters
----------
tfrecord : str or list
Path to the TFRecord file. Pass a list if you are sure several tfrecords need
the same map function.
num_parallel_reads: int
A `tf.int64` scalar representing the number of files to read in parallel.
Defaults to reading files sequentially.
Returns
-------
``tf.data.Dataset``
A dataset that contains the data from the TFRecord file.
"""
# these imports are needed so that eval can work
from tensorflow import TensorShape # noqa: F401
if isinstance(tfrecord, str):
tfrecord = [tfrecord]
tfrecord = [tfrecord_name_and_json_name(path) for path in tfrecord]
json_output = tfrecord[0][1]
tfrecord = [path[0] for path in tfrecord]
raw_dataset = tf.data.TFRecordDataset(
tfrecord, num_parallel_reads=num_parallel_reads
)
with open(json_output) as f:
meta = json.load(f)
for k, v in meta.items():
meta[k] = eval(v)
output_types = tf.nest.flatten(meta["output_types"])
output_shapes = tf.nest.flatten(meta["output_shapes"])
feature_description = {}
for i in range(len(output_types)):
key = f"feature{i}"
feature_description[key] = tf.io.FixedLenFeature([], tf.string)
def _parse_function(example_proto):
# Parse the input tf.Example proto using the dictionary above.
args = tf.io.parse_single_example(
serialized=example_proto, features=feature_description
)
args = tf.nest.flatten(args)
args = [tf.io.parse_tensor(v, t) for v, t in zip(args, output_types)]
args = [tf.reshape(v, s) for v, s in zip(args, output_shapes)]
return tf.nest.pack_sequence_as(meta["output_types"], args)
return raw_dataset.map(_parse_function)
|
[
"tensorflow.train.BytesList",
"json.dump",
"tensorflow.py_function",
"json.load",
"tensorflow.data.TFRecordDataset",
"tensorflow.train.Int64List",
"tensorflow.reshape",
"tensorflow.io.parse_single_example",
"tensorflow.train.Features",
"tensorflow.constant",
"tensorflow.io.serialize_tensor",
"tensorflow.nest.flatten",
"tensorflow.io.parse_tensor",
"tensorflow.data.experimental.TFRecordWriter",
"tensorflow.compat.v1.data.get_output_shapes",
"tensorflow.io.FixedLenFeature",
"tensorflow.compat.v1.data.get_output_types",
"tensorflow.nest.pack_sequence_as"
] |
[((2658, 2701), 'tensorflow.data.experimental.TFRecordWriter', 'tf.data.experimental.TFRecordWriter', (['output'], {}), '(output)\n', (2693, 2701), True, 'import tensorflow as tf\n'), ((3736, 3808), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['tfrecord'], {'num_parallel_reads': 'num_parallel_reads'}), '(tfrecord, num_parallel_reads=num_parallel_reads)\n', (3759, 3808), True, 'import tensorflow as tf\n'), ((3960, 3997), 'tensorflow.nest.flatten', 'tf.nest.flatten', (["meta['output_types']"], {}), "(meta['output_types'])\n", (3975, 3997), True, 'import tensorflow as tf\n'), ((4018, 4056), 'tensorflow.nest.flatten', 'tf.nest.flatten', (["meta['output_shapes']"], {}), "(meta['output_shapes'])\n", (4033, 4056), True, 'import tensorflow as tf\n'), ((1918, 1936), 'json.dump', 'json.dump', (['meta', 'f'], {}), '(meta, f)\n', (1927, 1936), False, 'import json\n'), ((2368, 2389), 'tensorflow.nest.flatten', 'tf.nest.flatten', (['args'], {}), '(args)\n', (2383, 2389), True, 'import tensorflow as tf\n'), ((2467, 2528), 'tensorflow.py_function', 'tf.py_function', (['serialize_example_pyfunction', 'args', 'tf.string'], {}), '(serialize_example_pyfunction, args, tf.string)\n', (2481, 2528), True, 'import tensorflow as tf\n'), ((2544, 2569), 'tensorflow.reshape', 'tf.reshape', (['tf_string', '()'], {}), '(tf_string, ())\n', (2554, 2569), True, 'import tensorflow as tf\n'), ((3872, 3884), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3881, 3884), False, 'import json\n'), ((4188, 4224), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.string'], {}), '([], tf.string)\n', (4209, 4224), True, 'import tensorflow as tf\n'), ((4352, 4439), 'tensorflow.io.parse_single_example', 'tf.io.parse_single_example', ([], {'serialized': 'example_proto', 'features': 'feature_description'}), '(serialized=example_proto, features=\n feature_description)\n', (4378, 4439), True, 'import tensorflow as tf\n'), ((4472, 4493), 'tensorflow.nest.flatten', 'tf.nest.flatten', (['args'], {}), '(args)\n', (4487, 4493), True, 'import tensorflow as tf\n'), ((4658, 4710), 'tensorflow.nest.pack_sequence_as', 'tf.nest.pack_sequence_as', (["meta['output_types']", 'args'], {}), "(meta['output_types'], args)\n", (4682, 4710), True, 'import tensorflow as tf\n'), ((577, 591), 'tensorflow.constant', 'tf.constant', (['(0)'], {}), '(0)\n', (588, 591), True, 'import tensorflow as tf\n'), ((720, 753), 'tensorflow.train.BytesList', 'tf.train.BytesList', ([], {'value': '[value]'}), '(value=[value])\n', (738, 753), True, 'import tensorflow as tf\n'), ((822, 855), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': '[value]'}), '(value=[value])\n', (840, 855), True, 'import tensorflow as tf\n'), ((1743, 1786), 'tensorflow.compat.v1.data.get_output_types', 'tf.compat.v1.data.get_output_types', (['dataset'], {}), '(dataset)\n', (1777, 1786), True, 'import tensorflow as tf\n'), ((1819, 1863), 'tensorflow.compat.v1.data.get_output_shapes', 'tf.compat.v1.data.get_output_shapes', (['dataset'], {}), '(dataset)\n', (1854, 1863), True, 'import tensorflow as tf\n'), ((2406, 2431), 'tensorflow.io.serialize_tensor', 'tf.io.serialize_tensor', (['f'], {}), '(f)\n', (2428, 2431), True, 'import tensorflow as tf\n'), ((4510, 4534), 'tensorflow.io.parse_tensor', 'tf.io.parse_tensor', (['v', 't'], {}), '(v, t)\n', (4528, 4534), True, 'import tensorflow as tf\n'), ((4588, 4604), 'tensorflow.reshape', 'tf.reshape', (['v', 's'], {}), '(v, s)\n', (4598, 4604), True, 'import tensorflow as tf\n'), ((2230, 2264), 'tensorflow.train.Features', 'tf.train.Features', ([], {'feature': 'feature'}), '(feature=feature)\n', (2247, 2264), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Bertrand256
# Created on: 2021-04
from typing import Optional, List
from PyQt5 import QtCore, QtWidgets, QtGui
from PyQt5.QtCore import QVariant, QAbstractTableModel, pyqtSlot, QPoint, QTimer, Qt
from PyQt5.QtGui import QKeySequence
from PyQt5.QtWidgets import QWidget, QMenu, QShortcut, QApplication, QLabel
from mnemonic import Mnemonic
from wnd_utils import WndUtils
class SeedWordsWdg(QWidget):
def __init__(self, parent):
QWidget.__init__(self, parent=parent)
self.layout_main: Optional[QtWidgets.QVBoxLayout] = None
self.spacer: Optional[QtWidgets.QSpacerItem] = None
self.word_count: int = 24
self.mnemonic_words: List[str] = [""] * 24
self.mnemonic = Mnemonic('english')
self.grid_model = MnemonicModel(self, self.mnemonic_words, self.mnemonic.wordlist)
self.popMenuWords: Optional[QMenu] = None
self.setupUi(self)
def setupUi(self, dlg):
dlg.setObjectName("SeedWordsWdg")
self.layout_main = QtWidgets.QVBoxLayout(dlg)
self.layout_main.setObjectName('layout_main')
self.layout_main.setContentsMargins(0, 0, 0, 0)
self.layout_main.setSpacing(3)
self.layout_main.setObjectName("verticalLayout")
self.viewMnemonic = QtWidgets.QTableView(self)
self.viewMnemonic.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.viewMnemonic.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
self.viewMnemonic.setObjectName("viewMnemonic")
self.viewMnemonic.horizontalHeader().setVisible(False)
self.viewMnemonic.horizontalHeader().setStretchLastSection(True)
self.viewMnemonic.verticalHeader().setVisible(False)
self.layout_main.addWidget(self.viewMnemonic)
self.msg = QtWidgets.QLabel(self)
self.msg.setWordWrap(True)
self.msg.setObjectName("msg")
self.msg.setText('You can copy and paste the complete set of seed words into this dialog directly (separated '
'by spaces, commas or line breaks).')
self.layout_main.addWidget(self.msg)
self.viewMnemonic.verticalHeader().setDefaultSectionSize(
self.viewMnemonic.verticalHeader().fontMetrics().height() + 6)
self.viewMnemonic.customContextMenuRequested.connect(self.on_viewMnemonic_customContextMenuRequested)
# words grid context menu
self.popMenuWords = QMenu(self)
# copy action
self.actCopyWords = self.popMenuWords.addAction("\u274f Copy all words")
self.actCopyWords.triggered.connect(self.on_copy_seed_words_triggered)
self.actCopyWords.setShortcut(QKeySequence("Ctrl+C")) # not working on Mac (used here to display
# shortcut in menu item
QShortcut(QKeySequence("Ctrl+C"), self.viewMnemonic).activated.connect(self.on_copy_seed_words_triggered)
# paste action
self.act_paste_words = self.popMenuWords.addAction("\u23ce Paste")
self.act_paste_words.triggered.connect(self.on_paste_seed_words_triggered)
self.act_paste_words.setShortcut(QKeySequence("Ctrl+V"))
QShortcut(QKeySequence("Ctrl+V"), self.viewMnemonic).activated.connect(self.on_paste_seed_words_triggered)
def set_word_count(self, word_count):
self.word_count = word_count
self.grid_model.set_words_count(word_count)
def setup_mnem_view():
width = self.viewMnemonic.width()
width = int((width - (2 * 40)) / 2)
self.viewMnemonic.setModel(self.grid_model)
self.viewMnemonic.setColumnWidth(0, 40)
self.viewMnemonic.setColumnWidth(1, width)
self.viewMnemonic.setColumnWidth(2, 40)
QTimer.singleShot(10, setup_mnem_view)
def set_words(self, words):
for idx, word in enumerate(words):
if idx < len(self.mnemonic_words):
self.mnemonic_words[idx] = word
def get_cur_mnemonic_words(self):
ws = []
for idx, w in enumerate(self.mnemonic_words):
if idx >= self.word_count:
break
ws.append(w)
return ws
def on_copy_seed_words_triggered(self):
try:
ws = self.get_cur_mnemonic_words()
ws_str = '\n'.join(ws)
clipboard = QApplication.clipboard()
if clipboard:
clipboard.setText(ws_str)
except Exception as e:
self.error_msg(str(e))
def on_paste_seed_words_triggered(self):
try:
clipboard = QApplication.clipboard()
if clipboard:
ws_str = clipboard.text()
if isinstance(ws_str, str):
ws_str = ws_str.replace('\n', ' ').replace('\r', ' ').replace(",", ' ')
ws = ws_str.split()
for idx, w in enumerate(ws):
if idx >= self.word_count:
break
self.mnemonic_words[idx] = w
self.grid_model.refresh_view()
except Exception as e:
self.error_msg(str(e))
@pyqtSlot(QPoint)
def on_viewMnemonic_customContextMenuRequested(self, point):
try:
self.popMenuWords.exec_(self.viewMnemonic.mapToGlobal(point))
except Exception as e:
self.error_msg(str(e))
class MnemonicModel(QAbstractTableModel):
def __init__(self, parent, mnemonic_word_list, dictionary_words):
QAbstractTableModel.__init__(self, parent)
self.parent = parent
self.dictionary_words = dictionary_words
self.mnemonic_word_list = mnemonic_word_list
self.words_count = 24
self.read_only = False
self.columns = [
"#",
'Word',
'#',
'Word'
]
def set_words_count(self, words_count):
self.words_count = words_count
self.refresh_view()
def refresh_view(self):
self.beginResetModel()
self.endResetModel()
def set_read_only(self, ro):
self.read_only = ro
def columnCount(self, parent=None, *args, **kwargs):
return len(self.columns)
def rowCount(self, parent=None, *args, **kwargs):
return self.words_count / 2
def headerData(self, section, orientation, role=None):
if role != 0:
return QVariant()
if orientation == 0x1:
if section < len(self.columns):
return self.columns[section]
return ''
else:
return ' '
def setData(self, index, data, role=None):
row_idx = index.row()
col_idx = index.column()
if 0 <= row_idx < int(self.words_count / 2):
if col_idx == 1:
idx = row_idx
else:
idx = row_idx + int(self.words_count / 2)
self.mnemonic_word_list[idx] = data
return True
def flags(self, index):
col_idx = index.column()
if col_idx in (1, 3):
ret = Qt.ItemIsEnabled | Qt.ItemIsSelectable
if not self.read_only:
ret |= Qt.ItemIsEditable
else:
ret = Qt.ItemIsEnabled
return ret
def data(self, index, role=None):
if index.isValid():
col_idx = index.column()
row_idx = index.row()
if col_idx < len(self.columns):
if role in (Qt.DisplayRole, Qt.EditRole):
if col_idx == 0:
return str(row_idx + 1) + '.'
elif col_idx == 2:
return str(int(self.words_count / 2) + row_idx + 1) + '.'
elif col_idx == 1:
if 0 <= row_idx < int(self.words_count / 2):
return self.mnemonic_word_list[row_idx]
elif col_idx == 3:
if 0 <= row_idx < int(self.words_count / 2):
return self.mnemonic_word_list[int(self.words_count / 2) + row_idx]
elif role == Qt.ForegroundRole:
if 0 <= row_idx < int(self.words_count / 2):
if col_idx in (0, 1):
word_col_idx = 1
else:
word_col_idx = 3
if word_col_idx == 1:
word = self.mnemonic_word_list[row_idx]
elif word_col_idx == 3 and row_idx < int(self.words_count / 2):
word = self.mnemonic_word_list[int(self.words_count / 2) + row_idx]
else:
return
if word and word not in self.dictionary_words:
return QtGui.QColor('red')
elif role == Qt.BackgroundRole:
if col_idx in (0, 2):
return QtGui.QColor('lightgray')
elif role == Qt.TextAlignmentRole:
if col_idx in (0, 2):
return Qt.AlignRight
elif role == Qt.FontRole:
pass
return QVariant()
|
[
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtCore.QVariant",
"PyQt5.QtWidgets.QApplication.clipboard",
"PyQt5.QtGui.QKeySequence",
"PyQt5.QtGui.QColor",
"PyQt5.QtWidgets.QMenu",
"PyQt5.QtCore.QTimer.singleShot",
"PyQt5.QtCore.QAbstractTableModel.__init__",
"PyQt5.QtWidgets.QTableView",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QWidget.__init__",
"mnemonic.Mnemonic",
"PyQt5.QtCore.pyqtSlot"
] |
[((5175, 5191), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', (['QPoint'], {}), '(QPoint)\n', (5183, 5191), False, 'from PyQt5.QtCore import QVariant, QAbstractTableModel, pyqtSlot, QPoint, QTimer, Qt\n'), ((502, 539), 'PyQt5.QtWidgets.QWidget.__init__', 'QWidget.__init__', (['self'], {'parent': 'parent'}), '(self, parent=parent)\n', (518, 539), False, 'from PyQt5.QtWidgets import QWidget, QMenu, QShortcut, QApplication, QLabel\n'), ((774, 793), 'mnemonic.Mnemonic', 'Mnemonic', (['"""english"""'], {}), "('english')\n", (782, 793), False, 'from mnemonic import Mnemonic\n'), ((1060, 1086), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['dlg'], {}), '(dlg)\n', (1081, 1086), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((1322, 1348), 'PyQt5.QtWidgets.QTableView', 'QtWidgets.QTableView', (['self'], {}), '(self)\n', (1342, 1348), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((1840, 1862), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self'], {}), '(self)\n', (1856, 1862), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((2477, 2488), 'PyQt5.QtWidgets.QMenu', 'QMenu', (['self'], {}), '(self)\n', (2482, 2488), False, 'from PyQt5.QtWidgets import QWidget, QMenu, QShortcut, QApplication, QLabel\n'), ((3767, 3805), 'PyQt5.QtCore.QTimer.singleShot', 'QTimer.singleShot', (['(10)', 'setup_mnem_view'], {}), '(10, setup_mnem_view)\n', (3784, 3805), False, 'from PyQt5.QtCore import QVariant, QAbstractTableModel, pyqtSlot, QPoint, QTimer, Qt\n'), ((5532, 5574), 'PyQt5.QtCore.QAbstractTableModel.__init__', 'QAbstractTableModel.__init__', (['self', 'parent'], {}), '(self, parent)\n', (5560, 5574), False, 'from PyQt5.QtCore import QVariant, QAbstractTableModel, pyqtSlot, QPoint, QTimer, Qt\n'), ((9243, 9253), 'PyQt5.QtCore.QVariant', 'QVariant', ([], {}), '()\n', (9251, 9253), False, 'from PyQt5.QtCore import QVariant, QAbstractTableModel, pyqtSlot, QPoint, QTimer, Qt\n'), ((2709, 2731), 'PyQt5.QtGui.QKeySequence', 'QKeySequence', (['"""Ctrl+C"""'], {}), "('Ctrl+C')\n", (2721, 2731), False, 'from PyQt5.QtGui import QKeySequence\n'), ((3146, 3168), 'PyQt5.QtGui.QKeySequence', 'QKeySequence', (['"""Ctrl+V"""'], {}), "('Ctrl+V')\n", (3158, 3168), False, 'from PyQt5.QtGui import QKeySequence\n'), ((4354, 4378), 'PyQt5.QtWidgets.QApplication.clipboard', 'QApplication.clipboard', ([], {}), '()\n', (4376, 4378), False, 'from PyQt5.QtWidgets import QWidget, QMenu, QShortcut, QApplication, QLabel\n'), ((4596, 4620), 'PyQt5.QtWidgets.QApplication.clipboard', 'QApplication.clipboard', ([], {}), '()\n', (4618, 4620), False, 'from PyQt5.QtWidgets import QWidget, QMenu, QShortcut, QApplication, QLabel\n'), ((6421, 6431), 'PyQt5.QtCore.QVariant', 'QVariant', ([], {}), '()\n', (6429, 6431), False, 'from PyQt5.QtCore import QVariant, QAbstractTableModel, pyqtSlot, QPoint, QTimer, Qt\n'), ((2827, 2849), 'PyQt5.QtGui.QKeySequence', 'QKeySequence', (['"""Ctrl+C"""'], {}), "('Ctrl+C')\n", (2839, 2849), False, 'from PyQt5.QtGui import QKeySequence\n'), ((3188, 3210), 'PyQt5.QtGui.QKeySequence', 'QKeySequence', (['"""Ctrl+V"""'], {}), "('Ctrl+V')\n", (3200, 3210), False, 'from PyQt5.QtGui import QKeySequence\n'), ((8854, 8873), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['"""red"""'], {}), "('red')\n", (8866, 8873), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((8996, 9021), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['"""lightgray"""'], {}), "('lightgray')\n", (9008, 9021), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n')]
|
#!/usr/local/bin/python
import spacy
nlp = spacy.load('en')
entityLs = ["ORG","PERSON","DATE","TIME","MONEY","PERCENT","FAC","GPE","NORP","WORK_OF_ART","QUANTITY","LOC","PRODUCT","EVENT","LAW","LANGUAGE","ORDINAL","CARDINAL"]
def updateAlphaLs(text):
alphaLs = []
doc = nlp(text)
for token in doc:
if(token.is_alpha): alphaLs.append(token)
return alphaLs
def updateEntityLs(text):
entityDict = {entity: 0 for entity in entityLs}
doc = nlp(text)
for entity in doc.ents:
entityDict[entity.label_] += 1
return entityDict
def alphaPercentage(text,wordCount):
alphaLs = updateAlphaLs(text)
return(len(alphaLs)/wordCount)
def entityPercentage(text,wordCount):
entityDict = updateEntityLs(text)
entitySum = 0
for k,v in entityDict.items(): entitySum += v
return(entitySum/wordCount)
|
[
"spacy.load"
] |
[((44, 60), 'spacy.load', 'spacy.load', (['"""en"""'], {}), "('en')\n", (54, 60), False, 'import spacy\n')]
|
# Generated by Django 2.0 on 2019-05-21 06:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Summary',
fields=[
('summary_id', models.AutoField(primary_key=True, serialize=False)),
('summary_title', models.TextField()),
('summary_content', models.TextField()),
('publish_date', models.DateTimeField(auto_now=True)),
],
),
]
|
[
"django.db.models.DateTimeField",
"django.db.models.TextField",
"django.db.models.AutoField"
] |
[((298, 349), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (314, 349), False, 'from django.db import migrations, models\n'), ((386, 404), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (402, 404), False, 'from django.db import migrations, models\n'), ((443, 461), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (459, 461), False, 'from django.db import migrations, models\n'), ((497, 532), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (517, 532), False, 'from django.db import migrations, models\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from evaluation.results import packageResults
from dissertation import datasetInfo
from config.routes import getRecordedResultsRoute
MINI_FONTSIZE=10
FONTSIZE = 14 * 4 / 3
NUMBER_FORMAT = "{:.0f}"
interpolationResults = packageResults.interpolationResults.getDictionary()
# Distance in image space:
for dataset in datasetInfo.INTERPOLATION_DATASET_ORDER:
interpolationFactors = list(interpolationResults[dataset][datasetInfo.DATASET_ARCH_NAMES[dataset]["dense"]].keys())
for interpolationFactor in interpolationFactors:
metricResults = [
interpolationResults[dataset][datasetInfo.DATASET_ARCH_NAMES[dataset]["conv"]][interpolationFactor]["interpolateLatentSpace"]["metricImageSpace"]
] + [
interpolationResults[dataset][datasetInfo.DATASET_ARCH_NAMES[dataset]["dense"]][interpolationFactor][interpolationTechnique]["metricImageSpace"]
for interpolationTechnique in datasetInfo.INTERPOLATION_TECHNIQUES
]
x = np.arange(len(datasetInfo.INTERPOLATION_TECHNIQUES) + 1)
means = np.array(list(map(lambda x: x.mean, metricResults)))
stds = np.array(list(map(lambda x: x.standardDeviation, metricResults)))
labels = [datasetInfo.INTERPOLATE_TECHNIQUE_NAMES["interpolateLatentSpace"] + "_{\mathrm{conv}}$"] + \
[datasetInfo.INTERPOLATE_TECHNIQUE_NAMES[interpolationTechnique] + ("_{\mathrm{dense}}$" if interpolationTechnique == "interpolateLatentSpace" else "") for interpolationTechnique in datasetInfo.INTERPOLATION_TECHNIQUES]
plt.figure(figsize=(4, 8))
bars = plt.bar(x, means, yerr=stds, capsize=5)
plt.xticks(x, labels, fontsize=FONTSIZE, rotation=90)
plt.xlabel("Proposed Interpolation $\mathbf{x}$", fontsize=FONTSIZE)
plt.ylabel("$\mathcal{BCE}[\mathbf{x}_{\mathrm{centre}}, \mathbf{x}]$", fontsize=FONTSIZE)
plt.ylim(ymin=0)
plt.yticks(fontsize=MINI_FONTSIZE)
maxVal = max(map(lambda x: x.mean + x.standardDeviation, metricResults))
extraHeight = 0.0125 * maxVal
for bar, error, mean in zip(bars, stds, means):
plt.text(
bar.get_x() + 0.5 * bar.get_width(),
mean + error + extraHeight,
NUMBER_FORMAT.format(mean),
ha="center",
va="bottom",
rotation=0,
fontsize=MINI_FONTSIZE
)
plt.tight_layout()
plt.savefig(getRecordedResultsRoute(dataset + "_" + interpolationFactor + "_" + "metricImageSpace.png"))
plt.close()
# Distance in latent space:
for dataset in datasetInfo.INTERPOLATION_DATASET_ORDER:
interpolationFactors = list(interpolationResults[dataset][datasetInfo.DATASET_ARCH_NAMES[dataset]["dense"]].keys())
for interpolationFactor in interpolationFactors:
for architecture in datasetInfo.ARCH_TYPES:
metricResults = [
interpolationResults[dataset][datasetInfo.DATASET_ARCH_NAMES[dataset][architecture]][interpolationFactor][interpolationTechnique]["metricLatentSpace"]
for interpolationTechnique in datasetInfo.INTERPOLATION_TECHNIQUES
]
x = np.arange(len(datasetInfo.INTERPOLATION_TECHNIQUES))
means = np.array(list(map(lambda x: x.mean, metricResults)))
stds = np.array(list(map(lambda x: x.standardDeviation, metricResults)))
labels = [datasetInfo.INTERPOLATE_TECHNIQUE_NAMES[interpolationTechnique] for interpolationTechnique in datasetInfo.INTERPOLATION_TECHNIQUES]
plt.figure(figsize=(4, 6))
bars = plt.bar(x, means, yerr=stds, capsize=5)
plt.xticks(x, labels, fontsize=FONTSIZE, rotation=90)
plt.xlabel("Proposed Interpolation $\mathbf{x}$", fontsize=FONTSIZE)
plt.ylabel("ED(enc(x_centre), enc(x)) in Latent Space", fontsize=FONTSIZE)
plt.ylim(ymin=0)
plt.yticks(fontsize=FONTSIZE)
maxVal = max(map(lambda x: x.mean + x.standardDeviation, metricResults))
extraHeight = 0.0125 * maxVal
for bar, error, mean in zip(bars, stds, means):
plt.text(
bar.get_x() + 0.5 * bar.get_width(),
mean + error + extraHeight,
NUMBER_FORMAT.format(mean),
ha="center",
va="bottom",
fontsize=MINI_FONTSIZE,
rotation=0
)
plt.tight_layout()
plt.savefig(getRecordedResultsRoute(dataset + "_" + architecture + "_" + interpolationFactor + "_" + "metricLatentSpace.png"))
plt.close()
|
[
"matplotlib.pyplot.ylim",
"evaluation.results.packageResults.interpolationResults.getDictionary",
"matplotlib.pyplot.bar",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.close",
"config.routes.getRecordedResultsRoute",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.xlabel"
] |
[((277, 328), 'evaluation.results.packageResults.interpolationResults.getDictionary', 'packageResults.interpolationResults.getDictionary', ([], {}), '()\n', (326, 328), False, 'from evaluation.results import packageResults\n'), ((1607, 1633), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 8)'}), '(figsize=(4, 8))\n', (1617, 1633), True, 'import matplotlib.pyplot as plt\n'), ((1650, 1689), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'means'], {'yerr': 'stds', 'capsize': '(5)'}), '(x, means, yerr=stds, capsize=5)\n', (1657, 1689), True, 'import matplotlib.pyplot as plt\n'), ((1699, 1752), 'matplotlib.pyplot.xticks', 'plt.xticks', (['x', 'labels'], {'fontsize': 'FONTSIZE', 'rotation': '(90)'}), '(x, labels, fontsize=FONTSIZE, rotation=90)\n', (1709, 1752), True, 'import matplotlib.pyplot as plt\n'), ((1761, 1830), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Proposed Interpolation $\\\\mathbf{x}$"""'], {'fontsize': 'FONTSIZE'}), "('Proposed Interpolation $\\\\mathbf{x}$', fontsize=FONTSIZE)\n", (1771, 1830), True, 'import matplotlib.pyplot as plt\n'), ((1839, 1937), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\mathcal{BCE}[\\\\mathbf{x}_{\\\\mathrm{centre}}, \\\\mathbf{x}]$"""'], {'fontsize': 'FONTSIZE'}), "('$\\\\mathcal{BCE}[\\\\mathbf{x}_{\\\\mathrm{centre}}, \\\\mathbf{x}]$',\n fontsize=FONTSIZE)\n", (1849, 1937), True, 'import matplotlib.pyplot as plt\n'), ((1938, 1954), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {'ymin': '(0)'}), '(ymin=0)\n', (1946, 1954), True, 'import matplotlib.pyplot as plt\n'), ((1963, 1997), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': 'MINI_FONTSIZE'}), '(fontsize=MINI_FONTSIZE)\n', (1973, 1997), True, 'import matplotlib.pyplot as plt\n'), ((2485, 2503), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2501, 2503), True, 'import matplotlib.pyplot as plt\n'), ((2626, 2637), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2635, 2637), True, 'import matplotlib.pyplot as plt\n'), ((2525, 2620), 'config.routes.getRecordedResultsRoute', 'getRecordedResultsRoute', (["(dataset + '_' + interpolationFactor + '_' + 'metricImageSpace.png')"], {}), "(dataset + '_' + interpolationFactor + '_' +\n 'metricImageSpace.png')\n", (2548, 2620), False, 'from config.routes import getRecordedResultsRoute\n'), ((3637, 3663), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 6)'}), '(figsize=(4, 6))\n', (3647, 3663), True, 'import matplotlib.pyplot as plt\n'), ((3684, 3723), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'means'], {'yerr': 'stds', 'capsize': '(5)'}), '(x, means, yerr=stds, capsize=5)\n', (3691, 3723), True, 'import matplotlib.pyplot as plt\n'), ((3737, 3790), 'matplotlib.pyplot.xticks', 'plt.xticks', (['x', 'labels'], {'fontsize': 'FONTSIZE', 'rotation': '(90)'}), '(x, labels, fontsize=FONTSIZE, rotation=90)\n', (3747, 3790), True, 'import matplotlib.pyplot as plt\n'), ((3803, 3872), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Proposed Interpolation $\\\\mathbf{x}$"""'], {'fontsize': 'FONTSIZE'}), "('Proposed Interpolation $\\\\mathbf{x}$', fontsize=FONTSIZE)\n", (3813, 3872), True, 'import matplotlib.pyplot as plt\n'), ((3885, 3959), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ED(enc(x_centre), enc(x)) in Latent Space"""'], {'fontsize': 'FONTSIZE'}), "('ED(enc(x_centre), enc(x)) in Latent Space', fontsize=FONTSIZE)\n", (3895, 3959), True, 'import matplotlib.pyplot as plt\n'), ((3972, 3988), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {'ymin': '(0)'}), '(ymin=0)\n', (3980, 3988), True, 'import matplotlib.pyplot as plt\n'), ((4001, 4030), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': 'FONTSIZE'}), '(fontsize=FONTSIZE)\n', (4011, 4030), True, 'import matplotlib.pyplot as plt\n'), ((4570, 4588), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4586, 4588), True, 'import matplotlib.pyplot as plt\n'), ((4741, 4752), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4750, 4752), True, 'import matplotlib.pyplot as plt\n'), ((4614, 4731), 'config.routes.getRecordedResultsRoute', 'getRecordedResultsRoute', (["(dataset + '_' + architecture + '_' + interpolationFactor + '_' +\n 'metricLatentSpace.png')"], {}), "(dataset + '_' + architecture + '_' +\n interpolationFactor + '_' + 'metricLatentSpace.png')\n", (4637, 4731), False, 'from config.routes import getRecordedResultsRoute\n')]
|
import json
from dataclasses import dataclass
from typing import Optional
@dataclass()
class Event:
source: str # One of constants.EVENT_SOURCE
source_id: str # Unique ID from the source
name: str
_start_time: str = None
_end_time = None
_description = None
# {'timezone': 'America/Denver', 'local': '2020-02-07T19:00:00', 'utc': '2020-02-08T02:00:00Z'}
@property
def start_time(self) -> Optional[str]:
return self._start_time
@start_time.setter
def start_time(self, value: str):
self._start_time = value
# {'timezone': 'America/Denver', 'local': '2020-02-07T19:00:00', 'utc': '2020-02-08T02:00:00Z'}
@property
def end_time(self) -> Optional[str]:
return self._end_time
@end_time.setter
def end_time(self, value: str):
self._end_time = value
@property
def description(self) -> Optional[str]:
return self._description
@description.setter
def description(self, value: str):
self._description = value
def to_json(self) -> object:
obj = {
"source_id": self.source_id,
"source": self.source,
"name": self.name,
"start_time": self.start_time,
"end_time": self.end_time,
"description": self.description
}
return json.dumps(obj)
|
[
"dataclasses.dataclass",
"json.dumps"
] |
[((77, 88), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (86, 88), False, 'from dataclasses import dataclass\n'), ((1345, 1360), 'json.dumps', 'json.dumps', (['obj'], {}), '(obj)\n', (1355, 1360), False, 'import json\n')]
|
import curses
from curses import textpad
def test_textpad(stdscr, insert_mode=False):
ncols, nlines = 8, 3
uly, ulx = 3, 2
if insert_mode:
mode = 'insert mode'
else:
mode = 'overwrite mode'
stdscr.addstr(uly - 3, ulx, 'Use Ctrl-G to end editing (%s).' % mode)
stdscr.addstr(uly - 2, ulx,
'Be sure to try typing in the lower-right corner.')
win = curses.newwin(nlines, ncols, uly, ulx)
textpad.rectangle(stdscr, uly - 1, ulx - 1, uly + nlines, ulx + ncols)
stdscr.refresh()
box = textpad.Textbox(win, insert_mode)
contents = box.edit()
stdscr.addstr(uly + ncols + 2, 0, 'Text entered in the box\n')
stdscr.addstr(repr(contents))
stdscr.addstr('\n')
stdscr.addstr('Press any key')
stdscr.getch()
for i in range(3):
stdscr.move(uly + ncols + 2 + i, 0)
stdscr.clrtoeol()
def main(stdscr):
stdscr.clear()
test_textpad(stdscr, False)
test_textpad(stdscr, True)
if __name__ == '__main__':
curses.wrapper(main)
|
[
"curses.wrapper",
"curses.textpad.Textbox",
"curses.newwin",
"curses.textpad.rectangle"
] |
[((400, 438), 'curses.newwin', 'curses.newwin', (['nlines', 'ncols', 'uly', 'ulx'], {}), '(nlines, ncols, uly, ulx)\n', (413, 438), False, 'import curses\n'), ((443, 513), 'curses.textpad.rectangle', 'textpad.rectangle', (['stdscr', '(uly - 1)', '(ulx - 1)', '(uly + nlines)', '(ulx + ncols)'], {}), '(stdscr, uly - 1, ulx - 1, uly + nlines, ulx + ncols)\n', (460, 513), False, 'from curses import textpad\n'), ((545, 578), 'curses.textpad.Textbox', 'textpad.Textbox', (['win', 'insert_mode'], {}), '(win, insert_mode)\n', (560, 578), False, 'from curses import textpad\n'), ((1012, 1032), 'curses.wrapper', 'curses.wrapper', (['main'], {}), '(main)\n', (1026, 1032), False, 'import curses\n')]
|
import pandas as pd
import numpy as np
def df_info(df: pd.DataFrame, return_info=False, shape=True, cols=True, info_prefix=''):
""" Print a string to describe a df.
"""
info = info_prefix
if shape:
info = f'{info}Shape = {df.shape}'
if cols:
info = f'{info} , Cols = {df.columns.tolist()}'
print(info)
if return_info:
return info
def df_dummy_ts(start='2019-01-01', end='2019-01-02', freq='1s', n_cols=5,
smooth_n: int = 100, smooth_f: str = 'mean', dropna: bool = True):
""" Make dummy ts df.
"""
time_range = pd.DataFrame(pd.date_range(start, end, freq=freq), columns=['time'])
data = pd.DataFrame(np.random.randn(len(time_range), n_cols), columns=[f'col{n}' for n in range(n_cols)])
df = pd.concat([time_range, data], axis=1)
df = df.set_index('time')
if smooth_n:
if smooth_f == 'mean':
df = df.rolling(smooth_n).mean()
elif smooth_f == 'min':
df = df.rolling(smooth_n).min()
elif smooth_f == 'max':
df = df.rolling(smooth_n).max()
elif smooth_f == 'median':
df = df.rolling(smooth_n).median()
else:
raise NotImplementedError(f'... {smooth_f} not implemented ...')
if dropna:
df = df.dropna()
return df
|
[
"pandas.date_range",
"pandas.concat"
] |
[((783, 820), 'pandas.concat', 'pd.concat', (['[time_range, data]'], {'axis': '(1)'}), '([time_range, data], axis=1)\n', (792, 820), True, 'import pandas as pd\n'), ((608, 644), 'pandas.date_range', 'pd.date_range', (['start', 'end'], {'freq': 'freq'}), '(start, end, freq=freq)\n', (621, 644), True, 'import pandas as pd\n')]
|
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.uploadedfile import UploadedFile
from django.conf import settings
from django.http import HttpResponse
from rest_framework import generics
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from rest_framework import mixins
from rest_framework import viewsets
from rest_framework.exceptions import PermissionDenied
from django.contrib.auth.models import User
from heritages.models import Heritage, Multimedia, Annotation
from heritages.search import search_heritages, search_annotations
from heritages.serializers import HeritageSerializer, MultimediaSerializer, AnnotationSerializer, UserSerializer, \
AnnotationPaleSerializer
from heritages.permissions import IsOwner, IsNotAnonymous, IsSelf
class HeritagesListView(generics.ListCreateAPIView):
queryset = Heritage.objects.all()
serializer_class = HeritageSerializer
def list(self, request, *args, **kwargs):
keyword = self.request.query_params.get("keyword", None)
if not keyword:
return super().list(request, *args, **kwargs)
result = Response(search_heritages(keyword)).data
return Response(i["_source"] for i in result["hits"]["hits"])
class HeritageView(generics.RetrieveUpdateDestroyAPIView):
queryset = Heritage.objects.all()
serializer_class = HeritageSerializer
class MultimediaListView(generics.ListCreateAPIView):
serializer_class = MultimediaSerializer
def get_queryset(self):
try:
heritage = Heritage.objects.get(pk=self.kwargs["heritage_id"])
except ObjectDoesNotExist:
raise NotFound()
return heritage.multimedia
def perform_create(self, serializer):
heritage_id = self.kwargs["heritage_id"]
try:
heritage = Heritage.objects.get(pk=heritage_id)
except ObjectDoesNotExist:
raise NotFound()
return serializer.save(heritage=heritage)
class MultimediaView(generics.RetrieveDestroyAPIView):
queryset = Multimedia.objects.all()
serializer_class = MultimediaSerializer
class MultimediaFileView(ViewSet):
@staticmethod
def get_file(request, multimedia_id):
try:
m = Multimedia.objects.get(pk=multimedia_id)
except ObjectDoesNotExist:
raise NotFound(multimedia_id)
file = UploadedFile(m.file)
return HttpResponse(file, content_type="image/png")
class AnnotationListView(generics.ListCreateAPIView):
serializer_class = AnnotationSerializer
def get_queryset(self):
queryset = Annotation.objects.all()
heritage_id = self.kwargs["heritage_id"]
if heritage_id is not None:
queryset = queryset.filter(target__target_id__contains=heritage_id)
return queryset
else:
return NotFound()
def get_serializer_context(self):
return {"target_id": self.request.build_absolute_uri(),
"heritage_id": self.kwargs["heritage_id"]}
def list(self, request, *args, **kwargs):
keyword = self.request.query_params.get("keyword", None)
if not keyword:
return super().list(request, *args, **kwargs)
result = Response(search_annotations(keyword)).data
return Response(i["_source"] for i in result["hits"]["hits"])
class AnnotationPaleListView(generics.ListCreateAPIView):
serializer_class = AnnotationPaleSerializer
def get_queryset(self):
return Annotation.objects.all()
def get_serializer_context(self):
return {"target_id": self.request.build_absolute_uri()}
def list(self, request, *args, **kwargs):
keyword = self.request.query_params.get("keyword", None)
if not keyword:
return super().list(request, *args, **kwargs)
result = Response(search_annotations(keyword)).data
return Response(i["_source"] for i in result["hits"]["hits"])
class AnnotationView(generics.RetrieveUpdateDestroyAPIView):
queryset = Annotation.objects.all()
serializer_class = AnnotationSerializer
class AnnotationPaleView(generics.RetrieveUpdateDestroyAPIView):
queryset = Annotation.objects.all()
serializer_class = AnnotationPaleSerializer
class Users(mixins.CreateModelMixin, viewsets.GenericViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
class UserDetail(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
viewsets.GenericViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (IsSelf,)
def get_me(self, request):
return Response(self.serializer_class(request.user).data)
|
[
"django.http.HttpResponse",
"heritages.models.Annotation.objects.all",
"heritages.models.Multimedia.objects.get",
"django.core.files.uploadedfile.UploadedFile",
"heritages.models.Heritage.objects.all",
"rest_framework.response.Response",
"heritages.search.search_annotations",
"heritages.models.Multimedia.objects.all",
"rest_framework.exceptions.NotFound",
"heritages.models.Heritage.objects.get",
"django.contrib.auth.models.User.objects.all",
"heritages.search.search_heritages"
] |
[((963, 985), 'heritages.models.Heritage.objects.all', 'Heritage.objects.all', ([], {}), '()\n', (983, 985), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((1427, 1449), 'heritages.models.Heritage.objects.all', 'Heritage.objects.all', ([], {}), '()\n', (1447, 1449), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((2159, 2183), 'heritages.models.Multimedia.objects.all', 'Multimedia.objects.all', ([], {}), '()\n', (2181, 2183), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((4149, 4173), 'heritages.models.Annotation.objects.all', 'Annotation.objects.all', ([], {}), '()\n', (4171, 4173), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((4300, 4324), 'heritages.models.Annotation.objects.all', 'Annotation.objects.all', ([], {}), '()\n', (4322, 4324), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((4453, 4471), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (4469, 4471), False, 'from django.contrib.auth.models import User\n'), ((4656, 4674), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (4672, 4674), False, 'from django.contrib.auth.models import User\n'), ((1296, 1350), 'rest_framework.response.Response', 'Response', (["(i['_source'] for i in result['hits']['hits'])"], {}), "(i['_source'] for i in result['hits']['hits'])\n", (1304, 1350), False, 'from rest_framework.response import Response\n'), ((2488, 2508), 'django.core.files.uploadedfile.UploadedFile', 'UploadedFile', (['m.file'], {}), '(m.file)\n', (2500, 2508), False, 'from django.core.files.uploadedfile import UploadedFile\n'), ((2524, 2568), 'django.http.HttpResponse', 'HttpResponse', (['file'], {'content_type': '"""image/png"""'}), "(file, content_type='image/png')\n", (2536, 2568), False, 'from django.http import HttpResponse\n'), ((2717, 2741), 'heritages.models.Annotation.objects.all', 'Annotation.objects.all', ([], {}), '()\n', (2739, 2741), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((3411, 3465), 'rest_framework.response.Response', 'Response', (["(i['_source'] for i in result['hits']['hits'])"], {}), "(i['_source'] for i in result['hits']['hits'])\n", (3419, 3465), False, 'from rest_framework.response import Response\n'), ((3618, 3642), 'heritages.models.Annotation.objects.all', 'Annotation.objects.all', ([], {}), '()\n', (3640, 3642), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((4016, 4070), 'rest_framework.response.Response', 'Response', (["(i['_source'] for i in result['hits']['hits'])"], {}), "(i['_source'] for i in result['hits']['hits'])\n", (4024, 4070), False, 'from rest_framework.response import Response\n'), ((1657, 1708), 'heritages.models.Heritage.objects.get', 'Heritage.objects.get', ([], {'pk': "self.kwargs['heritage_id']"}), "(pk=self.kwargs['heritage_id'])\n", (1677, 1708), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((1936, 1972), 'heritages.models.Heritage.objects.get', 'Heritage.objects.get', ([], {'pk': 'heritage_id'}), '(pk=heritage_id)\n', (1956, 1972), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((2355, 2395), 'heritages.models.Multimedia.objects.get', 'Multimedia.objects.get', ([], {'pk': 'multimedia_id'}), '(pk=multimedia_id)\n', (2377, 2395), False, 'from heritages.models import Heritage, Multimedia, Annotation\n'), ((2968, 2978), 'rest_framework.exceptions.NotFound', 'NotFound', ([], {}), '()\n', (2976, 2978), False, 'from rest_framework.exceptions import NotFound\n'), ((1249, 1274), 'heritages.search.search_heritages', 'search_heritages', (['keyword'], {}), '(keyword)\n', (1265, 1274), False, 'from heritages.search import search_heritages, search_annotations\n'), ((1762, 1772), 'rest_framework.exceptions.NotFound', 'NotFound', ([], {}), '()\n', (1770, 1772), False, 'from rest_framework.exceptions import NotFound\n'), ((2026, 2036), 'rest_framework.exceptions.NotFound', 'NotFound', ([], {}), '()\n', (2034, 2036), False, 'from rest_framework.exceptions import NotFound\n'), ((2449, 2472), 'rest_framework.exceptions.NotFound', 'NotFound', (['multimedia_id'], {}), '(multimedia_id)\n', (2457, 2472), False, 'from rest_framework.exceptions import NotFound\n'), ((3362, 3389), 'heritages.search.search_annotations', 'search_annotations', (['keyword'], {}), '(keyword)\n', (3380, 3389), False, 'from heritages.search import search_heritages, search_annotations\n'), ((3967, 3994), 'heritages.search.search_annotations', 'search_annotations', (['keyword'], {}), '(keyword)\n', (3985, 3994), False, 'from heritages.search import search_heritages, search_annotations\n')]
|
import glob
import cv2
import numpy as np
def globimgs(path, globs:list):
"""returns a list of files with path with globing with more than one extensions"""
imgs = []
for i in globs:
imgs.extend(glob.glob(path + i))
paths = []
for path in imgs:
paths.append(path.replace("\\", "/"))
return paths
def scaneffects(img):
dilated_img = cv2.dilate(img, np.ones((7, 7), np.uint8))
bg_img = cv2.medianBlur(dilated_img, 15)
diff_img = 255 - cv2.absdiff(img, bg_img)
norm_img = diff_img.copy()
cv2.normalize(diff_img, norm_img, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8UC1)
_, thr_img = cv2.threshold(norm_img, 230, 0, cv2.THRESH_TRUNC)
cv2.normalize(thr_img, thr_img, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8UC1)
return thr_img
|
[
"cv2.medianBlur",
"cv2.threshold",
"numpy.ones",
"glob.glob",
"cv2.normalize",
"cv2.absdiff"
] |
[((408, 439), 'cv2.medianBlur', 'cv2.medianBlur', (['dilated_img', '(15)'], {}), '(dilated_img, 15)\n', (422, 439), False, 'import cv2\n'), ((522, 625), 'cv2.normalize', 'cv2.normalize', (['diff_img', 'norm_img'], {'alpha': '(0)', 'beta': '(255)', 'norm_type': 'cv2.NORM_MINMAX', 'dtype': 'cv2.CV_8UC1'}), '(diff_img, norm_img, alpha=0, beta=255, norm_type=cv2.\n NORM_MINMAX, dtype=cv2.CV_8UC1)\n', (535, 625), False, 'import cv2\n'), ((638, 687), 'cv2.threshold', 'cv2.threshold', (['norm_img', '(230)', '(0)', 'cv2.THRESH_TRUNC'], {}), '(norm_img, 230, 0, cv2.THRESH_TRUNC)\n', (651, 687), False, 'import cv2\n'), ((692, 793), 'cv2.normalize', 'cv2.normalize', (['thr_img', 'thr_img'], {'alpha': '(0)', 'beta': '(255)', 'norm_type': 'cv2.NORM_MINMAX', 'dtype': 'cv2.CV_8UC1'}), '(thr_img, thr_img, alpha=0, beta=255, norm_type=cv2.\n NORM_MINMAX, dtype=cv2.CV_8UC1)\n', (705, 793), False, 'import cv2\n'), ((368, 393), 'numpy.ones', 'np.ones', (['(7, 7)', 'np.uint8'], {}), '((7, 7), np.uint8)\n', (375, 393), True, 'import numpy as np\n'), ((461, 485), 'cv2.absdiff', 'cv2.absdiff', (['img', 'bg_img'], {}), '(img, bg_img)\n', (472, 485), False, 'import cv2\n'), ((203, 222), 'glob.glob', 'glob.glob', (['(path + i)'], {}), '(path + i)\n', (212, 222), False, 'import glob\n')]
|
'''
Created on 31 mar. 2020
@author: David
'''
from sys import path
path.append("/flash/userapp")
from pyb import LED, Switch, Pin
from uasyncio import get_event_loop, sleep_ms as ua_sleep_ms
from uvacbot.io.esp8266 import Connection, Esp8266
class LedToggleConnection(Connection):
async def onConnected(self):
print("Connected: {0}".format(self._clientId))
def onClose(self):
print("Closed: {0}".format(self._clientId))
async def onReceived(self, message):
if message.startswith("LED"):
try:
ledId = int(message.split(":")[1])
#The Nucleo-F767ZI board has 3 on-board user leds
if ledId >= 1 and ledId <= 3:
LED(ledId).toggle()
print("Led['{0}'] toggled.".format(ledId))
else:
print("Led not found. Please, try again.")
except:
print("I don't understand '{0}'. Please, try again.".format(message))
class EchoConnection(Connection):
async def onConnected(self):
print("Connected!")
async def onReceived(self, message):
echo = message.strip()
if echo != "":
self.send("echo: '{0}'\r\n".format(echo))
def onClose(self):
print("Closed.")
class RemoteExecConnection(Connection):
async def onReceived(self, message):
code = message.strip()
if code != "":
try:
exec("{0}\r\n".format(str(code, Esp8266.BYTES_ENCODING)))
except Exception as ex:
self.send("Exception: {0}\r\n".format(ex))
async def serve(esp):
esp.initServer(EchoConnection)
#esp.initServer(LedToggleConnection)
print("Waiting for connections...")
sw = Switch()
while not sw.value():
await ua_sleep_ms(200)
esp.stopServer()
print("Server stopped.")
def main():
print("*** Esp8266 communication test ***")
print("Press switch button to finish.")
esp = None # Uncomment ESP8266 configuration properly
#esp = Esp8266(3, Pin.board.D3, 115200, debug=True) #NUCLEO-L476RG
# On NUCLEO-F767ZI TX6 is on CN7-01 (PC6) and RX6 is on CN7-11 (PC7)
#esp = Esp8266(6, Pin.board.D8, 115200, debug=True) #NUCLEO-F767ZI
if not esp:
raise Exception("Create a Esp8266 object first.")
loop = get_event_loop()
esp.start()
assert esp.isPresent()
try:
#esp.setOperatingMode(Esp8266.OP_MODE_CLIENT)
#esp.join("SSID", "PASSWD")
#esp.setStaIpAddress("192.168.1.200", "192.168.1.1")
esp.setOperatingMode(Esp8266.OP_MODE_AP)
esp.setAccessPointConfig("ESP8266-AP", "", 1, Esp8266.SECURITY_OPEN)
loop.run_until_complete(serve(esp))
finally:
esp._flushRx()
esp.cleanup()
print("Program finished")
if __name__ == "__main__":
main()
|
[
"sys.path.append",
"pyb.LED",
"uasyncio.sleep_ms",
"uasyncio.get_event_loop",
"pyb.Switch"
] |
[((69, 98), 'sys.path.append', 'path.append', (['"""/flash/userapp"""'], {}), "('/flash/userapp')\n", (80, 98), False, 'from sys import path\n'), ((2052, 2060), 'pyb.Switch', 'Switch', ([], {}), '()\n', (2058, 2060), False, 'from pyb import LED, Switch, Pin\n'), ((2678, 2694), 'uasyncio.get_event_loop', 'get_event_loop', ([], {}), '()\n', (2692, 2694), False, 'from uasyncio import get_event_loop, sleep_ms as ua_sleep_ms\n'), ((2109, 2125), 'uasyncio.sleep_ms', 'ua_sleep_ms', (['(200)'], {}), '(200)\n', (2120, 2125), True, 'from uasyncio import get_event_loop, sleep_ms as ua_sleep_ms\n'), ((817, 827), 'pyb.LED', 'LED', (['ledId'], {}), '(ledId)\n', (820, 827), False, 'from pyb import LED, Switch, Pin\n')]
|
from pathlib import Path
DATA_DIR= Path(__file__).resolve().parent
|
[
"pathlib.Path"
] |
[((38, 52), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (42, 52), False, 'from pathlib import Path\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.conf import settings
from django.urls import reverse_lazy
from calls.models import CallCampaignStatus
from local_groups.models import find_local_group_by_user
from organizing_hub.models import OrganizingHubLoginAlert
import logging
logger = logging.getLogger(__name__)
register = template.Library()
BSD_CREATE_ACCOUNT_URL = settings.BSD_CREATE_ACCOUNT_URL
ORGANIZING_DOCS_URL = settings.ORGANIZING_DOCS_URL
ORGANIZING_EMAIL = settings.ORGANIZING_EMAIL
ORGANIZING_GUIDES_URL = settings.ORGANIZING_GUIDES_URL
ORGANIZING_HUB_ADMINS_ENABLED = settings.ORGANIZING_HUB_ADMINS_ENABLED
ORGANIZING_HUB_CALL_CALLERS_URL = settings.ORGANIZING_HUB_CALL_CALLERS_URL
ORGANIZING_HUB_CALL_MANAGE_URL = settings.ORGANIZING_HUB_CALL_MANAGE_URL
ORGANIZING_HUB_CALL_SCRIPT_URL = settings.ORGANIZING_HUB_CALL_SCRIPT_URL
ORGANIZING_HUB_DASHBOARD_URL = settings.ORGANIZING_HUB_DASHBOARD_URL
ORGANIZING_HUB_PROMOTE_ENABLED = settings.ORGANIZING_HUB_PROMOTE_ENABLED
@register.simple_tag
def bsd_create_account_url():
return BSD_CREATE_ACCOUNT_URL
@register.simple_tag
def call_campaign_complete_url(call_campaign):
"""
URL for Complete Call Campaign page
Parameters
----------
call_campaign : CallCampaign
Call Campaign
Returns
-------
str
Return url for Complete Call Campaign page
"""
return reverse_lazy(
'organizing-hub-call-campaign-status',
kwargs={
'uuid': call_campaign.uuid,
'status_id': CallCampaignStatus.complete.value[0],
}
)
@register.simple_tag
def call_campaign_pause_url(call_campaign):
"""
URL for Pause Call Campaign page
Parameters
----------
call_campaign : CallCampaign
Call Campaign
Returns
-------
str
Return url for Pause Call Campaign page
"""
return reverse_lazy(
'organizing-hub-call-campaign-status',
kwargs={
'uuid': call_campaign.uuid,
'status_id': CallCampaignStatus.paused.value[0],
}
)
@register.simple_tag
def call_campaign_resume_url(call_campaign):
"""
URL for Resume Call Campaign page
Parameters
----------
call_campaign : CallCampaign
Call Campaign
Returns
-------
str
Return url for Resume Call Campaign page
"""
return reverse_lazy(
'organizing-hub-call-campaign-status',
kwargs={
'uuid': call_campaign.uuid,
'status_id': CallCampaignStatus.in_progress.value[0],
}
)
@register.simple_tag
def call_campaign_start_url(call_campaign):
"""
URL for Start Call Campaign page
Parameters
----------
call_campaign : CallCampaign
Call Campaign
Returns
-------
str
Return url for Start Call Campaign page
"""
return reverse_lazy(
'organizing-hub-call-campaign-status',
kwargs={
'uuid': call_campaign.uuid,
'status_id': CallCampaignStatus.in_progress.value[0],
}
)
@register.inclusion_tag('partials/events_nav.html', takes_context=True)
def events_nav(context):
"""Show Hydra Promote Link if Hub Promote is not enabled"""
show_promote_link = not ORGANIZING_HUB_PROMOTE_ENABLED
return {
'show_promote_link': show_promote_link,
'request': context['request'],
}
# Organizing Hub templates
@register.inclusion_tag('partials/group_link.html', takes_context=True)
def group_link(context):
group = find_local_group_by_user(context['request'].user)
return {
'group': group,
'request': context['request'],
}
@register.simple_tag(takes_context=True)
def has_organizing_hub_feature_access(context, feature_id):
"""
Check if user has access to Organizing Hub Feature
Parameters
----------
feature_id : int
Organizing Hub Feature id
Returns
-------
bool
Return True if user has access to Organizing Hub Feature
"""
local_group = find_local_group_by_user(context['request'].user)
if local_group is not None and hasattr(
local_group,
'organizinghubaccess',
):
access = local_group.organizinghubaccess
has_feature_access = access.has_feature_access_by_id(feature_id)
return has_feature_access
else:
return False
@register.simple_tag(takes_context=True)
def local_group(context):
"""TODO move to local groups template tags"""
return find_local_group_by_user(context['request'].user)
# Organizing Hub Navigation menu
@register.inclusion_tag('partials/organizing_hub_nav.html', takes_context=True)
def organizing_hub_nav(context):
group = find_local_group_by_user(context['request'].user)
show_admins_link = ORGANIZING_HUB_ADMINS_ENABLED
return {
'group': group,
'organizing_guides_url': ORGANIZING_GUIDES_URL,
'organizing_docs_url': ORGANIZING_DOCS_URL,
'show_admins_link': show_admins_link,
'request': context['request'],
}
@register.simple_tag
def organizing_docs_url():
return ORGANIZING_DOCS_URL
@register.simple_tag
def organizing_email():
return ORGANIZING_EMAIL
@register.simple_tag
def organizing_hub_call_callers_url():
return ORGANIZING_HUB_CALL_CALLERS_URL
@register.simple_tag
def organizing_hub_call_manage_url():
return ORGANIZING_HUB_CALL_MANAGE_URL
@register.simple_tag
def organizing_hub_call_script_url():
return ORGANIZING_HUB_CALL_SCRIPT_URL
@register.simple_tag
def organizing_hub_dashboard_url():
return ORGANIZING_HUB_DASHBOARD_URL
@register.inclusion_tag(
'organizing_hub/tags/organizing_hub_login_alert.html',
takes_context=True
)
def organizing_hub_login_alert(context):
"""Organizing Hub Login Alert snippet set to show"""
return {
'organizing_hub_login_alert': OrganizingHubLoginAlert.objects.filter(
show=True
).first(),
'request': context['request'],
}
|
[
"django.template.Library",
"django.urls.reverse_lazy",
"organizing_hub.models.OrganizingHubLoginAlert.objects.filter",
"logging.getLogger",
"local_groups.models.find_local_group_by_user"
] |
[((346, 373), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (363, 373), False, 'import logging\n'), ((386, 404), 'django.template.Library', 'template.Library', ([], {}), '()\n', (402, 404), False, 'from django import template\n'), ((1454, 1597), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""organizing-hub-call-campaign-status"""'], {'kwargs': "{'uuid': call_campaign.uuid, 'status_id': CallCampaignStatus.complete.value[0]}"}), "('organizing-hub-call-campaign-status', kwargs={'uuid':\n call_campaign.uuid, 'status_id': CallCampaignStatus.complete.value[0]})\n", (1466, 1597), False, 'from django.urls import reverse_lazy\n'), ((1961, 2102), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""organizing-hub-call-campaign-status"""'], {'kwargs': "{'uuid': call_campaign.uuid, 'status_id': CallCampaignStatus.paused.value[0]}"}), "('organizing-hub-call-campaign-status', kwargs={'uuid':\n call_campaign.uuid, 'status_id': CallCampaignStatus.paused.value[0]})\n", (1973, 2102), False, 'from django.urls import reverse_lazy\n'), ((2469, 2615), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""organizing-hub-call-campaign-status"""'], {'kwargs': "{'uuid': call_campaign.uuid, 'status_id': CallCampaignStatus.in_progress.\n value[0]}"}), "('organizing-hub-call-campaign-status', kwargs={'uuid':\n call_campaign.uuid, 'status_id': CallCampaignStatus.in_progress.value[0]})\n", (2481, 2615), False, 'from django.urls import reverse_lazy\n'), ((2979, 3125), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""organizing-hub-call-campaign-status"""'], {'kwargs': "{'uuid': call_campaign.uuid, 'status_id': CallCampaignStatus.in_progress.\n value[0]}"}), "('organizing-hub-call-campaign-status', kwargs={'uuid':\n call_campaign.uuid, 'status_id': CallCampaignStatus.in_progress.value[0]})\n", (2991, 3125), False, 'from django.urls import reverse_lazy\n'), ((3648, 3697), 'local_groups.models.find_local_group_by_user', 'find_local_group_by_user', (["context['request'].user"], {}), "(context['request'].user)\n", (3672, 3697), False, 'from local_groups.models import find_local_group_by_user\n'), ((4170, 4219), 'local_groups.models.find_local_group_by_user', 'find_local_group_by_user', (["context['request'].user"], {}), "(context['request'].user)\n", (4194, 4219), False, 'from local_groups.models import find_local_group_by_user\n'), ((4640, 4689), 'local_groups.models.find_local_group_by_user', 'find_local_group_by_user', (["context['request'].user"], {}), "(context['request'].user)\n", (4664, 4689), False, 'from local_groups.models import find_local_group_by_user\n'), ((4851, 4900), 'local_groups.models.find_local_group_by_user', 'find_local_group_by_user', (["context['request'].user"], {}), "(context['request'].user)\n", (4875, 4900), False, 'from local_groups.models import find_local_group_by_user\n'), ((6018, 6067), 'organizing_hub.models.OrganizingHubLoginAlert.objects.filter', 'OrganizingHubLoginAlert.objects.filter', ([], {'show': '(True)'}), '(show=True)\n', (6056, 6067), False, 'from organizing_hub.models import OrganizingHubLoginAlert\n')]
|
#!/usr/bin/env python
import getopt, sys, os
import numpy as np
import pyfits
from pylab import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid.inset_locator import mark_inset
#fname_ext = '/home/nbarbey/data/csh/output/ngc6946_cross_robust.fits'
fname_ext = sys.argv[1]
fname = fname_ext.split('.')[0]
out_fname = fname + '.png'
print('displaying ' + fname)
title_str = fname.split(os.sep)[-1]
t = np.flipud(pyfits.fitsopen(fname_ext)[0].data.T)
fig = plt.figure(1, [5,4])
ax = fig.add_subplot(111)
#imshow(t , interpolation="nearest")
#imshow((t - t.min())) ** .25, interpolation="nearest")
tt = t ** .25
tt[np.isnan(tt)] = 0
extent = [0., 192., 0., 192.]
ax.imshow(tt, extent=extent, interpolation="nearest")
tzoom = tt[135:155, 80:100,]
axins = zoomed_inset_axes(ax, 2, loc=3) # zoom = 6
extent = [80., 100., 192. - 155., 192. - 135, ]
im = axins.imshow(tzoom, extent=extent, interpolation="nearest")
im.set_clim([tt.min(), tt.max()])
plt.xticks(visible=False)
plt.yticks(visible=False)
#x1, x2, y1, y2 = 80., 100., 135., 155.,
#axins.set_xlim(x1, x2)
#axins.set_ylim(y1, y2)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
#plt.title(title_str)
#plt.colorbar()
#plt.xlabel('Right Ascension')
#plt.ylabel('Declination')
plt.show()
fig.savefig(out_fname)
|
[
"pyfits.fitsopen",
"matplotlib.pyplot.show",
"mpl_toolkits.axes_grid.inset_locator.mark_inset",
"matplotlib.pyplot.yticks",
"numpy.isnan",
"matplotlib.pyplot.figure",
"mpl_toolkits.axes_grid.inset_locator.zoomed_inset_axes",
"matplotlib.pyplot.xticks"
] |
[((544, 565), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)', '[5, 4]'], {}), '(1, [5, 4])\n', (554, 565), True, 'import matplotlib.pyplot as plt\n'), ((842, 873), 'mpl_toolkits.axes_grid.inset_locator.zoomed_inset_axes', 'zoomed_inset_axes', (['ax', '(2)'], {'loc': '(3)'}), '(ax, 2, loc=3)\n', (859, 873), False, 'from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes\n'), ((1032, 1057), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'visible': '(False)'}), '(visible=False)\n', (1042, 1057), True, 'import matplotlib.pyplot as plt\n'), ((1058, 1083), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'visible': '(False)'}), '(visible=False)\n', (1068, 1083), True, 'import matplotlib.pyplot as plt\n'), ((1173, 1231), 'mpl_toolkits.axes_grid.inset_locator.mark_inset', 'mark_inset', (['ax', 'axins'], {'loc1': '(2)', 'loc2': '(4)', 'fc': '"""none"""', 'ec': '"""0.5"""'}), "(ax, axins, loc1=2, loc2=4, fc='none', ec='0.5')\n", (1183, 1231), False, 'from mpl_toolkits.axes_grid.inset_locator import mark_inset\n'), ((1329, 1339), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1337, 1339), True, 'import matplotlib.pyplot as plt\n'), ((702, 714), 'numpy.isnan', 'np.isnan', (['tt'], {}), '(tt)\n', (710, 714), True, 'import numpy as np\n'), ((500, 526), 'pyfits.fitsopen', 'pyfits.fitsopen', (['fname_ext'], {}), '(fname_ext)\n', (515, 526), False, 'import pyfits\n')]
|
# coding=utf-8
import logging
from widen import settings
import requests
from index.models import *
class WeiXinLogin():
def __init__(self, code, state):
self.code = code
self.state = state
self.appid = settings.APP_ID
self.appsecret = settings.APP_SECRET
self.access_token = ''
self.refresh_token = ''
self.openid = ''
self.is_expires = 1
self.detail = {}
# 为了方便大家看,我都写在一个函数里
def get_access_token(self):
# 2.通过code换取网页授权access_token
if self.refresh_token:
url = u'https://api.weixin.qq.com/sns/oauth2/refresh_token'
params = {
'appid': self.appid,
'grant_type': self.refresh_token,
'refresh_token': self.refresh_token
}
res = requests.get(url, params=params).json()
if res.get('errcode', None):
logging.info(res.get('errmsg'))
return res.get('errmsg')
self.access_token = res.get("access_token")
self.openid = res.get("openid")
self.refresh_token = res.get('refresh_token')
logging.info(
'access_token:%s ;openid:%s ;refresh_token:%s' % (
self.access_token, self.openid, self.refresh_token))
return True
url = u'https://api.weixin.qq.com/sns/oauth2/access_token'
params = {
'appid': self.appid,
'secret': self.appsecret,
'code': self.code,
'grant_type': 'authorization_code'
}
res = requests.get(url, params=params).json()
if res.get('errcode', None):
logging.info(res.get('errmsg'))
return res.get('errmsg')
self.access_token = res.get("access_token")
self.openid = res.get("openid")
self.refresh_token = res.get('refresh_token')
Token_get = Token(**res)
Token_get.save()
logging.info(
'access_token:%s ;openid:%s ;refresh_token:%s' % (
self.access_token, self.openid, self.refresh_token))
return True
def token_expires(self):
# 监测当前access_token是否超时?
url = u'https://api.weixin.qq.com/sns/auth'
params = {
'appid': self.appid,
'access_token': self.access_token,
}
res = requests.get(url, params=params).json()
if res.get('errcode'):
self.is_expires = 1
logging.info('is_expires:%s' % self.is_expires)
else:
self.is_expires = 0
return True
def get_info(self):
# 4.拉取用户信息
user_info_url = u'https://api.weixin.qq.com/sns/userinfo'
params = {
'access_token': self.access_token,
'openid': self.openid,
'lang': 'zh_CN',
}
res = requests.get(user_info_url, params=params).json()
if res.get('errcode'):
return res.get('errmsg')
# decode response content
logging.info('Get user detail openid:' + res.get('openid'))
for key, value in res.items():
self.detail[key] = value.encode('iso8859-1').decode('utf-8') if isinstance(value, str) else value
WxUser = Wxuser(**self.detail)
WxUser.save()
logging.info('Save%s to db' % self.detail.get('openid'))
return True
def get_detail(self):
self.token_expires()
if self.is_expires == 1:
self.get_access_token()
self.get_info()
return self.detail
|
[
"logging.info",
"requests.get"
] |
[((1973, 2093), 'logging.info', 'logging.info', (["('access_token:%s ;openid:%s ;refresh_token:%s' % (self.access_token, self.\n openid, self.refresh_token))"], {}), "('access_token:%s ;openid:%s ;refresh_token:%s' % (self.\n access_token, self.openid, self.refresh_token))\n", (1985, 2093), False, 'import logging\n'), ((1166, 1286), 'logging.info', 'logging.info', (["('access_token:%s ;openid:%s ;refresh_token:%s' % (self.access_token, self.\n openid, self.refresh_token))"], {}), "('access_token:%s ;openid:%s ;refresh_token:%s' % (self.\n access_token, self.openid, self.refresh_token))\n", (1178, 1286), False, 'import logging\n'), ((2491, 2538), 'logging.info', 'logging.info', (["('is_expires:%s' % self.is_expires)"], {}), "('is_expires:%s' % self.is_expires)\n", (2503, 2538), False, 'import logging\n'), ((1603, 1635), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (1615, 1635), False, 'import requests\n'), ((2376, 2408), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (2388, 2408), False, 'import requests\n'), ((2869, 2911), 'requests.get', 'requests.get', (['user_info_url'], {'params': 'params'}), '(user_info_url, params=params)\n', (2881, 2911), False, 'import requests\n'), ((826, 858), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (838, 858), False, 'import requests\n')]
|
import unittest
from nagplug import Plugin, Threshold, ArgumentParserError
from nagplug import OK, WARNING, CRITICAL, UNKNOWN
class TestParsing(unittest.TestCase):
def test_parse(self):
plugin = Plugin()
plugin.add_arg('-e', '--test', action='store_true')
args = plugin.parser.parse_args(['-e'])
self.assertTrue(args.test)
def test_parse_threshold_string(self):
plugin = Plugin()
plugin.add_arg('-w', '--warning-threshold')
plugin.add_arg('-c', '--critical-threshold')
args = plugin.parse_args(['-w', '10:20', '-c', '0:40'])
self.assertEqual(OK, plugin.check_threshold(15,
args.warning_threshold,
args.critical_threshold))
def test_parse_threshold_native(self):
plugin = Plugin()
plugin.add_arg('-w', '--warning-threshold', type=Threshold)
plugin.add_arg('-c', '--critical-threshold', type=Threshold)
args = plugin.parse_args(['-w', '10:20', '-c', '0:40'])
self.assertEqual(OK, plugin.check_threshold(15,
args.warning_threshold,
args.critical_threshold))
def test_parse_exceptions(self):
plugin = Plugin()
plugin.add_arg('test')
self.assertRaises(ArgumentParserError, plugin.parse_args, [])
def test_parse_exceptions(self):
plugin = Plugin()
plugin.add_arg('threshold', type=Threshold)
self.assertRaises(ArgumentParserError, plugin.parse_args, [])
class TestThreshold(unittest.TestCase):
def test_threshold_parseerror(self):
self.assertRaises(ValueError, Threshold, ("helloworld"))
def test_threshold_valueerror(self):
self.assertRaises(ValueError, Threshold, ("10:2"))
def test_theshold_simple_neg(self):
self.assertFalse(Threshold("10").check(-1))
def test_theshold_simple_over(self):
self.assertFalse(Threshold("10").check(11))
def test_theshold_simple_zero(self):
self.assertTrue(Threshold("10").check(0))
def test_theshold_simple_upperbound(self):
self.assertTrue(Threshold("10").check(10))
def test_theshold_simple_inside(self):
self.assertTrue(Threshold("10").check(5))
def test_threshold_range_one(self):
self.assertTrue(Threshold("10:10").check(10))
def test_threshold_range_lowerbound(self):
self.assertTrue(Threshold("10:20").check(10))
def test_threshold_range_inside(self):
self.assertTrue(Threshold("10:20").check(15))
def test_threshold_range_upperbound(self):
self.assertTrue(Threshold("10:20").check(20))
def test_threshold_range_lower(self):
self.assertFalse(Threshold("10:20").check(9))
def test_threshold_range_upper(self):
self.assertFalse(Threshold("10:20").check(21))
def test_threshold_invert_bound(self):
self.assertFalse(Threshold("@10").check(10))
def test_threshold_invert_range(self):
self.assertFalse(Threshold("@10:20").check(10))
def test_threshold_invert_upper(self):
self.assertFalse(Threshold("@:20").check(10))
def test_threshold_openrange_simple(self):
self.assertTrue(Threshold("10:").check(20))
def test_threshold_openrange_inside(self):
self.assertTrue(Threshold(":10").check(5))
def test_threshold_openrange_over(self):
self.assertFalse(Threshold(":10").check(20))
def test_threshold_openrange_neg(self):
self.assertTrue(Threshold("~:10").check(-1))
def test_threshold_openrange_neg_over(self):
self.assertFalse(Threshold("~:10").check(11))
class TestCode(unittest.TestCase):
def test_simple_default(self):
plugin = Plugin()
self.assertEqual(plugin.get_code(), UNKNOWN)
def test_simple_ok(self):
plugin = Plugin()
plugin.add_result(OK, 'OK')
self.assertEqual(plugin.get_code(), OK)
def test_simple_warning(self):
plugin = Plugin()
plugin.add_result(WARNING, 'WARNING')
self.assertEqual(plugin.get_code(), WARNING)
def test_simple_critical(self):
plugin = Plugin()
plugin.add_result(CRITICAL, 'CRITICAL')
self.assertEqual(plugin.get_code(), CRITICAL)
def test_simple_owc(self):
plugin = Plugin()
plugin.add_result(OK, 'OK')
plugin.add_result(WARNING, 'WARNING')
plugin.add_result(CRITICAL, 'CRITICAL')
self.assertEqual(plugin.get_code(), CRITICAL)
def test_simple_ow(self):
plugin = Plugin()
plugin.add_result(OK, 'OK')
plugin.add_result(WARNING, 'WARNING')
self.assertEqual(plugin.get_code(), WARNING)
def test_simple_cw(self):
plugin = Plugin()
plugin.add_result(CRITICAL, 'OK')
plugin.add_result(WARNING, 'WARNING')
plugin.add_result(WARNING, 'WARNING')
plugin.add_result(WARNING, 'WARNING')
plugin.add_result(WARNING, 'UNKNOWN')
self.assertEqual(plugin.get_code(), CRITICAL)
class TestMessage(unittest.TestCase):
def test_simple_default(self):
plugin = Plugin()
self.assertEqual(plugin.get_message(), '')
def test_simple_ok(self):
plugin = Plugin()
plugin.add_result(OK, 'OK')
self.assertEqual(plugin.get_message(), 'OK')
def test_simple_owc(self):
plugin = Plugin()
plugin.add_result(OK, 'OK')
plugin.add_result(WARNING, 'WARNING')
plugin.add_result(CRITICAL, 'CRITICAL')
self.assertEqual(plugin.get_message(joiner=', '),
', '.join(['OK', 'WARNING', 'CRITICAL']))
def test_simple_owc_level(self):
plugin = Plugin()
plugin.add_result(OK, 'OK')
plugin.add_result(WARNING, 'WARNING')
plugin.add_result(CRITICAL, 'CRITICAL')
self.assertEqual(plugin.get_message(joiner=', ', msglevels=[WARNING]),
', '.join(['WARNING']))
class TestExtData(unittest.TestCase):
def test_simple(self):
plugin = Plugin()
plugin.add_extdata('OK')
plugin.add_extdata('hey!')
plugin.add_extdata('STUFF')
self.assertEqual(plugin.get_extdata(),
'\n'.join(['OK', 'hey!', 'STUFF']))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"nagplug.Plugin",
"nagplug.Threshold"
] |
[((6415, 6430), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6428, 6430), False, 'import unittest\n'), ((210, 218), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (216, 218), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((423, 431), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (429, 431), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((872, 880), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (878, 880), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((1347, 1355), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (1353, 1355), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((1512, 1520), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (1518, 1520), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3842, 3850), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (3848, 3850), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3952, 3960), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (3958, 3960), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((4098, 4106), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (4104, 4106), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((4260, 4268), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (4266, 4268), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((4420, 4428), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (4426, 4428), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((4661, 4669), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (4667, 4669), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((4853, 4861), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (4859, 4861), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((5235, 5243), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (5241, 5243), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((5343, 5351), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (5349, 5351), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((5490, 5498), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (5496, 5498), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((5809, 5817), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (5815, 5817), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((6161, 6169), 'nagplug.Plugin', 'Plugin', ([], {}), '()\n', (6167, 6169), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((1959, 1974), 'nagplug.Threshold', 'Threshold', (['"""10"""'], {}), "('10')\n", (1968, 1974), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2053, 2068), 'nagplug.Threshold', 'Threshold', (['"""10"""'], {}), "('10')\n", (2062, 2068), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2146, 2161), 'nagplug.Threshold', 'Threshold', (['"""10"""'], {}), "('10')\n", (2155, 2161), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2244, 2259), 'nagplug.Threshold', 'Threshold', (['"""10"""'], {}), "('10')\n", (2253, 2259), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2339, 2354), 'nagplug.Threshold', 'Threshold', (['"""10"""'], {}), "('10')\n", (2348, 2354), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2430, 2448), 'nagplug.Threshold', 'Threshold', (['"""10:10"""'], {}), "('10:10')\n", (2439, 2448), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2532, 2550), 'nagplug.Threshold', 'Threshold', (['"""10:20"""'], {}), "('10:20')\n", (2541, 2550), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2630, 2648), 'nagplug.Threshold', 'Threshold', (['"""10:20"""'], {}), "('10:20')\n", (2639, 2648), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2732, 2750), 'nagplug.Threshold', 'Threshold', (['"""10:20"""'], {}), "('10:20')\n", (2741, 2750), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2830, 2848), 'nagplug.Threshold', 'Threshold', (['"""10:20"""'], {}), "('10:20')\n", (2839, 2848), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((2927, 2945), 'nagplug.Threshold', 'Threshold', (['"""10:20"""'], {}), "('10:20')\n", (2936, 2945), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3026, 3042), 'nagplug.Threshold', 'Threshold', (['"""@10"""'], {}), "('@10')\n", (3035, 3042), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3123, 3142), 'nagplug.Threshold', 'Threshold', (['"""@10:20"""'], {}), "('@10:20')\n", (3132, 3142), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3223, 3240), 'nagplug.Threshold', 'Threshold', (['"""@:20"""'], {}), "('@:20')\n", (3232, 3240), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3324, 3340), 'nagplug.Threshold', 'Threshold', (['"""10:"""'], {}), "('10:')\n", (3333, 3340), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3424, 3440), 'nagplug.Threshold', 'Threshold', (['""":10"""'], {}), "(':10')\n", (3433, 3440), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3522, 3538), 'nagplug.Threshold', 'Threshold', (['""":10"""'], {}), "(':10')\n", (3531, 3538), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3619, 3636), 'nagplug.Threshold', 'Threshold', (['"""~:10"""'], {}), "('~:10')\n", (3628, 3636), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n'), ((3723, 3740), 'nagplug.Threshold', 'Threshold', (['"""~:10"""'], {}), "('~:10')\n", (3732, 3740), False, 'from nagplug import Plugin, Threshold, ArgumentParserError\n')]
|
from paypal_transactions_wrapper.exceptions import TransactionPropertyNotFound
class Transaction:
KEY_MAP = {
"TIMESTAMP": "date",
"TIMEZONE": "timezone",
"TYPE": "type",
"EMAIL": "costumer_email",
"NAME": "costumer_name",
"TRANSACTIONID": "id",
"STATUS": "status",
"AMT": "amount",
"CURRENCYCODE": "currency",
}
def __init__(self, transaction):
self._transaction = transaction
def __str__(self):
return str(self._transaction)
def __getattr__(self, item):
if item in self._transaction:
return self._transaction[item]
raise TransactionPropertyNotFound("%s property has not found")
|
[
"paypal_transactions_wrapper.exceptions.TransactionPropertyNotFound"
] |
[((664, 720), 'paypal_transactions_wrapper.exceptions.TransactionPropertyNotFound', 'TransactionPropertyNotFound', (['"""%s property has not found"""'], {}), "('%s property has not found')\n", (691, 720), False, 'from paypal_transactions_wrapper.exceptions import TransactionPropertyNotFound\n')]
|
import socket
import logging
from threading import Thread
from select import select
from enum import Enum
from prodj.core.clientlist import ClientList
from prodj.core.vcdj import Vcdj
from prodj.data.dataprovider import DataProvider
from prodj.network.nfsclient import NfsClient
from prodj.network.ip import guess_own_iface
from prodj.network import packets
from prodj.network import packets_dump
class OwnIpStatus(Enum):
notNeeded = 1,
waiting = 2,
acquired = 3
class ProDj(Thread):
def __init__(self):
super().__init__()
self.cl = ClientList(self)
self.data = DataProvider(self)
self.vcdj = Vcdj(self)
self.nfs = NfsClient(self)
self.keepalive_ip = "0.0.0.0"
self.keepalive_port = 50000
self.beat_ip = "0.0.0.0"
self.beat_port = 50001
self.status_ip = "0.0.0.0"
self.status_port = 50002
self.need_own_ip = OwnIpStatus.notNeeded
self.own_ip = None
def start(self):
self.keepalive_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.keepalive_sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
self.keepalive_sock.bind((self.keepalive_ip, self.keepalive_port))
logging.info("Listening on {}:{} for keepalive packets".format(self.keepalive_ip, self.keepalive_port))
self.beat_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.beat_sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
self.beat_sock.bind((self.beat_ip, self.beat_port))
logging.info("Listening on {}:{} for beat packets".format(self.beat_ip, self.beat_port))
self.status_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.status_sock.bind((self.status_ip, self.status_port))
logging.info("Listening on {}:{} for status packets".format(self.status_ip, self.status_port))
self.socks = [self.keepalive_sock, self.beat_sock, self.status_sock]
self.keep_running = True
self.data.start()
self.nfs.start()
super().start()
def stop(self):
self.keep_running = False
self.nfs.stop()
self.data.stop()
self.vcdj_disable()
self.join()
self.keepalive_sock.close()
self.beat_sock.close()
def vcdj_set_player_number(self, vcdj_player_number=5):
logging.info("Player number set to {}".format(vcdj_player_number))
self.vcdj.player_number = vcdj_player_number
#self.data.dbc.own_player_number = vcdj_player_number
def vcdj_enable(self):
self.vcdj_set_iface()
self.vcdj.start()
def vcdj_disable(self):
self.vcdj.stop()
self.vcdj.join()
def vcdj_set_iface(self):
if self.own_ip is not None:
self.vcdj.set_interface_data(*self.own_ip[1:4])
def run(self):
logging.debug("starting main loop")
while self.keep_running:
rdy = select(self.socks,[],[],1)[0]
for sock in rdy:
if sock == self.keepalive_sock:
data, addr = self.keepalive_sock.recvfrom(128)
self.handle_keepalive_packet(data, addr)
elif sock == self.beat_sock:
data, addr = self.beat_sock.recvfrom(128)
self.handle_beat_packet(data, addr)
elif sock == self.status_sock:
data, addr = self.status_sock.recvfrom(256)
self.handle_status_packet(data, addr)
self.cl.gc()
logging.debug("main loop finished")
def handle_keepalive_packet(self, data, addr):
#logging.debug("Broadcast keepalive packet from {}".format(addr))
try:
packet = packets.KeepAlivePacket.parse(data)
except Exception as e:
logging.warning("Failed to parse keepalive packet from {}, {} bytes: {}".format(addr, len(data), e))
packets_dump.dump_packet_raw(data)
return
# both packet types give us enough information to store the client
if packet["type"] in ["type_ip", "type_status", "type_change"]:
self.cl.eatKeepalive(packet)
if self.own_ip is None and len(self.cl.getClientIps()) > 0:
self.own_ip = guess_own_iface(self.cl.getClientIps())
if self.own_ip is not None:
logging.info("Guessed own interface {} ip {} mask {} mac {}".format(*self.own_ip))
self.vcdj_set_iface()
packets_dump.dump_keepalive_packet(packet)
def handle_beat_packet(self, data, addr):
#logging.debug("Broadcast beat packet from {}".format(addr))
try:
packet = packets.BeatPacket.parse(data)
except Exception as e:
logging.warning("Failed to parse beat packet from {}, {} bytes: {}".format(addr, len(data), e))
packets_dump.dump_packet_raw(data)
return
if packet["type"] in ["type_beat", "type_mixer"]:
self.cl.eatBeat(packet)
packets_dump.dump_beat_packet(packet)
def handle_status_packet(self, data, addr):
#logging.debug("Broadcast status packet from {}".format(addr))
try:
packet = packets.StatusPacket.parse(data)
except Exception as e:
logging.warning("Failed to parse status packet from {}, {} bytes: {}".format(addr, len(data), e))
packets_dump.dump_packet_raw(data)
return
self.cl.eatStatus(packet)
packets_dump.dump_status_packet(packet)
# called whenever a keepalive packet is received
# arguments of cb: this clientlist object, player number of changed client
def set_client_keepalive_callback(self, cb=None):
self.cl.client_keepalive_callback = cb
# called whenever a status update of a known client is received
# arguments of cb: this clientlist object, player number of changed client
def set_client_change_callback(self, cb=None):
self.cl.client_change_callback = cb
# called when a player media changes
# arguments of cb: this clientlist object, player_number, changed slot
def set_media_change_callback(self, cb=None):
self.cl.media_change_callback = cb
|
[
"prodj.network.packets_dump.dump_status_packet",
"logging.debug",
"prodj.network.packets.StatusPacket.parse",
"socket.socket",
"prodj.core.vcdj.Vcdj",
"prodj.core.clientlist.ClientList",
"prodj.data.dataprovider.DataProvider",
"prodj.network.packets.KeepAlivePacket.parse",
"prodj.network.packets.BeatPacket.parse",
"prodj.network.packets_dump.dump_packet_raw",
"select.select",
"prodj.network.packets_dump.dump_beat_packet",
"prodj.network.nfsclient.NfsClient",
"prodj.network.packets_dump.dump_keepalive_packet"
] |
[((552, 568), 'prodj.core.clientlist.ClientList', 'ClientList', (['self'], {}), '(self)\n', (562, 568), False, 'from prodj.core.clientlist import ClientList\n'), ((585, 603), 'prodj.data.dataprovider.DataProvider', 'DataProvider', (['self'], {}), '(self)\n', (597, 603), False, 'from prodj.data.dataprovider import DataProvider\n'), ((620, 630), 'prodj.core.vcdj.Vcdj', 'Vcdj', (['self'], {}), '(self)\n', (624, 630), False, 'from prodj.core.vcdj import Vcdj\n'), ((646, 661), 'prodj.network.nfsclient.NfsClient', 'NfsClient', (['self'], {}), '(self)\n', (655, 661), False, 'from prodj.network.nfsclient import NfsClient\n'), ((958, 1006), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (971, 1006), False, 'import socket\n'), ((1285, 1333), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1298, 1333), False, 'import socket\n'), ((1579, 1627), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1592, 1627), False, 'import socket\n'), ((2660, 2695), 'logging.debug', 'logging.debug', (['"""starting main loop"""'], {}), "('starting main loop')\n", (2673, 2695), False, 'import logging\n'), ((3237, 3272), 'logging.debug', 'logging.debug', (['"""main loop finished"""'], {}), "('main loop finished')\n", (3250, 3272), False, 'import logging\n'), ((4098, 4140), 'prodj.network.packets_dump.dump_keepalive_packet', 'packets_dump.dump_keepalive_packet', (['packet'], {}), '(packet)\n', (4132, 4140), False, 'from prodj.network import packets_dump\n'), ((4577, 4614), 'prodj.network.packets_dump.dump_beat_packet', 'packets_dump.dump_beat_packet', (['packet'], {}), '(packet)\n', (4606, 4614), False, 'from prodj.network import packets_dump\n'), ((5005, 5044), 'prodj.network.packets_dump.dump_status_packet', 'packets_dump.dump_status_packet', (['packet'], {}), '(packet)\n', (5036, 5044), False, 'from prodj.network import packets_dump\n'), ((3417, 3452), 'prodj.network.packets.KeepAlivePacket.parse', 'packets.KeepAlivePacket.parse', (['data'], {}), '(data)\n', (3446, 3452), False, 'from prodj.network import packets\n'), ((4275, 4305), 'prodj.network.packets.BeatPacket.parse', 'packets.BeatPacket.parse', (['data'], {}), '(data)\n', (4299, 4305), False, 'from prodj.network import packets\n'), ((4753, 4785), 'prodj.network.packets.StatusPacket.parse', 'packets.StatusPacket.parse', (['data'], {}), '(data)\n', (4779, 4785), False, 'from prodj.network import packets\n'), ((2737, 2766), 'select.select', 'select', (['self.socks', '[]', '[]', '(1)'], {}), '(self.socks, [], [], 1)\n', (2743, 2766), False, 'from select import select\n'), ((3593, 3627), 'prodj.network.packets_dump.dump_packet_raw', 'packets_dump.dump_packet_raw', (['data'], {}), '(data)\n', (3621, 3627), False, 'from prodj.network import packets_dump\n'), ((4441, 4475), 'prodj.network.packets_dump.dump_packet_raw', 'packets_dump.dump_packet_raw', (['data'], {}), '(data)\n', (4469, 4475), False, 'from prodj.network import packets_dump\n'), ((4923, 4957), 'prodj.network.packets_dump.dump_packet_raw', 'packets_dump.dump_packet_raw', (['data'], {}), '(data)\n', (4951, 4957), False, 'from prodj.network import packets_dump\n')]
|
import requests
import bs4
import sqlite3
# Relevant API Documentation:
# USPTO: https://developer.uspto.gov/ibd-api-docs/
# Google Maps: https://developers.google.com/maps/documentation/geocoding/intro
USPTO_API = "https://developer.uspto.gov/ibd-api/v1/patent/application"
MAPS_API = "https://maps.googleapis.com/maps/api/geocode/json"
# Link to individual USPTO search page by patent number:
USPTO_PAGE = "http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HIT\
OFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1={}.PN."
SQLITE_DB = "db.sqlite"
def get_latlon(location):
"""Gets latitude and longitude corresponding to a place using Google Maps
API."""
result = requests.get(MAPS_API, params={"address": location})
return result.json()['results'][0]['geometry']['location']
def scrape_patent_web(patent_num):
"""Returns BS4/HTML of USPTO patent search for a patent entry. Contains
extra information (location, text) not available through API."""
patent_html = USPTO_PAGE.format(patent_num)
return bs4.BeautifulSoup(requests.get(patent_html).content, "lxml")
def get_location(patent_html):
"""Gets location of company associated with patent entry (dict)."""
# Grab metadata table
ass_loc = patent_html.find(text="Assignee:").find_next()
# Split tag contents so that only first assignee location is retrieved
ass_text = ass_loc.text.split('\n\n')[0].replace('\n', '')
lind = ass_text.find("(")
rind = ass_text.rfind(")")
return ass_text[lind + 1:rind]
def get_title(patent_html):
"""Gets title of patent entry (dict). Avoids case mangling (MRI -> Mri)
associated with the API results."""
return ' '.join(patent_html.find_all('font')[-1].text.split())
def get_abstract(patent_html):
"""Gets abstract of company associated with patent entry (dict)."""
return ' '.join(patent_html.p.contents[0].split())
if __name__ == '__main__':
# Search for successful (granted) patent applications in nanotechnology
search_params = {"searchText": "nano", "applicationType": "UTILITY",
"documentType": "grant", "rows": 100, 'sortOrder': 'desc'}
response = requests.get(USPTO_API, params=search_params)
# Check if request went through successfully (status code 200)
if response.status_code == 200:
# Get list of results
patents = response.json()['response']['docs']
# Populate a new SQLite database
db = sqlite3.connect(SQLITE_DB)
# Overwrite old data
db.execute("DROP TABLE IF EXISTS patents")
db.execute("""CREATE TABLE patents
(id INTEGER PRIMARY KEY, title TEXT, year INTEGER, assignee TEXT,
city TEXT, abstract TEXT, lat REAL, lng REAL)""")
for pat in patents:
html = scrape_patent_web(pat['patentNumber'])
pat['title'] = get_title(html)
print(pat['patentNumber'], pat['title'])
# Skip patent if there's no company listed.
if "assignee" not in pat:
print("No company assigned to patent - skipping.")
continue
try:
city = get_location(html)
loc = get_latlon(city)
print(city, loc)
except (IndexError, KeyError):
print("Can't grab location information - skipping.")
continue
abstr = get_abstract(html)
db.execute("INSERT INTO patents VALUES (?,?,?,?,?,?,?,?)",
(int(pat['patentNumber']), pat['title'], int(pat['year']),
pat['assignee'][0], city, abstr, loc['lat'], loc['lng']))
db.commit()
db.close()
else:
print("Unexpected response code:", response.status_code)
|
[
"sqlite3.connect",
"requests.get"
] |
[((704, 756), 'requests.get', 'requests.get', (['MAPS_API'], {'params': "{'address': location}"}), "(MAPS_API, params={'address': location})\n", (716, 756), False, 'import requests\n'), ((2195, 2240), 'requests.get', 'requests.get', (['USPTO_API'], {'params': 'search_params'}), '(USPTO_API, params=search_params)\n', (2207, 2240), False, 'import requests\n'), ((2485, 2511), 'sqlite3.connect', 'sqlite3.connect', (['SQLITE_DB'], {}), '(SQLITE_DB)\n', (2500, 2511), False, 'import sqlite3\n'), ((1079, 1104), 'requests.get', 'requests.get', (['patent_html'], {}), '(patent_html)\n', (1091, 1104), False, 'import requests\n')]
|
#! /usr/bin/env python
import argparse
import datetime
import json
import time
import logging
import pandas as pd
import requests
from pathlib import Path
from retrying import retry
AVAILABLE_CURRENCY_PAIRS = ['BTC_AMP', 'BTC_ARDR', 'BTC_BCH', 'BTC_BCN', 'BTC_BCY', 'BTC_BELA',
'BTC_BLK', 'BTC_BTCD', 'BTC_BTM', 'BTC_BTS', 'BTC_BURST', 'BTC_CLAM',
'BTC_CVC', 'BTC_DASH', 'BTC_DCR', 'BTC_DGB', 'BTC_DOGE', 'BTC_EMC2',
'BTC_ETC', 'BTC_ETH', 'BTC_EXP', 'BTC_FCT', 'BTC_FLDC', 'BTC_FLO', 'BTC_GAME',
'BTC_GAS', 'BTC_GNO', 'BTC_GNT', 'BTC_GRC', 'BTC_HUC', 'BTC_LBC', 'BTC_LSK',
'BTC_LTC', 'BTC_MAID', 'BTC_NAV', 'BTC_NEOS', 'BTC_NMC', 'BTC_NXC', 'BTC_NXT',
'BTC_OMG', 'BTC_OMNI', 'BTC_PASC', 'BTC_PINK', 'BTC_POT', 'BTC_PPC', 'BTC_RADS',
'BTC_SC', 'BTC_STEEM', 'BTC_STORJ', 'BTC_STR', 'BTC_STRAT', 'BTC_SYS',
'BTC_VIA', 'BTC_VRC', 'BTC_VTC', 'BTC_XBC', 'BTC_XCP', 'BTC_XEM', 'BTC_XMR',
'BTC_XPM', 'BTC_XRP', 'BTC_XVC', 'BTC_ZEC', 'BTC_ZRX', 'ETH_BCH', 'ETH_CVC',
'ETH_ETC', 'ETH_GAS', 'ETH_GNO', 'ETH_GNT', 'ETH_LSK', 'ETH_OMG', 'ETH_REP',
'ETH_STEEM', 'ETH_ZEC', 'ETH_ZRX', 'USDT_BCH', 'USDT_BTC', 'USDT_DASH',
'USDT_ETC', 'USDT_ETH', 'USDT_LTC', 'USDT_NXT', 'USDT_REP', 'USDT_STR',
'USDT_XMR', 'USDT_XRP', 'USDT_ZEC', 'XMR_BCN', 'XMR_BLK', 'XMR_BTCD', 'XMR_DASH',
'XMR_LTC', 'XMR_MAID', 'XMR_NXT', 'XMR_ZEC', 'BTC_REP', 'BTC_RIC', 'BTC_SBD',]
class CryptoData(object):
"""
Poloneix Documentation: https://poloniex.com/support/api/
## returnChartData
Returns candlestick chart data. Required GET parameters are "currencyPair",
"period" (candlestick period in seconds; valid values are 300, 900, 1800, 7200,
14400, and 86400), "start", and "end". "Start" and "end" are given in UNIX
timestamp format and used to specify the date range for the data returned. Sample output:
[{"date":1405699200,"high":0.0045388,"low":0.00403001,"open":0.00404545,"close":0.00427592,"volume":44.11655644,
"quoteVolume":10259.29079097,"weightedAverage":0.00430015}, ...]
"""
def __init__(self, currency_pair='USDT_BTC', start_date='2015-01-01', end_date=None,
period=14400, destination=None, api='returnChartData', logger=None):
self.currency_pair = currency_pair.upper()
self.start_timestamp = self.get_timestamp(date_string=start_date)
if not end_date:
self.end_timestamp = 9999999999
else:
self.end_timestamp = self.get_timestamp(date_string=end_date)
self.period = 300
self.api = api
self.destination = destination
self.data = None
self.logger = logger
self.url = f'https://poloniex.com/public?command={self.api}¤cyPair' \
f'={self.currency_pair}&start={self.start_timestamp}&end=' \
f'{self.end_timestamp}&period={self.period}'
def get_timestamp(self, date_string=None, date_format='%Y-%m-%d'):
if date_string is None:
return int(time.mktime(datetime.datetime.utcnow().timetuple()))
else:
return int(time.mktime(datetime.datetime.strptime(date_string, date_format).timetuple()))
def get_api_data(self):
response = requests.get(self.url)
return response
def parse_api_data_text(self, response):
parsed_data = json.loads(response.text)
if isinstance(parsed_data, dict) and 'error' in parsed_data.keys():
if parsed_data['error'] == 'Invalid currency pair.':
raise Exception(f'{self.currency_pair} is not a valid currency pair. ' \
f'You must use one of: \n{AVAILABLE_CURRENCY_PAIRS}')
else:
raise Exception(f'API Error: {parsed_data["error"]}')
return parsed_data
def build_dataframe(self, parsed_data):
data = pd.DataFrame(parsed_data)
data['datetime'] = data['date'].apply(datetime.datetime.utcfromtimestamp)
data.sort_values('datetime', inplace=True)
data['datetime_utc'] = data['datetime']
cols = ['datetime_utc', 'open', 'high', 'low', 'close', 'quoteVolume', 'volume',
'weightedAverage']
self.data = data[cols]
def save_data(self, dataframe):
dataframe.to_csv(self.destination, index=False)
return self
@retry(stop_max_attempt_number=7, wait_random_min=1000, wait_random_max=2000)
def run(self, save=True):
if self.data is None:
response = self.get_api_data()
self.build_dataframe(self.parse_api_data_text(response))
if save:
self.save_data(self.data)
return self
else:
return self.data
if __name__ == '__main__':
DESCRIPTION = """
A simple tool to pull price data from Poloneix's API. The data
can be saved down as a csv or used in memory as a pandas DataFrame.
Poloneix Documentation: https://poloniex.com/support/api/
"""
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument('--currency-pair', dest='currency_pair', default='USDT_LTC',
type=str, help='A poloneix currency pair. Use --pairs to view pairs')
parser.add_argument('--period', dest='period', default=14400, help='The timefrime to use '
'when pulling data in seconds. Defaults to 14400. Available options' \
' 300, 900, 1800, 7200, 14400, 86400.', type=int)
parser.add_argument('--dest', dest='dest', type=str, default=None, help='The full path to which '
'the output file should be saved. Defaults to the home directory.')
parser.add_argument('--start-date', dest='start_date', type=str,
default=datetime.datetime.strftime(
datetime.datetime.utcnow() + datetime.timedelta(-30), format='%Y-%m-%d'),
help='The start date for the data pull in the format YYYY-MM-DD. Defaults ' \
'to 30 days ago.')
parser.add_argument('--end-date', dest='end_date', type=str, default=None,
help='The end date for the data pull in the format YYYY-MM-DD. Defaults ' \
'to now.')
parser.add_argument('--pairs', dest='pairs', action='store_true',
default=False, help='A flag used to view currency pairs.')
args = parser.parse_args()
logger = logging.getLogger(__name__)
logger.setLevel('INFO')
if args.pairs:
chunks = [AVAILABLE_CURRENCY_PAIRS[x:x + 3] for x in range(0, len(AVAILABLE_CURRENCY_PAIRS), 3)]
setup = [[str(e) for e in row] for row in chunks]
lens = [max(map(len, col)) for col in zip(*setup)]
fmt = '\t'.join('{{:{}}}'.format(x) for x in lens)
table = [fmt.format(*row) for row in setup]
print('\n'.join(table))
CURRENCY_PAIR = args.currency_pair
SAVE = True
PERIOD = args.period
_dest = args.dest
if SAVE and _dest is None:
home_dir = str(Path.home())
DESTINATION = f'{home_dir}/{CURRENCY_PAIR}_{PERIOD}.csv'
else:
DESTINATION = _dest
START_DATE = args.start_date
END_DATE = args.end_date
client = CryptoData(
currency_pair=CURRENCY_PAIR,
destination=DESTINATION,
period=PERIOD,
start_date=START_DATE,
end_date=END_DATE,
logger=logger
)
client.run(save=SAVE)
|
[
"pandas.DataFrame",
"argparse.ArgumentParser",
"json.loads",
"pathlib.Path.home",
"datetime.datetime.utcnow",
"datetime.datetime.strptime",
"datetime.timedelta",
"requests.get",
"retrying.retry",
"logging.getLogger"
] |
[((4672, 4748), 'retrying.retry', 'retry', ([], {'stop_max_attempt_number': '(7)', 'wait_random_min': '(1000)', 'wait_random_max': '(2000)'}), '(stop_max_attempt_number=7, wait_random_min=1000, wait_random_max=2000)\n', (4677, 4748), False, 'from retrying import retry\n'), ((5336, 5384), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'DESCRIPTION'}), '(description=DESCRIPTION)\n', (5359, 5384), False, 'import argparse\n'), ((6812, 6839), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (6829, 6839), False, 'import logging\n'), ((3559, 3581), 'requests.get', 'requests.get', (['self.url'], {}), '(self.url)\n', (3571, 3581), False, 'import requests\n'), ((3674, 3699), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (3684, 3699), False, 'import json\n'), ((4191, 4216), 'pandas.DataFrame', 'pd.DataFrame', (['parsed_data'], {}), '(parsed_data)\n', (4203, 4216), True, 'import pandas as pd\n'), ((7411, 7422), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (7420, 7422), False, 'from pathlib import Path\n'), ((6180, 6206), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (6204, 6206), False, 'import datetime\n'), ((6209, 6232), 'datetime.timedelta', 'datetime.timedelta', (['(-30)'], {}), '(-30)\n', (6227, 6232), False, 'import datetime\n'), ((3354, 3380), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3378, 3380), False, 'import datetime\n'), ((3444, 3496), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_string', 'date_format'], {}), '(date_string, date_format)\n', (3470, 3496), False, 'import datetime\n')]
|
#!/usr/bin/env python
# Relay node takes a list of topics and republish prepending /record namespace
import rospy
import rostopic
import signal
import sys
QUEUE_SIZE=1000 #Make sure we don't miss points
def signal_handler(sig,frame):
print('Ctrl+c')
sys.exit(0)
signal.signal(signal.SIGINT,signal_handler)
def echo(pub,msg):
''' echos the message to a publisher '''
pub.publish(msg)
rospy.init_node('talker', anonymous=True)
# Get the list of topics to relay from rosparam
publishers=[]
subscribers=[]
# Manually list the topics to Relay
topics=['/emg']
for topic in topics:
#relay
(topicClass,topicName,c)=rostopic.get_topic_class(topic,blocking=True)
print("Relay for "+topicName+" with class "+str(topicClass))
pub = rospy.Publisher("/record"+topicName, topicClass, queue_size=QUEUE_SIZE)
callback=lambda msg: echo(pub,msg)
sub = rospy.Subscriber(topic, topicClass,callback)
publishers.append(pub)
subscribers.append(sub)
rospy.spin()
|
[
"rostopic.get_topic_class",
"rospy.Subscriber",
"rospy.Publisher",
"rospy.init_node",
"rospy.spin",
"signal.signal",
"sys.exit"
] |
[((274, 318), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handler'], {}), '(signal.SIGINT, signal_handler)\n', (287, 318), False, 'import signal\n'), ((406, 447), 'rospy.init_node', 'rospy.init_node', (['"""talker"""'], {'anonymous': '(True)'}), "('talker', anonymous=True)\n", (421, 447), False, 'import rospy\n'), ((986, 998), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (996, 998), False, 'import rospy\n'), ((262, 273), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (270, 273), False, 'import sys\n'), ((642, 688), 'rostopic.get_topic_class', 'rostopic.get_topic_class', (['topic'], {'blocking': '(True)'}), '(topic, blocking=True)\n', (666, 688), False, 'import rostopic\n'), ((763, 836), 'rospy.Publisher', 'rospy.Publisher', (["('/record' + topicName)", 'topicClass'], {'queue_size': 'QUEUE_SIZE'}), "('/record' + topicName, topicClass, queue_size=QUEUE_SIZE)\n", (778, 836), False, 'import rospy\n'), ((884, 929), 'rospy.Subscriber', 'rospy.Subscriber', (['topic', 'topicClass', 'callback'], {}), '(topic, topicClass, callback)\n', (900, 929), False, 'import rospy\n')]
|
"""
.. codeauthor:: <NAME> <<EMAIL>>
"""
import abc
import re
from decimal import Decimal
from typepy import RealNumber, String
from .error import ParameterError, UnitNotFoundError
_BASE_ATTRS = ("name", "regexp")
_RE_NUMBER = re.compile(r"^[-\+]?[0-9\.]+$")
def _get_unit_msg(text_units):
return ", ".join([", ".join(values) for values in text_units.values()])
class HumanReadableValue(metaclass=abc.ABCMeta):
@abc.abstractproperty
def _text_units(self): # pragma: no cover
pass
@abc.abstractproperty
def _units(self): # pragma: no cover
pass
@abc.abstractmethod
def get_as(self, unit): # pragma: no cover
pass
def __init__(self, readable_value, default_unit=None):
self._default_unit = self._normalize_unit(default_unit)
self._number, self._from_unit = self.__preprocess(readable_value)
def __repr__(self):
items = [str(self._number)]
if self._from_unit.name:
items.append(self._from_unit.name)
return " ".join(items)
def _normalize_unit(self, unit):
if unit is None:
return None
for u in self._text_units:
if u.regexp.search(unit):
return u
raise ValueError("unit not found: {}".format(unit))
def __split_unit(self, readable_value):
if RealNumber(readable_value).is_type():
if self._default_unit is None:
raise UnitNotFoundError(
"unit not found",
value=readable_value,
available_units=_get_unit_msg(self._text_units),
)
return (readable_value, self._default_unit)
if not String(readable_value).is_type():
raise TypeError("readable_value must be a string")
for unit in self._units:
try:
if unit.regexp.search(readable_value):
number = unit.regexp.split(readable_value)[0]
if not RealNumber(number).is_type():
continue
return (number, unit)
except TypeError:
continue
raise UnitNotFoundError(
"unit not found", value=readable_value, available_units=_get_unit_msg(self._text_units),
)
def __preprocess(self, readable_value):
if readable_value is None:
raise TypeError("readable_value must be a string")
number, from_unit = self.__split_unit(readable_value)
if number is not None:
number = self.__to_number(number)
if from_unit is None:
raise UnitNotFoundError(
"unit not found",
value=readable_value,
available_units=_get_unit_msg(self._text_units),
)
return (number, from_unit)
def __to_number(self, readable_num):
match = _RE_NUMBER.search(readable_num)
if not match:
raise ParameterError(
"human-readable value should only include a number", value=readable_num
)
return Decimal(match.group())
|
[
"typepy.RealNumber",
"typepy.String",
"re.compile"
] |
[((232, 264), 're.compile', 're.compile', (['"""^[-\\\\+]?[0-9\\\\.]+$"""'], {}), "('^[-\\\\+]?[0-9\\\\.]+$')\n", (242, 264), False, 'import re\n'), ((1352, 1378), 'typepy.RealNumber', 'RealNumber', (['readable_value'], {}), '(readable_value)\n', (1362, 1378), False, 'from typepy import RealNumber, String\n'), ((1714, 1736), 'typepy.String', 'String', (['readable_value'], {}), '(readable_value)\n', (1720, 1736), False, 'from typepy import RealNumber, String\n'), ((2010, 2028), 'typepy.RealNumber', 'RealNumber', (['number'], {}), '(number)\n', (2020, 2028), False, 'from typepy import RealNumber, String\n')]
|
import matplotlib.pyplot as plt
import time
from matplotlib import animation
def animate(U, timeSteps: int, postionSteps: int, timeStepSize: float):
fig= plt.figure()
ims = []
for i in range(timeSteps):
im = plt.plot(postionSteps, U[:,i] , animated = True, color = 'red')
ims.append(im)
ani = animation.ArtistAnimation(fig, ims, interval = (10), blit = True, repeat_delay = 500)
plt.show()
#animation(u, trial.k_N, trial.x_range, trial.k)
|
[
"matplotlib.animation.ArtistAnimation",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot"
] |
[((165, 177), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (175, 177), True, 'import matplotlib.pyplot as plt\n'), ((332, 409), 'matplotlib.animation.ArtistAnimation', 'animation.ArtistAnimation', (['fig', 'ims'], {'interval': '(10)', 'blit': '(True)', 'repeat_delay': '(500)'}), '(fig, ims, interval=10, blit=True, repeat_delay=500)\n', (357, 409), False, 'from matplotlib import animation\n'), ((422, 432), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (430, 432), True, 'import matplotlib.pyplot as plt\n'), ((235, 294), 'matplotlib.pyplot.plot', 'plt.plot', (['postionSteps', 'U[:, i]'], {'animated': '(True)', 'color': '"""red"""'}), "(postionSteps, U[:, i], animated=True, color='red')\n", (243, 294), True, 'import matplotlib.pyplot as plt\n')]
|
import unittest
from decimal import Decimal
from Broker import Broker
from Portfolio import Account, Portfolio
from tests.MockAuthenticator import MockAuthenticator, load_test_balance, \
load_test_positions
class TestPortfolio(unittest.TestCase):
def setUp(self):
self.broker = Broker()
self.broker.set_authenticator(MockAuthenticator())
self.portfolio = Portfolio(self.broker)
self.portfolio.load_accounts()
def test_load_accounts(self):
self.portfolio.load_accounts()
result = self.portfolio.list_accounts()
expected = ['111111', '222222']
self.assertEqual(result, expected)
def test_get_all_positions(self):
expected = {}
for account in self.portfolio.list_accounts():
positions = load_test_positions(account)
for position in positions:
if position['symbol'] not in expected:
expected[position['symbol']] = Decimal(
position['currentMarketValue']).quantize(
Decimal('0.00'))
else:
expected[position['symbol']] += Decimal(
position['currentMarketValue']).quantize(
Decimal('0.00'))
result = self.portfolio.get_all_positions()
self.assertEqual(result, expected)
def test_get_total_holdings(self):
expected = 0
for account in self.portfolio.list_accounts():
expected += Decimal(
load_test_balance(account)['combinedBalances'][0][
'marketValue']).quantize(Decimal('0.00'))
result = self.portfolio.get_total_holdings()
self.assertEqual(result, expected)
def test_get_cash(self):
expected = 0
for account in self.portfolio.list_accounts():
expected += Decimal(
load_test_balance(account)['combinedBalances'][0][
'cash']).quantize(Decimal('0.00'))
result = self.portfolio.get_cash()
self.assertEqual(result, expected)
class TestAccount(unittest.TestCase):
def setUp(self):
self.account_id = '111111'
self.account = Account(load_test_balance(self.account_id),
load_test_positions(self.account_id))
def test_get_balance(self):
expected = load_test_balance(self.account_id)
result = self.account.get_balance()
self.assertEqual(result, expected)
def test_get_total_holdings(self):
expected = Decimal(
load_test_balance(self.account_id)['combinedBalances'][0][
'marketValue']).quantize(Decimal('0.00'))
result = self.account.get_total_holdings()
self.assertEqual(result, expected)
def test_get_positions(self):
positions = load_test_positions(self.account_id)
expected = {}
for position in positions:
expected[position['symbol']] = Decimal(
position['currentMarketValue']).quantize(Decimal('0.00'))
result = self.account.get_positions()
self.assertEqual(result, expected)
def test_get_cash(self):
balance = load_test_balance(self.account_id)
expected = Decimal(balance['combinedBalances'][0]['cash']).quantize(
Decimal('0.00'))
result = self.account.get_cash()
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"Broker.Broker",
"decimal.Decimal",
"tests.MockAuthenticator.load_test_positions",
"Portfolio.Portfolio",
"tests.MockAuthenticator.MockAuthenticator",
"tests.MockAuthenticator.load_test_balance"
] |
[((3462, 3477), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3475, 3477), False, 'import unittest\n'), ((298, 306), 'Broker.Broker', 'Broker', ([], {}), '()\n', (304, 306), False, 'from Broker import Broker\n'), ((391, 413), 'Portfolio.Portfolio', 'Portfolio', (['self.broker'], {}), '(self.broker)\n', (400, 413), False, 'from Portfolio import Account, Portfolio\n'), ((2379, 2413), 'tests.MockAuthenticator.load_test_balance', 'load_test_balance', (['self.account_id'], {}), '(self.account_id)\n', (2396, 2413), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((2847, 2883), 'tests.MockAuthenticator.load_test_positions', 'load_test_positions', (['self.account_id'], {}), '(self.account_id)\n', (2866, 2883), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((3204, 3238), 'tests.MockAuthenticator.load_test_balance', 'load_test_balance', (['self.account_id'], {}), '(self.account_id)\n', (3221, 3238), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((345, 364), 'tests.MockAuthenticator.MockAuthenticator', 'MockAuthenticator', ([], {}), '()\n', (362, 364), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((798, 826), 'tests.MockAuthenticator.load_test_positions', 'load_test_positions', (['account'], {}), '(account)\n', (817, 826), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((2222, 2256), 'tests.MockAuthenticator.load_test_balance', 'load_test_balance', (['self.account_id'], {}), '(self.account_id)\n', (2239, 2256), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((2289, 2325), 'tests.MockAuthenticator.load_test_positions', 'load_test_positions', (['self.account_id'], {}), '(self.account_id)\n', (2308, 2325), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((2681, 2696), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (2688, 2696), False, 'from decimal import Decimal\n'), ((3328, 3343), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (3335, 3343), False, 'from decimal import Decimal\n'), ((1634, 1649), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (1641, 1649), False, 'from decimal import Decimal\n'), ((1991, 2006), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (1998, 2006), False, 'from decimal import Decimal\n'), ((3050, 3065), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (3057, 3065), False, 'from decimal import Decimal\n'), ((3258, 3305), 'decimal.Decimal', 'Decimal', (["balance['combinedBalances'][0]['cash']"], {}), "(balance['combinedBalances'][0]['cash'])\n", (3265, 3305), False, 'from decimal import Decimal\n'), ((2984, 3023), 'decimal.Decimal', 'Decimal', (["position['currentMarketValue']"], {}), "(position['currentMarketValue'])\n", (2991, 3023), False, 'from decimal import Decimal\n'), ((1071, 1086), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (1078, 1086), False, 'from decimal import Decimal\n'), ((1261, 1276), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (1268, 1276), False, 'from decimal import Decimal\n'), ((972, 1011), 'decimal.Decimal', 'Decimal', (["position['currentMarketValue']"], {}), "(position['currentMarketValue'])\n", (979, 1011), False, 'from decimal import Decimal\n'), ((1162, 1201), 'decimal.Decimal', 'Decimal', (["position['currentMarketValue']"], {}), "(position['currentMarketValue'])\n", (1169, 1201), False, 'from decimal import Decimal\n'), ((2581, 2615), 'tests.MockAuthenticator.load_test_balance', 'load_test_balance', (['self.account_id'], {}), '(self.account_id)\n', (2598, 2615), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((1538, 1564), 'tests.MockAuthenticator.load_test_balance', 'load_test_balance', (['account'], {}), '(account)\n', (1555, 1564), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n'), ((1902, 1928), 'tests.MockAuthenticator.load_test_balance', 'load_test_balance', (['account'], {}), '(account)\n', (1919, 1928), False, 'from tests.MockAuthenticator import MockAuthenticator, load_test_balance, load_test_positions\n')]
|
from django.contrib import admin
from .models import Medicine, MedicineForm, Suggestion, Instruction, Company, Prescription, PrescribedMedicine, PatientHistory
admin.site.register(Medicine)
admin.site.register(MedicineForm)
admin.site.register(Suggestion)
admin.site.register(Instruction)
admin.site.register(Company)
admin.site.register(Prescription)
admin.site.register(PrescribedMedicine)
admin.site.register(PatientHistory)
|
[
"django.contrib.admin.site.register"
] |
[((161, 190), 'django.contrib.admin.site.register', 'admin.site.register', (['Medicine'], {}), '(Medicine)\n', (180, 190), False, 'from django.contrib import admin\n'), ((191, 224), 'django.contrib.admin.site.register', 'admin.site.register', (['MedicineForm'], {}), '(MedicineForm)\n', (210, 224), False, 'from django.contrib import admin\n'), ((225, 256), 'django.contrib.admin.site.register', 'admin.site.register', (['Suggestion'], {}), '(Suggestion)\n', (244, 256), False, 'from django.contrib import admin\n'), ((257, 289), 'django.contrib.admin.site.register', 'admin.site.register', (['Instruction'], {}), '(Instruction)\n', (276, 289), False, 'from django.contrib import admin\n'), ((290, 318), 'django.contrib.admin.site.register', 'admin.site.register', (['Company'], {}), '(Company)\n', (309, 318), False, 'from django.contrib import admin\n'), ((319, 352), 'django.contrib.admin.site.register', 'admin.site.register', (['Prescription'], {}), '(Prescription)\n', (338, 352), False, 'from django.contrib import admin\n'), ((353, 392), 'django.contrib.admin.site.register', 'admin.site.register', (['PrescribedMedicine'], {}), '(PrescribedMedicine)\n', (372, 392), False, 'from django.contrib import admin\n'), ((393, 428), 'django.contrib.admin.site.register', 'admin.site.register', (['PatientHistory'], {}), '(PatientHistory)\n', (412, 428), False, 'from django.contrib import admin\n')]
|
"""
# Copyright 2022 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
from unittest import TestCase
from unittest.mock import Mock
from cibyl.outputs.cli.ci.system.utils.sorting.jobs import SortJobsByName
class TestSortJobsByName(TestCase):
"""Tests for :class:`SortJobsByName`.
"""
def test_names_are_equal(self):
"""Checks that two jobs are equal if they share the same name.
"""
job1 = Mock()
job1.name.value = 'job'
job2 = Mock()
job2.name.value = 'job'
comparator = SortJobsByName()
self.assertEqual(
0,
comparator.compare(job1, job2)
)
def test_alphabetical_order(self):
"""Checks that the comparator will sort jobs in alphabetical order.
"""
job1 = Mock()
job1.name.value = 'A'
job2 = Mock()
job2.name.value = 'B'
comparator = SortJobsByName()
self.assertEqual(
-1,
comparator.compare(job1, job2)
)
self.assertEqual(
1,
comparator.compare(job2, job1)
)
|
[
"unittest.mock.Mock",
"cibyl.outputs.cli.ci.system.utils.sorting.jobs.SortJobsByName"
] |
[((969, 975), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (973, 975), False, 'from unittest.mock import Mock\n'), ((1024, 1030), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1028, 1030), False, 'from unittest.mock import Mock\n'), ((1085, 1101), 'cibyl.outputs.cli.ci.system.utils.sorting.jobs.SortJobsByName', 'SortJobsByName', ([], {}), '()\n', (1099, 1101), False, 'from cibyl.outputs.cli.ci.system.utils.sorting.jobs import SortJobsByName\n'), ((1340, 1346), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1344, 1346), False, 'from unittest.mock import Mock\n'), ((1393, 1399), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1397, 1399), False, 'from unittest.mock import Mock\n'), ((1452, 1468), 'cibyl.outputs.cli.ci.system.utils.sorting.jobs.SortJobsByName', 'SortJobsByName', ([], {}), '()\n', (1466, 1468), False, 'from cibyl.outputs.cli.ci.system.utils.sorting.jobs import SortJobsByName\n')]
|
# rough copy of https://github.com/geohot/tinygrad/blob/master/examples/mnist_gan.py
from simplegrad import Tensor, Device, Adam
import numpy as np
import itertools as it
from torchvision.utils import make_grid, save_image
import torch
from abc import abstractmethod
import os
def leakyrelu(x, neg_slope=0.2):
return x.relu().sub(x.fork().mul(Tensor(neg_slope).mul(Tensor(-1.0))).relu())
torch.functional.F.leaky_relu(torch.tensor(x.val), negative_slope=0.2)
def random_uniform(*shape):
return np.random.uniform(-1., 1., size=shape)/np.sqrt(np.prod(shape)).astype(np.float32)
class nn:
@abstractmethod
def forward(self, x):
raise NotImplementedError
@property
def params(self):
return tuple(v for k,v in self.__dict__.items() if isinstance(v, Tensor))
class LinearGen(nn):
def __init__(self):
self.l1 = Tensor(random_uniform(128,256))
self.l2 = Tensor(random_uniform(256, 512))
self.l3 = Tensor(random_uniform(512, 1024))
self.l4 = Tensor(random_uniform(1024, 784))
def forward(self, x):
for layer in [self.l1, self.l2, self.l3]:
leakyrelu(x.dot(layer))
return x.dot(self.l4).tanh()
class LinearDisc(nn):
def __init__(self):
self.l1 = Tensor(random_uniform(784, 1024))
self.l2 = Tensor(random_uniform(1024, 512))
self.l3 = Tensor(random_uniform(512, 256))
self.l4 = Tensor(random_uniform(256, 2))
def forward(self, x):
for layer in [self.l1, self.l2, self.l3]:
leakyrelu(x.dot(layer))
return x.dot(self.l4).logsoftmax()
import gzip
def fetch(url):
import requests, tempfile, os
fp = os.path.join(tempfile.gettempdir(), url.encode()[-10:].hex())
if os.path.isfile(fp) and os.stat(fp).st_size:
with open(fp, 'rb') as f:
return f.read()
dat = requests.get(url).content
with open(fp + '.tmp', 'wb') as f:
f.write(dat)
os.rename(fp+'.tmp', fp)
return dat
def test_minst_gan():
generator = LinearGen()
discriminator = LinearDisc()
parse = lambda dat: np.frombuffer(gzip.decompress(dat), dtype=np.uint8).copy()
x_train = parse(fetch(url = "http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))[0x10:].reshape((-1, 28*28)).astype(np.float32)
# Hyperparameters
epochs = 10
batch_size = 512
n_batches = int(len(x_train) / batch_size)
output_folder = "outputs"
ds_noise = np.random.randn(64,128).astype(np.float32)
optim_g = Adam(generator.params, learning_rate=0.0002, beta1=0.5)
optim_d = Adam(discriminator.params, learning_rate=0.0002, beta1=0.5)
def batches_generator():
batch_nr = 0
while batch_nr < n_batches:
idx = np.random.randint(0, x_train.shape[0], size=(batch_size))
image_b = x_train[idx].reshape(-1, 28*28).astype(np.float32)/255.
image_b = (image_b - 0.5)/0.5
yield image_b
batch_nr += 1
def real_label(bs):
y = np.zeros((bs,2), np.float32)
y[range(bs), [1]*bs] = -2.0
real_labels = Tensor(y)
return real_labels
def fake_label(bs):
y = np.zeros((bs,2), np.float32)
y[range(bs), [0]*bs] = -2.0
fake_labels = Tensor(y)
return fake_labels
def train_discriminator(optim, data_real, data_fake):
real_labels = real_label(batch_size)
fake_labels = fake_label(batch_size)
optim.zero_grad()
output_real = discriminator.forward(data_real)
loss_real = real_labels.mul(output_real).mean(axis=(0,1))
output_fake = discriminator.forward(data_fake)
loss_fake = fake_labels.mul(output_fake).mean(axis=(0,1))
loss_real.backward()
loss_fake.backward()
optim.step()
return loss_fake.val + loss_real.val
def train_generator(optim, data_fake):
real_labels = real_label(batch_size)
optim.zero_grad()
output = discriminator.forward(data_fake)
loss = real_labels.mul(output).mean(axis=(0,1))
loss.backward()
optim.step()
return loss.val
for epoch in range(epochs):
batches = tuple(batches_generator())
for data_real in batches:
data_real = Tensor(data_real)
noise = Tensor(np.random.randn(batch_size, 128))
data_fake = generator.forward(noise)
data_fake = Tensor(data_fake.val)
loss_d = train_discriminator(optim_d, data_real, data_fake).item()
noise = Tensor(np.random.randn(batch_size, 128))
data_fake = generator.forward(noise)
loss_g = train_generator(optim_g, data_fake).item()
# generate images after each epoch
fake_images = generator.forward(Tensor(ds_noise)).val
fake_images = (fake_images.reshape(-1, 1, 28, 28)+ 1) / 2
fake_images = make_grid(torch.tensor(fake_images))
save_image(fake_images, os.path.join(output_folder, f'image_{epoch}.jpg'))
|
[
"numpy.random.uniform",
"os.stat",
"numpy.random.randn",
"os.rename",
"tempfile.gettempdir",
"numpy.zeros",
"gzip.decompress",
"numpy.prod",
"os.path.isfile",
"numpy.random.randint",
"requests.get",
"simplegrad.Tensor",
"os.path.join",
"simplegrad.Adam",
"torch.tensor"
] |
[((1961, 1987), 'os.rename', 'os.rename', (["(fp + '.tmp')", 'fp'], {}), "(fp + '.tmp', fp)\n", (1970, 1987), False, 'import requests, tempfile, os\n'), ((2523, 2578), 'simplegrad.Adam', 'Adam', (['generator.params'], {'learning_rate': '(0.0002)', 'beta1': '(0.5)'}), '(generator.params, learning_rate=0.0002, beta1=0.5)\n', (2527, 2578), False, 'from simplegrad import Tensor, Device, Adam\n'), ((2593, 2652), 'simplegrad.Adam', 'Adam', (['discriminator.params'], {'learning_rate': '(0.0002)', 'beta1': '(0.5)'}), '(discriminator.params, learning_rate=0.0002, beta1=0.5)\n', (2597, 2652), False, 'from simplegrad import Tensor, Device, Adam\n'), ((428, 447), 'torch.tensor', 'torch.tensor', (['x.val'], {}), '(x.val)\n', (440, 447), False, 'import torch\n'), ((509, 549), 'numpy.random.uniform', 'np.random.uniform', (['(-1.0)', '(1.0)'], {'size': 'shape'}), '(-1.0, 1.0, size=shape)\n', (526, 549), True, 'import numpy as np\n'), ((1698, 1719), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (1717, 1719), False, 'import requests, tempfile, os\n'), ((1754, 1772), 'os.path.isfile', 'os.path.isfile', (['fp'], {}), '(fp)\n', (1768, 1772), False, 'import requests, tempfile, os\n'), ((1871, 1888), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1883, 1888), False, 'import requests, tempfile, os\n'), ((3025, 3054), 'numpy.zeros', 'np.zeros', (['(bs, 2)', 'np.float32'], {}), '((bs, 2), np.float32)\n', (3033, 3054), True, 'import numpy as np\n'), ((3112, 3121), 'simplegrad.Tensor', 'Tensor', (['y'], {}), '(y)\n', (3118, 3121), False, 'from simplegrad import Tensor, Device, Adam\n'), ((3186, 3215), 'numpy.zeros', 'np.zeros', (['(bs, 2)', 'np.float32'], {}), '((bs, 2), np.float32)\n', (3194, 3215), True, 'import numpy as np\n'), ((3273, 3282), 'simplegrad.Tensor', 'Tensor', (['y'], {}), '(y)\n', (3279, 3282), False, 'from simplegrad import Tensor, Device, Adam\n'), ((1777, 1788), 'os.stat', 'os.stat', (['fp'], {}), '(fp)\n', (1784, 1788), False, 'import requests, tempfile, os\n'), ((2465, 2489), 'numpy.random.randn', 'np.random.randn', (['(64)', '(128)'], {}), '(64, 128)\n', (2480, 2489), True, 'import numpy as np\n'), ((2758, 2813), 'numpy.random.randint', 'np.random.randint', (['(0)', 'x_train.shape[0]'], {'size': 'batch_size'}), '(0, x_train.shape[0], size=batch_size)\n', (2775, 2813), True, 'import numpy as np\n'), ((4281, 4298), 'simplegrad.Tensor', 'Tensor', (['data_real'], {}), '(data_real)\n', (4287, 4298), False, 'from simplegrad import Tensor, Device, Adam\n'), ((4433, 4454), 'simplegrad.Tensor', 'Tensor', (['data_fake.val'], {}), '(data_fake.val)\n', (4439, 4454), False, 'from simplegrad import Tensor, Device, Adam\n'), ((4913, 4938), 'torch.tensor', 'torch.tensor', (['fake_images'], {}), '(fake_images)\n', (4925, 4938), False, 'import torch\n'), ((4972, 5021), 'os.path.join', 'os.path.join', (['output_folder', 'f"""image_{epoch}.jpg"""'], {}), "(output_folder, f'image_{epoch}.jpg')\n", (4984, 5021), False, 'import requests, tempfile, os\n'), ((4326, 4358), 'numpy.random.randn', 'np.random.randn', (['batch_size', '(128)'], {}), '(batch_size, 128)\n', (4341, 4358), True, 'import numpy as np\n'), ((4562, 4594), 'numpy.random.randn', 'np.random.randn', (['batch_size', '(128)'], {}), '(batch_size, 128)\n', (4577, 4594), True, 'import numpy as np\n'), ((4793, 4809), 'simplegrad.Tensor', 'Tensor', (['ds_noise'], {}), '(ds_noise)\n', (4799, 4809), False, 'from simplegrad import Tensor, Device, Adam\n'), ((556, 570), 'numpy.prod', 'np.prod', (['shape'], {}), '(shape)\n', (563, 570), True, 'import numpy as np\n'), ((2124, 2144), 'gzip.decompress', 'gzip.decompress', (['dat'], {}), '(dat)\n', (2139, 2144), False, 'import gzip\n'), ((371, 383), 'simplegrad.Tensor', 'Tensor', (['(-1.0)'], {}), '(-1.0)\n', (377, 383), False, 'from simplegrad import Tensor, Device, Adam\n'), ((349, 366), 'simplegrad.Tensor', 'Tensor', (['neg_slope'], {}), '(neg_slope)\n', (355, 366), False, 'from simplegrad import Tensor, Device, Adam\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pylab as plt
def save_figure(xs, file_path):
plt.figure()
plt.ylim([-2.0, 2.0])
plt.xlim([-2.0, 2.0])
plt.plot(xs[:,0], xs[:,1], "ro")
plt.savefig(file_path)
plt.close()
|
[
"matplotlib.pylab.savefig",
"matplotlib.pylab.plot",
"matplotlib.use",
"matplotlib.pylab.xlim",
"matplotlib.pylab.close",
"matplotlib.pylab.ylim",
"matplotlib.pylab.figure"
] |
[((172, 193), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (186, 193), False, 'import matplotlib\n'), ((260, 272), 'matplotlib.pylab.figure', 'plt.figure', ([], {}), '()\n', (270, 272), True, 'import matplotlib.pylab as plt\n'), ((275, 296), 'matplotlib.pylab.ylim', 'plt.ylim', (['[-2.0, 2.0]'], {}), '([-2.0, 2.0])\n', (283, 296), True, 'import matplotlib.pylab as plt\n'), ((299, 320), 'matplotlib.pylab.xlim', 'plt.xlim', (['[-2.0, 2.0]'], {}), '([-2.0, 2.0])\n', (307, 320), True, 'import matplotlib.pylab as plt\n'), ((323, 357), 'matplotlib.pylab.plot', 'plt.plot', (['xs[:, 0]', 'xs[:, 1]', '"""ro"""'], {}), "(xs[:, 0], xs[:, 1], 'ro')\n", (331, 357), True, 'import matplotlib.pylab as plt\n'), ((358, 380), 'matplotlib.pylab.savefig', 'plt.savefig', (['file_path'], {}), '(file_path)\n', (369, 380), True, 'import matplotlib.pylab as plt\n'), ((383, 394), 'matplotlib.pylab.close', 'plt.close', ([], {}), '()\n', (392, 394), True, 'import matplotlib.pylab as plt\n')]
|
import flask
from tensorflow import keras
import pandas as pd
from flask import request, jsonify
from pandas.io.json import json_normalize
app = flask.Flask(__name__)
app.config["DEBUG"] = True
@app.route('/api/prediction', methods=['POST'])
def precict():
# Validate the request body contains JSON
if request.is_json:
# Parse the JSON into a Python dictionary
req = request.get_json()
sample_df = json_normalize(req)
timesteps = 40
#sample_df = sample_df.drop(["TIMESTAMP"], axis=1)
sample_df = sample_df.astype(float)
x_test, y_test = sample_df.iloc[:, :-1], sample_df.iloc[:, -1]
n_features = 83
x_test_reshaped = x_test.values.reshape(x_test.shape[0], timesteps + 1, n_features)
optimizer = keras.optimizers.Nadam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-07, schedule_decay=0.004)
model = keras.models.load_model('models/CNN-1.h5', compile=False) # todo: get right model
model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])
y_pred = model.predict(x_test_reshaped)
y_class = y_pred.argmax(axis=-1)
y_class = y_class + 1
y_pred_pd = pd.DataFrame(y_class, columns=["class"])
y_test_pd = pd.DataFrame(y_test.tolist(), columns=["class"])
# activity_map = {0: "no activity", 1: "Act01", 2: "Act02", 3: "Act03", 4: "Act04", 5: "Act05", 6: "Act06", 7: "Act07", 8: "Act08",
# 9: "Act09", 10: "Act10", 11: "Act11", 12: "Act12", 13: "Act13", 14: "Act14", 15: "Act15",
# 16: "Act16", 17: "Act17", 18: "Act18", 19: "Act19", 20: "Act20", 21: "Act21", 22: "Act22",
# 23: "Act23", 24: "Act24"}
activity_map = {0: "no activity", 1: "Take medication", 2: "Prepare breakfast", 3: "Prepare lunch", 4: "Prepare dinner",
5: "Breakfast", 6: "Lunch", 7: "Dinner", 8: "Eat a snack", 9: "Watch TV", 10: "Enter the SmartLab",
11: "Play a videogame", 12: "Relax on the sofa", 13: "Leave the SmartLab", 14: "Visit in the SmartLab",
15: "Put waste in the bin", 16: "Wash hands", 17: "Brush teeth", 18: "Use the toilet", 19: "Wash dishes",
20: "Put washin into the washing machine", 21: "Work at the table", 22: "Dressing", 23: "Go to the bed",
24: "Wake up"}
predicted_class = y_pred_pd["class"].map(activity_map)
y_test_pd = y_test_pd.astype(float)
actual_class = y_test_pd["class"].map(activity_map)
prediction_result = "The new data point is predicted to be the activity {} ({}). The ground truth activity is {} ({}). ".format(predicted_class[0], y_class[0], actual_class[0], int(y_test[0]))
if(y_class[0] == int(y_test[0])):
prediction_result += "The system predicted correctly! "
else:
prediction_result += "The system predicted wrong! "
print(prediction_result)
# Return a string along with an HTTP status code
return prediction_result, 200
else:
# The request body wasn't JSON so return a 400 HTTP status code
return "Request was not JSON", 400
app.run()
|
[
"pandas.DataFrame",
"tensorflow.keras.models.load_model",
"pandas.io.json.json_normalize",
"flask.Flask",
"tensorflow.keras.optimizers.Nadam",
"flask.request.get_json"
] |
[((146, 167), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (157, 167), False, 'import flask\n'), ((397, 415), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (413, 415), False, 'from flask import request, jsonify\n'), ((436, 455), 'pandas.io.json.json_normalize', 'json_normalize', (['req'], {}), '(req)\n', (450, 455), False, 'from pandas.io.json import json_normalize\n'), ((794, 894), 'tensorflow.keras.optimizers.Nadam', 'keras.optimizers.Nadam', ([], {'lr': '(0.0001)', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': '(1e-07)', 'schedule_decay': '(0.004)'}), '(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-07,\n schedule_decay=0.004)\n', (816, 894), False, 'from tensorflow import keras\n'), ((908, 965), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['"""models/CNN-1.h5"""'], {'compile': '(False)'}), "('models/CNN-1.h5', compile=False)\n", (931, 965), False, 'from tensorflow import keras\n'), ((1225, 1265), 'pandas.DataFrame', 'pd.DataFrame', (['y_class'], {'columns': "['class']"}), "(y_class, columns=['class'])\n", (1237, 1265), True, 'import pandas as pd\n')]
|
from collections import Counter
class Solution:
def findAnagrams(self, word: str, substr: str):
"""O(n) time | O(1) space"""
if not word or not substr: return []
l = 0
r = -1
seen = 0
ln = len(substr)
counts = Counter(substr)
counts = {char: -counts[char] for char in substr}
result = []
while r < len(word)-1:
r += 1
char = word[r]
if char in counts:
counts[char] += 1
if counts[char] <= 0:
seen += 1
if seen == ln:
result.append(l)
if r-l+1 == ln:
char = word[l]
l += 1
if char in counts:
counts[char] -= 1
if counts[char] < 0:
seen -= 1
return result
|
[
"collections.Counter"
] |
[((271, 286), 'collections.Counter', 'Counter', (['substr'], {}), '(substr)\n', (278, 286), False, 'from collections import Counter\n')]
|
import random
import speech_recognition as sr
import datetime
import calendar
import time
import webbrowser
import wikipedia
from gtts import gTTS
import playsound
import os
import win10toast
from bs4 import BeautifulSoup
import requests
import re
import nltk
from googletrans import Translator
import sports
from newspaper import Article
bot_name = "Rag2020"
bot_template = "{0}"
user_template = "{1}"
def send_message(message):
response = respond(message)
alex_speak(bot_template.format(response))
def respond(message):
if message in responses:
bot_message = random.choice(responses[message])
elif 'Search' in message:
search = record_audio("Specify the word")
url = "https://google.com/search?q=" +search
bot_message = webbrowser.get().open(url)
elif message == "Find Location":
location = record_audio("City name")
url = "https://google.ml/maps/place/" + location +'/&'
bot_message = webbrowser.get().open(url)
elif message == "Calculate":
m = record_audio("What you have to compute")
bot_message = calculate(m)
elif 'who is' in message:
person = person_name(message)
bot_message = wikipedia.summary(person, sentences=2)
elif message == "Set An Remainder":
bot_message = remainder()
elif message == "Set An Alarm":
bot_message = alarm()
elif message == "Play Me A Song":
bot_message = melody()
elif message == "Weather":
bot_message = weather_manager()
elif message == "Wikipedia":
bot_message = scrap()
elif message == "Translate":
bot_message = trans()
elif message == "Headlines":
bot_message = news_scrap()
elif message == "Live Score":
bot_message = sport_score()
elif message == "Exit":
breakpoint()
else:
bot_message = random.choice(responses["Default"])
return bot_message
def date_and_time():
now = datetime.datetime.now()
today = datetime.datetime.today()
weekday = calendar.day_name[today.weekday()]
month = now.month
day = now.day
month_list = ['January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December']
Numbers = ['1st', '2nd', '3rd', '4th', '5th', '6th', '7th', '8th', '9th', '10th', '11th', '12th', '13th',
'14th', '15th', '16th', '17th', '18th', '19th', '20th', '21st', '22nd', '23rd', '24th', '25th', '26th', '27th',
'28th', '29th', '30th', '31st']
return "Today is "+weekday + ' '+month_list[month-1]+' the ' + Numbers[day-1]
def month():
now = datetime.datetime.now()
month = now.month
month_list = ['January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December']
return month_list[month-1]
def current_time():
local_time = time.ctime()
return local_time
def calculate(message):
message = message.split()
i = 0
request_d = {}
for req in message:
request_d[i] = req
i = i + 1
for key,value in request_d.items():
if value == '+':
return int(request_d[key - 1]) + int(request_d[key + 1])
if value == '-':
return int(request_d[key - 1]) - int(request_d[key + 1])
if value == '*':
return int(request_d[key - 1]) * int(request_d[key + 1])
if value == '/':
return int(request_d[key - 1]) / int(request_d[key + 1])
def person_name(text):
name = text.split()
for i in range(0, len(name)):
if i + 3 <= len(name)-1 and name[i].lower == 'who' and name[i+1].lower == 'is':
return name[i+2]+ ' '+ name[i+3]
def remainder():
Remainder_message = record_audio("Enter the remainder message:")
time = str(input("Enter the timing in format HH:MM"))
date = str(input("Enter the remainder date in format DD/MM/YYYY"))
time = time.split(":")
date = date.split("/")
timings = str(input("Enter AM or PM"))
timings = timings.lower()
alarmHour = int(time[0])
alarmMinute = int(time[1])
rem_date = int(date[0])
rem_month = int(date[1])
rem_year = int(date[2])
if timings == "pm":
alarmHour = alarmHour + 12
while True:
if alarmHour == datetime.datetime.now().hour and alarmMinute == datetime.datetime.now().minute and rem_date == datetime.datetime.now().day and rem_month == datetime.datetime.now().month and rem_year == datetime.datetime.now().year:
toaster = win10toast.ToastNotifier()
notification_message = toaster.show_toast("Pycharm", Remainder_message, duration=10)
return notification_message
def news_scrap():
url = 'https://www.indiatoday.in/top-stories'
article = Article(url)
article.download()
article.parse()
nltk.download('punkt')
article.nlp()
return article.text
def sport_score():
import sports
matches = sports.all_matches()
match_invoked = record_audio("Enter the game you want to search")
if match_invoked == 'Cricket':
cricket = matches['cricket']
elif match_invoked == 'Football':
cricket = matches['football']
else:
cricket = "no matches found"
return cricket
def trans():
trans = Translator()
text = record_audio("Specify the sentence or word to be translated:")
source = record_audio("From Languages:")
source = source.lower()
source = source[0:2]
desti = record_audio("To Languages:")
desti = desti.lower()
desti = desti[0:2]
t = trans.translate(
text, src=source, dest=desti
)
return t.text
def scrap():
search = record_audio("Enter the word")
url = f"https://en.wikipedia.org/wiki/{search}"
r = requests.get(url)
soup = BeautifulSoup(r.text, "html.parser")
text = ""
for paragraph in soup.find_all('p'):
text += paragraph.text
text = re.sub(r'\[[0-9]*\]', ' ', text)
text = re.sub(r'\s+', ' ', text)
text = re.sub(r'\d', ' ', text)
text = re.sub(r'\s+', ' ', text)
sentences = nltk.sent_tokenize(text)
return (sentences[0],sentences[1])
def alarm():
time = record_audio("Enter the Time in the format HH:MM")
time = time.split(":")
alarmHour = int(time[0])
alarmMinute = int(time[1])
timings_module = str(input("Mention PM or AM"))
timings_module = timings_module.lower()
if timings_module == "pm":
alarmHour = alarmHour + 12
while True:
if alarmHour == datetime.datetime.now().hour and alarmMinute == datetime.datetime.now().minute:
from playsound import playsound
alarm = playsound('C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/the-purge-siren-ringtone.mp3')
return alarm
def melody():
from playsound import playsound
melody = playsound('C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/nature-alarm-sounds.mp3')
return melody
def weather_manager():
place = record_audio("Enter the name of place")
search = f"Weather in {place}"
url = f"https://www.google.com/search?&q={search}"
r = requests.get(url)
soup = BeautifulSoup(r.text, "html.parser")
update = soup.find("div", class_="BNeawe").text
weather_report = "The current temperature in {0} is {1}".format(place, update)
return weather_report
responses = {
"Hey Alex": ["Your bot is activating..."," Bot is Launcing 3 2 1"],
"Good Morning": ["Good Morning have a great day", "great day ahead", "have a wonderful day", "Good Morning"],
"Hi": ["Hi", "Hello", "Hola", "Hi there", "what's special today"],
"Default": ["I can't get you", "sorry one more time", "Sorry! again"],
"Who Created You": ["I was developed by Anandatirtha", "By Anandatirtha", "I was developed by Anandatirtha as a demo bot"],
"What Is Your Name": ["My name is {0}".format(bot_name), "Call me {0}".format(bot_name)],
"Good Afternoon": ["Good Afternoon", "Good Afternoon after your great meal", "Good Afternoon don't forget to check notifications"],
"Good Night": ["Good Night", "Good Night !! Sweet dreams", "Good Night we will meet Next day"],
"What Is Today Date": [date_and_time()],
"What Is The Month": [month()],
"What Is The Time Now": [ time.ctime()],
"Thank You": ["Welcome", "It's nice to hear from you"],
"When Is Your Birthday": ["It's on June 2nd 2020", "It's on June 2nd 2020 at Rag labs"],
"Happy Birthday Rag": ["Thank you for your wishes","Thak you so much for thinking of me", "Thanks for making me feel special on my birthday",
"I can't tell you how I enjoyed hearing from you"],
"I Feel Stressed": ["Here are some tips to get rid of stress:\n 1) Avoid Caffine and Alcohol \n 2) Get more sleep \n 3)Talk to someone who cares you",
"Here are few tips to get rid of stress:\n 1) Listen some melody songs \n 2) Exercise regularly \n 3) Get enough sleep and rest",
"Follow these tips:\n 1) Make time for hobbies\n 2) Avoid using Mobile Phone \n 3) Get advise from mental health professional\n "
"4) Be positive"],
"Feels Stressed": ["Here are some tips to get rid of stress:\n 1) Avoid Caffine and Alcohol \n 2) Get more sleep \n 3)Talk to someone who cares you",
"Here are few tips to get rid of stress:\n 1) Listen some melody songs \n 2) Exercise regularly \n 3) Get enough sleep and rest",
"Follow these tips:\n 1) Make time for hobbies\n 2) Avoid using Mobile Phone \n 3) Get advise from mental health professional\n "
"4) Be positive"],
"How To Relieve Stress": ["Here are some tips to get rid of stress:\n 1) Avoid Caffine and Alcohol \n 2) Get more sleep \n 3)Talk to someone who cares you",
"Here are few tips to get rid of stress:\n 1) Listen some melody songs \n 2) Exercise regularly \n 3) Get enough sleep and rest",
"Follow these tips:\n 1) Make time for hobbies\n 2) Avoid using Mobile Phone \n 3) Get advise from mental health professional\n "
"4) Be positive"],
"I Feel Bored": ["Here Some Melody songs", "I tired to play music but vain", "Sleep well"],
# Medical field questions
"Cold": ["The common cold is medically referred to as a viral upper respiratory tract infection. "
"Symptoms of the common cold may include cough, sore throat, low-grade fever, nasal congestion, runny nose, and sneezing."],
"I Have Cold": ["Sad to har from you", "Please, take rest from you", "Properly take medicines",
"Consult doctor before it becomes complicated"],
"Symptoms For Cold": ["Here are results \n 1)Runny nose \n 2)Sore throat \n 3)Cough \n 4)Congestion \n 5)Body Achnes \n 6)Sneezing \n 7) Fever"],
"How To Prevent From Cold": ["Here are some Prevention methods \n 1. Wash your hands properly \n 2. Disinfect your stuff \n 3. Avoid touching your eyes,nose and mouth \n 4. Stay away"],
"Symptoms For Fever": ["1)Sweating 2)Headaches 3)Muscle aches 4) Loss of appetite 5)Dehydration"],
"Symptoms For Throat Pain": ["1) Scratchy sensation in the throat \n 2)Difficulty in Swallowing \n 3)Sore"],
"Symptoms For Acidity": ["1)Bloating \n 2) Burping \n 3)Dry Cough \n 4)Sore throat"],
#Political questions
"The 11Th President Of India": ["<NAME>"],
"Member Of Rajya Sabha": ["Selected by elected members of Legislative Assembly"],
"Current Prime Minister of India":["<NAME>"],
"Chief Minister Of Andhra Pradesh": ["<NAME>"],
"Chief Minister Of Arunachal Pradesh": ["<NAME>"],
"Chief Minister Of Assam": ["<NAME>"],
"Chief Minister Of Bihar": ["<NAME>"],
"Chief Minister Of Chhattisgarh": ["<NAME>"],
"Chief Minister Of Delhi": ["<NAME>"],
"Chief Minister Of Goa": ["<NAME>"],
"Chief Minister Of Gujarat": ["<NAME>"],
"Chief Minister Of Haryana": ["<NAME>"],
"Chief Minister Of Himachal Pradesh": ["<NAME>"],
"Chief Minister Of Jammu and Kashmir": ["President's rule"],
"Chief Minister Of Jharkhand": ["<NAME>"],
"Chief Minister Of Karnataka": ["<NAME>"],
"Chief Minister Of Kerala": ["<NAME>"],
"Chief Minister Of Madhya Pradesh": ["<NAME>"],
"Chief Minister Of Maharashtra": ["<NAME>"],
"Chief Minister Of Manipur": ["<NAME>"],
"Chief Minister Of Meghalaya": ["<NAME>"],
"Chief Minister Of Mizoram": ["Zoramthanga"],
"Chief Minister Of Nagaland": ["<NAME>"],
"Chief Minister Of Odisha": ["<NAME>"],
"Chief Minister Of Puducherry": ["<NAME>"],
"Chief Minister Of Punjab": ["<NAME>"],
"Chief Minister Of Rajasthan": ["<NAME>"],
"Chief Minister Of Sikkim": ["<NAME>"],
"Chief Minister Of Tamil Nadu": ["<NAME>"],
"Chief Minister Of Telangana": ["<NAME>"],
"Chief Minister Of Tripura": ["<NAME>"],
"Chief Minister Of Uttar Pradesh": ["<NAME>"],
"Chief Minister Of Uttarakhand": ["<NAME>"],
"Chief Minister Of West Bengal": ["<NAME>"],
"Defence Minster Of India": ["<NAME>"],
"Ministry Of Home Affairs": ["<NAME>"],
#capital of States in India
"Capital Of Tripura": ["Agartala"],
"Capital Of Rajasthan": ["Jaipur"],
"Capital Of Sikkim": ["Gangtok"],
"Capital Of Arunachal Pradesh": ["Itanagar"],
"Capital Of Maharasthtra": ["Mumbai"],
"Capital Of Mizoram": ["Aizawl"],
"Capital Of Chhattisgarh": ["Raipur"],
"Capital Of Telangana": [" Hyderabad"],
"Capital Of Assam": ["Dispur"],
"Capital Of Uttar Pradesh": ["Lucknow"],
"Capital Of Himachal Pradesh": ["Shimla"],
"Capital Of Gujarat": ["Gandhinagar"],
"Capital Of Bihar": ["Patna"],
"Capital Of Haryana": ["Chandigarh"],
"Capital Of Jammu & Kashmir": [" Srinagar & Jammu"],
"Capital Of Uttaranchal": ["Dehradun"],
"Capital Of Nagaland": ["Kohima"],
"Capital Of Tamil Nadu": ["Chennai"],
"Capital Of Meghalaya": ["Shillong"],
#national games
"What Is The National Game Of Bangladesh": ["Kabaddi"],
"What Is The National Game Of Argentina": ["Pato"],
"What Is The National Game Of United States": ["Baseball"],
"What Is The National Game Of Afghanistan": ["Buzkashi"],
"What Is The National Game Of Bhutan": [" Archery"],
"What Is The National Game Of Sri Lanka": ["Volley ball"],
"What Is The National Game Of Turkey": ["Oil Wrestling"],
"What Is The National Game Of India": [" Field Hockey"],
"What Is The National Game Of England": ["Cricket"],
"What Is The National Game Of Scotland": ["Golf"],
"What Is The National Game Of Iran": ["Wrestling"],
"What Is The National Game Of Hungary": [" Water Polo"],
"What Is The National Game Of Cuba": ["Baseball"],
"What Is The National Game Of Pakistan": ["Field Hockey"],
"What Is The National Game Of Brazil": ["Football"],
"What Is The National Game Of Russia": ["Bandy"],
"What Is The National Game Of Canada in Summer ": ["Lacrosse"],
"What Is The National Game Of Canada in Winter": ["Ice Hockey"],
"What Is The National Game Of Spain": ["Bull Fighting"],
}
def record_audio(ask=False):
r = sr.Recognizer()
with sr.Microphone() as source:
if ask:
alex_speak(ask)
audio = r.listen(source)
data = ''
try:
data = r.recognize_google(audio)
except sr.UnknownValueError():
alex_speak("Error")
except sr.RequestError():
alex_speak("Error 1")
return data
def alex_speak(audio_string):
tts = gTTS(text=audio_string, lang='en')
r = random.randint(1, 10000000)
audio_file = 'audio-' +str(r)+'.mp4'
tts.save(audio_file)
print(audio_string)
playsound.playsound(audio_file)
os.remove(audio_file)
alex_speak("What can I do for you")
while True:
message = record_audio()
send_message(message.title())
|
[
"playsound.playsound",
"os.remove",
"speech_recognition.UnknownValueError",
"time.ctime",
"speech_recognition.RequestError",
"sports.all_matches",
"playsound.playsound.playsound",
"nltk.download",
"googletrans.Translator",
"random.randint",
"time.split",
"gtts.gTTS",
"webbrowser.get",
"speech_recognition.Microphone",
"requests.get",
"datetime.datetime.now",
"re.sub",
"newspaper.Article",
"datetime.datetime.today",
"bs4.BeautifulSoup",
"win10toast.ToastNotifier",
"nltk.sent_tokenize",
"random.choice",
"wikipedia.summary",
"speech_recognition.Recognizer"
] |
[((2039, 2062), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2060, 2062), False, 'import datetime\n'), ((2076, 2101), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (2099, 2101), False, 'import datetime\n'), ((2757, 2780), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2778, 2780), False, 'import datetime\n'), ((3043, 3055), 'time.ctime', 'time.ctime', ([], {}), '()\n', (3053, 3055), False, 'import time\n'), ((4127, 4142), 'time.split', 'time.split', (['""":"""'], {}), "(':')\n", (4137, 4142), False, 'import time\n'), ((4995, 5007), 'newspaper.Article', 'Article', (['url'], {}), '(url)\n', (5002, 5007), False, 'from newspaper import Article\n'), ((5060, 5082), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (5073, 5082), False, 'import nltk\n'), ((5187, 5207), 'sports.all_matches', 'sports.all_matches', ([], {}), '()\n', (5205, 5207), False, 'import sports\n'), ((5531, 5543), 'googletrans.Translator', 'Translator', ([], {}), '()\n', (5541, 5543), False, 'from googletrans import Translator\n'), ((6033, 6050), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (6045, 6050), False, 'import requests\n'), ((6063, 6099), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (6076, 6099), False, 'from bs4 import BeautifulSoup\n'), ((6201, 6234), 're.sub', 're.sub', (['"""\\\\[[0-9]*\\\\]"""', '""" """', 'text'], {}), "('\\\\[[0-9]*\\\\]', ' ', text)\n", (6207, 6234), False, 'import re\n'), ((6246, 6271), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'text'], {}), "('\\\\s+', ' ', text)\n", (6252, 6271), False, 'import re\n'), ((6284, 6308), 're.sub', 're.sub', (['"""\\\\d"""', '""" """', 'text'], {}), "('\\\\d', ' ', text)\n", (6290, 6308), False, 'import re\n'), ((6321, 6346), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'text'], {}), "('\\\\s+', ' ', text)\n", (6327, 6346), False, 'import re\n'), ((6364, 6388), 'nltk.sent_tokenize', 'nltk.sent_tokenize', (['text'], {}), '(text)\n', (6382, 6388), False, 'import nltk\n'), ((6520, 6535), 'time.split', 'time.split', (['""":"""'], {}), "(':')\n", (6530, 6535), False, 'import time\n'), ((7140, 7236), 'playsound.playsound', 'playsound', (['"""C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/nature-alarm-sounds.mp3"""'], {}), "(\n 'C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/nature-alarm-sounds.mp3'\n )\n", (7149, 7236), False, 'from playsound import playsound\n'), ((7426, 7443), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (7438, 7443), False, 'import requests\n'), ((7456, 7492), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (7469, 7492), False, 'from bs4 import BeautifulSoup\n'), ((15649, 15664), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (15662, 15664), True, 'import speech_recognition as sr\n'), ((16075, 16109), 'gtts.gTTS', 'gTTS', ([], {'text': 'audio_string', 'lang': '"""en"""'}), "(text=audio_string, lang='en')\n", (16079, 16109), False, 'from gtts import gTTS\n'), ((16119, 16146), 'random.randint', 'random.randint', (['(1)', '(10000000)'], {}), '(1, 10000000)\n', (16133, 16146), False, 'import random\n'), ((16245, 16276), 'playsound.playsound.playsound', 'playsound.playsound', (['audio_file'], {}), '(audio_file)\n', (16264, 16276), False, 'from playsound import playsound\n'), ((16282, 16303), 'os.remove', 'os.remove', (['audio_file'], {}), '(audio_file)\n', (16291, 16303), False, 'import os\n'), ((617, 650), 'random.choice', 'random.choice', (['responses[message]'], {}), '(responses[message])\n', (630, 650), False, 'import random\n'), ((8586, 8598), 'time.ctime', 'time.ctime', ([], {}), '()\n', (8596, 8598), False, 'import time\n'), ((15677, 15692), 'speech_recognition.Microphone', 'sr.Microphone', ([], {}), '()\n', (15690, 15692), True, 'import speech_recognition as sr\n'), ((4740, 4766), 'win10toast.ToastNotifier', 'win10toast.ToastNotifier', ([], {}), '()\n', (4764, 4766), False, 'import win10toast\n'), ((6952, 7053), 'playsound.playsound', 'playsound', (['"""C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/the-purge-siren-ringtone.mp3"""'], {}), "(\n 'C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/the-purge-siren-ringtone.mp3'\n )\n", (6961, 7053), False, 'from playsound import playsound\n'), ((15881, 15903), 'speech_recognition.UnknownValueError', 'sr.UnknownValueError', ([], {}), '()\n', (15901, 15903), True, 'import speech_recognition as sr\n'), ((15954, 15971), 'speech_recognition.RequestError', 'sr.RequestError', ([], {}), '()\n', (15969, 15971), True, 'import speech_recognition as sr\n'), ((810, 826), 'webbrowser.get', 'webbrowser.get', ([], {}), '()\n', (824, 826), False, 'import webbrowser\n'), ((4501, 4524), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4522, 4524), False, 'import datetime\n'), ((4549, 4572), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4570, 4572), False, 'import datetime\n'), ((4596, 4619), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4617, 4619), False, 'import datetime\n'), ((4641, 4664), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4662, 4664), False, 'import datetime\n'), ((4687, 4710), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4708, 4710), False, 'import datetime\n'), ((6806, 6829), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6827, 6829), False, 'import datetime\n'), ((6854, 6877), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6875, 6877), False, 'import datetime\n'), ((1012, 1028), 'webbrowser.get', 'webbrowser.get', ([], {}), '()\n', (1026, 1028), False, 'import webbrowser\n'), ((1256, 1294), 'wikipedia.summary', 'wikipedia.summary', (['person'], {'sentences': '(2)'}), '(person, sentences=2)\n', (1273, 1294), False, 'import wikipedia\n'), ((1940, 1975), 'random.choice', 'random.choice', (["responses['Default']"], {}), "(responses['Default'])\n", (1953, 1975), False, 'import random\n')]
|
#!/usr/bin/env python
import rospy
import tf
import time
import serial
import struct
from geometry_msgs.msg import PointStamped
from ros_decawave.msg import Tag, Anchor, AnchorArray, Acc
class DecawaveDriver(object):
""" docstring for DecawaveDriver """
def __init__(self):
rospy.init_node('decawave_driver', anonymous=False)
# Getting Serial Parameters
self.port_ = rospy.get_param('port', '/dev/ttyACM0')
self.baudrate_ = int(rospy.get_param('baudrate', '115200'))
self.tf_publisher_ = rospy.get_param('tf_publisher', 'True')
self.rate_ = int(rospy.get_param('rate', '10'))
# Initiate Serial
self.ser = serial.Serial(self.port_, self.baudrate_, timeout=0.1)
rospy.loginfo("\33[96mConnected to %s at %i\33[0m", self.ser.portstr, self.baudrate_)
self.get_uart_mode()
self.switch_uart_mode()
#self.get_tag_status()
#self.get_tag_version()
self.anchors = AnchorArray()
self.anchors.anchors = []
self.tag = Tag()
self.accel = Acc()
def get_uart_mode(self):
""" Check UART Mode Used """
rospy.loginfo("\33[96mChecking which UART mode is the gateway...\33[0m")
self.mode_ = 'UNKNOWN'
self.ser.flushInput()
self.ser.write(b'\r') # Test Mode
time.sleep(0.1)
while(self.ser.inWaiting() == 0):
pass
cc = self.ser.readline()
if cc == '@\x01\x01': # GENERIC MODE
rospy.loginfo("\33[96mDevice is on GENERIC MODE! It must to be changed to SHELL MODE!\33[0m")
self.mode_ = "GENERIC"
else: # SHELL MODE
rospy.loginfo("\33[96mDevice is on SHELL MODE! Ok!\33[0m")
self.mode_ = "SHELL"
return self.mode_
def switch_uart_mode(self):
self.ser.flushInput()
if self.mode_ == "GENERIC":
rospy.loginfo("\33[96mChanging UART mode to SHELL MODE...\33[0m")
self.ser.write(b'\r\r') # Go to Shell Mode
while(self.ser.inWaiting()==0):
pass
time.sleep(1.0)
self.ser.flushInput()
rospy.loginfo("\33[96m%s\33[0m", self.ser.readline().replace('\n', ''))
elif self.mode_ == "UNKNOWN":
rospy.logerr("%s", "Unknown Mode Detected! Please reset the device and try again!")
def get_tag_version(self):
self.ser.flushInput()
self.ser.write(b'\x15\x00') # Status
while(self.ser.inWaiting() < 21):
pass
version = self.ser.read(21)
data_ = struct.unpack('<BBBBBLBBLBBL', bytearray(version))
rospy.loginfo("\33[96m--------------------------------\33[0m")
rospy.loginfo("\33[96mFirmware Version:0x"+format(data_[5], '04X')+"\33[0m")
rospy.loginfo("\33[96mConfiguration Version:0x"+format(data_[8], '04X')+"\33[0m")
rospy.loginfo("\33[96mHardware Version:0x"+format(data_[11], '04X')+"\33[0m")
rospy.loginfo("\33[96m--------------------------------\33[0m")
#def get_tag_status(self):
# self.ser.flushInput()
# self.ser.write(b'\x32\x00') # Status
# while(self.ser.inWaiting()==0):
# pass
# status = self.ser.readline()
# data_ = struct.unpack('<BBBBBB', bytearray(status))
# if data_[0] != 64 and data_[2] != 0:
# rospy.logwarn("Get Status Failed! Packet does not match!")
# print("%s", data_)
# if data_[5] == 3:
# rospy.loginfo("\33[96mTag is CONNECTED to a UWB network and LOCATION data are READY!\33[0m")
# elif data_[5] == 2:
# rospy.logwarn("Tag is CONNECTED to a UWB network but LOCATION data are NOT READY!")
# elif data_[5] == 1:
# rospy.logwarn("Tag is NOT CONNECTED to a UWB network but LOCATION data are READY!")
# elif data_[5] == 0:
# rospy.logwarn("Tag is NOT CONNECTED to a UWB network and LOCATION data are NOT READY!")
def get_tag_acc(self):
""" Read Acc Value: The values are raw values. So to convert them to g you first have to divide the
values by 2^6 ( as it is shifted) and then multiply it into 0.004 (assuming you are using the
+-2g scale). With regards to the getting the accelerometer readings to the UART, I have written
specific functions to read the data . I could put the github link up if you want."""
self.ser.flushInput()
self.ser.write(b'av\r') # Test Mode
while(self.ser.inWaiting() == 0):
pass
cc = ''
t = rospy.Time.now()
while not 'acc' in cc:
cc = self.ser.readline()
if rospy.Time.now() - t > rospy.Duration(0.5):
rospy.logwarn("Could not get accel data!")
cc = cc.replace('\r\n', '').replace('acc: ', '').split(',')
if len(cc) == 3:
self.accel.x = float(int(cc[0].replace('x = ', ''))>>6) * 0.04
self.accel.y = float(int(cc[1].replace('y = ', ''))>>6) * 0.04
self.accel.z = float(int(cc[2].replace('z = ', ''))>>6) * 0.04
self.accel.header.frame_id = 'tag'
self.accel.header.stamp = rospy.Time.now()
def get_tag_location(self):
self.ser.flushInput()
self.ser.write(b'lec\r') # Test Mode
while(self.ser.inWaiting() == 0):
pass
cc = ''
t = rospy.Time.now()
while not 'DIST' in cc:
cc = self.ser.readline()
print (cc)
if rospy.Time.now() - t > rospy.Duration(0.5):
rospy.logwarn("Could not get tag data!")
self.ser.flushInput()
self.ser.write(b'\r') # Test Mode
#cc = ''
#t = rospy.Time.now()
#while not 'acc' in cc:
# cc = self.ser.readline()
# if rospy.Time.now() - t > rospy.Duration(0.5):
# rospy.logwarn("Could not get accel data!")
#cc = cc.replace('\r\n', '').replace('acc: ', '').split(',')
#if len(cc) == 3:
# self.accel.x = float(int(cc[0].replace('x = ', ''))/64.0) * 0.04
# self.accel.y = float(int(cc[1].replace('y = ', ''))/64.0) * 0.04
# self.accel.z = float(int(cc[2].replace('z = ', ''))/64.0) * 0.04
# self.accel.header.frame_id = 'tag'
# self.accel.header.stamp = rospy.Time.now()
def tf_callback(self, timer):
if self.tf_publisher_ == 'True':
self.br.sendTransform((self.tag.x, self.tag.y, self.tag.z),
tf.transformations.quaternion_from_euler(0, 0, 0),
rospy.Time.now(),
"tag",
"world")
for anchor in self.anchors.anchors:
self.br.sendTransform((anchor.x, anchor.y, anchor.z),
tf.transformations.quaternion_from_euler(0, 0, 0),
rospy.Time.now(),
anchor.header.frame_id,
"world")
def run(self):
self.rate = rospy.Rate(self.rate_)
rospy.loginfo("\33[96mInitiating Driver...\33[0m")
self.tag_pub_ = rospy.Publisher('pose', Tag, queue_size=1)
self.anchors_pub_ = rospy.Publisher('status', AnchorArray, queue_size=1)
self.acc_pub_ = rospy.Publisher('accelerometer', Acc, queue_size=1)
self.timer = rospy.Timer(rospy.Duration(0.2), self.tf_callback)
self.br = tf.TransformBroadcaster()
while not rospy.is_shutdown():
self.get_tag_acc()
self.acc_pub_.publish(self.accel)
#self.get_tag_location()
#self.tag.header.stamp = rospy.Time.now()
#self.tag_pub_.publish(self.tag)
#self.anchors.header.stamp = rospy.Time.now()
#self.anchors_pub_.publish(self.anchors)
self.rate.sleep()
# Main function
if __name__ == '__main__':
try:
dd = DecawaveDriver()
dd.run()
except rospy.ROSInterruptException:
rospy.loginfo("[Decawave Driver]: Closed!")
|
[
"serial.Serial",
"rospy.logwarn",
"rospy.logerr",
"ros_decawave.msg.AnchorArray",
"rospy.Time.now",
"ros_decawave.msg.Acc",
"tf.TransformBroadcaster",
"rospy.Publisher",
"rospy.Rate",
"time.sleep",
"rospy.get_param",
"rospy.loginfo",
"rospy.is_shutdown",
"rospy.init_node",
"tf.transformations.quaternion_from_euler",
"ros_decawave.msg.Tag",
"rospy.Duration"
] |
[((295, 346), 'rospy.init_node', 'rospy.init_node', (['"""decawave_driver"""'], {'anonymous': '(False)'}), "('decawave_driver', anonymous=False)\n", (310, 346), False, 'import rospy\n'), ((404, 443), 'rospy.get_param', 'rospy.get_param', (['"""port"""', '"""/dev/ttyACM0"""'], {}), "('port', '/dev/ttyACM0')\n", (419, 443), False, 'import rospy\n'), ((541, 580), 'rospy.get_param', 'rospy.get_param', (['"""tf_publisher"""', '"""True"""'], {}), "('tf_publisher', 'True')\n", (556, 580), False, 'import rospy\n'), ((682, 736), 'serial.Serial', 'serial.Serial', (['self.port_', 'self.baudrate_'], {'timeout': '(0.1)'}), '(self.port_, self.baudrate_, timeout=0.1)\n', (695, 736), False, 'import serial\n'), ((745, 836), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96mConnected to %s at %i\x1b[0m"""', 'self.ser.portstr', 'self.baudrate_'], {}), "('\\x1b[96mConnected to %s at %i\\x1b[0m', self.ser.portstr,\n self.baudrate_)\n", (758, 836), False, 'import rospy\n'), ((978, 991), 'ros_decawave.msg.AnchorArray', 'AnchorArray', ([], {}), '()\n', (989, 991), False, 'from ros_decawave.msg import Tag, Anchor, AnchorArray, Acc\n'), ((1045, 1050), 'ros_decawave.msg.Tag', 'Tag', ([], {}), '()\n', (1048, 1050), False, 'from ros_decawave.msg import Tag, Anchor, AnchorArray, Acc\n'), ((1072, 1077), 'ros_decawave.msg.Acc', 'Acc', ([], {}), '()\n', (1075, 1077), False, 'from ros_decawave.msg import Tag, Anchor, AnchorArray, Acc\n'), ((1155, 1229), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96mChecking which UART mode is the gateway...\x1b[0m"""'], {}), "('\\x1b[96mChecking which UART mode is the gateway...\\x1b[0m')\n", (1168, 1229), False, 'import rospy\n'), ((1339, 1354), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1349, 1354), False, 'import time\n'), ((2669, 2733), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96m--------------------------------\x1b[0m"""'], {}), "('\\x1b[96m--------------------------------\\x1b[0m')\n", (2682, 2733), False, 'import rospy\n'), ((3001, 3065), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96m--------------------------------\x1b[0m"""'], {}), "('\\x1b[96m--------------------------------\\x1b[0m')\n", (3014, 3065), False, 'import rospy\n'), ((4595, 4611), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (4609, 4611), False, 'import rospy\n'), ((5414, 5430), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (5428, 5430), False, 'import rospy\n'), ((7076, 7098), 'rospy.Rate', 'rospy.Rate', (['self.rate_'], {}), '(self.rate_)\n', (7086, 7098), False, 'import rospy\n'), ((7107, 7159), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96mInitiating Driver...\x1b[0m"""'], {}), "('\\x1b[96mInitiating Driver...\\x1b[0m')\n", (7120, 7159), False, 'import rospy\n'), ((7182, 7224), 'rospy.Publisher', 'rospy.Publisher', (['"""pose"""', 'Tag'], {'queue_size': '(1)'}), "('pose', Tag, queue_size=1)\n", (7197, 7224), False, 'import rospy\n'), ((7253, 7305), 'rospy.Publisher', 'rospy.Publisher', (['"""status"""', 'AnchorArray'], {'queue_size': '(1)'}), "('status', AnchorArray, queue_size=1)\n", (7268, 7305), False, 'import rospy\n'), ((7330, 7381), 'rospy.Publisher', 'rospy.Publisher', (['"""accelerometer"""', 'Acc'], {'queue_size': '(1)'}), "('accelerometer', Acc, queue_size=1)\n", (7345, 7381), False, 'import rospy\n'), ((7472, 7497), 'tf.TransformBroadcaster', 'tf.TransformBroadcaster', ([], {}), '()\n', (7495, 7497), False, 'import tf\n'), ((473, 510), 'rospy.get_param', 'rospy.get_param', (['"""baudrate"""', '"""115200"""'], {}), "('baudrate', '115200')\n", (488, 510), False, 'import rospy\n'), ((606, 635), 'rospy.get_param', 'rospy.get_param', (['"""rate"""', '"""10"""'], {}), "('rate', '10')\n", (621, 635), False, 'import rospy\n'), ((1504, 1610), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96mDevice is on GENERIC MODE! It must to be changed to SHELL MODE!\x1b[0m"""'], {}), "(\n '\\x1b[96mDevice is on GENERIC MODE! It must to be changed to SHELL MODE!\\x1b[0m'\n )\n", (1517, 1610), False, 'import rospy\n'), ((1673, 1733), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96mDevice is on SHELL MODE! Ok!\x1b[0m"""'], {}), "('\\x1b[96mDevice is on SHELL MODE! Ok!\\x1b[0m')\n", (1686, 1733), False, 'import rospy\n'), ((1916, 1983), 'rospy.loginfo', 'rospy.loginfo', (['"""\x1b[96mChanging UART mode to SHELL MODE...\x1b[0m"""'], {}), "('\\x1b[96mChanging UART mode to SHELL MODE...\\x1b[0m')\n", (1929, 1983), False, 'import rospy\n'), ((2123, 2138), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (2133, 2138), False, 'import time\n'), ((5201, 5217), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (5215, 5217), False, 'import rospy\n'), ((7415, 7434), 'rospy.Duration', 'rospy.Duration', (['(0.2)'], {}), '(0.2)\n', (7429, 7434), False, 'import rospy\n'), ((7516, 7535), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (7533, 7535), False, 'import rospy\n'), ((8039, 8082), 'rospy.loginfo', 'rospy.loginfo', (['"""[Decawave Driver]: Closed!"""'], {}), "('[Decawave Driver]: Closed!')\n", (8052, 8082), False, 'import rospy\n'), ((2307, 2394), 'rospy.logerr', 'rospy.logerr', (['"""%s"""', '"""Unknown Mode Detected! Please reset the device and try again!"""'], {}), "('%s',\n 'Unknown Mode Detected! Please reset the device and try again!')\n", (2319, 2394), False, 'import rospy\n'), ((4718, 4737), 'rospy.Duration', 'rospy.Duration', (['(0.5)'], {}), '(0.5)\n', (4732, 4737), False, 'import rospy\n'), ((4755, 4797), 'rospy.logwarn', 'rospy.logwarn', (['"""Could not get accel data!"""'], {}), "('Could not get accel data!')\n", (4768, 4797), False, 'import rospy\n'), ((5561, 5580), 'rospy.Duration', 'rospy.Duration', (['(0.5)'], {}), '(0.5)\n', (5575, 5580), False, 'import rospy\n'), ((5598, 5638), 'rospy.logwarn', 'rospy.logwarn', (['"""Could not get tag data!"""'], {}), "('Could not get tag data!')\n", (5611, 5638), False, 'import rospy\n'), ((6578, 6627), 'tf.transformations.quaternion_from_euler', 'tf.transformations.quaternion_from_euler', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (6618, 6627), False, 'import tf\n'), ((6653, 6669), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (6667, 6669), False, 'import rospy\n'), ((4695, 4711), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (4709, 4711), False, 'import rospy\n'), ((5538, 5554), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (5552, 5554), False, 'import rospy\n'), ((6873, 6922), 'tf.transformations.quaternion_from_euler', 'tf.transformations.quaternion_from_euler', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (6913, 6922), False, 'import tf\n'), ((6944, 6960), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (6958, 6960), False, 'import rospy\n')]
|
from django.contrib import admin
from . import models
from .models import Notification
from django.contrib.contenttypes.admin import GenericTabularInline
# Register your models here.
class NotificationAdmin(GenericTabularInline):
model = models.Notification
extra = 0
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("to")
@admin.register(models.Template)
class TemplateAdmin(admin.ModelAdmin):
list_display = ("name", "description", "code", "title", "text", "kwargs")
list_filter = ("name", "code")
ordering = ("name",)
@admin.register(models.DingDingMessage)
class DingDingMessageAdmin(admin.ModelAdmin):
list_display = [
"title",
"content",
"at_mobiles",
"is_at_all",
"extra",
"created_at",
]
list_filter = ("title", "created_at")
inlines = (NotificationAdmin,)
ordering = ("-id",)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("notify")
@admin.register(models.EmailMessage)
class EmailMessageAdmin(admin.ModelAdmin):
list_display = [
"subject",
"sender",
"receivers",
"cc",
"content_subtype",
"content",
"created_at",
]
list_filter = ("subject", "content_subtype", "created_at")
inlines = (NotificationAdmin,)
ordering = ("-id",)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("notify")
@admin.register(models.WebsocketMessage)
class WebsocketMessageAdmin(admin.ModelAdmin):
list_display = ["title", "content", "msgtype", "groups", "created_at"]
inlines = (NotificationAdmin,)
ordering = ("-id",)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("notify")
|
[
"django.contrib.admin.register"
] |
[((405, 436), 'django.contrib.admin.register', 'admin.register', (['models.Template'], {}), '(models.Template)\n', (419, 436), False, 'from django.contrib import admin\n'), ((617, 655), 'django.contrib.admin.register', 'admin.register', (['models.DingDingMessage'], {}), '(models.DingDingMessage)\n', (631, 655), False, 'from django.contrib import admin\n'), ((1077, 1112), 'django.contrib.admin.register', 'admin.register', (['models.EmailMessage'], {}), '(models.EmailMessage)\n', (1091, 1112), False, 'from django.contrib import admin\n'), ((1574, 1613), 'django.contrib.admin.register', 'admin.register', (['models.WebsocketMessage'], {}), '(models.WebsocketMessage)\n', (1588, 1613), False, 'from django.contrib import admin\n')]
|
import numpy as np
import torch
import math
import ray
import copy
import networks
import global_config
def play_one_game(model, env_func, config, temperature, save=False, filename = ''):
game_history = GameHistory()
game = env_func(max_steps = config.max_moves, window_size = config.observation_shape[1])
observation = game.reset()
game_history.action_history.append(0)
game_history.observation_history.append(observation)
game_history.reward_history.append(0)
done = False
with torch.no_grad():
while (not done and len(game_history.action_history) <= config.max_moves):
root = MCTS(config).run(model, observation, game.actions,
False if temperature == 0 else True)
action = select_action(root, temperature
if len(game_history.action_history) < config.temperature_threshold else 0)
observation, reward, done, _ = game.step(action)
game_history.store_search_statistics(root, [i for i in range(config.action_space_size)])
game_history.action_history.append(action)
game_history.observation_history.append(observation)
game_history.reward_history.append(reward)
if save:
game.plot_toolpath(save = True, folder = config.logdir, filename = filename)
game.close()
return game_history
def select_action(node, temperature):
visit_counts = np.array(
[child.visit_count for child in node.children.values()]
)
actions = [action for action in node.children.keys()]
if temperature == 0:
action = actions[np.argmax(visit_counts)]
elif temperature == float("inf"):
action = np.random.choice(actions)
else:
visit_count_distribution = visit_counts ** (1 / temperature)
visit_count_distribution = visit_count_distribution / sum(
visit_count_distribution
)
action = np.random.choice(actions, p=visit_count_distribution)
return action
class MCTS:
def __init__(self, config):
self.config = config
def run(self, model, observation, legal_actions, add_exploration_noise):
root = Node(0)
observation = (torch.tensor(observation).float().unsqueeze(0).to(next(model.parameters()).device))
_, reward, policy_logits, hidden_state = model.initial_inference(observation)
reward = reward.item()
root.expand(legal_actions, reward, policy_logits, hidden_state)
if add_exploration_noise:
root.add_exploration_noise(
dirichlet_alpha=self.config.root_dirichlet_alpha,
exploration_fraction=self.config.root_exploration_fraction,
)
min_max_stats = MinMaxStats()
for _ in range(self.config.num_simulations):
node = root
search_path = [node]
while node.expanded():
action, node = self.select_child(node, min_max_stats)
search_path.append(node)
parent = search_path[-2]
value, reward, policy_logits, hidden_state = model.recurrent_inference(
parent.hidden_state,
torch.tensor([[action]]).to(parent.hidden_state.device),
)
value = networks.support_to_scalar(value).item()
reward = reward.item()
node.expand(
[i for i in range(self.config.action_space_size)],
reward,
policy_logits,
hidden_state,
)
self.backpropagate(
search_path, value, min_max_stats
)
return root
def select_child(self, node, min_max_stats):
max_ucb = max(self.ucb_score(node, child, min_max_stats) for action, child in node.children.items())
action = np.random.choice([action for action, child in node.children.items() if self.ucb_score(node, child, min_max_stats) == max_ucb])
return action, node.children[action]
def ucb_score(self, parent, child, min_max_stats):
pb_c = (
math.log(
(parent.visit_count + self.config.pb_c_base + 1) / self.config.pb_c_base
)
+ self.config.pb_c_init
)
pb_c *= math.sqrt(parent.visit_count) / (child.visit_count + 1)
prior_score = pb_c * child.prior
if child.visit_count > 0:
value_score = min_max_stats.normalize(
child.reward + self.config.discount * child.value()
)
else:
value_score = 0
return prior_score + value_score
def backpropagate(self, search_path, value, min_max_stats):
for node in reversed(search_path):
node.value_sum += value #if node.to_play == to_play else -value
node.visit_count += 1
min_max_stats.update(node.reward + self.config.discount * node.value())
value = node.reward + self.config.discount * value
class Node:
def __init__(self, prior):
self.visit_count = 0
self.prior = prior
self.value_sum = 0
self.children = {}
self.hidden_state = None
self.reward = 0
def expanded(self):
return len(self.children) > 0
def value(self):
if self.visit_count == 0:
return 0
return self.value_sum / self.visit_count
def expand(self, actions, reward, policy_logits, hidden_state):
self.reward = reward
self.hidden_state = hidden_state
policy = {}
for a in actions:
try:
policy[a] = 1/sum(torch.exp(policy_logits[0] - policy_logits[0][a]))
except OverflowError:
print("Warning: prior has been approximated")
policy[a] = 0.0
for action, p in policy.items():
self.children[action] = Node(p)
def add_exploration_noise(self, dirichlet_alpha, exploration_fraction):
actions = list(self.children.keys())
noise = np.random.dirichlet([dirichlet_alpha] * len(actions))
frac = exploration_fraction
for a, n in zip(actions, noise):
self.children[a].prior = self.children[a].prior * (1 - frac) + n * frac
class GameHistory:
def __init__(self):
self.observation_history = []
self.action_history = []
self.reward_history = []
self.child_visits = []
self.root_values = []
def store_search_statistics(self, root, action_space):
if root is not None:
sum_visits = sum(child.visit_count for child in root.children.values())
self.child_visits.append([root.children[a].visit_count / sum_visits
if a in root.children else 0 for a in action_space])
self.root_values.append(root.value())
else:
self.root_values.append(None)
class MinMaxStats:
def __init__(self):
self.maximum = -float("inf")
self.minimum = float("inf")
def update(self, value):
self.maximum = max(self.maximum, value)
self.minimum = min(self.minimum, value)
def normalize(self, value):
if self.maximum > self.minimum:
return (value - self.minimum) / (self.maximum - self.minimum)
return value
if global_config.use_ray:
play_one_game = ray.remote(play_one_game)
|
[
"ray.remote",
"torch.tensor",
"math.sqrt",
"numpy.argmax",
"torch.exp",
"numpy.random.choice",
"math.log",
"torch.no_grad",
"networks.support_to_scalar"
] |
[((7362, 7387), 'ray.remote', 'ray.remote', (['play_one_game'], {}), '(play_one_game)\n', (7372, 7387), False, 'import ray\n'), ((518, 533), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (531, 533), False, 'import torch\n'), ((1616, 1639), 'numpy.argmax', 'np.argmax', (['visit_counts'], {}), '(visit_counts)\n', (1625, 1639), True, 'import numpy as np\n'), ((1696, 1721), 'numpy.random.choice', 'np.random.choice', (['actions'], {}), '(actions)\n', (1712, 1721), True, 'import numpy as np\n'), ((1932, 1985), 'numpy.random.choice', 'np.random.choice', (['actions'], {'p': 'visit_count_distribution'}), '(actions, p=visit_count_distribution)\n', (1948, 1985), True, 'import numpy as np\n'), ((4088, 4175), 'math.log', 'math.log', (['((parent.visit_count + self.config.pb_c_base + 1) / self.config.pb_c_base)'], {}), '((parent.visit_count + self.config.pb_c_base + 1) / self.config.\n pb_c_base)\n', (4096, 4175), False, 'import math\n'), ((4263, 4292), 'math.sqrt', 'math.sqrt', (['parent.visit_count'], {}), '(parent.visit_count)\n', (4272, 4292), False, 'import math\n'), ((3270, 3303), 'networks.support_to_scalar', 'networks.support_to_scalar', (['value'], {}), '(value)\n', (3296, 3303), False, 'import networks\n'), ((3179, 3203), 'torch.tensor', 'torch.tensor', (['[[action]]'], {}), '([[action]])\n', (3191, 3203), False, 'import torch\n'), ((5615, 5664), 'torch.exp', 'torch.exp', (['(policy_logits[0] - policy_logits[0][a])'], {}), '(policy_logits[0] - policy_logits[0][a])\n', (5624, 5664), False, 'import torch\n'), ((2204, 2229), 'torch.tensor', 'torch.tensor', (['observation'], {}), '(observation)\n', (2216, 2229), False, 'import torch\n')]
|
import io
import csv
from smtplib import SMTPException
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.conf import settings
from bvc import utils
class Command(BaseCommand):
def handle(self, *args, **options):
csvfile = io.StringIO()
writer = csv.writer(csvfile)
writer.writerow(['Col A', 'Col B',])
email = EmailMessage(
utils.format_mail_subject("Démarrage de l'application - mail test"),
"Test de l'envoi des mails depuis l'application BVC.",
settings.EMAIL_HOST_USER,
[settings.EMAIL_HOST_USER],
[],
)
email.attach('test.csv', csvfile.getvalue(), 'text/csv')
if not email.send():
raise SMTPException()
|
[
"bvc.utils.format_mail_subject",
"io.StringIO",
"smtplib.SMTPException",
"csv.writer"
] |
[((294, 307), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (305, 307), False, 'import io\n'), ((325, 344), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (335, 344), False, 'import csv\n'), ((433, 500), 'bvc.utils.format_mail_subject', 'utils.format_mail_subject', (['"""Démarrage de l\'application - mail test"""'], {}), '("Démarrage de l\'application - mail test")\n', (458, 500), False, 'from bvc import utils\n'), ((785, 800), 'smtplib.SMTPException', 'SMTPException', ([], {}), '()\n', (798, 800), False, 'from smtplib import SMTPException\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Example to show the new marker styles"""
import matplotlib.pyplot as plt
from sajou.plot.lines_mpl import Line2D
fig = plt.figure(figsize=(12, 3))
ax = fig.add_subplot(111)
markers = ['ap', 'an', 'psx', 'rsx', 'es', 'rex', 'rc']
for ix, mark in enumerate(markers):
marker = Line2D([ix], [0], marker=mark, fillstyle='none', color='k')
ax.add_line(marker)
ax.set_xlim(-1, len(markers))
ax.set_ylim(-1, 1)
plt.show()
|
[
"matplotlib.pyplot.figure",
"sajou.plot.lines_mpl.Line2D",
"matplotlib.pyplot.show"
] |
[((169, 196), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (179, 196), True, 'import matplotlib.pyplot as plt\n'), ((463, 473), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (471, 473), True, 'import matplotlib.pyplot as plt\n'), ((329, 388), 'sajou.plot.lines_mpl.Line2D', 'Line2D', (['[ix]', '[0]'], {'marker': 'mark', 'fillstyle': '"""none"""', 'color': '"""k"""'}), "([ix], [0], marker=mark, fillstyle='none', color='k')\n", (335, 388), False, 'from sajou.plot.lines_mpl import Line2D\n')]
|
import numpy
import sys
import scipy
from scipy.signal import find_peaks_cwt
import matplotlib.pyplot as plt
from headbang.params import DEFAULTS
from headbang.util import find_closest
openpose_install_path = "/home/sevagh/thirdparty-repos/openpose"
openpose_dir = openpose_install_path
sys.path.append(openpose_dir + "/build/python/openpose")
import pyopenpose as op
class OpenposeDetector:
undef_coord_default = numpy.nan
object_limit = 3
min_confidence = 0.5
def __init__(
self,
n_frames,
frame_duration,
keypoints=DEFAULTS["pose_keypoints"],
):
config = {}
config["logging_level"] = 3
config["net_resolution"] = "320x320"
config["model_pose"] = "BODY_25"
config["alpha_pose"] = 0.6
config["scale_gap"] = 0.3
config["scale_number"] = 1
config["render_threshold"] = 0.05
config["num_gpu_start"] = 0
config["disable_blending"] = False
config["model_folder"] = openpose_dir + "/models/"
self.opWrapper = op.WrapperPython()
self.opWrapper.configure(config)
self.opWrapper.start()
self.keypoints = [int(i) for i in keypoints.split(",")]
self.n_frames = int(n_frames)
self.all_y_coords = [OpenposeDetector.undef_coord_default] * self.n_frames
self.frame_idx = 0
self.frame_duration = frame_duration
self.total_duration = self.frame_duration * self.n_frames
print("Started OpenposeDetector for keypoints {0}".format(self.keypoints))
def detect_pose(self, image):
datum = op.Datum()
datum.cvInputData = image
ret = self.opWrapper.emplaceAndPop(op.VectorDatum([datum]))
if not ret:
raise ValueError("couldn't emplaceAndPop")
return datum.poseKeypoints, datum.cvOutputData
def process_frame(self, frame):
multiple_detected_poses, outframe = self.detect_pose(frame)
if multiple_detected_poses is not None:
poses_of_interest = []
# collect (x, y) coordinates of the head, median across the first object_limit objects
for detected_poses in multiple_detected_poses[
: OpenposeDetector.object_limit
]:
for keypoint, d in enumerate(detected_poses):
if (
keypoint in self.keypoints
and d[2] > OpenposeDetector.min_confidence
):
poses_of_interest.append((d[0], d[1]))
poses_of_interest = numpy.asarray(poses_of_interest)
median_coords = numpy.median(poses_of_interest, axis=0)
if not numpy.any(numpy.isnan(median_coords)):
median_y = median_coords[1]
y_norm = median_y / frame.shape[0]
self.all_y_coords[self.frame_idx] = y_norm
self.frame_idx += 1
return outframe
def find_peaks(self):
min_coord = numpy.nanmin(self.all_y_coords)
adjusted_y_coords = numpy.nan_to_num(self.all_y_coords, nan=min_coord)
# wavelets are good for peaks
# https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2631518/
peaks = find_peaks_cwt(adjusted_y_coords, numpy.arange(2, 4))
peaks = peaks[numpy.where(numpy.diff(peaks) > 11)[0]]
return peaks
def plot_ycoords(
self, bop_bpm_plot_history, debug_bpm=False, debug_bpm_frame_skip=30
):
plt.figure(1)
plt.title("normalized median y coordinate motion")
plt.xlabel("time (s)")
plt.ylabel("normalized y coordinate")
frame_times = numpy.arange(0.0, self.total_duration, self.frame_duration)
peaks = self.find_peaks()
y_coords = numpy.asarray(self.all_y_coords)
plt.plot(
frame_times,
y_coords,
"-D",
markevery=peaks,
mec="black",
)
if debug_bpm:
# skip every 10 frames for bpm plot
for i, bop_bpm_hist in enumerate(
bop_bpm_plot_history[:-debug_bpm_frame_skip]
):
if i % debug_bpm_frame_skip != 0:
continue
bop_times, bpm = bop_bpm_hist
x = find_closest(frame_times, bop_times)
if x.size > 2:
text_x = (
frame_times[x[-2]]
+ (frame_times[x[-1]] - frame_times[x[-2]]) / 2
)
y = y_coords[x]
text_y = max(y) + 0.03
plt.plot(frame_times[x], y, "r")
plt.text(text_x, text_y, "{0}".format(int(round(bpm))))
plt.grid()
plt.show()
def bpm_from_beats(beats):
if beats.size == 0:
return 0
m_res = scipy.stats.linregress(numpy.arange(len(beats)), beats)
beat_step = m_res.slope
return 60 / beat_step
def align_beats_motion(beats, motion, thresh):
i = 0
j = 0
aligned_beats = []
while i < len(motion) and j < len(beats):
curr_motion = motion[i]
curr_beat = beats[j]
if numpy.abs(curr_motion - curr_beat) <= thresh:
aligned_beats.append(min(curr_motion, curr_beat))
i += 1
j += 1
continue
if curr_beat < curr_motion:
# increment beats
j += 1
elif curr_beat > curr_motion:
i += 1
return aligned_beats
|
[
"matplotlib.pyplot.title",
"numpy.abs",
"numpy.nan_to_num",
"numpy.isnan",
"matplotlib.pyplot.figure",
"numpy.arange",
"sys.path.append",
"pyopenpose.WrapperPython",
"matplotlib.pyplot.show",
"numpy.median",
"numpy.asarray",
"pyopenpose.VectorDatum",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.plot",
"pyopenpose.Datum",
"headbang.util.find_closest",
"numpy.nanmin",
"numpy.diff",
"matplotlib.pyplot.xlabel"
] |
[((289, 345), 'sys.path.append', 'sys.path.append', (["(openpose_dir + '/build/python/openpose')"], {}), "(openpose_dir + '/build/python/openpose')\n", (304, 345), False, 'import sys\n'), ((1059, 1077), 'pyopenpose.WrapperPython', 'op.WrapperPython', ([], {}), '()\n', (1075, 1077), True, 'import pyopenpose as op\n'), ((1610, 1620), 'pyopenpose.Datum', 'op.Datum', ([], {}), '()\n', (1618, 1620), True, 'import pyopenpose as op\n'), ((3002, 3033), 'numpy.nanmin', 'numpy.nanmin', (['self.all_y_coords'], {}), '(self.all_y_coords)\n', (3014, 3033), False, 'import numpy\n'), ((3062, 3112), 'numpy.nan_to_num', 'numpy.nan_to_num', (['self.all_y_coords'], {'nan': 'min_coord'}), '(self.all_y_coords, nan=min_coord)\n', (3078, 3112), False, 'import numpy\n'), ((3484, 3497), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (3494, 3497), True, 'import matplotlib.pyplot as plt\n'), ((3506, 3556), 'matplotlib.pyplot.title', 'plt.title', (['"""normalized median y coordinate motion"""'], {}), "('normalized median y coordinate motion')\n", (3515, 3556), True, 'import matplotlib.pyplot as plt\n'), ((3566, 3588), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time (s)"""'], {}), "('time (s)')\n", (3576, 3588), True, 'import matplotlib.pyplot as plt\n'), ((3597, 3634), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""normalized y coordinate"""'], {}), "('normalized y coordinate')\n", (3607, 3634), True, 'import matplotlib.pyplot as plt\n'), ((3658, 3717), 'numpy.arange', 'numpy.arange', (['(0.0)', 'self.total_duration', 'self.frame_duration'], {}), '(0.0, self.total_duration, self.frame_duration)\n', (3670, 3717), False, 'import numpy\n'), ((3772, 3804), 'numpy.asarray', 'numpy.asarray', (['self.all_y_coords'], {}), '(self.all_y_coords)\n', (3785, 3804), False, 'import numpy\n'), ((3814, 3881), 'matplotlib.pyplot.plot', 'plt.plot', (['frame_times', 'y_coords', '"""-D"""'], {'markevery': 'peaks', 'mec': '"""black"""'}), "(frame_times, y_coords, '-D', markevery=peaks, mec='black')\n", (3822, 3881), True, 'import matplotlib.pyplot as plt\n'), ((4746, 4756), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (4754, 4756), True, 'import matplotlib.pyplot as plt\n'), ((4765, 4775), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4773, 4775), True, 'import matplotlib.pyplot as plt\n'), ((1698, 1721), 'pyopenpose.VectorDatum', 'op.VectorDatum', (['[datum]'], {}), '([datum])\n', (1712, 1721), True, 'import pyopenpose as op\n'), ((2588, 2620), 'numpy.asarray', 'numpy.asarray', (['poses_of_interest'], {}), '(poses_of_interest)\n', (2601, 2620), False, 'import numpy\n'), ((2649, 2688), 'numpy.median', 'numpy.median', (['poses_of_interest'], {'axis': '(0)'}), '(poses_of_interest, axis=0)\n', (2661, 2688), False, 'import numpy\n'), ((3266, 3284), 'numpy.arange', 'numpy.arange', (['(2)', '(4)'], {}), '(2, 4)\n', (3278, 3284), False, 'import numpy\n'), ((5181, 5215), 'numpy.abs', 'numpy.abs', (['(curr_motion - curr_beat)'], {}), '(curr_motion - curr_beat)\n', (5190, 5215), False, 'import numpy\n'), ((4291, 4327), 'headbang.util.find_closest', 'find_closest', (['frame_times', 'bop_times'], {}), '(frame_times, bop_times)\n', (4303, 4327), False, 'from headbang.util import find_closest\n'), ((2719, 2745), 'numpy.isnan', 'numpy.isnan', (['median_coords'], {}), '(median_coords)\n', (2730, 2745), False, 'import numpy\n'), ((4628, 4660), 'matplotlib.pyplot.plot', 'plt.plot', (['frame_times[x]', 'y', '"""r"""'], {}), "(frame_times[x], y, 'r')\n", (4636, 4660), True, 'import matplotlib.pyplot as plt\n'), ((3320, 3337), 'numpy.diff', 'numpy.diff', (['peaks'], {}), '(peaks)\n', (3330, 3337), False, 'import numpy\n')]
|
import sys
cmd_folder = "../../../vis" # nopep8
if cmd_folder not in sys.path: # nopep8
sys.path.insert(0, cmd_folder)
from tile_mov import tile_movie
from make_mov import make_all, get_particle_trajectories
import pylab as plt
import numpy as np
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.gridspec as gridspec
plt.rcParams.update({
"text.usetex": True,
"font.family": "sans-serif",
"font.sans-serif": ["Helvetica"]})
# ==============================================================================
# MAKE MOVIES
# ==============================================================================
def smooth_limits(vmin, vmax):
from scipy.signal import savgol_filter
vmin = savgol_filter(vmin, 11, 3)
vmax = savgol_filter(vmax, 11, 3)
return vmin, vmax
def get_number_density(ds, c):
x, r = ds.get("rho-%s"%c["component"])
x, m = ds.get("mass-%s"%c["component"], grid='node')
return {"x":x[0], "y":x[1], "value":r/m}
def get_D_mag(ds, c):
x, Dx = ds.get("x_D-field")
x, Dy = ds.get("y_D-field")
return {"x":x[0], "y":x[1], "value":np.sqrt(Dx**2 + Dy**2)}
def get_Bz(ds, c):
x, Bz = ds.get("z_B-field")
return {"x":x[0], "y":x[1], "value":Bz}
def plot(frame, data, output_name):
xn = data["nd-ion"]["x"][()]
yn = data["nd-ion"]["y"][()]
ni = data["nd-ion"]["value"][()]
ni_min = frame["nd-ion"]["min"]
ni_max = frame["nd-ion"]["max"]
ne = data["nd-electron"]["value"][()]
ne_min = frame["nd-electron"]["min"]
ne_max = frame["nd-electron"]["max"]
D = data["D"]["value"][()]
D_min = frame["D"]["min"]
D_max = frame["D"]["max"]
B = data["B"]["value"][()]
B_min = frame["B"]["min"]
B_max = frame["B"]["max"]
x = np.concatenate((-xn[::-1][0:-1], xn))
y = np.concatenate((-yn[::-1][0:-1], yn))
y, x = np.meshgrid(y, x)
axes = []
# join the data
nx = xn.size - 1
ny = yn.size - 1
fig = plt.figure(figsize=(3,3))
gs = gridspec.GridSpec(ncols=1, nrows=1, hspace=0.01, wspace=0.01)
ax = fig.add_subplot(gs[0,0]); axes.append(ax)
# number densities
J = np.zeros((2*nx, 2*ny))*np.nan
# J[0:nx, 0:ny] = np.rot90(ne.T,2)
J[0:nx, ny::] = np.rot90(ne)
# J[nx::, 0:ny] = np.rot90(ni.T,3)
J[nx::, ny::] = ni
vmin = min(ne_min, ni_min)
vmax = max(ne_max, ni_max)
pcm = ax.pcolormesh(x, y, J, vmin=vmin, vmax=vmax)
ax.text(0.025, 0.975, r'$n_e$', horizontalalignment='left',
verticalalignment='top', transform=ax.transAxes, fontsize=10)
ax.text(0.975, 0.975, r'$n_i$', horizontalalignment='right',
verticalalignment='top', transform=ax.transAxes, fontsize=10)
# fields
J = np.zeros((2*nx, 2*ny))*np.nan
J[0:nx, 0:ny] = np.rot90(D.T,2)
pcm = ax.pcolormesh(x, y, J, vmin=D_min, vmax=D_max)
J = np.zeros((2*nx, 2*ny))*np.nan
J[nx::, 0:ny] = np.rot90(B.T,3)
big = max(abs(B_max), abs(B_min))
pcm = ax.pcolormesh(x, y, J, vmin=-big, vmax=big, cmap="bwr")
ax.text(0.025, 0.025, r'$\left|\vec{D}\right|$', horizontalalignment='left',
verticalalignment='bottom', transform=ax.transAxes, fontsize=10)
ax.text(0.975, 0.025, r'$B_z$', horizontalalignment='right',
verticalalignment='bottom', transform=ax.transAxes, fontsize=10)
for ax in axes:
ax.set_xlim(-2, 2)
ax.set_ylim(-2, 2)
ax.set_aspect(1)
ax.axes.xaxis.set_visible(False)
ax.axes.yaxis.set_visible(False)
# fig.tight_layout()
fig.savefig(output_name, dpi=300, bbox_inches="tight")
plt.close(fig)
return
if 1:
Q = []
q = {}
q["files_dir"] = "."
q["level"] = -1
q["get"] = [
{"func":get_number_density, "tag":"nd-ion", "component":"ion"},
{"func":get_number_density, "tag":"nd-electron", "component":"electron"},
{"func":get_D_mag, "tag":"D"},
{"func":get_Bz, "tag":"B"}
]
q["plot"] = plot
q["name"] = "movie"
dt = 0.005
##
q["framerate"] = 20
q["mov_save"] = q["files_dir"] + "/mov"
q["offset"] = [0.0, 0.0]
q["xy_limits"] = [[0,0], [4,4]]
q["file_include"] = ["TRMI.plt"]
q["file_exclude"] = []
q["cores"] = 11
q["time_span"] = [] #np.arange(1.95,2+dt, dt)
q["force_data"] = False
q["force_frames"] = True
q["only_frames"] = False
q["redo_streaks"] = False
q["dpi"] = 300
q["normalize"] = "none" #{"smooth":smooth_limits}
Q.append(q)
make_all(Q)
print("DONE")
|
[
"pylab.close",
"scipy.signal.savgol_filter",
"numpy.meshgrid",
"pylab.rcParams.update",
"numpy.zeros",
"sys.path.insert",
"make_mov.make_all",
"numpy.rot90",
"pylab.figure",
"matplotlib.gridspec.GridSpec",
"numpy.concatenate",
"numpy.sqrt"
] |
[((349, 458), 'pylab.rcParams.update', 'plt.rcParams.update', (["{'text.usetex': True, 'font.family': 'sans-serif', 'font.sans-serif': [\n 'Helvetica']}"], {}), "({'text.usetex': True, 'font.family': 'sans-serif',\n 'font.sans-serif': ['Helvetica']})\n", (368, 458), True, 'import pylab as plt\n'), ((94, 124), 'sys.path.insert', 'sys.path.insert', (['(0)', 'cmd_folder'], {}), '(0, cmd_folder)\n', (109, 124), False, 'import sys\n'), ((733, 759), 'scipy.signal.savgol_filter', 'savgol_filter', (['vmin', '(11)', '(3)'], {}), '(vmin, 11, 3)\n', (746, 759), False, 'from scipy.signal import savgol_filter\n'), ((771, 797), 'scipy.signal.savgol_filter', 'savgol_filter', (['vmax', '(11)', '(3)'], {}), '(vmax, 11, 3)\n', (784, 797), False, 'from scipy.signal import savgol_filter\n'), ((1781, 1818), 'numpy.concatenate', 'np.concatenate', (['(-xn[::-1][0:-1], xn)'], {}), '((-xn[::-1][0:-1], xn))\n', (1795, 1818), True, 'import numpy as np\n'), ((1827, 1864), 'numpy.concatenate', 'np.concatenate', (['(-yn[::-1][0:-1], yn)'], {}), '((-yn[::-1][0:-1], yn))\n', (1841, 1864), True, 'import numpy as np\n'), ((1877, 1894), 'numpy.meshgrid', 'np.meshgrid', (['y', 'x'], {}), '(y, x)\n', (1888, 1894), True, 'import numpy as np\n'), ((1985, 2011), 'pylab.figure', 'plt.figure', ([], {'figsize': '(3, 3)'}), '(figsize=(3, 3))\n', (1995, 2011), True, 'import pylab as plt\n'), ((2020, 2081), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', ([], {'ncols': '(1)', 'nrows': '(1)', 'hspace': '(0.01)', 'wspace': '(0.01)'}), '(ncols=1, nrows=1, hspace=0.01, wspace=0.01)\n', (2037, 2081), True, 'import matplotlib.gridspec as gridspec\n'), ((2255, 2267), 'numpy.rot90', 'np.rot90', (['ne'], {}), '(ne)\n', (2263, 2267), True, 'import numpy as np\n'), ((2787, 2803), 'numpy.rot90', 'np.rot90', (['D.T', '(2)'], {}), '(D.T, 2)\n', (2795, 2803), True, 'import numpy as np\n'), ((2920, 2936), 'numpy.rot90', 'np.rot90', (['B.T', '(3)'], {}), '(B.T, 3)\n', (2928, 2936), True, 'import numpy as np\n'), ((3600, 3614), 'pylab.close', 'plt.close', (['fig'], {}), '(fig)\n', (3609, 3614), True, 'import pylab as plt\n'), ((4509, 4520), 'make_mov.make_all', 'make_all', (['Q'], {}), '(Q)\n', (4517, 4520), False, 'from make_mov import make_all, get_particle_trajectories\n'), ((1127, 1153), 'numpy.sqrt', 'np.sqrt', (['(Dx ** 2 + Dy ** 2)'], {}), '(Dx ** 2 + Dy ** 2)\n', (1134, 1153), True, 'import numpy as np\n'), ((2165, 2191), 'numpy.zeros', 'np.zeros', (['(2 * nx, 2 * ny)'], {}), '((2 * nx, 2 * ny))\n', (2173, 2191), True, 'import numpy as np\n'), ((2737, 2763), 'numpy.zeros', 'np.zeros', (['(2 * nx, 2 * ny)'], {}), '((2 * nx, 2 * ny))\n', (2745, 2763), True, 'import numpy as np\n'), ((2870, 2896), 'numpy.zeros', 'np.zeros', (['(2 * nx, 2 * ny)'], {}), '((2 * nx, 2 * ny))\n', (2878, 2896), True, 'import numpy as np\n')]
|
from autobot_helpers import boto3_helper, context_helper
from botocore.exceptions import ClientError
import traceback
class Support:
def __init__(self):
self.client = boto3_helper.get_client('support')
def refresh_checks(self):
try:
ta_checks = self.client.describe_trusted_advisor_checks(language='en')
for checks in ta_checks['checks']:
try:
self.client.refresh_trusted_advisor_check(checkId=checks['id'])
except ClientError as e:
print('Cannot refresh check: ' + checks['name'])
print("Not able to refresh the trusted adviser check: " + traceback.format_exc() +
": Check name:" +checks['name'])
continue
return {'success': True}
except BaseException as e:
err_str = traceback.format_exc()
context_helper.logger().exception("Some exception occurred while refreshing checks=%s", err_str)
return {'success': False, 'error_code': 'EXCEPTION', 'message': err_str}
def generate_report(self):
try:
ta_checks = self.client.describe_trusted_advisor_checks(language='en')
check_summary_list = {}
for checks in ta_checks['checks']:
try:
check_summary = self.client.describe_trusted_advisor_check_summaries(
checkIds=[checks['id']])['summaries'][0]
if check_summary['status'] != 'not_available':
if checks['category'] not in check_summary_list:
check_summary_list[checks['category']] = []
check_summary_list[checks['category']].append({
'name': checks['name'],
'status': check_summary['status'],
'resourcesProcessed': str(check_summary['resourcesSummary']['resourcesProcessed']),
'resourcesFlagged': str(check_summary['resourcesSummary']['resourcesFlagged']),
'resourcesSuppressed': str(check_summary['resourcesSummary']['resourcesSuppressed']),
'resourcesIgnored': str(check_summary['resourcesSummary']['resourcesIgnored']),
})
except BaseException as e:
print('Failed to get check: ' + checks['id'] + ' --- ' + checks['name'])
traceback.print_exc()
continue
for k1, v1 in check_summary_list.items():
if isinstance(v1, (dict, list)) and len(v1) != 0:
for dict_val_v1 in v1:
if dict_val_v1['status'] == 'error':
v1[v1.index(dict_val_v1)] = (dict_val_v1, 1)
elif dict_val_v1['status'] == 'warning':
v1[v1.index(dict_val_v1)] = (dict_val_v1, 2)
elif dict_val_v1['status'] == 'ok':
v1[v1.index(dict_val_v1)] = (dict_val_v1, 3)
else:
v1[v1.index(dict_val_v1)] = (dict_val_v1, 4)
v1.sort(key=lambda x: x[1])
return {'success': True, 'response': check_summary_list}
except BaseException as e:
err_str = traceback.format_exc()
context_helper.logger().exception("Some exception occurred while generating report=%s", err_str)
if 'SubscriptionRequiredException' in err_str:
return {'success': False, 'error_code': 'NO_PREMIUM_SUBSCRIPTION',
'message': "AWS Premium Support Subscription is required to generate this report."}
return {'success': False, 'error_code': 'EXCEPTION', 'message': err_str}
|
[
"traceback.print_exc",
"autobot_helpers.context_helper.logger",
"autobot_helpers.boto3_helper.get_client",
"traceback.format_exc"
] |
[((182, 216), 'autobot_helpers.boto3_helper.get_client', 'boto3_helper.get_client', (['"""support"""'], {}), "('support')\n", (205, 216), False, 'from autobot_helpers import boto3_helper, context_helper\n'), ((891, 913), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (911, 913), False, 'import traceback\n'), ((3413, 3435), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3433, 3435), False, 'import traceback\n'), ((926, 949), 'autobot_helpers.context_helper.logger', 'context_helper.logger', ([], {}), '()\n', (947, 949), False, 'from autobot_helpers import boto3_helper, context_helper\n'), ((2519, 2540), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2538, 2540), False, 'import traceback\n'), ((3448, 3471), 'autobot_helpers.context_helper.logger', 'context_helper.logger', ([], {}), '()\n', (3469, 3471), False, 'from autobot_helpers import boto3_helper, context_helper\n'), ((684, 706), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (704, 706), False, 'import traceback\n')]
|
import logging
import kitesettings
from kiteconnect import KiteConnect
logging.basicConfig(level=logging.DEBUG)
kite = KiteConnect(kitesettings.API_KEY)
# https://kite.zerodha.com/connect/login?v=4&API_KEY=Q8JPzjkt8ftXgqvmXa
request_token = input("Request Token: ")
data = kite.generate_session(request_token, kitesettings.api_secret)
kite.set_access_token(data["access_token"])
print("====================")
print("Access Token: ", data["access_token"])
|
[
"kiteconnect.KiteConnect",
"logging.basicConfig"
] |
[((72, 112), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (91, 112), False, 'import logging\n'), ((121, 154), 'kiteconnect.KiteConnect', 'KiteConnect', (['kitesettings.API_KEY'], {}), '(kitesettings.API_KEY)\n', (132, 154), False, 'from kiteconnect import KiteConnect\n')]
|
from typing import Union
from datetime import datetime
from logging import Formatter, LogRecord
class BalsaFormatter(Formatter):
"""
Format time in ISO 8601
"""
def formatTime(self, record: LogRecord, datefmt: Union[str, None] = None) -> str:
assert datefmt is None # static format
time_stamp = datetime.fromtimestamp(record.created)
return time_stamp.astimezone().isoformat()
|
[
"datetime.datetime.fromtimestamp"
] |
[((332, 370), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['record.created'], {}), '(record.created)\n', (354, 370), False, 'from datetime import datetime\n')]
|
'''
@description: basic function for common
@return : return value is all by jsondata
'''
#-*- coding:utf-8 -*-
import json
from django.contrib.auth.hashers import make_password,check_password
from django.contrib.auth import authenticate
def checkUserLoginInfo(username,password):
'''
@brief: basic function to check password and username
:param username: 用户名
:param password: 密码
:return: jsondata that whether has password or user problem
if OK == 1 : means checkResult ok
'''
dict_Result = {}
dict_Result['OK'] = 0
if len(username) < 6 :
str_error_type = "输入用户名过短,请重新输入"
dict_Result['error'] = str_error_type
return json.dumps(dict_Result,ensure_ascii=False,sort_keys=True)
if len(password) < 6 :
str_error_type = "输入密码过短,请重新输入"
dict_Result['error'] = str_error_type
return json.dumps(dict_Result,ensure_ascii=False,sort_keys=True,)
dict_Result['OK'] = 1
return json.dumps(dict_Result)
''' password method : 加密 or 解密 '''
def generateSecurityPassword(password):
'''
@description: gernerate Security Word:
:param password:
:return: str type that has make it security
'''
security_password = make_password(password)
return security_password
def checkSecurityPassword(password,security_password):
'''
@description: check security password
:param password:
:param security_password:
:return: bool type
'''
b_Result = check_password(password,security_password)
return b_Result
|
[
"django.contrib.auth.hashers.make_password",
"django.contrib.auth.hashers.check_password",
"json.dumps"
] |
[((967, 990), 'json.dumps', 'json.dumps', (['dict_Result'], {}), '(dict_Result)\n', (977, 990), False, 'import json\n'), ((1220, 1243), 'django.contrib.auth.hashers.make_password', 'make_password', (['password'], {}), '(password)\n', (1233, 1243), False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((1476, 1519), 'django.contrib.auth.hashers.check_password', 'check_password', (['password', 'security_password'], {}), '(password, security_password)\n', (1490, 1519), False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((684, 743), 'json.dumps', 'json.dumps', (['dict_Result'], {'ensure_ascii': '(False)', 'sort_keys': '(True)'}), '(dict_Result, ensure_ascii=False, sort_keys=True)\n', (694, 743), False, 'import json\n'), ((871, 930), 'json.dumps', 'json.dumps', (['dict_Result'], {'ensure_ascii': '(False)', 'sort_keys': '(True)'}), '(dict_Result, ensure_ascii=False, sort_keys=True)\n', (881, 930), False, 'import json\n')]
|
#!python
# pylint: disable=redefined-outer-name,unexpected-keyword-arg
"""Script to detokenize text file"""
from lang2sign.lang2gloss.tokenizers.en_asl import EnAslTokenizer
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
description="Detokenize text files"
)
parser.add_argument(
"--input-file",
dest="input_file",
type=str,
help="filepath of text to be detokenized"
)
args = parser.parse_args()
output_filepath = args.input_file + ".detok"
tokenizer = EnAslTokenizer()
print(
"Writing detokenized file to {}".format(
output_filepath
)
)
tokenizer.write_detokenized_file(
args.input_file,
output_filepath
)
|
[
"argparse.ArgumentParser",
"lang2sign.lang2gloss.tokenizers.en_asl.EnAslTokenizer"
] |
[((237, 297), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Detokenize text files"""'}), "(description='Detokenize text files')\n", (260, 297), False, 'import argparse\n'), ((560, 576), 'lang2sign.lang2gloss.tokenizers.en_asl.EnAslTokenizer', 'EnAslTokenizer', ([], {}), '()\n', (574, 576), False, 'from lang2sign.lang2gloss.tokenizers.en_asl import EnAslTokenizer\n')]
|
import os
from flask import Blueprint, render_template, redirect, url_for, send_from_directory
import conf
from . import manager
from .forms import SimulationForm
bp = Blueprint('web', __name__)
@bp.route('/')
def index():
return render_template('status.html')
@bp.route('/start', methods=['GET', 'POST'])
def start():
form = SimulationForm()
if form.validate_on_submit():
manager.start(**form.data)
return redirect(url_for('web.index'))
return render_template('start.html', form=form)
@bp.route('/results')
def runs():
# sort by datetime, most recent first
ids = os.listdir(conf.RUN['OUTPUT_PATH'])
ids = sorted(ids, key=lambda d: d.split('__')[-1], reverse=True)
return render_template('runs.html', runs=ids)
@bp.route('/results/<string:id>')
def results(id):
# Currently just showing top-level plots
path = os.path.join(conf.RUN['OUTPUT_PATH'], id)
plots = os.path.join(path, 'plots')
try:
plots = [os.path.join('/output', id, 'plots', p) for p in os.listdir(plots)]
except FileNotFoundError:
plots = []
return render_template('results.html', id=id, plots=plots)
@bp.route('/output/<path:filename>')
def output(filename):
"""serve simulation result files from the output path"""
return send_from_directory(conf.RUN['OUTPUT_PATH'], filename)
|
[
"flask.Blueprint",
"flask.url_for",
"flask.render_template",
"flask.send_from_directory",
"os.path.join",
"os.listdir"
] |
[((171, 197), 'flask.Blueprint', 'Blueprint', (['"""web"""', '__name__'], {}), "('web', __name__)\n", (180, 197), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((240, 270), 'flask.render_template', 'render_template', (['"""status.html"""'], {}), "('status.html')\n", (255, 270), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((485, 525), 'flask.render_template', 'render_template', (['"""start.html"""'], {'form': 'form'}), "('start.html', form=form)\n", (500, 525), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((614, 649), 'os.listdir', 'os.listdir', (["conf.RUN['OUTPUT_PATH']"], {}), "(conf.RUN['OUTPUT_PATH'])\n", (624, 649), False, 'import os\n'), ((730, 768), 'flask.render_template', 'render_template', (['"""runs.html"""'], {'runs': 'ids'}), "('runs.html', runs=ids)\n", (745, 768), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((878, 919), 'os.path.join', 'os.path.join', (["conf.RUN['OUTPUT_PATH']", 'id'], {}), "(conf.RUN['OUTPUT_PATH'], id)\n", (890, 919), False, 'import os\n'), ((932, 959), 'os.path.join', 'os.path.join', (['path', '"""plots"""'], {}), "(path, 'plots')\n", (944, 959), False, 'import os\n'), ((1114, 1165), 'flask.render_template', 'render_template', (['"""results.html"""'], {'id': 'id', 'plots': 'plots'}), "('results.html', id=id, plots=plots)\n", (1129, 1165), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((1299, 1353), 'flask.send_from_directory', 'send_from_directory', (["conf.RUN['OUTPUT_PATH']", 'filename'], {}), "(conf.RUN['OUTPUT_PATH'], filename)\n", (1318, 1353), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((452, 472), 'flask.url_for', 'url_for', (['"""web.index"""'], {}), "('web.index')\n", (459, 472), False, 'from flask import Blueprint, render_template, redirect, url_for, send_from_directory\n'), ((986, 1025), 'os.path.join', 'os.path.join', (['"""/output"""', 'id', '"""plots"""', 'p'], {}), "('/output', id, 'plots', p)\n", (998, 1025), False, 'import os\n'), ((1035, 1052), 'os.listdir', 'os.listdir', (['plots'], {}), '(plots)\n', (1045, 1052), False, 'import os\n')]
|
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
def default_conv(in_channels, out_channels, kernel_size, bias=True):
return nn.Conv2d(
in_channels, out_channels, kernel_size,
padding=(kernel_size//2), bias=bias)
class MeanShift(nn.Conv2d):
def __init__(self, rgb_range, rgb_mean, rgb_std, sign=-1):
super(MeanShift, self).__init__(3, 3, kernel_size=1)
std = torch.Tensor(rgb_std)
self.weight.data = torch.eye(3).view(3, 3, 1, 1)
self.weight.data.div_(std.view(3, 1, 1, 1))
self.bias.data = sign * rgb_range * torch.Tensor(rgb_mean)
self.bias.data.div_(std)
self.weight.requires_grad = False
self.bias.requires_grad = False
class BasicBlock(nn.Sequential):
def __init__(
self, in_channels, out_channels, kernel_size, stride=1, bias=False,
bn=True, act=nn.ReLU(True)):
m = [nn.Conv2d(
in_channels, out_channels, kernel_size,
padding=(kernel_size//2), stride=stride, bias=bias)
]
if bn: m.append(nn.BatchNorm2d(out_channels))
if act is not None: m.append(act)
super(BasicBlock, self).__init__(*m)
|
[
"torch.eye",
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.nn.BatchNorm2d",
"torch.Tensor"
] |
[((198, 288), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels', 'kernel_size'], {'padding': '(kernel_size // 2)', 'bias': 'bias'}), '(in_channels, out_channels, kernel_size, padding=kernel_size // 2,\n bias=bias)\n', (207, 288), True, 'import torch.nn as nn\n'), ((469, 490), 'torch.Tensor', 'torch.Tensor', (['rgb_std'], {}), '(rgb_std)\n', (481, 490), False, 'import torch\n'), ((931, 944), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (938, 944), True, 'import torch.nn as nn\n'), ((644, 666), 'torch.Tensor', 'torch.Tensor', (['rgb_mean'], {}), '(rgb_mean)\n', (656, 666), False, 'import torch\n'), ((961, 1066), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels', 'kernel_size'], {'padding': '(kernel_size // 2)', 'stride': 'stride', 'bias': 'bias'}), '(in_channels, out_channels, kernel_size, padding=kernel_size // 2,\n stride=stride, bias=bias)\n', (970, 1066), True, 'import torch.nn as nn\n'), ((518, 530), 'torch.eye', 'torch.eye', (['(3)'], {}), '(3)\n', (527, 530), False, 'import torch\n'), ((1122, 1150), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (1136, 1150), True, 'import torch.nn as nn\n')]
|
# -*- coding: utf-8 -*-
"""Warning : this file has been generated, you shouldn't edit it"""
from os import linesep
from pyleecan.Classes.check import check_init_dict, check_var
from pyleecan.Functions.save import save
from pyleecan.Classes.frozen import FrozenClass
from pyleecan.Methods.Machine.Magnet.comp_angle_opening import comp_angle_opening
from pyleecan.Methods.Machine.Magnet.comp_height import comp_height
from pyleecan.Methods.Machine.Magnet.comp_mass import comp_mass
from pyleecan.Methods.Machine.Magnet.comp_ratio_opening import comp_ratio_opening
from pyleecan.Methods.Machine.Magnet.comp_surface import comp_surface
from pyleecan.Methods.Machine.Magnet.comp_volume import comp_volume
from pyleecan.Methods.Machine.Magnet.is_outwards import is_outwards
from pyleecan.Methods.Machine.Magnet.plot import plot
from pyleecan.Classes.check import InitUnKnowClassError
from pyleecan.Classes.Material import Material
class Magnet(FrozenClass):
VERSION = 1
# cf Methods.Machine.Magnet.comp_angle_opening
comp_angle_opening = comp_angle_opening
# cf Methods.Machine.Magnet.comp_height
comp_height = comp_height
# cf Methods.Machine.Magnet.comp_mass
comp_mass = comp_mass
# cf Methods.Machine.Magnet.comp_ratio_opening
comp_ratio_opening = comp_ratio_opening
# cf Methods.Machine.Magnet.comp_surface
comp_surface = comp_surface
# cf Methods.Machine.Magnet.comp_volume
comp_volume = comp_volume
# cf Methods.Machine.Magnet.is_outwards
is_outwards = is_outwards
# cf Methods.Machine.Magnet.plot
plot = plot
# save method is available in all object
save = save
def __init__(self, mat_type=-1, type_magnetization=0, Lmag=0.95, init_dict=None):
"""Constructor of the class. Can be use in two ways :
- __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values
for Matrix, None will initialise the property with an empty Matrix
for pyleecan type, None will call the default constructor
- __init__ (init_dict = d) d must be a dictionnary wiht every properties as keys
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object"""
if mat_type == -1:
mat_type = Material()
if init_dict is not None: # Initialisation by dict
check_init_dict(init_dict, ["mat_type", "type_magnetization", "Lmag"])
# Overwrite default value with init_dict content
if "mat_type" in list(init_dict.keys()):
mat_type = init_dict["mat_type"]
if "type_magnetization" in list(init_dict.keys()):
type_magnetization = init_dict["type_magnetization"]
if "Lmag" in list(init_dict.keys()):
Lmag = init_dict["Lmag"]
# Initialisation by argument
self.parent = None
# mat_type can be None, a Material object or a dict
if isinstance(mat_type, dict):
self.mat_type = Material(init_dict=mat_type)
else:
self.mat_type = mat_type
self.type_magnetization = type_magnetization
self.Lmag = Lmag
# The class is frozen, for now it's impossible to add new properties
self._freeze()
def __str__(self):
"""Convert this objet in a readeable string (for print)"""
Magnet_str = ""
if self.parent is None:
Magnet_str += "parent = None " + linesep
else:
Magnet_str += "parent = " + str(type(self.parent)) + " object" + linesep
Magnet_str += "mat_type = " + str(self.mat_type.as_dict()) + linesep + linesep
Magnet_str += "type_magnetization = " + str(self.type_magnetization) + linesep
Magnet_str += "Lmag = " + str(self.Lmag)
return Magnet_str
def __eq__(self, other):
"""Compare two objects (skip parent)"""
if type(other) != type(self):
return False
if other.mat_type != self.mat_type:
return False
if other.type_magnetization != self.type_magnetization:
return False
if other.Lmag != self.Lmag:
return False
return True
def as_dict(self):
"""Convert this objet in a json seriable dict (can be use in __init__)
"""
Magnet_dict = dict()
if self.mat_type is None:
Magnet_dict["mat_type"] = None
else:
Magnet_dict["mat_type"] = self.mat_type.as_dict()
Magnet_dict["type_magnetization"] = self.type_magnetization
Magnet_dict["Lmag"] = self.Lmag
# The class name is added to the dict fordeserialisation purpose
Magnet_dict["__class__"] = "Magnet"
return Magnet_dict
def _set_None(self):
"""Set all the properties to None (except pyleecan object)"""
if self.mat_type is not None:
self.mat_type._set_None()
self.type_magnetization = None
self.Lmag = None
def _get_mat_type(self):
"""getter of mat_type"""
return self._mat_type
def _set_mat_type(self, value):
"""setter of mat_type"""
check_var("mat_type", value, "Material")
self._mat_type = value
if self._mat_type is not None:
self._mat_type.parent = self
# The Magnet material
# Type : Material
mat_type = property(
fget=_get_mat_type, fset=_set_mat_type, doc=u"""The Magnet material"""
)
def _get_type_magnetization(self):
"""getter of type_magnetization"""
return self._type_magnetization
def _set_type_magnetization(self, value):
"""setter of type_magnetization"""
check_var("type_magnetization", value, "int", Vmin=0, Vmax=5)
self._type_magnetization = value
# Permanent magnet magnetization type: 0 for radial, 1 for parallel, 2 for HallBach []
# Type : int, min = 0, max = 5
type_magnetization = property(
fget=_get_type_magnetization,
fset=_set_type_magnetization,
doc=u"""Permanent magnet magnetization type: 0 for radial, 1 for parallel, 2 for HallBach []""",
)
def _get_Lmag(self):
"""getter of Lmag"""
return self._Lmag
def _set_Lmag(self, value):
"""setter of Lmag"""
check_var("Lmag", value, "float", Vmin=0)
self._Lmag = value
# Magnet axial length
# Type : float, min = 0
Lmag = property(fget=_get_Lmag, fset=_set_Lmag, doc=u"""Magnet axial length""")
|
[
"pyleecan.Classes.check.check_init_dict",
"pyleecan.Classes.Material.Material",
"pyleecan.Classes.check.check_var"
] |
[((5166, 5206), 'pyleecan.Classes.check.check_var', 'check_var', (['"""mat_type"""', 'value', '"""Material"""'], {}), "('mat_type', value, 'Material')\n", (5175, 5206), False, 'from pyleecan.Classes.check import check_init_dict, check_var\n'), ((5699, 5760), 'pyleecan.Classes.check.check_var', 'check_var', (['"""type_magnetization"""', 'value', '"""int"""'], {'Vmin': '(0)', 'Vmax': '(5)'}), "('type_magnetization', value, 'int', Vmin=0, Vmax=5)\n", (5708, 5760), False, 'from pyleecan.Classes.check import check_init_dict, check_var\n'), ((6302, 6343), 'pyleecan.Classes.check.check_var', 'check_var', (['"""Lmag"""', 'value', '"""float"""'], {'Vmin': '(0)'}), "('Lmag', value, 'float', Vmin=0)\n", (6311, 6343), False, 'from pyleecan.Classes.check import check_init_dict, check_var\n'), ((2289, 2299), 'pyleecan.Classes.Material.Material', 'Material', ([], {}), '()\n', (2297, 2299), False, 'from pyleecan.Classes.Material import Material\n'), ((2372, 2442), 'pyleecan.Classes.check.check_init_dict', 'check_init_dict', (['init_dict', "['mat_type', 'type_magnetization', 'Lmag']"], {}), "(init_dict, ['mat_type', 'type_magnetization', 'Lmag'])\n", (2387, 2442), False, 'from pyleecan.Classes.check import check_init_dict, check_var\n'), ((3019, 3047), 'pyleecan.Classes.Material.Material', 'Material', ([], {'init_dict': 'mat_type'}), '(init_dict=mat_type)\n', (3027, 3047), False, 'from pyleecan.Classes.Material import Material\n')]
|
from pathlib import Path
import pytest
from maggma.stores import JSONStore, MemoryStore
from monty.serialization import dumpfn, loadfn
from emmet.builders.materials.electronic_structure import ElectronicStructureBuilder
from emmet.builders.vasp.materials import MaterialsBuilder
@pytest.fixture(scope="session")
def tasks_store(test_dir):
return JSONStore(
test_dir / "electronic_structure/es_task_docs.json.gz", key="task_id"
)
@pytest.fixture(scope="session")
def materials_store(tasks_store):
materials_store = MemoryStore(key="material_id")
builder = MaterialsBuilder(tasks=tasks_store, materials=materials_store)
builder.run()
return materials_store
@pytest.fixture
def electronic_structure_store():
return MemoryStore(key="material_id")
@pytest.fixture
def bandstructure_fs(test_dir):
return JSONStore(
test_dir / "electronic_structure/es_bs_objs.json.gz", key="task_id"
)
@pytest.fixture
def dos_fs(test_dir):
return JSONStore(
test_dir / "electronic_structure/es_dos_objs.json.gz", key="task_id"
)
def test_electronic_structure_builder(
tasks_store, materials_store, electronic_structure_store, bandstructure_fs, dos_fs
):
builder = ElectronicStructureBuilder(
tasks=tasks_store,
materials=materials_store,
electronic_structure=electronic_structure_store,
bandstructure_fs=bandstructure_fs,
dos_fs=dos_fs,
)
builder.run()
assert electronic_structure_store.count() == 3
def test_serialization(tmpdir):
builder = ElectronicStructureBuilder(
MemoryStore(), MemoryStore(), MemoryStore(), MemoryStore(), MemoryStore()
)
dumpfn(builder.as_dict(), Path(tmpdir) / "test.json")
loadfn(Path(tmpdir) / "test.json")
|
[
"emmet.builders.materials.electronic_structure.ElectronicStructureBuilder",
"pytest.fixture",
"pathlib.Path",
"maggma.stores.MemoryStore",
"maggma.stores.JSONStore",
"emmet.builders.vasp.materials.MaterialsBuilder"
] |
[((284, 315), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (298, 315), False, 'import pytest\n'), ((452, 483), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (466, 483), False, 'import pytest\n'), ((354, 439), 'maggma.stores.JSONStore', 'JSONStore', (["(test_dir / 'electronic_structure/es_task_docs.json.gz')"], {'key': '"""task_id"""'}), "(test_dir / 'electronic_structure/es_task_docs.json.gz', key='task_id'\n )\n", (363, 439), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((540, 570), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {'key': '"""material_id"""'}), "(key='material_id')\n", (551, 570), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((585, 647), 'emmet.builders.vasp.materials.MaterialsBuilder', 'MaterialsBuilder', ([], {'tasks': 'tasks_store', 'materials': 'materials_store'}), '(tasks=tasks_store, materials=materials_store)\n', (601, 647), False, 'from emmet.builders.vasp.materials import MaterialsBuilder\n'), ((756, 786), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {'key': '"""material_id"""'}), "(key='material_id')\n", (767, 786), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((848, 926), 'maggma.stores.JSONStore', 'JSONStore', (["(test_dir / 'electronic_structure/es_bs_objs.json.gz')"], {'key': '"""task_id"""'}), "(test_dir / 'electronic_structure/es_bs_objs.json.gz', key='task_id')\n", (857, 926), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((992, 1071), 'maggma.stores.JSONStore', 'JSONStore', (["(test_dir / 'electronic_structure/es_dos_objs.json.gz')"], {'key': '"""task_id"""'}), "(test_dir / 'electronic_structure/es_dos_objs.json.gz', key='task_id')\n", (1001, 1071), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((1232, 1412), 'emmet.builders.materials.electronic_structure.ElectronicStructureBuilder', 'ElectronicStructureBuilder', ([], {'tasks': 'tasks_store', 'materials': 'materials_store', 'electronic_structure': 'electronic_structure_store', 'bandstructure_fs': 'bandstructure_fs', 'dos_fs': 'dos_fs'}), '(tasks=tasks_store, materials=materials_store,\n electronic_structure=electronic_structure_store, bandstructure_fs=\n bandstructure_fs, dos_fs=dos_fs)\n', (1258, 1412), False, 'from emmet.builders.materials.electronic_structure import ElectronicStructureBuilder\n'), ((1605, 1618), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (1616, 1618), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((1620, 1633), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (1631, 1633), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((1635, 1648), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (1646, 1648), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((1650, 1663), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (1661, 1663), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((1665, 1678), 'maggma.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (1676, 1678), False, 'from maggma.stores import JSONStore, MemoryStore\n'), ((1716, 1728), 'pathlib.Path', 'Path', (['tmpdir'], {}), '(tmpdir)\n', (1720, 1728), False, 'from pathlib import Path\n'), ((1755, 1767), 'pathlib.Path', 'Path', (['tmpdir'], {}), '(tmpdir)\n', (1759, 1767), False, 'from pathlib import Path\n')]
|
#!/usr/bin/env python
"""
Download needed raw data
Author: <NAME>
Copyright (c) 2021 - <NAME>
License: See the LICENSE file.
Date: 2021-02-05
"""
import os
import time
import random
import requests
import tempfile
import sys
import zipfile
import shutil
import tarfile
from tqdm import tqdm
from parse_args import parse_io
def download_url(url, output_dir, file_name):
response = None
try:
response = requests.get(url, stream=True)
except:
print(f'Connection error occurred trying to get URL: {url}',
file=sys.stderr)
if response is None or response.status_code != 200:
print(f'Error {response.status_code}',
f'while downloading file from URL: {url}')
return None
tmp_fd, tmp_fn = tempfile.mkstemp()
total_size_in_bytes = int(response.headers.get('content-length', 0))
with os.fdopen(tmp_fd, 'wb') as f_out, \
tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True) as progress_bar:
if total_size_in_bytes is None:
f_out.write(response.content)
else:
total_size_in_bytes = int(total_size_in_bytes)
block_size = 1024 # 1 KB
for data in response.iter_content(block_size):
progress_bar.update(len(data))
f_out.write(data)
if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
print(f'ERROR, something went wrong while downloading {url}')
target = os.path.join(output_dir, file_name)
if target.endswith('.zip') and not zipfile.is_zipfile(tmp_fn):
with zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED) as f_zip:
f_zip.write(tmp_fn)
os.unlink(tmp_fn)
elif any([el.endswith('.tar') for el in url.split('?')]):
shutil.move(tmp_fn, target)
with tarfile.open(target) as f_tar:
f_tar.extractall(output_dir)
os.remove(target)
else:
shutil.move(tmp_fn, target)
return target
def main():
args = parse_io()
source_url = args.input
output_file = args.output
output_dir, file_name = os.path.split(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
download_url(source_url, output_dir, file_name)
time.sleep(random.random()*5)
if __name__ == '__main__':
main()
|
[
"zipfile.is_zipfile",
"tqdm.tqdm",
"os.remove",
"zipfile.ZipFile",
"os.unlink",
"tempfile.mkstemp",
"os.makedirs",
"tarfile.open",
"parse_args.parse_io",
"os.path.exists",
"random.random",
"requests.get",
"shutil.move",
"os.fdopen",
"os.path.split",
"os.path.join"
] |
[((766, 784), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (782, 784), False, 'import tempfile\n'), ((1500, 1535), 'os.path.join', 'os.path.join', (['output_dir', 'file_name'], {}), '(output_dir, file_name)\n', (1512, 1535), False, 'import os\n'), ((2033, 2043), 'parse_args.parse_io', 'parse_io', ([], {}), '()\n', (2041, 2043), False, 'from parse_args import parse_io\n'), ((2135, 2161), 'os.path.split', 'os.path.split', (['output_file'], {}), '(output_file)\n', (2148, 2161), False, 'import os\n'), ((424, 454), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (436, 454), False, 'import requests\n'), ((867, 890), 'os.fdopen', 'os.fdopen', (['tmp_fd', '"""wb"""'], {}), "(tmp_fd, 'wb')\n", (876, 890), False, 'import os\n'), ((912, 971), 'tqdm.tqdm', 'tqdm', ([], {'total': 'total_size_in_bytes', 'unit': '"""iB"""', 'unit_scale': '(True)'}), "(total=total_size_in_bytes, unit='iB', unit_scale=True)\n", (916, 971), False, 'from tqdm import tqdm\n'), ((1717, 1734), 'os.unlink', 'os.unlink', (['tmp_fn'], {}), '(tmp_fn)\n', (1726, 1734), False, 'import os\n'), ((2173, 2199), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (2187, 2199), False, 'import os\n'), ((2209, 2232), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (2220, 2232), False, 'import os\n'), ((1575, 1601), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['tmp_fn'], {}), '(tmp_fn)\n', (1593, 1601), False, 'import zipfile\n'), ((1616, 1666), 'zipfile.ZipFile', 'zipfile.ZipFile', (['target', '"""w"""', 'zipfile.ZIP_DEFLATED'], {}), "(target, 'w', zipfile.ZIP_DEFLATED)\n", (1631, 1666), False, 'import zipfile\n'), ((1805, 1832), 'shutil.move', 'shutil.move', (['tmp_fn', 'target'], {}), '(tmp_fn, target)\n', (1816, 1832), False, 'import shutil\n'), ((1926, 1943), 'os.remove', 'os.remove', (['target'], {}), '(target)\n', (1935, 1943), False, 'import os\n'), ((1962, 1989), 'shutil.move', 'shutil.move', (['tmp_fn', 'target'], {}), '(tmp_fn, target)\n', (1973, 1989), False, 'import shutil\n'), ((2300, 2315), 'random.random', 'random.random', ([], {}), '()\n', (2313, 2315), False, 'import random\n'), ((1846, 1866), 'tarfile.open', 'tarfile.open', (['target'], {}), '(target)\n', (1858, 1866), False, 'import tarfile\n')]
|
import os
from delphifmx import *
class Child_Form(Form):
def __init__(self, owner):
self.child_heading = None
self.result_text_heading = None
self.result_text_label = None
self.LoadProps(os.path.join(os.path.dirname(os.path.abspath(__file__)), "child_window.pyfmx"))
|
[
"os.path.abspath"
] |
[((255, 280), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (270, 280), False, 'import os\n')]
|
import setuptools
import platform
import sys
import os
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_DIR = 'mcell/utils/pybind11_test/build/'
if platform.system() == 'Linux':
# TODO: copy mcell library to the current directory
pass
elif platform.system() == 'Darwin':
#
pass
elif 'Windows' in platform.system():
pass
else:
sys.exit("Operating system '" + platform.system() + "' is not supported in this build system yet.")
def get_mcell_version():
# TODO
return '3.99.0'
setuptools.setup(
name='mcell',
version=get_mcell_version(), # todo: set automatically - has to be number
py_modules=['lib/mcell'],
author="Salk Institute for Biologocal Studies",
author_email="<EMAIL>",
description="MCell4",
long_description="MCell4",
long_description_content_type="text/markdown",
url="https://www.mcell.org",
download_url="https://mcell.org/download.html",
python_requires='>=3.8',
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)"
],
zip_safe=True
)
|
[
"platform.system",
"os.path.abspath"
] |
[((83, 108), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (98, 108), False, 'import os\n'), ((161, 178), 'platform.system', 'platform.system', ([], {}), '()\n', (176, 178), False, 'import platform\n'), ((261, 278), 'platform.system', 'platform.system', ([], {}), '()\n', (276, 278), False, 'import platform\n'), ((325, 342), 'platform.system', 'platform.system', ([], {}), '()\n', (340, 342), False, 'import platform\n'), ((395, 412), 'platform.system', 'platform.system', ([], {}), '()\n', (410, 412), False, 'import platform\n')]
|
#!/usr/bin/env python
import requests
from jobs import AbstractJob
from lxml import etree
class Plex(AbstractJob):
def __init__(self, conf):
self.interval = conf['interval']
self.movies = conf['movies']
self.shows = conf['shows']
self.timeout = conf.get('timeout')
def _parse_movies(self, xml):
tree = etree.fromstring(xml)
movies = []
for movie in tree.xpath('/MediaContainer/Video'):
movies.append({
'title': movie.get('title'),
'year': movie.get('year')
})
return movies
def _parse_shows(self, xml):
tree = etree.fromstring(xml)
shows = []
for show in tree.xpath('/MediaContainer/Video'):
shows.append({
'name': show.get('grandparentTitle'),
'title': show.get('title'),
'episode': show.get('index').zfill(2),
'season': show.get('parentIndex').zfill(2)
})
return shows
def get(self):
try:
r = requests.get(self.movies, timeout=self.timeout)
movies = self._parse_movies(r.content)
r = requests.get(self.shows)
shows = self._parse_shows(r.content)
return {'movies': movies, 'shows': shows}
except requests.exceptions.ConnectionError:
return {}
|
[
"lxml.etree.fromstring",
"requests.get"
] |
[((355, 376), 'lxml.etree.fromstring', 'etree.fromstring', (['xml'], {}), '(xml)\n', (371, 376), False, 'from lxml import etree\n'), ((656, 677), 'lxml.etree.fromstring', 'etree.fromstring', (['xml'], {}), '(xml)\n', (672, 677), False, 'from lxml import etree\n'), ((1078, 1125), 'requests.get', 'requests.get', (['self.movies'], {'timeout': 'self.timeout'}), '(self.movies, timeout=self.timeout)\n', (1090, 1125), False, 'import requests\n'), ((1194, 1218), 'requests.get', 'requests.get', (['self.shows'], {}), '(self.shows)\n', (1206, 1218), False, 'import requests\n')]
|
from scipy import stats
import scikit_posthocs as sp
import numpy as np
import pandas as pd
import glob
def friedman_test(dataframe):
return stats.friedmanchisquare(*[row for index, row in dataframe.T.iterrows()])
def nemenyi_test(dataframe):
nemenyi = sp.posthoc_nemenyi_friedman(dataframe)
list_index=[]
for col in nemenyi.columns:
list_index.append([col,list(nemenyi[nemenyi[col]<0.05].index),list(nemenyi[nemenyi[col]<0.05][col].values)])
return pd.DataFrame(list_index)
def read_dataset(dataframe_path):
return pd.read_csv(dataframe_path, skiprows=[0,2], sep=",",decimal='.')
PATH='/Users/sergiojunior/sentiment-embeddings-final/Experiment Results/Experiments Results/'
PATH_OUT='/Users/sergiojunior/sentiment-embeddings-final/Experiment Results/Statistical_Reslts/'
#list_experiment=['Static','Transformers','Fine_tuning','Task_Fine_tuning']#'Static','Transformers','Fine_tuning','Task_Fine_tuning'
list_experiment=['Fine_tuning']#'Static','Transformers','Fine_tuning','Task_Fine_tuning'
list_classifiers = ['MLPClassifier','Random_Forest','SVM','XGboost','Reg_Logistica']
list_metrics = ['accuracy','f1_macro']
list_models=['BERT',"RoBERTa",'BERTweet']
for experiment in list_experiment:
for classifier in list_classifiers:
for metric in list_metrics:
print("{}_{}_{}".format(experiment,classifier,metric))
if experiment=='Static':
print("Static_embedding")
df = read_dataset(glob.glob(PATH+experiment+'/Pivot_tables/pivot_'+classifier+'*'+metric+'*.csv')[0])
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}.csv".format(experiment,
classifier,
metric))
if experiment=="Transformers":
df = read_dataset(glob.glob(PATH+list_models[0]+'/Pivot_tables/pivot_'+classifier+'*'+metric+'*.csv')[0])
for models in list_models[1:]:
print(models)
df = df.merge(read_dataset(glob.glob(PATH+models+'/Pivot_tables/pivot_'+classifier+'*'+metric+'*.csv')[0]),
how='left',
on='Embedding')
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}.csv".format(experiment,
classifier,
metric))
if experiment=='Fine_tuning':
for models in list_models:
print(models)
df = pd.read_csv(glob.glob(PATH +'Fine_tuning_Generic_tweets/'+ models + '-1-LM/pivot_' + classifier + '*'+metric+'*.csv')[0])
for k in ['5','05','10','25','50','250','500','1500','6600']:
df = df.merge(pd.read_csv(glob.glob(PATH +'Fine_tuning_Generic_tweets/'+ models + '-'+k+'-LM/pivot_' + classifier + '*'+metric+'*.csv')[0]),
how='left',
on='Embedding',
suffixes=("","_"+str(k)))
#df_original = pd.read_csv(glob.glob(PATH + models+'/Pivot_tables/pivot_' + classifier + '*'+metric+'*.csv')[0],
# skiprows=[0,2],sep=",",decimal='.')
#df = df.merge(df_original,how='left', on='Embedding')
#df.columns=['Embedding','1','5','05','10','25','50','250','500','1500','6600','original']
df.columns=['Embedding','1','5','05','10','25','50','250','500','1500','6600']
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}_{}.csv".format(models,experiment,
classifier,
metric))
if experiment=='Task_Fine_tuning':
for models in list_models:
print(models)
df=None
df = pd.read_csv(glob.glob(PATH + 'InData/'+models+'-LM/pivot_' + classifier + '*'+metric+'*.csv')[0],sep=",",decimal='.')
df.iloc[:,1] = round(df.iloc[:,1]*100,2)
for k in ['LOO','22Dt']:
df = df.merge(pd.read_csv(glob.glob(PATH + k +'/'+models+'-LM/pivot_' + classifier + '*'+metric+'*.csv')[0],sep=",",decimal='.'),
how='left',
on='Embedding',
suffixes=("","_"+str(k)))
df.columns=['Embedding','InData','LOO','22Dt']
df['22Dt'] = round(df['22Dt']*100,2)
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}_{}.csv".format(models,experiment,
classifier,
metric))
print()
|
[
"pandas.DataFrame",
"scikit_posthocs.posthoc_nemenyi_friedman",
"pandas.read_csv",
"glob.glob"
] |
[((263, 301), 'scikit_posthocs.posthoc_nemenyi_friedman', 'sp.posthoc_nemenyi_friedman', (['dataframe'], {}), '(dataframe)\n', (290, 301), True, 'import scikit_posthocs as sp\n'), ((481, 505), 'pandas.DataFrame', 'pd.DataFrame', (['list_index'], {}), '(list_index)\n', (493, 505), True, 'import pandas as pd\n'), ((552, 618), 'pandas.read_csv', 'pd.read_csv', (['dataframe_path'], {'skiprows': '[0, 2]', 'sep': '""","""', 'decimal': '"""."""'}), "(dataframe_path, skiprows=[0, 2], sep=',', decimal='.')\n", (563, 618), True, 'import pandas as pd\n'), ((1489, 1584), 'glob.glob', 'glob.glob', (["(PATH + experiment + '/Pivot_tables/pivot_' + classifier + '*' + metric +\n '*.csv')"], {}), "(PATH + experiment + '/Pivot_tables/pivot_' + classifier + '*' +\n metric + '*.csv')\n", (1498, 1584), False, 'import glob\n'), ((2012, 2111), 'glob.glob', 'glob.glob', (["(PATH + list_models[0] + '/Pivot_tables/pivot_' + classifier + '*' + metric +\n '*.csv')"], {}), "(PATH + list_models[0] + '/Pivot_tables/pivot_' + classifier + '*' +\n metric + '*.csv')\n", (2021, 2111), False, 'import glob\n'), ((2894, 3009), 'glob.glob', 'glob.glob', (["(PATH + 'Fine_tuning_Generic_tweets/' + models + '-1-LM/pivot_' +\n classifier + '*' + metric + '*.csv')"], {}), "(PATH + 'Fine_tuning_Generic_tweets/' + models + '-1-LM/pivot_' +\n classifier + '*' + metric + '*.csv')\n", (2903, 3009), False, 'import glob\n'), ((4465, 4558), 'glob.glob', 'glob.glob', (["(PATH + 'InData/' + models + '-LM/pivot_' + classifier + '*' + metric + '*.csv'\n )"], {}), "(PATH + 'InData/' + models + '-LM/pivot_' + classifier + '*' +\n metric + '*.csv')\n", (4474, 4558), False, 'import glob\n'), ((2228, 2319), 'glob.glob', 'glob.glob', (["(PATH + models + '/Pivot_tables/pivot_' + classifier + '*' + metric + '*.csv')"], {}), "(PATH + models + '/Pivot_tables/pivot_' + classifier + '*' +\n metric + '*.csv')\n", (2237, 2319), False, 'import glob\n'), ((3140, 3263), 'glob.glob', 'glob.glob', (["(PATH + 'Fine_tuning_Generic_tweets/' + models + '-' + k + '-LM/pivot_' +\n classifier + '*' + metric + '*.csv')"], {}), "(PATH + 'Fine_tuning_Generic_tweets/' + models + '-' + k +\n '-LM/pivot_' + classifier + '*' + metric + '*.csv')\n", (3149, 3263), False, 'import glob\n'), ((4727, 4818), 'glob.glob', 'glob.glob', (["(PATH + k + '/' + models + '-LM/pivot_' + classifier + '*' + metric + '*.csv')"], {}), "(PATH + k + '/' + models + '-LM/pivot_' + classifier + '*' +\n metric + '*.csv')\n", (4736, 4818), False, 'import glob\n')]
|
# Copyright 2020 Yalfoosh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from typing import Optional, Tuple, Union
import numpy as np
from . import constants
from .function import Function
def clean_nelder_mead_simplex_search_arguments(
function: Function,
alpha: float,
beta: float,
gamma: float,
sigma: float,
use_jakobovic_expand: bool,
epsilon: float,
max_iterations: int,
verbosity: Optional[str],
decimal_precision: int,
) -> Tuple[Function, float, float, float, float, bool, float, int, int, int]:
"""
Checks the Nelder Mead Simplex Search arguments and returns them prepared for work.
Args:
function (Function): A Function representing the loss function.
alpha (float): A float used in point reflection.
beta (float): A float used in point contraction.
gamma (float): A float used in point expansion.
sigma (float): A float used when moving points to the optimum.
use_jakobovic_expand (bool): A bool determining whether or not to use the
__expand_jakobovic method instead of the __expand method for point expansion.
Defaults to False.
epsilon (float): A float representing the error threshold.
max_iterations (int): An int representing the maximum number of iterations
before the algorithm times out and returns the last found optimum.
verbosity (Optional[str]): A str representing the verbosity of the output during
algorithm execution.
decimal_precision (int): An int representing the number of decimal digits to
round numbers outputted during algorithm execution.
Raises:
TypeError: Raised if argument function is not a Function.
TypeError: Raised if argument alpha is not a float.
TypeError: Raised if argument beta is not a float.
TypeError: Raised if argument gamma is not a float.
TypeError: Raised if argument sigma is not a float.
TypeError: Raised if argument use_jakobovic_expand is not a bool.
TypeError: Raised if argument epsilon is not a float.
ValueError: Raised if argument epsilon is a negative number.
TypeError: Raised if argument max_iterations is not an int.
ValueError: Raised if argument max_iterations is a negative number.
TypeError: Raised if argument verbosity is not a str.
KeyError: Raised if argument verbosity is an invalid key.
TypeError: Raised if argument decimal_precision is not an int.
ValueError: Raised if argument decimal_precision is a negative number.
Returns:
Tuple[Function, float, float, float, float, bool, float, int, int, int]: Cleaned
arguments.
"""
if not isinstance(function, Function):
raise TypeError(
"Expected argument function to be a Function, instead it is "
f"{type(function)}."
)
if isinstance(alpha, int):
alpha = float(alpha)
if not isinstance(alpha, float):
raise TypeError(
"Expected argument alpha to be a float, instead it is " f"{type(alpha)}."
)
if isinstance(beta, int):
beta = float(beta)
if not isinstance(beta, float):
raise TypeError(
"Expected argument beta to be a float, instead it is " f"{type(beta)}."
)
if isinstance(gamma, int):
gamma = float(gamma)
if not isinstance(gamma, float):
raise TypeError(
"Expected argument gamma to be a float, instead it is " f"{type(gamma)}."
)
if isinstance(sigma, int):
sigma = float(sigma)
if not isinstance(sigma, float):
raise TypeError(
"Expected argument sigma to be a float, instead it is " f"{type(sigma)}."
)
if not isinstance(use_jakobovic_expand, bool):
raise TypeError(
"Expected argument use_jakobovic_expand to be a bool, instead it is "
f"{type(use_jakobovic_expand)}."
)
if not isinstance(epsilon, float):
raise TypeError(
"Expected argument epsilon to be a float, instead it is "
f"{type(epsilon)}."
)
if epsilon < 0:
raise ValueError(
"Expected argument epsilon to be a positive float, instead it is "
f"{epsilon}."
)
if not isinstance(max_iterations, int):
raise TypeError(
"Expected argument max_interations to be an int, instead it is "
f"{type(max_iterations)}."
)
if max_iterations < 1:
raise ValueError(
"Expected argument max_interations to be a positive integer, instead it is "
f"{max_iterations}."
)
if verbosity is None:
verbosity = "none"
if not isinstance(verbosity, str):
raise TypeError(
f"Expected argument verbosity to be a str, instead it is {type(verbosity)}."
)
if verbosity not in constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT:
verbosity_dict_length = len(constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT)
if verbosity_dict_length == 0:
verbosity_string = "There are no keys available."
elif verbosity_dict_length == 1:
_key = list(constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT.keys())[0]
verbosity_string = f'The only available key is "{_key}".'
else:
_keys = list(sorted(constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT.keys()))
verbosity_string = "The available keys are "
verbosity_string += ", ".join([str(f'"{x}"') for x in _keys[:-1]])
verbosity_string += f' and "{_keys[-1]}"".'
raise KeyError(
f'Verbosity key "{verbosity}" is not in the Nelder Mead Simplex Verbosity '
f"dictionary. {verbosity_string}"
)
verbosity = constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT[verbosity]
if not isinstance(decimal_precision, int):
raise TypeError(
"Expected argument decimal_precision to be an int, instead it is "
f"{type(decimal_precision)}."
)
if decimal_precision < 1:
raise ValueError(
"Expected argument decimal_precision to be a positive int, instead it is"
f"{decimal_precision}."
)
return (
function,
alpha,
beta,
gamma,
sigma,
use_jakobovic_expand,
epsilon,
max_iterations,
verbosity,
decimal_precision,
)
def clean_get_simplex_points(
start: np.ndarray, stride: Union[float, int]
) -> Tuple[np.ndarray, float]:
"""
Checks the __get_simplex_points arguments and returns them prepared for work.
Args:
start (np.ndarray): A numpy.ndarray representing the starting point for simplex
generation.
stride (Union[float, int]): A float or int representing the stride.
Raises:
TypeError: Raised if argument start is not a numpy.ndarray.
ValueError: Raised if argument start is a zero-length vector.
TypeError: Raised if argument stride is not a float or int.
Returns:
Tuple[np.ndarray, float]: Cleaned arguments.
"""
if not isinstance(start, np.ndarray):
raise TypeError(
"Expected argument start to be a numpy.ndarray, instead it is "
f"{type(start)}."
)
start = np.reshape(start, -1)
if start.shape[0] == 0:
raise ValueError(
"Expected argument starting point to be a vector with at least one "
"element, instead it is empty."
)
if not isinstance(stride, (float, int)):
raise TypeError(
"Expected argument stride to be a float or int, instead it is "
f"{type(stride)}."
)
stride = float(stride)
return start, stride
def __get_simplex_points(start: np.ndarray, stride: float) -> np.ndarray:
"""
Generates simplex points for a starting point.
Args:
start (np.ndarray): A numpy.ndarray representing the starting point for simplex
generation.
stride (float): A float representing the stride.
Returns:
np.ndarray: A matrix with each row representing a point of the simplex.
"""
points = np.tile(start, reps=(start.shape[0], 1))
points = points + stride * np.eye(points.shape[0])
return np.vstack([start, points])
def __reflect(
centroid: np.ndarray, maximum_point: np.ndarray, alpha: float
) -> np.ndarray:
"""
Reflects argument maximum_points wrt centroid by argument alpha.
Args:
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
maximum_point (np.ndarray): A numpy.ndarray representing the worst point of a
simplex.
alpha (float): A float representing the amount a point will be reflected.
Returns:
np.ndarray: A numpy.ndarray representing the reflected point.
"""
return (1 + alpha) * centroid - alpha * maximum_point
def __contract(
centroid: np.ndarray, maximum_point: np.ndarray, beta: float
) -> np.ndarray:
"""
Contracts argument maximum_points wrt centroid by argument beta.
Args:
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
maximum_point (np.ndarray): A numpy.ndarray representing the worst point of a
simplex.
beta (float): A float representing the amount a point will be contracted.
Returns:
np.ndarray: A numpy.ndarray representing the contracted point.
"""
return (1 - beta) * centroid + beta * maximum_point
def __expand(
centroid: np.ndarray, reflected_point: np.ndarray, gamma: float
) -> np.ndarray:
"""
Expands argument reflected_point wrt centroid by argument alpha.
Args:
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
maximum_point (np.ndarray): A numpy.ndarray representing the worst point of a
simplex.
gamma (float): A float representing the amount a point will be expanded.
Returns:
np.ndarray: A numpy.ndarray representing the expanded point.
"""
return (1 - gamma) * centroid + gamma * reflected_point
def __expand_jakobovic(
centroid: np.ndarray, reflected_point: np.ndarray, gamma: float
) -> np.ndarray:
"""
Expands argument reflected_point wrt centroid by argument alpha. This is a modified
version which is supposedly the correct one, as said by prof. Jakobović.
Args:
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
maximum_point (np.ndarray): A numpy.ndarray representing the worst point of a
simplex.
gamma (float): A float representing the amount a point will be expanded.
Returns:
np.ndarray: A numpy.ndarray representing the expanded point.
"""
return (1 - gamma) * centroid - gamma * reflected_point
def __time_to_stop(
simplex_values: np.ndarray, centroid_value: float, epsilon: float
) -> bool:
"""
Checks if it's time to stop Nelder Mead Simplex Search.
Args:
simplex_values (np.ndarray): A numpy.ndarray representing the vector of simplex
values.
centroid_value (float): A float representing the value of the simplex centroid.
epsilon (float): A float representing the error threshold.
Returns:
bool: True if the stopping condition of Nelder Mead Simplex Search has been met,
False otherwise.
"""
difference_in_values = simplex_values - centroid_value
squared_difference_in_values = np.square(difference_in_values)
mean_squared_difference_in_values = np.mean(squared_difference_in_values)
return np.sqrt(mean_squared_difference_in_values) <= epsilon
def __print_nmss_values(
function: Function,
centroid: np.ndarray,
verbosity: int,
decimal_precision: int,
):
"""
Prints the Nelder Mead Simplex Search values.
Args:
function (Function): A Function representing the loss function.
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
verbosity (int): An int representing the level of verbosity of the output during
algorithm execution.
decimal_precision (int): An int representing the number of decimal digits to
round numbers outputted during algorithm execution.
"""
if verbosity == 1:
print(f"c = {np.around(centroid, decimal_precision)}")
elif verbosity > 1:
result = function(centroid, dont_count=True)
result = (
np.around(result, 3)
if isinstance(result, np.ndarray)
else f"{result:.0{decimal_precision}f}"
)
print(f"F(c = {np.around(centroid, decimal_precision)}) = {result}")
def nelder_mead_simplex_search(
function: Function,
start: np.ndarray,
stride: Union[float, int] = 1,
alpha: float = 1.0,
beta: float = 0.5,
gamma: float = 2.0,
sigma: float = 0.5,
use_jakobovic_expand: bool = False,
epsilon: float = 1e-6,
max_iterations: int = 100000,
verbosity: Optional[str] = None,
decimal_precision: int = 3,
) -> np.ndarray:
"""
Uses Nelder Mead Simplex Search to find an n-D optimum of a function.
Args:
function (Function): A Function representing the loss function.
start (np.ndarray): A numpy.ndarray representing the starting point of the
search.
stride (Union[float, int], optional): A float or int representing the stride for
simplex generation. Defaults to 1.
alpha (float, optional): A float used in point reflection. Defaults to 1.0.
beta (float, optional): A float used in point contraction. Defaults to 0.5.
gamma (float, optional): A float used in point expansion. Defaults to 2.0.
sigma (float, optional): A float used when moving points to the optimum.
Defaults to 0.5.
use_jakobovic_expand (float, optional): A bool determining whether or not to use
the __expand_jakobovic method instead of the __expand method for point
expansion. Defaults to False.
epsilon (float, optional): A float representing the error threshold. Defaults to
1e-6.
max_iterations (int, optional): An int representing the maximum number of
iterations before the algorithm times out and returns the last found optimum.
Defaults to 100000.
verbosity (Optional[str], optional): A str representing the verbosity of the
output during algorithm execution. Defaults to None (no output during algorithm
execution).
decimal_precision (int, optional): An int representing the number of decimal
digits to round numbers outputted during algorithm execution. Defaults to 3.
Returns:
np.ndarray: A numpy.ndarray representing the last found optimum.
"""
(
function,
alpha,
beta,
gamma,
sigma,
use_jakobovic_expand,
epsilon,
max_iterations,
verbosity,
decimal_precision,
) = clean_nelder_mead_simplex_search_arguments(
function=function,
alpha=alpha,
beta=beta,
gamma=gamma,
sigma=sigma,
use_jakobovic_expand=use_jakobovic_expand,
epsilon=epsilon,
max_iterations=max_iterations,
verbosity=verbosity,
decimal_precision=decimal_precision,
)
start, stride = clean_get_simplex_points(start=start, stride=stride)
simplex_points = __get_simplex_points(start=start, stride=stride)
simplex_values = np.array([function(x) for x in simplex_points])
timed_out = True
expansion_method = __expand_jakobovic if use_jakobovic_expand else __expand
for _ in range(max_iterations):
minimum_index = np.argmin(simplex_values)
maximum_index = np.argmax(simplex_values)
centroid = np.mean(np.delete(simplex_points, maximum_index, axis=0), axis=0)
__print_nmss_values(
function=function,
centroid=centroid,
verbosity=verbosity,
decimal_precision=decimal_precision,
)
reflected_point = __reflect(
centroid=centroid, maximum_point=simplex_points[maximum_index], alpha=alpha
)
reflected_value = function(reflected_point)
minimum_value = simplex_values[minimum_index]
if reflected_value < minimum_value:
expanded_point = expansion_method(
centroid=centroid, reflected_point=reflected_point, gamma=gamma
)
expanded_value = function(expanded_point)
if expanded_value < minimum_value:
simplex_points[maximum_index] = expanded_point
simplex_values[maximum_index] = expanded_value
else:
simplex_points[maximum_index] = reflected_point
simplex_values[maximum_index] = reflected_value
else:
maximum_value = simplex_values[maximum_index]
if all(np.delete(simplex_values, maximum_index, axis=0) < reflected_value):
if reflected_value < maximum_value:
simplex_points[maximum_index] = reflected_point
simplex_values[maximum_index] = reflected_value
# We need this here since we're introducing a new point and value
minimum_index = np.argmin(simplex_values)
maximum_index = np.argmax(simplex_values)
# We need to do this since the maximum value has potentially changed
maximum_value = simplex_values[maximum_index]
contracted_point = __contract(
centroid=centroid,
maximum_point=simplex_points[maximum_index],
beta=beta,
)
contracted_value = function(contracted_point)
if contracted_value < maximum_value:
simplex_points[maximum_index] = contracted_point
simplex_values[maximum_index] = contracted_value
else:
for i, simplex_point in enumerate(simplex_points):
if i == minimum_index:
continue
simplex_points[i] += (
simplex_points[minimum_index] - simplex_points[i]
) * sigma
simplex_values[i] = function(simplex_points[i])
else:
simplex_points[maximum_index] = reflected_point
simplex_values[maximum_index] = reflected_value
if __time_to_stop(
simplex_values=simplex_values,
centroid_value=function(centroid),
epsilon=epsilon,
):
timed_out = False
break
if timed_out:
print(
f"WARNING: Nelder Mead Simplex Search timed out after {max_iterations} "
"iterations - result might not be a minimum.",
file=sys.stderr,
)
# Do this to get a more precise result
maximum_index = np.argmax(simplex_values)
centroid = np.mean(np.delete(simplex_points, maximum_index, axis=0), axis=0)
return centroid
|
[
"numpy.argmax",
"numpy.square",
"numpy.argmin",
"numpy.around",
"numpy.mean",
"numpy.tile",
"numpy.reshape",
"numpy.eye",
"numpy.delete",
"numpy.vstack",
"numpy.sqrt"
] |
[((7922, 7943), 'numpy.reshape', 'np.reshape', (['start', '(-1)'], {}), '(start, -1)\n', (7932, 7943), True, 'import numpy as np\n'), ((8803, 8843), 'numpy.tile', 'np.tile', (['start'], {'reps': '(start.shape[0], 1)'}), '(start, reps=(start.shape[0], 1))\n', (8810, 8843), True, 'import numpy as np\n'), ((8911, 8937), 'numpy.vstack', 'np.vstack', (['[start, points]'], {}), '([start, points])\n', (8920, 8937), True, 'import numpy as np\n'), ((12136, 12167), 'numpy.square', 'np.square', (['difference_in_values'], {}), '(difference_in_values)\n', (12145, 12167), True, 'import numpy as np\n'), ((12208, 12245), 'numpy.mean', 'np.mean', (['squared_difference_in_values'], {}), '(squared_difference_in_values)\n', (12215, 12245), True, 'import numpy as np\n'), ((19743, 19768), 'numpy.argmax', 'np.argmax', (['simplex_values'], {}), '(simplex_values)\n', (19752, 19768), True, 'import numpy as np\n'), ((12258, 12300), 'numpy.sqrt', 'np.sqrt', (['mean_squared_difference_in_values'], {}), '(mean_squared_difference_in_values)\n', (12265, 12300), True, 'import numpy as np\n'), ((16382, 16407), 'numpy.argmin', 'np.argmin', (['simplex_values'], {}), '(simplex_values)\n', (16391, 16407), True, 'import numpy as np\n'), ((16432, 16457), 'numpy.argmax', 'np.argmax', (['simplex_values'], {}), '(simplex_values)\n', (16441, 16457), True, 'import numpy as np\n'), ((19792, 19840), 'numpy.delete', 'np.delete', (['simplex_points', 'maximum_index'], {'axis': '(0)'}), '(simplex_points, maximum_index, axis=0)\n', (19801, 19840), True, 'import numpy as np\n'), ((8875, 8898), 'numpy.eye', 'np.eye', (['points.shape[0]'], {}), '(points.shape[0])\n', (8881, 8898), True, 'import numpy as np\n'), ((16485, 16533), 'numpy.delete', 'np.delete', (['simplex_points', 'maximum_index'], {'axis': '(0)'}), '(simplex_points, maximum_index, axis=0)\n', (16494, 16533), True, 'import numpy as np\n'), ((13128, 13148), 'numpy.around', 'np.around', (['result', '(3)'], {}), '(result, 3)\n', (13137, 13148), True, 'import numpy as np\n'), ((12978, 13016), 'numpy.around', 'np.around', (['centroid', 'decimal_precision'], {}), '(centroid, decimal_precision)\n', (12987, 13016), True, 'import numpy as np\n'), ((17622, 17670), 'numpy.delete', 'np.delete', (['simplex_values', 'maximum_index'], {'axis': '(0)'}), '(simplex_values, maximum_index, axis=0)\n', (17631, 17670), True, 'import numpy as np\n'), ((18002, 18027), 'numpy.argmin', 'np.argmin', (['simplex_values'], {}), '(simplex_values)\n', (18011, 18027), True, 'import numpy as np\n'), ((18064, 18089), 'numpy.argmax', 'np.argmax', (['simplex_values'], {}), '(simplex_values)\n', (18073, 18089), True, 'import numpy as np\n'), ((13281, 13319), 'numpy.around', 'np.around', (['centroid', 'decimal_precision'], {}), '(centroid, decimal_precision)\n', (13290, 13319), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-08 14:52
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wapps', '0004_add_wapps_image'),
]
operations = [
migrations.RemoveField(
model_name='identitysettings',
name='logo',
),
]
|
[
"django.db.migrations.RemoveField"
] |
[((286, 352), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""identitysettings"""', 'name': '"""logo"""'}), "(model_name='identitysettings', name='logo')\n", (308, 352), False, 'from django.db import migrations\n')]
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from helpers.gh_pages import main
main()
|
[
"helpers.gh_pages.main"
] |
[((78, 84), 'helpers.gh_pages.main', 'main', ([], {}), '()\n', (82, 84), False, 'from helpers.gh_pages import main\n')]
|
from selenium.webdriver.support.select import Select
from Data.parameters import Data
from reuse_func import GetData
class click_on_home():
def __init__(self,driver):
self.driver = driver
def test_homeicon(self):
self.p = GetData()
self.driver.implicitly_wait(20)
self.driver.find_element_by_xpath(Data.hyper).click()
self.p.page_loading(self.driver)
dist = Select(self.driver.find_element_by_name("myDistrict"))
dist.select_by_index(2)
self.p.page_loading(self.driver)
block = Select(self.driver.find_element_by_name("myBlock"))
block.select_by_index(2)
self.p.page_loading(self.driver)
cluster = Select(self.driver.find_element_by_name("myCluster"))
cluster.select_by_index(2)
self.p.page_loading(self.driver)
self.driver.find_element_by_id(Data.homeicon).click()
self.p.page_loading(self.driver)
|
[
"reuse_func.GetData"
] |
[((250, 259), 'reuse_func.GetData', 'GetData', ([], {}), '()\n', (257, 259), False, 'from reuse_func import GetData\n')]
|
import pytest
from mugen import lists
from mugen.lists import MugenList
class Dummy(object):
foo = 1
@pytest.fixture
def mugen_list() -> MugenList:
return MugenList([Dummy(), Dummy(), Dummy(), Dummy(), Dummy(), Dummy()])
@pytest.mark.parametrize("l, expected_foo", [
(mugen_list(), [1, 1, 1, 1, 1, 1])
])
def test_lget(l, expected_foo):
assert l.lget('foo') == expected_foo
@pytest.mark.parametrize("l, expected_l", [
([1, [2, 3], [[4, 5], [6, 7]]], [1, 2, 3, 4, 5, 6, 7])
])
def test_flatten(l, expected_l):
assert lists.flatten(l) == expected_l
def test_mugen_list__operations_yield_mugen_list():
assert type(MugenList() + MugenList()) == MugenList
assert type(MugenList()[1:2]) == MugenList
|
[
"pytest.mark.parametrize",
"mugen.lists.flatten",
"mugen.lists.MugenList"
] |
[((400, 503), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""l, expected_l"""', '[([1, [2, 3], [[4, 5], [6, 7]]], [1, 2, 3, 4, 5, 6, 7])]'], {}), "('l, expected_l', [([1, [2, 3], [[4, 5], [6, 7]]], [\n 1, 2, 3, 4, 5, 6, 7])])\n", (423, 503), False, 'import pytest\n'), ((549, 565), 'mugen.lists.flatten', 'lists.flatten', (['l'], {}), '(l)\n', (562, 565), False, 'from mugen import lists\n'), ((650, 661), 'mugen.lists.MugenList', 'MugenList', ([], {}), '()\n', (659, 661), False, 'from mugen.lists import MugenList\n'), ((664, 675), 'mugen.lists.MugenList', 'MugenList', ([], {}), '()\n', (673, 675), False, 'from mugen.lists import MugenList\n'), ((706, 717), 'mugen.lists.MugenList', 'MugenList', ([], {}), '()\n', (715, 717), False, 'from mugen.lists import MugenList\n')]
|
"""
:Created: 13 July 2015
:Author: <NAME>
"""
import random
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = "home.html"
class CategoryPageView(TemplateView):
template_name = "category-page.html"
def get_context_data(self, items, **kwargs):
context = super().get_context_data(**kwargs)
context["page"] = items._meta.verbose_name_plural.lower()
item_qs = items.objects.filter(visible=True).order_by("order")
heroes = item_qs.filter(hero=True)
regular = item_qs.filter(hero=False)
context["items"] = {"heroes": heroes, "regular": regular}
context["random_hero_unit_index"] = (
random.randint(0, heroes.count() - 1) if heroes.count() > 0 else 0
)
return context
class ItemPageView(TemplateView):
template_name = "item-page.html"
def dispatch(self, request, items, slug, *args, **kwargs):
try:
self.item = items.objects.get(slug=slug)
except items.DoesNotExist:
verbose_name_plural = items._meta.verbose_name_plural.lower()
items_list = "{items}:{items}_list".format(items=verbose_name_plural)
return HttpResponseRedirect(reverse(items_list))
return super().dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["item"] = self.item
context["absolute_uri"] = self.request.build_absolute_uri()
return context
|
[
"django.urls.reverse"
] |
[((1323, 1342), 'django.urls.reverse', 'reverse', (['items_list'], {}), '(items_list)\n', (1330, 1342), False, 'from django.urls import reverse\n')]
|
from discord.ext import commands
import discord
import asyncio
import youtube_dl
import logging
import math
import random
import heapq
from urllib import request
from ..video import Video
from ..video import Setlist
# TODO: abstract FFMPEG options into their own file?
FFMPEG_BEFORE_OPTS = '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5'
"""
Command line options to pass to `ffmpeg` before the `-i`.
See https://stackoverflow.com/questions/43218292/youtubedl-read-error-with-discord-py/44490434#44490434 for more information.
Also, https://ffmpeg.org/ffmpeg-protocols.html for command line option reference.
"""
async def audio_playing(ctx):
"""Checks that audio is currently playing before continuing."""
client = ctx.guild.voice_client
if client and client.channel and client.source:
return True
else:
raise commands.CommandError("Not currently playing any audio.")
async def in_voice_channel(ctx):
"""Checks that the command sender is in the same voice channel as the bot."""
voice = ctx.author.voice
bot_voice = ctx.guild.voice_client
if voice and bot_voice and voice.channel and bot_voice.channel and voice.channel == bot_voice.channel:
return True
else:
raise commands.CommandError(
"You need to be in the channel to do that.")
async def is_audio_requester(ctx):
"""Checks that the command sender is the song requester."""
music = ctx.bot.get_cog("Music")
state = music.get_state(ctx.guild)
permissions = ctx.channel.permissions_for(ctx.author)
if permissions.administrator or state.is_requester(ctx.author):
return True
else:
raise commands.CommandError(
"You need to be the song requester to do that.")
class Music(commands.Cog):
"""Bot commands to help play music."""
def __init__(self, bot, config):
self.bot = bot
self.config = config[__name__.split(".")[
-1]] # retrieve module name, find config entry
self.states = {}
self.bot.add_listener(self.on_reaction_add, "on_reaction_add")
def get_state(self, guild):
"""Gets the state for `guild`, creating it if it does not exist."""
if guild.id in self.states:
return self.states[guild.id]
else:
self.states[guild.id] = GuildState()
return self.states[guild.id]
@commands.command(aliases=["stop"])
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def leave(self, ctx):
"""Leaves the voice channel, if currently in one."""
client = ctx.guild.voice_client
state = self.get_state(ctx.guild)
if client and client.channel:
await client.disconnect()
state.playlist = []
state.now_playing = None
else:
raise commands.CommandError("Not in a voice channel.")
@commands.command(aliases=["resume", "p"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.check(in_voice_channel)
@commands.check(is_audio_requester)
async def pause(self, ctx):
"""Pauses any currently playing audio."""
client = ctx.guild.voice_client
self._pause_audio(client)
def _pause_audio(self, client):
if client.is_paused():
client.resume()
else:
client.pause()
@commands.command(aliases=["vol", "v"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.check(in_voice_channel)
@commands.check(is_audio_requester)
async def volume(self, ctx, volume: int):
"""Change the volume of currently playing audio (values 0-250)."""
state = self.get_state(ctx.guild)
# make sure volume is nonnegative
if volume < 0:
volume = 0
max_vol = self.config["max_volume"]
if max_vol > -1: # check if max volume is set
# clamp volume to [0, max_vol]
if volume > max_vol:
volume = max_vol
client = ctx.guild.voice_client
state.volume = float(volume) / 100.0
client.source.volume = state.volume # update the AudioSource's volume to match
@commands.command()
@commands.guild_only()
@commands.check(audio_playing)
@commands.check(in_voice_channel)
async def skip(self, ctx):
"""Skips the currently playing song, or votes to skip it."""
state = self.get_state(ctx.guild)
client = ctx.guild.voice_client
if ctx.channel.permissions_for(
ctx.author).administrator or state.is_requester(ctx.author):
# immediately skip if requester or admin
client.stop()
elif self.config["vote_skip"]:
# vote to skip song
channel = client.channel
self._vote_skip(channel, ctx.author)
# announce vote
users_in_channel = len([
member for member in channel.members if not member.bot
]) # don't count bots
required_votes = math.ceil(
self.config["vote_skip_ratio"] * users_in_channel)
if required_votes == 0:
required_votes = 1
await ctx.send(
f"{ctx.author.mention} voted to skip ({len(state.skip_votes)}/{required_votes} votes)"
)
else:
raise commands.CommandError("Sorry, vote skipping is disabled.")
def _vote_skip(self, channel, member):
"""Register a vote for `member` to skip the song playing."""
logging.info(f"{member.name} votes to skip")
state = self.get_state(channel.guild)
state.skip_votes.add(member)
users_in_channel = len([
member for member in channel.members if not member.bot
]) # don't count bots
if (float(len(state.skip_votes)) /
users_in_channel) >= self.config["vote_skip_ratio"]:
# enough members have voted to skip, so skip the song
logging.info(f"Enough votes, skipping...")
channel.guild.voice_client.stop()
async def _set_status(self, song=None):
if song:
await self.bot.change_presence(activity=discord.Game(name=f"♫ {song.title}"))
else:
await self.bot.change_presence(activity=None)
def _play_song(self, client, state, song):
state.now_playing = song
state.skip_votes = set() # clear skip votes
asyncio.run_coroutine_threadsafe(self._set_status(song=song), self.bot.loop)
source = discord.PCMVolumeTransformer(
discord.FFmpegPCMAudio(song.stream_url, before_options=FFMPEG_BEFORE_OPTS), volume=state.volume)
def after_playing(err):
if state.autoplay:
more = state.playlist_state.target_length - len(state.playlist)
if more > 0:
state.playlist.append(state.playlist_state.get_num(more))
if len(state.playlist) > 0:
next_song = state.playlist.pop(0)
self._play_song(client, state, next_song)
else:
asyncio.run_coroutine_threadsafe(client.disconnect(),
self.bot.loop)
asyncio.run_coroutine_threadsafe(self._set_status(), self.bot.loop)
client.play(source, after=after_playing)
@commands.command(aliases=["np"])
@commands.guild_only()
@commands.check(audio_playing)
async def nowplaying(self, ctx):
"""Displays information about the current song."""
state = self.get_state(ctx.guild)
message = await ctx.send("", embed=state.now_playing.get_embed())
await self._add_reaction_controls(message)
@commands.command(aliases=["q", "playlist"])
@commands.guild_only()
@commands.check(audio_playing)
async def queue(self, ctx):
"""Display the current play queue."""
state = self.get_state(ctx.guild)
text = self._queue_text(state.playlist)
if state.autoplay:
text += "\n\nAutoplay is enabled."
await ctx.send(text)
def _queue_text(self, queue):
"""Returns a block of text describing a given song queue."""
if len(queue) > 0:
message = [f"{len(queue)} songs in queue:"]
message += [
f" {index+1}. **{song.title}** (requested by **{song.requested_by.display_name}**)"
for (index, song) in enumerate(queue)
] # add individual songs
return "\n".join(message)
else:
return "The play queue is empty."
@commands.command(aliases=["cq"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.has_permissions(administrator=True)
async def clearqueue(self, ctx):
"""Clears the play queue without leaving the channel."""
state = self.get_state(ctx.guild)
state.playlist = []
@commands.command(aliases=["jq"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.has_permissions(administrator=True)
async def jumpqueue(self, ctx, song: int, new_index: int):
"""Moves song at an index to `new_index` in queue."""
state = self.get_state(ctx.guild) # get state for this guild
if 1 <= song <= len(state.playlist) and 1 <= new_index:
song = state.playlist.pop(song - 1) # take song at index...
state.playlist.insert(new_index - 1, song) # and insert it.
await ctx.send(self._queue_text(state.playlist))
else:
raise commands.CommandError("You must use a valid index.")
@commands.command(brief="Plays audio from <url>.")
@commands.guild_only()
async def play(self, ctx, *, url):
"""Plays audio hosted at <url> (or performs a search for <url> and plays the first result)."""
client = ctx.guild.voice_client
state = self.get_state(ctx.guild) # get the guild's state
await self._play(ctx, client, state, url)
async def _play(self, ctx, client, state, url):
if client and client.channel:
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
logging.warn(f"Error downloading video: {e}")
await ctx.send(
"There was an error downloading your video, sorry.")
return
state.playlist.append(video)
message = await ctx.send(
"Added to queue.", embed=video.get_embed())
await self._add_reaction_controls(message)
else:
if ctx.author.voice is not None and ctx.author.voice.channel is not None:
channel = ctx.author.voice.channel
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
await ctx.send(
"There was an error downloading your video, sorry.")
return
client = await channel.connect()
self._play_song(client, state, video)
message = await ctx.send("", embed=video.get_embed())
await self._add_reaction_controls(message)
logging.info(f"Now playing '{video.title}'")
else:
raise commands.CommandError(
"You need to be in a voice channel to do that.")
@commands.command(brief="Queue <url> to play after the one currently playing")
@commands.guild_only()
async def playnext(self, ctx, *, url):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild)
# TODO: maybe make better "nowplaying" checking logic
if not client:
await self._play(ctx, client, state, url)
else:
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
logging.warn(f"Error downloading video: {e}")
await ctx.send(
"There was an error downloading your video, sorry.")
return
state.playlist.insert(0, video)
# TODO: probably make this admin-only, vote, etc
@commands.command(brief="Stop the current song and play <url> right now")
@commands.guild_only()
async def playnow(self, ctx, *, url):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild)
if not client:
await self._play(ctx, client, state, url)
else:
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
logging.warn(f"Error downloading video: {e}")
await ctx.send(
"There was an error downloading your video, sorry.")
return
state.playlist.insert(0, video)
client.stop()
@commands.command(brief="Register the playlist at <url> to the requesting user")
@commands.guild_only()
async def setlist(self, ctx, *, url):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild) # get the guild's state
if url == "remove":
del state.setlists[ctx.author.id]
await ctx.send(f"Deleted playlist for {ctx.author.display_name}")
return
state.setlists[ctx.author.id] = Setlist(url, ctx.author)
await ctx.send(f"Playlist registered for {ctx.author.display_name}")
#self._shuffle_setlists(state, client)
#await self._play(ctx, client, state, state.playlist.pop(0).video_url)
# Shuffle all user's setlists together
def _shuffle_setlists(self, state, client):
temp = []
# Grab a random 5 songs from each user's setlists
for user,setlist in state.setlists.items():
temp += list(map(lambda x: Video(x, user), random.sample(setlist, k=5)))
# Shuffle all the songs together
random.shuffle(temp)
state.playlist = temp
# TODO: rename to something better
@commands.command(brief="TODO")
@commands.guild_only()
async def build(self, ctx, *, num):
try:
num = int(num)
if num <= 0:
raise Exception("not greater than zero")
except:
await ctx.send(f"{num} is not an integer greater than zero")
await self._build(ctx, num)
async def _build(self, ctx, num):
state = self.get_state(ctx.guild)
if not state.setlists.items():
await ctx.send("No registered setlists, ignoring")
return
client = ctx.guild.voice_client
state.playlist_state = PlaylistState(state.setlists)
state.playlist = state.playlist_state.get_num(num)
await self._play(ctx, client, state, state.playlist.pop(0).video_url)
@commands.command(brief="TODO")
@commands.guild_only()
async def extend(self, ctx, *, num):
try:
num = int(num)
if num <= 0:
raise Exception("not greater than zero")
except:
await ctx.send(f"{num} is not an integer greater than zero")
state = self.get_state(ctx.guild)
if not state.setlists.items():
await ctx.send("No registered setlists, ignoring")
return
if not state.playlist_state:
await ctx.send("Playlist mode not activated, use !build to start")
return
state.playlist += state.playlist_state.get_num(num)
@commands.command(brief="Toggle autoplay mode from registered setlists",
aliases=["a", "ap", "yolo"])
@commands.guild_only()
async def autoplay(self, ctx):
state = self.get_state(ctx.guild)
state.autoplay = not state.autoplay
await ctx.send(f"Autoplay has been {'enabled' if state.autoplay else 'disabled'}")
if state.autoplay and not state.playlist_state:
await self._build(ctx, 10)
elif not state.autoplay:
state.playlist_state = None
@commands.command(brief="Reshuffle user setlists and generate a new queue")
@commands.guild_only()
@commands.check(audio_playing)
async def reshuffle(self, ctx):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild) # get the guild's state
await ctx.send("Regenerating play queue.")
self._shuffle_setlists(state, client)
await ctx.send(self._queue_text(state.playlist))
async def on_reaction_add(self, reaction, user):
"""Respods to reactions added to the bot's messages, allowing reactions to control playback."""
message = reaction.message
if user != self.bot.user and message.author == self.bot.user:
await message.remove_reaction(reaction, user)
if message.guild and message.guild.voice_client:
user_in_channel = user.voice and user.voice.channel and user.voice.channel == message.guild.voice_client.channel
permissions = message.channel.permissions_for(user)
guild = message.guild
state = self.get_state(guild)
if permissions.administrator or (
user_in_channel and state.is_requester(user)):
client = message.guild.voice_client
if reaction.emoji == "⏯":
# pause audio
self._pause_audio(client)
elif reaction.emoji == "⏭":
# skip audio
client.stop()
elif reaction.emoji == "⏮":
state.playlist.insert(
0, state.now_playing
) # insert current song at beginning of playlist
client.stop() # skip ahead
elif reaction.emoji == "⏭" and self.config["vote_skip"] and user_in_channel and message.guild.voice_client and message.guild.voice_client.channel:
# ensure that skip was pressed, that vote skipping is
# enabled, the user is in the channel, and that the bot is
# in a voice channel
voice_channel = message.guild.voice_client.channel
self._vote_skip(voice_channel, user)
# announce vote
channel = message.channel
users_in_channel = len([
member for member in voice_channel.members
if not member.bot
]) # don't count bots
required_votes = math.ceil(
self.config["vote_skip_ratio"] * users_in_channel)
if required_votes == 0:
required_votes = 1
await channel.send(
f"{user.mention} voted to skip ({len(state.skip_votes)}/{required_votes} votes)"
)
async def _add_reaction_controls(self, message):
"""Adds a 'control-panel' of reactions to a message that can be used to control the bot."""
CONTROLS = ["⏮", "⏯", "⏭"]
for control in CONTROLS:
await message.add_reaction(control)
# TODO: Holy crap absolutely don't expose this one to the public.
@commands.command()
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def debug(self, ctx, *, url):
state = self.get_state(ctx.guild) # get the guild's state
try:
ret = f"```{str(eval(url))[:1900]}```"
except Exception as e:
ret = e
await ctx.send(f"{ret}")
class GuildState:
"""Helper class managing per-guild state."""
def __init__(self):
self.volume = 1.0
self.playlist = []
self.skip_votes = set()
self.now_playing = None
# userid -> Setlist
self.setlists = {}
self.playlist_state = None
self.autoplay = False
def is_requester(self, user):
return self.now_playing.requested_by == user
class PlaylistState:
"""Helper class to manage a playlist state"""
# users: list(userid, userid...)
def __init__(self, setlists):
# list((num, userid))
self.user_playtime = [(0, u) for u in setlists.keys()]
random.shuffle(self.user_playtime) # ensure the first song picked is random
# userid -> Setlist
# copy from guild state, pops played songs
self.user_setlists = {u:v.copy() for u,v in setlists.items()}
# TODO: probably make this configurable
self.target_length = 10
# Shuffle each setlist so we can always just take from the front
for _,v in self.user_setlists.items():
random.shuffle(v)
# Get a list of <num> songs, increment play times
def get_num(self, num):
ret = []
# TODO: yeah this is a problem.
# This function stalls if you build too much, so this needs to be reconsidered.
# Maybe autoplay should be the only behavior, and it only queues out maybe 10 in advance
if num >= 20:
num = 20
for i in range(num):
ret.append(self.next())
return ret
# Return a video object for the next song to play
def next(self):
time, userid = heapq.heappop(self.user_playtime)
# TODO: refill playlist when a user's runs out
video = self.user_setlists[userid].pop(0)
video = Video(video, self.user_setlists[userid].requester)
time += video.duration
heapq.heappush(self.user_playtime, (time, userid))
return video
|
[
"discord.ext.commands.command",
"heapq.heappush",
"math.ceil",
"logging.warn",
"discord.ext.commands.has_permissions",
"discord.ext.commands.check",
"discord.ext.commands.CommandError",
"random.shuffle",
"random.sample",
"heapq.heappop",
"logging.info",
"discord.Game",
"discord.FFmpegPCMAudio",
"discord.ext.commands.guild_only"
] |
[((2398, 2432), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['stop']"}), "(aliases=['stop'])\n", (2414, 2432), False, 'from discord.ext import commands\n'), ((2438, 2459), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (2457, 2459), False, 'from discord.ext import commands\n'), ((2465, 2509), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'administrator': '(True)'}), '(administrator=True)\n', (2489, 2509), False, 'from discord.ext import commands\n'), ((2917, 2958), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['resume', 'p']"}), "(aliases=['resume', 'p'])\n", (2933, 2958), False, 'from discord.ext import commands\n'), ((2964, 2985), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (2983, 2985), False, 'from discord.ext import commands\n'), ((2991, 3020), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (3005, 3020), False, 'from discord.ext import commands\n'), ((3026, 3058), 'discord.ext.commands.check', 'commands.check', (['in_voice_channel'], {}), '(in_voice_channel)\n', (3040, 3058), False, 'from discord.ext import commands\n'), ((3064, 3098), 'discord.ext.commands.check', 'commands.check', (['is_audio_requester'], {}), '(is_audio_requester)\n', (3078, 3098), False, 'from discord.ext import commands\n'), ((3398, 3436), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['vol', 'v']"}), "(aliases=['vol', 'v'])\n", (3414, 3436), False, 'from discord.ext import commands\n'), ((3442, 3463), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (3461, 3463), False, 'from discord.ext import commands\n'), ((3469, 3498), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (3483, 3498), False, 'from discord.ext import commands\n'), ((3504, 3536), 'discord.ext.commands.check', 'commands.check', (['in_voice_channel'], {}), '(in_voice_channel)\n', (3518, 3536), False, 'from discord.ext import commands\n'), ((3542, 3576), 'discord.ext.commands.check', 'commands.check', (['is_audio_requester'], {}), '(is_audio_requester)\n', (3556, 3576), False, 'from discord.ext import commands\n'), ((4219, 4237), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (4235, 4237), False, 'from discord.ext import commands\n'), ((4243, 4264), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (4262, 4264), False, 'from discord.ext import commands\n'), ((4270, 4299), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (4284, 4299), False, 'from discord.ext import commands\n'), ((4305, 4337), 'discord.ext.commands.check', 'commands.check', (['in_voice_channel'], {}), '(in_voice_channel)\n', (4319, 4337), False, 'from discord.ext import commands\n'), ((7410, 7442), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['np']"}), "(aliases=['np'])\n", (7426, 7442), False, 'from discord.ext import commands\n'), ((7448, 7469), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (7467, 7469), False, 'from discord.ext import commands\n'), ((7475, 7504), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (7489, 7504), False, 'from discord.ext import commands\n'), ((7774, 7817), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['q', 'playlist']"}), "(aliases=['q', 'playlist'])\n", (7790, 7817), False, 'from discord.ext import commands\n'), ((7823, 7844), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (7842, 7844), False, 'from discord.ext import commands\n'), ((7850, 7879), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (7864, 7879), False, 'from discord.ext import commands\n'), ((8661, 8693), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['cq']"}), "(aliases=['cq'])\n", (8677, 8693), False, 'from discord.ext import commands\n'), ((8699, 8720), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (8718, 8720), False, 'from discord.ext import commands\n'), ((8726, 8755), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (8740, 8755), False, 'from discord.ext import commands\n'), ((8761, 8805), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'administrator': '(True)'}), '(administrator=True)\n', (8785, 8805), False, 'from discord.ext import commands\n'), ((8984, 9016), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['jq']"}), "(aliases=['jq'])\n", (9000, 9016), False, 'from discord.ext import commands\n'), ((9022, 9043), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (9041, 9043), False, 'from discord.ext import commands\n'), ((9049, 9078), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (9063, 9078), False, 'from discord.ext import commands\n'), ((9084, 9128), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'administrator': '(True)'}), '(administrator=True)\n', (9108, 9128), False, 'from discord.ext import commands\n'), ((9687, 9736), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Plays audio from <url>."""'}), "(brief='Plays audio from <url>.')\n", (9703, 9736), False, 'from discord.ext import commands\n'), ((9742, 9763), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (9761, 9763), False, 'from discord.ext import commands\n'), ((11503, 11580), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Queue <url> to play after the one currently playing"""'}), "(brief='Queue <url> to play after the one currently playing')\n", (11519, 11580), False, 'from discord.ext import commands\n'), ((11586, 11607), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (11605, 11607), False, 'from discord.ext import commands\n'), ((12295, 12367), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Stop the current song and play <url> right now"""'}), "(brief='Stop the current song and play <url> right now')\n", (12311, 12367), False, 'from discord.ext import commands\n'), ((12373, 12394), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (12392, 12394), False, 'from discord.ext import commands\n'), ((12993, 13072), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Register the playlist at <url> to the requesting user"""'}), "(brief='Register the playlist at <url> to the requesting user')\n", (13009, 13072), False, 'from discord.ext import commands\n'), ((13078, 13099), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (13097, 13099), False, 'from discord.ext import commands\n'), ((14152, 14182), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""TODO"""'}), "(brief='TODO')\n", (14168, 14182), False, 'from discord.ext import commands\n'), ((14188, 14209), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (14207, 14209), False, 'from discord.ext import commands\n'), ((14950, 14980), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""TODO"""'}), "(brief='TODO')\n", (14966, 14980), False, 'from discord.ext import commands\n'), ((14986, 15007), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (15005, 15007), False, 'from discord.ext import commands\n'), ((15627, 15731), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Toggle autoplay mode from registered setlists"""', 'aliases': "['a', 'ap', 'yolo']"}), "(brief='Toggle autoplay mode from registered setlists',\n aliases=['a', 'ap', 'yolo'])\n", (15643, 15731), False, 'from discord.ext import commands\n'), ((15755, 15776), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (15774, 15776), False, 'from discord.ext import commands\n'), ((16167, 16241), 'discord.ext.commands.command', 'commands.command', ([], {'brief': '"""Reshuffle user setlists and generate a new queue"""'}), "(brief='Reshuffle user setlists and generate a new queue')\n", (16183, 16241), False, 'from discord.ext import commands\n'), ((16247, 16268), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (16266, 16268), False, 'from discord.ext import commands\n'), ((16274, 16303), 'discord.ext.commands.check', 'commands.check', (['audio_playing'], {}), '(audio_playing)\n', (16288, 16303), False, 'from discord.ext import commands\n'), ((19458, 19476), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (19474, 19476), False, 'from discord.ext import commands\n'), ((19482, 19503), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (19501, 19503), False, 'from discord.ext import commands\n'), ((19509, 19553), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'administrator': '(True)'}), '(administrator=True)\n', (19533, 19553), False, 'from discord.ext import commands\n'), ((858, 915), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""Not currently playing any audio."""'], {}), "('Not currently playing any audio.')\n", (879, 915), False, 'from discord.ext import commands\n'), ((1252, 1318), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""You need to be in the channel to do that."""'], {}), "('You need to be in the channel to do that.')\n", (1273, 1318), False, 'from discord.ext import commands\n'), ((1679, 1749), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""You need to be the song requester to do that."""'], {}), "('You need to be the song requester to do that.')\n", (1700, 1749), False, 'from discord.ext import commands\n'), ((5579, 5623), 'logging.info', 'logging.info', (['f"""{member.name} votes to skip"""'], {}), "(f'{member.name} votes to skip')\n", (5591, 5623), False, 'import logging\n'), ((14055, 14075), 'random.shuffle', 'random.shuffle', (['temp'], {}), '(temp)\n', (14069, 14075), False, 'import random\n'), ((20476, 20510), 'random.shuffle', 'random.shuffle', (['self.user_playtime'], {}), '(self.user_playtime)\n', (20490, 20510), False, 'import random\n'), ((21489, 21522), 'heapq.heappop', 'heapq.heappop', (['self.user_playtime'], {}), '(self.user_playtime)\n', (21502, 21522), False, 'import heapq\n'), ((21737, 21787), 'heapq.heappush', 'heapq.heappush', (['self.user_playtime', '(time, userid)'], {}), '(self.user_playtime, (time, userid))\n', (21751, 21787), False, 'import heapq\n'), ((2862, 2910), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""Not in a voice channel."""'], {}), "('Not in a voice channel.')\n", (2883, 2910), False, 'from discord.ext import commands\n'), ((6028, 6070), 'logging.info', 'logging.info', (['f"""Enough votes, skipping..."""'], {}), "(f'Enough votes, skipping...')\n", (6040, 6070), False, 'import logging\n'), ((6620, 6694), 'discord.FFmpegPCMAudio', 'discord.FFmpegPCMAudio', (['song.stream_url'], {'before_options': 'FFMPEG_BEFORE_OPTS'}), '(song.stream_url, before_options=FFMPEG_BEFORE_OPTS)\n', (6642, 6694), False, 'import discord\n'), ((9628, 9680), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""You must use a valid index."""'], {}), "('You must use a valid index.')\n", (9649, 9680), False, 'from discord.ext import commands\n'), ((20916, 20933), 'random.shuffle', 'random.shuffle', (['v'], {}), '(v)\n', (20930, 20933), False, 'import random\n'), ((5073, 5133), 'math.ceil', 'math.ceil', (["(self.config['vote_skip_ratio'] * users_in_channel)"], {}), "(self.config['vote_skip_ratio'] * users_in_channel)\n", (5082, 5133), False, 'import math\n'), ((5399, 5457), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""Sorry, vote skipping is disabled."""'], {}), "('Sorry, vote skipping is disabled.')\n", (5420, 5457), False, 'from discord.ext import commands\n'), ((11319, 11363), 'logging.info', 'logging.info', (['f"""Now playing \'{video.title}\'"""'], {}), '(f"Now playing \'{video.title}\'")\n', (11331, 11363), False, 'import logging\n'), ((11404, 11474), 'discord.ext.commands.CommandError', 'commands.CommandError', (['"""You need to be in a voice channel to do that."""'], {}), "('You need to be in a voice channel to do that.')\n", (11425, 11474), False, 'from discord.ext import commands\n'), ((10286, 10331), 'logging.warn', 'logging.warn', (['f"""Error downloading video: {e}"""'], {}), "(f'Error downloading video: {e}')\n", (10298, 10331), False, 'import logging\n'), ((12017, 12062), 'logging.warn', 'logging.warn', (['f"""Error downloading video: {e}"""'], {}), "(f'Error downloading video: {e}')\n", (12029, 12062), False, 'import logging\n'), ((12741, 12786), 'logging.warn', 'logging.warn', (['f"""Error downloading video: {e}"""'], {}), "(f'Error downloading video: {e}')\n", (12753, 12786), False, 'import logging\n'), ((13975, 14002), 'random.sample', 'random.sample', (['setlist'], {'k': '(5)'}), '(setlist, k=5)\n', (13988, 14002), False, 'import random\n'), ((6231, 6267), 'discord.Game', 'discord.Game', ([], {'name': 'f"""♫ {song.title}"""'}), "(name=f'♫ {song.title}')\n", (6243, 6267), False, 'import discord\n'), ((18772, 18832), 'math.ceil', 'math.ceil', (["(self.config['vote_skip_ratio'] * users_in_channel)"], {}), "(self.config['vote_skip_ratio'] * users_in_channel)\n", (18781, 18832), False, 'import math\n')]
|
import matplotlib.pyplot as plt
from multiprocessing import Pool, Manager, cpu_count
from functools import partial
import numpy as np
from bs4 import BeautifulSoup
from colour import Color
import copy
import math
import re
import time
from consts import QWERTY, THUMBS, COORDS
CACHE = {}
def cleanhtml(raw_html):
soup = BeautifulSoup(raw_html, "lxml")
spans = soup.find_all('span')
lowercase = ''.join([i.text.replace('Пользователь 2: ', '').replace('Пользователь 1: ', '') for i in spans]).lower()
return re.sub('[^а-я]+', '', lowercase)
def generate_strokes(sample, QWERTY):
zones = {}
for idr, row in enumerate(QWERTY):
for idk, key in enumerate(row):
zones[key] = THUMBS[idr][idk]
strokes = {}
stroke = ''
for idx, char in enumerate(sample):
current_zone = zones[char]
stroke += char
if idx + 1 < len(sample) and zones[sample[idx + 1]] != current_zone:
r_stroke = stroke[::-1]
if stroke in strokes:
strokes[stroke]["count"] += 1
elif r_stroke in strokes:
strokes[r_stroke]["count"] += 1
else:
strokes[stroke] = {"zone": current_zone, "count": 1}
stroke = ''
if idx + 1 == len(sample):
r_stroke = stroke[::-1]
if stroke in strokes:
strokes[stroke]["count"] += 1
elif r_stroke in strokes:
strokes[r_stroke]["count"] += 1
else:
strokes[stroke] = {"zone": current_zone, "count": 1}
return strokes
def calculateDistance(x1,y1,x2,y2):
global CACHE
if f"{x1}{y1}{x2}{y2}" in CACHE:
return CACHE[f"{x1}{y1}{x2}{y2}"]
if f"{x2}{y2}{x1}{y1}" in CACHE:
return CACHE[f"{x2}{y2}{x1}{y1}"]
dist = math.sqrt((x2 - x1)**2 + (y2 - y1)**2)
CACHE[f"{x1}{y1}{x2}{y2}"] = dist
return dist
def finger_heatmap(finger_distances):
return [[
finger_distances['ЛМ'],
finger_distances['ЛБ'],
finger_distances['ЛС'],
finger_distances['ЛУ'],
finger_distances['ПУ'],
finger_distances['ПС'],
finger_distances['ПБ'],
finger_distances['ПМ']
]]
def shift_row(c, row_num, value):
new_coords = copy.deepcopy(c)
for idx, cell in enumerate(new_coords[row_num]):
new_coords[row_num][idx][0] = new_coords[row_num][idx][0] + value
return new_coords
def shift_col(c, col_num, value):
new_coords = copy.deepcopy(c)
for idx, row in enumerate(new_coords):
new_coords[idx][col_num][1] = new_coords[idx][col_num][1] + value
return new_coords
def get_mapper(c, k):
text_mapper = {
item: {
'x': c[idx][idy][0],
'y': c[idx][idy][1],
'thumb': THUMBS[idx][idy]
} for idx, sublist in enumerate(k) for idy, item in enumerate(sublist)
}
# print(json.dumps(text_mapper, indent=2, ensure_ascii=False))
return text_mapper
def draw_keyboard(coords, QWERTY):
x = [i[0] for i in [item for sublist in coords for item in sublist]]
y = [i[1] for i in [item for sublist in coords for item in sublist]]
n = [item for sublist in QWERTY for item in sublist]
fig, ax = plt.subplots()
ax.scatter(x, y, marker=",", s=620, color=(0.5, 0.5, 0.5))
ax.set_title('Координаты клавиш', fontsize=10)
ax.set_aspect('equal', 'box')
# Or if you want different settings for the grids:
major_ticks = np.arange(-20, 210, 20)
minor_ticks = np.arange(-20, 210, 5)
ax.set_xticks(major_ticks)
ax.set_xticks(minor_ticks, minor=True)
ax.set_yticks(major_ticks)
ax.set_yticks(minor_ticks, minor=True)
# And a corresponding grid
ax.grid(which='both')
# Or if you want different settings for the grids:
ax.grid(which='minor', alpha=0.2)
ax.grid(which='major', alpha=0.5)
ax.axis([-12, 210, -12, 48])
for i, txt in enumerate(n):
ax.annotate(txt, (x[i], y[i]), color=(1, 1, 1))
def get_keyboard(coords, QWERTY):
x = [i[0] for i in [item for sublist in coords for item in sublist]]
y = [i[1] for i in [item for sublist in coords for item in sublist]]
n = [item for sublist in QWERTY for item in sublist]
fig, ax = plt.subplots()
ax.scatter(x, y, marker=",", s=620, color=(0.5, 0.5, 0.5))
ax.set_title('Координаты клавиш', fontsize=10)
ax.set_aspect('equal', 'box')
# Or if you want different settings for the grids:
major_ticks = np.arange(-20, 210, 20)
minor_ticks = np.arange(-20, 210, 5)
ax.set_xticks(major_ticks)
ax.set_xticks(minor_ticks, minor=True)
ax.set_yticks(major_ticks)
ax.set_yticks(minor_ticks, minor=True)
# And a corresponding grid
ax.grid(which='both')
# Or if you want different settings for the grids:
ax.grid(which='minor', alpha=0.2)
ax.grid(which='major', alpha=0.5)
ax.axis([-12, 210, -12, 48])
for i, txt in enumerate(n):
ax.annotate(txt, (x[i], y[i]), color=(1, 1, 1))
return ax
def count_presses(text):
press_count = {}
for idx, char in enumerate(text):
if char not in press_count:
press_count[char] = 1
else:
press_count[char] += 1
return press_count
def press_heatmap(presses_counts, QWERTY):
return [[presses_counts[item] if item in presses_counts else 0 for item in row] for row in QWERTY]
def zone_distances(zone, press_count):
keys = []
default_position = {
'ЛМ': COORDS[1][0],
'ЛБ': COORDS[1][1],
'ЛС': COORDS[1][2],
'ЛУ': COORDS[1][3],
'ПУ': COORDS[1][6],
'ПС': COORDS[1][7],
'ПБ': COORDS[1][8],
'ПМ': COORDS[1][9],
}
for idr, row in enumerate(QWERTY):
for idk, key in enumerate(row):
if THUMBS[idr][idk] == zone and len(QWERTY[idr][idk]) > 0:
x1, y1 = default_position[zone][0], default_position[zone][1]
x2, y2 = COORDS[idr][idk][0], COORDS[idr][idk][1]
distance = calculateDistance(x1, y1, x2, y2)
keys.append({
"symbol": QWERTY[idr][idk],
"distance": distance,
"press_count": press_count[QWERTY[idr][idk]]
})
return sorted(keys, key=lambda i: i["press_count"], reverse=True)
def distance_deltas(distance, distance_1):
sum = 0
for k, v in distance.items():
delta = v - distance_1[k]
sum += delta
print(f"{k}: {distance_1[k] / 1000:.2f} м - меньше на {delta / 1000:.2f} м ({(1 - (distance_1[k] / v)) * 100:.2f}%)")
print(f"\nОбщая дистанция уменшилась на {sum / 1000:.2f} м")
def count_stroke_distance(default_position, default_keys, mapper, stroke):
text = stroke["stroke"]
zone = stroke["zone"]
count = stroke["count"]
pairs = []
total_distance = 0
if len(text) <= 1:
return
for idx, char in enumerate(text):
if idx + 1 == len(text):
char_1 = char
x1 = default_position[mapper[char]['thumb']][0]
y1 = default_position[mapper[char]['thumb']][1]
char_2 = default_keys[zone]
x2 = mapper[char]['x']
y2 = mapper[char]['y']
distance = calculateDistance(x1, y1, x2, y2)
total_distance += distance
pair = f"{char_1}{char_2}"
pairs.append({
"pair": pair,
"distance": distance
})
if idx == 0:
char_1 = default_keys[zone]
x1 = default_position[mapper[char]['thumb']][0]
y1 = default_position[mapper[char]['thumb']][1]
char_2 = char
x2 = mapper[char]['x']
y2 = mapper[char]['y']
distance = calculateDistance(x1, y1, x2, y2)
total_distance += distance
pair = f"{char_1}{char_2}"
pairs.append({
"pair": pair,
"distance": distance
})
else:
char_1 = text[idx - 1]
x1 = mapper[char_1]['x']
y1 = mapper[char_1]['y']
char_2 = char
x2 = mapper[char_2]['x']
y2 = mapper[char_2]['y']
distance = calculateDistance(x1, y1, x2, y2)
total_distance += distance
pair = f"{char_1}{char_2}"
pairs.append({
"pair": pair,
"distance": distance
})
return {
"pairs": pairs,
"count": count,
"total_distance": total_distance,
"zone": zone
}
def draw_stroke_lines(pairs, COORDS, QWERTY, row_count, max_value, max_line_width):
ax = get_keyboard(COORDS, QWERTY)
mapper = get_mapper(COORDS, QWERTY)
red = Color("green")
colors = list(red.range_to(Color("red"),100))
for pair, distance in pairs.items():
stroke_a, stroke_b = pair[0], pair[1]
x1 = mapper[stroke_a]['x']
y1 = mapper[stroke_a]['y']
x2 = mapper[stroke_b]['x']
y2 = mapper[stroke_b]['y']
linewidth = (max_line_width / max_value) * distance
color_hue = (100 / max_value) * distance
color_hue = int(round(color_hue))
r, g, b = colors[color_hue - 1].rgb
ax.plot([x1,x2],[y1,y2], linewidth=linewidth, color=(r, g, b, 1))
def process_strokes(strokes, coords, qwerty):
distances = {
'ЛМ': 0,
'ЛБ': 0,
'ЛС': 0,
'ЛУ': 0,
'ПУ': 0,
'ПС': 0,
'ПБ': 0,
'ПМ': 0,
}
default_keys = {
'ЛМ': qwerty[1][0],
'ЛБ': qwerty[1][1],
'ЛС': qwerty[1][2],
'ЛУ': qwerty[1][3],
'ПУ': qwerty[1][6],
'ПС': qwerty[1][7],
'ПБ': qwerty[1][8],
'ПМ': qwerty[1][9],
}
default_position = {
'ЛМ': coords[1][0],
'ЛБ': coords[1][1],
'ЛС': coords[1][2],
'ЛУ': coords[1][3],
'ПУ': coords[1][6],
'ПС': coords[1][7],
'ПБ': coords[1][8],
'ПМ': coords[1][9],
}
start_time = time.time()
mapper = get_mapper(coords, qwerty)
pairs = {}
num_workers = cpu_count()
p = Pool(num_workers)
manager = Manager()
func = partial(count_stroke_distance, default_position, default_keys, mapper)
results = p.map_async(func, strokes).get()
p.close()
p.join()
for stroke_distance in results:
if stroke_distance is None:
continue
# stroke_distance = count_stroke_distance(COORDS, QWERTY, THUMBS, default_position, default_keys, stroke)
distances[stroke_distance["zone"]] += stroke_distance["total_distance"] * stroke_distance["count"]
for pair in stroke_distance["pairs"]:
if pair["pair"] in pairs:
pairs[pair["pair"]] += pair["distance"] * stroke_distance["count"]
elif f'{pair["pair"][1]}{pair["pair"][0]}' in pairs:
pairs[f'{pair["pair"][1]}{pair["pair"][0]}'] += pair["distance"] * stroke_distance["count"]
else:
pairs[pair["pair"]] = pair["distance"] * stroke_distance["count"]
print("--- %s seconds ---" % (time.time() - start_time))
return {
"pairs": pairs,
"distances": distances
}
|
[
"colour.Color",
"functools.partial",
"copy.deepcopy",
"math.sqrt",
"multiprocessing.Manager",
"time.time",
"numpy.arange",
"multiprocessing.Pool",
"bs4.BeautifulSoup",
"matplotlib.pyplot.subplots",
"re.sub",
"multiprocessing.cpu_count"
] |
[((328, 359), 'bs4.BeautifulSoup', 'BeautifulSoup', (['raw_html', '"""lxml"""'], {}), "(raw_html, 'lxml')\n", (341, 359), False, 'from bs4 import BeautifulSoup\n'), ((526, 558), 're.sub', 're.sub', (['"""[^а-я]+"""', '""""""', 'lowercase'], {}), "('[^а-я]+', '', lowercase)\n", (532, 558), False, 'import re\n'), ((1829, 1871), 'math.sqrt', 'math.sqrt', (['((x2 - x1) ** 2 + (y2 - y1) ** 2)'], {}), '((x2 - x1) ** 2 + (y2 - y1) ** 2)\n', (1838, 1871), False, 'import math\n'), ((2295, 2311), 'copy.deepcopy', 'copy.deepcopy', (['c'], {}), '(c)\n', (2308, 2311), False, 'import copy\n'), ((2514, 2530), 'copy.deepcopy', 'copy.deepcopy', (['c'], {}), '(c)\n', (2527, 2530), False, 'import copy\n'), ((3264, 3278), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3276, 3278), True, 'import matplotlib.pyplot as plt\n'), ((3501, 3524), 'numpy.arange', 'np.arange', (['(-20)', '(210)', '(20)'], {}), '(-20, 210, 20)\n', (3510, 3524), True, 'import numpy as np\n'), ((3543, 3565), 'numpy.arange', 'np.arange', (['(-20)', '(210)', '(5)'], {}), '(-20, 210, 5)\n', (3552, 3565), True, 'import numpy as np\n'), ((4282, 4296), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (4294, 4296), True, 'import matplotlib.pyplot as plt\n'), ((4519, 4542), 'numpy.arange', 'np.arange', (['(-20)', '(210)', '(20)'], {}), '(-20, 210, 20)\n', (4528, 4542), True, 'import numpy as np\n'), ((4561, 4583), 'numpy.arange', 'np.arange', (['(-20)', '(210)', '(5)'], {}), '(-20, 210, 5)\n', (4570, 4583), True, 'import numpy as np\n'), ((8824, 8838), 'colour.Color', 'Color', (['"""green"""'], {}), "('green')\n", (8829, 8838), False, 'from colour import Color\n'), ((10123, 10134), 'time.time', 'time.time', ([], {}), '()\n', (10132, 10134), False, 'import time\n'), ((10208, 10219), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (10217, 10219), False, 'from multiprocessing import Pool, Manager, cpu_count\n'), ((10228, 10245), 'multiprocessing.Pool', 'Pool', (['num_workers'], {}), '(num_workers)\n', (10232, 10245), False, 'from multiprocessing import Pool, Manager, cpu_count\n'), ((10260, 10269), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (10267, 10269), False, 'from multiprocessing import Pool, Manager, cpu_count\n'), ((10281, 10351), 'functools.partial', 'partial', (['count_stroke_distance', 'default_position', 'default_keys', 'mapper'], {}), '(count_stroke_distance, default_position, default_keys, mapper)\n', (10288, 10351), False, 'from functools import partial\n'), ((8870, 8882), 'colour.Color', 'Color', (['"""red"""'], {}), "('red')\n", (8875, 8882), False, 'from colour import Color\n'), ((11215, 11226), 'time.time', 'time.time', ([], {}), '()\n', (11224, 11226), False, 'import time\n')]
|
class remoteGet:
def __init__(self, link, saveTo):
self._link = link
self._saveTo = saveTo
def getFile(self):
'''(NoneType) -> NoneType
Retrieves file from set url to set local destination
Raises CannotRetrieveFileException
Returns NoneType
'''
import urllib
try:
urllib.request.urlretrieve(self._link, self._saveTo)
except:
raise CannotRetrieveFileException(self._link, self._saveTo)
def isNew(self):
'''(NoneType) -> bool
returns true if file at remote URL is different than file located at local destination
else returns false
Raises CannotRetrieveFileException
Returns bool
'''
import hashlib
import urllib
import os
try:
urllib.request.urlretrieve(self._link, self._saveTo + ".TMP")
except:
raise CannotRetrieveFileException(self._link, self._saveTo)
hashgen = hashlib.md5()
with open(self._saveTo + ".TMP", 'rb') as afile:
buf = afile.read()
hashgen.update(buf)
csumNew = hashgen.hexdigest()
hashgen2 = hashlib.md5()
with open(self._saveTo, 'rb') as afile:
buf2 = afile.read()
hashgen2.update(buf2)
csumOriginal = hashgen2.hexdigest()
os.remove(self._saveTo + ".TMP")
return not (csumNew == csumOriginal)
class CannotRetrieveFileException(Exception):
pass
|
[
"urllib.request.urlretrieve",
"os.remove",
"hashlib.md5"
] |
[((1004, 1017), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (1015, 1017), False, 'import hashlib\n'), ((1195, 1208), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (1206, 1208), False, 'import hashlib\n'), ((1375, 1407), 'os.remove', 'os.remove', (["(self._saveTo + '.TMP')"], {}), "(self._saveTo + '.TMP')\n", (1384, 1407), False, 'import os\n'), ((357, 409), 'urllib.request.urlretrieve', 'urllib.request.urlretrieve', (['self._link', 'self._saveTo'], {}), '(self._link, self._saveTo)\n', (383, 409), False, 'import urllib\n'), ((836, 897), 'urllib.request.urlretrieve', 'urllib.request.urlretrieve', (['self._link', "(self._saveTo + '.TMP')"], {}), "(self._link, self._saveTo + '.TMP')\n", (862, 897), False, 'import urllib\n')]
|
"""
sysinfo.py
obtain system informations
@author: K.Edeline
"""
import platform
class SysInfo():
"""
extend me
"""
def __init__(self):
self.system = platform.system()
self.node = platform.node()
self.release = platform.release()
self.version = platform.version()
self.machine = platform.machine()
self.processor = platform.processor()
self.platform = platform.platform()
self.architecture = platform.architecture()
def __str__(self):
return "node: {} system: {} release: {} arch: {}".format(self.node,
self.system, self.release, self.processor)
|
[
"platform.processor",
"platform.node",
"platform.architecture",
"platform.platform",
"platform.version",
"platform.system",
"platform.release",
"platform.machine"
] |
[((181, 198), 'platform.system', 'platform.system', ([], {}), '()\n', (196, 198), False, 'import platform\n'), ((217, 232), 'platform.node', 'platform.node', ([], {}), '()\n', (230, 232), False, 'import platform\n'), ((254, 272), 'platform.release', 'platform.release', ([], {}), '()\n', (270, 272), False, 'import platform\n'), ((294, 312), 'platform.version', 'platform.version', ([], {}), '()\n', (310, 312), False, 'import platform\n'), ((334, 352), 'platform.machine', 'platform.machine', ([], {}), '()\n', (350, 352), False, 'import platform\n'), ((376, 396), 'platform.processor', 'platform.processor', ([], {}), '()\n', (394, 396), False, 'import platform\n'), ((419, 438), 'platform.platform', 'platform.platform', ([], {}), '()\n', (436, 438), False, 'import platform\n'), ((465, 488), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (486, 488), False, 'import platform\n')]
|
"""Interactions model"""
# Django
from django.db import models
# Utils
from tclothes.utils.baseModels import TClothesModel
class InteractionsModel(TClothesModel):
"""Interactions interactions model."""
clothe = models.ForeignKey(
'clothes.ClothesModel',
on_delete=models.CASCADE
)
user = models.ForeignKey(
'users.User',
on_delete=models.CASCADE,
)
INTERACTIVE_VALUES = [
('LIKE', 'like'),
('SUPERLIKE', 'superlike'),
('DISLIKE', 'dislike')
]
value = models.CharField(
'Interaction type',
max_length=9,
choices=INTERACTIVE_VALUES,
)
def __str__(self):
"""Return clothe, user, and interactive values"""
return f'clothe: {self.clothe} | user: {self.user} | value: {self.value}'
|
[
"django.db.models.ForeignKey",
"django.db.models.CharField"
] |
[((224, 291), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""clothes.ClothesModel"""'], {'on_delete': 'models.CASCADE'}), "('clothes.ClothesModel', on_delete=models.CASCADE)\n", (241, 291), False, 'from django.db import models\n'), ((325, 382), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""users.User"""'], {'on_delete': 'models.CASCADE'}), "('users.User', on_delete=models.CASCADE)\n", (342, 382), False, 'from django.db import models\n'), ((544, 622), 'django.db.models.CharField', 'models.CharField', (['"""Interaction type"""'], {'max_length': '(9)', 'choices': 'INTERACTIVE_VALUES'}), "('Interaction type', max_length=9, choices=INTERACTIVE_VALUES)\n", (560, 622), False, 'from django.db import models\n')]
|
"""
This module contains preprocessing code to prepare data for training and inference.
Examples:
$ python preprocess.py \
--config configs/baseline.yaml
"""
import argparse
from collections import Counter
import os
from types import SimpleNamespace
import pandas as pd
import torch
import torchtext
from torch.nn.utils.rnn import pad_sequence
import yaml
from yaml import parser
class CoNLL2003Dataset(torch.utils.data.Dataset):
"""Custom dataset to contain the CoNLL2003 dataset.
"""
def __init__(self, df: pd.DataFrame, transform: list = None) -> None:
"""Initializes the dataset and prepares sequences of tokens and labels.
Args:
df (pd.DataFrame): DF containing training examples.
transform (list, optional): List of transforms (e.g. index lookups, etc.). Defaults to None.
"""
self.df = df
self.transform = transform
self.sentences, self.labels = self._prepare_data()
def _prepare_data(self) -> tuple:
"""Groups data into sequences of tokens and labels.
Returns:
tuple: sentences, labels
"""
temp_df = self.df.groupby(['Article_ID', 'Sentence_ID'],
as_index=False).agg(
Sentence=('Token', list),
Labels=('NER_Tag_Normalized', list))
sentences = temp_df['Sentence'].values.tolist()
labels = temp_df['Labels'].values.tolist()
return sentences, labels
def __len__(self) -> int:
"""Retrieve the length of the dataset.
Returns:
int: Dataset length.
"""
return len(self.sentences)
def __getitem__(self, idx: int) -> tuple:
"""Retrieves the idx item from the dataset, potentially transformed.
Args:
idx (int): idx item from the dataset.
Returns:
tuple: sentences, labels
"""
if self.transform is None:
return self.sentences[idx], self.labels[idx]
# TODO: probably should wrap this in a for-loop
indices = self.transform[0](self.sentences[idx])
labels = self.transform[1](self.labels[idx])
return indices, labels
class Preprocessor(object):
"""Preproccessor class to handle data preparation at train and inference time.
"""
def __init__(self, config: str) -> None:
"""Initialize the preprocessor and generate vocabulary and label dictionary based on the training set.
Args:
config (str): File path to the configuration yaml file.
"""
with open(config, 'r') as f:
config = yaml.safe_load(f)
self.config = SimpleNamespace(**config)
self.vocab, self.label_dict = self._create_vocabs()
self.idx_to_label = {v: k for k, v in self.label_dict.items()}
def _create_vocabs(self) -> tuple:
"""Generate vocabulary object and label dictionary.
Returns:
tuple: vocab, label_dict
"""
# load train data to build the dictionaries
train_df = pd.read_csv(os.path.join(self.config.data_dir, 'train.csv'))
# create vocabulary
vocab = torchtext.vocab.Vocab(
Counter(train_df['Token'].value_counts().to_dict()))
# create label dictionary
label_dict = {}
i = 0
for k in train_df['NER_Tag_Normalized'].unique():
label_dict[k] = i
i += 1
return vocab, label_dict
@staticmethod
def _collate_fn(batch: tuple, train: bool = True) -> tuple:
"""Custom collate function that combines variable length sequences into padded batches.
Args:
batch (tuple): sentence_indices, sentences_labels OR just sentences_indices (a list).
train (bool, optional): If train=True, expects tuple of
sentence_indices, sentences_labels, else just a list of sentence_indices. Defaults to True.
Returns:
tuple: (sentences_padded, sentence_lens), labels_padded if train=True, else (sentences_padded, sentence_lens).
"""
if train:
sentence_indices, sentence_labels = zip(*batch)
else:
sentence_indices = batch
sentence_lens = [len(x) for x in sentence_indices]
# vocab['<pad>'] = 1
sentences_padded = pad_sequence(sentence_indices,
batch_first=True,
padding_value=1)
if train:
labels_padded = pad_sequence(sentence_labels,
batch_first=True,
padding_value=-1)
return (sentences_padded, sentence_lens), labels_padded
else:
return (sentences_padded, sentence_lens)
def get_train_datasets(self) -> tuple:
"""Generates all the datasets needed for model training.
Returns:
tuple: train_dataset, val_dataset, test_dataset
"""
train_file_path = os.path.join(self.config.data_dir, 'train.csv')
val_file_path = os.path.join(self.config.data_dir, 'validation.csv')
test_file_path = os.path.join(self.config.data_dir, 'test.csv')
transform = [self._transform_sentence, self._transform_labels]
train_dataset = CoNLL2003Dataset(pd.read_csv(train_file_path),
transform)
val_dataset = CoNLL2003Dataset(pd.read_csv(val_file_path), transform)
test_dataset = CoNLL2003Dataset(pd.read_csv(test_file_path), transform)
return train_dataset, val_dataset, test_dataset
def get_train_dataloaders(self) -> tuple:
"""Generates all the dataloaders needed for model training.
Returns:
tuple: train_dataloader, val_dataloader, test_dataloader
"""
train_dataset, val_dataset, test_dataset = self.get_train_datasets()
train_dataloader = torch.utils.data.DataLoader(
train_dataset,
batch_size=self.config.batch_size,
collate_fn=self._collate_fn,
shuffle=True)
val_dataloader = torch.utils.data.DataLoader(
val_dataset,
batch_size=self.config.batch_size,
collate_fn=self._collate_fn)
test_dataloader = torch.utils.data.DataLoader(
test_dataset,
batch_size=self.config.batch_size,
collate_fn=self._collate_fn)
return train_dataloader, val_dataloader, test_dataloader
@staticmethod
def _tokenize(sentence: str) -> list:
"""Utility function to tokenize sentences.
Args:
sentence (str): Sentence string.
Returns:
list: Tokenized sentence.
"""
return sentence.split(' ')
def _transform_sentence(self, sentence: list) -> torch.tensor:
"""Transform function that accepts a sentence as a string or tokenized list and returns vocabulary indices.
Args:
sentence (list): Tokenized list or sentence string.
Returns:
torch.tensor: Vocabulary indices.
"""
if isinstance(sentence, str):
sentence = self._tokenize(sentence)
indices = []
for token in sentence:
indices.append(self.vocab[token])
return torch.tensor(indices)
def _transform_labels(self, label_sequence: list) -> torch.tensor:
"""Transform function that accepts a sequence of labels and returns label indices.
Args:
label_sequence (list): Sequence of string labels.
Returns:
torch.tensor: Label indices.
"""
labels = []
for label in label_sequence:
labels.append(self.label_dict[label])
return torch.tensor(labels)
def preprocess(self, sentences: list) -> tuple:
"""Preprocess any arbitrary list of string sentences and return indices that can be fed into the model.
Args:
sentences (list): List of sentences to tokenize and retrieve indices for.
Returns:
tuple: (sentences_padded, sentence_lens)
"""
# TODO: see if there is a way to reuse the CoNLL2003Dataset class + dataloaders
# for guaranteed consistency with the way that we're preparing training data
preprocessed = []
if isinstance(sentences, str):
preprocessed.append(self._transform_sentence(sentences))
else:
for sentence in sentences:
preprocessed.append(self._transform_sentence(sentence))
return self._collate_fn(preprocessed, train=False)
def main(args):
# contains vocab and label_dict embedded in the transform function
preprocessor = Preprocessor(args.config)
sample_sentence = '<NAME> lives in New York City.'
prepared_sentence = preprocessor.preprocess(sample_sentence)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--config',
type=str,
help='File path where the model configuration file is located.',
required=True)
args = parser.parse_args()
main(args)
|
[
"argparse.ArgumentParser",
"torch.nn.utils.rnn.pad_sequence",
"torch.utils.data.DataLoader",
"pandas.read_csv",
"yaml.parser.parse_args",
"yaml.safe_load",
"yaml.parser.add_argument",
"os.path.join",
"types.SimpleNamespace",
"torch.tensor"
] |
[((9022, 9047), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (9045, 9047), False, 'import argparse\n'), ((9052, 9178), 'yaml.parser.add_argument', 'parser.add_argument', (['"""--config"""'], {'type': 'str', 'help': '"""File path where the model configuration file is located."""', 'required': '(True)'}), "('--config', type=str, help=\n 'File path where the model configuration file is located.', required=True)\n", (9071, 9178), False, 'from yaml import parser\n'), ((9218, 9237), 'yaml.parser.parse_args', 'parser.parse_args', ([], {}), '()\n', (9235, 9237), False, 'from yaml import parser\n'), ((2734, 2759), 'types.SimpleNamespace', 'SimpleNamespace', ([], {}), '(**config)\n', (2749, 2759), False, 'from types import SimpleNamespace\n'), ((4400, 4465), 'torch.nn.utils.rnn.pad_sequence', 'pad_sequence', (['sentence_indices'], {'batch_first': '(True)', 'padding_value': '(1)'}), '(sentence_indices, batch_first=True, padding_value=1)\n', (4412, 4465), False, 'from torch.nn.utils.rnn import pad_sequence\n'), ((5101, 5148), 'os.path.join', 'os.path.join', (['self.config.data_dir', '"""train.csv"""'], {}), "(self.config.data_dir, 'train.csv')\n", (5113, 5148), False, 'import os\n'), ((5173, 5225), 'os.path.join', 'os.path.join', (['self.config.data_dir', '"""validation.csv"""'], {}), "(self.config.data_dir, 'validation.csv')\n", (5185, 5225), False, 'import os\n'), ((5251, 5297), 'os.path.join', 'os.path.join', (['self.config.data_dir', '"""test.csv"""'], {}), "(self.config.data_dir, 'test.csv')\n", (5263, 5297), False, 'import os\n'), ((6026, 6151), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_dataset'], {'batch_size': 'self.config.batch_size', 'collate_fn': 'self._collate_fn', 'shuffle': '(True)'}), '(train_dataset, batch_size=self.config.\n batch_size, collate_fn=self._collate_fn, shuffle=True)\n', (6053, 6151), False, 'import torch\n'), ((6221, 6329), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['val_dataset'], {'batch_size': 'self.config.batch_size', 'collate_fn': 'self._collate_fn'}), '(val_dataset, batch_size=self.config.batch_size,\n collate_fn=self._collate_fn)\n', (6248, 6329), False, 'import torch\n'), ((6389, 6498), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_dataset'], {'batch_size': 'self.config.batch_size', 'collate_fn': 'self._collate_fn'}), '(test_dataset, batch_size=self.config.batch_size,\n collate_fn=self._collate_fn)\n', (6416, 6498), False, 'import torch\n'), ((7410, 7431), 'torch.tensor', 'torch.tensor', (['indices'], {}), '(indices)\n', (7422, 7431), False, 'import torch\n'), ((7865, 7885), 'torch.tensor', 'torch.tensor', (['labels'], {}), '(labels)\n', (7877, 7885), False, 'import torch\n'), ((2694, 2711), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (2708, 2711), False, 'import yaml\n'), ((3142, 3189), 'os.path.join', 'os.path.join', (['self.config.data_dir', '"""train.csv"""'], {}), "(self.config.data_dir, 'train.csv')\n", (3154, 3189), False, 'import os\n'), ((4592, 4657), 'torch.nn.utils.rnn.pad_sequence', 'pad_sequence', (['sentence_labels'], {'batch_first': '(True)', 'padding_value': '(-1)'}), '(sentence_labels, batch_first=True, padding_value=-1)\n', (4604, 4657), False, 'from torch.nn.utils.rnn import pad_sequence\n'), ((5412, 5440), 'pandas.read_csv', 'pd.read_csv', (['train_file_path'], {}), '(train_file_path)\n', (5423, 5440), True, 'import pandas as pd\n'), ((5533, 5559), 'pandas.read_csv', 'pd.read_csv', (['val_file_path'], {}), '(val_file_path)\n', (5544, 5559), True, 'import pandas as pd\n'), ((5612, 5639), 'pandas.read_csv', 'pd.read_csv', (['test_file_path'], {}), '(test_file_path)\n', (5623, 5639), True, 'import pandas as pd\n')]
|
import grinpy as gp
import pytest
class TestZeroForcing:
def test_non_integral_value_for_k_raises_TypeError_in_is_k_forcing(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_k_forcing_vertex(G, 1, [1], 1.5)
def test_0_value_for_k_raises_ValueError_in_is_k_forcing(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_k_forcing_vertex(G, 1, [1], 0)
def test_integral_float_for_k_works(self):
G = gp.star_graph(2)
assert gp.is_k_forcing_vertex(G, 1, [1], 1.0) == True
def test_leaf_is_zero_forcing_vertex_for_star(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_vertex(G, 1, [1]) == True
def test_center_is_not_zero_forcing_vertex_for_star(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_vertex(G, 0, [0]) == False
def test_no_vertex_is_zero_forcing_vertex_for_empty_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_vertex(G, 0, set()) == False
assert gp.is_zero_forcing_vertex(G, 1, set()) == False
assert gp.is_zero_forcing_vertex(G, 2, set()) == False
def test_center_of_S3_is_3_forcing_vertex(self):
G = gp.star_graph(3)
assert gp.is_k_forcing_vertex(G, 0, [0], 3) == True
def test_center_of_S3_is_not_2_forcing_vertex(self):
G = gp.star_graph(3)
assert gp.is_k_forcing_vertex(G, 0, [0], 2) == False
def test_leaf_of_star_is_zero_forcing_active_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_active_set(G, [1]) == True
def test_center_of_star_is_not_zero_forcing_active_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_active_set(G, [0]) == False
def test_empy_set_is_not_zero_forcing_active_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_active_set(G, set()) == False
def test_leaf_is_zero_forcing_set_of_path(self):
G = gp.path_graph(3)
assert gp.is_zero_forcing_set(G, [0]) == True
def test_leaf_is_not_zero_forcing_set_of_S3(self):
G = gp.star_graph(3)
assert gp.is_zero_forcing_set(G, [1]) == False
def test_leaf_is_max_degree_minus_one_forcing_set_for_star(self):
for i in range(3, 13):
G = gp.star_graph(i)
D = gp.max_degree(G)
assert gp.is_k_forcing_set(G, [1], D - 1) == True
def test_zero_forcing_number_of_star_is_order_minus_2(self):
for i in range(2, 12):
G = gp.star_graph(i)
assert gp.zero_forcing_number(G) == G.order() - 2
def test_zero_forcing_number_of_petersen_graph_is_5(self):
G = gp.petersen_graph()
assert gp.zero_forcing_number(G) == 5
def test_2_forcing_number_of_petersen_graph_is_2(self):
G = gp.petersen_graph()
assert gp.k_forcing_number(G, 2) == 2
def test_leaf_is_not_total_forcing_set_of_path(self):
G = gp.path_graph(3)
assert gp.is_total_zero_forcing_set(G, [0]) == False
def test_pair_of_adjacent_nodes_is_total_forcing_set_of_path(self):
G = gp.path_graph(6)
assert gp.is_total_zero_forcing_set(G, [2, 3]) == True
def test_total_zero_forcing_number_of_path_is_2(self):
G = gp.path_graph(5)
assert gp.total_zero_forcing_number(G) == 2
def test_connected_zero_forcing_number_of_monster_is_4(self):
G = gp.star_graph(3)
G.add_edge(3, 4)
G.add_edge(3, 5)
assert gp.connected_zero_forcing_number(G) == 4
def test_non_int_value_for_k_raises_error_in_is_connected_k_forcing(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_connected_k_forcing_set(G, [0], 1.5)
def test_0_value_for_k_raises_error_in_is_connected_k_forcing(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_connected_k_forcing_set(G, [0], 0)
def test_non_int_value_for_k_raises_error_in_min_connected_k_forcing(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.min_connected_k_forcing_set(G, 1.5)
def test_0_value_for_k_raises_error_in_min_connected_k_forcing(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.min_connected_k_forcing_set(G, 0)
def test_non_int_value_for_k_raises_error_in_connected_k_forcing_num(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.connected_k_forcing_number(G, 1.5)
def test_0_value_for_k_raises_error_in_connected_k_forcing_num(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.connected_k_forcing_number(G, 0)
def test_total_zero_forcing_num_of_trivial_graph_is_None(self):
G = gp.trivial_graph()
assert gp.total_zero_forcing_number(G) == None
def test_endpoint_is_connected_forcing_set_of_path(self):
G = gp.path_graph(2)
assert gp.is_connected_zero_forcing_set(G, [0])
def test_connected_zero_forcing_num_of_disconnected_graph_is_None(self):
G = gp.empty_graph(5)
assert gp.connected_zero_forcing_number(G) == None
|
[
"grinpy.is_connected_zero_forcing_set",
"grinpy.empty_graph",
"grinpy.is_zero_forcing_set",
"grinpy.is_zero_forcing_vertex",
"grinpy.max_degree",
"grinpy.connected_k_forcing_number",
"grinpy.is_k_forcing_vertex",
"grinpy.zero_forcing_number",
"pytest.raises",
"grinpy.is_total_zero_forcing_set",
"grinpy.min_connected_k_forcing_set",
"grinpy.connected_zero_forcing_number",
"grinpy.petersen_graph",
"grinpy.star_graph",
"grinpy.total_zero_forcing_number",
"grinpy.k_forcing_number",
"grinpy.is_connected_k_forcing_set",
"grinpy.is_zero_forcing_active_set",
"grinpy.trivial_graph",
"grinpy.is_k_forcing_set",
"grinpy.path_graph"
] |
[((511, 527), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (524, 527), True, 'import grinpy as gp\n'), ((660, 676), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (673, 676), True, 'import grinpy as gp\n'), ((813, 829), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (826, 829), True, 'import grinpy as gp\n'), ((971, 987), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (984, 987), True, 'import grinpy as gp\n'), ((1243, 1259), 'grinpy.star_graph', 'gp.star_graph', (['(3)'], {}), '(3)\n', (1256, 1259), True, 'import grinpy as gp\n'), ((1390, 1406), 'grinpy.star_graph', 'gp.star_graph', (['(3)'], {}), '(3)\n', (1403, 1406), True, 'import grinpy as gp\n'), ((1541, 1557), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (1554, 1557), True, 'import grinpy as gp\n'), ((1698, 1714), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (1711, 1714), True, 'import grinpy as gp\n'), ((1850, 1866), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (1863, 1866), True, 'import grinpy as gp\n'), ((1997, 2013), 'grinpy.path_graph', 'gp.path_graph', (['(3)'], {}), '(3)\n', (2010, 2013), True, 'import grinpy as gp\n'), ((2136, 2152), 'grinpy.star_graph', 'gp.star_graph', (['(3)'], {}), '(3)\n', (2149, 2152), True, 'import grinpy as gp\n'), ((2706, 2725), 'grinpy.petersen_graph', 'gp.petersen_graph', ([], {}), '()\n', (2723, 2725), True, 'import grinpy as gp\n'), ((2845, 2864), 'grinpy.petersen_graph', 'gp.petersen_graph', ([], {}), '()\n', (2862, 2864), True, 'import grinpy as gp\n'), ((2982, 2998), 'grinpy.path_graph', 'gp.path_graph', (['(3)'], {}), '(3)\n', (2995, 2998), True, 'import grinpy as gp\n'), ((3145, 3161), 'grinpy.path_graph', 'gp.path_graph', (['(6)'], {}), '(6)\n', (3158, 3161), True, 'import grinpy as gp\n'), ((3297, 3313), 'grinpy.path_graph', 'gp.path_graph', (['(5)'], {}), '(5)\n', (3310, 3313), True, 'import grinpy as gp\n'), ((3445, 3461), 'grinpy.star_graph', 'gp.star_graph', (['(3)'], {}), '(3)\n', (3458, 3461), True, 'import grinpy as gp\n'), ((4856, 4874), 'grinpy.trivial_graph', 'gp.trivial_graph', ([], {}), '()\n', (4872, 4874), True, 'import grinpy as gp\n'), ((5005, 5021), 'grinpy.path_graph', 'gp.path_graph', (['(2)'], {}), '(2)\n', (5018, 5021), True, 'import grinpy as gp\n'), ((5037, 5077), 'grinpy.is_connected_zero_forcing_set', 'gp.is_connected_zero_forcing_set', (['G', '[0]'], {}), '(G, [0])\n', (5069, 5077), True, 'import grinpy as gp\n'), ((5168, 5185), 'grinpy.empty_graph', 'gp.empty_graph', (['(5)'], {}), '(5)\n', (5182, 5185), True, 'import grinpy as gp\n'), ((150, 174), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (163, 174), False, 'import pytest\n'), ((192, 208), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (205, 208), True, 'import grinpy as gp\n'), ((221, 259), 'grinpy.is_k_forcing_vertex', 'gp.is_k_forcing_vertex', (['G', '(1)', '[1]', '(1.5)'], {}), '(G, 1, [1], 1.5)\n', (243, 259), True, 'import grinpy as gp\n'), ((342, 367), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (355, 367), False, 'import pytest\n'), ((385, 401), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (398, 401), True, 'import grinpy as gp\n'), ((414, 450), 'grinpy.is_k_forcing_vertex', 'gp.is_k_forcing_vertex', (['G', '(1)', '[1]', '(0)'], {}), '(G, 1, [1], 0)\n', (436, 450), True, 'import grinpy as gp\n'), ((543, 581), 'grinpy.is_k_forcing_vertex', 'gp.is_k_forcing_vertex', (['G', '(1)', '[1]', '(1.0)'], {}), '(G, 1, [1], 1.0)\n', (565, 581), True, 'import grinpy as gp\n'), ((692, 728), 'grinpy.is_zero_forcing_vertex', 'gp.is_zero_forcing_vertex', (['G', '(1)', '[1]'], {}), '(G, 1, [1])\n', (717, 728), True, 'import grinpy as gp\n'), ((845, 881), 'grinpy.is_zero_forcing_vertex', 'gp.is_zero_forcing_vertex', (['G', '(0)', '[0]'], {}), '(G, 0, [0])\n', (870, 881), True, 'import grinpy as gp\n'), ((1275, 1311), 'grinpy.is_k_forcing_vertex', 'gp.is_k_forcing_vertex', (['G', '(0)', '[0]', '(3)'], {}), '(G, 0, [0], 3)\n', (1297, 1311), True, 'import grinpy as gp\n'), ((1422, 1458), 'grinpy.is_k_forcing_vertex', 'gp.is_k_forcing_vertex', (['G', '(0)', '[0]', '(2)'], {}), '(G, 0, [0], 2)\n', (1444, 1458), True, 'import grinpy as gp\n'), ((1573, 1610), 'grinpy.is_zero_forcing_active_set', 'gp.is_zero_forcing_active_set', (['G', '[1]'], {}), '(G, [1])\n', (1602, 1610), True, 'import grinpy as gp\n'), ((1730, 1767), 'grinpy.is_zero_forcing_active_set', 'gp.is_zero_forcing_active_set', (['G', '[0]'], {}), '(G, [0])\n', (1759, 1767), True, 'import grinpy as gp\n'), ((2029, 2059), 'grinpy.is_zero_forcing_set', 'gp.is_zero_forcing_set', (['G', '[0]'], {}), '(G, [0])\n', (2051, 2059), True, 'import grinpy as gp\n'), ((2168, 2198), 'grinpy.is_zero_forcing_set', 'gp.is_zero_forcing_set', (['G', '[1]'], {}), '(G, [1])\n', (2190, 2198), True, 'import grinpy as gp\n'), ((2326, 2342), 'grinpy.star_graph', 'gp.star_graph', (['i'], {}), '(i)\n', (2339, 2342), True, 'import grinpy as gp\n'), ((2359, 2375), 'grinpy.max_degree', 'gp.max_degree', (['G'], {}), '(G)\n', (2372, 2375), True, 'import grinpy as gp\n'), ((2551, 2567), 'grinpy.star_graph', 'gp.star_graph', (['i'], {}), '(i)\n', (2564, 2567), True, 'import grinpy as gp\n'), ((2741, 2766), 'grinpy.zero_forcing_number', 'gp.zero_forcing_number', (['G'], {}), '(G)\n', (2763, 2766), True, 'import grinpy as gp\n'), ((2880, 2905), 'grinpy.k_forcing_number', 'gp.k_forcing_number', (['G', '(2)'], {}), '(G, 2)\n', (2899, 2905), True, 'import grinpy as gp\n'), ((3014, 3050), 'grinpy.is_total_zero_forcing_set', 'gp.is_total_zero_forcing_set', (['G', '[0]'], {}), '(G, [0])\n', (3042, 3050), True, 'import grinpy as gp\n'), ((3177, 3216), 'grinpy.is_total_zero_forcing_set', 'gp.is_total_zero_forcing_set', (['G', '[2, 3]'], {}), '(G, [2, 3])\n', (3205, 3216), True, 'import grinpy as gp\n'), ((3329, 3360), 'grinpy.total_zero_forcing_number', 'gp.total_zero_forcing_number', (['G'], {}), '(G)\n', (3357, 3360), True, 'import grinpy as gp\n'), ((3527, 3562), 'grinpy.connected_zero_forcing_number', 'gp.connected_zero_forcing_number', (['G'], {}), '(G)\n', (3559, 3562), True, 'import grinpy as gp\n'), ((3661, 3685), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (3674, 3685), False, 'import pytest\n'), ((3703, 3719), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (3716, 3719), True, 'import grinpy as gp\n'), ((3732, 3774), 'grinpy.is_connected_k_forcing_set', 'gp.is_connected_k_forcing_set', (['G', '[0]', '(1.5)'], {}), '(G, [0], 1.5)\n', (3761, 3774), True, 'import grinpy as gp\n'), ((3862, 3887), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3875, 3887), False, 'import pytest\n'), ((3905, 3921), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (3918, 3921), True, 'import grinpy as gp\n'), ((3934, 3974), 'grinpy.is_connected_k_forcing_set', 'gp.is_connected_k_forcing_set', (['G', '[0]', '(0)'], {}), '(G, [0], 0)\n', (3963, 3974), True, 'import grinpy as gp\n'), ((4069, 4093), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (4082, 4093), False, 'import pytest\n'), ((4111, 4127), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (4124, 4127), True, 'import grinpy as gp\n'), ((4140, 4178), 'grinpy.min_connected_k_forcing_set', 'gp.min_connected_k_forcing_set', (['G', '(1.5)'], {}), '(G, 1.5)\n', (4170, 4178), True, 'import grinpy as gp\n'), ((4267, 4292), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4280, 4292), False, 'import pytest\n'), ((4310, 4326), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (4323, 4326), True, 'import grinpy as gp\n'), ((4339, 4375), 'grinpy.min_connected_k_forcing_set', 'gp.min_connected_k_forcing_set', (['G', '(0)'], {}), '(G, 0)\n', (4369, 4375), True, 'import grinpy as gp\n'), ((4470, 4494), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (4483, 4494), False, 'import pytest\n'), ((4512, 4528), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (4525, 4528), True, 'import grinpy as gp\n'), ((4541, 4578), 'grinpy.connected_k_forcing_number', 'gp.connected_k_forcing_number', (['G', '(1.5)'], {}), '(G, 1.5)\n', (4570, 4578), True, 'import grinpy as gp\n'), ((4667, 4692), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4680, 4692), False, 'import pytest\n'), ((4710, 4726), 'grinpy.star_graph', 'gp.star_graph', (['(2)'], {}), '(2)\n', (4723, 4726), True, 'import grinpy as gp\n'), ((4739, 4774), 'grinpy.connected_k_forcing_number', 'gp.connected_k_forcing_number', (['G', '(0)'], {}), '(G, 0)\n', (4768, 4774), True, 'import grinpy as gp\n'), ((4890, 4921), 'grinpy.total_zero_forcing_number', 'gp.total_zero_forcing_number', (['G'], {}), '(G)\n', (4918, 4921), True, 'import grinpy as gp\n'), ((5201, 5236), 'grinpy.connected_zero_forcing_number', 'gp.connected_zero_forcing_number', (['G'], {}), '(G)\n', (5233, 5236), True, 'import grinpy as gp\n'), ((2395, 2429), 'grinpy.is_k_forcing_set', 'gp.is_k_forcing_set', (['G', '[1]', '(D - 1)'], {}), '(G, [1], D - 1)\n', (2414, 2429), True, 'import grinpy as gp\n'), ((2587, 2612), 'grinpy.zero_forcing_number', 'gp.zero_forcing_number', (['G'], {}), '(G)\n', (2609, 2612), True, 'import grinpy as gp\n')]
|
import plotly.figure_factory as ff
import plotly.graph_objects as go
import pandas as pd
import statistics
dataframe = pd.read_csv("StudentsPerformance.csv")
data_list = dataframe["reading score"].to_list()
data_mean = statistics.mean(data_list)
data_median = statistics.median(data_list)
data_mode = statistics.mode(data_list)
data_std_deviation = statistics.stdev(data_list)
data_first_std_deviation_start, data_first_std_deviation_end = data_mean - \
data_std_deviation, data_mean+data_std_deviation
data_second_std_deviation_start, data_second_std_deviation_end = data_mean - \
(2*data_std_deviation), data_mean+(2*data_std_deviation)
data_third_std_deviation_start, data_third_std_deviation_end = data_mean - \
(3*data_std_deviation), data_mean+(3*data_std_deviation)
data_list_of_data_within_1_std_deviation = [
result for result in data_list if result > data_first_std_deviation_start and result < data_first_std_deviation_end]
data_list_of_data_within_2_std_deviation = [result for result in data_list if result >
data_second_std_deviation_start and result < data_second_std_deviation_end]
data_list_of_data_within_3_std_deviation = [
result for result in data_list if result > data_third_std_deviation_start and result < data_third_std_deviation_end]
print("Mean of this data is {}.".format(data_mean))
print("Median of this data is {}.".format(data_median))
print("Mode of this data is {}.".format(data_mode))
print("{}% of data for data lies within 1 standard deviation".format(
len(data_list_of_data_within_1_std_deviation)*100.0/len(data_list)))
print("{}% of data for data lies within 2 standard deviations".format(
len(data_list_of_data_within_2_std_deviation)*100.0/len(data_list)))
print("{}% of data for data lies within 3 standard deviations".format(
len(data_list_of_data_within_3_std_deviation)*100.0/len(data_list)))
fig = ff.create_distplot([data_list], ["reading scores"], show_hist=False)
fig.add_trace(go.Scatter(x=[data_mean, data_mean], y=[
0, 0.17], mode="lines", name="MEAN"))
fig.add_trace(go.Scatter(x=[data_first_std_deviation_start, data_first_std_deviation_start], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 1"))
fig.add_trace(go.Scatter(x=[data_first_std_deviation_end, data_first_std_deviation_end], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 1"))
fig.add_trace(go.Scatter(x=[data_second_std_deviation_start, data_second_std_deviation_start], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 2"))
fig.add_trace(go.Scatter(x=[data_third_std_deviation_end, data_third_std_deviation_end], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 3"))
fig.show()
|
[
"plotly.graph_objects.Scatter",
"statistics.median",
"pandas.read_csv",
"statistics.stdev",
"statistics.mean",
"plotly.figure_factory.create_distplot",
"statistics.mode"
] |
[((125, 163), 'pandas.read_csv', 'pd.read_csv', (['"""StudentsPerformance.csv"""'], {}), "('StudentsPerformance.csv')\n", (136, 163), True, 'import pandas as pd\n'), ((229, 255), 'statistics.mean', 'statistics.mean', (['data_list'], {}), '(data_list)\n', (244, 255), False, 'import statistics\n'), ((271, 299), 'statistics.median', 'statistics.median', (['data_list'], {}), '(data_list)\n', (288, 299), False, 'import statistics\n'), ((313, 339), 'statistics.mode', 'statistics.mode', (['data_list'], {}), '(data_list)\n', (328, 339), False, 'import statistics\n'), ((362, 389), 'statistics.stdev', 'statistics.stdev', (['data_list'], {}), '(data_list)\n', (378, 389), False, 'import statistics\n'), ((1966, 2034), 'plotly.figure_factory.create_distplot', 'ff.create_distplot', (['[data_list]', "['reading scores']"], {'show_hist': '(False)'}), "([data_list], ['reading scores'], show_hist=False)\n", (1984, 2034), True, 'import plotly.figure_factory as ff\n'), ((2050, 2126), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': '[data_mean, data_mean]', 'y': '[0, 0.17]', 'mode': '"""lines"""', 'name': '"""MEAN"""'}), "(x=[data_mean, data_mean], y=[0, 0.17], mode='lines', name='MEAN')\n", (2060, 2126), True, 'import plotly.graph_objects as go\n'), ((2159, 2302), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': '[data_first_std_deviation_start, data_first_std_deviation_start]', 'y': '[0, 0.17]', 'mode': '"""lines"""', 'name': '"""STANDARD DEVIATION 1"""'}), "(x=[data_first_std_deviation_start,\n data_first_std_deviation_start], y=[0, 0.17], mode='lines', name=\n 'STANDARD DEVIATION 1')\n", (2169, 2302), True, 'import plotly.graph_objects as go\n'), ((2326, 2460), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': '[data_first_std_deviation_end, data_first_std_deviation_end]', 'y': '[0, 0.17]', 'mode': '"""lines"""', 'name': '"""STANDARD DEVIATION 1"""'}), "(x=[data_first_std_deviation_end, data_first_std_deviation_end],\n y=[0, 0.17], mode='lines', name='STANDARD DEVIATION 1')\n", (2336, 2460), True, 'import plotly.graph_objects as go\n'), ((2489, 2634), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': '[data_second_std_deviation_start, data_second_std_deviation_start]', 'y': '[0, 0.17]', 'mode': '"""lines"""', 'name': '"""STANDARD DEVIATION 2"""'}), "(x=[data_second_std_deviation_start,\n data_second_std_deviation_start], y=[0, 0.17], mode='lines', name=\n 'STANDARD DEVIATION 2')\n", (2499, 2634), True, 'import plotly.graph_objects as go\n'), ((2658, 2792), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': '[data_third_std_deviation_end, data_third_std_deviation_end]', 'y': '[0, 0.17]', 'mode': '"""lines"""', 'name': '"""STANDARD DEVIATION 3"""'}), "(x=[data_third_std_deviation_end, data_third_std_deviation_end],\n y=[0, 0.17], mode='lines', name='STANDARD DEVIATION 3')\n", (2668, 2792), True, 'import plotly.graph_objects as go\n')]
|
#!/usr/bin/env python
# source - https://github.com/whoenig/crazyflie_ros/commit/b048c1f2fd3ee34f899fa0e2f6c58a4885a39405#diff-970be3522034ff436332d391db26982a
from __future__ import absolute_import, division, unicode_literals, print_function
import rospy
import crazyflie
import uav_trajectory
import time
import tf
#from crazyflie_driver.msg import Hover
from std_msgs.msg import Empty
from crazyflie_driver.srv import UpdateParams
from crazyflie_driver.msg import GenericLogData
from threading import Thread
import tty, termios
import sys
if __name__ == '__main__':
rospy.init_node('test_high_level')
#rospy.Subscriber('/cf1/log_ranges', GenericLogData, get_ranges)
prefix = '/cf1'
cf = crazyflie.Crazyflie("/cf1", "world")
rospy.wait_for_service(prefix + '/update_params')
rospy.loginfo("found update_params service")
cf.setParam("commander/enHighLevel", 1)
cf.setParam("stabilizer/estimator", 2) # Use EKF
cf.setParam("ctrlMel/kp_z", 1.0) # reduce z wobble - default 1.25
#cf.setParam("ctrlMel/ki_z", 0.06) # reduce z wobble - default 0.05
#cf.setParam("ctrlMel/kd_z", 0.2) # reduce z wobble - default 0.4
## reset kalman
cf.setParam("kalman/initialX", 0)
cf.setParam("kalman/initialY", 0)
cf.setParam("kalman/initialZ", 0)
cf.setParam("kalman/resetEstimation", 1)
########
cf.setParam("stabilizer/controller", 2) # 2=Use mellinger controller
time.sleep(1.0)
rospy.loginfo("launching")
#cf.takeoff(targetHeight = 0.4, duration = 3.0)
#time.sleep(5.0)
traj1 = uav_trajectory.Trajectory()
traj1.loadcsv("/home/user/catkin_ws/src/crazyflie_ros/crazyflie_demo/scripts/takeoff.csv")
traj2 = uav_trajectory.Trajectory()
traj2.loadcsv("/home/user/catkin_ws/src/crazyflie_ros/crazyflie_demo/scripts/sine.csv")
print('traj2 duration :', traj2.duration)
cf.uploadTrajectory(0, 0, traj1)
cf.uploadTrajectory(1, len(traj1.polynomials), traj2)
cf.startTrajectory(0, timescale=1.0)
time.sleep(traj1.duration * 2.0)
cf.startTrajectory(1, timescale=1.5)
time.sleep(traj2.duration * 1.5)
time.sleep(1) #additional delay at end
cf.startTrajectory(0, timescale=1.0, reverse=True)
time.sleep(1.2)
cf.stop()
|
[
"uav_trajectory.Trajectory",
"time.sleep",
"rospy.loginfo",
"rospy.init_node",
"rospy.wait_for_service",
"crazyflie.Crazyflie"
] |
[((579, 613), 'rospy.init_node', 'rospy.init_node', (['"""test_high_level"""'], {}), "('test_high_level')\n", (594, 613), False, 'import rospy\n'), ((712, 748), 'crazyflie.Crazyflie', 'crazyflie.Crazyflie', (['"""/cf1"""', '"""world"""'], {}), "('/cf1', 'world')\n", (731, 748), False, 'import crazyflie\n'), ((753, 802), 'rospy.wait_for_service', 'rospy.wait_for_service', (["(prefix + '/update_params')"], {}), "(prefix + '/update_params')\n", (775, 802), False, 'import rospy\n'), ((807, 851), 'rospy.loginfo', 'rospy.loginfo', (['"""found update_params service"""'], {}), "('found update_params service')\n", (820, 851), False, 'import rospy\n'), ((1437, 1452), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (1447, 1452), False, 'import time\n'), ((1458, 1484), 'rospy.loginfo', 'rospy.loginfo', (['"""launching"""'], {}), "('launching')\n", (1471, 1484), False, 'import rospy\n'), ((1571, 1598), 'uav_trajectory.Trajectory', 'uav_trajectory.Trajectory', ([], {}), '()\n', (1596, 1598), False, 'import uav_trajectory\n'), ((1707, 1734), 'uav_trajectory.Trajectory', 'uav_trajectory.Trajectory', ([], {}), '()\n', (1732, 1734), False, 'import uav_trajectory\n'), ((2017, 2049), 'time.sleep', 'time.sleep', (['(traj1.duration * 2.0)'], {}), '(traj1.duration * 2.0)\n', (2027, 2049), False, 'import time\n'), ((2096, 2128), 'time.sleep', 'time.sleep', (['(traj2.duration * 1.5)'], {}), '(traj2.duration * 1.5)\n', (2106, 2128), False, 'import time\n'), ((2133, 2146), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2143, 2146), False, 'import time\n'), ((2232, 2247), 'time.sleep', 'time.sleep', (['(1.2)'], {}), '(1.2)\n', (2242, 2247), False, 'import time\n')]
|
import json
from flask import Flask, request, redirect, session
import requests
import json
from urllib.parse import quote
app = Flask(__name__)
app.secret_key = "super secret key"
# Authentication Steps, paramaters, and responses are defined at https://developer.spotify.com/web-api/authorization-guide/
# Visit this url to see all the steps, parameters, and expected response.
# Client Keys
CLIENT_ID = ""
CLIENT_SECRET = ""
# Spotify URLS
SPOTIFY_AUTH_URL = "https://accounts.spotify.com/authorize"
SPOTIFY_TOKEN_URL = "https://accounts.spotify.com/api/token"
SPOTIFY_API_BASE_URL = "https://api.spotify.com"
API_VERSION = "v1"
SPOTIFY_API_URL = "{}/{}".format(SPOTIFY_API_BASE_URL, API_VERSION)
# Server-side Parameters
CLIENT_SIDE_URL = "http://localhost"
PORT = 5000
REDIRECT_URI = 'http://localhost:5000/callback'
SCOPE = "playlist-modify-public playlist-modify-private streaming user-read-playback-state"
STATE = ""
SHOW_DIALOG_bool = True
SHOW_DIALOG_str = str(SHOW_DIALOG_bool).lower()
auth_query_parameters = {
"response_type": "code",
"redirect_uri": REDIRECT_URI,
"scope": SCOPE,
# "state": STATE,
# "show_dialog": SHOW_DIALOG_str,
"client_id": CLIENT_ID
}
@app.route("/")
def index():
# Auth Step 1: Authorization
url_args = "&".join(["{}={}".format(key, quote(val)) for key, val in auth_query_parameters.items()])
auth_url = "{}/?{}".format(SPOTIFY_AUTH_URL, url_args)
return redirect(auth_url)
@app.route("/callback")
def callback():
# Auth Step 4: Requests refresh and access tokens
auth_token = request.args['code']
code_payload = {
"grant_type": "authorization_code",
"code": str(auth_token),
"redirect_uri": REDIRECT_URI,
'client_id': CLIENT_ID,
'client_secret': CLIENT_SECRET,
}
post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload)
# Auth Step 5: Tokens are Returned to Application
response_data = json.loads(post_request.text)
session['access_token'] = response_data["access_token"]
session['refresh_token'] = response_data["refresh_token"]
session['expires_in'] = response_data["expires_in"]
access_token = response_data["access_token"]
refresh_token = response_data["refresh_token"]
token_type = response_data["token_type"]
expires_in = response_data["expires_in"]
# print(access_token)
# print(refresh_token)
# print(expires_in)
return ''
@app.route("/play")
def play():
authorization_header = getAuthorizationHeader()
body = {
"context_uri": "spotify:playlist:5XCRfaXW22GIQIZrUrw2gc",
"offset": {
"position": 6
},
"position_ms": 0
}
# Auth Step 6: Use the access token to access Spotify API
play_endpoint = "{}/me/player/play".format(SPOTIFY_API_URL)
play_request = requests.put(play_endpoint, headers=authorization_header, data=json.dumps(body))
# print(play_request.json())
return 'play_request.status_code'
@app.route("/pause")
def pause():
authorization_header = getAuthorizationHeader()
pause_profile_endpoint = "{}/me/player/pause".format(SPOTIFY_API_URL)
pause_request = requests.put(pause_profile_endpoint, headers=authorization_header)
print((pause_request.status_code))
return 'pause_request.status_code'
@app.route("/next")
def next():
authorization_header = getAuthorizationHeader()
pause_profile_endpoint = "{}/me/player/devices".format(SPOTIFY_API_URL)
pause_request = requests.get(pause_profile_endpoint, headers=authorization_header)
print((pause_request.json()))
return 'pause_request.status_code'
def refreshAccessToken():
print('yea')
def getAuthorizationHeader():
authorization_header = {"Authorization": "Bearer {}".format(session['access_token'])}
return authorization_header
if __name__ == "__main__":
app.run(debug=True, port=PORT)
|
[
"json.loads",
"flask.redirect",
"flask.Flask",
"json.dumps",
"urllib.parse.quote",
"requests.get",
"requests.put",
"requests.post"
] |
[((130, 145), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (135, 145), False, 'from flask import Flask, request, redirect, session\n'), ((1440, 1458), 'flask.redirect', 'redirect', (['auth_url'], {}), '(auth_url)\n', (1448, 1458), False, 'from flask import Flask, request, redirect, session\n'), ((1826, 1877), 'requests.post', 'requests.post', (['SPOTIFY_TOKEN_URL'], {'data': 'code_payload'}), '(SPOTIFY_TOKEN_URL, data=code_payload)\n', (1839, 1877), False, 'import requests\n'), ((1953, 1982), 'json.loads', 'json.loads', (['post_request.text'], {}), '(post_request.text)\n', (1963, 1982), False, 'import json\n'), ((3189, 3255), 'requests.put', 'requests.put', (['pause_profile_endpoint'], {'headers': 'authorization_header'}), '(pause_profile_endpoint, headers=authorization_header)\n', (3201, 3255), False, 'import requests\n'), ((3516, 3582), 'requests.get', 'requests.get', (['pause_profile_endpoint'], {'headers': 'authorization_header'}), '(pause_profile_endpoint, headers=authorization_header)\n', (3528, 3582), False, 'import requests\n'), ((2918, 2934), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (2928, 2934), False, 'import json\n'), ((1310, 1320), 'urllib.parse.quote', 'quote', (['val'], {}), '(val)\n', (1315, 1320), False, 'from urllib.parse import quote\n')]
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from books.api import views as api_views
from books import views
router = routers.DefaultRouter()
# TODO: Nest API endpoints
# # from rest_framework_extensions.routers import ExtendedSimpleRouter
router.register(r'books', api_views.BookViewSet)
router.register(r'editions', api_views.EditionViewSet)
router.register(r'authors', api_views.AuthorViewSet)
router.register(r'publishers', api_views.PublisherViewSet)
urlpatterns = patterns(
'',
url(r'^api/search/external/(?P<q>[\w ]+)/$', api_views.search_external),
url(r'^api/', include(router.urls)),
)
|
[
"django.conf.urls.url",
"rest_framework.routers.DefaultRouter",
"django.conf.urls.include"
] |
[((163, 186), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (184, 186), False, 'from rest_framework import routers\n'), ((540, 611), 'django.conf.urls.url', 'url', (['"""^api/search/external/(?P<q>[\\\\w ]+)/$"""', 'api_views.search_external'], {}), "('^api/search/external/(?P<q>[\\\\w ]+)/$', api_views.search_external)\n", (543, 611), False, 'from django.conf.urls import patterns, include, url\n'), ((631, 651), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (638, 651), False, 'from django.conf.urls import patterns, include, url\n')]
|
from flask import Flask , request , jsonify,render_template
import util
app=Flask(__name__)
@app.route('/')
def get_location_names():
response = util.get_location_names()
print(response)
#response.headers.add('Access-control-Allow-origin','*')
return render_template('app.html',response=response)
@app.route('/predict_house_price',methods=['POST'])
def predict_house_price():
total_sqft=float(request.form['total_sqft'])
location = float(request.form['location'])
bhk = int(request.form['bhk'])
bath = float(request.form['bhk'])
response = util.get_location_names()
#response =jsonify({
estimated_price = util.get_estimateud_price(location,total_sqft,bhk,bath)
#})
return render_template('app.html', response=response,price=estimated_price)
if __name__=="__main__":
print("Starting Python flask server from Home proce prediction...")
app.run()
|
[
"flask.render_template",
"util.get_estimateud_price",
"flask.Flask",
"util.get_location_names"
] |
[((76, 91), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (81, 91), False, 'from flask import Flask, request, jsonify, render_template\n'), ((150, 175), 'util.get_location_names', 'util.get_location_names', ([], {}), '()\n', (173, 175), False, 'import util\n'), ((268, 314), 'flask.render_template', 'render_template', (['"""app.html"""'], {'response': 'response'}), "('app.html', response=response)\n", (283, 314), False, 'from flask import Flask, request, jsonify, render_template\n'), ((578, 603), 'util.get_location_names', 'util.get_location_names', ([], {}), '()\n', (601, 603), False, 'import util\n'), ((651, 709), 'util.get_estimateud_price', 'util.get_estimateud_price', (['location', 'total_sqft', 'bhk', 'bath'], {}), '(location, total_sqft, bhk, bath)\n', (676, 709), False, 'import util\n'), ((726, 795), 'flask.render_template', 'render_template', (['"""app.html"""'], {'response': 'response', 'price': 'estimated_price'}), "('app.html', response=response, price=estimated_price)\n", (741, 795), False, 'from flask import Flask, request, jsonify, render_template\n')]
|
import numpy as np
import pandas as pd
def get_bootstrap_indices(data, cluster_by=None, seed=None, n_draws=1000):
"""Draw positional indices for the construction of bootstrap samples.
Storing the positional indices instead of the full bootstrap samples saves a lot
of memory for datasets with many variables.
Args:
data (pandas.DataFrame): original dataset.
cluster_by (str): column name of the variable to cluster by.
seed (int): Random seed.
n_draws (int): number of draws, only relevant if seeds is None.
Returns:
list: list of numpy arrays with positional indices
"""
np.random.seed(seed)
n_obs = len(data)
if cluster_by is None:
bootstrap_indices = list(np.random.randint(0, n_obs, size=(n_draws, n_obs)))
else:
clusters = data[cluster_by].unique()
drawn_clusters = np.random.choice(
clusters, size=(n_draws, len(clusters)), replace=True
)
bootstrap_indices = _convert_cluster_ids_to_indices(
data[cluster_by], drawn_clusters
)
return bootstrap_indices
def _convert_cluster_ids_to_indices(cluster_col, drawn_clusters):
"""Convert the drawn clusters to positional indices of individual observations.
Args:
cluster_col (pandas.Series):
"""
bootstrap_indices = []
cluster_to_locs = pd.Series(np.arange(len(cluster_col)), index=cluster_col)
for draw in drawn_clusters:
bootstrap_indices.append(cluster_to_locs[draw].to_numpy())
return bootstrap_indices
def get_bootstrap_samples(data, cluster_by=None, seed=None, n_draws=1000):
"""Draw bootstrap samples.
If you have memory issues you should use get_bootstrap_indices instead and construct
the full samples only as needed.
Args:
data (pandas.DataFrame): original dataset.
cluster_by (str): column name of the variable to cluster by.
seed (int): Random seed.
n_draws (int): number of draws, only relevant if seeds is None.
Returns:
list: list of resampled datasets.
"""
indices = get_bootstrap_indices(
data=data,
cluster_by=cluster_by,
seed=seed,
n_draws=n_draws,
)
datasets = _get_bootstrap_samples_from_indices(data=data, bootstrap_indices=indices)
return datasets
def _get_bootstrap_samples_from_indices(data, bootstrap_indices):
"""convert bootstrap indices into actual bootstrap samples.
Args:
data (pandas.DataFrame): original dataset.
bootstrap_indices (list): List with numpy arrays containing positional indices
of observations in data.
Returns:
list: list of DataFrames
"""
out = [data.iloc[idx] for idx in bootstrap_indices]
return out
|
[
"numpy.random.randint",
"numpy.random.seed"
] |
[((646, 666), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (660, 666), True, 'import numpy as np\n'), ((750, 800), 'numpy.random.randint', 'np.random.randint', (['(0)', 'n_obs'], {'size': '(n_draws, n_obs)'}), '(0, n_obs, size=(n_draws, n_obs))\n', (767, 800), True, 'import numpy as np\n')]
|
import json
import boto3
from environs import Env
env = Env()
AWS_ENDPOINT_URL = env('AWS_ENDPOINT_URL', None)
SMTP_HOST = env('SMTP_HOST', None)
EMAIL_ENABLED = env.bool('EMAIL_ENABLED', default=True)
secrets_manager_client = boto3.client('secretsmanager', endpoint_url=AWS_ENDPOINT_URL)
def fetch_db_secret(db_secret_arn):
if db_secret_arn is None:
return None
response = secrets_manager_client.get_secret_value(SecretId=db_secret_arn)
return json.loads(response['SecretString'])
LAMBDA_TASK_ROOT = env('LAMBDA_TASK_ROOT', '')
DB_CONNECTION = env('DB_CONNECTION', None)
if DB_CONNECTION:
DB_CONNECTION = json.loads(DB_CONNECTION)
else:
DB_CONNECTION = fetch_db_secret(env('DB_SECRET_ARN', None))
FROM_EMAIL = env('FROM_EMAIL', None)
|
[
"json.loads",
"boto3.client",
"environs.Env"
] |
[((58, 63), 'environs.Env', 'Env', ([], {}), '()\n', (61, 63), False, 'from environs import Env\n'), ((231, 292), 'boto3.client', 'boto3.client', (['"""secretsmanager"""'], {'endpoint_url': 'AWS_ENDPOINT_URL'}), "('secretsmanager', endpoint_url=AWS_ENDPOINT_URL)\n", (243, 292), False, 'import boto3\n'), ((472, 508), 'json.loads', 'json.loads', (["response['SecretString']"], {}), "(response['SecretString'])\n", (482, 508), False, 'import json\n'), ((640, 665), 'json.loads', 'json.loads', (['DB_CONNECTION'], {}), '(DB_CONNECTION)\n', (650, 665), False, 'import json\n')]
|
#!/usr/bin/env python3
import requests
import json
import sqlite3
import sense_hat
import time
from pushbullet_api import PushbulletAPI
from climate_util import ClimateUtil
# Monitor and notification class
class MonitorNotifier:
def __init__(self, databaseName):
# Get sense hat access
self.__sense = sense_hat.SenseHat()
# Load JSON config variables
with open("config.json", "r") as jsonFile:
config = json.load(jsonFile)
self.__minTemp = float(config["min_temperature"])
self.__maxTemp = float(config["max_temperature"])
self.__minHumid = float(config["min_humidity"])
self.__maxHumid = float(config["max_humidity"])
# Load Pushbullet API access
self.__pushbulletAPI = PushbulletAPI()
# Connect to database for logging climate data
self.__connectToDatabase(databaseName)
# Connects to climate database if it exists, otherwise creating one
def __connectToDatabase(self, databaseName):
# Connect to database file
self.__database = sqlite3.connect(databaseName)
with self.__database:
# Get cursor for database
cursor = self.__database.cursor()
# Create climate data table if it doesn't exist
cursor.execute("CREATE TABLE IF NOT EXISTS ClimateData \
(time DATETIME, temperature NUMERIC, humidity NUMERIC)")
# Create notification table if it doesn't exist
cursor.execute("CREATE TABLE IF NOT EXISTS Notifications \
(timesent DATETIME)")
# Commit creating of table
self.__database.commit()
# Record the current temp data into database
def recordClimate(self):
# Get and validate current climate information
try:
temperature = float(ClimateUtil.getCalibratedTemp(self.__sense))
humidity = float(self.__sense.get_humidity())
except ValueError:
print("Warning: Invalid climate data recorded,\
stopping climate monitor")
SystemExit()
# Record climate information in database and send notification
with self.__database:
cursor = self.__database.cursor()
cursor.execute("INSERT INTO ClimateData (time, temperature, humidity) \
VALUES (DATETIME('now', 'localtime'), ?, ?)",
(temperature, humidity))
self.__database.commit()
# Check if notification sould be sent
self.__checkAndNotify(temperature, humidity)
# Sends a pushbullet notification if temperature is out of range
# and a notification has not already been sent today
def __checkAndNotify(self, temperature, humidity):
# If outside of config range, check database if notification
# has already been sent today
if temperature < self.__minTemp or temperature > self.__maxTemp or\
humidity < self.__minHumid or humidity > self.__maxHumid:
# Check if notification has already been sent today
with self.__database:
cursor = self.__database.cursor()
cursor.execute(
"SELECT COUNT(*) \
FROM Notifications \
WHERE strftime('%d-%m-%Y', timesent) \
= strftime('%d-%m-%Y', DATETIME('now', 'localtime'))")
recordCount = cursor.fetchone()[0]
# If a notification has already been sent, return immediately
if recordCount >= 1:
return
# Construct pushbullet message strings
title = "Raspberry Pi climate alert"
message = "Warning,"
if temperature < self.__minTemp:
message += " temperature is too low,"
if temperature > self.__maxTemp:
message += " temperature is too high,"
if humidity < self.__minHumid:
message += " humidity is too low,"
if humidity > self.__maxHumid:
message += " humidity is too high,"
message = message.rstrip(',') + "."
# Wait until program is able to connect to internet
while not ClimateUtil.checkConnection():
time.sleep(1)
# Send pushbullet message
self.__pushbulletAPI.sendNotification(title, message)
# Record sending of notification
with self.__database:
cursor = self.__database.cursor()
cursor.execute("INSERT INTO Notifications (timesent) \
VALUES (DATETIME('now', 'localtime'))")
self.__database.commit()
# Main method
if __name__ == "__main__":
# Database name variable
databaseName = "climate_data.db"
# Initialize monitor class
monitor = MonitorNotifier(databaseName)
# Check and record climate conditions every minute
while True:
monitor.recordClimate()
time.sleep(60)
|
[
"json.load",
"sense_hat.SenseHat",
"climate_util.ClimateUtil.getCalibratedTemp",
"time.sleep",
"sqlite3.connect",
"pushbullet_api.PushbulletAPI",
"climate_util.ClimateUtil.checkConnection"
] |
[((323, 343), 'sense_hat.SenseHat', 'sense_hat.SenseHat', ([], {}), '()\n', (341, 343), False, 'import sense_hat\n'), ((785, 800), 'pushbullet_api.PushbulletAPI', 'PushbulletAPI', ([], {}), '()\n', (798, 800), False, 'from pushbullet_api import PushbulletAPI\n'), ((1086, 1115), 'sqlite3.connect', 'sqlite3.connect', (['databaseName'], {}), '(databaseName)\n', (1101, 1115), False, 'import sqlite3\n'), ((5042, 5056), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (5052, 5056), False, 'import time\n'), ((453, 472), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (462, 472), False, 'import json\n'), ((1856, 1899), 'climate_util.ClimateUtil.getCalibratedTemp', 'ClimateUtil.getCalibratedTemp', (['self.__sense'], {}), '(self.__sense)\n', (1885, 1899), False, 'from climate_util import ClimateUtil\n'), ((4273, 4302), 'climate_util.ClimateUtil.checkConnection', 'ClimateUtil.checkConnection', ([], {}), '()\n', (4300, 4302), False, 'from climate_util import ClimateUtil\n'), ((4320, 4333), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4330, 4333), False, 'import time\n')]
|
import Tkinter as tk
class StatusBar:
def __init__(self, root, label):
self.label = tk.StringVar()
self.label.set(label)
self.root = root
self.initialize()
def initialize(self):
frame = tk.Frame(self.root, relief=tk.SUNKEN)
label = tk.Label(frame, font=('arial', 12, 'normal'), textvariable=self.label, padx=10, pady=10)
label.pack(fill=tk.X)
frame.pack(side=tk.BOTTOM)
def update(self, label):
percent = int(float(label)*100)
self.label.set("No mine probability: " + str(percent) + "%")
def clear(self):
self.label.set("")
|
[
"Tkinter.Frame",
"Tkinter.StringVar",
"Tkinter.Label"
] |
[((98, 112), 'Tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (110, 112), True, 'import Tkinter as tk\n'), ((237, 274), 'Tkinter.Frame', 'tk.Frame', (['self.root'], {'relief': 'tk.SUNKEN'}), '(self.root, relief=tk.SUNKEN)\n', (245, 274), True, 'import Tkinter as tk\n'), ((291, 384), 'Tkinter.Label', 'tk.Label', (['frame'], {'font': "('arial', 12, 'normal')", 'textvariable': 'self.label', 'padx': '(10)', 'pady': '(10)'}), "(frame, font=('arial', 12, 'normal'), textvariable=self.label, padx\n =10, pady=10)\n", (299, 384), True, 'import Tkinter as tk\n')]
|
#!/usr/bin/env python3
"""{PIPELINE_NAME} pipeline (version: {PIPELINE_VERSION}): creates
pipeline-specific config files to given output directory and runs the
pipeline (unless otherwise requested).
"""
# generic usage {PIPELINE_NAME} and {PIPELINE_VERSION} replaced while
# printing usage
#--- standard library imports
#
import sys
import os
import logging
#--- third-party imports
#
import yaml
#--- project specific imports
#
# add lib dir for this pipeline installation to PYTHONPATH
LIB_PATH = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", "lib"))
if LIB_PATH not in sys.path:
sys.path.insert(0, LIB_PATH)
from readunits import get_samples_and_readunits_from_cfgfile
from readunits import get_readunits_from_args
from pipelines import get_pipeline_version
from pipelines import PipelineHandler
from pipelines import logger as aux_logger
from pipelines import get_cluster_cfgfile
from pipelines import default_argparser
import configargparse
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "2016 Genome Institute of Singapore"
__license__ = "The MIT License (MIT)"
# only dump() and following do not automatically create aliases
yaml.Dumper.ignore_aliases = lambda *args: True
PIPELINE_BASEDIR = os.path.dirname(sys.argv[0])
CFG_DIR = os.path.join(PIPELINE_BASEDIR, "cfg")
# same as folder name. also used for cluster job names
PIPELINE_NAME = "gatk"
MARK_DUPS = True
# global logger
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
'[{asctime}] {levelname:8s} {filename} {message}', style='{'))
logger.addHandler(handler)
def main():
"""main function
"""
default_parser = default_argparser(CFG_DIR, with_readunits=True)
parser = configargparse.ArgumentParser(description=__doc__.format(
PIPELINE_NAME=PIPELINE_NAME, PIPELINE_VERSION=get_pipeline_version()),
parents=[default_parser])
parser._optionals.title = "Arguments"
# pipeline specific args
parser.add_argument('-t', "--seqtype", required=True,
choices=['WGS', 'WES', 'targeted'],
help="Sequencing type")
parser.add_argument('-l', "--bed",
help="Bed file listing regions of interest."
" Required for WES and targeted sequencing.")
default = 4
parser.add_argument("-c", "--hc-nct", default=default, type=int,
help="Number of Haplotype Caller threads (per region cluster)."
" Values>1 reported to make Haplotype Caller unstable (default={})".format(default))
default = 100
parser.add_argument('-i', "--interval-padding", default=default,
help="Interval padding (for non-WGS only; default = {})".format(default))
parser.add_argument('-j', "--joint-calls", action='store_true',
help="Perform joint/cohort calling (requires multisample input)")
parser.add_argument('--raw-bam',
help="Advanced: Injects raw (pre-dedup, pre-BQSR etc.) BAM (overwrites fq options)."
" WARNING: reference needs to match pipeline requirements")
parser.add_argument('--proc-bam',
help="Advanced: Injects processed (post-dedup, post-BQSR etc.) BAM (overwrites fq options)."
" WARNING: reference and pre-processing need to match pipeline requirements")
# FIXME can be achieved with --until rule as well
parser.add_argument('--bam-only', action='store_true',
help="Only process up until BAM file")
parser.add_argument('--gvcf-only', action='store_true',
help="Only process up until GVCF file")
args = parser.parse_args()
# Repeateable -v and -q for setting logging level.
# See https://www.reddit.com/r/Python/comments/3nctlm/what_python_tools_should_i_be_using_on_every/
# and https://gist.github.com/andreas-wilm/b6031a84a33e652680d4
# script -vv -> DEBUG
# script -v -> INFO
# script -> WARNING
# script -q -> ERROR
# script -qq -> CRITICAL
# script -qqq -> no logging at all
logger.setLevel(logging.WARN + 10*args.quiet - 10*args.verbose)
aux_logger.setLevel(logging.WARN + 10*args.quiet - 10*args.verbose)
if os.path.exists(args.outdir):
logger.fatal("Output directory %s already exists", args.outdir)
sys.exit(1)
# samples is a dictionary with sample names as key (mostly just
# one) and readunit keys as value. readunits is a dict with
# readunits (think: fastq pairs with attributes) as value
if args.sample_cfg:
if any([args.fq1, args.fq2, args.sample, args.raw_bam, args.proc_bam]):
logger.fatal("Config file overrides fastq, sample and BAM arguments."
" Use one or the other")
sys.exit(1)
if not os.path.exists(args.sample_cfg):
logger.fatal("Config file %s does not exist", args.sample_cfg)
sys.exit(1)
samples, readunits = get_samples_and_readunits_from_cfgfile(args.sample_cfg)
else:# no sample config, so input is either fastq or existing bam
samples = dict()
if not args.sample:
logger.fatal("Need sample name if not using config file")
sys.exit(1)
if args.raw_bam or args.proc_bam:
assert not args.fq1, ("BAM injection overwrites fastq arguments")
if args.raw_bam:
assert os.path.exists(args.raw_bam)
assert not args.proc_bam, ("Cannot inject raw and processed BAM")
if args.proc_bam:
assert os.path.exists(args.proc_bam)
assert not args.raw_bam, ("Cannot inject raw and processed BAM")
readunits = dict()
samples[args.sample] = []
elif args.fq1:
readunits = get_readunits_from_args(args.fq1, args.fq2)
# all readunits go into this one sample specified on the command-line
samples[args.sample] = list(readunits.keys())
else:
logger.fatal("Need at least one fastq files as argument if not using config file")
sys.exit(1)
if args.seqtype in ['WES', 'targeted']:
if not args.bed:
logger.fatal("Analysis of exome and targeted sequence runs requires a bed file")
sys.exit(1)
else:
if not os.path.exists(args.bed):
logger.fatal("Bed file %s does not exist", args.sample_cfg)
sys.exit(1)
if args.joint_calls:
if len(samples)<2:
logger.fatal("Need at least two samples for joint calling")
sys.exit(1)
# turn arguments into cfg_dict (gets merged with other configs late)
#
cfg_dict = dict()
cfg_dict['readunits'] = readunits
cfg_dict['samples'] = samples
cfg_dict['seqtype'] = args.seqtype
cfg_dict['intervals'] = os.path.abspath(args.bed) if args.bed else None# always safe, might be used for WGS as well
cfg_dict['mark_dups'] = MARK_DUPS
cfg_dict['bam_only'] = args.bam_only
cfg_dict['gvcf_only'] = args.gvcf_only
cfg_dict['hc_nct'] = args.hc_nct
cfg_dict['joint_calls'] = args.joint_calls
cfg_dict['interval_padding'] = args.interval_padding
pipeline_handler = PipelineHandler(
PIPELINE_NAME, PIPELINE_BASEDIR,
args, cfg_dict,
cluster_cfgfile=get_cluster_cfgfile(CFG_DIR))
pipeline_handler.setup_env()
# Inject existing BAM by symlinking (everything upstream is temporary anyway)
# WARNING: filename has to match definition in Snakefile!
if args.raw_bam:
target = os.path.join(args.outdir, "out", args.sample,
"{}.bwamem.bam".format(args.sample))
os.makedirs(os.path.dirname(target))
os.symlink(os.path.abspath(args.raw_bam), target)
src_bai = os.path.abspath(args.raw_bam) + ".bai"
if os.path.exists(src_bai):
os.symlink(src_bai, target + ".bai")
elif args.proc_bam:
target = os.path.join(args.outdir, "out", args.sample,
"{}.bwamem".format(args.sample))
if cfg_dict['mark_dups']:
target += ".dedup"
if cfg_dict['seqtype'] != 'targeted':
target += ".bqsr"
target += ".bam"
os.makedirs(os.path.dirname(target))
os.symlink(os.path.abspath(args.proc_bam), target)
if os.path.exists(os.path.abspath(args.proc_bam) + ".bai"):
os.symlink(os.path.abspath(args.proc_bam) + ".bai", target + ".bai")
pipeline_handler.submit(args.no_run)
if __name__ == "__main__":
main()
|
[
"os.path.abspath",
"readunits.get_samples_and_readunits_from_cfgfile",
"os.path.dirname",
"logging.StreamHandler",
"os.path.exists",
"sys.path.insert",
"os.path.realpath",
"os.symlink",
"logging.Formatter",
"pipelines.logger.setLevel",
"pipelines.get_cluster_cfgfile",
"pipelines.get_pipeline_version",
"sys.exit",
"pipelines.default_argparser",
"os.path.join",
"readunits.get_readunits_from_args",
"logging.getLogger"
] |
[((1270, 1298), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1285, 1298), False, 'import os\n'), ((1309, 1346), 'os.path.join', 'os.path.join', (['PIPELINE_BASEDIR', '"""cfg"""'], {}), "(PIPELINE_BASEDIR, 'cfg')\n", (1321, 1346), False, 'import os\n'), ((1470, 1497), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1487, 1497), False, 'import logging\n'), ((1508, 1531), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1529, 1531), False, 'import logging\n'), ((634, 662), 'sys.path.insert', 'sys.path.insert', (['(0)', 'LIB_PATH'], {}), '(0, LIB_PATH)\n', (649, 662), False, 'import sys\n'), ((1553, 1632), 'logging.Formatter', 'logging.Formatter', (['"""[{asctime}] {levelname:8s} {filename} {message}"""'], {'style': '"""{"""'}), "('[{asctime}] {levelname:8s} {filename} {message}', style='{')\n", (1570, 1632), False, 'import logging\n'), ((1731, 1778), 'pipelines.default_argparser', 'default_argparser', (['CFG_DIR'], {'with_readunits': '(True)'}), '(CFG_DIR, with_readunits=True)\n', (1748, 1778), False, 'from pipelines import default_argparser\n'), ((4319, 4390), 'pipelines.logger.setLevel', 'aux_logger.setLevel', (['(logging.WARN + 10 * args.quiet - 10 * args.verbose)'], {}), '(logging.WARN + 10 * args.quiet - 10 * args.verbose)\n', (4338, 4390), True, 'from pipelines import logger as aux_logger\n'), ((4395, 4422), 'os.path.exists', 'os.path.exists', (['args.outdir'], {}), '(args.outdir)\n', (4409, 4422), False, 'import os\n'), ((4504, 4515), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4512, 4515), False, 'import sys\n'), ((5147, 5202), 'readunits.get_samples_and_readunits_from_cfgfile', 'get_samples_and_readunits_from_cfgfile', (['args.sample_cfg'], {}), '(args.sample_cfg)\n', (5185, 5202), False, 'from readunits import get_samples_and_readunits_from_cfgfile\n'), ((7068, 7093), 'os.path.abspath', 'os.path.abspath', (['args.bed'], {}), '(args.bed)\n', (7083, 7093), False, 'import os\n'), ((8083, 8106), 'os.path.exists', 'os.path.exists', (['src_bai'], {}), '(src_bai)\n', (8097, 8106), False, 'import os\n'), ((552, 578), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (568, 578), False, 'import os\n'), ((4959, 4970), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4967, 4970), False, 'import sys\n'), ((4986, 5017), 'os.path.exists', 'os.path.exists', (['args.sample_cfg'], {}), '(args.sample_cfg)\n', (5000, 5017), False, 'import os\n'), ((5106, 5117), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5114, 5117), False, 'import sys\n'), ((5410, 5421), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5418, 5421), False, 'import sys\n'), ((6483, 6494), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6491, 6494), False, 'import sys\n'), ((6795, 6806), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6803, 6806), False, 'import sys\n'), ((7552, 7580), 'pipelines.get_cluster_cfgfile', 'get_cluster_cfgfile', (['CFG_DIR'], {}), '(CFG_DIR)\n', (7571, 7580), False, 'from pipelines import get_cluster_cfgfile\n'), ((7931, 7954), 'os.path.dirname', 'os.path.dirname', (['target'], {}), '(target)\n', (7946, 7954), False, 'import os\n'), ((7975, 8004), 'os.path.abspath', 'os.path.abspath', (['args.raw_bam'], {}), '(args.raw_bam)\n', (7990, 8004), False, 'import os\n'), ((8033, 8062), 'os.path.abspath', 'os.path.abspath', (['args.raw_bam'], {}), '(args.raw_bam)\n', (8048, 8062), False, 'import os\n'), ((8120, 8156), 'os.symlink', 'os.symlink', (['src_bai', "(target + '.bai')"], {}), "(src_bai, target + '.bai')\n", (8130, 8156), False, 'import os\n'), ((5596, 5624), 'os.path.exists', 'os.path.exists', (['args.raw_bam'], {}), '(args.raw_bam)\n', (5610, 5624), False, 'import os\n'), ((5760, 5789), 'os.path.exists', 'os.path.exists', (['args.proc_bam'], {}), '(args.proc_bam)\n', (5774, 5789), False, 'import os\n'), ((5990, 6033), 'readunits.get_readunits_from_args', 'get_readunits_from_args', (['args.fq1', 'args.fq2'], {}), '(args.fq1, args.fq2)\n', (6013, 6033), False, 'from readunits import get_readunits_from_args\n'), ((6296, 6307), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6304, 6307), False, 'import sys\n'), ((6528, 6552), 'os.path.exists', 'os.path.exists', (['args.bed'], {}), '(args.bed)\n', (6542, 6552), False, 'import os\n'), ((6646, 6657), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6654, 6657), False, 'import sys\n'), ((8515, 8538), 'os.path.dirname', 'os.path.dirname', (['target'], {}), '(target)\n', (8530, 8538), False, 'import os\n'), ((8559, 8589), 'os.path.abspath', 'os.path.abspath', (['args.proc_bam'], {}), '(args.proc_bam)\n', (8574, 8589), False, 'import os\n'), ((1904, 1926), 'pipelines.get_pipeline_version', 'get_pipeline_version', ([], {}), '()\n', (1924, 1926), False, 'from pipelines import get_pipeline_version\n'), ((8625, 8655), 'os.path.abspath', 'os.path.abspath', (['args.proc_bam'], {}), '(args.proc_bam)\n', (8640, 8655), False, 'import os\n'), ((8690, 8720), 'os.path.abspath', 'os.path.abspath', (['args.proc_bam'], {}), '(args.proc_bam)\n', (8705, 8720), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
""" Unit Test On Grid
description:
This is the unit test for basic grid.
content:
- TestGrid
- TestGrid8D
author: Shin-Fu (<NAME>
latest update:
- 2019/05/10
- 2019/05/14 add TestGridDB
- 2019/05/15 add test_case for DynamicBoundGridWithShortcuts
"""
import os
import sys
import unittest
root = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(root)
from graph.grid import GridWithWeights
from graph.grid8d import EightDirectionGrid
from graph.gridDB import DynamicBoundGrid, DynamicBoundGridWithShortcuts
from graph.duality_graph import DualityGraph
class TestGrid(unittest.TestCase):
def __init__(self, methodName='runTest'):
super(TestGrid, self).__init__(methodName)
self.g = GridWithWeights(4, 4)
def test_case1(self):
self.assertSetEqual(set(self.g.neighbors((1,1))), set([(0, 1), (2, 1), (1, 0), (1, 2)]))
self.assertSetEqual(set(self.g.neighbors((1,0))), set([(0, 0), (1, 1), (2, 0)]))
self.assertSetEqual(set(self.g.neighbors((3,3))), set([(3, 2), (2, 3)]))
class TestGrid8D(unittest.TestCase):
def __init__(self, methodName='runTest'):
super(TestGrid8D, self).__init__(methodName)
self.g = EightDirectionGrid(4, 4)
def test_case1(self):
self.assertSetEqual(set(self.g.neighbors((1,1))), set([(2, 0), (1, 0), (0, 0), (2, 1), (0, 1), (2, 2), (1, 2), (0, 2)]))
class TestGridDB(unittest.TestCase):
def __init__(self, methodName='runTest'):
super(TestGridDB, self).__init__(methodName)
self.g1 = DynamicBoundGrid(4, 4)
self.g1.set_search((0, 0), (3, 3))
self.g2 = DynamicBoundGridWithShortcuts(4, 4)
self.g2.set_search((0, 0), (3, 3))
def test_case1(self):
self.assertSetEqual(set(self.g1.neighbors((0,0))), set([(1, 0), (0, 1), (1, 1)]))
def test_case2(self):
self.assertSetEqual(set(self.g2.neighbors((0,0))), set([(1, 0), (0, 1), (1, 1)]))
class TestDualityGraph(unittest.TestCase):
def __init__(self, methodName='runTest'):
super(TestDualityGraph, self).__init__(methodName)
self.g1 = DualityGraph(4, 4)
self.g1.set_search((0, 0), (3, 3))
def test_case1(self):
self.assertSetEqual(set(self.g1.neighbors((0,0))), set([(3, 0), (0, 3), (3, 3)]))
if __name__ == '__main__':
unittest.main(verbosity=1)
|
[
"sys.path.append",
"unittest.main",
"graph.duality_graph.DualityGraph",
"graph.gridDB.DynamicBoundGridWithShortcuts",
"os.path.dirname",
"graph.grid8d.EightDirectionGrid",
"graph.grid.GridWithWeights",
"graph.gridDB.DynamicBoundGrid"
] |
[((409, 430), 'sys.path.append', 'sys.path.append', (['root'], {}), '(root)\n', (424, 430), False, 'import sys\n'), ((375, 400), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (390, 400), False, 'import os\n'), ((2459, 2485), 'unittest.main', 'unittest.main', ([], {'verbosity': '(1)'}), '(verbosity=1)\n', (2472, 2485), False, 'import unittest\n'), ((797, 818), 'graph.grid.GridWithWeights', 'GridWithWeights', (['(4)', '(4)'], {}), '(4, 4)\n', (812, 818), False, 'from graph.grid import GridWithWeights\n'), ((1289, 1313), 'graph.grid8d.EightDirectionGrid', 'EightDirectionGrid', (['(4)', '(4)'], {}), '(4, 4)\n', (1307, 1313), False, 'from graph.grid8d import EightDirectionGrid\n'), ((1637, 1659), 'graph.gridDB.DynamicBoundGrid', 'DynamicBoundGrid', (['(4)', '(4)'], {}), '(4, 4)\n', (1653, 1659), False, 'from graph.gridDB import DynamicBoundGrid, DynamicBoundGridWithShortcuts\n'), ((1723, 1758), 'graph.gridDB.DynamicBoundGridWithShortcuts', 'DynamicBoundGridWithShortcuts', (['(4)', '(4)'], {}), '(4, 4)\n', (1752, 1758), False, 'from graph.gridDB import DynamicBoundGrid, DynamicBoundGridWithShortcuts\n'), ((2227, 2245), 'graph.duality_graph.DualityGraph', 'DualityGraph', (['(4)', '(4)'], {}), '(4, 4)\n', (2239, 2245), False, 'from graph.duality_graph import DualityGraph\n')]
|
#!/usr/bin/env python
# vim:ts=4:sts=4:sw=4:et
#
# Author: <NAME>
# Date: 2018-07-13 22:46:34 +0100 (Fri, 13 Jul 2018)
#
# https://github.com/harisekhon/nagios-plugins
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
"""
Nagios Plugin to check HBase RegionServer requests imbalance via the HMaster UI
Tested on Apache HBase 0.95, 0.96, 0.98, 1.0, 1.1, 1.2, 1.3, 1.4, 2.0, 2.1
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
#import logging
#import json
import os
import sys
import traceback
try:
from bs4 import BeautifulSoup
except ImportError:
print(traceback.format_exc(), end='')
sys.exit(4)
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
from harisekhon.utils import isInt, support_msg, UnknownError, plural
from harisekhon import RestNagiosPlugin
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = '<NAME>'
__version__ = '0.3.0'
class CheckHBaseRegionServerBalance(RestNagiosPlugin):
def __init__(self):
# Python 2.x
super(CheckHBaseRegionServerBalance, self).__init__()
# Python 3.x
# super().__init__()
self.name = ['HBase Master', 'HBase']
self.default_port = 16010
self.path = '/master-status'
self.auth = False
self.json = False
self.msg = 'HBase msg not defined'
def add_options(self):
super(CheckHBaseRegionServerBalance, self).add_options()
self.add_thresholds(default_warning=50)
def process_options(self):
super(CheckHBaseRegionServerBalance, self).process_options()
self.validate_thresholds(percent=True, optional=True)
def parse(self, req):
soup = BeautifulSoup(req.content, 'html.parser')
#if log.isEnabledFor(logging.DEBUG):
# log.debug("BeautifulSoup prettified:\n%s\n%s", soup.prettify(), '='*80)
# this masks underlying exception
#try:
tab = soup.find('div', {'id':'tab_baseStats'})
table = tab.find_next('table')
rows = table.findChildren('tr')
if len(rows) < 2:
raise UnknownError('no regionserver rows found in base stats table! {}'.format(support_msg()))
# HBase 1.1 in HDP 2.3: ServerName | Start time | Requests Per Second | Num. Regions
# HBase 1.2 (Apache): ServerName | Start time | Version | Requests per Second | Num. Regions
# HBase 1.4 (Apache): ServerName | Start time | Last Contact | Version | Requests Per Second | Num. Regions
th_list = rows[0].findChildren('th')
if len(th_list) < 4:
raise UnknownError('no table header for base stats table!')
expected_header = 'Requests Per Second'
col_index = len(th_list) - 2
found_header = th_list[col_index].text
if found_header != expected_header:
raise UnknownError("wrong table header found for column 4! Expected '{}' but got '{}'. {}"\
.format(expected_header, found_header, support_msg()))
stats = {}
for row in rows[1:]:
cols = row.findChildren('td')
if len(cols) < 4:
raise UnknownError('4th column in table not found! {}'.format(support_msg()))
regionserver = cols[0].text.strip().split(',')[0]
if 'Total:' in regionserver:
break
reqs_per_sec = cols[col_index].text.strip()
if not isInt(reqs_per_sec):
raise UnknownError("non-integer found in Requests Per Second column for regionserver '{}'. {}"\
.format(regionserver, support_msg()))
# fix for this is to cast string '1.0' to float and then cast to int
# ValueError: invalid literal for int() with base 10: '1.0'
stats[regionserver] = int(float(reqs_per_sec))
self.process_stats(stats)
#except (AttributeError, TypeError):
# raise UnknownError('failed to parse HBase Master UI status page. {}'.format(support_msg()))
def process_stats(self, stats):
lowest_requests = None
highest_requests = None
lowest_regionserver = None
highest_regionserver = None
for regionserver in stats:
if lowest_requests is None:
lowest_requests = stats[regionserver]
lowest_regionserver = regionserver
if highest_requests is None:
highest_requests = stats[regionserver]
highest_regionserver = regionserver
if stats[regionserver] > highest_requests:
highest_requests = stats[regionserver]
highest_regionserver = regionserver
if stats[regionserver] < lowest_requests:
lowest_requests = stats[regionserver]
lowest_regionserver = regionserver
# simple algo - let me know if you think can be a better calculation
imbalance = (highest_requests - lowest_requests) / max(highest_requests, 1) * 100
num_regionservers = len(stats)
self.msg = 'HBase RegionServers reqs/sec imbalance = {:.0f}% across {} RegionServer{}'\
.format(imbalance, num_regionservers, plural(num_regionservers))
self.check_thresholds(imbalance)
if self.verbose or not self.is_ok():
self.msg += ' [min reqs/sec={} on {} / max reqs/sec={} on {}]'\
.format(lowest_requests, lowest_regionserver, highest_requests, highest_regionserver)
self.msg += ' | reqs_per_sec_balance={:.2f}%{} lowest_requests_per_sec={} highest_requests_per_sec={}'\
.format(imbalance, self.get_perf_thresholds(), lowest_requests, highest_requests)
if __name__ == '__main__':
CheckHBaseRegionServerBalance().main()
|
[
"sys.path.append",
"harisekhon.utils.UnknownError",
"os.path.dirname",
"traceback.format_exc",
"harisekhon.utils.plural",
"bs4.BeautifulSoup",
"harisekhon.utils.support_msg",
"harisekhon.utils.isInt",
"os.path.join",
"sys.exit"
] |
[((990, 1019), 'os.path.join', 'os.path.join', (['srcdir', '"""pylib"""'], {}), "(srcdir, 'pylib')\n", (1002, 1019), False, 'import os\n'), ((1020, 1043), 'sys.path.append', 'sys.path.append', (['libdir'], {}), '(libdir)\n', (1035, 1043), False, 'import sys\n'), ((954, 979), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (969, 979), False, 'import os\n'), ((917, 928), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (925, 928), False, 'import sys\n'), ((1282, 1293), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (1290, 1293), False, 'import sys\n'), ((2112, 2153), 'bs4.BeautifulSoup', 'BeautifulSoup', (['req.content', '"""html.parser"""'], {}), "(req.content, 'html.parser')\n", (2125, 2153), False, 'from bs4 import BeautifulSoup\n'), ((881, 903), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (901, 903), False, 'import traceback\n'), ((1246, 1268), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1266, 1268), False, 'import traceback\n'), ((3013, 3066), 'harisekhon.utils.UnknownError', 'UnknownError', (['"""no table header for base stats table!"""'], {}), "('no table header for base stats table!')\n", (3025, 3066), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n'), ((5628, 5653), 'harisekhon.utils.plural', 'plural', (['num_regionservers'], {}), '(num_regionservers)\n', (5634, 5653), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n'), ((3847, 3866), 'harisekhon.utils.isInt', 'isInt', (['reqs_per_sec'], {}), '(reqs_per_sec)\n', (3852, 3866), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n'), ((2591, 2604), 'harisekhon.utils.support_msg', 'support_msg', ([], {}), '()\n', (2602, 2604), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n'), ((3417, 3430), 'harisekhon.utils.support_msg', 'support_msg', ([], {}), '()\n', (3428, 3430), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n'), ((3631, 3644), 'harisekhon.utils.support_msg', 'support_msg', ([], {}), '()\n', (3642, 3644), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n'), ((4037, 4050), 'harisekhon.utils.support_msg', 'support_msg', ([], {}), '()\n', (4048, 4050), False, 'from harisekhon.utils import isInt, support_msg, UnknownError, plural\n')]
|
import os
import random
from os.path import splitext
from urllib.parse import urlparse
import requests
from dotenv import load_dotenv
def get_file_extension(link):
link_path = urlparse(link).path
extension = splitext(link_path)[-1]
return extension
def get_last_comic_num():
url = "https://xkcd.com/info.0.json"
response = requests.get(url)
response.raise_for_status()
last_comic_num = response.json()["num"]
return last_comic_num
def download_comic(url, filename):
response = requests.get(url)
response.raise_for_status()
with open(filename, "wb") as file:
file.write(response.content)
def fetch_random_comic():
first_comic_num = 1
last_comic_num = int(get_last_comic_num())
comic_num = random.randint(first_comic_num, last_comic_num)
url = f"https://xkcd.com/{comic_num}/info.0.json"
response = requests.get(url)
response.raise_for_status()
converted_response = response.json()
comments = converted_response["alt"]
comic_link = converted_response["img"]
extension = get_file_extension(comic_link)
comic_name = converted_response["safe_title"]
filename = f"{comic_name}{extension}"
download_comic(comic_link, filename)
return filename, comments
def check_api_response(api_response):
if "error" in api_response:
raise requests.HTTPError(
"Ошибка с VK API",
api_response["error"]["error_msg"]
)
def get_server_link(token):
url = "https://api.vk.com/method/photos.getWallUploadServer"
payload = {
"access_token": token,
"group_id": 212094963,
"v": 5.131,
}
response = requests.get(url, params=payload)
response.raise_for_status()
converted_response = response.json()
check_api_response(converted_response)
server_link = converted_response["response"]["upload_url"]
return server_link
def upload_img_to_server(filename, upload_url):
with open(filename, "rb") as file:
files = {
"photo": file,
}
response = requests.post(upload_url, files=files)
response.raise_for_status()
server_response = response.json()
check_api_response(server_response)
return server_response
def upload_img_to_group(token, photo, server, hash_parameter):
url = "https://api.vk.com/method/photos.saveWallPhoto"
payload = {
"access_token": token,
"group_id": 212094963,
"v": 5.131,
"photo": photo,
"server": server,
"hash": hash_parameter,
}
response = requests.post(
url,
params=payload)
response.raise_for_status()
vk_response = response.json()
check_api_response(vk_response)
return vk_response
def publish_comic(token, comments, owner_id, media_id):
url = "https://api.vk.com/method/wall.post"
payload = {
"owner_id": -212094963,
"from_group": 1,
"message": comments,
"access_token": token,
"v": 5.131,
"attachments": f"photo{owner_id}_{media_id}"
}
response = requests.post(url, params=payload)
response.raise_for_status()
check_api_response(response.json())
def main():
load_dotenv()
vk_token = os.getenv("VK_ACCESS_TOKEN")
try:
filename, comments = fetch_random_comic()
server_link = get_server_link(vk_token)
server_response = upload_img_to_server(filename, server_link)
uploaded_img = server_response["photo"]
server_num = server_response["server"]
server_hash = server_response["hash"]
vk_response = upload_img_to_group(
vk_token,
uploaded_img,
server_num,
server_hash)
group_owner_id = vk_response["response"][0]["owner_id"]
media_id = vk_response["response"][0]["id"]
publish_comic(
vk_token,
comments,
group_owner_id,
media_id)
except requests.HTTPError as err:
print(err)
except requests.ConnectionError as err:
print("Connection Error. Check Internet connection.\n", str(err))
except OSError as err:
print("Error: %s - %s." % (err.filename, err.strerror))
finally:
os.remove(f"./{filename}")
if __name__ == "__main__":
main()
|
[
"os.remove",
"random.randint",
"dotenv.load_dotenv",
"os.path.splitext",
"requests.get",
"requests.HTTPError",
"requests.post",
"os.getenv",
"urllib.parse.urlparse"
] |
[((348, 365), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (360, 365), False, 'import requests\n'), ((520, 537), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (532, 537), False, 'import requests\n'), ((761, 808), 'random.randint', 'random.randint', (['first_comic_num', 'last_comic_num'], {}), '(first_comic_num, last_comic_num)\n', (775, 808), False, 'import random\n'), ((878, 895), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (890, 895), False, 'import requests\n'), ((1675, 1708), 'requests.get', 'requests.get', (['url'], {'params': 'payload'}), '(url, params=payload)\n', (1687, 1708), False, 'import requests\n'), ((2583, 2617), 'requests.post', 'requests.post', (['url'], {'params': 'payload'}), '(url, params=payload)\n', (2596, 2617), False, 'import requests\n'), ((3097, 3131), 'requests.post', 'requests.post', (['url'], {'params': 'payload'}), '(url, params=payload)\n', (3110, 3131), False, 'import requests\n'), ((3222, 3235), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (3233, 3235), False, 'from dotenv import load_dotenv\n'), ((3251, 3279), 'os.getenv', 'os.getenv', (['"""VK_ACCESS_TOKEN"""'], {}), "('VK_ACCESS_TOKEN')\n", (3260, 3279), False, 'import os\n'), ((183, 197), 'urllib.parse.urlparse', 'urlparse', (['link'], {}), '(link)\n', (191, 197), False, 'from urllib.parse import urlparse\n'), ((219, 238), 'os.path.splitext', 'splitext', (['link_path'], {}), '(link_path)\n', (227, 238), False, 'from os.path import splitext\n'), ((1349, 1422), 'requests.HTTPError', 'requests.HTTPError', (['"""Ошибка с VK API"""', "api_response['error']['error_msg']"], {}), "('Ошибка с VK API', api_response['error']['error_msg'])\n", (1367, 1422), False, 'import requests\n'), ((2078, 2116), 'requests.post', 'requests.post', (['upload_url'], {'files': 'files'}), '(upload_url, files=files)\n', (2091, 2116), False, 'import requests\n'), ((4258, 4284), 'os.remove', 'os.remove', (['f"""./{filename}"""'], {}), "(f'./{filename}')\n", (4267, 4284), False, 'import os\n')]
|
# Register your models here.
from django.contrib.admin import register, ModelAdmin
from fitbox.consultas.models import Consulta
@register(Consulta)
class ConsultaAdmin(ModelAdmin):
list_filter = ('paciente',)
prepopulated_fields = {'slug': ('descricao', )}
|
[
"django.contrib.admin.register"
] |
[((132, 150), 'django.contrib.admin.register', 'register', (['Consulta'], {}), '(Consulta)\n', (140, 150), False, 'from django.contrib.admin import register, ModelAdmin\n')]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.