repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
kariminf/ArArud | aruudy/poetry/meter.py | 1 | 19931 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 Abdelkrime Aries <[email protected]>
#
# ---- AUTHORS ----
# 2019 Abdelkrime Aries <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
from aruudy.poetry import foot
from aruudy.poetry.foot import TafiilaType as FT
from aruudy.poetry.foot import TafiilaComp
re_haraka = re.compile(u"[\u064E\u064F\u0650\u0653]")
def get_ameter (text):
"""Get the Arabic meter of a given text.
Produces the Arabic meter of a given text in prosody form.
The Arabic meter is composed of two letters:
- "w" watad (peg) which are vocalized letters
- "s" sabab (cord) which are vowels and unvocalized letters
Parameters
----------
text : str
Arabic text in prosody form.
Returns
-------
tuple(str, list(str))
The Arabic meter of the input text (a string composed of "w" and "s"),
and the list of text parts corresponding to each meter unit.
"""
ameter = ""
parts = []
buf = ""
for c in text:
buf += c
if re_haraka.search(c):
if buf[: -2].strip():
ameter += "s" #sabab
parts.append(buf[: -2])
buf = buf[-2:]
ameter += "w" #watad
parts.append(buf)
buf = ""
if buf.strip():
ameter += "s"
parts.append(buf)
return ameter, parts
def a2e_meter (ameter):
"""Transforms an Arabic meter to an English one.
The Arabic meter uses vocalization as a basis:
- "w" watad (peg) which are vocalized letters
- "s" sabab (cord) which are vowels and unvocalized letters
While English meter uses syllables:
- "-" for long syllables, equivalent to "ws" in the Arabic one
- "u" for short syllables, equivalent to "w" in the Arabic one.
Parameters
----------
ameter : str
The Arabic meter using the two letters: "w" and "s".
Returns
-------
str
The English meter using the two characters: "-" and "u".
"""
res = ameter
res = res.replace("ws", "-")
res = res.replace("w", "u")
return res
def e2a_meter (emeter):
"""Transforms an English meter to an Arabic one.
The English meter uses syllables as a basis:
- "-" for long syllables, equivalent to "ws" in the Arabic one
- "u" for short syllables, equivalent to "w" in the Arabic one.
While the Arabic meter uses vocalization:
- "w" watad (peg) which are vocalized letters
- "s" sabab (cord) which are vowels and unvocalized letters
Parameters
----------
emeter : str
The English meter using the two characters: "-" and "u".
Returns
-------
str
The Arabic meter using the two letters: "w" and "s".
"""
res = emeter
res = res.replace("-", "ws")
res = res.replace("u", "w")
return res
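# Illustrative sketch (added for clarity; not part of the original module):
# how the two meter notations relate. The sample strings are hypothetical.
def _demo_meter_conversion():
    # A peg followed by a cord ("ws") is one long syllable "-";
    # a lone peg ("w") is one short syllable "u".
    assert a2e_meter("wswsw") == "--u"
    assert e2a_meter("--u") == "wswsw"
    return a2e_meter("wswsw")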
buhuur = []
class Part(TafiilaComp):
"""The text's part description.
Parameters
----------
tafiila_comp : TafiilaComp
The description of the Foot which this part is based on.
Attributes
----------
ameter : str
The Arabic meter.
emeter : str
The English meter.
text : str
The part of text following that meter.
"""
def __init__(self, tafiila_comp):
TafiilaComp.__init__(self, tafiila_comp.__dict__)
self.ameter = e2a_meter(self.emeter)
self.text = ""
def extract(self, units=[]):
"""Extracts the part of text following the meter.
Parameters
----------
units : list(str)
A list of vocalized and unvocalized elements,
generated from the text.
Returns
-------
list(str)
The remaining units after this part's text has been extracted,
or None if there are not enough units.
"""
l = len(self.emeter)
if not units or len(units) < l:
return None
self.text = "".join(units[:l])
return units[l:]
def to_dict(self):
"""Transforms this object to a dictionary.
Parameters
----------
Returns
-------
dict
The dictionary will contain:
- type (TafiilaComp): the type of the foot
- emeter (str): the English meter
- ameter (str): the Arabic meter
- mnemonic (str): the mnemonic describing the meter
- text (str): the text following the meter
"""
return {
"type": self.type,
"emeter": self.emeter,
"ameter": self.ameter,
"mnemonic": self.mnemonic,
"text": self.text
}
class BahrForm(object):
"""The form of a Bahr (meter).
For a given Arabic meter (Bahr), there may be multiple forms.
Parameters
----------
feet : list(Tafiila)
A list of feet describing the meter.
Attributes
----------
feet: list(Tafiila)
A list of feet describing the meter.
"""
def __init__(self, feet):
self.feet = feet
def validate(self, emeter, units=[]):
"""Chacks if an emeter follows this meter's form.
Parameters
----------
emeter : str
The English meter of the text.
units : list(str)
A list of vocalized and unvocalized elements,
generated from the text.
Returns
-------
list(Part)
The list of parts if the emeter matches this form, None otherwise.
"""
parts = []
text_emeter = emeter
units_cp = list(units)
for foot in self.feet: # different feet of the variant
text_foot, text_emeter = foot.process(text_emeter)
if not text_foot:
return None
part = Part(text_foot)
units_cp = part.extract(units_cp)
parts.append(part)
return parts
def extract_meter(bahrForm, used=True):
"""Extract the meter description from a list of :class:`~aruudy.poetry.foot.Tafiila` objects.
Parameters
----------
bahrForm : BahrForm
An object describing the meter's form.
used : bool
Meters, in Arabic, can have used forms different from the standard ones.
If True, the result is the used form; otherwise, it is the standard form.
Returns
-------
dict
A dictionary object describing the meter represented by the feet.
The dictionary contains these elements:
- type: a string describing the type of each foot (tafiila)
- mnemonic: a string describing the mnemonic of each foot.
- emeter: a string describing the English meter of each foot.
- ameter: a string describing the Arabic meter of each foot.
"""
res = {
"type": "",
"mnemonic": "",
"emeter": "",
"ameter": ""
}
sep = ""
for foot in bahrForm.feet:
meter = foot.get_form(used)
res["type"] += sep + meter.type.ar
res["mnemonic"] += sep + meter.mnemonic
res["emeter"] += sep + meter.emeter
res["ameter"] += sep + e2a_meter(meter.emeter)
if not sep:
sep = " "
return res
class Bahr(object):
"""Representation of the Arabic meter.
Parameters
----------
info : dict
A dictionary describing the meter: its "name", its forms ("meter") and its "key" verse.
Attributes
----------
name : dict
Bahr's name, which is composed of:
- arabic: its name in Arabic
- english: its name in English
- trans: its Arabic name's transliteration.
used_scansion : dict
The most used scansion.
The dictionary contains these elements:
- type: a string describing the type of each foot (tafiila)
- mnemonic: a string describing the mnemonic of each foot.
- emeter: a string describing the English meter of each foot.
- ameter: a string describing the Arabic meter of each foot.
meter : list(BahrForm)
A list of meter's forms.
std_scansion : dict
the standard scansion.
The dictionary contains these elements:
- type: a string describing the type of each foot (tafiila)
- mnemonic: a string describing the mnemonic of each foot.
- emeter: a string describing the English meter of each foot.
- ameter: a string describing the Arabic meter of each foot.
"""
def __init__(self, info):
buhuur.append(self)
self.name = info["name"]
self.meter = info["meter"]
self.key = info["key"]
self.used_scansion = extract_meter(self.meter[0])
self.std_scansion = extract_meter(self.meter[0], used=False)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __str__(self):
return str(self.get_names())
def get_names(self):
"""Get the names of the meter.
Parameters
----------
Returns
-------
dict
Bahr's name, which is composed of:
- arabic: its name in Arabic
- english: its name in English
- trans: its Arabic name's transliteration.
"""
return self.get_value("name")
def test_name(self, key, value):
"""Test if .
Parameters
----------
key : str
can be "arabic", "english" or "trans".
value : str
The name we are looking for.
Returns
-------
bool
True if this meter has the name specified by "value".
"""
return value == self.name[key]
def to_dict(self):
"""Transform the bahr to a dictionary.
Parameters
----------
Returns
-------
dict
The dictionary has three components "name", "used_scansion" and
"std_scansion" which are dictionaries too.
They are described in the attributes section.
"""
dic = {
"name": self.name,
"used_scansion": self.used_scansion,
"std_scansion": self.std_scansion
}
return dic
def validate(self, emeter, units=[]):
"""Validate a given emeter into one of the forms.
Search for a form which the given emeter follows.
Parameters
----------
emeter : str
English meter.
units : list(str)
A list of vocalized and unvocalized elements,
generated from the text.
Returns
-------
list(Part)
The list of parts if a matching form is found, None otherwise.
"""
for form in self.meter: # different forms
parts = form.validate(emeter, units)
if parts:
return parts
return None
tawiil = Bahr({
"name": {
"arabic": u"طويل",
"english": "long",
"trans": u"ṭawīl"
},
"meter": [
BahrForm([
foot.WWSWS([FT.SALIM, FT.QABDH]),
foot.WWSWSWS([FT.SALIM, FT.QABDH, FT.KAFF]),
foot.WWSWS([FT.SALIM, FT.QABDH]),
foot.WWSWSWS([FT.QABDH]),
])
],
"key": u"طويلٌ له دون البحور فضائلٌ فعولن مفاعيلن فعولن مفاعلن"
})
madiid = Bahr({
"name": {
"arabic": u"مديد",
"english": "protracted",
"trans": u"madīd"
},
"meter": [
BahrForm([
foot.WSWWSWS([FT.SALIM, FT.KHABN]),
foot.WSWWS([FT.SALIM, FT.KHABN]),
foot.WSWWSWS([FT.SALIM, FT.KHABN])
])
],
"key": u"لمديد الشعر عندي صفاتُ فاعلاتن فاعلن فاعلاتن"
})
basiit = Bahr({
"name": {
"arabic": u"بسيط",
"english": "spread-out",
"trans": u"basīṭ"
},
"meter": [
BahrForm([
foot.WSWSWWS([FT.SALIM, FT.KHABN, FT.TAI]),
foot.WSWWS([FT.SALIM, FT.KHABN]),
foot.WSWSWWS([FT.SALIM, FT.KHABN, FT.TAI]),
foot.WSWWS([FT.KHABN, FT.QATE]),
]),
BahrForm([
foot.WSWSWWS([FT.SALIM, FT.KHABN, FT.TAI]),
foot.WSWWS([FT.SALIM, FT.KHABN]),
foot.WSWSWWS([FT.SALIM, FT.KHABN, FT.TAI, FT.QATE, FT.TADIIL]),
])
],
"key": u"إن البسيط لديه يبسط الأملُ مستفعلن فعلن مستفعلن فعلن"
})
wafir = Bahr({
"name": {
"arabic": u"وافر",
"english": "abundant",
"trans": u"wāfir"
},
"meter": [
BahrForm([
foot.WWSWWWS([FT.SALIM, FT.ASAB]),
foot.WWSWWWS([FT.SALIM, FT.ASAB]),
foot.WWSWS([FT.SALIM]),
])
],
"key": u"بحور الشعر وافرها جميل مفاعلتن مفاعلتن فعولن"
})
kaamil = Bahr({
"name": {
"arabic": u"كامل",
"english": "complete",
"trans": u"kāmil"
},
"meter": [
BahrForm([
foot.WWWSWWS([FT.SALIM, FT.IDHMAR]),
foot.WWWSWWS([FT.SALIM, FT.IDHMAR]),
foot.WWWSWWS([FT.SALIM, FT.IDHMAR])
]),
BahrForm([
foot.WWWSWWS([FT.SALIM, FT.IDHMAR]),
foot.WWWSWWS([FT.SALIM, FT.IDHMAR])
])
],
"key": u"كمل الجمال من البحور الكامل متفاعلن متفاعلن متفاعلن"
})
hazj = Bahr({
"name": {
"arabic": u"هزج",
"english": "trilling",
"trans": u"hazaj",
},
"meter": [
BahrForm([
foot.WWSWSWS([FT.SALIM, FT.KAFF]),
foot.WWSWSWS([FT.SALIM, FT.KAFF])
])
],
"key": u"على الأهزاج تسهيل مفاعيلن مفاعيلن"
})
rajz = Bahr({
"name": {
"arabic": u"رجز",
"english": "trembling",
"trans": u"rajaz"
},
"meter": [
BahrForm([
foot.WSWSWWS([FT.SALIM, FT.KHABN]),
foot.WSWSWWS([FT.SALIM, FT.KHABN]),
foot.WSWSWWS([FT.SALIM, FT.KHABN])
])
],
"key": u"في أبحر الأرجاز بحرٌ يسهل مستفعلن مستفعلن مستفعلن"
})
raml = Bahr({
"name": {
"arabic": u"رمل",
"english": "trotting",
"trans": u"ramal",
},
"meter": [
BahrForm([
foot.WSWWSWS([FT.SALIM, FT.KHABN]),
foot.WSWWSWS([FT.SALIM, FT.KHABN]),
foot.WSWWSWS([FT.SALIM, FT.KHABN])
])
],
"key": u"رمل الأبحر ترويه الثقات فاعلاتن فاعلاتن فاعلاتن"
})
sariie = Bahr({
"name": {
"arabic": u"سريع",
"english": "swift",
"trans": u"sarīʿ",
},
"meter": [
BahrForm([
foot.WSWSWWS([FT.SALIM, FT.KHABN, FT.TAI, FT.KHABL]),
foot.WSWSWWS([FT.SALIM, FT.KHABN, FT.TAI, FT.KHABL]),
foot.WSWWS([FT.SALIM])
])
],
"key": u"بحرٌ سريع ماله ساحل مستفعلن مستفعلن فاعلن"
})
munsarih = Bahr({
"name": {
"arabic": u"منسرح",
"english": "quick-paced",
"trans": u"munsariħ"
},
"meter": [
BahrForm([
foot.WSWSWWS([FT.SALIM, FT.KHABN]),
foot.WSWSWSW([FT.SALIM, FT.TAI]),
foot.WSWSWWS([FT.TAI])
])
],
"key": u"منسرح فيه يضرب المثل مستفعلن مفعولات مفتعلن"
})
khafiif = Bahr({
"name": {
"arabic": u"خفيف",
"english": "light",
"trans": u"khafīf"
},
"meter": [
BahrForm([
foot.WSWWSWS([FT.SALIM, FT.KHABN, FT.KAFF]),
foot.WSWSWWS([FT.SALIM]),
foot.WSWWSWS([FT.SALIM, FT.KHABN, FT.SHAKL])
])
],
"key": u"يا خفيفاً خفّت به الحركات فاعلاتن مستفعلن فاعلاتن"
})
mudharie = Bahr({
"name": {
"arabic": u"مضارع",
"english": "similar",
"trans": u"muḍāriʿ"
},
"meter": [
BahrForm([
foot.WWSWSWS([FT.SALIM, FT.QABDH,FT.KAFF]),
foot.WSWWSWS([FT.SALIM])
])
],
"key": u"تعدّ المضارعات مفاعيلُ فاعلاتن"
})
muqtadhib = Bahr({
"name": {
"arabic": u"مقتضب",
"english": "untrained",
"trans": u"muqtaḍab"
},
"meter": [
BahrForm([
foot.WSWSWSW([FT.SALIM]),# FT.KHABN
foot.WSWSWWS([FT.TAI])
])
],
"key": u"اقتضب كما سألوا مفعلات مفتعلن"
})
mujdath = Bahr({
"name": {
"arabic": u"مجتث",
"english": "cut-off",
"trans": u"mujtathth"
},
"meter": [
BahrForm([
foot.WSWSWWS([FT.SALIM, FT.KHABN]),
foot.WSWWSWS([FT.SALIM, FT.KHABN])
])
],
"key": u"أن جثت الحركات مستفعلن فاعلاتن"
})
mutaqaarib = Bahr({
"name": {
"arabic": u"متقارب",
"english": "nearing",
"trans": u"mutaqārib"
},
"meter": [
BahrForm([
foot.WWSWS([FT.SALIM, FT.QABDH]),
foot.WWSWS([FT.SALIM, FT.QABDH]),
foot.WWSWS([FT.SALIM, FT.QABDH]),
foot.WWSWS([FT.SALIM, FT.QABDH, FT.QASR])
])
],
"key": u"عن المتقارب قال الخليل فعولن فعولن فعولن فعول"
})
mutadaarik = Bahr({
"name": {
"arabic": u"متدارك",
"english": "overtaking",
"trans": u"mutadārik"
},
"meter": [
BahrForm([
foot.WSWWS([FT.SALIM, FT.KHABN, FT.QATE]),
foot.WSWWS([FT.SALIM, FT.KHABN, FT.QATE]),
foot.WSWWS([FT.SALIM, FT.KHABN, FT.QATE]),
foot.WSWWS([FT.SALIM, FT.KHABN, FT.QATE])
])
],
"key": u"حركات المحدث تنتقل فعلن فعلن فعلن فعل"
})
def name_type(name):
"""decides if a name is in English or Arabic.
Parameters
----------
name : str
The name we want to test.
Returns
-------
str
"english" or "arabic".
"""
if re.match("^[a-zA-Z]", name):
return "english"
return "arabic"
def get_bahr(name, dic=True):
"""Search for poetry Bahr by name.
Parameters
----------
name : str
name of the poetry Bahr (meter).
dic : bool
True (default): return a dict object with all the information.
If False, return an object of type Bahr.
Returns
-------
Union[dict, Bahr, None]
A dict containing the information (if dic is True),
a Bahr object (if dic is False),
or None if no meter with that name exists.
"""
label = name_type(name)
for b in buhuur:
if b.test_name(label, name):
if dic:
return b.to_dict()
return b
return None
def get_names(lang=None):
"""get a list of meters names.
Parameters
----------
lang : str
The language of the names: "arabic", "english" or "trans".
If not specified, the result will contain the full name dictionaries.
Returns
-------
list(Union[str, dict])
A list of names.
"""
names = []
for bahr in buhuur:
if lang:
names.append(bahr.name[lang])
else:
names.append(bahr.name)
return names
def search_bahr(emeter, units=[]):
"""Search for Bahr of a given English meter.
Parameters
----------
emeter : str
English meter.
units : list(str)
A list of vocalized and unvocalized elements,
generated from the text.
Returns
-------
tuple(Bahr, Part)
A tuple of the found meter and the part's description.
If not found, it will return (None, None)
"""
for b in buhuur:
res = b.validate(emeter, units)
if res:
return b, res
return None, None
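# Illustrative usage sketch (added for clarity; not part of the original module).
# "long" is the English name registered above for the tawiil meter.
def _demo_bahr_lookup():
    info = get_bahr("long")            # dict with "name", "used_scansion" and "std_scansion"
    english_names = get_names(lang="english")  # e.g. ["long", "protracted", "spread-out", ...]
    return info, english_names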
| apache-2.0 | 2,906,759,876,360,441,000 | 23.503817 | 97 | 0.534372 | false |
mefyl/drake | src/drake/gnu.py | 1 | 9771 | import collections
import functools
import itertools
import subprocess
import drake
import drake.cxx
import drake.git
@functools.lru_cache(1)
def _default_make_binary():
from drake.which import which
to_try = [
'make',
'gmake',
'mingw32-make',
'mingw64-make',
]
for binary in to_try:
path = which(binary)
if path is not None:
return path
class GNUBuilder(drake.Builder):
def __init__(
self,
cxx_toolkit,
targets = [],
configure: """Configure script path (or None if no configure
step is needed)""" = None,
working_directory: "Deduced from configure" = None,
configure_args: "Arguments of the configure script" = [],
sources = None,
make_binary: "Make binary" = None,
makefile: "Makefile filename, used if not None" = None,
build_args: "Additional arguments for the make command" = ['install'],
additional_env: "Additional environment variables" = {},
configure_interpreter = None,
patch = None,
configure_stdout: 'Show configure standard output' = False,
build_stdout: 'Show build standard output' = False):
self.__toolkit = cxx_toolkit
self.__build_stdout = build_stdout
self.__configure = configure
self.__configure_args = configure_args
self.__configure_interpreter = configure_interpreter
self.__configure_stdout = configure_stdout
self.__targets = list(targets)
if make_binary is None:
self.__make_binary = _default_make_binary()
else:
self.__make_binary = make_binary
self.__makefile = makefile
self.__build_args = build_args
self.__env = {}
self.__env.update(additional_env)
self.__patch = patch
if make_binary is not None:
self.__env.setdefault('MAKE', make_binary.replace('\\', '/'))
if working_directory is not None:
self.__working_directory = working_directory
if not self.__working_directory.exists():
self.__working_directory.mkpath()
else:
if self.__configure is None:
raise Exception(
"Cannot deduce the working directory (no configure script)"
)
self.__working_directory = self.__configure.path().dirname()
if sources is None:
sources = []
if isinstance(cxx_toolkit.patchelf, drake.BaseNode):
sources.append(cxx_toolkit.patchelf)
drake.Builder.__init__(
self,
(configure is not None and [configure] or []) + sources,
self.__targets)
def execute(self):
env = dict(self.__env)
import os
env.update(os.environ)
# Patch
if self.__patch is not None:
patch_path = str(drake.path_root() / self.__patch.path())
patch_cmd = ['patch', '-N', '-p', '1', '-i', patch_path]
if not self.cmd('Patch %s' % self.work_directory,
patch_cmd,
cwd = self.work_directory):
return False
# Configure step
if self.__configure is not None:
if not self.cmd('Configure %s' % self.work_directory,
self.command_configure,
cwd = self.work_directory,
env = env,
leave_stdout = self.__configure_stdout):
return False
# Build step
if not self.cmd('Build %s' % self.work_directory,
self.command_build,
cwd = self.work_directory,
env = env,
leave_stdout = self.__build_stdout):
return False
for target in self.__targets:
path = target.path().without_prefix(self.work_directory)
if isinstance(target, drake.cxx.DynLib):
rpath = '.'
elif isinstance(target, drake.cxx.Executable):
rpath = '../lib'
else:
continue
with drake.WritePermissions(target):
cmd = self.__toolkit.rpath_set_command(target.path(), rpath)
if self.__toolkit.os is not drake.os.windows:
if not self.cmd('Fix rpath for %s' % target.path(), cmd):
return False
if self.__toolkit.os is drake.os.macos:
cmd = ['install_name_tool',
'-id', '@rpath/%s' % target.name().basename(),
str(target.path())]
if not self.cmd('Fix rpath for %s' % target.path(), cmd):
return False
lib_dependencies = self.parse_otool_libraries(target.path())
for dep in lib_dependencies:
if dep.basename() in (t.path().basename() for t in self.__targets):
cmd = [
'install_name_tool',
'-change',
str(dep),
'@rpath/%s' % dep.basename(),
str(target.path()),
]
if not self.cmd('Fix dependency name for %s' % target.path(), cmd):
return False
return True
def parse_otool_libraries(self, path):
command = ['otool', '-L', str(path)]
return [drake.Path(line[1:].split(' ')[0])
for line
in subprocess.check_output(command).decode().split('\n')
if line.startswith('\t')]
@property
def command_configure(self):
if self.__configure is None:
return None
config = [str(drake.path_build(absolute = True) / self.__configure.path())]
if self.__configure_interpreter is not None:
config.insert(0, self.__configure_interpreter)
return config + self.__configure_args
@property
def command_build(self):
if self.__makefile is not None:
return [self.__make_binary, '-f', self.__makefile, 'install'] + self.__build_args
return [self.__make_binary] + self.__build_args
@property
def work_directory(self):
return str(self.__working_directory)
def hash(self):
env = {}
env.update(self.__env)
env.pop('DRAKE_RAW', '1')
return ''.join([
str(self.command_configure),
str(self.command_build),
str(tuple(sorted(env))),
])
def __str__(self):
return '%s(%s)' % (self.__class__.__name__, self.__working_directory)
class FatLibraryGenerator(drake.Builder):
def __init__(self,
input_libs,
output_lib,
headers = [],
input_headers = None,
output_headers = None):
drake.Builder.__init__(self,
input_libs,
itertools.chain([output_lib], (drake.node(output_headers / p)
for p in headers)))
self.__input_libs = input_libs
self.__output_lib = output_lib
self.__headers = headers
if input_headers:
self.__input_headers = drake.path_build(input_headers)
else:
self.__input_headers = None
if output_headers:
self.__output_headers = drake.path_build(output_headers)
else:
self.__output_headers = None
def execute(self):
res = self.cmd('Lipo %s' % self.input_paths,
self.lipo_command,
leave_stdout = False)
if not res:
return False
if self.__headers and self.__input_headers and self.__output_headers:
res = self.cmd('cp %s' % self.__input_headers,
self.copy_headers_command,
leave_stdout = False)
return res
@property
def lipo_command(self):
if len(self.__input_libs) == 1:
res = ['cp']
res.extend(self.input_paths)
res.append(self.__output_lib.path())
else:
res = ['lipo']
res.extend(self.input_paths)
res.extend(['-create', '-output'])
res.append(self.__output_lib.path())
return res
@property
def input_paths(self):
res = []
for input in self.__input_libs:
res.append(input.path())
return res
@property
def copy_headers_command(self):
return ['cp', '-r',
self.__input_headers, self.__output_headers]
class VersionGenerator(drake.Builder):
def __init__(self, output, git = None, production_build = True):
git = git or drake.git.Git()
drake.Builder.__init__(self, [git], [output])
self.__git = git
self.__output = output
self.__production_build = production_build
def execute(self):
self.output('Generate %s' % self.__output.path())
chunks = collections.OrderedDict()
if self.__production_build:
version = self.__git.description()
else:
version = '%s-dev' % self.__git.version().split('-')[0]
chunks['version'] = version
chunks['major'], chunks['minor'], chunks['subminor'] = \
map(int, version.split('-')[0].split('.'))
with open(str(self.__output.path()), 'w') as f:
variables = (self._variable(*item) for item in chunks.items())
for line in itertools.chain(
self._prologue(), variables, self._epilogue()):
print(line, file = f)
return True
def _prologue(self):
return iter(())
def _epilogue(self):
return iter(())
def _variable(self, name, value):
raise NotImplementedError()
def hash(self):
return self.__production_build
class PythonVersionGenerator(VersionGenerator):
def _variable(self, name, value):
return '%s = %s' % (name, repr(value))
class CxxVersionGenerator(VersionGenerator):
def __init__(self, prefix, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__prefix = prefix
def _variable(self, name, value):
try:
return '#define %s_%s %s' % \
(self.__prefix.upper(), name.upper(), int(value))
except:
return '#define %s_%s "%s"' % \
(self.__prefix.upper(), name.upper(), value)
def _prologue(self):
yield '#ifndef %s_GIT_VERSION_HH' % self.__prefix
yield '# define %s_GIT_VERSION_HH' % self.__prefix
yield ''
def _epilogue(self):
yield ''
yield '#endif'
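# Illustrative note (added for clarity; not part of the original module): for a
# hypothetical prefix "demo" and version "3.1.2-dev", CxxVersionGenerator would
# emit a guarded header along these lines:
#   #ifndef DEMO_GIT_VERSION_HH
#   # define DEMO_GIT_VERSION_HH
#
#   #define DEMO_VERSION "3.1.2-dev"
#   #define DEMO_MAJOR 3
#   #define DEMO_MINOR 1
#   #define DEMO_SUBMINOR 2
#
#   #endif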
| agpl-3.0 | -6,881,114,794,789,176,000 | 30.827362 | 88 | 0.575274 | false |
ljdursi/poapy | poa.py | 1 | 1751 | #!/usr/bin/env python
from __future__ import print_function
import argparse
import sys
import poagraph
import seqgraphalignment
import simplefasta
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument('-G', '--gap', type=int, default=-2, help='Gap penalty, default=-2')
parser.add_argument('-g', '--globalAlign', action='store_true', help='Global alignment (default: local)')
parser.add_argument('-s', '--simple', action='store_true', help='Simple method')
parser.add_argument('-m', '--match', type=int, default=1, help='Match score, default=+1')
parser.add_argument('-M', '--mismatch', type=int, default=-1, help='Mismatch score, default=-1')
parser.add_argument('-H', '--html', nargs='?', type=argparse.FileType('w'), default='poa.html', help='html output')
args = parser.parse_args()
seqNo = 0
fasta = simplefasta.readfasta(args.infile)
graph = poagraph.POAGraph(fasta[0][1], fasta[0][0])
for label, sequence in fasta[1:]:
alignment = seqgraphalignment.SeqGraphAlignment(sequence, graph, fastMethod=not args.simple,
globalAlign=args.globalAlign,
matchscore=args.match, mismatchscore=args.mismatch,
gapscore=args.gap)
graph.incorporateSeqAlignment(alignment, sequence, label)
alignments = graph.generateAlignmentStrings()
for label, alignstring in alignments:
print("{0:15s} {1:s}".format(label, alignstring))
if args.html is not None:
graph.htmlOutput(args.html)
| gpl-2.0 | 6,116,580,610,104,872,000 | 49.028571 | 119 | 0.623073 | false |
tomato42/fsresck | fsresck/write.py | 1 | 4112 | # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Description: File system resilience testing application
# Author: Hubert Kario <[email protected]>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Copyright (c) 2015 Hubert Kario. All rights reserved.
#
# This copyrighted material is made available to anyone wishing
# to use, modify, copy, or redistribute it subject to the terms
# and conditions of the GNU General Public License version 2.
#
# This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""Handling of image modification requests (writes)."""
def overlapping(iterator):
"""Check if the writes in iterator are not overlapping each other."""
writes = list(iterator)
for i, write in enumerate(writes):
for other_write in writes[i+1:]:
# when writes targets different disk, it inherently does not
# overlap
if write.disk_id != other_write.disk_id:
continue
write_start = write.offset
write_end = write.offset + len(write.data)
other_write_start = other_write.offset
other_write_end = other_write.offset + len(other_write.data)
if other_write_start < write_end < other_write_end:
return True
if other_write_start <= write_start < other_write_end:
return True
return False
class Write(object):
"""Single image modification request."""
def __init__(self, offset, data, disk_id=None):
"""
Create an object instance.
@type offset: int
@param offset: the start place for the write modification request
@type data: bytearray
@param data: data to write at L{offset}
@param disk_id: base image disk UUID
"""
self.offset = offset
self.data = data
self.disk_id = disk_id
self.start_time = None
self.end_time = None
def __hash__(self):
"""Return the hash of the object."""
return hash((self.offset, bytes(self.data), self.disk_id,
self.start_time, self.end_time))
def __repr__(self):
"""Return human-readable representation of the object."""
if self.disk_id is None and self.start_time is None and \
self.end_time is None:
return "<Write offset={0}, len(data)={1}>".format(
self.offset, len(self.data))
elif self.start_time is None and self.end_time is None:
return "<Write offset={0}, len(data)={1}, disk_id={2}>".format(
self.offset, len(self.data), self.disk_id)
else:
return "<Write offset={0}, len(data)={1}, disk_id={2}, "\
"start_time={3}, end_time={4}>".format(
self.offset, len(self.data), self.disk_id,
self.start_time, self.end_time)
def set_times(self, start_time, end_time):
"""Add the issuance time and completion time of original operation."""
self.start_time = start_time
self.end_time = end_time
def __eq__(self, other):
"""
Check if objects are identical.
Compare the object with another to check if it represents the
same modification.
"""
return (isinstance(other, Write) and
self.__dict__ == other.__dict__)
def __ne__(self, other):
"""
Check if objects are different.
Compare the object with another to check if they are different
"""
return not self.__eq__(other)
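# Illustrative sketch (added for clarity; not part of the original module):
# two writes to the same disk whose byte ranges intersect count as overlapping.
def _demo_overlapping():
    first = Write(0, bytearray(b"\x00" * 512), disk_id=1)
    second = Write(256, bytearray(b"\xff" * 512), disk_id=1)
    return overlapping([first, second])  # True: [0, 512) intersects [256, 768)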
| gpl-2.0 | 9,180,138,694,550,798,000 | 36.045045 | 78 | 0.569066 | false |
alex/flake8-import-order | flake8_import_order/flake8_linter.py | 1 | 1185 | import flake8_import_order
from flake8_import_order import ImportOrderChecker
class Linter(ImportOrderChecker):
name = "import-order"
version = flake8_import_order.__version__
def __init__(self, tree, filename):
super(Linter, self).__init__(filename, tree)
@classmethod
def add_options(cls, parser):
# List of application import names. They go last.
parser.add_option(
"--application-import-names",
default="",
action="store",
type="string",
help="Import names to consider as application specific"
)
parser.config_options.append("application-import-names")
@classmethod
def parse_options(cls, options):
optdict = {}
names = options.application_import_names.split(",")
optdict['application_import_names'] = [n.strip() for n in names]
cls.options = optdict
def error(self, node, code, message):
lineno, col_offset = node.lineno, node.col_offset
return (lineno, col_offset, '{0} {1}'.format(code, message), Linter)
def run(self):
for error in self.check_order():
yield error
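# Illustrative sketch (added for clarity; not part of the original module):
# what parse_options keeps. A namedtuple stands in for the option object
# that flake8 would normally supply.
def _demo_parse_options():
    from collections import namedtuple
    FakeOptions = namedtuple("FakeOptions", "application_import_names")
    Linter.parse_options(FakeOptions("myapp, tests"))
    return Linter.options  # {'application_import_names': ['myapp', 'tests']}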
| lgpl-3.0 | 2,170,658,530,428,986,600 | 29.384615 | 76 | 0.612658 | false |
birdland/dlkit-doc | dlkit/mongo/grading/mdata_conf.py | 1 | 5266 | """Mongo osid metadata configurations for grading service."""
from .. import types
from ..primitives import Type
DEFAULT_LANGUAGE_TYPE = Type(**types.Language().get_type_data('DEFAULT'))
DEFAULT_SCRIPT_TYPE = Type(**types.Script().get_type_data('DEFAULT'))
DEFAULT_FORMAT_TYPE = Type(**types.Format().get_type_data('DEFAULT'))
DEFAULT_GENUS_TYPE = Type(**types.Genus().get_type_data('DEFAULT'))
GRADE_OUTPUT_SCORE = {
'element_label': 'output score',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_GRADE_SYSTEM = {
'element_label': 'grade system',
'instructions': 'accepts an osid.id.Id object',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_id_values': [''],
'syntax': 'ID',
'id_set': [],
}
GRADE_INPUT_SCORE_END_RANGE = {
'element_label': 'input score end range',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_INPUT_SCORE_START_RANGE = {
'element_label': 'input score start range',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_SYSTEM_NUMERIC_SCORE_INCREMENT = {
'element_label': 'numeric score increment',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_SYSTEM_LOWEST_NUMERIC_SCORE = {
'element_label': 'lowest numeric score',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_SYSTEM_BASED_ON_GRADES = {
'element_label': 'based on grades',
'instructions': 'enter either true or false.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'syntax': 'BOOLEAN',
}
GRADE_SYSTEM_HIGHEST_NUMERIC_SCORE = {
'element_label': 'highest numeric score',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_ENTRY_RESOURCE = {
'element_label': 'resource',
'instructions': 'accepts an osid.id.Id object',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_id_values': [''],
'syntax': 'ID',
'id_set': [],
}
GRADE_ENTRY_GRADE = {
'element_label': 'grade',
'instructions': 'accepts an osid.id.Id object',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_id_values': [''],
'syntax': 'ID',
'id_set': [],
}
GRADE_ENTRY_IGNORED_FOR_CALCULATIONS = {
'element_label': 'ignored for calculations',
'instructions': 'enter either true or false.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'syntax': 'BOOLEAN',
}
GRADE_ENTRY_SCORE = {
'element_label': 'score',
'instructions': 'enter a decimal value.',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_decimal_values': [None],
'syntax': 'DECIMAL',
'decimal_scale': None,
'minimum_decimal': None,
'maximum_decimal': None,
'decimal_set': [],
}
GRADE_ENTRY_GRADEBOOK_COLUMN = {
'element_label': 'gradebook column',
'instructions': 'accepts an osid.id.Id object',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_id_values': [''],
'syntax': 'ID',
'id_set': [],
}
GRADEBOOK_COLUMN_GRADE_SYSTEM = {
'element_label': 'grade system',
'instructions': 'accepts an osid.id.Id object',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_id_values': [''],
'syntax': 'ID',
'id_set': [],
}
GRADEBOOK_COLUMN_SUMMARY_GRADEBOOK_COLUMN = {
'element_label': 'gradebook column',
'instructions': 'accepts an osid.id.Id object',
'required': False,
'read_only': False,
'linked': False,
'array': False,
'default_id_values': [''],
'syntax': 'ID',
'id_set': [],
}
| mit | -313,968,927,662,093,250 | 23.493023 | 73 | 0.59324 | false |
NoNotCar/monolith | Monolith/GUI.py | 1 | 2813 | '''
Created on 14 Aug 2015
Seeing is possibly believing
@author: NoNotCar
'''
import Img
import pygame
import sys
clock = pygame.time.Clock()
class GUI(object):
def run(self, screen, player):
pass
class WinGUI(GUI):
def __init__(self, puz=False):
self.p = puz
def run(self, screen, player):
screen.fill((255, 255, 255))
Img.bcentre(Img.bfont, "WIN", screen)
pygame.display.flip()
pygame.time.wait(1000)
if not self.p:
sys.exit()
class PauseGUI(GUI):
def run(self, screen, player):
"""The pause GUI should use minimal system resources"""
pygame.mixer.music.pause()
screen.fill((255, 255, 255))
Img.bcentre(Img.bfont, "Paused", screen)
pygame.display.flip()
while True:
for e in pygame.event.get():
if e.type == pygame.QUIT:
sys.exit()
if e.type == pygame.KEYDOWN and e.key == pygame.K_p:
pygame.mixer.music.unpause()
return None
pygame.time.wait(200)
class ListGui(GUI):
addimg = Img.imgret2("AddItem.png")
def __init__(self, menutext, colour=(0, 0, 0)):
self.mtxt = menutext
self.mtxtc = colour
self.contents = set()
self.contentsimgs = []
def run(self, screen, player):
arect = pygame.Rect(-1, -1, 0, 0)
while True:
for e in pygame.event.get():
if e.type == pygame.QUIT:
sys.exit()
if e.type == pygame.MOUSEBUTTONDOWN:
mpos = pygame.mouse.get_pos()
if arect.collidepoint(mpos):
self.contents.add(player.hand.name)
self.contentsimgs.append(player.hand.img)
elif pygame.key.get_mods() & pygame.KMOD_LCTRL:
return None
screen.fill((100, 100, 100))
screen.blit(Img.pfont.render(self.mtxt, True, self.mtxtc), (0, 0))
nx = 0
for img in self.contentsimgs:
screen.blit(img, (nx, 32))
nx += 32
if player.hand and player.hand.name not in self.contents:
arect = screen.blit(self.addimg, (nx, 32))
pygame.display.flip()
class HelpGUI(GUI):
def __init__(self, img):
self.img = img
def run(self, screen, player):
screen.fill((255, 255, 255))
screen.blit(self.img, (0, 0))
pygame.display.flip()
while True:
for e in pygame.event.get():
if e.type == pygame.QUIT:
sys.exit()
if e.type == pygame.MOUSEBUTTONDOWN:
return None
pygame.time.wait(200)
| mit | -827,190,893,434,333,000 | 28.610526 | 78 | 0.515108 | false |
Step7750/ScheduleStorm_Server | uni/UAlberta.py | 1 | 17442 | """
Copyright (c) 2016 Stepan Fedorko-Bartos, Ceegan Hale
Under MIT License - https://github.com/Step7750/ScheduleStorm/blob/master/LICENSE.md
This file is a resource for Schedule Storm - https://github.com/Step7750/ScheduleStorm
"""
import threading
import requests
import pymongo
from bs4 import BeautifulSoup
import time
import re
from ldap3 import Server, Connection, SUBTREE, ALL, LEVEL
from queue import Queue
from .University import University
class UAlberta(University):
def __init__(self, settings):
super().__init__(settings)
self.db = pymongo.MongoClient().ScheduleStorm
self.db.UAlbertaProfessor.create_index([("uid", pymongo.ASCENDING)], unique=True)
def parseCourseDescription(self, req):
"""
Removes unnecessary leading non-letter characters from the requisite string.
:param req: **string** requisite from the description of a course
:return: **string**
"""
char = 1
while char < len(req) and not req[char].isalpha():
char += 1
return req[char:]
def scrapeCourseDesc(self, conn, termid):
"""
Retrieves all course descriptions then parses the course requisites and notes then upserts for every entry in
the query results
:param conn: **ldap connection object**
:param termid: **string/int** Term ID to get courses for
"""
self.log.info('obtaining course descriptions')
# Page queries course descriptions with the search base
searchBase = 'term=' + termid + ', ou=calendar, dc=ualberta, dc=ca'
entry_list = conn.extend.standard.paged_search(search_base=searchBase, search_filter='(course=*)',
search_scope=LEVEL,
attributes=['catalog', 'courseDescription', 'courseTitle',
'subject', 'units'], paged_size=400, generator=False)
# for entry in list, parse and upsert course descriptions
for entry in entry_list:
# initialize course description dict
courseDesc = {
'coursenum': entry['attributes']['catalog'],
'subject': entry['attributes']['subject'],
'name': entry['attributes']['courseTitle'],
'units': entry['attributes']['units']
}
# Does the course have a description?
if 'courseDescription' in entry['attributes']:
desc = entry['attributes']['courseDescription']
# Removes "See note (x) above" from description?
if "See Note" in desc:
desc = desc.split("See Note", 1)[0]
# Does the course have a prerequisite?
if 'Prerequisite' in desc:
# Splits the prerequisite from the description
info = desc.split("Prerequisite", 1)
prereq = self.parseCourseDescription(info[1])
desc = info[0]
# Does prerequisite have a corequisite inside of it
if "Corequisite" in prereq or "corequisite" in prereq:
#Splits the corequisite from the prereq
if "Corequisite" in prereq:
info = prereq.split("Corequisite", 1)
elif "corequisite" in prereq:
info = prereq.split("corequisite", 1)
prereq = info[0]
# Removes any "and " leftover from the splitting
if prereq[-4:] == "and ":
prereq = prereq[:-4]
# if the coreq is different from the prereq
if len(info[1]) != 1:
corereq = self.parseCourseDescription(info[1])
if prereq == "or ":
prereq = corereq
else:
if corereq != prereq:
courseDesc['coreq'] = corereq
# Splits the note form the prereq
if "Note:" in prereq:
note = prereq.split("Note:", 1)
courseDesc['notes'] = note[1]
prereq = note[0]
courseDesc['prereq'] = prereq
# splits the antireq from the desc
if "Antirequisite" in desc:
antireq = desc.split("Antirequisite", 1)[1]
antireq = self.parseCourseDescription(antireq)
courseDesc['antireq'] = antireq
desc = antireq[0]
# removes leftover info from the desc split
if desc[-4:] == "and ":
desc = desc[:-4]
courseDesc['desc'] = desc
# Upserts course description
self.updateCourseDesc(courseDesc)
def UidToName(self, uid):
"""
Returns the name of the prof with the specified UID
:param uid: **string** UID of the given prof
:return: **string** Name of the prof if successful, UID if not
"""
professor = self.db.UAlbertaProfessor.find({"uid": uid})
if professor.count() == 0:
# There must have been an issue when obtaining the data, just use the UID temporarily
return uid
else:
# We got the name, return it
professor = professor[0]['Name']
return professor
def scrapeCourseList(self, conn, termid):
"""
Queries the course list with the termid, matches the professor to the course, upserts the initial dictionary
then matches additional data to the object
:param conn: **ldap connection object**
:param termid: **string/int** Term ID to get courses for
:return:
"""
searchBase = 'term=' + termid + ', ou=calendar, dc=ualberta, dc=ca'
entry_list = conn.extend.standard.paged_search(search_base=searchBase,
search_filter='(&(!(textbook=*))(class=*)(!(classtime=*)))',
search_scope=SUBTREE,
attributes=['asString', 'class', 'term', 'campus',
'section', 'component', 'enrollStatus',
'course', 'instructorUid'],
paged_size=400,
generator=False)
# Searches for additional information
times_list = conn.extend.standard.paged_search(search_base=searchBase,
search_filter='(&(!(textbook=*))(class=*)(classtime=*))',
search_scope=SUBTREE,
attributes=['day', 'class', 'startTime', 'endTime',
'location'],
paged_size=400,
generator=False)
# We want to scrape professor names from their UID's
q = Queue()
self.log.info("Filling up the Queue with Prof UIDs")
# Fill queue with unique prof names
queuedProfs = {}
for entry in entry_list:
# Ensure this class has teachers
if 'instructorUid' in entry['attributes']:
# We don't want to request duplicates
if entry['attributes']['instructorUid'][0] not in queuedProfs:
q.put(entry['attributes']['instructorUid'][0])
# Add to the queuedProfs to avoid dupes
queuedProfs[entry['attributes']['instructorUid'][0]] = True
# Start up the threads
for i in range(self.settings["uidConcurrency"]):
concurrentScraper = UIDScraper(q, self.db, self.log)
concurrentScraper.daemon = True
concurrentScraper.start()
# Wait until the threads are done
q.join()
self.log.info('Parsing course data')
# for each entry in list, upsert course into db
for entry in entry_list:
info = str(entry['attributes']['asString']).split(" ")
# Separates the subject from the coursenum
if not re.search(r'\d', info[1]):
subject = info[0] + " " + info[1]
coursenum = info[2]
else:
subject = info[0]
coursenum = info[1]
# Translate the enrollStatus code into a readable status
if entry['attributes']['enrollStatus'] == "O":
status = "Open"
elif entry['attributes']['enrollStatus'] == "C":
status = "Closed"
else:
status = entry['attributes']['enrollStatus']
# Initializes upsert dict
courseList = {"subject": subject, "term": entry['attributes']['term'][0], "coursenum": coursenum,
"id": str(entry['attributes']['class']), "location": str(entry['attributes']['campus']),
"type": entry['attributes']['component'], "status": status,
'section': entry['attributes']['section'], "group": entry['attributes']['course'],
"times": ["N/A"], "rooms": ["N/A"]}
# Does the entry have a instructor assigned to it
if 'instructorUid' in entry['attributes']:
courseList['teachers'] = [self.UidToName(entry['attributes']['instructorUid'][0])]
else:
courseList['teachers'] = ["N/A"]
# Get a list of times and locations associated with a course
times = [x for x in times_list if x['attributes']['class'] == courseList['id']]
for entry_time in times:
times_list.remove(entry_time)
attributes = entry_time['attributes']
# Combines day, startTime, endTime into a duration
duration = " ".join(
(attributes['day'][0], attributes['startTime'][0].replace(" ", ""),
attributes['endTime'][0].replace(" ", "")))
# Adds '-' btw the times
duration = re.sub(r'^((.*?\s.*?){1})\s', r'\1 - ', duration)
if "N/A" == courseList['times'][0]:
courseList['times'].pop(0)
courseList['times'].append(duration)
# Does the class have an assigned classroom
if 'location' in attributes:
courseList['rooms'] = [attributes['location']]
# Upserts course into db
self.updateClass(courseList)
def scrapeTerms(self, conn):
"""
Retrieves the most recent terms, adds them to the terms DB and returns them
:param conn: **ldap connection object**
:return: **list** of dicts, one per term, each with an "id" and a "name" key
"""
# Page queries all terms
conn.search(search_base='ou=calendar, dc=ualberta, dc=ca', search_filter='(term=*)', search_scope=LEVEL,
attributes=['term', 'termTitle'])
terms = []
# Gets the six most recent terms
for item in range(1, 7):
entry = conn.entries[len(conn.entries)-item]
termDict = {"id": str(entry['term']), "name": str(entry['termTitle']).replace("Term ", "")}
terms.append(termDict)
# Adds term to term DB
self.updateTerms(terms)
# Returns current terms
return terms
def updateFaculties(self, conn):
"""
Updates the faculties with the current terms as the search base
:param conn: **ldap connection object**
:return:
"""
self.log.info("Getting faculty list")
# Gets all recent terms and cycles through them
for term in self.scrapeTerms(conn):
# Sets the search base for the query
searchBase = 'term='+term['id']+', ou=calendar, dc=ualberta, dc=ca'
self.log.info("Updating faculties with search base " + searchBase)
# Page queries all faculties in current term
entry_list = conn.extend.standard.paged_search(search_base=searchBase,
search_filter='(term=*)',
search_scope=LEVEL,
attributes=['subject', 'subjectTitle', 'faculty', 'career',
'courseTitle'],
paged_size=400,
generator=False)
ugrad = []
# For each entry in list updates the faculty
for entry in entry_list:
if 'subject' in entry['attributes']:
subjectDict = {'subject': entry['attributes']['subject'],
'faculty': entry['attributes']['faculty'],
'name': None}
if 'subjectTitle' in entry['attributes']:
subjectDict['name'] = entry['attributes']['subjectTitle']
else:
subjectDict['name'] = entry['attributes']['courseTitle']
if entry['attributes']['career'] == 'UGRD':
ugrad.append(subjectDict['subject'])
self.updateSubject(subjectDict)
elif entry['attributes']['career'] == 'GRAD' and subjectDict['subject'] not in ugrad:
self.updateSubject(subjectDict)
self.log.info('Finished updating faculties')
def scrape(self):
"""
Scraping thread that obtains updated course info
:return:
"""
# Establish connection to LDAP server
server = Server('directory.srv.ualberta.ca', get_info=ALL)
conn = Connection(server, auto_bind=True)
# Updates faculties
self.updateFaculties(conn)
# Get list of current terms
terms = self.getTerms()
# For each term, get the courses
for term in terms:
self.log.info('Obtaining ' + terms[term] + ' course data with id ' + term)
self.scrapeCourseList(conn, term)
self.scrapeCourseDesc(conn, term)
self.log.info('Finished scraping for UAlberta data')
class UIDScraper(threading.Thread):
"""
Thread that gets UID's from the passed in queue and inserts the prof's data from UAlberta
"""
def __init__(self, q, db, log):
threading.Thread.__init__(self)
self.q = q
self.db = db
self.log = log
def run(self):
"""
Scraping thread that gets a UID and inserts the returned prof data into the DB
:return:
"""
while not self.q.empty():
# Get this UID from the queue
thisuid = self.q.get()
if thisuid:
# Check if its already in the DB
uidExists = self.db.UAlbertaProfessor.find({"uid": thisuid})
if uidExists.count() == 0:
try:
# Get the prof data from the UAlberta directory
r = requests.get("http://directory.ualberta.ca/person/" + thisuid, timeout=20)
# Check if the HTTP status code is ok
if r.status_code == requests.codes.ok:
# Parse the HTML
soup = BeautifulSoup(r.text, "lxml")
for tag in soup.find_all("h2", {"class": "p-0 m-0"}):
info = " ".join(tag.text.split())
if info != "Dr " and info != "Prof ":
professor = info
break
self.log.info('Adding UID ' + thisuid + ' to UAlbertaProfessor db, Name: ' + professor)
# Upsert the data
self.db.UAlbertaProfessor.update({"uid": thisuid},
{'$set': {"uid": thisuid, "Name": professor}},
upsert=True)
else:
self.log.error("Improper HTTP Status for UID " + thisuid)
except:
self.log.error("Failed to obtain name for " + thisuid)
# We're done with this class
self.q.task_done()
else:
# No more items in the queue, stop the loop
break
| mit | -8,605,710,118,479,769,000 | 40.727273 | 120 | 0.491285 | false |
xFleury/crawl-0.13.0-fairplay | source/webserver/util.py | 1 | 2634 | import re
import logging
import tornado.template
import tornado.ioloop
import os.path
import time
class TornadoFilter(logging.Filter):
def filter(self, record):
if record.module == "web" and record.levelno <= logging.INFO:
return False
return True
class DynamicTemplateLoader(tornado.template.Loader):
def __init__(self, root_dir):
tornado.template.Loader.__init__(self, root_dir)
def load(self, name, parent_path=None):
name = self.resolve_path(name, parent_path=parent_path)
if name in self.templates:
template = self.templates[name]
path = os.path.join(self.root, name)
if os.path.getmtime(path) > template.load_time:
del self.templates[name]
else:
return template
template = super(DynamicTemplateLoader, self).load(name, parent_path)
template.load_time = time.time()
return template
_instances = {}
@classmethod
def get(cls, path):
if path in cls._instances:
return cls._instances[path]
else:
l = DynamicTemplateLoader(path)
cls._instances[path] = l
return l
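# Illustrative note (added for clarity; not part of the original module):
# loaders are cached per template root, so repeated calls with the same path
# share one instance. The directory name below is hypothetical.
#   loader = DynamicTemplateLoader.get("templates/")
#   template = loader.load("client.html")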
class FileTailer(object):
def __init__(self, filename, callback, interval_ms = 1000, io_loop = None):
self.file = None
self.filename = filename
self.callback = callback
self.io_loop = io_loop or tornado.ioloop.IOLoop.instance()
self.scheduler = tornado.ioloop.PeriodicCallback(self.check, interval_ms,
io_loop = self.io_loop)
self.scheduler.start()
def check(self):
if self.file is None:
if os.path.exists(self.filename):
self.file = open(self.filename, "r")
self.file.seek(os.path.getsize(self.filename))
else:
return
while True:
pos = self.file.tell()
line = self.file.readline()
if line.endswith("\n"):
self.callback(line)
else:
self.file.seek(pos)
return
def stop(self):
self.scheduler.stop()
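# Illustrative sketch (added for clarity; not part of the original module):
# tail a file and echo every complete new line. The path is hypothetical.
def _demo_file_tailer():
    import sys
    tailer = FileTailer("logs/milestones", callback=sys.stdout.write)
    return tailer  # call tailer.stop() to cancel the periodic check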
def dgl_format_str(s, username, game_params):
s = s.replace("%n", username)
return s
where_entry_regex = re.compile("(?<=[^:]):(?=[^:])")
def parse_where_data(data):
where = {}
for entry in where_entry_regex.split(data):
if entry.strip() == "": continue
field, _, value = entry.partition("=")
where[field.strip()] = value.strip().replace("::", ":")
return where
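# Illustrative sketch (added for clarity; not part of the original module):
# fields are separated by ":", "=" separates key and value, and "::" is an
# escaped colon. The sample string is hypothetical.
def _demo_parse_where_data():
    sample = "name=crawler:place=D::2:turn=42"
    return parse_where_data(sample)  # {'name': 'crawler', 'place': 'D:2', 'turn': '42'}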
| gpl-2.0 | -5,230,444,967,164,216,000 | 29.275862 | 81 | 0.56492 | false |
autoexec-batman/proc-guns | proc_guns/gun.py | 1 | 4410 | import affixes
import random
class Gun:
def __init__(self, guntype, manufacturer, quality):
self.manufacturer = manufacturer
self.guntype = guntype
self.quality = quality
base_damage = guntype['base_stats']['bullet_damage'] * manufacturer['modifiers']['damage'] * quality['modifiers']['damage']
base_magazine_size = guntype['base_stats']['magazine_size'] * manufacturer['modifiers']['mag_size']
base_fire_rate = guntype['base_stats']['fire_rate'] * manufacturer['modifiers']['fire_rate']
base_reload_time = guntype['base_stats']['reload_time'] * manufacturer['modifiers']['reload_time']
base_nice_chance = guntype['base_stats']['nice_chance'] * manufacturer['modifiers']['nice_chance']
base_nice_multiplier = guntype['base_stats']['nice_multiplier'] * manufacturer['modifiers']['nice_multiplier']
raw_affix_modifiers = dict(
raw_extra_nice_chance=0,
raw_extra_nice_multiplier=0,
raw_extra_damage=0,
raw_extra_magazine_size=0,
raw_extra_fire_rate=0,
raw_faster_reload_time=0
)
percent_affix_modifiers = dict(
percent_extra_nice_chance=1.00,
percent_extra_nice_multiplier=1.00,
percent_extra_damage=1.00,
percent_extra_magazine_size=1.00,
percent_extra_fire_rate=1.00,
percent_faster_reload_time=1.00
)
available_parts = ['barrel', 'sight', 'magazine']
available_slots = ['prefix', 'infix', 'suffix']
random.shuffle(available_parts)
random.shuffle(available_slots)
gun_affixes = []
part_count = random.randint(0,3)
for i in range(0,part_count):
current_part = available_parts.pop()
current_slot = available_slots.pop()
gun_affixes.append(affixes.select(current_part, current_slot))
prefix = ""
infix = ""
suffix = ""
self.raw_affix_text_data = []
self.percent_affix_text_data = []
for affix in gun_affixes:
print (affix['effect_name'])
if affix['effect_name'] in raw_affix_modifiers:
raw_affix_modifiers[affix['effect_name']] += affix['roll']
self.raw_affix_text_data.append((affix['effect_text'], affix['roll']))
if affix['effect_name'] in percent_affix_modifiers:
percent_affix_modifiers[affix['effect_name']] *= affix['roll']
self.percent_affix_text_data.append((affix['effect_text'], affix['roll']))
if affix['slot'] == 'prefix':
prefix = affix['name']
if affix['slot'] == 'infix':
infix = affix['name']
if affix['slot'] == 'suffix':
suffix = affix['name']
self.damage = int((base_damage + raw_affix_modifiers['raw_extra_damage']) * percent_affix_modifiers['percent_extra_damage'])
self.magazine_size = int((base_magazine_size + raw_affix_modifiers['raw_extra_magazine_size']) * percent_affix_modifiers['percent_extra_magazine_size'])
self.fire_rate = (base_fire_rate + raw_affix_modifiers['raw_extra_fire_rate']) * percent_affix_modifiers['percent_extra_fire_rate']
self.reload_time = (base_reload_time + raw_affix_modifiers['raw_faster_reload_time']) * percent_affix_modifiers['percent_faster_reload_time']
self.nice_chance = (base_nice_chance + raw_affix_modifiers['raw_extra_nice_chance']) * percent_affix_modifiers['percent_extra_nice_chance']
self.nice_multiplier = (base_nice_multiplier + raw_affix_modifiers['raw_extra_nice_multiplier']) * percent_affix_modifiers['percent_extra_nice_multiplier']
self.gun_affixes = gun_affixes
display_name = "{0} {1} {2} {3} {4}".format(prefix, manufacturer['qualities'][quality['name']], infix, guntype['name'], suffix)
self.display_name = ' '.join(display_name.split()) #eliminates extra spaces from missing affixes
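# Illustrative sketch (added for clarity; not part of the original module): the
# dictionary shapes Gun expects. All names and numbers below are hypothetical,
# not taken from the real data files.
_EXAMPLE_GUNTYPE = {
    "name": "Rifle",
    "base_stats": {"bullet_damage": 10, "magazine_size": 30, "fire_rate": 5.0,
                   "reload_time": 2.0, "nice_chance": 0.05, "nice_multiplier": 2.0},
}
_EXAMPLE_MANUFACTURER = {
    "modifiers": {"damage": 1.1, "mag_size": 1.0, "fire_rate": 0.9, "reload_time": 1.0,
                  "nice_chance": 1.0, "nice_multiplier": 1.0},
    "qualities": {"Common": "Sturdy"},
}
_EXAMPLE_QUALITY = {"name": "Common", "modifiers": {"damage": 1.0}}
# gun = Gun(_EXAMPLE_GUNTYPE, _EXAMPLE_MANUFACTURER, _EXAMPLE_QUALITY)  # needs the affixes data files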
| apache-2.0 | 3,581,647,926,256,546,300 | 56.285714 | 160 | 0.556916 | false |
mauodias/PyFuteBOT | twitter_handler.py | 1 | 1044 | import json
import twitter
class twitter_handler:
def __init__(self, json_file):
keys = json.loads(json_file)['twitter']
consumer_key = keys['consumer_key']
consumer_secret = keys['consumer_secret']
access_token_key = keys['access_token_key']
access_token_secret = keys['access_token_secret']
self.api = twitter.Api(consumer_key, consumer_secret, access_token_key, access_token_secret)
print('Twitter loaded successfully.')
def pop_direct_message(self):
dms = self.api.GetDirectMessages()
dms.sort(key=lambda dm:dm.created_at)
try:
dm = dms.pop()
self.api.DestroyDirectMessage(dm.id)
return {"text": dm.text, "sender_id": dm.sender_id, "created_at":dm.created_at}
except IndexError:
return None
def reply_direct_message(self, user_id, message):
replydm = self.api.PostDirectMessage(message, user_id)
return {"text":replydm.text, "created_at":replydm.created_at} | gpl-2.0 | 4,685,525,018,807,624,000 | 39.192308 | 100 | 0.621648 | false |
chn-lee-yumi/switch-monitor | Config.py | 1 | 1966 | # encoding: utf-8
# =============Web parameters=============
USE_HTTPS = False  # Whether to use HTTPS (instead of HTTP)
ADMIN_USERNAME = 'admin'  # Administrator username
ADMIN_PASSWORD = '123456'  # Administrator password
WEB_USERNAME = 'user'  # Username for web login
WEB_PASSWORD = '12345'  # Password for web login
WEB_PORT = 80  # HTTP port
# =============Scan parameters=============
TCPING_TIMEOUT = 2  # tcping timeout, in seconds
SCAN_THREADS = 20  # Number of scan threads per scan process
SCAN_PROCESS = 4  # Number of scan processes
# =============Monitoring parameters=============
SEND_MSG_DELAY = 5  # Delay of WeChat notifications, in minutes
WEIXIN_STAT_TIME_H = 18  # Hour at which WeChat statistics are sent
WEIXIN_STAT_TIME_M = 0  # Minute at which WeChat statistics are sent
SW_REBOOT_TIME_H = 4  # Hour at which switches are automatically rebooted
SW_REBOOT_TIME_M = 0  # Minute at which switches are automatically rebooted
CPU_THRESHOLD = 80  # CPU overload threshold
MEM_THRESHOLD = 80  # High memory usage threshold
TEMP_THRESHOLD = 60  # High temperature threshold
IF_SPEED_THRESHOLD = 0.8  # Port traffic threshold
DATA_RECORD_SAVED_DAYS = 7  # Number of days history records are kept
SCAN_REBOOT_HOURS = 6  # Interval between scan process restarts, in hours. Reference: in lib mode, a machine with 8 GB of RAM needs a restart every 8 hours.
# =============SNMP parameters=============
SNMP_MODE = "bin"  # SNMP mode, "lib" or "bin".
# If "lib", the net-snmp library is called (currently has a memory-leak problem; the workaround of periodically restarting the child processes stops scanning after several restarts, a bug where child processes become zombies).
# If "bin", the net-snmp executables are called (less efficient).
SNMP_READ_COMMUNITY = "public"
SNMP_WRITE_COMMUNITY = "private"
# =============Switch password=============
SWITCH_PASSWORD = "123456"
# =============WeChat integration parameters=============
WEIXIN_ENABLE = 1  # Whether to enable WeChat integration, 1 for yes, 0 for no
corpid = "wx09d7623hjg734"
corpsecret = ['', '', 'WsXbVPLqxcNMUY_Okfjrell723ERG2uREnCvZQ5IgwAOS8', '', '', '',
'jKFitXrTpzWpxRdfsghkj34hGR3YTXiWjUzZOs1JpM']
| gpl-3.0 | -8,783,740,755,997,347,000 | 31.714286 | 83 | 0.625182 | false |
endlos99/xdt99 | test/ga-checkerr.py | 1 | 2148 | #!/usr/bin/env python
import os
from config import Dirs, Files
from utils import xga, error, read_stderr, get_source_markers, check_errors
# Main test
def runtest():
"""run regression tests"""
# check for errors
source = os.path.join(Dirs.gplsources, 'gaerrs.gpl')
with open(Files.error, 'w') as ferr:
xga(source, '-o', Files.output, stderr=ferr, rc=1)
act_errors = read_stderr(Files.error, include_warnings=False)
exp_errors = get_source_markers(source, tag=r';ERROR')
check_errors(exp_errors, act_errors)
# error messages in pass 0 and 1
for s in ['gaerrs0.gpl', 'gaerrs1.gpl']:
source = os.path.join(Dirs.gplsources, s)
with open(Files.error, 'w') as ferr:
xga(source, '-o', Files.output, stderr=ferr, rc=1)
act_errors = read_stderr(Files.error, include_warnings=False)
exp_errors = get_source_markers(source, tag=r'\* ERROR')
check_errors(exp_errors, act_errors)
# open .if-.endif or .defm-.endm
source = os.path.join(Dirs.gplsources, 'gaopenif.gpl')
with open(Files.error, 'w') as ferr:
xga(source, '-o', Files.output, stderr=ferr, rc=1)
with open(Files.error, 'r') as fin:
msgs = ' '.join(fin.readlines())
if 'Missing .endif' not in msgs:
error('open', 'Missing error for open .if/.endif')
source = os.path.join(Dirs.gplsources, 'gaopenmac.gpl')
with open(Files.error, 'w') as ferr:
xga(source, '-o', Files.output, stderr=ferr, rc=1)
with open(Files.error, 'r') as fin:
msgs = ' '.join(fin.readlines())
if 'Missing .endm' not in msgs:
error('open', 'Missing error for open .defm/.endm')
# warnings
source = os.path.join(Dirs.gplsources, 'gawarn.gpl')
with open(Files.error, 'w') as ferr:
xga(source, '-o', Files.output, stderr=ferr, rc=0)
act_errors = read_stderr(Files.error, include_warnings=True)
exp_errors = get_source_markers(source, tag=r';WARN')
check_errors(exp_errors, act_errors)
# cleanup
os.remove(Files.output)
os.remove(Files.error)
if __name__ == '__main__':
runtest()
print('OK')
| gpl-2.0 | 835,982,874,836,547,000 | 33.095238 | 75 | 0.627561 | false |
njvack/yadda | examples/dicom_inotify.py | 1 | 3365 | #!/usr/bin/env python
# coding: utf8
"""
Watch source_dir for files, report them.
Usage:
dicom_inotify.py [options] <source_dir>
Options:
--timeout=<sec> Timeout (in seconds) to wait for more files in a series
[default: 30]
-h Show this help screen.
"""
from __future__ import with_statement, division, print_function
import sys
import os
import logging
logger = logging.getLogger(__name__)
import yadda
from yadda import handlers, managers
from yadda.vendor.docopt import docopt
from yadda.vendor.schema import Schema, Use
from yadda.vendor import pyinotify
import dicom
SCHEMA = Schema({
'<source_dir>': Use(os.path.expanduser),
'--timeout': Use(float),
str: object})
def main():
arguments = docopt(__doc__, version=yadda.__version__)
print(arguments)
validated = SCHEMA.validate(arguments)
print(validated)
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logger.debug("Using log level {0}".format(log_level))
return dicom_inotify(
validated['<source_dir>'],
validated['--timeout'])
def dicom_inotify(source_dir, timeout):
wm = pyinotify.WatchManager()
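    # watch for files moved into place, fully written, or newly created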
watch_mask = (
pyinotify.IN_MOVED_TO |
pyinotify.IN_CLOSE_WRITE |
pyinotify.IN_CREATE)
dicom_manager = MyDicomManager(timeout)
fch = FileChangeHandler(dicom_manager=dicom_manager)
notifier = pyinotify.ThreadedNotifier(wm, fch)
wm.add_watch(source_dir, watch_mask, rec=True, auto_add=True)
logger.info('Watching {0}'.format(source_dir))
try:
notifier.start()
dicom_manager.wait()
except KeyboardInterrupt:
logger.debug("Keyboard Interrupt!")
notifier.stop()
dicom_manager.stop()
class FileChangeHandler(pyinotify.ProcessEvent):
def my_init(self, dicom_manager):
self.dicom_manager = dicom_manager
def process_event(self, event):
logger.debug('Processing {0}'.format(event.pathname))
self.dicom_manager.handle_file(event.pathname)
process_IN_MOVED_TO = process_event
process_IN_CLOSE_WRITE = process_event
process_IN_CREATE = process_event
class MyDicomManager(managers.ThreadedDicomManager):
def handler_key(self, dcm):
return str(dcm.SeriesNumber)
def handle_file(self, filename):
try:
dcm = dicom.read_file(filename)
except dicom.filereader.InvalidDicomError:
logger.warn('Not a dicom: {0}'.format(filename))
return
self.handle_dicom(dcm, filename)
def build_handler(self, dcm, filename):
logger.debug(
'Building a handler from {0}'.format(filename))
return MyDicomHandler(self, self.handler_key(dcm), self.timeout)
class MyDicomHandler(handlers.ThreadedDicomHandler):
def __init__(self, manager, name, timeout):
super(MyDicomHandler, self).__init__(manager, name, timeout)
def on_start(self):
logger.debug('{0} on_start'.format(self))
def on_handle(self, dcm, filename):
logger.debug('{0} on_handle {1}'.format(self, filename))
def on_finish(self):
logger.debug('{0} on_finish'.format(self))
def terminate(self):
logger.debug('{0} terminate'.format(self))
super(MyDicomHandler, self).terminate()
if __name__ == '__main__':
sys.exit(main())
| mit | -6,849,938,730,472,083,000 | 26.809917 | 74 | 0.656166 | false |
dionyziz/llama | compiler/ast.py | 1 | 7514 | """
# ----------------------------------------------------------------------
# ast.py
#
# AST constructors for the Llama language
# http://courses.softlab.ntua.gr/compilers/2012a/llama2012.pdf
#
# Authors: Dionysis Zindros <[email protected]>
# Nick Korasidis <[email protected]>
#
# ----------------------------------------------------------------------
"""
# == INTERFACES OF AST NODES ==
class Node:
lineno = None
lexpos = None
def __init__(self):
raise NotImplementedError
def __eq__(self, other):
"""
Two nodes are equal if they are of the same type
and have all attributes equal. Override as needed.
"""
return type(self) == type(other) and all(
getattr(self, attr) == getattr(other, attr)
for attr in self.__dict__.keys()
if attr not in ('lineno', 'lexpos')
)
def copy_pos(self, node):
"""Copy line info from another AST node."""
self.lineno = node.lineno
self.lexpos = node.lexpos
def __repr__(self):
attrs = [attr for attr in dir(self) if attr[0] != '_']
values = [getattr(self, attr) for attr in attrs]
safe_values = []
for value in values:
displayable_types = (int, float, bool, str, list, Type, Expression)
if isinstance(value, displayable_types) or value is None:
safe_values.append(str(value).replace("\n", "\n\t"))
else:
safe_values.append(
'(non-scalar of type %s)' % value.__class__.__name__
)
pairs = (
"%s = '%s'" % (attr, value)
for (attr, value) in zip(attrs, safe_values)
)
return "ASTNode:%s with attributes:\n\t* %s" \
% (self.__class__.__name__, "\n\t* ".join(pairs))
class DataNode(Node):
"""A node to which a definite type can and should be assigned."""
type = None
class Expression(DataNode):
"""An expression that can be evaluated."""
pass
class Def(Node):
"""Definition of a new name."""
pass
class NameNode(Node):
"""
A node with a user-defined name that possibly requires
scope-aware disambiguation or checking.
Provides basic hashing functionality.
"""
name = None
def __hash__(self):
"""Simple hash. Override as needed."""
return hash(self.name)
class ListNode(Node):
"""
A node carrying a list of ast nodes.
Supports iterating through the nodes list.
"""
list = None
def __iter__(self):
return iter(self.list)
class Type(Node):
"""A node representing a type."""
pass
class Builtin(Type, NameNode):
"""One of the builtin types."""
def __init__(self):
self.name = self.__class__.__name__.lower()
# == AST REPRESENTATION OF PROGRAM ELEMENTS ==
class Program(ListNode):
def __init__(self, list):
self.list = list
class LetDef(ListNode):
def __init__(self, list, isRec=False):
self.list = list
self.isRec = isRec
class FunctionDef(Def, NameNode):
def __init__(self, name, params, body, type=None):
self.name = name
self.params = params
self.body = body
self.type = type
class Param(DataNode, NameNode):
def __init__(self, name, type=None):
self.name = name
self.type = type
class BinaryExpression(Expression):
def __init__(self, leftOperand, operator, rightOperand):
self.leftOperand = leftOperand
self.operator = operator
self.rightOperand = rightOperand
class UnaryExpression(Expression):
def __init__(self, operator, operand):
self.operator = operator
self.operand = operand
class ConstructorCallExpression(Expression, ListNode, NameNode):
def __init__(self, name, list):
self.name = name
self.list = list
class ArrayExpression(Expression, ListNode, NameNode):
def __init__(self, name, list):
self.name = name
self.list = list
class ConstExpression(Expression):
def __init__(self, type, value=None):
self.type = type
self.value = value
class ConidExpression(Expression, NameNode):
def __init__(self, name):
self.name = name
class GenidExpression(Expression, NameNode):
def __init__(self, name):
self.name = name
class DeleteExpression(Expression):
def __init__(self, expr):
self.expr = expr
class DimExpression(Expression, NameNode):
def __init__(self, name, dimension=1):
self.name = name
self.dimension = dimension
class ForExpression(Expression):
def __init__(self, counter, startExpr, stopExpr, body, isDown=False):
self.counter = counter
self.startExpr = startExpr
self.stopExpr = stopExpr
self.body = body
self.isDown = isDown
class FunctionCallExpression(Expression, ListNode, NameNode):
def __init__(self, name, list):
self.name = name
self.list = list
class LetInExpression(Expression):
def __init__(self, letdef, expr):
self.letdef = letdef
self.expr = expr
class IfExpression(Expression):
def __init__(self, condition, thenExpr, elseExpr=None):
self.condition = condition
self.thenExpr = thenExpr
self.elseExpr = elseExpr
class MatchExpression(Expression, ListNode):
def __init__(self, expr, list):
self.expr = expr
self.list = list
class Clause(Node):
def __init__(self, pattern, expr):
self.pattern = pattern
self.expr = expr
class Pattern(ListNode, NameNode):
def __init__(self, name, list=None):
self.name = name
self.list = list or []
class GenidPattern(NameNode):
def __init__(self, name):
self.name = name
class NewExpression(Expression):
def __init__(self, type):
self.type = type
class WhileExpression(Expression):
def __init__(self, condition, body):
self.condition = condition
self.body = body
class VariableDef(Def, NameNode):
def __init__(self, name, type=None):
self.name = name
self.type = type
class ArrayVariableDef(VariableDef, NameNode):
def __init__(self, name, dimensions, type=None):
self.name = name
self.dimensions = dimensions
self.type = type
class TDef(ListNode):
def __init__(self, type, list):
self.type = type
self.list = list
class Constructor(NameNode, ListNode):
def __init__(self, name, list=None):
self.name = name
self.list = list or []
# == REPRESENTATION OF TYPES AS AST NODES ==
class Bool(Builtin):
pass
class Char(Builtin):
pass
class Float(Builtin):
pass
class Int(Builtin):
pass
class Unit(Builtin):
pass
builtin_types_map = {
"bool": Bool,
"char": Char,
"float": Float,
"int": Int,
"unit": Unit,
}
class User(Type, NameNode):
"""A user-defined type."""
def __init__(self, name):
self.name = name
class Ref(Type):
def __init__(self, type):
self.type = type
class Array(Type):
def __init__(self, type, dimensions=1):
self.type = type
self.dimensions = dimensions
def String():
"""Factory method to alias (internally) String type to Array of char."""
return Array(Char(), 1)
class Function(Type):
def __init__(self, fromType, toType):
self.fromType = fromType
self.toType = toType
| mit | 9,006,398,026,652,156,000 | 21.564565 | 79 | 0.583045 | false |
great-expectations/great_expectations | tests/rule_based_profiler/test_rule.py | 1 | 7606 | from typing import Any
import pytest
import great_expectations.exceptions as ge_exceptions
from great_expectations.rule_based_profiler.parameter_builder import (
get_parameter_value_by_fully_qualified_parameter_name,
)
# noinspection PyPep8Naming
def test_get_parameter_value_by_fully_qualified_parameter_name_invalid_parameter_name(
rule_with_variables_with_parameters, column_Age_domain
):
with pytest.raises(
ge_exceptions.ProfilerExecutionError, match=r".+start with \$.*"
):
# noinspection PyUnusedLocal
parameter_value: Any = get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name="mean",
domain=column_Age_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
# noinspection PyPep8Naming
def test_get_parameter_value_by_fully_qualified_parameter_name_valid_parameter_name(
rule_with_variables_with_parameters,
column_Age_domain,
column_Date_domain,
):
fully_qualified_parameter_name: str
fully_qualified_parameter_name = "$variables.false_positive_threshold"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Age_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 1.0e-2
)
fully_qualified_parameter_name = "$mean"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Age_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 5.0
)
fully_qualified_parameter_name = "$variables.false_positive_threshold"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 1.0e-2
)
fully_qualified_parameter_name = (
"$parameter.date_strings.yyyy_mm_dd_hh_mm_ss_tz_date_format"
)
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.value",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== "%Y-%m-%d %H:%M:%S %Z"
)
assert get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.details",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
) == {
"confidence": 7.8e-1,
}
fully_qualified_parameter_name = "$parameter.date_strings.yyyy_mm_dd_date_format"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.value",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== "%Y-%m-%d"
)
assert get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.details",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
) == {
"confidence": 7.8e-1,
}
fully_qualified_parameter_name = (
"$parameter.date_strings.mm_yyyy_dd_hh_mm_ss_tz_date_format"
)
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.value",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== "%m-%Y-%d %H:%M:%S %Z"
)
assert get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.details",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
) == {
"confidence": 7.8e-1,
}
fully_qualified_parameter_name = "$parameter.date_strings.mm_yyyy_dd_date_format"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.value",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== "%m-%Y-%d"
)
assert get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=f"{fully_qualified_parameter_name}.details",
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
) == {
"confidence": 7.8e-1,
}
fully_qualified_parameter_name = (
"$parameter.date_strings.tolerances.max_abs_error_time_milliseconds"
)
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 100
)
fully_qualified_parameter_name = (
"$parameter.date_strings.tolerances.max_num_conversion_attempts"
)
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 5
)
fully_qualified_parameter_name = "$parameter.tolerances.mostly"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 9.1e-1
)
fully_qualified_parameter_name = "$mean"
assert (
get_parameter_value_by_fully_qualified_parameter_name(
fully_qualified_parameter_name=fully_qualified_parameter_name,
domain=column_Date_domain,
variables=rule_with_variables_with_parameters.variables,
parameters=rule_with_variables_with_parameters.parameters,
)
== 6.5e-1
)
| apache-2.0 | -859,378,896,976,437,400 | 38.206186 | 86 | 0.665527 | false |
cmnjcs/flight-stats | sample_data.py | 1 | 8543 | sample_data = {
"request": {
"departureAirport": {
"requestedCode": "SEA",
"fsCode": "SEA"
},
"arrivalAirport": {
"requestedCode": "BZN",
"fsCode": "BZN"
},
"date": {
"year": "2013",
"month": "12",
"day": "12",
"interpreted": "2013-12-12"
},
"hourOfDay": {
"requested": "0",
"interpreted": 0
},
"utc": {
"requested": "false",
"interpreted": False
},
"numHours": {
"requested": "24",
"interpreted": 24
},
"codeType": {
"requested": "IATA",
"interpreted": "IATA"
},
"maxFlights": {
"requested": "5",
"interpreted": 5
},
"extendedOptions": {},
"url": "https://api.flightstats.com/flex/flightstatus/rest/v2/json/route/status/SEA/BZN/dep/2013/12/12"
},
"appendix": {
"airlines": [
{
"fs": "AS",
"iata": "AS",
"icao": "ASA",
"name": "Alaska Airlines",
"phoneNumber": "1-800-252-7522",
"active": True
},
{
"fs": "DL",
"iata": "DL",
"icao": "DAL",
"name": "Delta Air Lines",
"phoneNumber": "1-800-221-1212",
"active": True
},
{
"fs": "QX",
"iata": "QX",
"icao": "QXE",
"name": "Horizon Air",
"active": True
},
{
"fs": "AF",
"iata": "AF",
"icao": "AFR",
"name": "Air France",
"phoneNumber": "1-800-237-2747",
"active": True
},
{
"fs": "KE",
"iata": "KE",
"icao": "KAL",
"name": "Korean Air Lines",
"active": True
}
],
"airports": [
{
"fs": "BZN",
"iata": "BZN",
"icao": "KBZN",
"faa": "BZN",
"name": "Gallatin Field",
"street1": "6, Gallatin Field",
"city": "Bozeman",
"cityCode": "BZN",
"stateCode": "MT",
"postalCode": "59715",
"countryCode": "US",
"countryName": "United States",
"regionName": "North America",
"timeZoneRegionName": "America/Denver",
"weatherZone": "MTZ055",
"localTime": "2013-12-12T20:36:31.043",
"utcOffsetHours": -7,
"latitude": 45.777687,
"longitude": -111.160334,
"elevationFeet": 4474,
"classification": 4,
"active": True,
"delayIndexUrl": "https://api.flightstats.com/flex/delayindex/rest/v1/json/airports/BZN?codeType=fs",
"weatherUrl": "https://api.flightstats.com/flex/weather/rest/v1/json/all/BZN?codeType=fs"
},
{
"fs": "SEA",
"iata": "SEA",
"icao": "KSEA",
"faa": "SEA",
"name": "Seattle-Tacoma International Airport",
"city": "Seattle",
"cityCode": "SEA",
"stateCode": "WA",
"postalCode": "98158",
"countryCode": "US",
"countryName": "United States",
"regionName": "North America",
"timeZoneRegionName": "America/Los_Angeles",
"weatherZone": "WAZ001",
"localTime": "2013-12-12T19:36:31.043",
"utcOffsetHours": -8,
"latitude": 47.443839,
"longitude": -122.301732,
"elevationFeet": 429,
"classification": 1,
"active": True,
"delayIndexUrl": "https://api.flightstats.com/flex/delayindex/rest/v1/json/airports/SEA?codeType=fs",
"weatherUrl": "https://api.flightstats.com/flex/weather/rest/v1/json/all/SEA?codeType=fs"
}
],
"equipments": [
{
"iata": "DH8D",
"name": "??",
"turboProp": False,
"jet": False,
"widebody": False,
"regional": False
},
{
"iata": "DH4",
"name": "De Havilland (Bombardier) DHC-8-400 Dash 8/8Q",
"turboProp": True,
"jet": False,
"widebody": False,
"regional": True
}
]
},
"flightStatuses": [
{
"flightId": 318308318,
"carrierFsCode": "QX",
"flightNumber": "2460",
"departureAirportFsCode": "SEA",
"arrivalAirportFsCode": "BZN",
"departureDate": {
"dateLocal": "2013-12-12T12:50:00.000",
"dateUtc": "2013-12-12T20:50:00.000Z"
},
"arrivalDate": {
"dateLocal": "2013-12-12T15:35:00.000",
"dateUtc": "2013-12-12T22:35:00.000Z"
},
"status": "L",
"schedule": {
"flightType": "J",
"serviceClasses": "Y",
"restrictions": ""
},
"operationalTimes": {
"publishedDeparture": {
"dateLocal": "2013-12-12T12:50:00.000",
"dateUtc": "2013-12-12T20:50:00.000Z"
},
"publishedArrival": {
"dateLocal": "2013-12-12T15:35:00.000",
"dateUtc": "2013-12-12T22:35:00.000Z"
},
"scheduledGateDeparture": {
"dateLocal": "2013-12-12T12:50:00.000",
"dateUtc": "2013-12-12T20:50:00.000Z"
},
"estimatedGateDeparture": {
"dateLocal": "2013-12-12T13:08:00.000",
"dateUtc": "2013-12-12T21:08:00.000Z"
},
"actualGateDeparture": {
"dateLocal": "2013-12-12T13:25:00.000",
"dateUtc": "2013-12-12T21:25:00.000Z"
},
"flightPlanPlannedDeparture": {
"dateLocal": "2013-12-12T13:33:00.000",
"dateUtc": "2013-12-12T21:33:00.000Z"
},
"estimatedRunwayDeparture": {
"dateLocal": "2013-12-12T13:33:00.000",
"dateUtc": "2013-12-12T21:33:00.000Z"
},
"actualRunwayDeparture": {
"dateLocal": "2013-12-12T13:33:00.000",
"dateUtc": "2013-12-12T21:33:00.000Z"
},
"scheduledGateArrival": {
"dateLocal": "2013-12-12T15:35:00.000",
"dateUtc": "2013-12-12T22:35:00.000Z"
},
"estimatedGateArrival": {
"dateLocal": "2013-12-12T16:03:00.000",
"dateUtc": "2013-12-12T23:03:00.000Z"
},
"actualGateArrival": {
"dateLocal": "2013-12-12T15:56:00.000",
"dateUtc": "2013-12-12T22:56:00.000Z"
},
"flightPlanPlannedArrival": {
"dateLocal": "2013-12-12T15:44:00.000",
"dateUtc": "2013-12-12T22:44:00.000Z"
},
"estimatedRunwayArrival": {
"dateLocal": "2013-12-12T15:53:00.000",
"dateUtc": "2013-12-12T22:53:00.000Z"
},
"actualRunwayArrival": {
"dateLocal": "2013-12-12T15:51:00.000",
"dateUtc": "2013-12-12T22:51:00.000Z"
}
},
"codeshares": [
{
"fsCode": "AS",
"flightNumber": "2460",
"relationship": "X"
},
{
"fsCode": "DL",
"flightNumber": "7545",
"relationship": "Z"
}
],
"delays": {
"departureGateDelayMinutes": 35,
"arrivalGateDelayMinutes": 21,
"arrivalRunwayDelayMinutes": 7
},
"flightDurations": {
"scheduledBlockMinutes": 105,
"blockMinutes": 91,
"scheduledAirMinutes": 71,
"airMinutes": 78,
"scheduledTaxiOutMinutes": 43,
"taxiOutMinutes": 8,
"taxiInMinutes": 5
},
"airportResources": {
"departureTerminal": "C",
"departureGate": "16A"
},
"flightEquipment": {
"scheduledEquipmentIataCode": "DH4",
"actualEquipmentIataCode": "DH8D"
}
},
{
"flightId": 318308317,
"carrierFsCode": "QX",
"flightNumber": "2462",
"departureAirportFsCode": "SEA",
"arrivalAirportFsCode": "BZN",
"departureDate": {
"dateLocal": "2013-12-12T20:45:00.000",
"dateUtc": "2013-12-13T04:45:00.000Z"
},
"arrivalDate": {
"dateLocal": "2013-12-12T23:28:00.000",
"dateUtc": "2013-12-13T06:28:00.000Z"
},
"status": "S",
"schedule": {
"flightType": "J",
"serviceClasses": "Y",
"restrictions": ""
},
"operationalTimes": {
"publishedDeparture": {
"dateLocal": "2013-12-12T20:45:00.000",
"dateUtc": "2013-12-13T04:45:00.000Z"
},
"publishedArrival": {
"dateLocal": "2013-12-12T23:28:00.000",
"dateUtc": "2013-12-13T06:28:00.000Z"
},
"scheduledGateDeparture": {
"dateLocal": "2013-12-12T20:45:00.000",
"dateUtc": "2013-12-13T04:45:00.000Z"
},
"flightPlanPlannedDeparture": {
"dateLocal": "2013-12-12T20:54:00.000",
"dateUtc": "2013-12-13T04:54:00.000Z"
},
"estimatedRunwayDeparture": {
"dateLocal": "2013-12-12T20:54:00.000",
"dateUtc": "2013-12-13T04:54:00.000Z"
},
"scheduledGateArrival": {
"dateLocal": "2013-12-12T23:28:00.000",
"dateUtc": "2013-12-13T06:28:00.000Z"
},
"flightPlanPlannedArrival": {
"dateLocal": "2013-12-12T23:20:00.000",
"dateUtc": "2013-12-13T06:20:00.000Z"
},
"estimatedRunwayArrival": {
"dateLocal": "2013-12-12T23:20:00.000",
"dateUtc": "2013-12-13T06:20:00.000Z"
}
},
"codeshares": [
{
"fsCode": "AS",
"flightNumber": "2462",
"relationship": "X"
},
{
"fsCode": "KE",
"flightNumber": "6235",
"relationship": "Z"
},
{
"fsCode": "AF",
"flightNumber": "9664",
"relationship": "Z"
}
],
"flightDurations": {
"scheduledBlockMinutes": 103,
"scheduledAirMinutes": 86,
"scheduledTaxiOutMinutes": 9,
"scheduledTaxiInMinutes": 8
},
"flightEquipment": {
"scheduledEquipmentIataCode": "DH4"
}
}
]
} | mit | -5,631,485,492,478,907,000 | 23.481375 | 105 | 0.558352 | false |
kg-bot/SupyBot | plugins/SpinBottle/config.py | 1 | 2138 | ###
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012, nyuszika7h <[email protected]>
#
# Licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 3.0
# Unported License <https://creativecommons.org/licenses/by-nc-sa/3.0/>.
###
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('SpinBottle', True)
SpinBottle = conf.registerPlugin('SpinBottle')
# This is where your configuration variables (if any) should go. For example:
# conf.registerGlobalValue(SpinBottle, 'someConfigVariableName',
# registry.Boolean(False, _("""Help for someConfigVariableName.""")))
conf.registerGroup(SpinBottle, 'spin')
conf.registerGlobalValue(conf.supybot.plugins.SpinBottle.spin,
'requireCapability', registry.String('', ("""Determines what capability
(if any) the bot should require people trying to use this command to
have.""")))
conf.registerGroup(SpinBottle, 'randspin')
conf.registerGlobalValue(conf.supybot.plugins.SpinBottle.randspin,
'requireCapability', registry.String('', ("""Determines what
capability (if any) the bot should require people trying to use this command
to have.""")))
conf.registerGroup(SpinBottle, 'forcespin')
conf.registerGlobalValue(conf.supybot.plugins.SpinBottle.forcespin,
'requireCapability', registry.String('', ("""Determines what
capability (if any) the bot should require people trying to use this
command to have.""")))
conf.registerGroup(SpinBottle, 'forcebottle')
conf.registerGlobalValue(conf.supybot.plugins.SpinBottle.forcebottle,
'requireCapability', registry.String('', ("""Determines what
capability (if any) the bot should require people trying to use this
command to have.""")))
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| gpl-3.0 | -7,276,186,623,485,112,000 | 40.921569 | 80 | 0.749298 | false |
mazulo/simplemooc | simplemooc/courses/admin.py | 1 | 1421 | from django.contrib import admin
# Register your models here.
from .models import (
Course,
CourseTRB,
CourseRequest,
Announcement,
Comment,
Enrollment,
Lesson,
LessonTRB,
Material,
KnowledgeLevel,
ChooseKnowledgeLevel,
ChooseCategoryCognitiveProcess,
CategoryCognitiveProcess,
Verb,
)
class CourseAdmin(admin.ModelAdmin):
list_display = ['name', 'slug', 'start_date', 'created_at']
search_fields = ['name', 'slug']
prepopulated_fields = {'slug': ['name']}
class CourseTRBAdmin(CourseAdmin):
pass
class CourseRequestAdmin(admin.ModelAdmin):
list_display = ['name', 'description', 'start_date', 'professor', 'is_trb']
search_fields = ['name', 'professor']
class LessonAdmin(admin.ModelAdmin):
list_display = ['name', 'number', 'course', 'release_date']
search_fields = ['name', 'description']
list_filter = ['created_at']
admin.site.register(Course, CourseAdmin)
admin.site.register(CourseTRB, CourseTRBAdmin)
admin.site.register(CourseRequest, CourseRequestAdmin)
admin.site.register([Enrollment, Announcement, Comment, Material])
admin.site.register(Lesson, LessonAdmin)
admin.site.register(LessonTRB, LessonAdmin)
admin.site.register(KnowledgeLevel)
admin.site.register(ChooseKnowledgeLevel)
admin.site.register(CategoryCognitiveProcess)
admin.site.register(ChooseCategoryCognitiveProcess)
admin.site.register(Verb)
| mit | 7,870,221,218,341,444,000 | 25.811321 | 79 | 0.729768 | false |
davidnk/insolater | setup.py | 1 | 1092 | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
packages = ['insolater']
requires = []
with open('README.rst') as f:
readme = f.read()
with open('LICENSE.txt') as f:
license = f.read()
setup(
name='insolater',
version='0.0.1',
description='Tool to easily switch between original and modified versions of a directory.',
long_description=readme,
author='David Karesh',
author_email='[email protected]',
url='github.com/davidnk/insolater',
download_url="https://github.com/davidnk/insolater/tarball/v0.0.1",
packages=['insolater'],
include_package_data=True,
install_requires=['argparse', 'pexpect'],
license=license,
entry_points={'console_scripts': ['inso = insolater.run:main']},
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
),
)
| mit | -6,779,866,917,094,095,000 | 29.333333 | 95 | 0.64011 | false |
pyfa-org/eos | tests/integration/calculator/similar_mods/test_dogma.py | 1 | 4872 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Ship
from eos.const.eos import ModAffecteeFilter
from eos.const.eos import ModDomain
from eos.const.eos import ModOperator
from eos.const.eve import EffectCategoryId
from tests.integration.calculator.testcase import CalculatorTestCase
class TestSimilarModifiersDogma(CalculatorTestCase):
def make_modifier(self, src_attr, tgt_attr):
return self.mkmod(
affectee_filter=ModAffecteeFilter.item,
affectee_domain=ModDomain.self,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.post_percent,
affector_attr_id=src_attr.id)
def test_same_item(self):
# Real scenario - capital ships boost their agility via proxy attrs
# Setup
tgt_attr = self.mkattr()
src_attr1 = self.mkattr()
src_attr2 = self.mkattr()
modifier1 = self.make_modifier(src_attr1, tgt_attr)
modifier2 = self.make_modifier(src_attr2, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1])
effect2 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier2])
item = Ship(self.mktype(
attrs={src_attr1.id: 20, src_attr2.id: 20, tgt_attr.id: 100},
effects=(effect1, effect2)).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_same_item_attr(self):
# Setup
tgt_attr = self.mkattr()
src_attr = self.mkattr()
modifier1 = self.make_modifier(src_attr, tgt_attr)
modifier2 = self.make_modifier(src_attr, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1])
effect2 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier2])
item = Ship(self.mktype(
attrs={src_attr.id: 20, tgt_attr.id: 100},
effects=(effect1, effect2)).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_same_item_effect(self):
# Setup
tgt_attr = self.mkattr()
src_attr1 = self.mkattr()
src_attr2 = self.mkattr()
modifier1 = self.make_modifier(src_attr1, tgt_attr)
modifier2 = self.make_modifier(src_attr2, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1, modifier2])
item = Ship(self.mktype(
attrs={src_attr1.id: 20, src_attr2.id: 20, tgt_attr.id: 100},
effects=[effect1]).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_same_item_effect_attr(self):
# Setup
tgt_attr = self.mkattr()
src_attr = self.mkattr()
modifier1 = self.make_modifier(src_attr, tgt_attr)
modifier2 = self.make_modifier(src_attr, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1, modifier2])
item = Ship(self.mktype(
attrs={src_attr.id: 20, tgt_attr.id: 100},
effects=[effect1]).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 | -3,637,831,094,017,428,500 | 37.362205 | 80 | 0.613095 | false |
noironetworks/group-based-policy | gbpservice/neutron/tests/unit/services/servicechain/ncp/test_traffic_stitching_plumber.py | 1 | 8104 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from neutron.common import config # noqa
from neutron_lib import context as n_context
from neutron_lib.plugins import constants as pconst
from oslo_config import cfg
from gbpservice.neutron.services.servicechain.plugins.ncp import model
from gbpservice.neutron.tests.unit.services.grouppolicy import (
test_resource_mapping as test_gp_driver)
from gbpservice.neutron.tests.unit.services.servicechain.ncp import (
test_ncp_plugin as base)
class TrafficStitchingPlumberTestCase(base.NodeCompositionPluginTestCase):
def setUp(self):
cfg.CONF.set_override('policy_drivers', ['implicit_policy',
'resource_mapping'],
group='group_policy')
cfg.CONF.set_override('allow_overlapping_ips', True)
cfg.CONF.set_override(
'extension_drivers', ['proxy_group'], group='group_policy')
super(TrafficStitchingPlumberTestCase, self).setUp(
node_drivers=['node_dummy'], node_plumber='stitching_plumber',
core_plugin=test_gp_driver.CORE_PLUGIN)
self.driver = self.sc_plugin.driver_manager.ordered_drivers[0].obj
self.driver.get_plumbing_info = mock.Mock()
self.driver.get_plumbing_info.return_value = {}
def test_one_gateway_pt_prov_cons(self):
context = n_context.get_admin_context()
self.driver.get_plumbing_info.return_value = {
'provider': [{}], 'consumer': [{}], 'plumbing_type': 'gateway'}
provider, consumer, node = self._create_simple_chain()
provider = self.show_policy_target_group(
provider['id'])['policy_target_group']
# Verify Service PT created and correctly placed
targets = model.get_service_targets(context.session)
self.assertEqual(2, len(targets))
old_relationship = None
for target in targets:
self.assertEqual(node['id'], target.servicechain_node_id)
pt = self.show_policy_target(
target.policy_target_id)['policy_target']
if target.relationship == 'provider':
self.assertEqual(provider['id'],
pt['policy_target_group_id'])
self.assertTrue(pt['group_default_gateway'])
self.assertFalse(pt['proxy_gateway'])
else:
# Consumer side a proxy group exists
self.assertEqual(provider['proxy_group_id'],
pt['policy_target_group_id'])
self.assertFalse(pt['group_default_gateway'])
self.assertTrue(pt['proxy_gateway'])
self.assertNotEqual(old_relationship, target.relationship)
old_relationship = target.relationship
port = self._get_object('ports', pt['port_id'], self.api)['port']
self.assertTrue(port['name'].startswith('pt_service_target_'),
"Port name doesn't start with 'pt_service_target_"
"'.\nport:\n%s\n" % port)
self.update_policy_target_group(
provider['id'], provided_policy_rule_sets={})
# With chain deletion, also the Service PTs are deleted
new_targets = model.get_service_targets(context.session)
self.assertEqual(0, len(new_targets))
for target in targets:
self.show_policy_target(
target.policy_target_id, expected_res_status=404)
provider = self.show_policy_target_group(
provider['id'])['policy_target_group']
self.assertIsNone(provider['proxy_group_id'])
def test_multiple_endpoint_pt_provider(self):
context = n_context.get_admin_context()
self.driver.get_plumbing_info.return_value = {
'provider': [{}, {}], 'consumer': [], 'plumbing_type': 'endpoint'}
provider, consumer, node = self._create_simple_chain()
provider = self.show_policy_target_group(
provider['id'])['policy_target_group']
# Verify Service PT created and contains proper name, description
targets = model.get_service_targets(context.session)
self.assertEqual(2, len(targets))
for target in targets:
pt = self.show_policy_target(
target.policy_target_id)['policy_target']
self.assertEqual(provider['id'],
pt['policy_target_group_id'])
self.assertTrue(pt['name'].startswith('tscp_endpoint_service'),
"Policy Target name doesn't start with "
"'tscp_endpoint_service'.\npt:\n%s\n" % pt)
self.assertTrue(node['id'] in pt['description'],
"Policy Target description doesn't contains "
" node id.\nnode:\n%s\n" % node)
port = self._get_object('ports', pt['port_id'], self.api)['port']
self.assertTrue(port['name'].startswith(
'pt_tscp_endpoint_service'),
"Port name doesn't start with "
"'pt_tscp_endpoint_service'.\nport:\n%s\n" % port)
self.update_policy_target_group(
provider['id'], provided_policy_rule_sets={})
# With chain deletion, also the Service PTs are deleted
new_targets = model.get_service_targets(context.session)
self.assertEqual(0, len(new_targets))
for target in targets:
self.show_policy_target(
target.policy_target_id, expected_res_status=404)
provider = self.show_policy_target_group(
provider['id'])['policy_target_group']
self.assertIsNone(provider['proxy_group_id'])
def get_plumbing_info_base(self, context):
service_type = context.current_profile['service_type']
plumbing_request = {'management': [], 'provider': [{}],
'consumer': [{}]}
if service_type in [pconst.FIREWALL]:
plumbing_request['plumbing_type'] = 'gateway'
else:
plumbing_request = {}
return plumbing_request
def test_get_service_targets_in_chain(self):
context = n_context.get_admin_context()
self.driver.get_plumbing_info = self.get_plumbing_info_base
lb_prof = self._create_service_profile(
service_type='LOADBALANCERV2',
vendor=self.SERVICE_PROFILE_VENDOR)['service_profile']
lb_node = self.create_servicechain_node(
service_profile_id=lb_prof['id'],
config=self.DEFAULT_LB_CONFIG)['servicechain_node']
fw_prof = self._create_service_profile(
service_type='FIREWALL',
vendor=self.SERVICE_PROFILE_VENDOR)['service_profile']
fw_node = self.create_servicechain_node(
service_profile_id=fw_prof['id'],
config='{}')['servicechain_node']
self._create_chain_with_nodes([fw_node['id'], lb_node['id']])
targets = model.get_service_targets(context.session)
self.assertEqual(2, len(targets))
def test_ptg_delete(self):
self.driver.get_plumbing_info.return_value = {
'provider': [{}], 'consumer': [{}],
'plumbing_type': 'transparent'}
provider, _, _ = self._create_simple_service_chain()
# Deleting a PTG will fail because of existing PTs
self.delete_policy_target_group(provider['id'],
expected_res_status=204)
| apache-2.0 | -1,080,646,170,201,155,700 | 47.238095 | 78 | 0.600568 | false |
thunderboltsid/stampman | stampman/tests/test_api.py | 1 | 3330 | import unittest
import os
import json
from stampman import main
from stampman.helpers import mail_
class TestApiList(unittest.TestCase):
def setUp(self):
main.app.config['TESTING'] = True
self._app = main.app.test_client()
self._pool_api_key = os.environ.get('POOL_API_KEY')
        self._admin_ai_key = os.environ.get('ADMIN_API_KEY')
        self._mail_domain = os.environ.get('MAIL_DOMAIN')
    def test_get_json(self):
        response = self._app.get("/")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content_type, "application/json")
def test_redirect(self):
response = self._app.get("")
self.assertEqual(response.status_code, 301)
self.assertEqual(response.content_type, "text/html; charset=utf-8")
def test_get_html(self):
response = self._app.get("/", headers={
"accept": "text/html"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content_type, "text/html")
class TestApiDetail(unittest.TestCase):
def setUp(self):
main.app.config['TESTING'] = True
self._app = main.app.test_client()
self._pool_api_key = os.environ.get('POOL_API_KEY')
self._admin_ai_key = os.environ.get('ADMIN_API_KEY')
self._mail_domain = os.environ.get('MAIL_DOMAIN')
def test_redirect(self):
response = self._app.get("/{}".format(self._mail_domain))
self.assertEqual(response.status_code, 301)
self.assertEqual(response.content_type, "text/html; charset=utf-8")
def test_get_json(self):
response = self._app.get("/{}/".format(self._mail_domain))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content_type, "application/json")
def test_get_html(self):
response = self._app.get("/{}/".format(self._mail_domain),
headers={"accept": "text/html"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content_type, "text/html")
class TestApiSend(unittest.TestCase):
def setUp(self):
main.app.config['TESTING'] = True
self._app = main.app.test_client()
self._pool_api_key = os.environ.get('POOL_API_KEY')
self._admin_ai_key = os.environ.get('ADMIN_API_KEY')
self._mail_domain = os.environ.get('MAIL_DOMAIN')
def test_get_json(self):
response = self._app.get("/{}/send/".format(self._mail_domain))
self.assertEqual(response.status_code, 200)
def test_redirect(self):
response = self._app.get("/{}/send".format(self._mail_domain))
self.assertEqual(response.status_code, 301)
def test_post(self):
response = self._app.post("/{}/send/".format(self._mail_domain),
data=json.dumps(dict(
pool_api_key=self._pool_api_key,
from_email="[email protected]",
from_name="Sid",
subject="test_send_api",
content="this_is_content"
)), content_type="application/json")
self.assertEqual(response.status_code, 200)
| mit | -5,458,157,246,960,714,000 | 38.642857 | 75 | 0.581682 | false |
SonyCSL/CSLAIER | src/model_templates/googlenetbn.py | 1 | 4811 | # -*- coding: utf-8 -*-
# HINT:image
import chainer
import chainer.functions as F
import chainer.links as L
"""
GoogLeNet:
Going Deeper with Convolutions
http://www.cs.unc.edu/~wliu/papers/GoogLeNet.pdf
##############################
## DO NOT CHANGE CLASS NAME ##
##############################
"""
class Network(chainer.Chain):
"""New GoogLeNet of BatchNormalization version."""
insize = 224
def __init__(self):
super(Network, self).__init__(
conv1=L.Convolution2D(3, 64, 7, stride=2, pad=3, nobias=True),
norm1=L.BatchNormalization(64),
conv2=L.Convolution2D(64, 192, 3, pad=1, nobias=True),
norm2=L.BatchNormalization(192),
inc3a=L.InceptionBN(192, 64, 64, 64, 64, 96, 'avg', 32),
inc3b=L.InceptionBN(256, 64, 64, 96, 64, 96, 'avg', 64),
inc3c=L.InceptionBN(320, 0, 128, 160, 64, 96, 'max', stride=2),
inc4a=L.InceptionBN(576, 224, 64, 96, 96, 128, 'avg', 128),
inc4b=L.InceptionBN(576, 192, 96, 128, 96, 128, 'avg', 128),
inc4c=L.InceptionBN(576, 128, 128, 160, 128, 160, 'avg', 128),
inc4d=L.InceptionBN(576, 64, 128, 192, 160, 192, 'avg', 128),
inc4e=L.InceptionBN(576, 0, 128, 192, 192, 256, 'max', stride=2),
inc5a=L.InceptionBN(1024, 352, 192, 320, 160, 224, 'avg', 128),
inc5b=L.InceptionBN(1024, 352, 192, 320, 192, 224, 'max', 128),
out=L.Linear(1024, 1000),
conva=L.Convolution2D(576, 128, 1, nobias=True),
norma=L.BatchNormalization(128),
lina=L.Linear(2048, 1024, nobias=True),
norma2=L.BatchNormalization(1024),
outa=L.Linear(1024, 1000),
convb=L.Convolution2D(576, 128, 1, nobias=True),
normb=L.BatchNormalization(128),
linb=L.Linear(2048, 1024, nobias=True),
normb2=L.BatchNormalization(1024),
outb=L.Linear(1024, 1000),
)
self._train = True
@property
def train(self):
return self._train
@train.setter
def train(self, value):
self._train = value
self.inc3a.train = value
self.inc3b.train = value
self.inc3c.train = value
self.inc4a.train = value
self.inc4b.train = value
self.inc4c.train = value
self.inc4d.train = value
self.inc4e.train = value
self.inc5a.train = value
self.inc5b.train = value
def __call__(self, x, t):
test = not self.train
h = F.max_pooling_2d(
F.relu(self.norm1(self.conv1(x), test=test)), 3, stride=2, pad=1)
h = F.max_pooling_2d(
F.relu(self.norm2(self.conv2(h), test=test)), 3, stride=2, pad=1)
h = self.inc3a(h)
h = self.inc3b(h)
h = self.inc3c(h)
h = self.inc4a(h)
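        # auxiliary classifier branch a, fed from the output of inc4a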
a = F.average_pooling_2d(h, 5, stride=3)
a = F.relu(self.norma(self.conva(a), test=test))
a = F.relu(self.norma2(self.lina(a), test=test))
a = self.outa(a)
self.loss1 = F.softmax_cross_entropy(a, t)
h = self.inc4b(h)
h = self.inc4c(h)
h = self.inc4d(h)
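        # auxiliary classifier branch b, fed from the output of inc4d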
b = F.average_pooling_2d(h, 5, stride=3)
b = F.relu(self.normb(self.convb(b), test=test))
b = F.relu(self.normb2(self.linb(b), test=test))
b = self.outb(b)
self.loss2 = F.softmax_cross_entropy(b, t)
h = self.inc4e(h)
h = self.inc5a(h)
h = F.average_pooling_2d(self.inc5b(h), 7)
h = self.out(h)
self.loss3 = F.softmax_cross_entropy(h, t)
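        # combined loss: the two auxiliary losses are down-weighted relative to the main classifier loss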
self.loss = 0.3 * (self.loss1 + self.loss2) + self.loss3
self.accuracy = F.accuracy(h, t)
return self.loss
def predict(self, x_data):
x = chainer.Variable(x_data, volatile=True)
self.train = False
test = True
h = F.max_pooling_2d(
F.relu(self.norm1(self.conv1(x), test=test)), 3, stride=2, pad=1)
h = F.max_pooling_2d(
F.relu(self.norm2(self.conv2(h), test=test)), 3, stride=2, pad=1)
h = self.inc3a(h)
h = self.inc3b(h)
h = self.inc3c(h)
h = self.inc4a(h)
a = F.average_pooling_2d(h, 5, stride=3)
a = F.relu(self.norma(self.conva(a), test=test))
a = F.relu(self.norma2(self.lina(a), test=test))
a = self.outa(a)
h = self.inc4b(h)
h = self.inc4c(h)
h = self.inc4d(h)
b = F.average_pooling_2d(h, 5, stride=3)
b = F.relu(self.normb(self.convb(b), test=test))
b = F.relu(self.normb2(self.linb(b), test=test))
b = self.outb(b)
h = self.inc4e(h)
h = self.inc5a(h)
h = F.average_pooling_2d(self.inc5b(h), 7)
h = self.out(h)
return F.softmax(0.3 * (a + b) + h)
| mit | 3,842,499,004,113,487,400 | 31.952055 | 78 | 0.538557 | false |
r-portas/brisbane-bus-stops | analyse.py | 1 | 2506 | """Analyses datasets"""
from csv import reader
def parse_bus_stops(filename, suburb_filter=""):
"""Parses a csv file of bus stops
Returns a list of bus stops
"""
bus_stops = []
with open(filename, "rb") as bus_stop_file:
bus_csv_reader = reader(bus_stop_file)
header = bus_csv_reader.next()
# Each second line of the file is garbage
toggle = 0
for line in bus_csv_reader:
if toggle:
if suburb_filter != "":
if line[3] == suburb_filter:
bus_stops.append(BusStop(line[0], line[2], line[3], line[7], line[8]))
else:
bus_stops.append(BusStop(line[0], line[2], line[3], line[7], line[8]))
toggle = 0
else:
toggle = 1
return bus_stops
"""Finds the middle location of all stops in the list, used for centering the map on the points
Return a list of coordinates, [lat, long]
"""
def get_mid_location(bus_stops):
max_lat = 0
min_lat = 0
max_long = 0
min_long = 0
for stop in bus_stops:
# Find the lats
if max_lat == 0:
max_lat = stop.lat
else:
if max_lat < stop.lat:
max_lat = stop.lat
if min_lat == 0:
min_lat = stop.lat
else:
if min_lat > stop.lat:
min_lat = stop.lat
# Find the longs
if max_long == 0:
max_long = stop.long
else:
if max_long < stop.long:
max_long = stop.long
if min_long == 0:
min_long = stop.long
else:
if min_long > stop.long:
min_long = stop.long
mid_lat = ((max_lat - min_lat) / 2) + min_lat
mid_long = ((max_long - min_long) / 2) + min_long
return [mid_lat, mid_long]
"""Stores a bus stop"""
class BusStop:
def __init__(self, stopid, road, suburb, lat, long):
self.stopid = stopid
self.road = road
self.suburb = suburb
self.lat = float(lat)
self.long = float(long)
def __repr__(self):
return "{} - {}, {} - ({}, {})".format(self.stopid, self.road, self.suburb, self.long, self.lat)
def get_location(self):
"""Returns the location of the bus stop in a list [lat, long]"""
return [self.lat, self.long]
if __name__ == "__main__":
stops = parse_bus_stops("datasets/dataset_bus_stops.csv")
print(stops) | mit | -4,975,846,715,688,513,000 | 26.549451 | 104 | 0.513966 | false |
lyndon160/REF | openflow_bandwidth/SwitchPoll.py | 1 | 1690 | from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
import time
from ryu.lib.packet import ethernet
class SwitchPoll():
def __init__(self):
self._running = True
def terminate(self):
self._running = False
#input switch to send to
def send_port_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPPortStatsRequest(datapath, 0, ofp.OFPP_ANY)
datapath.send_msg(req)
def send_flow_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPFlowStatsRequest(datapath, 0, ofp.OFPTT_ALL,
ofp.OFPP_ANY, ofp.OFPG_ANY,
0, 0, ofp_parser.OFPMatch())
datapath.send_msg(req)
def send_meter_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPMeterStatsRequest(datapath, 0, ofp.OFPM_ALL)
datapath.send_msg(req)
#input time for every request and list of switches to request to
    def run(self, pollTime, datapathdict):
        # keep polling every known switch until terminate() clears the running flag
        while self._running:
            for the_key, datapath in datapathdict.iteritems():
                self.send_port_stats_request(datapath)
                self.send_flow_stats_request(datapath)
                self.send_meter_stats_request(datapath)
            time.sleep(pollTime)
| apache-2.0 | -959,407,658,754,910,100 | 34.208333 | 72 | 0.642604 | false |
fcl-93/rootio_web | rootio/radio/views.py | 1 | 22221 | # -*- coding: utf-8 -*-
from __future__ import print_function
import string
import random
import os
import re
from datetime import datetime, timedelta
import sys
import time
import dateutil.rrule, dateutil.parser
from flask import g, current_app, Blueprint, render_template, request, flash, Response, json, url_for
from flask.ext.login import login_required, current_user
from crontab import CronTab
from flask.ext.babel import gettext as _
from sqlalchemy.exc import IntegrityError
from werkzeug.utils import redirect
from ..telephony import Message
from .models import Station, Program, ScheduledBlock, ScheduledProgram, Location, Person, StationhasBots, Language, ProgramType, MediaFiles
from .forms import StationForm, ProgramForm, BlockForm, LocationForm, ScheduleProgramForm, PersonForm, AddBotForm, MediaForm
from ..decorators import returns_json, returns_flat_json
from ..utils import error_dict, fk_lookup_form_data, allowed_audio_file, ALLOWED_AUDIO_EXTENSIONS
from ..extensions import db
from ..utils_bot import add_cron, send_mail, removeCron
from werkzeug import secure_filename
import mutagen
from ..messenger import messages
radio = Blueprint('radio', __name__, url_prefix='/radio')
@radio.route('/', methods=['GET'])
def index():
stations = Station.query.all()
return render_template('radio/index.html',stations=stations)
@radio.route('/emergency/', methods=['GET'])
def emergency():
stations = Station.query.all()
#demo, override station statuses
for s in stations:
s.status = "on"
#end demo
return render_template('radio/emergency.html',stations=stations)
@radio.route('/station/', methods=['GET'])
def stations():
stations = Station.query.order_by('name').all()
return render_template('radio/stations.html', stations=stations, active='stations')
@radio.route('/station/<int:station_id>', methods=['GET', 'POST'])
def station(station_id):
station = Station.query.filter_by(id=station_id).first_or_404()
form = StationForm(obj=station, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(station)
db.session.add(station)
db.session.commit()
flash(_('Station updated.'), 'success')
return render_template('radio/station.html', station=station, form=form)
@radio.route('/station/add/', methods=['GET', 'POST'])
@login_required
def station_add():
form = StationForm(request.form)
station = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
cleaned_data.pop('phone_inline',None) #and also inline forms
cleaned_data.pop('location_inline',None)
station = Station(**cleaned_data) #create new object from data
db.session.add(station)
db.session.commit()
flash(_('Station added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'),'error')
return render_template('radio/station.html', station=station, form=form)
@radio.route('/program/', methods=['GET'])
def programs():
programs = Program.query.all()
return render_template('radio/programs.html', programs=programs, active='programs')
@radio.route('/people/', methods=['GET'])
def people():
people = Person.query.all()
return render_template('radio/people.html', people=people, active='people')
@radio.route('/people/<int:person_id>', methods=['GET', 'POST'])
def person(person_id):
person = Person.query.filter_by(id=person_id).first_or_404()
form = PersonForm(obj=person, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(person)
db.session.add(person)
db.session.commit()
flash(_('Person updated.'), 'success')
return render_template('radio/person.html', person=person, form=form)
@radio.route('/people/add/', methods=['GET', 'POST'])
@login_required
def person_add():
form = PersonForm(request.form)
person = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
person = Person(**cleaned_data) #create new object from data
db.session.add(person)
db.session.commit()
flash(_('Person added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'),'error')
return render_template('radio/person.html', person=person, form=form)
@radio.route('/location/add/ajax/', methods=['POST'])
@login_required
@returns_json
def location_add_ajax():
data = json.loads(request.data)
#handle floats individually
float_vals = ['latitude','longitude']
for field in float_vals:
try:
data[field] = float(data[field])
except ValueError:
response = {'status':'error','errors':{field:_('Invalid ')+field},'status_code':400}
return response
form = LocationForm(None, **data) #use this format to avoid multidict-type issue
location = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
location = Location(**cleaned_data) #create new object from data
db.session.add(location)
db.session.commit()
response = {'status':'success','result':{'id':location.id,'string':unicode(location)},'status_code':200}
elif request.method == "POST":
#convert the error dictionary to something serializable
response = {'status':'error','errors':error_dict(form.errors),'status_code':400}
return response
@radio.route('/block/', methods=['GET'])
def scheduled_blocks():
scheduled_blocks = ScheduledBlock.query.all()
#TODO, display only those that are scheduled on stations the user can view
return render_template('radio/scheduled_blocks.html', scheduled_blocks=scheduled_blocks, active='blocks')
@radio.route('/block/<int:block_id>', methods=['GET', 'POST'])
def scheduled_block(block_id):
block = ScheduledBlock.query.filter_by(id=block_id).first_or_404()
form = BlockForm(obj=block, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(block)
db.session.add(block)
db.session.commit()
flash(_('Block updated.'), 'success')
return render_template('radio/scheduled_block.html', scheduled_block=block, form=form)
@radio.route('/block/add/', methods=['GET', 'POST'])
@login_required
def scheduled_block_add():
form = BlockForm(request.form)
block = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
block = ScheduledBlock(**cleaned_data) #create new object from data
db.session.add(block)
db.session.commit()
flash(_('Block added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'),'error')
return render_template('radio/scheduled_block.html', block=block, form=form)
@radio.route('/scheduleprogram/add/ajax/', methods=['POST'])
@login_required
@returns_json
def schedule_program_add_ajax():
data = json.loads(request.data)
if 'program' not in data:
return {'status':'error','errors':'program required','status_code':400}
if 'station' not in data:
return {'status':'error','errors':'station required','status_code':400}
#lookup objects from ids
fk_errors = fk_lookup_form_data({'program':Program,'station':Station}, data)
if fk_errors:
return fk_errors
program = data['program']
scheduled_program = ScheduledProgram(program=data['program'], station=data['station'])
scheduled_program.start = dateutil.parser.parse(data['start'])
scheduled_program.end = scheduled_program.start + program.duration
db.session.add(scheduled_program)
db.session.commit()
return {'status':'success','result':{'id':scheduled_program.id},'status_code':200}
@radio.route('/scheduleprogram/delete/<int:_id>/', methods=['POST'])
@login_required
def delete_program(_id):
_program = ScheduledProgram.query.get(_id)
db.session.delete(_program)
db.session.commit()
return ""
@radio.route('/scheduleprogram/edit/ajax/', methods=['POST'])
@login_required
@returns_json
def schedule_program_edit_ajax():
data = json.loads(request.data)
if 'scheduledprogram' not in data:
return {'status':'error','errors':'scheduledprogram required','status_code':400}
#lookup objects from ids
fk_errors = fk_lookup_form_data({'scheduledprogram':ScheduledProgram}, data)
if fk_errors:
return fk_errors
scheduled_program = data['scheduledprogram']
scheduled_program.start = dateutil.parser.parse(data['start'])
program = scheduled_program.program
scheduled_program.end = scheduled_program.start + program.duration
db.session.add(scheduled_program)
db.session.commit()
return {'status':'success','result':{'id':scheduled_program.id},'status_code':200}
@radio.route('/scheduleprogram/add/recurring_ajax/', methods=['POST'])
@login_required
@returns_json
def schedule_recurring_program_ajax():
"Schedule a recurring program"
data = json.loads(request.data)
#ensure specified foreign key ids are valid
fk_errors = fk_lookup_form_data({'program':Program,'station':Station}, data)
if fk_errors:
return fk_errors
form = ScheduleProgramForm(None, **data)
try:
air_time = datetime.strptime(form.data['air_time'],'%H:%M').time()
except ValueError:
response = {'status':'error','errors':{'air_time':'Invalid time'},'status_code':400}
return response
if form.validate_on_submit():
#save refs to form objects
program = form.data['program']
station = form.data['station']
#parse recurrence rule
r = dateutil.rrule.rrulestr(form.data['recurrence'])
for instance in r[:10]: #TODO: dynamically determine instance limit
scheduled_program = ScheduledProgram(program=program, station=station)
scheduled_program.start = datetime.combine(instance,air_time) #combine instance day and air_time time
scheduled_program.end = scheduled_program.start + program.duration
db.session.add(scheduled_program)
db.session.commit()
response = {'status':'success','result':{},'status_code':200}
elif request.method == "POST":
response = {'status':'error','errors':error_dict(form.errors),'status_code':400}
return response
@radio.route('/station/<int:station_id>/scheduledprograms.json', methods=['GET'])
@returns_flat_json
def scheduled_programs_json(station_id):
if request.args.get('start') and request.args.get('end'):
start = dateutil.parser.parse(request.args.get('start'))
end = dateutil.parser.parse(request.args.get('end'))
scheduled_programs = ScheduledProgram.query.filter_by(station_id=station_id)
#TODO: filter by start > start, end < end
else:
scheduled_programs = ScheduledProgram.query.filter_by(station_id=station_id)
resp = []
for s in scheduled_programs:
d = {'title':s.program.name,
'start':s.start.isoformat(),
'end':s.end.isoformat(),
'id':s.id}
resp.append(d)
return resp
@radio.route('/station/<int:station_id>/scheduledblocks.json', methods=['GET'])
@returns_flat_json
def scheduled_block_json(station_id):
scheduled_blocks = ScheduledBlock.query.filter_by(station_id=station_id)
if not ('start' in request.args and 'end' in request.args):
return {'status':'error','errors':'scheduledblocks.json requires start and end','status_code':400}
#TODO: fullcalendar updates based on these params
start = dateutil.parser.parse(request.args.get('start'))
end = dateutil.parser.parse(request.args.get('end'))
resp = []
for block in scheduled_blocks:
r = dateutil.rrule.rrulestr(block.recurrence)
for instance in r.between(start,end):
d = {'title':block.name,
'start':datetime.combine(instance,block.start_time),
'end':datetime.combine(instance,block.end_time),
'id':block.id,
'isBackground':True, #the magic flag that tells full calendar to render as block
}
resp.append(d)
return resp
@radio.route('/schedule/', methods=['GET'])
def schedule():
#TODO, if user is authorized to view only one station, redirect them there
stations = Station.query.order_by('name').all()
return render_template('radio/schedules.html',
stations=stations, active='schedule')
@radio.route('/schedule/<int:station_id>/', methods=['GET'])
def schedule_station(station_id):
station = Station.query.filter_by(id=station_id).first_or_404()
#TODO: move this logic to an ajax call, like scheduled_block_json
scheduled_blocks = ScheduledBlock.query.filter_by(station_id=station.id)
block_list = []
for block in scheduled_blocks:
r = dateutil.rrule.rrulestr(block.recurrence)
for instance in r[:10]: #TODO: dynamically determine instance limit from calendar view
d = {'title':block.name,
'start':datetime.combine(instance,block.start_time),
'end':datetime.combine(instance,block.end_time)}
block_list.append(d)
form = ScheduleProgramForm()
all_programs = Program.query.all()
#TODO: filter by language?
return render_template('radio/schedule.html',
form=form, station=station, block_list=block_list, addable_programs=all_programs,
active='schedule')
@radio.route('/bots/', methods=['GET'])
def list_bots():
"""
Presents a list of all the bots that have been created and the radios where they're working
:return:
"""
stations = Station.query.all()
return render_template('radio/bots.html', stations=stations)
@radio.route('/bots/add/', methods=['GET', 'POST'])
@login_required
def new_bot_add():
"""
Renders the form to insert a new bot in the database.
Add cronJobs if the state bot is active
"""
form = AddBotForm(request.form)
bot = None
type = "add"
if form.validate_on_submit():
cleaned_data = form.data # make a copy
cleaned_data.pop('submit', None) # remove submit field from list
bot = StationhasBots(**cleaned_data) # create new object from data
try:
bot = add_cron(bot,type)
db.session.add(bot)
db.session.commit()
flash(_('Bot added.'), 'success')
except Exception as e:
removeCron(bot, CronTab(user=True))
db.session.rollback()
db.session.flush()
print (str(e))
send_mail("Error happened while you're adding a bot", str(e))
flash(_('Error Bot Not Added.'), 'error')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/bot.html', bot=bot, form=form)
@radio.route('/bot/<int:radio_id>/<int:function_id>', methods=['GET', 'POST'])
@login_required
def bot_edit(radio_id, function_id):
bot = StationhasBots.query.filter_by(fk_radio_station_id=radio_id, fk_bot_function_id=function_id).first_or_404()
form = AddBotForm(obj=bot, next=request.args.get('next'))
type = "edit"
if form.validate_on_submit():
form.populate_obj(bot)
try:
bot = add_cron(bot, type)
db.session.add(bot)
db.session.commit()
flash(_('Bot updated.'), 'success')
except Exception as e:
removeCron(bot,CronTab(user=True))
db.session.rollback()
db.session.flush()
print(str(e))
send_mail("Error happened editig the bot", str(e))
flash(_('Error Bot Not Updated.'), 'error')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/bot.html', bot=bot, form=form)
@radio.route('/media', methods=['GET', 'POST'])
@login_required
def media_files():
media = MediaFiles.query.all()
return render_template('radio/media.html', media=media)
@radio.route('/media/add', methods=['GET', 'POST'])
@login_required
def media_add():
form = MediaForm(request.form)
media = None
if form.validate_on_submit():
cleaned_data = form.data # make a copy
upload_file = request.files[form.path.name]
if upload_file and allowed_audio_file(upload_file.filename):
data = upload_file.read()
path_file = os.path.join(current_app.config['UPLOAD_FOLDER'], upload_file.filename)
with open(path_file, 'wb') as audio_out: audio_out.write(data) # write the uploaded audio in binary mode and close the handle
filename, file_extension = os.path.splitext(path_file)
if file_extension == '.wav':
import wave
import contextlib
with contextlib.closing(wave.open(path_file, 'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
duration = unicode(timedelta(seconds=frames / float(rate)))
else:
audio = mutagen.File(path_file)
duration = unicode(timedelta(seconds=audio.info.length))
cleaned_data.pop('submit', None) # remove submit field from list
cleaned_data['path'] = path_file
cleaned_data['duration'] = duration
media = MediaFiles(**cleaned_data) # create new object from data
db.session.add(media)
db.session.commit()
flash(_('Media File added.'), 'success')
else:
flash("Please upload files with extensions: %s" % "/".join(ALLOWED_AUDIO_EXTENSIONS), 'error')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/mediaform.html', media=media, form=form)
@radio.route('/media/<int:media_id>', methods=['GET', 'POST'])
@login_required
def media_edit(media_id):
media = MediaFiles.query.filter_by(id=media_id).first_or_404()
form = MediaForm(obj=media, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(media)
upload_file = request.files[form.path.name]
if upload_file and allowed_audio_file(upload_file.filename):
data = upload_file.read()
path_file = os.path.join(current_app.config['UPLOAD_FOLDER'], upload_file.filename)
with open(path_file, 'wb') as audio_out: audio_out.write(data) # write the uploaded audio in binary mode and close the handle
filename, file_extension = os.path.splitext(path_file)
if file_extension == '.wav':
import wave
import contextlib
with contextlib.closing(wave.open(path_file, 'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
duration = unicode(timedelta(seconds=frames / float(rate)))
else:
audio = mutagen.File(path_file)
duration = unicode(timedelta(seconds=audio.info.length))
media.path = path_file
media.duration = duration
db.session.add(media)
db.session.commit()
flash(_('Media File updated.'), 'success')
else:
flash("Please upload files with extensions: %s" % "/".join(ALLOWED_AUDIO_EXTENSIONS), 'error')
return render_template('radio/mediaform.html', media=media, form=form)
@radio.route('/media/list', methods=['GET', 'POST'])
@login_required
def media_list():
media = dict()
for m in MediaFiles.query.all():
media[m.id] = {'media_id': m.id, 'name': m.name, 'description': m.description, 'path': m.path,
'language': unicode(m.language), 'type': m.type,
'duration': m.duration}
return json.jsonify(media)
@radio.route('/media/find', methods=['GET', 'POST'])
@login_required
def media_find():
try:
media = MediaFiles.query.filter_by(path=request.form['path'])
return media[0].name
except (KeyError, IndexError): # fall back to the array-style 'path[]' key sent by some AJAX callers
media = MediaFiles.query.filter_by(path=request.form['path[]'])
return media[0].name
@radio.route('/sms/', methods=['GET', 'POST'])
@login_required
def list_sms():
messages = dict()
for m in Message.query.all():
messages[m.id] = {'message_id':m.id,'message_uuid':m.message_uuid,'sendtime':m.sendtime,
'text': m.text,'from_phonenumber_id':m.from_phonenumber_id,
'to_phonenumber_id':m.to_phonenumber_id,'onairprogram_id': m.onairprogram_id}
return json.jsonify(messages)
@radio.route('/program/add/', methods=['GET', 'POST'])
@login_required
def program_add():
form = ProgramForm(request.form)
program = None
if form.validate_on_submit():
cleaned_data = form.data # make a copy
cleaned_data.pop('submit', None) # remove submit field from list
cleaned_data['duration'] = request.form['est_time']
cleaned_data['description'] = request.form['description']
program = Program(**cleaned_data) # create new object from data
db.session.add(program)
db.session.commit()
flash(_('Program added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/program.html', program=program, form=form)
@radio.route('/program/<int:program_id>', methods=['GET', 'POST'])
def program(program_id):
program = Program.query.filter_by(id=program_id).first_or_404()
form = ProgramForm(obj=program, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(program)
program.duration = request.form['est_time']
program.description = request.form['description']
db.session.add(program)
db.session.commit()
flash(_('Program updated.'), 'success')
return render_template('radio/program.html', program=program, form=form) | agpl-3.0 | -2,909,168,198,307,072,500 | 35.489327 | 139 | 0.64169 | false |
vincentropy/battleground | battleground/persistence/game_data.py | 1 | 5042 | # from uuid import uuid4
import pymongo
from pymongo import MongoClient
import bson
import json
from os import environ
import datetime
global_client = None
def get_client():
global global_client
if global_client is None:
if "MONGO_HOST" in environ:
host = environ["MONGO_HOST"]
global_client = MongoClient(host)
else:
global_client = MongoClient()
return global_client
def get_db_handle(name=None, client=None):
if name is None:
name = "game_states"
if client is None:
client = get_client()
db_handle = client[name]
return db_handle
def save_game_states(game_id,
game_type,
game_states,
db_handle=None):
"""
save one or more documents to the data-store.
game_states: [dict,...]
each key, value will be stored as key: json(value) in the document.
expected keys are "game_state", "last_move" and "player_ids"
"""
if db_handle is None:
db_handle = get_db_handle()
collection = db_handle.game_states
all_docs = []
for i, game_state in enumerate(game_states):
doc = {
"sequence": i,
"game_id": game_id,
"game_type": game_type
}
for key, value in game_state.items():
assert key not in doc
doc[key] = json.dumps(value)
all_docs.append(doc)
result = collection.insert_many(all_docs)
return result
def save_game_meta_data(game_type, num_states, utc_time=None, db_handle=None):
if db_handle is None:
db_handle = get_db_handle()
if utc_time is None:
utc_time = str(datetime.datetime.utcnow())
doc = {
"game_type": game_type,
"utc_time": utc_time,
"num_states": num_states}
game_id = db_handle.games.insert_one(doc).inserted_id
return game_id
def save_game_history(game_type, game_states, db_handle=None):
"""
save a sequence of documents to the data-store.
game_states: array of dict
each array element will be stored as one document in the doc-store.
each key, value in each dict will be stored as key: json(value) in the document.
expected keys are "game_state", "last_move" and "player_ids"
"""
if db_handle is None:
db_handle = get_db_handle()
game_id = save_game_meta_data(game_type=game_type,
num_states=len(game_states),
db_handle=db_handle)
save_game_states(game_id=game_id,
game_type=game_type,
game_states=game_states,
db_handle=db_handle)
return game_id
def load_game_history(game_id, db_handle=None):
"""load all states with the same game ID and return an ordered sequence"""
if db_handle is None:
db_handle = get_db_handle()
if not isinstance(game_id, bson.ObjectId):
game_id = bson.ObjectId(str(game_id))
collection = db_handle.game_states
result = collection.find({"game_id": game_id})
data = result[:]
states_in_sequence = dict()
# now decode some of the values that are json strings
for loaded_doc in data:
output_doc = {}
for data_key in loaded_doc:
if data_key in ["game_state", "last_move"]:
# decode these two keys, because they are special
output_doc[data_key] = json.loads(loaded_doc[data_key])
else:
output_doc[data_key] = loaded_doc[data_key]
states_in_sequence[output_doc["sequence"]] = output_doc
states_in_sequence = [states_in_sequence[key] for key in range(len(states_in_sequence))]
return states_in_sequence
def get_games_list(game_type=None, db_handle=None):
"""
get a list of unique game IDs
"""
if db_handle is None:
db_handle = get_db_handle()
collection = db_handle.games
if game_type is None:
result = collection.find(sort=[('utc_time', pymongo.DESCENDING)])
else:
result = collection.find(sort=[('utc_time', pymongo.DESCENDING)],
filter={"game_type": game_type})
return result
def get_ids_to_purge_(date=None, db_handle=None):
if db_handle is None:
db_handle = get_db_handle()
games_list = get_games_list(db_handle=db_handle)
ids_to_purge = []
for game in games_list:
# print(game)
game_time = datetime.datetime.strptime(game['utc_time'], "%Y-%m-%d %H:%M:%S.%f")
if game_time < date:
ids_to_purge.append(game['_id'])
return ids_to_purge
def purge_game_data(date=None, db_handle=None):
if db_handle is None:
db_handle = get_db_handle()
ids_to_purge = get_ids_to_purge_(date, db_handle)
collection = db_handle.games
collection.delete_many({'_id': {'$in': ids_to_purge}})
collection = db_handle.game_states
collection.delete_many({'game_id': {'$in': ids_to_purge}})
| mit | 1,045,269,744,107,249,800 | 27.811429 | 92 | 0.590242 | false |
hongyan0118/uiautotestmacaca | Public/MacacaServer.py | 1 | 2320 | import os
import time
import requests
from multiprocessing import Pool
class MacacaServer:
def __init__(self, runs):
self._runs = runs
self._cmd = 'macaca server -p %s --verbose'
self._url = 'http://127.0.0.1:%s/wd/hub/status'
self._file = 'macaca_server_port_%s.log'
self._kill = 'taskkill /PID %d /F'
self._pids = []
@staticmethod
def server_url(port):
server_url = {
'hostname': '127.0.0.1',
'port': port,
}
return server_url
def start_server(self):
pool = Pool(processes=len(self._runs))
for run in self._runs:
pool.apply_async(self._run_server, args=(run,))
pool.close()
# after start macaca server, macaca server process can not return, so should not join
# p.join()
for run in self._runs:
while not self.is_running(run.get_port()):
print('wait macaca server all ready...')
time.sleep(1)
print('macaca server all ready')
#for run in self._runs:
#file = str(run.get_path() + '\\' + self._file) % run.get_port()
#with open(file, 'w+') as f:
#line = f.readline()
#start = line.find('pid:')
#end = line[start:].find(' ')
#pid = line[start:][4:end]
#self._pids.append(int(pid))
def _run_server(self, run):
port = run.get_port()
cmd = str(self._cmd + ' > ' + run.get_path() + '\\' + self._file) % (port, port)
os.system(cmd)
def is_running(self, port):
url = self._url % port
response = None
try:
response = requests.get(url, timeout=0.1)
if str(response.status_code).startswith('2'):
# data = json.loads((response.content).decode("utf-8"))
# if data.get("staus") == 0:
return True
return False
except requests.exceptions.ConnectionError:
return False
except requests.exceptions.ReadTimeout:
return False
finally:
if response:
response.close()
def kill_macaca_server(self):
for pid in self._pids:
os.popen(self._kill % pid)
| gpl-3.0 | -1,112,649,500,990,466,000 | 26.951807 | 93 | 0.510345 | false |
mokyue/Modia-Player | Core/AudioManager.py | 1 | 11101 | # -*- coding: utf-8 -*-
from PyQt4.QtCore import QTime, Qt, QString
from PyQt4.QtGui import QMessageBox, QTableWidgetItem, QFileDialog, QDesktopServices
from PyQt4.phonon import Phonon
from Widget.MButton import MButton
import os
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class AudioManager():
def __init__(self, window, lyric_panel):
self.__main_window = window
self.__lyric_panel = lyric_panel
self.__audio_output = Phonon.AudioOutput(Phonon.MusicCategory, self.__main_window)
self.__media_object = Phonon.MediaObject(self.__main_window)
self.__media_object.setTickInterval(1000)
self.__media_object.tick.connect(self.tick)
self.__media_object.stateChanged.connect(self.stateChanged)
self.__media_object.currentSourceChanged.connect(self.currentSourceChanged)
self.__media_object.aboutToFinish.connect(self.aboutToFinish)
self.__meta_information_resolver = Phonon.MediaObject(self.__main_window)
self.__meta_information_resolver.stateChanged.connect(self.metaStateChanged)
self.__music_table = self.__main_window.getMusicTable()
self.__list_music = list()
Phonon.createPath(self.__media_object, self.__audio_output)
self.__register_ui()
def __register_ui(self):
self.__main_window.getActionBar().get_widget('SLD_VOL').setAudioOutput(self.__audio_output)
self.__main_window.getActionBar().get_widget('SLD_SEEK').setMediaObject(self.__media_object)
def tick(self, time):
self.__main_window.getActionBar().get_widget('LBL_TIME_REMAIN').setText(
QTime(0, (time / 60000) % 60, (time / 1000) % 60).toString('mm:ss'))
self.__lyric_panel.switchLyric(time)
def play(self, media_source=None):
if media_source is not None:
if not isinstance(media_source, Phonon.MediaSource):
raise ValueError('Given argument is not a Phonon.MediaSource (Phonon.MediaSource required)')
else:
self.__media_object.setCurrentSource(media_source)
if len(self.__list_music) < 1:
self.addMusic()
if len(self.__list_music) > 0:
self.__media_object.setCurrentSource(self.__list_music[len(self.__list_music) - 1])
self.__media_object.play()
def pause(self):
self.__media_object.pause()
def stop(self):
self.__media_object.stop()
def next(self):
index_next = self.__list_music.index(self.__media_object.currentSource()) + 1
if index_next < len(self.__list_music):
self.play(self.__list_music[index_next])
else:
self.play(self.__list_music[0])
def previous(self):
index_previous = self.__list_music.index(self.__media_object.currentSource()) - 1
if index_previous > -1:
self.play(self.__list_music[index_previous])
else:
self.play(self.__list_music[len(self.__list_music) - 1])
def stateChanged(self, newState, oldState):
if newState == Phonon.ErrorState:
if self.__media_object.errorType() == Phonon.FatalError:
QMessageBox.warning(self.__main_window, "Fatal Error", self.__media_object.errorString())
self.__media_object.setCurrentSource(self.__list_music[0])
self.__list_music.remove(self.__media_object.currentSource())
else:
QMessageBox.warning(self.__main_window, "Error", self.__media_object.errorString())
self.__media_object.setCurrentSource(self.__list_music[0])
self.__list_music.remove(self.__media_object.currentSource())
elif newState == Phonon.PlayingState:
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setMStyle(MButton.Type.Pause)
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setToolTip('暂停')
self.__main_window.setSuspendStatus(False)
if self.__media_object.isSeekable():
self.__main_window.getActionBar().get_widget('SLD_SEEK').setCursor(Qt.PointingHandCursor)
self.__main_window.getActionBar().get_widget('INDICT_INFO').setText(self.__get_music_display_info())
time_total = self.__media_object.totalTime()
self.__main_window.getActionBar().get_widget('LBL_TIME_TOTAL').setText(
QTime(0, (time_total / 60000) % 60, (time_total / 1000) % 60).toString('mm:ss'))
btn_music_stop = self.__main_window.getActionBar().get_widget('BTN_STOP')
if not btn_music_stop.isEnabled():
btn_music_stop.setEnabled(True)
self.__set_lyric(self.__media_object.currentSource().fileName())
elif newState == Phonon.StoppedState:
self.__main_window.getActionBar().get_widget('SLD_SEEK').setCursor(Qt.ArrowCursor)
self.__main_window.getActionBar().get_widget('INDICT_INFO').setText(u'无音乐')
self.__main_window.getActionBar().get_widget('LBL_TIME_TOTAL').setText('00:00')
btn_music_stop = self.__main_window.getActionBar().get_widget('BTN_STOP')
if btn_music_stop.isEnabled():
btn_music_stop.setEnabled(False)
self.__lyric_panel.setNoLyric()
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setMStyle(MButton.Type.Play)
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setToolTip('播放')
self.__main_window.setSuspendStatus(True)
elif newState == Phonon.PausedState:
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setMStyle(MButton.Type.Play)
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setToolTip('播放')
self.__main_window.setSuspendStatus(True)
if newState != Phonon.StoppedState and newState != Phonon.PausedState:
return
def __set_lyric(self, music_path):
lrc_path = str(music_path.left(music_path.lastIndexOf('.'))) + u'.lrc'
if os.path.exists(lrc_path):
self.__lyric_panel.setLyricFile(lrc_path)
else:
self.__lyric_panel.setNoLyric()
def __get_music_display_info(self):
metadata = self.__media_object.metaData()
str_title = metadata.get(QString('TITLE'), [''])[0]
if str_title != '':
str_indicator = str(str_title)
else:
str_indicator = str(self.__media_object.currentSource().fileName())
str_artist = metadata.get(QString('ARTIST'), [''])[0]
if str_artist != '':
str_indicator += ' - '
str_indicator += str(str_artist)
str_description = metadata.get(QString('DESCRIPTION'), [''])[0]
if str_description != '':
str_indicator += ' '
str_indicator += str(str_description)
return str_indicator
def metaStateChanged(self, newState, oldState):
if newState == Phonon.ErrorState:
QMessageBox.warning(self.__main_window, "Error opening files",
self.__meta_information_resolver.errorString())
while self.__list_music and self.__list_music.pop() != self.__meta_information_resolver.currentSource():
pass
return
if newState != Phonon.StoppedState and newState != Phonon.PausedState:
return
if self.__meta_information_resolver.currentSource().type() == Phonon.MediaSource.Invalid:
return
metaData = self.__meta_information_resolver.metaData()
title = metaData.get(QString('TITLE'), [''])[0]
if not title:
title = self.__meta_information_resolver.currentSource().fileName()
artist = metaData.get(QString('ARTIST'), [''])[0]
if artist:
title = title + ' - ' + artist
titleItem = QTableWidgetItem(title)
titleItem.setFlags(titleItem.flags() ^ Qt.ItemIsEditable)
titleItem.setTextAlignment(Qt.AlignLeft | Qt.AlignVCenter)
long_duration = self.__meta_information_resolver.totalTime()
total_time_item = QTableWidgetItem(
QTime(0, (long_duration / 60000) % 60, (long_duration / 1000) % 60).toString('mm:ss'))
total_time_item.setFlags(total_time_item.flags() ^ Qt.ItemIsEditable)
total_time_item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
currentRow = self.__music_table.rowCount()
self.__music_table.insertRow(currentRow)
self.__music_table.setItem(currentRow, 0, titleItem)
self.__music_table.setItem(currentRow, 1, total_time_item)
if not self.__music_table.selectedItems():
self.__music_table.selectRow(0)
self.__media_object.setCurrentSource(self.__meta_information_resolver.currentSource())
index = self.__list_music.index(self.__meta_information_resolver.currentSource()) + 1
if len(self.__list_music) > index:
self.__meta_information_resolver.setCurrentSource(self.__list_music[index])
def currentSourceChanged(self, source):
self.__music_table.selectRow(self.__list_music.index(source))
def aboutToFinish(self):
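# Enqueue the following track (wrapping back to the first one) so Phonon can
# switch to it seamlessly when the current source ends.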
index_next = self.__list_music.index(self.__media_object.currentSource()) + 1
if index_next < len(self.__list_music):
self.__media_object.enqueue(self.__list_music[index_next])
else:
self.__media_object.enqueue(self.__list_music[0])
def addMusic(self):
is_empty = len(self.__list_music) < 1
sources = QFileDialog.getOpenFileNames(self.__main_window, "Select Music Files",
QDesktopServices.storageLocation(QDesktopServices.MusicLocation))
if not sources:
return
index = len(self.__list_music)
for music_file in sources:
media_source = Phonon.MediaSource(music_file)
if not self.__is_existing(media_source):
self.__list_music.append(media_source)
if is_empty:
self.__media_object.setCurrentSource(self.__list_music[len(self.__list_music) - 1])
if index == len(self.__list_music):
return
if self.__list_music:
self.__meta_information_resolver.setCurrentSource(self.__list_music[index])
def __is_existing(self, media_source):
for ms in self.__list_music:
if media_source.fileName() == ms.fileName():
return True
return False
def clearQueue(self):
self.__media_object.clearQueue()
def setCurrentSourceByIndex(self, int_index):
self.__media_object.setCurrentSource(self.__list_music[int_index])
def getMediaObjectState(self):
return self.__media_object.state()
| unlicense | -5,796,457,515,968,300,000 | 48.607306 | 116 | 0.606334 | false |
TamiaLab/carnetdumaker | apps/registration/managers.py | 1 | 5803 | """
Objects managers for the registration app.
"""
import re
import uuid
from django.db import models
from django.db.models import Q
from django.contrib.auth import get_user_model
from .signals import user_registered
class UserRegistrationManager(models.Manager):
"""
``UserRegistrationProfile`` objects manager.
"""
@staticmethod
def _generate_new_activation_key():
"""
Generate a new (random) activation key of 32 alphanumeric characters.
"""
return uuid.uuid4().hex
def create_inactive_user(self, username, email, password):
"""
Create a new inactive user using the given username, email and password.
Also create a ``UserRegistrationProfile`` for the newly created user.
Once the ``User`` and ``UserRegistrationProfile`` are created, send the
``user_registered`` signal for other apps to do their jobs.
Return the created ``UserRegistrationProfile`` for any external purpose.
:param username: The user's username.
:param email: The user's email address.
:param password: The user's password (plain text).
"""
new_user = get_user_model().objects.create_user(username, email, password)
new_user.is_active = False
new_user.save(update_fields=('is_active', ))
registration_profile = self._create_profile(new_user)
user_registered.send(sender=UserRegistrationManager, user=new_user)
return registration_profile
def _create_profile(self, user):
"""
Create a new ``UserRegistrationProfile`` for the given user.
"""
activation_key = self._generate_new_activation_key()
return self.create(user=user, activation_key=activation_key)
def delete_expired_users(self, queryset=None):
"""
Remove expired instances of ``UserRegistrationProfile`` and their
associated ``User``s.
Accounts to be deleted are identified by searching for
instances of ``UserRegistrationProfile`` with unused, expired activation
keys, and then checking to see if their associated ``User``
instances have the field ``is_active`` set to ``False``; any
``User`` who is both inactive and has an unused, expired activation
key will be deleted. If the key has been used, the ``User`` will not
be deleted. This allows administrators to disable accounts temporarily.
It is recommended that this method be executed regularly as
part of your routine site maintenance; this application
provides a custom management command which will call this
method, accessible as ``manage.py cleanupregistration``.
Regularly clearing out accounts which have never been
activated serves two useful purposes:
1. It alleviates the occasional need to reset a
``UserRegistrationProfile`` and/or re-send an activation email
when a user does not receive or does not act upon the
initial activation email; since the account will be
deleted, the user will be able to simply re-register and
receive a new activation key.
2. It prevents the possibility of a malicious user registering
one or more accounts and never activating them (thus
denying the use of those usernames to anyone else); since
those accounts will be deleted, the usernames will become
available for use again.
:param queryset: If the ``queryset`` parameter is not specified the cleanup process will run on
all the ``UserRegistrationProfile`` entries currently in database.
"""
if not queryset:
queryset = self.all()
# Delete all used activation key (optimal way)
queryset.filter(activation_key_used=True).delete()
# Delete all expired (but not used) activation key
# The filter(activation_key_used=False) avoids racing with keys that get used in the meantime
for profile in queryset.filter(activation_key_used=False):
if profile.activation_key_expired():
try:
user = profile.user
if not user.is_active:
user.delete()
except get_user_model().DoesNotExist:
pass
profile.delete()
class BannedUsernameManager(models.Manager):
"""
``BannedUsername`` objects manager.
"""
def is_username_banned(self, username):
"""
Test if the given username is banned or not.
:param username: The username to be checked.
"""
return self.filter(username__iexact=username).exists()
class BannedEmailManager(models.Manager):
"""
``BannedEmail`` objects manager.
"""
def is_email_address_banned(self, email_address):
"""
Test if the given email address is banned or not.
:param email_address: The email address to be checked.
"""
email_username, email_provider = email_address.split('@')
email_provider_no_tld = email_provider.rsplit('.', 1)[0]
banned = self.filter(Q(email__iexact=email_address) |
Q(email__iexact='%s@*' % email_username) |
Q(email__iexact='*@%s' % email_provider) |
Q(email__iexact='*@%s.*' % email_provider_no_tld)).exists()
if not banned:
# Use a regex to defeat the Gmail dot trick
email_username_no_dot = email_username.replace('.', '')
username_re = r'\.?'.join(re.escape(email_username_no_dot))
provider_re = re.escape(email_provider)
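# Example (illustrative): a candidate like 'j.o.h.n.doe@example.com' is still caught
# by a banned entry 'john.doe@example.com' or 'johndoe@example.com', because every
# dot in the candidate's local part becomes optional in the pattern built here.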
return self.filter(email__iregex=r'^%s@(\*|%s)$' % (username_re, provider_re)).exists()
return True
| agpl-3.0 | -6,189,914,938,721,366,000 | 41.985185 | 103 | 0.633293 | false |
matpow2/cuwo | tools/convertqmo.py | 1 | 2922 | # Copyright (c) Mathias Kaerlev 2013-2017.
#
# This file is part of cuwo.
#
# cuwo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cuwo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with cuwo. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import argparse
cmd_folder = os.path.realpath(os.path.abspath('.'))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
from cuwo.bytes import ByteReader, ByteWriter
from cuwo.qmo import QubicleFile, QubicleModel
from cuwo.cub import CubModel
def switch_axes(x, y, z):
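# Swap the Y and Z axes; the .cub and .qmo formats appear to use different
# vertical-axis conventions (an assumption based on how this helper is used below).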
return x, z, y
def to_qmo(in_file, out_file):
cub = CubModel(ByteReader(open(in_file, 'rb').read()))
qmo_file = QubicleFile()
qmo_model = QubicleModel()
x_size, y_size, z_size = switch_axes(cub.x_size, cub.y_size, cub.z_size)
qmo_model.x_size = x_size
qmo_model.y_size = y_size
qmo_model.z_size = z_size
qmo_model.x_offset = -x_size / 2
qmo_model.y_offset = 0
qmo_model.z_offset = -z_size / 2
for k, v in cub.blocks.items():
x, y, z = k
x2, y2, z2 = switch_axes(x, y, z)
qmo_model.blocks[x2, y2, z2] = v
qmo_file.models.append(qmo_model)
writer = ByteWriter()
qmo_file.write(writer)
with open(out_file, 'wb') as fp:
fp.write(writer.get())
def to_cub(in_file, out_file):
qmo_file = QubicleFile(ByteReader(open(in_file, 'rb').read()))
qmo_model = qmo_file.models[0]
cub = CubModel()
x_size, y_size, z_size = switch_axes(qmo_model.x_size,
qmo_model.y_size,
qmo_model.z_size)
cub.x_size = x_size
cub.y_size = y_size
cub.z_size = z_size
for k, v in qmo_model.blocks.items():
x, y, z = k
x2, y2, z2 = switch_axes(x, y, z)
cub.blocks[x2, y2, z2] = v
writer = ByteWriter()
cub.write(writer)
with open(out_file, 'wb') as fp:
fp.write(writer.get())
def main():
parser = argparse.ArgumentParser(
description='Convert between cub and qmo files')
parser.add_argument('files', metavar='FILE', nargs='+',
help='path to file to convert')
for path in parser.parse_args().files:
print("Converting %r" % path)
filename, ext = os.path.splitext(path)
if ext == '.cub':
to_qmo(path, filename + '.qmo')
else:
to_cub(path, filename + '.cub')
if __name__ == '__main__':
main()
| gpl-3.0 | 5,897,666,201,858,834,000 | 30.76087 | 76 | 0.615674 | false |
raspibo/Thermo | var/www/cgi-bin/readcsv.py | 1 | 1604 | #!/usr/bin/env python3
# This file reads the configuration file,
# finds and modifies the parameter by running the corresponding "write*.py"
# It is used for the HTML management part in Python
import cgi
import cgitb
# Enable error reporting to the web/http server
cgitb.enable()
# My Json, Html, flt libraries (Thermo(Redis))
import mjl, mhl, flt
# General parameters
TestoPagina="Archivia grafico corrente / Sposta \"temperature.csv\" in archivio"
WriteFile="/cgi-bin/writecsv.py"
FileCSV="../temperature.csv"
# Start web page - use the "web" header from my library
print (mhl.MyHtml())
print (mhl.MyHtmlHead())
# Write the page title/text
print ("<h1>","<center>",TestoPagina,"</center>","</h1>")
#print ("<hr/>","<br/>")
# Optional help/annotation
print ("<p>Non ci sono impostazioni, il file",FileCSV,"verra` rinominato aggiungendogli la data e spostato nella directory \"archive\".</p>")
print ("<p>Sara` ricreato un nuovo \"temperature.csv\".</p>")
print ("<p>Questa funzione e` da utilizzarsi quando si vuole ripulire ed archiviare il grafico corrente e/o sono sate aggiunte, eliminate, o cambiate le descrizioni delle sonde di temperatura.</p>")
print ("<hr/><br/>")
# Start of the form
print (mhl.MyActionForm(WriteFile,"POST"))
print ("<table>")
print ("<tr>")
print ("<td colspan=\"2\">")
print ("<hr/>")
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("</td>")
print ("<td>")
print (mhl.MyButtonForm("submit","Submit"))
print ("</td>")
print ("</tr>")
print ("</table>")
# End form
print (mhl.MyEndForm())
# End web page
print (mhl.MyHtmlBottom()) | mit | 5,711,144,561,985,839,000 | 24.47619 | 198 | 0.688903 | false |
tensorflow/federated | tensorflow_federated/python/core/test/__init__.py | 1 | 1297 | # Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Libraries for testing TensorFlow Federated."""
from tensorflow_federated.python.core.api.test_case import main
from tensorflow_federated.python.core.api.test_case import TestCase
from tensorflow_federated.python.core.impl.context_stack.set_default_context import set_no_default_context
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
| apache-2.0 | -7,692,501,565,437,387,000 | 57.954545 | 106 | 0.811874 | false |
switchyrascal/beeps | beeps/views/planview.py | 1 | 38620 | # Copyright © 2012-2015 Jonty Pearson.
#
# This file is part of Beeps.
#
# Beeps is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Beeps is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Beeps. If not, see <http://www.gnu.org/licenses/>.
import beeps.helpers.event
import beeps.models.plan
import gi.repository.GLib
import gi.repository.Gtk
import gi.repository.GtkClutter
import gi.repository.Clutter
import gi.repository.Gst
import logging
MENUS_UI = '/beeps/app/gtk/menus.ui'
PLAN_EXPORTER_UI = '/beeps/app/plan-exporter.ui'
PLAN_IMPORTER_UI = '/beeps/app/plan-importer.ui'
PLAN_UI = '/beeps/app/plan.ui'
ROUTINE_DELETER_UI = '/beeps/app/routine-deleter.ui'
ROUTINE_EDITOR_UI = '/beeps/app/routine-editor.ui'
class Component(beeps.helpers.event.Handler):
"""Abstract base class for component of composite view."""
def configure(self, *args):
pass
def connect_actions(self, controller):
pass
def quit(self):
pass
class Composite(Component):
"""Abstract base class for composite view."""
def __init__(self):
super().__init__()
self.components = {}
def add_component(self, component, *args, **kwargs):
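# If a 'builder' keyword is given, each name in the component's CONFIG_OBJECTS
# list is resolved to a widget via the Gtk.Builder and appended to the positional
# arguments passed on to the component's configure().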
builder = kwargs.pop('builder', None)
objs = list(args)
if builder is not None:
objs.extend(builder.get_object(obj) for obj in component.CONFIG_OBJECTS)
key = component.__class__
if self.components.get(component.__class__) is not None:
raise RuntimeError('View already added component ' + str(key))
self.components[key] = component
self.components[key].configure(*objs)
logging.debug('View added component ' + str(key))
return self.components[key]
def configure(self, *args):
pass
def connect_actions(self, controller):
super().connect_actions(controller)
for component in self.components.values():
component.connect_actions(controller)
def process(self, sender, event):
super().process(sender, event)
for component in self.components.values():
component.process(sender, event)
return False
def quit(self):
super().quit()
for component in self.components.values():
component.quit()
class Console(Component):
def __init__(self, tag):
super().__init__()
self._tag = tag
logging.info(self._tag + ' starting...')
def process(self, sender, event):
super().process(sender, event)
logging.debug(self._tag + ' event ' + self._truncate(str(event)))
def quit(self):
logging.info(self._tag + ' stopping...')
def _truncate(self, info):
if len(info) > 80:
info = '{:.77s}...'.format(info)
return info
class Label(gi.repository.Gtk.Label, Component):
TIME_FORMAT = '{:.1f}'
def __init__(self):
gi.repository.Gtk.Label.__init__(self)
Component.__init__(self, beeps.helpers.event.WhiteListFilter(self.EVENTS))
self._format = self._font_weight(self._format(), 'bold')
def configure(self):
self._default = self.get_text()
def _font_weight(self, string, weight):
return '<span font_weight="bold">' + string + '</span>'
def _foreground(self, string, colour):
return '<span foreground="' + colour + '">' + string + '</span>'
def _newline(self, string):
return string + '\n'
def _set_label(self, *args):
self.set_markup(self._format.format(*args))
class TreeView(gi.repository.Gtk.TreeView):
def __init__(self):
gi.repository.Gtk.TreeView.__init__(self)
def _find_row(self, column, obj):
# returns None if the row is not found
return next((r for r in self.get_model() if r[column] == obj), None)
def _rows(self, column):
return list(r[column] for r in self.get_model())
def _select_row(self, row):
if None is not row:
self.get_selection().select_iter(row.iter)
def _selected_item(self, column):
data = None
m, it = self.get_selection().get_selected()
if it is not None:
data = m[it][column]
return data
class IntervalLabel(Label):
__gtype_name__ = 'IntervalLabel'
EVENTS = [beeps.helpers.event.IntervalCreated,
beeps.helpers.event.IntervalDeleted,
beeps.helpers.event.IntervalProgressed,
beeps.helpers.event.IntervalStarted,
beeps.helpers.event.IntervalUpdated,
beeps.helpers.event.RoutineLoaded,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.RoutineUnloaded]
def process_filtered(self, event):
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalProgressed):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.IntervalCreated):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.IntervalDeleted):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.IntervalUpdated):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.IntervalStarted):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.RoutineLoaded):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.RoutineSought):
self._update_info(event['routine'])
elif isinstance(event, beeps.helpers.event.RoutineUnloaded):
self.set_markup(self._default)
def _format(self):
f = self._newline('{:d} / {:d}')
f += self._newline(self.TIME_FORMAT)
f += self._newline(self._foreground(self.TIME_FORMAT, 'blue'))
f += self._foreground(self.TIME_FORMAT, 'red')
return f
def _format_info(self, i, n, interval):
t = interval.time
c = interval.completed()
r = t - c
self._set_label(i, n, t, c, r)
def _update_info(self, routine):
interval = routine.current_interval()
if interval is not None:
self._format_info(routine.intervals.index(interval) + 1, len(routine.intervals), interval)
else:
self.set_markup(self._default)
class IntervalList(TreeView, Component):
__gtype_name__ = 'IntervalList'
CONFIG_OBJECTS = ['editordialog', 'intervaltreeview', 'addbutton', 'removebutton']
EVENTS = [beeps.helpers.event.IntervalCreated,
beeps.helpers.event.IntervalDeleted,
beeps.helpers.event.IntervalUpdated,
beeps.helpers.event.UpdateActions]
class Column:
OBJECT = 0
DURATION = 1
SOUND = 2
def __init__(self):
TreeView.__init__(self)
Component.__init__(self, beeps.helpers.event.WhiteListFilter(self.EVENTS))
self._default = ''
def configure(self, editordialog, intervaltreeview, addbutton, removebutton):
self._actions = editordialog.get_action_group('editor')
action = gi.repository.Gio.SimpleAction(name = 'interval::add')
self._actions.add_action(action)
action = gi.repository.Gio.SimpleAction(name = 'interval::remove')
self._actions.add_action(action)
addbutton.set_action_name('editor.interval::add')
removebutton.set_action_name('editor.interval::remove')
self._set_enabled('interval::remove', self.selected_interval() is not None)
def connect_actions(self, controller):
self._actions.lookup_action('interval::add').connect('activate', controller._on_add_activate)
self._actions.lookup_action('interval::remove').connect('activate', controller._on_remove_activate)
def intervals(self):
# return interval list
return self._rows(IntervalList.Column.OBJECT)
def process_filtered(self, event):
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalCreated):
# routine could have changed by the time we get some events, but it's all
# by reference and as long as we still hold one it should work out
it = self.get_model().append()
self.get_model().set(it, self._interval_data(event['interval']))
self._select_row(self.get_model()[-1])
elif isinstance(event, beeps.helpers.event.IntervalDeleted):
row = self._find_row(IntervalList.Column.OBJECT, event['interval'])
self.get_model().remove(row.iter)
elif isinstance(event, beeps.helpers.event.IntervalUpdated):
i = event['interval']
row = self._find_row(IntervalList.Column.OBJECT, i)
self.get_model().set(row.iter, self._interval_data(i))
elif isinstance(event, beeps.helpers.event.UpdateActions):
self._set_enabled('interval::remove', self.selected_interval() is not None)
def selected_interval(self):
return self._selected_item(IntervalList.Column.OBJECT)
def _interval_data(self, interval):
s = interval.sound
if s is None:
s = self._default
return {IntervalList.Column.OBJECT : interval, IntervalList.Column.DURATION : interval.time, IntervalList.Column.SOUND : s}
def _set_enabled(self, action, enabled):
self._actions.lookup_action(action).set_enabled(enabled)
class IntervalSoundList(gi.repository.Gtk.ListStore, Component):
__gtype_name__ = 'IntervalSoundList'
EVENTS = [beeps.helpers.event.IntervalCreated,
beeps.helpers.event.IntervalDeleted,
beeps.helpers.event.IntervalUpdated]
class Column:
SOUND = 0
def __init__(self):
"""The sounds passed in are the sounds that already exist elsewhere, e.g. in
routines that already exist."""
gi.repository.Gtk.ListStore.__init__(self)
Component.__init__(self, beeps.helpers.event.WhiteListFilter(self.EVENTS))
def configure(self, sounds):
self._sounds = sounds
self._update_sounds(self._existing_sounds())
def process_filtered(self, event):
# need to loop all intervals as they can have the same sound
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalCreated):
self._update_sounds(self._routine_sounds(event['routine']))
elif isinstance(event, beeps.helpers.event.IntervalDeleted):
self._update_sounds(self._routine_sounds(event['routine']))
elif isinstance(event, beeps.helpers.event.IntervalUpdated):
self._update_sounds(self._routine_sounds(event['routine']))
# TODO rename to be consistent with other model/views?
def selected_sound(self, treeiter):
return self.get_value(treeiter, IntervalSoundList.Column.SOUND)
def _add_sounds(self, soundlist, sounds):
for sound in sounds:
if sound is not None and sound not in soundlist:
soundlist.append(sound)
return soundlist
def _existing_sounds(self):
return self._add_sounds([], self._sounds)
def _routine_sounds(self, routine):
s = self._existing_sounds()
return self._add_sounds(s, list(i.sound for i in routine.intervals))
def _sound_data(self, sound):
return {IntervalSoundList.Column.SOUND : sound}
def _update_sounds(self, sounds):
self.clear()
for s in sounds:
it = self.append()
self.set(it, self._sound_data(s))
class Plan(gi.repository.Gtk.ApplicationWindow, Composite):
__gtype_name__ = 'Plan'
def __init__(self):
gi.repository.Gtk.ApplicationWindow.__init__(self)
Composite.__init__(self)
def configure(self, builder, parent, filemenu, editmenu, controlmenu):
super().configure()
self.set_application(parent)
self.add_action(gi.repository.Gio.SimpleAction(name = 'plan::close'))
self.add_action(gi.repository.Gio.SimpleAction(name = 'plan::export'))
self.add_action(gi.repository.Gio.SimpleAction(name = 'plan::import'))
self.add_component(Console(gi.repository.GLib.get_application_name()))
self.add_component(RoutineControl(), builder = builder)
self.add_component(builder.get_object('routineprogress'))
self.add_component(builder.get_object('seekscale'))
self.add_component(builder.get_object('routinelabel'))
self.add_component(builder.get_object('intervallabel'))
self.add_component(builder.get_object('volumebutton'), builder = builder)
self.add_component(builder.get_object('routinetreeview'), builder = builder)
self.add_component(builder.get_object('routinepopupmenu'), editmenu, builder = builder)
self.add_component(builder.get_object('planheaderbar'), filemenu, controlmenu, builder = builder)
def connect_actions(self, controller):
super().connect_actions(controller)
self.lookup_action('plan::close').connect('activate', controller._on_plan_close_activate)
self.lookup_action('plan::export').connect('activate', controller._on_plan_export_activate)
self.lookup_action('plan::import').connect('activate', controller._on_plan_import_activate)
def context_menu(self, button, time):
self.components[RoutinePopupMenu].popup(None, None, None, None, button, time)
def current_volume(self):
"""Returns the current volume."""
return self.components[RoutineSound].get_value()
def cursor_routine(self, event):
"""Returns the routine under the cursor, if there is one."""
return self.components[RoutineList].cursor_routine(event)
# TODO remove when we get to GTK 3.16
def get_action_group(self, prefix):
return self
def process(self, sender, event):
"""kwargs are not used because the Gtk idle callback requires a flat data
structure and expects its arguments in the kwargs."""
if isinstance(event, beeps.helpers.event.UpdatePlan):
self._set_enabled('plan::export', len(event['plan'].routines) > 0)
else:
gi.repository.GLib.idle_add(super().process, sender, event)
return False
def selected_routine(self):
"""Returns the selected routine, usually highlighted, if there is one."""
return self.components[RoutineList].selected_routine()
def _set_enabled(self, action, enabled):
self.lookup_action(action).set_enabled(enabled)
class PlanHeaderBar(gi.repository.Gtk.HeaderBar, Component):
__gtype_name__ = 'PlanHeaderBar'
CONFIG_OBJECTS = ['filemenubutton', 'controlmenubutton']
def __init__(self):
gi.repository.Gtk.HeaderBar.__init__(self)
Component.__init__(self)
def configure(self, filemenumodel, controlmenumodel, filemenubutton, controlmenubutton):
filemenubutton.set_menu_model(filemenumodel)
controlmenubutton.set_menu_model(controlmenumodel)
class RoutineControl(Component):
"""Toolbar and menu items are all controlled here via their actions."""
CONFIG_OBJECTS = ['planwindow', 'startstoptoolbutton']
EVENTS = [beeps.helpers.event.IntervalCreated,
beeps.helpers.event.IntervalDeleted,
beeps.helpers.event.IntervalPaused,
beeps.helpers.event.IntervalResumed,
beeps.helpers.event.IntervalUpdated,
beeps.helpers.event.RoutineFinished,
beeps.helpers.event.RoutineLoaded,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.RoutineStarted,
beeps.helpers.event.RoutineUnloaded]
START_ICON_NAME = 'media-playback-start'
STOP_ICON_NAME = 'media-playback-stop'
def __init__(self):
super().__init__(beeps.helpers.event.RoutineLoadedFilter(), beeps.helpers.event.WhiteListFilter(self.EVENTS))
def configure(self, planwindow, toolbutton):
self._actions = planwindow.get_action_group('win')
action = gi.repository.Gio.SimpleAction(name = 'control::startstop', state = gi.repository.GLib.Variant.new_boolean(False))
self._actions.add_action(action)
action = gi.repository.Gio.SimpleAction(name = 'control::repeat', state = gi.repository.GLib.Variant.new_boolean(True))
self._actions.add_action(action)
action = gi.repository.Gio.SimpleAction(name = 'control::restart')
self._actions.add_action(action)
self._startstoptoolbutton = toolbutton
self._set_actions(self.START_ICON_NAME, False, False, False, False)
def connect_actions(self, controller):
super().connect_actions(controller)
self._actions.lookup_action('control::startstop').connect('activate', controller._on_control_startstop_toggled)
self._actions.lookup_action('control::restart').connect('activate', controller._on_control_restart_activate)
self._actions.lookup_action('control::repeat').connect('activate', controller._on_control_repeat_toggled)
def process_filtered(self, event):
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalCreated):
self._set_ready(event['routine'])
elif isinstance(event, beeps.helpers.event.IntervalDeleted):
self._set_ready(event['routine'])
elif isinstance(event, beeps.helpers.event.IntervalPaused):
self._set_actions(self.START_ICON_NAME, False, True, True, True)
elif isinstance(event, beeps.helpers.event.IntervalResumed):
self._set_actions(self.STOP_ICON_NAME, True, True, True, True)
elif isinstance(event, beeps.helpers.event.IntervalUpdated):
self._set_ready(event['routine'])
elif isinstance(event, beeps.helpers.event.RoutineFinished):
if not self._actions.lookup_action('control::repeat').get_state().get_boolean():
self._set_actions(self.START_ICON_NAME, False, True, False, True)
elif isinstance(event, beeps.helpers.event.RoutineLoaded):
self._set_ready(event['routine'])
elif isinstance(event, beeps.helpers.event.RoutineSought):
self._set_ready(event['routine'])
elif isinstance(event, beeps.helpers.event.RoutineStarted):
self._set_actions(self.STOP_ICON_NAME, True, True, True, True)
elif isinstance(event, beeps.helpers.event.RoutineUnloaded):
self._set_actions(self.START_ICON_NAME, False, False, False, False)
def _set_actions(self, startstopicon, startstopactive, startstopsensitive, restartsensitive, repeatsensitive):
# just set/reset everything
self._startstoptoolbutton.set_icon_name(startstopicon)
self._set_active('control::startstop', startstopactive)
self._set_enabled('control::startstop', startstopsensitive)
self._set_enabled('control::restart', restartsensitive)
self._set_enabled('control::repeat', repeatsensitive)
def _set_active(self, action, active):
self._actions.lookup_action(action).set_state(gi.repository.GLib.Variant.new_boolean(active))
def _set_enabled(self, action, enabled):
self._actions.lookup_action(action).set_enabled(enabled)
def _set_ready(self, routine):
restart = routine.completed() > 0
start = len(routine.intervals) > 0 and routine.time() > 0
self._set_actions(self.START_ICON_NAME, False, start, restart, True)
class RoutineDeleter(gi.repository.Gtk.MessageDialog):
__gtype_name__ = 'RoutineDeleter'
def format_secondary_markup(self, name):
m = 'The routine "'
m += name
m += '" will no longer be available, are you sure you want to proceed?'
super().format_secondary_markup(m)
class RoutineEditor(gi.repository.Gtk.Dialog, Composite):
__gtype_name__ = 'RoutineEditor'
def __init__(self):
#~super(gi.repository.Gtk.Dialog, self).__init__()
#~super(Composite, self).__init__()
gi.repository.Gtk.Dialog.__init__(self)
Composite.__init__(self)
self._actions = gi.repository.Gio.SimpleActionGroup()
self.insert_action_group('editor', self._actions)
def configure(self, builder, parent, title, sounds):
super().configure()
self.add_component(Console(title))
self.add_component(builder.get_object('routineprogress'))
self.add_component(builder.get_object('nameentry'))
self.add_component(builder.get_object('intervaltreeview'), builder = builder)
self.add_component(builder.get_object('soundsliststore'), sounds)
self.set_title(title)
self.set_transient_for(parent)
# TODO remove when we get to GTK 3.16
def get_action_group(self, prefix):
return self._actions
def intervals(self):
"""This provides the intervals from the model in the order that they may
have been dragged and dropped to."""
return self.components[IntervalList].intervals()
def selected_interval(self):
return self.components[IntervalList].selected_interval()
def selected_sound(self, treeiter):
return self.components[IntervalSoundList].selected_sound(treeiter)
class RoutineLabel(Label):
__gtype_name__ = 'RoutineLabel'
EVENTS = [beeps.helpers.event.IntervalProgressed,
beeps.helpers.event.RoutineLoaded,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.RoutineUnloaded,
beeps.helpers.event.RoutineUpdated]
def process_filtered(self, event):
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalProgressed):
self._format_info(event)
elif isinstance(event, beeps.helpers.event.RoutineLoaded):
self._format_info(event)
elif isinstance(event, beeps.helpers.event.RoutineSought):
self._format_info(event)
elif isinstance(event, beeps.helpers.event.RoutineUnloaded):
self.set_markup(self._default)
elif isinstance(event, beeps.helpers.event.RoutineUpdated):
self._format_info(event)
def _format(self):
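        # label layout: routine name, total time, completed time (blue) and remaining time (red)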
f = self._newline('{:s}')
f += self._newline(self.TIME_FORMAT)
f += self._newline(self._foreground(self.TIME_FORMAT, 'blue'))
f += self._foreground(self.TIME_FORMAT, 'red')
return f
def _format_info(self, data):
routine = data['routine']
t = routine.time()
c = routine.completed()
r = t - c
self._set_label(routine.name, t, c, r)
class RoutineList(TreeView, Component):
__gtype_name__ = 'RoutineList'
CONFIG_OBJECTS = ['planwindow']
EVENTS = [beeps.helpers.event.IntervalPaused,
beeps.helpers.event.IntervalResumed,
beeps.helpers.event.RoutineCreated,
beeps.helpers.event.RoutineDeleted,
beeps.helpers.event.RoutineFinished,
beeps.helpers.event.RoutineLoaded,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.RoutineStarted,
beeps.helpers.event.RoutineUpdated,
beeps.helpers.event.UpdateActions]
class Column:
OBJECT = 0
NAME = 1
def __init__(self):
TreeView.__init__(self)
Component.__init__(self, beeps.helpers.event.WhiteListFilter(self.EVENTS))
def configure(self, planwindow):
self._actions = planwindow
action = gi.repository.Gio.SimpleAction(name = 'edit::delete')
self._actions.add_action(action)
action = gi.repository.Gio.SimpleAction(name = 'edit::edit')
self._actions.add_action(action)
action = gi.repository.Gio.SimpleAction(name = 'edit::new')
self._actions.add_action(action)
self._set_crud(False, False)
def connect_actions(self, controller):
super().connect_actions(controller)
self._actions.lookup_action('edit::new').connect('activate', controller._on_edit_new_activate)
self._actions.lookup_action('edit::edit').connect('activate', controller._on_edit_edit_activate)
self._actions.lookup_action('edit::delete').connect('activate', controller._on_edit_delete_activate)
def cursor_routine(self, event):
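        # place the cursor on the row under the pointer and return its routine, or None if nothing was hit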
r = None
data = self.get_path_at_pos(event.x, event.y)
if data is not None:
self.set_cursor(data[0], self.get_column(RoutineList.Column.OBJECT), False)
r = self.get_model()[data[0]][RoutineList.Column.OBJECT]
return r
def process_filtered(self, event):
# sets the New/Edit/Delete menu item sensitivities for CRUD operations, and
# Import/Export for global operations; editing is disabled for the running
# routine but New/Delete is still allowed
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalPaused):
crud = self.selected_routine() != None
self._set_crud(crud, crud)
elif isinstance(event, beeps.helpers.event.IntervalResumed):
edit = self.selected_routine() != event['routine']
self._set_crud(True, edit)
elif isinstance(event, beeps.helpers.event.RoutineCreated):
it = self.get_model().append()
self.get_model().set(it, self._routine_data(event['routine']))
crud = self.selected_routine() != None
self._set_crud(crud, crud)
elif isinstance(event, beeps.helpers.event.RoutineDeleted):
row = self._find_row(RoutineList.Column.OBJECT, event['routine'])
self.get_model().remove(row.iter)
self._set_crud(False, False)
elif isinstance(event, beeps.helpers.event.RoutineFinished):
crud = self.selected_routine() != None
self._set_crud(crud, crud)
elif isinstance(event, beeps.helpers.event.RoutineLoaded):
self._select_row(self._find_row(RoutineList.Column.OBJECT, event['routine']))
self._set_crud(True, True)
elif isinstance(event, beeps.helpers.event.RoutineSought):
crud = self.selected_routine() != None
self._set_crud(crud, crud)
elif isinstance(event, beeps.helpers.event.RoutineStarted):
edit = self.selected_routine() != event['routine']
self._set_crud(True, edit)
elif isinstance(event, beeps.helpers.event.RoutineUpdated):
r = event['routine']
row = self._find_row(RoutineList.Column.OBJECT, r)
self.get_model().set(row.iter, self._routine_data(r))
elif isinstance(event, beeps.helpers.event.UpdateActions):
selected = event['routine']
            delete = selected is not None
            edit = selected is not None and not (event['runner'].has_state(beeps.models.plan.Runner.State.RUNNING) and event['runner'].routine == selected)
self._set_crud(delete, edit)
def selected_routine(self):
return self._selected_item(RoutineList.Column.OBJECT)
def _routine_data(self, routine):
return {RoutineList.Column.OBJECT : routine, RoutineList.Column.NAME : routine.name}
def _set_crud(self, delete, edit):
# just set/reset everything
self._actions.lookup_action('edit::delete').set_enabled(delete)
self._actions.lookup_action('edit::edit').set_enabled(edit)
self._actions.lookup_action('edit::new').set_enabled(True)
class RoutineNameEntry(gi.repository.Gtk.Entry, Component):
__gtype_name__ = 'RoutineNameEntry'
EVENTS = [beeps.helpers.event.RoutineCreated,
beeps.helpers.event.RoutineUpdated]
def __init__(self):
gi.repository.Gtk.Entry.__init__(self)
Component.__init__(self, beeps.helpers.event.WhiteListFilter(self.EVENTS))
def process_filtered(self, event):
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.RoutineCreated):
self.set_text(event['routine'].name)
elif isinstance(event, beeps.helpers.event.RoutineUpdated):
self.set_text(event['routine'].name)
class RoutinePopupMenu(gi.repository.Gtk.Menu, Component):
__gtype_name__ = 'RoutinePopupMenu'
CONFIG_OBJECTS = ['routinetreeview']
def __init__(self):
gi.repository.Gtk.Menu.__init__(self)
Component.__init__(self)
def configure(self, menumodel, routinetreeview):
self.attach_to_widget(routinetreeview)
self.bind_model(menumodel, None, False)
class RoutineProgress(gi.repository.GtkClutter.Embed, Component):
__gtype_name__ = 'RoutineProgress'
EVENTS = [beeps.helpers.event.IntervalCreated,
beeps.helpers.event.IntervalDeleted,
beeps.helpers.event.IntervalPaused,
beeps.helpers.event.IntervalProgressed,
beeps.helpers.event.IntervalResumed,
beeps.helpers.event.IntervalUpdated,
beeps.helpers.event.RoutineFinished,
beeps.helpers.event.RoutineLoaded,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.RoutineStarted,
beeps.helpers.event.RoutineUnloaded]
HEIGHT = 100
WIDTH = 162
def __init__(self):
gi.repository.GtkClutter.Embed.__init__(self)
Component.__init__(self, beeps.helpers.event.RoutineLoadedFilter(), beeps.helpers.event.WhiteListFilter(self.EVENTS))
self.connect('draw', self._on_draw)
self.set_hexpand(True)
self.set_vexpand(True)
self.set_size_request(self.WIDTH, self.HEIGHT)
self._progress = RoutineProgressSquare(self.WIDTH, self.HEIGHT)
self.get_stage().set_user_resizable(True)
self.get_stage().add_child(self._progress)
def process_filtered(self, event):
super().process_filtered(event)
routine = event['routine']
if isinstance(event, beeps.helpers.event.IntervalProgressed):
self._progress.update_progress(routine.progress())
elif isinstance(event, beeps.helpers.event.IntervalCreated):
self._progress.update_routine(self._interval_times(routine), routine.progress())
elif isinstance(event, beeps.helpers.event.IntervalDeleted):
self._progress.update_routine(self._interval_times(routine), routine.progress())
elif isinstance(event, beeps.helpers.event.IntervalPaused):
self._progress.update_progress(routine.progress())
elif isinstance(event, beeps.helpers.event.IntervalResumed):
self._progress.update_progress(routine.progress())
elif isinstance(event, beeps.helpers.event.IntervalUpdated):
self._progress.update_routine(self._interval_times(routine), routine.progress())
elif isinstance(event, beeps.helpers.event.RoutineFinished):
self._progress.update_progress(routine.progress())
elif isinstance(event, beeps.helpers.event.RoutineLoaded):
self._progress.update_routine(self._interval_times(routine), routine.progress())
elif isinstance(event, beeps.helpers.event.RoutineSought):
self._progress.update_progress(routine.progress())
elif isinstance(event, beeps.helpers.event.RoutineStarted):
self._progress.update_progress(routine.progress())
elif isinstance(event, beeps.helpers.event.RoutineUnloaded):
self._progress.update_routine([], 0.0)
def _interval_times(self, routine):
return [i.time for i in routine.intervals]
def _on_draw(self, progress, cr):
self._progress.set_size(progress.get_allocated_width(), progress.get_allocated_height())
return False
class RoutineProgressSquare(gi.repository.Clutter.Actor):
LINE_WIDTH = 1
class Geometry:
def __init__(self, width, height, border):
# width, height, x1, y1, length, depth
b = 1.0 - 2 * border
self.w = max([width, 1.0])
self.h = max([height, 1.0])
self.x = width * border
self.y = height * border
self.l = width * b
self.d = height * b
def __init__(self, width, height):
super().__init__()
self._geometry = RoutineProgressSquare.Geometry(width, height, 0.02)
self._intervals = []
self._progress = 0.0
self._total = 0.0
self._canvas = gi.repository.Clutter.Canvas()
self.set_content(self._canvas)
self._canvas.invalidate()
self._canvas.connect('draw', self._on_draw)
self.connect('allocation-changed', self._on_allocation_changed)
def update_progress(self, progress):
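        # store the new progress and redraw; returning False also lets this serve as a one-shot GLib idle callback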
self._progress = progress
self._canvas.set_size(*self.get_size())
self._canvas.invalidate()
return False
def update_routine(self, intervals, progress):
self._intervals = intervals
self._total = sum(self._intervals)
self.update_progress(progress)
def _draw_background(self, cr, width, height):
cr.scale(width / self._geometry.w, height / self._geometry.h)
cr.set_source_rgb(0.75, 0.75, 0.75)
cr.paint()
def _draw_interval(self, cr, x, shade, interval):
l = 0.0
if self._total > 0:
l = interval / self._total * self._geometry.l
cr.rectangle(x + self._geometry.x, self._geometry.y, l, self._geometry.d)
s = 1 - shade
cr.set_source_rgb(1, s, s)
cr.fill_preserve()
cr.set_source_rgb(0, 0.7, 0)
cr.stroke()
return l
def _draw_intervals(self, cr):
cr.set_line_width(max(cr.device_to_user_distance(self.LINE_WIDTH, self.LINE_WIDTH)))
n = len(self._intervals)
total = 0.0
for i, interval in enumerate(self._intervals):
total += self._draw_interval(cr, total, i / n, interval)
return True
def _draw_progress(self, cr):
# this will result in a small overlap of 1/2 linewidth around border
w = self._progress * self._geometry.l
cr.rectangle(self._geometry.x, self._geometry.y, w, self._geometry.d)
cr.set_source_rgba(0, 0, 0, 0.4)
cr.fill()
def _on_allocation_changed(self, *args):
gi.repository.GLib.idle_add(self.update_progress, self._progress)
def _on_draw(self, progress, cr, width, height):
self._draw_background(cr, width, height)
self._draw_intervals(cr)
self._draw_progress(cr)
return True
# TODO Handle empty routine, probably other components also
# TODO Handle no routine, probably other components also
class RoutineScale(gi.repository.Gtk.Scale, Component):
__gtype_name__ = 'RoutineScale'
EVENTS = [beeps.helpers.event.IntervalCreated,
beeps.helpers.event.IntervalDeleted,
beeps.helpers.event.IntervalProgressed,
beeps.helpers.event.IntervalUpdated,
beeps.helpers.event.RoutineFinished,
beeps.helpers.event.RoutineLoaded,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.RoutineUnloaded]
def __init__(self):
gi.repository.Gtk.Scale.__init__(self)
Component.__init__(self, beeps.helpers.event.RoutineLoadedFilter(), beeps.helpers.event.WhiteListFilter(self.EVENTS))
def configure(self):
self.clear_marks()
self.set_value(0.0)
self.set_sensitive(False)
self._add_mark_for_time(0.0)
self._add_marks_for_percent()
def process_filtered(self, event):
super().process_filtered(event)
routine = event['routine']
if isinstance(event, beeps.helpers.event.IntervalProgressed):
self._set_progress(routine)
elif isinstance(event, beeps.helpers.event.IntervalCreated):
self._add_marks(routine)
self._set_progress(routine)
elif isinstance(event, beeps.helpers.event.IntervalDeleted):
self._add_marks(routine)
self._set_progress(routine)
elif isinstance(event, beeps.helpers.event.IntervalUpdated):
self._add_marks(routine)
self.set_value(0.0)
elif isinstance(event, beeps.helpers.event.RoutineLoaded):
self._add_marks(routine)
self._set_progress(routine)
self.set_sensitive(True)
elif isinstance(event, beeps.helpers.event.RoutineSought):
self._set_progress(routine)
elif isinstance(event, beeps.helpers.event.RoutineUnloaded):
self.set_sensitive(False)
self.configure()
def _add_mark_for_percent(self, percent):
self.add_mark(percent, gi.repository.Gtk.PositionType.TOP, '{:.0f}'.format(percent))
def _add_mark_for_time(self, time):
self.add_mark(time, gi.repository.Gtk.PositionType.BOTTOM, '')
def _add_marks(self, routine):
self.clear_marks()
self._add_marks_for_percent()
self._add_marks_for_time(routine.time(), routine.intervals)
def _add_marks_for_percent(self, intervals = 4):
p = 0.0
d = self._max() / intervals
for interval in range(0, intervals + 1):
self._add_mark_for_percent(p)
p += d
def _add_marks_for_time(self, time, intervals):
t = 0.0
self._add_mark_for_time(0.0)
if time > 0:
for interval in intervals:
t += interval.time * self._max() / time
self._add_mark_for_time(t)
def _max(self):
return self.get_adjustment().get_upper()
def _set_progress(self, routine):
self.set_value(routine.progress() * self._max())
class RoutineSound(gi.repository.Gtk.VolumeButton, Component):
__gtype_name__ = 'RoutineSound'
CONFIG_OBJECTS = ['planwindow']
EVENTS = [beeps.helpers.event.IntervalFinished,
beeps.helpers.event.IntervalPaused,
beeps.helpers.event.IntervalResumed,
beeps.helpers.event.RoutineSought,
beeps.helpers.event.UpdateVolume]
class GstPlayFlags:
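        # GstPlayFlags bits used below: enable audio output and software volume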
AUDIO = 0x00000002
SOFTVOLUME = 0x00000010
def __init__(self):
gi.repository.Gtk.VolumeButton.__init__(self)
Component.__init__(self, beeps.helpers.event.RoutineLoadedFilter(), beeps.helpers.event.WhiteListFilter(self.EVENTS))
def configure(self, planwindow):
self._playbin = gi.repository.Gst.ElementFactory.make('playbin', 'playbin')
self._playbin.set_property('flags', RoutineSound.GstPlayFlags.AUDIO | RoutineSound.GstPlayFlags.SOFTVOLUME)
self._playbin.set_state(gi.repository.Gst.State.NULL)
self._actions = planwindow.get_action_group('win')
action = gi.repository.Gio.SimpleAction(name = 'volume::increase')
self._actions.add_action(action)
action = gi.repository.Gio.SimpleAction(name = 'volume::decrease')
self._actions.add_action(action)
self._set_volume(self.get_value())
def connect_actions(self, controller):
super().connect_actions(controller)
self._actions.lookup_action('volume::increase').connect('activate', controller._on_volume_increase_activate)
self._actions.lookup_action('volume::decrease').connect('activate', controller._on_volume_decrease_activate)
def process_filtered(self, event):
super().process_filtered(event)
if isinstance(event, beeps.helpers.event.IntervalFinished):
self._playbin.set_state(gi.repository.Gst.State.NULL)
self._playbin.set_property('uri', self._uri(event['interval'].sound))
self._playbin.set_state(gi.repository.Gst.State.PLAYING)
elif isinstance(event, beeps.helpers.event.IntervalPaused):
self._playbin.set_state(gi.repository.Gst.State.PAUSED)
elif isinstance(event, beeps.helpers.event.IntervalResumed):
self._playbin.set_state(gi.repository.Gst.State.PLAYING)
elif isinstance(event, beeps.helpers.event.RoutineSought):
self._playbin.set_state(gi.repository.Gst.State.NULL)
elif isinstance(event, beeps.helpers.event.UpdateVolume):
self._update_volume(event['value'], event['force'])
def quit(self):
self._playbin.set_state(gi.repository.Gst.State.NULL)
self._playbin.set_property('uri', '')
def _set_volume(self, volume):
"""Takes a volume value between 0 and 1 and applies it to the playbin2
element."""
logging.debug('Volume set to %.1f %%' % (volume * 100))
self._playbin.set_property('volume', volume)
self._actions.lookup_action('volume::decrease').set_enabled(volume > 0.0)
self._actions.lookup_action('volume::increase').set_enabled(volume < 1.0)
def _update_volume(self, volume, force):
if True == force:
# this will trigger the value-changed signal
self.set_value(volume)
else:
self._set_volume(volume)
def _uri(self, sound):
uri = ''
if sound is not None:
uri += 'file://' + sound
return uri
| gpl-3.0 | -2,807,718,940,134,385,700 | 38.447395 | 149 | 0.693985 | false |
ucsd-ccbb/Oncolist | src/restLayer/app/MyGeneInfo.py | 1 | 3530 | import sys
import pymongo
import requests
import urllib2
from app.util import set_status, create_edges_index
from app.status import Status
from bson.json_util import dumps
__author__ = 'aarongary'
def get_gene_info_by_id(gene_id):
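    # query mygene.info /v3/query for the identifier and collect the upper-cased symbols of every hit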
#return ["UNKNOWN"]
alt_term_id = []
if(len(gene_id) > 2):
r_json = {}
try:
url = 'http://mygene.info/v3/query?q=' + gene_id
r = requests.get(url)
r_json = r.json()
if 'hits' in r_json and len(r_json['hits']) > 0:
for alt_term in r_json['hits']:
if(isinstance(alt_term['symbol'], list)):
alt_term_id.append(alt_term['symbol'][0].upper())
else:
alt_term_id.append(alt_term['symbol'].upper())
#gene_symbol = r_json['hits'][0]['symbol'].upper()
return alt_term_id
except Exception as e:
print e.message
return {'hits': [{'symbol': gene_id, 'entrezgene': '', 'name': 'Entrez results: 0'}]}
return ["UNKNOWN"]
else :
return ["UNKNOWN"]
def get_entrezgene_info_by_symbol(gene_id):
if(len(gene_id) > 0):
try:
url = 'http://mygene.info/v3/query?q=' + gene_id
r = requests.get(url)
r_json = r.json()
if 'hits' in r_json and len(r_json['hits']) > 0:
for alt_term in r_json['hits']:
if(isinstance(alt_term['entrezgene'], list)):
return str(alt_term['entrezgene'][0])
else:
return str(alt_term['entrezgene'])
except Exception as e:
print e.message
return {'hits': [{'symbol': gene_id, 'entrezgene': '', 'name': 'Entrez results: 0'}]}
return ["UNKNOWN"]
else :
return ["UNKNOWN"]
def getMyGeneInfoByID(gene_id):
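    # try to resolve the identifier via mygene.info and fetch its full annotation by Entrez id;
    # on failure fall back to a raw Entrez-style stub entry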
if(len(gene_id) > 0):
try:
mir_resolved_id = get_mir_name_converter(gene_id)
            if mir_resolved_id != "UNKNOWN":
url = 'http://mygene.info/v3/query?q=' + mir_resolved_id
r = requests.get(url)
r_json = r.json()
if 'hits' in r_json and len(r_json['hits']) > 0:
for alt_hit in r_json['hits']:
entrezgene_id = alt_hit['entrezgene']
url2 = 'http://mygene.info/v3/gene/' + str(entrezgene_id)
r2 = requests.get(url2)
r2_json = r2.json()
return r2_json
return r
else:
return "UNKNOWN TERM"
entrez_url = "http://mygene.info/v3/query?q=" + str(gene_id)
entrez_content = "";
entrez_data = {
'hits': []
}
for line in urllib2.urlopen(entrez_url):
entrez_content += line.rstrip() + " "
hit = {
'name': entrez_content,
'_score': 0,
'symbol': gene_id,
'source': 'Entrez'
}
entrez_data['hits'].append(hit)
except Exception as e:
print e.message
return {'hits': [{'symbol': gene_id, 'name': 'Entrez results: 0'}]}
return entrez_data
else :
return {'hits': [{'symbol': gene_id, 'name': 'not vailable'}]}
def get_mir_name_converter(mirna_id):
return "UNKNOWN" | mit | 789,984,549,225,411,300 | 29.439655 | 97 | 0.470822 | false |
Zempashi/buildbot_kube | kubernetes.py | 1 | 11420 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from datetime import datetime
from datetime import date
import hashlib
import socket
from twisted.internet import defer
from twisted.internet import threads
from twisted.python import log
from buildbot import config
from buildbot.interfaces import IRenderable
from buildbot.interfaces import LatentWorkerFailedToSubstantiate
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.util import unicode2bytes
from buildbot.worker.docker import AbstractLatentWorker
from six import integer_types
from six import string_types
from six import text_type
from zope.interface import implementer
try:
from kubernetes import config as kube_config
from kubernetes import client
except ImportError as exc:
kube_config = None
client = None
@implementer(IRenderable)
class KubeRenderable(object):
untouched_types = integer_types + (
float, bool, bytes, datetime, date, type(None)
)
def __init__(self, kube_obj):
self.kube_obj = kube_obj
@defer.inlineCallbacks
def getRenderingFor(self, props):
res = yield self.recursive_render(
copy.deepcopy(self.kube_obj),
props
)
defer.returnValue(res)
@defer.inlineCallbacks
def recursive_render(self, obj, props):
"""Recursively parse kubernetes object tree to find renderable"""
# This code is inspired by the code of kubernetes client-python
# https://github.com/kubernetes-incubator/client-python/blob/4e593a7530a8751c817cceec715bfe1d03997793/kubernetes/client/api_client.py#L172-L214
if isinstance(obj, string_types + (text_type,)):
res = yield Interpolate(obj).getRenderingFor(props)
elif isinstance(obj, tuple):
res = yield self.recursive_render(list(obj), props)
elif isinstance(obj, list):
res = []
for sub_obj in obj:
temp = yield self.recursive_render(sub_obj, props)
res.append(temp)
elif isinstance(obj, dict):
res = {} # pylint: disable=redefined-variable-type
            for key, sub_obj in obj.items():
res[key] = yield self.recursive_render(sub_obj, props)
elif isinstance(obj, IRenderable):
res = yield obj.getRenderingFor(props)
elif isinstance(obj, self.untouched_types):
res = obj
else:
for key in obj.swagger_types:
value = getattr(obj, key)
if not value:
continue
res = yield self.recursive_render(value, props)
setattr(obj, key, res)
res = obj
defer.returnValue(res)
class KubeLatentWorker(AbstractLatentWorker):
instance = None
properties_source = 'kube Latent Worker'
@staticmethod
def dependency_error():
config.error("The python module 'kubernetes>=1' is needed to use a "
"KubeLatentWorker")
def load_config(self, kubeConfig):
try:
kube_config.load_kube_config()
exception = None
except Exception:
try:
kube_config.load_incluster_config()
exception = None
except kube_config.config_exception.ConfigException as exc:
exception = exc
if exception and not (kubeConfig and 'host' in kubeConfig):
config.error("No kube-apimaster host provided")
for config_key, value in kubeConfig.items():
setattr(client.configuration, config_key, value)
@classmethod
def default_job(cls):
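        # minimal V1Job template: the %(prop:...)s placeholders are rendered per build by KubeRenderable/Interpolate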
if not client:
cls.dependency_error()
job_name = '%(prop:buildername)s-%(prop:buildnumber)s'
return client.V1Job(
metadata=client.V1ObjectMeta(name=job_name),
spec=client.V1JobSpec(
template=client.V1PodTemplateSpec(
metadata=client.V1ObjectMeta(name=job_name),
spec=client.V1PodSpec(
containers=[
client.V1Container(
name=job_name,
image='buildbot/buildbot-worker',
env=[
client.V1EnvVar(
name='BUILDMASTER',
value='%(prop:masterFQDN)s'
),
client.V1EnvVar(
name='BUILDMASTER_PORT',
value='%(prop:masterPort)s'
),
client.V1EnvVar(
name='WORKERNAME',
value='%(prop:workerName)s'
),
client.V1EnvVar(
name='WORKERPASS',
value='%(prop:workerPass)s'
)
]
)
],
restart_policy='Never'
)
)
)
)
def checkConfig(self, name, password, job=None, namespace=None,
masterFQDN=None, getMasterMethod=None,
kubeConfig=None, **kwargs):
# pylint: disable=too-many-arguments
# pylint: disable=unused-argument
# Set build_wait_timeout to 0 if not explicitly set: Starting a
# container is almost immediate, we can afford doing so for each build.
if 'build_wait_timeout' not in kwargs:
kwargs['build_wait_timeout'] = 0
if not client:
self.dependency_error()
self.load_config(kubeConfig)
AbstractLatentWorker.checkConfig(self, name, password, **kwargs)
@defer.inlineCallbacks
def reconfigService(self, name, password, job=None, namespace=None,
masterFQDN=None, getMasterMethod=None,
kubeConfig=None, **kwargs):
# pylint: disable=too-many-arguments
# Set build_wait_timeout to 0 if not explicitly set: Starting a
# container is almost immediate, we can afford doing so for each build.
if 'build_wait_timeout' not in kwargs:
kwargs['build_wait_timeout'] = 0
if password is None:
password = self.getRandomPass()
# pylint: disable=attribute-defined-outside-init
self.getMasterMethod = getMasterMethod
if masterFQDN is None:
# pylint: disable=invalid-name
masterFQDN = self.get_master_qdn # noqa: N806
if callable(masterFQDN):
# pylint: disable=invalid-name
masterFQDN = masterFQDN()
# pylint: disable=attribute-defined-outside-init
self.masterFQDN = masterFQDN
self.load_config(kubeConfig)
# pylint: disable=attribute-defined-outside-init
self.kubeConfig = kubeConfig
# pylint: disable=attribute-defined-outside-init
self.namespace = namespace or 'default'
# pylint: disable=attribute-defined-outside-init
self.job = job or KubeRenderable(self.default_job())
# pylint: disable=invalid-name
masterName = unicode2bytes(self.master.name) # noqa: N806
# pylint: disable=attribute-defined-outside-init
self.masterhash = hashlib.sha1(masterName).hexdigest()[:6]
yield AbstractLatentWorker.reconfigService(
self, name, password, **kwargs)
@defer.inlineCallbacks
def start_instance(self, build):
if self.instance is not None:
raise ValueError('instance active')
# pylint: disable=invalid-name
masterFQDN = self.masterFQDN # noqa: N806
# pylint: disable=invalid-name
masterPort = '9989' # noqa: N806
if self.registration is not None:
# pylint: disable=invalid-name
masterPort = str(self.registration.getPBPort()) # noqa: N806
if ":" in masterFQDN:
masterFQDN, masterPort = masterFQDN.split(':')
master_properties = Properties.fromDict({
'masterHash': (self.masterhash, self.properties_source),
'masterFQDN': (masterFQDN, self.properties_source),
'masterPort': (masterPort, self.properties_source),
'workerName': (self.name, self.properties_source),
'workerPass': (self.password, self.properties_source)
})
build.properties.updateFromProperties(master_properties)
namespace = yield build.render(self.namespace)
job = yield build.render(self.job)
res = yield threads.deferToThread(
self._thd_start_instance,
namespace,
job
)
defer.returnValue(res)
def _thd_start_instance(self, namespace, job):
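        # thread helper: create the Job through the Kubernetes batch API and keep a handle for stop_instance()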
self.load_config(self.kubeConfig)
batch_client = client.BatchV1Api()
# TODO: cleanup or not cleanup ?
# cleanup the old instances
instance = batch_client.create_namespaced_job(namespace, job)
if instance is None:
log.msg('Failed to create the container')
raise LatentWorkerFailedToSubstantiate(
'Failed to start container'
)
job_name = instance.metadata.name # pylint: disable=no-member
log.msg('Job created, Id: %s...' % job_name)
self.instance = instance
return [
job_name,
# pylint: disable=no-member
instance.spec.template.spec.containers[0].image
]
def stop_instance(self, fast=False):
if self.instance is None:
# be gentle. Something may just be trying to alert us that an
# instance never attached, and it's because, somehow, we never
# started.
return defer.succeed(None)
instance = self.instance
self.instance = None
return threads.deferToThread(self._thd_stop_instance, instance, fast)
def _thd_stop_instance(self, instance, fast):
# pylint: disable=unused-argument
assert not False
self.load_config(self.kubeConfig)
batch_client = client.BatchV1Api()
delete_body = client.V1DeleteOptions()
job_name = instance.metadata.name
namespace = instance.metadata.namespace
log.msg('Deleting Job %s...' % job_name)
batch_client.delete_namespaced_job(job_name, namespace, delete_body)
def get_master_qdn(self):
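        # resolve the master address using the configured method ('auto_ip' or 'fqdn'), defaulting to the IP lookup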
try:
qdn_getter = self.get_master_mapping[self.getMasterMethod]
except KeyError:
qdn_getter = self.default_master_qdn_getter
return qdn_getter()
@staticmethod
def get_fqdn():
return socket.getfqdn()
def get_ip(self):
fqdn = self.get_fqdn()
try:
return socket.gethostbyname(fqdn)
except socket.gaierror:
return fqdn
get_master_mapping = {
'auto_ip': get_ip,
'fqdn': get_fqdn
}
default_master_qdn_getter = get_ip
| mit | -7,640,438,846,853,588,000 | 36.689769 | 151 | 0.578984 | false |
eugena/django-rest-auth | rest_auth/views.py | 1 | 7437 | from django.contrib.auth import login, logout, get_user_model
from django.conf import settings
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.authtoken.models import Token
from rest_framework.generics import RetrieveUpdateAPIView
from .app_settings import (
TokenSerializer, UserDetailsSerializer, SimpleLoginSerializer, SimpleTokenLoginSerializer,
LoginSerializer, PasswordResetSerializer, PasswordResetConfirmSerializer,
PasswordChangeSerializer
)
from .utils import get_user_id_by_session_key, flush_session_by_session_key
class BaseSimpleLoginView(GenericAPIView):
permission_classes = (AllowAny,)
def login(self):
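        # keep the validated user and, when REST_SESSION_LOGIN is enabled (the default), log it into Django's session framework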
self.user = self.serializer.validated_data['user']
if getattr(settings, 'REST_SESSION_LOGIN', True):
login(self.request, self.user)
def get_error_response(self):
return Response(
self.serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
def post(self, request, *args, **kwargs):
self.serializer = self.get_serializer(data=self.request.data)
if not self.serializer.is_valid():
return self.get_error_response()
self.login()
return Response({'session_key': request.session.session_key}, status=status.HTTP_200_OK)
class SimpleLoginView(BaseSimpleLoginView):
"""
Check the credentials and authenticated if the credentials are valid.
Calls Django Auth login method to register User ID
in Django session framework
Accept the following POST parameters: username, password
"""
serializer_class = SimpleLoginSerializer
class SimpleTokenLoginView(BaseSimpleLoginView):
"""
Check the credentials and authenticated if the credentials are valid.
Calls Django Auth login method to register User ID
in Django session framework
Accept the following POST parameters: uid, token
"""
serializer_class = SimpleTokenLoginSerializer
class LoginView(GenericAPIView):
"""
Check the credentials and return the REST Token
if the credentials are valid and authenticated.
Calls Django Auth login method to register User ID
in Django session framework
Accept the following POST parameters: username, password
Return the REST Framework Token Object's key.
"""
permission_classes = (AllowAny,)
serializer_class = LoginSerializer
token_model = Token
response_serializer = TokenSerializer
def login(self):
self.user = self.serializer.validated_data['user']
self.token, created = self.token_model.objects.get_or_create(
user=self.user)
if getattr(settings, 'REST_SESSION_LOGIN', True):
login(self.request, self.user)
def get_response(self):
return Response(
self.response_serializer(self.token).data, status=status.HTTP_200_OK
)
def get_error_response(self):
return Response(
self.serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
def post(self, request, *args, **kwargs):
self.serializer = self.get_serializer(data=self.request.data)
if not self.serializer.is_valid():
return self.get_error_response()
self.login()
return self.get_response()
class LogoutView(APIView):
"""
Calls Django logout method and delete the Token object
assigned to the current User object.
Accepts/Returns nothing.
"""
permission_classes = (AllowAny,)
def post(self, request, **kwargs):
if getattr(settings, 'USING_SESSION_KEY', False):
flush_session_by_session_key(self.kwargs.get('session_key'))
else:
try:
request.user.auth_token.delete()
except:
pass
logout(request)
response = Response(
{"success": "Successfully logged out."},
status=status.HTTP_200_OK)
response.delete_cookie(settings.SESSION_COOKIE_NAME)
return response
class UserDetailsView(RetrieveUpdateAPIView):
"""
Returns User's details in JSON format.
Accepts the following GET parameters: token
Accepts the following POST parameters:
Required: token
Optional: email, first_name, last_name and UserProfile fields
Returns the updated UserProfile and/or User object.
"""
serializer_class = UserDetailsSerializer
permission_classes = (IsAuthenticated,)
def get_object(self):
if getattr(settings, 'USING_SESSION_KEY', False):
try:
user = get_user_model()._default_manager.get(
                    pk=get_user_id_by_session_key(self.kwargs.get('session_key') or None))
except:
user = None
else:
user = self.request.user
return user
class PasswordResetView(GenericAPIView):
"""
Calls Django Auth PasswordResetForm save method.
Accepts the following POST parameters: email
Returns the success/fail message.
"""
serializer_class = PasswordResetSerializer
permission_classes = (AllowAny,)
def post(self, request, *args, **kwargs):
# Create a serializer with request.data
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
serializer.save()
# Return the success message with OK HTTP status
return Response(
{"success": "Password reset e-mail has been sent."},
status=status.HTTP_200_OK
)
class PasswordResetConfirmView(GenericAPIView):
"""
Password reset e-mail link is confirmed, therefore this resets the user's password.
Accepts the following POST parameters: new_password1, new_password2
Accepts the following Django URL arguments: token, uid
Returns the success/fail message.
"""
serializer_class = PasswordResetConfirmSerializer
permission_classes = (AllowAny,)
def post(self, request):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
serializer.save()
return Response({"success": "Password has been reset with the new password."})
class PasswordChangeView(GenericAPIView):
"""
Calls Django Auth SetPasswordForm save method.
Accepts the following POST parameters: new_password1, new_password2
Returns the success/fail message.
"""
serializer_class = PasswordChangeSerializer
def __init__(self):
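        # require an authenticated user only when session-key based access is not enabled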
if not getattr(settings, 'USING_SESSION_KEY', False):
self.permission_classes = (IsAuthenticated,)
super(PasswordChangeView, self).__init__()
def post(self, request, **kwargs):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
serializer.save()
return Response({"success": "New password has been saved."})
| mit | 4,463,591,443,124,861,400 | 30.646809 | 110 | 0.669894 | false |
twinaphex/sdcell | ps3_osk/rect_gen.py | 1 | 1904 | def printrect(x, y, w, h):
print "{{" + str(x) + ", " + str(y) + ", " + str(w) + ", " + str(h) + "}, HID_x, KMOD_NONE, 0},"
printrect(0, 0, 60, 60)
for i in range(0, 4):
printrect(120 + i * 60, 0, 60, 60)
for i in range(0, 4):
printrect(390 + i * 60, 0, 60, 60)
for i in range(0, 4):
printrect(660 + i * 60, 0, 60, 60)
for i in range(0, 3):
printrect(910 + i * 60, 0, 60, 60)
#ROW2
for i in range(0, 13):
printrect(i * 60, 90, 60, 60)
printrect(780, 90, 120, 60)
for i in range(0, 3):
printrect(910 + 60 * i, 90, 60, 60)
for i in range(0, 4):
printrect(1095 + 60 * i, 90, 60, 60)
#ROW3
printrect(0, 150, 90, 60)
for i in range(0, 12):
printrect(90 + i * 60, 150, 60, 60)
printrect(810, 150, 90, 60)
for i in range(0, 3):
printrect(910 + 60 * i, 150, 60, 60)
for i in range(0, 3):
printrect(1095 + 60 * i, 150, 60, 60)
printrect(1095 + 60 * 3, 150, 60, 120)
#ROW4
printrect(0, 210, 105, 60)
for i in range(0, 11):
printrect(105 + i * 60, 210, 60, 60)
printrect(765, 210, 135, 60)
for i in range(0, 3):
printrect(1095 + 60 * i, 210, 60, 60)
#ROW5
printrect(0, 270, 135, 60)
for i in range(0, 10):
printrect(135 + i * 60, 270, 60, 60)
printrect(735, 270, 165, 60)
printrect(910 + 60, 270, 60, 60)
for i in range(0, 3):
printrect(1095 + 60 * i, 270, 60, 60)
printrect(1095 + 60 * 3, 270, 60, 120)
#ROW 6
cursor = 0
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
printrect(cursor, 330, 60, 60)
cursor = cursor + 60
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
printrect(cursor, 330, 360, 60)
cursor = cursor + 360
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
printrect(cursor, 330, 60, 60)
cursor = cursor + 60
printrect(cursor, 330, 60, 60)
cursor = cursor + 60
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
for i in range(0, 3):
printrect(910 + 60 * i, 330, 60, 60)
printrect(1095, 330, 120, 60)
printrect(1095 + 120, 330, 60, 60)
| lgpl-2.1 | 3,855,062,236,392,920,000 | 17.666667 | 97 | 0.60084 | false |
hiuwo/acq4 | acq4/devices/Camera/taskGUI.py | 1 | 6177 | # -*- coding: utf-8 -*-
from PyQt4 import QtCore, QtGui
from TaskTemplate import *
from acq4.devices.DAQGeneric.taskGUI import DAQGenericTaskGui
from acq4.devices.Device import TaskGui
#from acq4.pyqtgraph.WidgetGroup import WidgetGroup
import numpy as np
import acq4.pyqtgraph as pg
#from acq4.pyqtgraph.graphicsItems import InfiniteLine, VTickGroup
#from PyQt4 import Qwt5 as Qwt
class CameraTaskGui(DAQGenericTaskGui):
def __init__(self, dev, taskRunner):
DAQGenericTaskGui.__init__(self, dev, taskRunner, ownUi=False) ## When initializing superclass, make sure it knows this class is creating the ui.
self.ui = Ui_Form()
self.ui.setupUi(self)
self.stateGroup = pg.WidgetGroup(self) ## create state group before DAQ creates its own interface
self.ui.horizSplitter.setStretchFactor(0, 0)
self.ui.horizSplitter.setStretchFactor(1, 1)
DAQGenericTaskGui.createChannelWidgets(self, self.ui.ctrlSplitter, self.ui.plotSplitter)
self.ui.plotSplitter.setStretchFactor(0, 10)
self.ui.plotSplitter.setStretchFactor(1, 1)
self.ui.plotSplitter.setStretchFactor(2, 1)
## plots should not be storing more than one trace at a time.
for p in self.plots.values():
p.plotItem.ctrl.maxTracesCheck.setChecked(True)
p.plotItem.ctrl.maxTracesSpin.setValue(1)
p.plotItem.ctrl.forgetTracesCheck.setChecked(True)
#self.stateGroup = WidgetGroup([
#(self.ui.recordCheck, 'record'),
#(self.ui.triggerCheck, 'trigger'),
#(self.ui.displayCheck, 'display'),
#(self.ui.recordExposeCheck, 'recordExposeChannel'),
#(self.ui.splitter, 'splitter')
#])
conf = self.dev.camConfig
#if 'exposeChannel' not in conf:
#self.ui.exposureGroupBox.hide()
#if 'triggerInChannel' not in conf:
#self.ui.triggerGroupBox.hide()
#if 'triggerOutChannel' not in conf:
#self.ui.triggerCheck.hide()
#self.exposeCurve = None
tModes = self.dev.listParams('triggerMode')[0]
#tModes.remove('Normal')
#tModes = ['Normal'] + tModes
for m in tModes:
item = self.ui.triggerModeCombo.addItem(m)
self.vLines = []
if 'trigger' in self.plots:
l = pg.InfiniteLine()
self.vLines.append(l)
self.plots['trigger'].addItem(self.vLines[0])
if 'exposure' in self.plots:
l = pg.InfiniteLine()
self.vLines.append(l)
self.plots['exposure'].addItem(self.vLines[1])
self.frameTicks = pg.VTickGroup()
self.frameTicks.setYRange([0.8, 1.0])
self.ui.imageView.sigTimeChanged.connect(self.timeChanged)
self.taskRunner.sigTaskPaused.connect(self.taskPaused)
def timeChanged(self, i, t):
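        # move the time markers in the trigger/exposure plots to follow the image view's current frame time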
for l in self.vLines:
l.setValue(t)
def saveState(self):
s = self.currentState()
s['daqState'] = DAQGenericTaskGui.saveState(self)
return s
def restoreState(self, state):
self.stateGroup.setState(state)
if 'daqState' in state:
DAQGenericTaskGui.restoreState(self, state['daqState'])
def generateTask(self, params=None):
daqProt = DAQGenericTaskGui.generateTask(self, params)
if params is None:
params = {}
state = self.currentState()
task = {
'record': state['recordCheck'],
#'recordExposeChannel': state['recordExposeCheck'],
'triggerProtocol': state['triggerCheck'],
'params': {
'triggerMode': state['triggerModeCombo']
}
}
task['channels'] = daqProt
if state['releaseBetweenRadio']:
task['pushState'] = None
task['popState'] = None
return task
def taskSequenceStarted(self):
DAQGenericTaskGui.taskSequenceStarted(self)
if self.ui.releaseAfterRadio.isChecked():
self.dev.pushState('cam_proto_state')
def taskFinished(self):
DAQGenericTaskGui.taskFinished(self)
if self.ui.releaseAfterRadio.isChecked():
self.dev.popState('cam_proto_state')
def taskPaused(self): ## If the task is paused, return the camera to its previous state until we start again
if self.ui.releaseAfterRadio.isChecked():
self.dev.popState('cam_proto_state')
self.dev.pushState('cam_proto_state')
def currentState(self):
return self.stateGroup.state()
def handleResult(self, result, params):
#print result
state = self.stateGroup.state()
if state['displayCheck']:
if result is None or len(result.frames()) == 0:
print "No images returned from camera task."
self.ui.imageView.clear()
else:
self.ui.imageView.setImage(result.asMetaArray())
#print " frame times:", list(result['frames'].xvals('Time'))
frameTimes, precise = result.frameTimes()
if precise:
self.frameTicks.setXVals(frameTimes)
DAQGenericTaskGui.handleResult(self, result.daqResult(), params)
#if state['displayExposureCheck'] and 'expose' in result and result['expose'] is not None:
#d = result['expose']
#if self.exposeCurve is None:
#self.exposeCurve = self.ui.exposePlot.plot(d.view(ndarray), x=d.xvals('Time'), pen=QtGui.QPen(QtGui.QColor(200, 200, 200)))
#else:
#self.exposeCurve.setData(y=d.view(ndarray), x=d.xvals('Time'))
#self.ui.exposePlot.replot()
#def recordExposeClicked(self):
#daq = self.dev.config['exposeChannel'][0]
#self.task.getDevice(daq)
def quit(self):
self.ui.imageView.close()
DAQGenericTaskGui.quit(self)
| mit | 601,782,246,092,227,100 | 37.36646 | 154 | 0.593816 | false |
Scorched-Moon/server | server/miniboa/async.py | 1 | 6429 | """
Handle Asynchronous Telnet Connections.
"""
import socket
import select
import sys
import logging
from .telnet import TelnetClient
from .telnet import ConnectionLost
# Cap sockets to 512 on Windows because winsock can only process 512 at time
# Cap sockets to 1000 on UNIX because you can only have 1024 file descriptors
MAX_CONNECTIONS = 500 if sys.platform == 'win32' else 1000
#-----------------------------------------------------Dummy Connection Handlers
def _on_connect(client):
"""
Placeholder new connection handler.
"""
logging.info("++ Opened connection to {}, sending greeting...".format(client.addrport()))
client.send("Greetings from Miniboa-py3!\n")
def _on_disconnect(client):
"""
Placeholder lost connection handler.
"""
logging.info ("-- Lost connection to %s".format(client.addrport()))
#-----------------------------------------------------------------Telnet Server
class TelnetServer(object):
"""
Poll sockets for new connections and sending/receiving data from clients.
"""
def __init__(self, port=7777, address='', on_connect=_on_connect,
on_disconnect=_on_disconnect, max_connections=MAX_CONNECTIONS,
timeout=0.05):
"""
Create a new Telnet Server.
port -- Port to listen for new connection on. On UNIX-like platforms,
you made need root access to use ports under 1025.
address -- Address of the LOCAL network interface to listen on. You
can usually leave this blank unless you want to restrict traffic
to a specific network device. This will usually NOT be the same
as the Internet address of your server.
on_connect -- function to call with new telnet connections
on_disconnect -- function to call when a client's connection dies,
either through a terminated session or client.active being set
to False.
max_connections -- maximum simultaneous the server will accept at once
timeout -- amount of time that Poll() will wait from user input
before returning. Also frees a slice of CPU time.
"""
self.port = port
self.address = address
self.on_connect = on_connect
self.on_disconnect = on_disconnect
self.max_connections = min(max_connections, MAX_CONNECTIONS)
self.timeout = timeout
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
server_socket.bind((address, port))
server_socket.listen(5)
        except socket.error as err:
logging.critical("Unable to create the server socket: " + str(err))
raise
self.server_socket = server_socket
self.server_fileno = server_socket.fileno()
# Dictionary of active clients,
# key = file descriptor, value = TelnetClient (see miniboa.telnet)
self.clients = {}
def stop(self):
"""
Disconnects the clients and shuts down the server
"""
for clients in self.client_list():
clients.sock.close()
self.server_socket.close()
def client_count(self):
"""
Returns the number of active connections.
"""
return len(self.clients)
def client_list(self):
"""
Returns a list of connected clients.
"""
return self.clients.values()
def poll(self):
"""
Perform a non-blocking scan of recv and send states on the server
and client connection sockets. Process new connection requests,
read incomming data, and send outgoing data. Sends and receives may
be partial.
"""
# Build a list of connections to test for receive data pending
recv_list = [self.server_fileno] # always add the server
del_list = [] # list of clients to delete after polling
for client in self.clients.values():
if client.active:
recv_list.append(client.fileno)
else:
self.on_disconnect(client)
del_list.append(client.fileno)
# Delete inactive connections from the dictionary
for client in del_list:
del self.clients[client]
# Build a list of connections that need to send data
send_list = []
for client in self.clients.values():
if client.send_pending:
send_list.append(client.fileno)
# Get active socket file descriptors from select.select()
try:
rlist, slist, elist = select.select(recv_list, send_list, [],
self.timeout)
except select.error as err:
# If we can't even use select(), game over man, game over
logging.critical("SELECT socket error '{}'".format(str(err)))
raise
# Process socket file descriptors with data to recieve
for sock_fileno in rlist:
# If it's coming from the server's socket then this is a new connection request.
if sock_fileno == self.server_fileno:
try:
sock, addr_tup = self.server_socket.accept()
except socket.error as err:
logging.error("ACCEPT socket error '{}:{}'.".format(err[0], err[1]))
continue
# Check for maximum connections
if self.client_count() >= self.max_connections:
logging.warning("Refusing new connection, maximum already in use.")
sock.close()
continue
# Create the client instance
new_client = TelnetClient(sock, addr_tup)
# Add the connection to our dictionary and call handler
self.clients[new_client.fileno] = new_client
self.on_connect(new_client)
else:
# Call the connection's recieve method
try:
self.clients[sock_fileno].socket_recv()
except ConnectionLost:
self.clients[sock_fileno].deactivate()
# Process sockets with data to send
for sock_fileno in slist:
# Call the connection's send method
self.clients[sock_fileno].socket_send()
| gpl-3.0 | 1,876,447,987,906,235,600 | 34.324176 | 93 | 0.591383 | false |
Videoclases/videoclases | fabfile.py | 1 | 5559 | from fabric.contrib import django as ddd
import django
ddd.project("project")
django.setup()
import getpass
import os
import time
from django.contrib.auth.models import User
from django.utils import timezone
from fabric.api import env, require, run, sudo, cd, local, get
from project.fabfile_secret import *
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
file_name = 'videoclases/project/settings_secret.py'
template_name = 'videoclases/project/settings_secret.py.template'
def _load_data(reboot=False):
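    # run makemigrations/migrate and, when reboot is set, load the development fixtures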
local('python manage.py makemigrations')
local('python manage.py migrate')
if reboot:
fixtures = ['devgroups', 'devusers', 'devschool', 'devcourses', 'devstudents', 'devteachers',
'devhomeworks']
for f in fixtures:
local('python manage.py loaddata ' + f)
# fab devserver -> states that you will connect to devserver server
def devserver():
env.hosts = [env.server_name]
# activates videoclases virtualenv in server
def virtualenv(command, use_sudo=False):
if use_sudo:
func = sudo
else:
func = run
func('source %sbin/activate && %s' % (env.virtualenv_root, command))
# creates file in ~/
# usage: fab devserver test_connection
def test_connection():
require('hosts', provided_by=[devserver])
virtualenv('echo "It works!" > fabric_connection_works.txt')
# util for prompt confirmation
def _confirm():
prompt = "Please confirm you want to sync the branch 'master' in the server 'buho'"
prompt = '%s [%s/%s]: ' % (prompt, 'y', 'n')
while True:
ans = raw_input(prompt)
if not ans:
print 'Please answer Y or N.'
continue
if ans not in ['y', 'Y', 'n', 'N']:
print 'Please answer Y or N.'
continue
if ans == 'y' or ans == 'Y':
return True
if ans == 'n' or ans == 'N':
return False
# updates dev server project from git repository
def update():
require('hosts', provided_by=[devserver])
with cd(env.repo_root):
run('git pull origin master')
# installs requirements in server
def install_requirements():
require('hosts', provided_by=[devserver])
virtualenv('pip install -q -r %(requirements_file)s' % env)
# aux function for calling manage.py functions
def manage_py(command, use_sudo=False):
require('hosts', provided_by=[devserver])
with cd(env.manage_dir):
virtualenv('python manage.py %s' % command, use_sudo)
# syncs db in server
def makemigrations():
require('hosts', provided_by=[devserver])
manage_py('makemigrations')
# south migrate for db
def migrate():
require('hosts', provided_by=[devserver])
manage_py('migrate')
# collects static files
def collectstatic():
require('hosts', provided_by=[devserver])
manage_py('collectstatic --noinput')
# restarts apache in server
def reload():
require('hosts', provided_by=[devserver])
sudo('service apache2 restart')
# deploy on development server
def deploy():
require('hosts', provided_by=[devserver])
if _confirm():
update()
install_requirements()
makemigrations()
migrate()
collectstatic()
reload()
else:
        print 'Deploy cancelled'
# sync and migrate local db and start server
def restart(reboot=False):
_load_data(reboot)
local('python manage.py runserver 0.0.0.0:8000')
# reset local db and start server
def reboot():
try:
local('rm db.sqlite3')
except:
pass
restart(True)
def _create_teacher():
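    # prompt interactively for a teacher account and create the matching School, Course and Teacher records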
print '---------------------------------------'
print 'Now you will be asked for the necessary data to create a Professor.'
from videoclases.models.course import Course
from videoclases.models.teacher import Teacher
from videoclases.models.school import School
username = raw_input('Insert username: ')
password = getpass.getpass('Insert password: ')
password2 = getpass.getpass('Confirm password: ')
while password != password2:
print 'Passwords were not equal.'
password = getpass.getpass('Insert password again: ')
password2 = getpass.getpass('Confirm password: ')
first_name = raw_input('Insert first name: ')
last_name = raw_input('Insert last name: ')
school = raw_input('Insert school name: ')
course = raw_input('Insert course name: ')
user = User.objects.create_user(username=username, password=password)
user.first_name = first_name
user.last_name = last_name
user.save()
School.objects.create(name=school).save()
co = School.objects.get(name=school)
Course.objects.create(name=course, school=co, year=timezone.now().year).save()
cu = Course.objects.get(name=course, school=co, year=timezone.now().year)
Teacher.objects.create(user=user, school=co)
p = Teacher.objects.get(user=user, school=co)
p.courses.add(cu)
p.save()
def install():
local('cp ' + os.path.join(BASE_DIR, template_name) + ' ' + os.path.join(BASE_DIR, file_name))
_load_data()
local('python manage.py collectstatic --noinput -l')
local('python manage.py test')
local('python manage.py loaddata devgroups')
_create_teacher()
local('python manage.py runserver 0.0.0.0:8000')
def install_with_data():
local('cp ' + os.path.join(BASE_DIR, template_name) + ' ' + os.path.join(BASE_DIR, file_name))
_load_data(True)
local('python manage.py collectstatic --noinput -l')
local('python manage.py test')
local('python manage.py runserver 0.0.0.0:8000') | gpl-3.0 | 8,812,348,572,361,951,000 | 30.771429 | 101 | 0.655334 | false |
mmaelicke/scikit-gstat | skgstat/tests/test_models.py | 1 | 5228 | """
"""
import unittest
import numpy as np
from numpy.testing import assert_array_almost_equal
from skgstat.models import spherical, exponential
from skgstat.models import gaussian, cubic, stable, matern
from skgstat.models import variogram
class TestModels(unittest.TestCase):
def setUp(self):
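        # lag distances at which every model is evaluated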
self.h = np.array([5, 10, 30, 50, 100])
def test_spherical_default(self):
# extract the actual function
f = spherical.py_func
result = [13.75, 20.0, 20.0, 20.0, 20.0]
model = list(map(f, self.h, [10]*5, [20]*5))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_spherical_nugget(self):
# extract the actual function
f = spherical.py_func
result = [15.44, 27.56, 33.0, 34.0, 35.0]
# calculate
nuggets = [1, 2, 3, 4, 5]
model = list(map(f, self.h, [15] * 5, [30] * 5, nuggets))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_exponential_default(self):
# extract the actual function
f = exponential.py_func
result = [5.18, 9.02, 16.69, 19., 19.95]
model = list(map(f, self.h, [50]*5, [20]*5))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_exponential_nugget(self):
# extract the actual function
f = exponential.py_func
result = [7.64, 13.8, 26.31, 31.54, 34.8]
# calculate
nuggets = [1, 2, 3, 4, 5]
model = list(map(f, self.h, [60] * 5, [30] * 5, nuggets))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_gaussian_default(self):
# extract the actual function
f = gaussian.py_func
result = [0.96, 3.58, 16.62, 19.86, 20.]
model = list(map(f, self.h, [45]*5, [20]*5))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_gaussian_nugget(self):
# extract the actual function
f = gaussian.py_func
result = [1.82, 5.15, 21.96, 32.13, 35.]
# calculate
nuggets = [1, 2, 3, 4, 5]
model = list(map(f, self.h, [60] * 5, [30] * 5, nuggets))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def _test_cubic_default(self):
# extract the actual function
f = cubic.py_func
result = [6.13, 21.11, 88.12, 100., 100.]
model = list(map(f, self.h, [50]*5, [100]*5))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_cubic_nugget(self):
# extract the actual function
f = cubic.py_func
result = [11.81, 34.74, 73., 74., 75.]
# calculate
nuggets = [1, 2, 3, 4, 5]
model = list(map(f, self.h, [30] * 5, [70] * 5, nuggets))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_stable_default(self):
# extract the actual function
f = stable.py_func
result = [9.05, 23.53, 75.2, 95.02, 99.98]
model = list(map(f, self.h, [50]*5, [100]*5, [1.5]*5))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_stable_nugget(self):
# extract the actual function
f = stable.py_func
result = [8.77, 10.8, 12.75, 13.91, 14.99]
# calculate
nuggets = [1, 2, 3, 4, 5]
model = list(map(f, self.h, [20] * 5, [10] * 5, [0.5] * 5, nuggets))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_matern_default(self):
# extract the actual function
f = matern.py_func
result = [24.64, 43.2, 81.68, 94.09, 99.65]
model = list(map(f, self.h, [50]*5, [100]*5, [0.50001]*5))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_matern_nugget(self):
# extract the actual function
f = matern.py_func
result = [3.44, 8.52, 12.99, 14., 15.]
# calculate
nuggets = [1, 2, 3, 4, 5]
model = list(map(f, self.h, [20] * 5, [9.99999] * 5, [8] * 5, nuggets))
for r, m in zip(result, model):
self.assertAlmostEqual(r, m, places=2)
def test_matern_r_switch(self):
# run the default with an extreme s value
# extract the actual function
f = matern.py_func
result = [24.64, 43.20, 81.68, 94.09, 99.65]
model = list(map(f, self.h, [50]*5, [100]*5, [0.5]*5))
assert_array_almost_equal(result, model, decimal=2)
class TestVariogramDecorator(unittest.TestCase):
def test_scalar(self):
@variogram
def scalar_function(a, b):
return a, b
a, b = 1, 4
self.assertEqual(scalar_function(1, 4), (a, b))
def test_list(self):
@variogram
def adder(l, a):
return l + a
res = [5, 8, 12]
for r, c in zip(res, adder([1, 4, 8], 4)):
self.assertEqual(r, c)
if __name__=='__main__':
unittest.main()
| mit | 5,541,188,158,651,838,000 | 26.371728 | 79 | 0.538256 | false |
damonmcminn/rosalind | algorithmic-heights/ddeg.py | 1 | 1230 | from sys import argv
def vertexCounts(xs):
# @xs list: edge list
# @return dict: map of vertex counts
firstLine = xs.pop(0)
vertices,edges = firstLine
counts = {}
while vertices > 0:
counts[vertices] = 0
vertices -= 1
for x in xs:
v1,v2 = x
counts[v1] += 1
counts[v2] += 1
return counts
def vertexNeighbours(xs):
# @xs list: edge list
# @return dict: map of list of vertex neighbours
vertices = xs.pop(0).pop(0)
neighbours = {}
while vertices > 0:
neighbours[vertices] = []
vertices -= 1
for pair in xs:
v1,v2 = pair
if v2 not in neighbours[v1]:
neighbours[v1].append(v2)
if v1 not in neighbours[v2]:
neighbours[v2].append(v1)
return neighbours
# edge list
data = open(argv[1], 'r').read().splitlines()
edgeList = [map(int, pair.split(' ')) for pair in data]
d1 = list(edgeList)
d2 = list(edgeList)
counts = vertexCounts(d1)
neighbours = vertexNeighbours(d2)
# this assumes ^ neighbours keys are sorted...
for vertex,neighbours in neighbours.iteritems():
total = 0
for neighbour in neighbours:
total += counts[neighbour]
print total,
| gpl-2.0 | -3,870,982,205,921,560,000 | 21.363636 | 55 | 0.600813 | false |
bjoernricks/python-quilt | tests/test_patch_lists.py | 1 | 2068 | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
""" Test operations that list patches """
import os.path
import sys
from unittest import TestCase
from quilt.db import Db
from quilt.patch import Patch
from six.moves import cStringIO
from helpers import run_cli, tmp_mapping, tmp_series
from quilt.cli.next import NextCommand
from quilt.cli.previous import PreviousCommand
from quilt.cli.series import SeriesCommand
class Test(TestCase):
def test_previous_only_unapplied(self):
env = self._setup_test_data()
with tmp_mapping(vars(sys)) as tmp_sys:
tmp_sys.set("stderr", cStringIO())
with self.assertRaises(SystemExit) as caught:
run_cli(PreviousCommand, dict(patch=None), *env)
self.assertEqual(caught.exception.code, 1)
self.assertIn("No patches applied", sys.stderr.getvalue())
def test_next_topmost(self):
env = self._setup_test_data()
output = run_cli(NextCommand, dict(patch=None), *env)
self.assertEqual("p1.patch\n", output)
def _setup_test_data(self):
data = os.path.join(os.path.dirname(__file__), "data")
patches = os.path.join(data, "push", "test2", "patches")
no_applied = os.path.join(data, "push", "test2")
return (patches, no_applied)
def test_series_v(self):
with tmp_series() as [dir, series]:
applied = Db(dir)
applied.add_patch(Patch("applied.patch"))
applied.add_patch(Patch("topmost.patch"))
applied.save()
series.add_patches(applied.applied_patches())
series.add_patch(Patch("unapplied.patch"))
series.save()
output = run_cli(SeriesCommand, dict(v=True),
series.dirname, applied.dirname)
self.assertMultiLineEqual(output,
"+ applied.patch\n"
"= topmost.patch\n"
" unapplied.patch\n")
| mit | -974,385,954,398,122,400 | 32.901639 | 70 | 0.633946 | false |
disulfidebond/ROO | runroo.py | 1 | 12469 | #!/usr/bin/python
import argparse
import sys
from runroo import clusterSSH
from runroo import qsubSSH
def formatCommandClusterSSH(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
if 'node' in c_dict:
t = c_dict['node']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'node\' not found')
if 'nodeNM' in c_dict:
t = c_dict['nodeNM']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'nodeNM\' not found')
return l
def formatCommandQsubSSH(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
if 'allocationName' in c_dict:
t = c_dict['allocationName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'allocationName\' not found')
if 'wallTime' in c_dict:
t = c_dict['wallTime']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'wallTime\' not found')
if 'queueName' in c_dict:
t = c_dict['queueName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'queueName\' not found')
return l
def formatCommandSingleNodeSSH(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
return l
def formatCommandClusterLSF(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
if 'queueName' in c_dict:
t = c_dict['queueName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'queueName\' not found')
sys.exit()
if 'jobName' in c_dict:
t = c_dict['jobName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'jobName\' not found')
sys.exit()
if 'projectName' in c_dict:
t = c_dict['projectName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'projectName\' not found')
sys.exit()
if 'wallTime' in c_dict:
t = c_dict['wallTime'] # format wallTime for LSF as NN not NN:NN:NN
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'wallTime\' not found')
sys.exit()
return l
def parseSimpleCommandList(s_list):
    listlen = len(s_list)
    ct = 0
    commandsList = []
    while ct < listlen:
        s_list_row = s_list[ct]
        # skip comment lines, keep everything else
        if s_list_row[0] != '#':
            commandsList.append(s_list_row)
        ct += 1
    return commandsList
def parseComplexCommandList(c_list):
    listlen = len(c_list)
    ct = 0
    commandsList = []
    while ct < listlen:
        c_list_row = c_list[ct]
        c_list_row_items = c_list_row.split(',')
        if len(c_list_row_items) == 1:
            commandsList.append(c_list_row_items)
        else:
            # collect the key=value pairs of this row into a dict
            c_list_row_dict = dict()
            for item in c_list_row_items:
                key, _, value = item.partition('=')
                c_list_row_dict[key] = value
            commandsList.append(c_list_row_dict)
        ct += 1
    return commandsList
def inputStartCommandFile(f):
l = []
with open(f, 'r') as cF:
for i in cF:
i = i.rstrip('\r\n')
l.append(i)
return l
def formatDescription():
print('Options for the command file:')
print('1) No command file: create a text document with the folowing\n###\nNONE ')
print('2) Typical commandFile:\n\n###\ninputFile=\n')
print('3) ###\ncommandFormat= # can be one of: \'clusterSSH, qsubSSH, clusterLSF, single-node-SSH\'\n')
print('### \n# clusterSSH:')
print('node=,n_ct=,nodeNM=\'\'')
print('### \n# qsubSSH:')
    print('n_ct=,allocationName=,wallTime=,queueName=')
    print('###\n clusterLSF:')
    print('n_ct=,queueName=,jobName=,projectName=,wallTime=')
    print('###\n single-node-SSH:')
    print('n_ct=')
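# Illustrative commands file for the qsubSSH format described above; all
# values (node count, allocation name, wall time, queue) are made-up
# placeholders, not taken from any real cluster:
#
#   ###
#   commandFormat=qsubSSH
#   ###
#   n_ct=4,allocationName=MyAllocation,wallTime=01:00:00,queueName=batch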
def parseStartCommandFile(l):
lRange = len(l)
l_1 = l[1]
parsedCommandList = []
l1_split = l_1.split('=')
try:
tmpVal = l1_split[1]
except IndexError:
if l1_split[0] == 'NONE':
return (0, [])
else:
print('Error, check formatting in commandsFile')
for i in l:
print(i)
sys.exit()
cvalue = ""
cvalue_list = []
cvalue_returnList = []
rowCt = 0
for i in xrange(0, lRange):
iRow = l[i]
if iRow[0] == '#':
continue
else: # 'clusterSSH, qsubSSH, clusterLSF, single-node-SSH'
if rowCt == 0:
iRow_split = iRow.split('=')
if iRow_split[1] == 'clusterSSH':
cvalue = iRow_split[1]
elif iRow_split[1] == 'qsubSSH':
cvalue = iRow_split[1]
elif iRow_split[1] == 'clusterLSF':
cvalue = iRow_split[1]
elif iRow_split[1] == 'single-node-SSH':
cvalue = iRow_split[1]
else:
print('Error, please check command line of commands File')
sys.exit()
rowCt += 2
elif rowCt == 2:
cvalue_tmp = dict()
                iRow_split = iRow.split(',')
cvalue_list.append(cvalue)
for v in iRow_split:
v_tmp = v.split('=')
cvalue_tmp[v_tmp[0]] = v_tmp[1]
if cvalue == 'clusterSSH': # n_ct, node, nodeNM
cvalue_returnList = formatCommandClusterSSH(cvalue_tmp, cvalue_list)
elif cvalue == 'qsubSSH': # n_ct, allocationName, wallTime, queueName
cvalue_returnList = formatCommandQsubSSH(cvalue_tmp, cvalue_list)
elif cvalue == 'clusterLSF': # n_ct, queueName, jobName, projectName, wallTime
cvalue_returnList = formatCommandClusterLSF(cvalue_tmp, cvalue_list)
elif cvalue == 'single-node-SSH': # n_ct
cvalue_returnList = formatCommandSingleNodeSSH(cvalue_tmp, cvalue_list)
else:
print('Error, action command in command file not recognized.')
sys.exit()
rowCt += 2
else:
continue
return (1, cvalue_returnList)
def main():
parser = argparse.ArgumentParser(description='Remote Organizational and Operational Tool: Root')
parser.add_argument('-a', '--action', choices=['check', 'start', 'stop', 'restart'], help='check monitors a run in progress, start begins a new run, stop halts a run, restart restarts a previously stopped run')
parser.add_argument('-i', '--inputFile', help='input file, its use is dependent on the action. Ignored for \'check\' and \'stop\' actions.')
parser.add_argument('-f', '--commandfile', help='file with formatted commands for the desired action. Note that this is REQUIRED, even if commandline arguments will be provided.')
parser.add_argument('-c', '--commandline', help='commandline arguments added directly to the program, not recommended.')
parser.add_argument('-s', '--show', help='show format description for command file')
args = parser.parse_args()
if args.show:
formatDescription()
sys.exit()
if args.action == 'check':
# code stub, implementation incomplete
print(args.action)
sys.exit()
if not args.commandfile:
print('No command file found, hope you know what you\'re doing! Attempting to monitor run with the provided parameters')
else:
print('Checking command file before proceeding.')
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
        if checkRes[0] == 1:
            pass  # TODO: monitor the run using the parsed command file
        else:
            pass  # proceed with no commandsFile
elif args.action == 'stop':
# code stub, implementation incomplete
print(args.action)
sys.exit()
if not args.commandfile:
print('No command file found, hope you know what you\'re doing! Attempting to halt run with the provided parameters')
else:
print('Checking command file before proceeding.')
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
        if checkRes[0] == 1:
            pass  # TODO: halt the run using the parsed command file
        else:
            pass  # proceed with no commandsFile
elif args.action == 'restart':
# code stub, implementation incomplete
print(args.action)
sys.exit()
        if not args.commandfile:
print('No command file has been found, and a command file is required for the restart action. If you are ABSOLUTELY sure that you do not want to use a command file, create a text file with ####\nNONE as the command file.')
sys.exit()
else:
print('Using command file ')
            print(args.commandfile)
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
if not args.inputFile:
print('No input file found, please check settings.')
sys.exit()
else:
print('Using input file ')
print(args.inputFile)
            if checkRes[0] == 1:
                pass  # TODO: restart the run using the parsed command file
            elif args.commandline:
                pass  # TODO: restart the run using commandline arguments
else:
print('Sorry, the command file was not read, and commands were not readable via commandline. Please chack the formatting and retry.\n\nNote that a command file will always be checked first, and to force commandline use you must add the line\n\n ###\nNONE \n\n to a command file')
sys.exit()
elif args.action == 'start':
        if not args.commandfile:
print('No command file has been found, and a command file is required for the start action. If you are ABSOLUTELY sure that you do not want to use a command file, create a text file with ####\nNONE as the command file.')
sys.exit()
else:
print('Using command file ')
            print(args.commandfile)
print('for start action')
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
if not args.inputFile:
print('No input file found, please check settings.')
sys.exit()
else:
print('Using input file ')
print(args.inputFile)
print('for start action')
if checkRes[0] == 1:
                args4Commands = checkRes[1]
if args4Commands[0] == 'clusterSSH':
clusterSSH(args.inputFile, args4Commands[1], args4Commands[2],args4Commands[3])
elif args4Commands[0] == 'qsubSSH':
qsubSSH(args.inputFile, args4Commands[1], args4Commands[2], args4Commands[3], args4Commands[4])
elif args4Commands[0] == 'clusterLSF':
print('Not implemented yet')
sys.exit()
clusterLSF(args.inputFile, args4Commands[1], args4Commands[2], args4Commands[3], args4Commands[4], args4Commands[5])
elif args4Commands[0] == 'single-node-SSH':
print('Not implemented yet')
sys.exit()
singleNodeSSH(args.inputFile, args4Commands[1])
elif args.commandline:
                pass  # parse arguments, determine action type, and start action
else:
print('Sorry, the command file was not read, and commands were not readable via commandline. Please chack the formatting and retry.\n\nNote that a command file will always be checked first, and to force commandline use you must add the line\n\n ###\nNONE \n\n to a command file')
sys.exit()
else:
print('error, unrecognized input!')
sys.exit()
if __name__ == "__main__":
main()
| mit | 4,549,582,067,404,025,000 | 38.334385 | 292 | 0.574304 | false |
matt77hias/FingerprintCompression | src/compression.py | 1 | 12580 | '''
3 Fingerprint compression
3.1 Compression
@author Matthias Moulin & Vincent Peeters
@version 1.0
'''
import cost
import numpy as np
import pywt
import quadtree
import utils
import wsq
###############################################################################
# COMPRESSION FUNCTIONS
###############################################################################
def compress_dwt2(S, fraction, wavelet="db4", mode=pywt.MODES.ppd, level=4, stats=[]):
'''
Computes the 2D discrete wavelet transformation for the given 2D input signal.
Sets all coefficients with an absolute value below the threshold * maximum of the absolute
values of the coefficients to zero.
Returns the inverse 2D discrete wavelet transformation for the modified coefficients
of the 2D discrete wavelet transformation.
@param S: Input signal.
Both single and double precision floating-point data types are supported
and the output type depends on the input type. If the input data is not
in one of these types it will be converted to the default double precision
data format before performing computations.
@param fraction: The fraction.
@param wavelet: Wavelet to use in the transform.
This must be a name of the wavelet from the wavelist() list.
@param mode: Signal extension mode to deal with the border distortion problem.
The default mode is periodization.
@param level: Number of decomposition steps to perform.
@return: The inverse 2D discrete wavelet transformation for the modified coefficients
of the 2D discrete wavelet transformation.
'''
# 2D discrete wavelet transform
A = pywt.wavedec2(S, wavelet=wavelet, mode=mode, level=level)
# Compression
maximum = np.amax(abs(A[0]))
for (CH, CV, CD) in A[1:]:
maximum = max(maximum, np.amax(abs(CH)), np.amax(abs(CV)), np.amax(abs(CD)))
threshold = fraction * maximum
B = [pywt.thresholding.hard(A[0], threshold, 0)]
for (CH, CV, CD) in A[1:]:
CCH = pywt.thresholding.hard(CH, threshold, 0)
CCV = pywt.thresholding.hard(CV, threshold, 0)
CCD = pywt.thresholding.hard(CD, threshold, 0)
B.append((CCH, CCV, CCD))
n = utils.number_of_large_coeffs(utils.concat_coeffs2(B), threshold=threshold)
stats.append(n)
# 2D inverse discrete wavelet transform
return pywt.waverec2(B, wavelet=wavelet, mode=mode)
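# Minimal usage sketch (assumes `S` is a 2D numpy array such as a grayscale
# fingerprint image; the fraction value is arbitrary):
#   R = compress_dwt2(S, fraction=0.01)
# Only coefficients with |c| >= 0.01 * max(|c|) survive the hard threshold
# before the inverse transform is applied.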
def compress_wp2(S, fraction, costf=cost.cost_shannon, wavelet="db4", mode=pywt.MODES.ppd, level=4, stats=[]):
'''
Computes the 2D discrete wavelet packet transformation, with the best basis according
to the given cost function, for the given 2D input signal.
Sets all coefficients with an absolute value below the threshold * maximum of the absolute
values of the coefficients to zero.
Returns the inverse 2D discrete wavelet packet transformation for the modified coefficients
of the 2D discrete wavelet packet transformation.
@param S: Input signal.
Both single and double precision floating-point data types are supported
and the output type depends on the input type. If the input data is not
in one of these types it will be converted to the default double precision
data format before performing computations.
@param fraction: The fraction.
@param costf: The (single parameter) cost function that must be used while
searching for the best basis.
@param wavelet: Wavelet to use in the transform.
This must be a name of the wavelet from the wavelist() list.
@param mode: Signal extension mode to deal with the border distortion problem.
The default mode is periodization.
@param level: Number of decomposition steps to perform.
@return: The inverse 2D discrete wavelet packet transformation for the modified coefficients
of the 2D discrete wavelet packet transformation.
'''
# 2D discrete wavelet packet transform
Nodes = quadtree.wp2(S, costf, wavelet=wavelet, mode=mode, level=level)
# Compression
maximum = -1
for Node in Nodes:
maximum = max(maximum, np.amax(abs(Node.C)))
threshold = fraction * maximum
for Node in Nodes:
Node.C = pywt.thresholding.hard(Node.C, threshold, 0)
n = 0
for Node in Nodes:
n = n + utils.number_of_large_coeffs(Node.C, threshold=threshold)
stats.append(n)
# 2D inverse discrete wavelet packet transform
return quadtree.iwp2(Nodes, wavelet=wavelet, mode=mode)
def compress_sd(S, fraction, wavelet="db4", mode=pywt.MODES.ppd, stats=[]):
'''
Computes the subband decomposition for fingerprints for the given 2D input signal.
Sets all coefficients with an absolute value below the threshold * maximum of the absolute
values of the coefficients to zero.
Returns the inverse subband decomposition for fingerprints for the modified coefficients
of the subband decomposition for fingerprints.
@param S: Input signal.
Both single and double precision floating-point data types are supported
and the output type depends on the input type. If the input data is not
in one of these types it will be converted to the default double precision
data format before performing computations.
@param fraction: The fraction.
@param wavelet: Wavelet to use in the transform.
This must be a name of the wavelet from the wavelist() list.
@param mode: Signal extension mode to deal with the border distortion problem.
The default mode is periodization.
@return: The inverse subband decomposition for fingerprints for the modified coefficients
of the subband decomposition for fingerprints.
'''
# 2D discrete wavelet packet transform
Nodes = wsq.sd(S, wavelet=wavelet, mode=mode)
# Compression
maximum = -1
for Node in Nodes:
maximum = max(maximum, np.amax(abs(Node.C)))
threshold = fraction * maximum
for Node in Nodes:
Node.C = pywt.thresholding.hard(Node.C, threshold, 0)
n = 0
for Node in Nodes:
n = n + utils.number_of_large_coeffs(Node.C, threshold=threshold)
stats.append(n)
# 2D inverse discrete wavelet packet transform
return wsq.isd(Nodes, wavelet=wavelet, mode=mode)
###############################################################################
# COMPRESSION UTILITIES
###############################################################################
def mse(S1, S2):
'''
Returns the mean squared error of the compressed 2D signal S2
against the original 2D signal S1.
@param S1: The original 2D signal
@param S2: The compressed 2D signal
'''
D = S1-S2
return (float(np.sum(np.multiply(D, D)))) / (D.shape[0]*D.shape[1])
def best_fit(S1, S2):
(m, n) = S1.shape
(p, q) = S2.shape
bi = bj = -1
best = np.inf
for i in range(p - m + 1):
for j in range(q - n + 1):
error = mse(S1, S2[i:i+m,j:j+n])
if error < best:
best = error
bi = i
bj = j
return (S2[bi:bi+m,bj:bj+n], best)
###############################################################################
# TESTS
###############################################################################
import configuration as c
import cv2
import pylab
write_intermediate_results = True
# Note that it would of course be cleaner to change the fraction
# multiple times between the analysis and the synthesis
# but this is just a test method
def compare(fname, fractions, wavelet="db4", mode=pywt.MODES.ppd, level=4):
stats_dwt2 = []
stats_wp2_s = []
stats_wp2_t = []
S = 255 - cv2.imread(fname, 0)
E1 = np.zeros(fractions.shape)
E2 = np.zeros(fractions.shape)
E3 = np.zeros(fractions.shape)
i = 0
for f in fractions:
R1 = compress_dwt2(S, f, wavelet=wavelet, mode=mode, level=level, stats=stats_dwt2)[level:-level,level:-level]
R2 = compress_wp2(S, f, costf=cost.cost_shannon, wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_s)[level:-level,level:-level]
R3 = compress_wp2(S, f, costf=cost.cost_threshold(0.01), wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_t)[level:-level,level:-level]
R = S[level:-level,level:-level]
(R1, e1) = best_fit(R, R1)
(R2, e2) = best_fit(R, R2)
(R3, e3) = best_fit(R, R3)
if write_intermediate_results:
S1 = 255 - np.array(R1, dtype=np.uint8)
S2 = 255 - np.array(R2, dtype=np.uint8)
S3 = 255 - np.array(R3, dtype=np.uint8)
cv2.imwrite(str(i) + "_dwt_" + str(f) + " " + str(e1) + ".png", S1)
cv2.imwrite(str(i) + "_wp_s_" + str(f) + " " + str(e2) + ".png", S2)
cv2.imwrite(str(i) + "_wp_t_" + str(f) + " " + str(e3) + ".png", S3)
E1[i] = e1
E2[i] = e2
E3[i] = e3
i = i + 1
pylab.figure()
pylab.loglog(fractions, E1, label='DWT')
pylab.loglog(fractions, E2, label='WP Shannon')
pylab.loglog(fractions, E3, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Mean Square Error")
pylab.legend(loc=2)
pylab.show()
pylab.figure()
pylab.loglog(fractions, stats_dwt2, label='DWT')
pylab.loglog(fractions, stats_wp2_s, label='WP Shannon')
pylab.loglog(fractions, stats_wp2_t, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Number of large coefficients")
pylab.legend(loc=2)
pylab.show()
def compare2(fname, fractions, costf=cost.cost_shannon, wavelet="db4", mode=pywt.MODES.ppd):
stats_sd = []
stats_wp2_s = []
stats_wp2_t = []
level = 5
S = 255 - cv2.imread(fname, 0)
E1 = np.zeros(fractions.shape)
E2 = np.zeros(fractions.shape)
E3 = np.zeros(fractions.shape)
i = 0
for f in fractions:
R1 = compress_sd(S, f, wavelet=wavelet, mode=mode, stats=stats_sd)[level:-level,level:-level]
R2 = compress_wp2(S, f, costf=cost.cost_shannon, wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_s)[level:-level,level:-level]
R3 = compress_wp2(S, f, costf=cost.cost_threshold(0.01), wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_t)[level:-level,level:-level]
R = S[level:-level,level:-level]
(R1, e1) = best_fit(R, R1)
(R2, e2) = best_fit(R, R2)
(R3, e3) = best_fit(R, R3)
if write_intermediate_results:
S1 = 255 - np.array(R1, dtype=np.uint8)
S2 = 255 - np.array(R2, dtype=np.uint8)
S3 = 255 - np.array(R3, dtype=np.uint8)
cv2.imwrite(str(i) + "_sd_" + str(f) + " " + str(e1) + ".png", S1)
cv2.imwrite(str(i) + "_wp_s_" + str(f) + " " + str(e2) + ".png", S2)
cv2.imwrite(str(i) + "_wp_t_" + str(f) + " " + str(e3) + ".png", S3)
E1[i] = e1
E2[i] = e2
E3[i] = e3
i = i + 1
pylab.figure()
pylab.loglog(fractions, E1, label='SD')
pylab.loglog(fractions, E2, label='WP Shannon')
pylab.loglog(fractions, E3, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Mean Square Error")
pylab.legend(loc=2)
pylab.show()
pylab.figure()
pylab.loglog(fractions, stats_sd, label='SD')
pylab.loglog(fractions, stats_wp2_s, label='WP Shannon')
pylab.loglog(fractions, stats_wp2_t, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Number of large coefficients")
pylab.legend(loc=2)
pylab.show()
if __name__ == "__main__":
fname = c.get_dir_fingerprints() + "cmp00001.pgm"
fractions = np.append([0.0], np.power(10, np.arange(-20.0, 0.0, 0.5)))
#fractions = np.append([0.0], np.power(10, np.arange(-5.0, 0.0, 1.0)))
compare(fname, fractions)
fname = c.get_dir_fingerprints() + "cmp00002.pgm"
fractions = np.append([0.0], np.power(10, np.arange(-20.0, 0.0, 0.5)))
#fractions = np.append([0.0], np.power(10, np.arange(-5.0, 0.0, 1.0)))
#compare2(fname, fractions) | gpl-3.0 | -393,442,632,637,934,200 | 41.938567 | 151 | 0.596741 | false |
disqus/pgshovel | src/main/python/pgshovel/replication/streams/kafka.py | 1 | 4243 | from __future__ import absolute_import
import logging
from itertools import imap
from kafka.consumer.simple import SimpleConsumer
from kafka.client import KafkaClient
from pgshovel.replication.validation import validate_state
from pgshovel.interfaces.streams_pb2 import Message
from pgshovel.streams.utilities import prime_for_batch_start
from pgshovel.utilities.protobuf import BinaryCodec
logger = logging.getLogger(__name__)
class KafkaStream(object):
def __init__(self, cluster, set, hosts, topic, prime_threshold):
self.cluster = cluster
self.set = set
self.hosts = hosts
self.topic = topic
self.codec = BinaryCodec(Message)
self.prime_threshold = prime_threshold
def consume(self, state):
"""
Starts consuming from the configured Kafka topic given a possible
existing ``pgshovel.interfaces.replication_pb2:State``.
If the provided ``state`` does not contain a
``stream_state.consumer_state`` value, the ``KafaStream`` attempts to
start reading from the Kafka topic after first "priming" the stream.
Priming involves consuming messages from the topic looking for a
``BeginOperation``. Any message that is not a ``BeginOperation`` is
dropped, until a ``BeginOperation`` is seen or the ``prime_threshold``
is reached. The latter of which raises a
``pgshovel.streams.utilities:UnableToPrimeError`` error.
In general, it makes sense to set the ``prime_threshold`` to high enough
value that exceeds the max transaction size you expect to see in your
data. Generally speaking a ``prime_threshold`` can effectively be
infinite (and you could construct the stream with ``float('inf')``,
however the lack of a ``BeginOperation`` in the stream would cause the
stream to hang, possibly forever, so the ``prime_threshold`` config
parameter is provided to raise an exception if this unexpected behavior
occurs.
"""
consumer = SimpleConsumer(KafkaClient(self.hosts), None, self.topic)
# You can only update one offset at a time with kafka-python, plus
# dealing with reconstituting global order from a partitioned stream is
# hard we don't really need to deal with it right now.
assert len(consumer.offsets) is 1
decoded = imap(
lambda (offset, msg): (offset, self.codec.decode(msg.value)),
consumer
)
if state.stream_state.HasField('consumer_state'):
# Seeking to a direct offset was not in the PyPI release of
# kafka-python when this was implemented:
# https://github.com/mumrah/kafka-python/pull/412
current = consumer.offsets[0]
offset = state.stream_state.consumer_state.offset + 1
delta = offset - current
logger.debug('Moving to previous replication log offset: %s (current position: %s)...', offset, current)
consumer.seek(delta, 1)
assert consumer.offsets[0] == offset
else:
logger.info('No consumer state provided, will attempt to prime to begin BeginOperation')
# The call to ``prime_for_batch_start`` "primes" the stream by
# dropping messages until it sees a message that is an intance of
# one of the types in
# ``pgshovel.replication.validation.TRANSACTION_START_EVENT_TYPES``
decoded = prime_for_batch_start(
max_messages=self.prime_threshold,
stream=decoded
)
for offset, message in decoded:
state = validate_state(state, offset, message)
# XXX: This is necessary because of a bug in protocol buffer oneof.
state = type(state).FromString(state.SerializeToString())
yield state, offset, message
@classmethod
def configure(cls, configuration, cluster, set):
topic = '{cluster}.{set}.mutations'.format(cluster=cluster.name, set=set)
return cls(
cluster,
set,
configuration['hosts'],
topic,
configuration.get('prime_threshold', 1000)
)
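# Illustrative wiring (the host list and threshold below are hypothetical):
#   stream = KafkaStream.configure(
#       {'hosts': ['kafka:9092'], 'prime_threshold': 500}, cluster, set)
#   for state, offset, message in stream.consume(state):
#       ...  # persist the new state and apply the mutation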
| apache-2.0 | 2,423,902,336,117,804,500 | 41.858586 | 116 | 0.649305 | false |
Nyancoins/NyanFaucet | nyanfaucet/nyanfaucet/nyandrill.py | 1 | 1660 | import mandrill
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
"""
import logging
import django.utils.log
class MandrillLogger(django.utils.log.AdminEmailHandler):
def __init__(self, *args, **kwargs):
super(MandrillLogger, self).__init__()
self.client = mandrill.Mandrill(settings.MANDRILL_SECRET)
def send_mail(self, subject, message, *args, **kwargs):
admins = []
for name, email in settings.ADMINS:
admins.append({
'name': name,
'email': email,
})
msg = {
'to': admins,
'subject': subject,
'text': message,
}
print "sending mail", msg
self.client.messages.send(msg)
"""
class MandrillBackend(BaseEmailBackend):
def __init__(self, fail_silently = False, **kwargs):
super(MandrillBackend, self).__init__(fail_silently, **kwargs)
self.client = mandrill.Mandrill(settings.MANDRILL_SECRET)
def send_messages(self, email_messages):
if not email_messages:
return
for msg in email_messages:
"""to = []
for r in msg.recipients():
to.append({
'email': r,
})
mm = {
'to': to,
'subject': msg.subject,
'from_email': msg.from_email,
'text': msg.message().as_bytes(),
}
self.client.messages.send(mm, async=True)"""
self.client.messages.send_raw(raw_message=msg.message().as_bytes(), async=True)
| mit | 2,990,257,469,511,667,000 | 28.122807 | 91 | 0.536747 | false |
anchore/anchore-engine | tests/functional/clients/standalone/test_file_list.py | 1 | 11561 | import pytest
# from result[0]['image']['imagedata']['analysis_report']['file_list']['files.all']['base']
# generated with:
# files_all_subset = [random.choice(list(files_all.items())) for i in range(20)]
files_all_subset = [
(
"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc",
"0o644",
),
(
"/usr/lib64/python3.6/lib2to3/fixes/__pycache__/fix_intern.cpython-36.opt-1.pyc",
"0o644",
),
("/usr/lib/dracut/modules.d/80lvmmerge/README.md", "0o644"),
("/usr/lib64/libip6tc.so.0.1.0", "0o755"),
(
"/usr/lib/python3.6/site-packages/setuptools/_vendor/__pycache__/six.cpython-36.opt-1.pyc",
"0o644",
),
("/usr/lib/.build-id/8e/9191dffa9f716362829472319d7834fadadc5a", "0o777"),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__",
"0o755",
),
("/usr/share/licenses/libseccomp/LICENSE", "0o644"),
("/usr/lib64/python3.6/__pycache__/copy.cpython-36.opt-1.pyc", "0o644"),
("/usr/lib64/python3.6/encodings/__pycache__/cp865.cpython-36.pyc", "0o644"),
("/usr/share/zoneinfo/iso3166.tab", "0o644"),
("/etc/host.conf", "0o644"),
("/usr/share/zoneinfo/right/America/Catamarca", "0o644"),
("/etc/libaudit.conf", "0o640"),
("/usr/lib/systemd/catalog/systemd.pt_BR.catalog", "0o644"),
("/usr/lib/systemd/system/dracut-shutdown.service", "0o777"),
("/usr/lib/.build-id/66/29051069454db7e5e097271a21c6bcc26d7f8d", "0o777"),
("/usr/share/licenses/libverto", "0o755"),
("/etc/ld.so.conf.d/bind-export-aarch64.conf", "0o644"),
("/usr/lib/systemd/system/dracut-initqueue.service", "0o777"),
]
allinfo_subset = [
(
"/usr/share/zoneinfo/posix/Australia/Currie",
'{"name": "/usr/share/zoneinfo/posix/Australia/Currie", "fullpath": '
'"/usr/share/zoneinfo/posix/Australia/Currie", "size": 2223, "mode": 33188, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"file", "othernames": {"/usr/share/zoneinfo/posix/Australia/Currie": '
"true}}",
),
(
"/usr/share/systemd/kbd-model-map",
'{"name": "/usr/share/systemd/kbd-model-map", "fullpath": '
'"/usr/share/systemd/kbd-model-map", "size": 3564, "mode": 33188, "uid": 0, '
'"gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": "file", '
'"othernames": {"/usr/share/systemd/kbd-model-map": true}}',
),
(
"/usr/share/zoneinfo/right/Etc/GMT",
'{"name": "/usr/share/zoneinfo/right/Etc/GMT", "fullpath": '
'"/usr/share/zoneinfo/right/Etc/GMT", "size": 667, "mode": 33188, "uid": 0, '
'"gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": "file", '
'"othernames": {"/usr/share/zoneinfo/right/Etc/GMT": true}}',
),
(
"/usr/share/zoneinfo/posix/Etc",
'{"name": "/usr/share/zoneinfo/posix/Etc", "fullpath": '
'"/usr/share/zoneinfo/posix/Etc", "size": 0, "mode": 16877, "uid": 0, "gid": '
'0, "linkdst": null, "linkdst_fullpath": null, "type": "dir", "othernames": '
'{"/usr/share/zoneinfo/posix/Etc": true}}',
),
(
"/usr/bin/gpgv",
'{"name": "/usr/bin/gpgv", "fullpath": "/usr/bin/gpgv", "size": 498056, '
'"mode": 33261, "uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": '
'null, "type": "file", "othernames": {"/usr/bin/gpgv": true}}',
),
(
"/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc",
'{"name": "/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc", "size": '
'8145, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc": true}}',
),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc", '
'"size": 11727, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc": '
"true}}",
),
(
"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc", '
'"size": 1568, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc": '
"true}}",
),
(
"/usr/share/zoneinfo/America/Argentina/San_Juan",
'{"name": "/usr/share/zoneinfo/America/Argentina/San_Juan", "fullpath": '
'"/usr/share/zoneinfo/America/Argentina/San_Juan", "size": 1123, "mode": '
'33188, "uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, '
'"type": "file", "othernames": '
'{"/usr/share/zoneinfo/America/Argentina/San_Juan": true}}',
),
(
"/usr/share/tabset/vt100",
'{"name": "/usr/share/tabset/vt100", "fullpath": "/usr/share/tabset/vt100", '
'"size": 160, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/share/tabset/vt100": true}}',
),
(
"/usr/share/zoneinfo/posix/America/Dominica",
'{"name": "/usr/share/zoneinfo/posix/America/Dominica", "fullpath": '
'"/usr/share/zoneinfo/posix/America/Dominica", "size": 170, "mode": 33188, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"file", "othernames": {"/usr/share/zoneinfo/posix/America/Dominica": '
"true}}",
),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc", '
'"size": 113, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc": '
"true}}",
),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc", '
'"size": 2539, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc": '
"true}}",
),
(
"/usr/lib/systemd/system/systemd-user-sessions.service",
'{"name": "/usr/lib/systemd/system/systemd-user-sessions.service", '
'"fullpath": "/usr/lib/systemd/system/systemd-user-sessions.service", '
'"size": 636, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/systemd/system/systemd-user-sessions.service": true}}',
),
(
"/usr/share/pki/ca-trust-source/anchors",
'{"name": "/usr/share/pki/ca-trust-source/anchors", "fullpath": '
'"/usr/share/pki/ca-trust-source/anchors", "size": 0, "mode": 16877, "uid": '
'0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": "dir", '
'"othernames": {"/usr/share/pki/ca-trust-source/anchors": true}}',
),
(
"/usr/lib64/python3.6/collections/__pycache__",
'{"name": "/usr/lib64/python3.6/collections/__pycache__", "fullpath": '
'"/usr/lib64/python3.6/collections/__pycache__", "size": 0, "mode": 16877, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"dir", "othernames": {"/usr/lib64/python3.6/collections/__pycache__": '
"true}}",
),
(
"/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9",
'{"name": "/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9", '
'"fullpath": "/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9", '
'"size": 40, "mode": 41471, "uid": 0, "gid": 0, "linkdst": '
'"../../../../usr/lib64/gconv/NATS-DANO.so", "linkdst_fullpath": '
'"/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9", "type": '
'"slink", "othernames": '
'{"/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9": true, '
'"../../../../usr/lib64/gconv/NATS-DANO.so": true}}',
),
(
"/usr/share/licenses/zlib",
'{"name": "/usr/share/licenses/zlib", "fullpath": '
'"/usr/share/licenses/zlib", "size": 0, "mode": 16877, "uid": 0, "gid": 0, '
'"linkdst": null, "linkdst_fullpath": null, "type": "dir", "othernames": '
'{"/usr/share/licenses/zlib": true}}',
),
(
"/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2",
'{"name": "/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2", '
'"fullpath": "/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2", '
'"size": 25, "mode": 41471, "uid": 0, "gid": 0, "linkdst": '
'"../../../../usr/bin/ipcrm", "linkdst_fullpath": '
'"/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2", "type": '
'"slink", "othernames": '
'{"/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2": true, '
'"../../../../usr/bin/ipcrm": true}}',
),
(
"/usr/lib64/python3.6/email/mime/__pycache__",
'{"name": "/usr/lib64/python3.6/email/mime/__pycache__", "fullpath": '
'"/usr/lib64/python3.6/email/mime/__pycache__", "size": 0, "mode": 16877, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"dir", "othernames": {"/usr/lib64/python3.6/email/mime/__pycache__": '
"true}}",
),
]
@pytest.mark.parametrize("path,metadata", allinfo_subset)
def test_allinfo(path, metadata, analyzed_data):
report = analyzed_data()
data = report["image"]["imagedata"]["analysis_report"]["file_list"][
"files.allinfo"
]["base"]
assert data[path] == metadata
@pytest.mark.parametrize("_file,bit", files_all_subset)
def test_files_all(_file, bit, analyzed_data):
report = analyzed_data()
data = report["image"]["imagedata"]["analysis_report"]["file_list"]["files.all"][
"base"
]
assert data[_file] == bit
| apache-2.0 | -722,782,270,653,081,500 | 48.618026 | 119 | 0.575123 | false |
dchaplinsky/pep.org.ua | pepdb/core/model/supplementaries.py | 1 | 10343 | # coding: utf-8
from __future__ import unicode_literals
import re
import os.path
from collections import OrderedDict
from glob import glob
from decimal import Decimal
from io import BytesIO
import random
import zlib
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy
from django.core.files.base import File
from django.contrib.postgres.fields import HStoreField
import PyPDF2
from cacheops import cached
from core.model.exc import WatermarkException
WATERMARKS = {}
for f in glob(settings.WATERMARKS_PATH):
name, _ = os.path.splitext(os.path.basename(f))
WATERMARKS[name] = PyPDF2.PdfFileReader(open(f, "rb")).getPage(0)
class Document(models.Model):
DOC_TYPE_CHOICES = OrderedDict(
(
("business_registry", ugettext_lazy("Виписки з реєстру компаній")),
("court_decision", ugettext_lazy("Рішення суду")),
("declarations", ugettext_lazy("Декларації")),
("real_estate_registry", ugettext_lazy("Виписки з реєстру нерухомості")),
("order_to_dismiss", ugettext_lazy("Накази про звільнення")),
("media", ugettext_lazy("Публікація в медіа")),
("decree", ugettext_lazy("Рішення")),
("report", ugettext_lazy("Звіти")),
("ownership_structure", ugettext_lazy("Структури власності")),
("misc", ugettext_lazy("Інші документи")),
("other", ugettext_lazy("Неможливо ідентифікувати")),
)
)
DOC_TYPE_TO_WATERMARK = [
"misc",
"other",
"business_registry",
"court_decision",
"real_estate_registry",
"order_to_dismiss",
"decree",
"report",
"ownership_structure",
]
doc = models.FileField("Файл", upload_to="documents", max_length=1000)
doc_watermarked = models.FileField(
"Файл з водяним знаком", upload_to="documents/_", max_length=1000, blank=True
)
name = models.CharField("Людська назва", max_length=255)
uploaded = models.DateTimeField("Був завантажений", auto_now=True)
source = models.CharField("Першоджерело", blank=True, max_length=255)
uploader = models.ForeignKey(
User, verbose_name="Хто завантажив", related_name="pep_document"
)
hash = models.CharField("Хеш", max_length=40, blank=True)
comments = models.TextField("Коментарі", blank=True)
doc_type = models.CharField(
"Тип документу",
max_length=25,
choices=DOC_TYPE_CHOICES.items(),
default="other",
)
doc_type_set_manually = models.BooleanField(
"Тип документу був встановлений вручну", default=False
)
@staticmethod
def autocomplete_search_fields():
return ("id__iexact", "name__icontains", "source__icontains")
@property
def doc_url(self):
if self.doc_watermarked:
return self.doc_watermarked.url
else:
return self.doc.url
def guess_doc_type(self, force=False):
if not force and self.doc_type_set_manually:
return
outcome = "other"
filename = self.doc.name
PATTERNS = {
r"business?[-_\s]r?egistry": "business_registry",
r"court[-_\s]decision": "court_decision",
r"declaration": "declarations",
r"real[-_\s]property": "real_estate_registry",
r"property[-_\s]registry": "real_estate_registry",
r"land[-_\s]registry": "real_estate_registry",
r"real[-_\s]estate[-_\s]registry": "real_estate_registry",
r"order[-_\s]to[-_\s]dismiss": "order_to_dismiss",
r"звільнення": "order_to_dismiss",
r"decree": "decree",
r"report": "report",
r"raport": "report",
r"ownership[-_\s]structure": "ownership_structure",
}
for r, dtype in PATTERNS.items():
if re.search(r, filename, flags=re.I):
outcome = dtype
break
if outcome == "other":
if "_" in filename:
prefix, _ = filename.split("_", 1)
m = re.search(r"\.(\w+)$", prefix)
if m:
tld = m.group(1).lower()
if tld in ["ua", "com", "org", "info", "eu", "net", "tv"]:
outcome = "media"
self.doc_type = outcome
self.save()
def generate_watermark(self, force=False):
fname, ext = os.path.splitext(self.doc.name)
if self.doc_type not in self.DOC_TYPE_TO_WATERMARK:
return False
if self.doc_watermarked:
if not force:
return False
else:
self.doc_watermarked.delete()
watermark = WATERMARKS["a4_portrait"]
watermark_box = watermark.artBox
watermark_w = float(watermark_box[2] - watermark_box[0])
watermark_h = float(watermark_box[3] - watermark_box[1])
if ext.lower() == ".pdf":
try:
curr_file = PyPDF2.PdfFileReader(self.doc.file, strict=False)
pdf_writer = PyPDF2.PdfFileWriter()
for page_no in range(curr_file.getNumPages()):
curr_page = curr_file.getPage(page_no)
file_box = curr_page.artBox
file_w = float(file_box[2] - file_box[0])
file_h = float(file_box[3] - file_box[1])
scale = min(
file_w / (watermark_w + 0.01), file_h / (watermark_h + 0.01)
)
curr_page.mergeScaledPage(watermark, scale, expand=True)
pdf_writer.addPage(curr_page)
except IOError as e:
raise WatermarkException(
"Cannot find file {}, skipping".format(self.doc.name)
)
except (PyPDF2.utils.PdfReadError, ValueError, OSError) as e:
raise WatermarkException(
"Cannot read file {}, error was {}".format(self.doc.name, e)
)
except zlib.error as e:
raise WatermarkException(
"Cannot decompress page of {}, error was {}".format(
self.doc.name, e
)
)
with BytesIO() as fp:
pdf_writer.write(fp)
random.seed(self.pk)
try:
self.doc_watermarked.save(
"{}_{}_{}.pdf".format(
random.randrange(1000, 10000),
os.path.basename(fname)[:127],
random.randrange(1000, 10000),
),
File(fp),
)
except (OSError) as e:
raise WatermarkException(
"Cannot store watermark for file {}, error was {}".format(
self.doc.name, e
)
)
else:
return False
return True
def __unicode__(self):
return self.name
class Meta:
verbose_name = "Документ"
verbose_name_plural = "Документи"
class FeedbackMessage(models.Model):
person = models.CharField(ugettext_lazy("Про кого"), max_length=150, blank=True)
text = models.TextField(ugettext_lazy("Інформація"), blank=False)
link = models.URLField(ugettext_lazy("Підтвердження"), max_length=512, blank=True)
email = models.EmailField(ugettext_lazy("e-mail"), max_length=512, blank=True)
contacts = models.TextField(
ugettext_lazy("Ваше ім'я та контакти"), max_length=512, blank=True
)
read = models.BooleanField(ugettext_lazy("Прочитано"), default=False)
added = models.DateTimeField("Був надісланий", auto_now=True)
answered_by = models.ForeignKey(
User, on_delete=models.SET_NULL, verbose_name="Відповів", blank=True, null=True
)
answer_added = models.DateTimeField("Була надіслана", blank=True, null=True)
short_answer = models.TextField("Суть відповіді", blank=True, null=True)
read_and_agreed = models.BooleanField(
"Користувач підтвердив що прочитав часто задаваємі питання", default=False
)
class Meta:
verbose_name = "Зворотній зв'язок"
verbose_name_plural = "Зворотній зв'язок"
class ActionLog(models.Model):
user = models.ForeignKey(User, verbose_name="Користувач")
action = models.CharField(verbose_name="Дія", max_length=30)
timestamp = models.DateTimeField(verbose_name="Дата та час", auto_now_add=True)
details = models.TextField(verbose_name="Деталі", blank=True)
class Meta:
verbose_name = "Дія користувача"
verbose_name_plural = "Дії користувачів"
index_together = [["user", "action", "timestamp"]]
class ExchangeRateManager(models.Manager):
@cached(timeout=24 * 60 * 60)
def get_annual_rates(self):
"""
This will return annual rates
"""
rates = {}
for rate in self.filter(is_annual=True):
rates[rate.dt.year] = dict(
(k, Decimal("1.0") / Decimal(v)) for k, v in rate.rates.items()
)
return rates
class ExchangeRate(models.Model):
dt = models.DateField("Дата курсу", db_index=True)
is_annual = models.BooleanField(
"Is annual exchange rate (31.12.x)", default=False, db_index=True
)
rates = HStoreField()
objects = ExchangeRateManager()
class Meta:
ordering = ("-dt",)
verbose_name = "Курс валют"
verbose_name_plural = "Курси валют"
| mit | 6,859,312,694,315,056,000 | 34.053957 | 87 | 0.566752 | false |
mwhoffman/pygp | pygp/inference/basic.py | 1 | 2189 | """
Simple wrapper class for a Basic GP.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from ..utils.models import printable
from ..likelihoods import Gaussian
from ..kernels import SE, Matern
from .exact import ExactGP
__all__ = ['BasicGP']
@printable
class BasicGP(ExactGP):
"""
Basic GP frontend which assumes an ARD kernel and a Gaussian likelihood
(and hence performs exact inference).
"""
def __init__(self, sn, sf, ell, mu=0, ndim=None, kernel='se'):
likelihood = Gaussian(sn)
kernel = (
SE(sf, ell, ndim) if (kernel == 'se') else
Matern(sf, ell, 1, ndim) if (kernel == 'matern1') else
Matern(sf, ell, 3, ndim) if (kernel == 'matern3') else
Matern(sf, ell, 5, ndim) if (kernel == 'matern5') else None)
if kernel is None:
raise ValueError('Unknown kernel type')
super(BasicGP, self).__init__(likelihood, kernel, mu)
def _params(self):
# replace the parameters for the base GP model with a simplified
# structure and rename the likelihood's sigma parameter to sn (ie its
# the sigma corresponding to the noise).
params = [('sn', 1, True)]
params += self._kernel._params()
params += [('mu', 1, False)]
return params
@classmethod
def from_gp(cls, gp):
if not isinstance(gp._likelihood, Gaussian):
raise ValueError('BasicGP instances must have Gaussian likelihood')
if isinstance(gp._kernel, SE):
kernel = 'se'
elif isinstance(gp._kernel, Matern):
kernel = 'matern%d' % gp._kernel._d
else:
raise ValueError('BasicGP instances must have a SE/Matern kernel')
# get the relevant parameters.
sn = np.sqrt(gp._likelihood.s2)
sf = np.exp(gp._kernel._logsf)
ell = np.exp(gp._kernel._logell)
mu = gp._mean
# create the new gp and maybe add data.
newgp = cls(sn, sf, ell, mu)
if gp.ndata > 0:
X, y = gp.data
newgp.add_data(X, y)
return newgp
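# Construction sketch (the hyperparameter values below are arbitrary):
#   gp = BasicGP(sn=0.1, sf=1.0, ell=0.5, kernel='matern3')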
| bsd-2-clause | -7,655,531,662,997,177,000 | 30.271429 | 79 | 0.591138 | false |
mzajac/DBPediaExtender | src/sparql_access.py | 1 | 3962 | #!/usr/bin/env python
import urllib
import json
import sys
from urllib2 import unquote
from collections import defaultdict
from config import data_source, sparql_endpoint
def full_predicate_name(name):
return '%s/property/%s' % (data_source, name.decode('utf-8'))
def full_resource_name(name):
return '%s/resource/%s' % (data_source, name.decode('utf-8'))
def full_type_name(name):
return 'http://dbpedia.org/ontology/%s' % name
def strip_url_prefix(s):
return s[len(data_source) + len('/resource/') : ]
def get_data(query):
params = {
"query": query,
"format": "application/json"
}
request = urllib.urlencode(params)
response = urllib.urlopen(sparql_endpoint, request).read()
return json.loads(response)
def get_results(query):
data = get_data(query)['results']['bindings']
return [
unquote(strip_url_prefix(line['s']['value']).encode('utf-8'))
for line in data
]
def get_pairs(query):
data = get_data(query)['results']['bindings']
return [
(unquote(strip_url_prefix(line['s']['value']).encode('utf-8')), line['o']['value'])
for line in data
]
def select_all(d):
dd = {}
for c in ['s', 'p', 'o']:
if c not in d:
dd[c] = '?%c' % c
else:
dd[c] = '<' + d[c] + '>' if c != 'p' else '<' + full_predicate_name(d[c]) + '>'
query = 'SELECT * FROM <%s> WHERE {%s %s %s} ORDER BY ?s' % (data_source, dd['s'], dd['p'], dd['o'])
data = get_data(query)['results']['bindings']
ret = []
for line in data:
t = []
for c in ['s', 'p', 'o']:
if c in line:
value = line[c]['value']
if value.startswith('%s/resource/' % data_source):
value = strip_url_prefix(value)
value = unquote(value.encode('utf-8'))
t.append(value)
ret.append(tuple(t))
return ret
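# Example call (assumes the configured SPARQL endpoint is reachable; the
# property name is only an illustration). With the predicate fixed, each
# returned tuple holds just the subject and object values:
#   select_all({'p': 'stolica'})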
def select_types(predicate, subject=True):
whose_type = '?s' if subject else '?o'
query = '''SELECT ?s, ?type FROM <%s> WHERE {
?s <%s> ?o.
%s rdf:type ?type.
}''' % (data_source, full_predicate_name(predicate), whose_type)
data = get_data(query)['results']['bindings']
types_dict = defaultdict(list)
for line in data:
types_dict[line['s']['value']].append(line['type']['value'])
return [types for entity, types in types_dict.iteritems()]
def count_entities_of_type(type):
query = '''SELECT count(*) FROM <%s> WHERE {
?s a <%s>.
}''' % (data_source, type)
return int(get_data(query)['results']['bindings'][0]['callret-0']['value'])
def select_entities_of_type(type):
query = '''SELECT * FROM <%s> WHERE {
?s a <%s>.
}''' % (data_source, type)
return get_results(query)
def select_entities_of_type_not_in_relation(type, predicate):
#Queries like the one below don't work on Virtuoso version 6.1 (on 6.4 they do).
#Therefore I use two queries and join their results manually.
'''SELECT * WHERE {
{SELECT ?s WHERE {
?s <http://pl.dbpedia.org/property/populacja> ?o.
}}
MINUS
{{SELECT ?s WHERE {
?s <http://pl.dbpedia.org/property/stolica> ?o.
}}}
}'''
entities_of_type = select_entities_of_type(type)
entities_in_relation = set([s for s, o in select_all({'p': predicate})])
return filter(lambda e: e not in entities_in_relation, entities_of_type)
def select_entities_of_type_in_relation(type, predicate):
query = '''SELECT ?s, ?o FROM <%s> WHERE {
?s a <%s>.
?s <%s> ?o.
}''' % (data_source, full_type_name(type), full_predicate_name(predicate))
return get_pairs(query)
def select_all_entities():
query = '''SELECT DISTINCT ?s FROM <%s> WHERE {
?s ?p ?o.
}''' % data_source
return get_results(query)
if __name__ == '__main__':
pass
| gpl-3.0 | 4,306,332,467,730,327,000 | 31.47541 | 104 | 0.56209 | false |
stscieisenhamer/glue | glue/utils/decorators.py | 3 | 1050 | from __future__ import absolute_import, division, print_function
import traceback
__all__ = ['die_on_error', 'avoid_circular']
def die_on_error(msg):
"""
Non-GUI version of the decorator in glue.utils.qt.decorators.
In this case we just let the Python exception terminate the execution.
"""
def decorator(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
print('=' * 72)
print(msg + ' (traceback below)')
print('-' * 72)
traceback.print_exc()
print('=' * 72)
return wrapper
return decorator
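# Usage sketch (the decorated function is hypothetical):
#   @die_on_error('Could not load the data file')
#   def load_data(path):
#       ...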
def avoid_circular(meth):
def wrapper(self, *args, **kwargs):
if not hasattr(self, '_in_avoid_circular') or not self._in_avoid_circular:
self._in_avoid_circular = True
try:
return meth(self, *args, **kwargs)
finally:
self._in_avoid_circular = False
return wrapper
| bsd-3-clause | -6,189,258,101,026,877,000 | 28.166667 | 82 | 0.546667 | false |
LISTERINE/bagger | setup.py | 1 | 1499 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='bagger',
version='0.1.0',
description="Help with fast bagging",
long_description=readme + '\n\n' + history,
author="Jon Ferretti",
author_email='[email protected]',
url='https://github.com/LISTERINE/bagger',
packages=[
'bagger',
],
package_dir={'bagger':
'bagger'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='bagger',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=test_requirements
)
| mit | 2,591,394,478,602,773,500 | 25.298246 | 63 | 0.60974 | false |
uber/doubles | doubles/class_double.py | 1 | 1688 | from doubles.exceptions import UnallowedMethodCallError
from doubles.instance_double import InstanceDouble
from doubles.target import Target
from doubles.verification import verify_arguments
def patch_class(input_class):
"""Create a new class based on the input_class.
:param class input_class: The class to patch.
:rtype class:
"""
class Instantiator(object):
@classmethod
def _doubles__new__(self, *args, **kwargs):
pass
new_class = type(input_class.__name__, (input_class, Instantiator), {})
return new_class
class ClassDouble(InstanceDouble):
"""
A pure double representing the target class.
::
User = ClassDouble('myapp.User')
:param str path: The absolute module path to the class.
"""
is_class = True
def __init__(self, path):
super(ClassDouble, self).__init__(path)
self._doubles_target = patch_class(self._doubles_target)
self._target = Target(self._doubles_target)
def __call__(self, *args, **kwargs):
"""Verify arguments and proxy to _doubles__new__
:rtype obj:
:raises VerifyingDoubleArgumentError: If args/kwargs don't match the expected arguments of
__init__ of the underlying class.
"""
verify_arguments(self._target, '_doubles__new__', args, kwargs)
return self._doubles__new__(*args, **kwargs)
def _doubles__new__(self, *args, **kwargs):
"""Raises an UnallowedMethodCallError
NOTE: This method is here only to raise if it has not been stubbed
"""
raise UnallowedMethodCallError('Cannot call __new__ on a ClassDouble without stubbing it')
| mit | 168,691,691,816,461,950 | 29.142857 | 98 | 0.64455 | false |
pacoqueen/ginn | ginn/formularios/dynconsulta.py | 1 | 79919 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2013 Francisco José Rodríguez Bogado #
# <[email protected]> #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
###################################################################
## dynconsulta.py - Consulta dinámica resumen de análisis financiero
###################################################################
## Changelog:
## 8 de febrero de 2012 -> Inicio
###################################################################
from ventana import Ventana
from formularios import utils
import pygtk
pygtk.require('2.0')
import gtk, mx.DateTime
from framework import pclases
from framework.seeker import VentanaGenerica
old_float = float
from ventana_progreso import VentanaProgreso, VentanaActividad
from widgets import replace_widget
import pprint
from collections import defaultdict
try:
from collections import MutableMapping as transformedDictBase
except ImportError:
transformedDictBase = object
from informes.treeview2pdf import treeview2pdf
from informes.treeview2csv import treeview2csv
from formularios.reports import abrir_pdf, abrir_csv
import pango
import datetime
class TransformedDict(transformedDictBase):
"""
A dictionary which applies an arbitrary key-altering function before
accessing the keys"""
# From: http://stackoverflow.com/questions/3387691/
# python-how-to-perfectly-override-a-dict
def __init__(self, *args, **kwargs):
self.store = dict()
try:
self.update(dict(*args, **kwargs)) #use the free update to set keys
except AttributeError:
self.store.update(*args, **kwargs)
def __getitem__(self, key):
return self.store[self.__keytransform__(key)]
def __setitem__(self, key, value):
self.store[self.__keytransform__(key)] = value
def __delitem__(self, key):
del self.store[self.__keytransform__(key)]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def __keytransform__(self, key):
return key
def __str__(self):
return pprint.pformat(self.store)
def __repr__(self):
return pprint.pformat(self.store)
class MyMonthsDict(TransformedDict):
def __keytransform__(self, key):
try:
assert isinstance(key, (type(mx.DateTime.today()), datetime.date))
key = primero_de_mes(key)
except AssertionError:
anno = mx.DateTime.today().year
mes = mx.DateTime.today().month
if key < mes:
anno += 1
return mx.DateTime.DateFrom(anno, key, 1)
else:
return key
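# Illustrative behaviour of MyMonthsDict (small sketch, arbitrary dates): keys
# are normalized to the first day of their month, so any date within the same
# month addresses the same entry.
#     d = MyMonthsDict()
#     d[mx.DateTime.DateFrom(2012, 2, 17)] = 42
#     d[mx.DateTime.DateFrom(2012, 2, 1)]    # -> 42
# A bare integer is also accepted as a month number and is resolved against
# the current year, or the next one if that month has already passed.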
def activate(ch, ch_destino):
ch_destino.set_sensitive(ch.get_active())
class DynConsulta(Ventana, VentanaGenerica):
def __init__(self, objeto = None, usuario = None, mes_actual = None,
num_meses = 12):
"""
Constructor. objeto puede ser un objeto de pclases con el que
comenzar la ventana (en lugar del primero de la tabla, que es
el que se muestra por defecto).
"""
self.mes_actual = (mes_actual != None and mes_actual
or mx.DateTime.localtime().month)
self.update_mes_actual()
self.num_meses = num_meses != None and num_meses or 12
self.update_mes_final()
self.usuario = usuario
self.clase = None
self.precalc = MyMonthsDict()
self.dic_campos = {}
self.old_model = {}
Ventana.__init__(self, 'dynconsulta.glade', objeto, usuario = usuario)
connections = {'b_salir/clicked': self.salir,
'b_nuevo/clicked': self.nuevo,
'b_borrar/clicked': self.borrar,
'b_actualizar/clicked': self.actualizar_ventana,
# 'b_guardar/clicked': self.guardar,
'b_buscar/clicked': self.buscar,
'sp_mes_actual/value-changed': self.update_mes_actual,
'sp_num_meses/value-changed': self.update_mes_final,
'tv_datos/query-tooltip': self.tooltip_query,
'b_exportar/clicked': self.exportar,
'b_imprimir/clicked': self.imprimir
}
self.wids['ch_presupuesto'].set_active(True)
self.wids['ch_datos_reales'].set_active(True)
self.wids['ch_reales_mes0'].set_active(True)
self.wids['ch_datos_pdtes'].set_active(False)
self.wids['ch_datos_reales'].connect("toggled",
lambda ch, chdest: chdest.set_sensitive(ch.get_active()),
self.wids['ch_datos_pdtes'])
self.inicializar_ventana()
self.actualizar_ventana(None)
self.wids['ventana'].resize(800, 600)
self.add_connections(connections)
gtk.main()
def tooltip_query(self, treeview, x, y, mode, tooltip):
path = treeview.get_path_at_pos(x, y)
if path:
treepath, column = path[:2] # @UnusedVariable
model = treeview.get_model()
itr = model.get_iter(treepath)
            texto = model[itr][0].replace("&amp;", "&")
tooltip.set_text(texto)
return False
def es_diferente(self):
"""
Devuelve True si algún valor en ventana difiere de
los del objeto.
"""
return False
def update_mes_actual(self, spinbutton_mes = None):
try:
self.mes_actual = spinbutton_mes.get_value_as_int()
except AttributeError: # ¿No se ha creado el glade todavía?
glade_loaded = False
else:
glade_loaded = True
self.fecha_mes_actual = mx.DateTime.DateFrom(
mx.DateTime.localtime().year,
self.mes_actual,
1)
if glade_loaded:
self.inicializar_ventana()
self.actualizar_ventana(None)
return False # GtkEntry - did not receive focus-out-event. If you connect a handler to this signal, it must return FALSE so the entry gets the event as well
def update_mes_final(self, sp = None):
try:
self.num_meses = sp.get_value_as_int()
except AttributeError: # ¿No se ha cargado el glade todavía?
glade_loaded = False
else:
glade_loaded = True
mes_final = ((self.fecha_mes_actual.month-1 + self.num_meses) % 12) + 1
anno_final = self.fecha_mes_actual.year + (self.num_meses / 12)
while mes_final > 12:
anno_final += 1
mes_final -= 12
self.fecha_mes_final = mx.DateTime.DateFrom(anno_final,
mes_final,
1)
if self.fecha_mes_final < self.fecha_mes_actual:
self.fecha_mes_final = mx.DateTime.DateFrom(
self.fecha_mes_final.year + 1,
self.fecha_mes_final.month,
self.fecha_mes_final.day)
if glade_loaded:
self.inicializar_ventana()
self.actualizar_ventana(None)
return False # GtkEntry - did not receive focus-out-event. If you connect a handler to this signal, it must return FALSE so the entry gets the event as well
def inicializar_ventana(self):
"""
Inicializa los controles de la ventana, estableciendo sus
valores por defecto, deshabilitando los innecesarios,
rellenando los combos, formateando el TreeView -si lo hay-...
"""
self.wids['e_costes'].modify_text(gtk.STATE_NORMAL,
self.wids['e_costes'].get_colormap().alloc_color("red"))
self.wids['e_ingresos'].modify_text(gtk.STATE_NORMAL,
self.wids['e_ingresos'].get_colormap().alloc_color("blue"))
self.wids['e_costes'].set_property("xalign", 0.9)
self.wids['e_ingresos'].set_property("xalign", 0.9)
self.wids['e_total'].set_property("xalign", 0.9)
antiguo_tv_datos = self.wids['tv_datos']
nuevo_tv_datos = gtk.TreeView()
nuevo_tv_datos.show()
replace_widget(antiguo_tv_datos,nuevo_tv_datos)
self.wids['tv_datos'] = nuevo_tv_datos
self.wids['sp_mes_actual'].set_value(self.mes_actual)
self.wids['sp_num_meses'].set_value(self.num_meses)
self.activar_widgets(False)
self.wids['b_actualizar'].set_sensitive(True)
self.wids['b_guardar'].set_sensitive(False)
self.wids['b_buscar'].set_sensitive(True)
for b in ("b_nuevo", "b_guardar", "b_borrar"):
self.wids[b].set_property("visible", False)
# Inicialización del resto de widgets:
cols = [('Concepto', 'gobject.TYPE_STRING', False, True, True, None)]
if not self.mes_actual:
mes = mx.DateTime.localtime().month
else:
mes = self.mes_actual
for m in range(self.num_meses):
mescol = ((mes - 1 + m) % 12) + 1
fechacol = mx.DateTime.DateFrom(month = mescol,
year = mx.DateTime.localtime().year + (m > 0 and 1 or 0))
if mescol == 1:
strmes = fechacol.strftime("%B'%y")
else:
strmes = fechacol.strftime("%B")
cols.append((strmes,'gobject.TYPE_STRING',False,True,True,None))
cols += [('PUID', 'gobject.TYPE_STRING', False, False, False, None)]
utils.preparar_treeview(self.wids['tv_datos'], cols)
for n in range(1, self.num_meses + 1):
self.wids['tv_datos'].get_column(n).get_cell_renderers()[0]\
.set_property("xalign", 1)
col = self.wids['tv_datos'].get_column(0)
col.set_expand(True)
self.wids['tv_datos'].connect("row-activated", self.inspect)
self.wids['tv_datos'].set_tooltip_column(0)
self.wids['tv_datos'].connect("query-tooltip", self.tooltip_query)
self.colorear(self.wids['tv_datos'])
def colorear(self, tv):
"""
Pone en rojo los valores que han cambiado respecto a la última vez
que se actualizó el model.
"""
def cell_func(col, cell, model, itr, numcol):
# Extraigo valor numérico
valor = model[itr][numcol]
try:
valor_numerico = utils._float(valor)
except (TypeError, ValueError):
valor_numerico = None
# Color gradual en función de datos reales / datos precalculados
puid = model[itr][-1]
try:
real = self.cave[puid][numcol]
except KeyError: # Es defaultdict, pero por si acaso.
real = 0 # Puro presupuesto. Nada de valor real.
if valor_numerico and real:
try:
proporcion = 1.0 - (abs(real) / abs(valor_numerico))
grade = int(proporcion * 65535)
except ZeroDivisionError: # Por si acaso. XD
grade = 0
bg_color = gtk.gdk.Color(red = int(65535*0.9 + grade*0.1),
green = int(65535*0.7 + grade * 0.3),
blue = int(65535*0.1 + grade*0.9))
else:
bg_color = None # No hay valor o es otra cosa
# Extraigo valor anterior:
if not model.iter_parent(itr): # Es concepto de primer nivel
padre = model[itr][0]
try:
old_valor = self.old_model[padre]['valores'][numcol-1]
except (KeyError, IndexError):
old_valor = None
else:
padre = model[model.iter_parent(itr)][0]
hijo = model[itr][0]
try:
old_valor = self.old_model[padre]['hijos'][hijo][numcol-1]
except (KeyError, IndexError):
old_valor = None
# Color de cambio de valores respecto a "iteración" anterior
if self.old_model and old_valor != valor:
# Valor puede ser None porque es la primera vez que se muestran
# todos los datos y en ese caso no debe colorear.
cell.set_property("foreground", "dark green")
if not model.iter_parent(itr):
cell.set_property("weight", 4000)
cell.set_property("background", "gray")
else:
cell.set_property("weight", 400)
cell.set_property("background", "yellow")
else: # Coloreado de valores +/-
if not model.iter_parent(itr):
if valor_numerico != None:
if valor_numerico == 0:
color_valor = "white"
elif valor_numerico < 0:
color_valor = "red"
else:
color_valor = "blue"
else:
color_valor = "white"
cell.set_property("foreground", color_valor)
cell.set_property("weight", 4000)
cell.set_property("background", "gray")
else:
if valor_numerico != None:
if valor_numerico == 0:
color_valor = None
elif valor_numerico < 0:
color_valor = "red"
else:
color_valor = "blue"
else:
color_valor = "white"
cell.set_property("foreground", color_valor)
cell.set_property("weight", 400)
# Si no ha cambiado y no es una fila "cabecera", entonces
# coloreo el fondo según la gradación de datos reales.
cell.set_property("background", bg_color)
cols = tv.get_columns()
for i in xrange(1, len(cols)):
column = cols[i]
cells = column.get_cell_renderers()
for cell in cells:
column.set_cell_data_func(cell, cell_func, i)
def inspect(self, tv, path, col):
"""
Muestra de dónde vienen los datos precalculados.
"""
indexcol = get_col_pos(tv, col)
if indexcol > 0:
mes = (self.mes_actual + indexcol - 1) % 12 #self.num_meses
model = tv.get_model()
valor = model[path][indexcol]
if utils._float(valor) == 0:
return
concepto = pclases.getObjetoPUID(model[path][-1])
if not isinstance(concepto, pclases.PresupuestoAnual):
# Los resúmenes no los muestro, que vayan al detalle.
concepto_desc = concepto.descripcion
txt_inspect = "%s (%s): %s = \n" % (
concepto_desc, col.get_property("title"), valor)
resultados = []
for o, importe, tm in self.tracking[mes][concepto]:
resultados.append((o.puid, o.get_info(), importe, tm))
to_open = utils.dialogo_resultado(resultados,
titulo = "INSPECCIONAR VALOR «%s»" % valor,
padre = self.wids['ventana'],
cabeceras = ['Cód. interno', 'Descripción',
'Importe', 'Toneladas'],
texto = txt_inspect)
if to_open > 0:
objeto = pclases.getObjetoPUID(to_open)
if isinstance(objeto, (pclases.ServicioTomado,
pclases.LineaDeCompra)):
if objeto.facturaCompra:
from formularios import facturas_compra
v = facturas_compra.FacturasDeEntrada( # @UnusedVariable
objeto = objeto.facturaCompra,
usuario = self.usuario)
elif objeto.albaranEntrada:
from formularios import albaranes_de_entrada
v = albaranes_de_entrada.AlbaranesDeEntrada( # @UnusedVariable
objeto = objeto.albaranEntrada,
usuario = self.usuario)
elif isinstance(objeto, (pclases.Servicio,
pclases.LineaDeVenta)):
if objeto.facturaVenta:
from formularios import facturas_venta
v = facturas_venta.FacturasVenta( # @UnusedVariable
objeto = objeto.facturaVenta,
usuario = self.usuario)
elif objeto.prefactura:
from formularios import prefacturas
v = prefacturas.Prefacturas( # @UnusedVariable
objeto = objeto.prefactura,
usuario = self.usuario)
elif objeto.albaranSalida:
from formularios import albaranes_de_salida
v = albaranes_de_salida.AlbaranesDeSalida( # @UnusedVariable
objeto = objeto.albaranSalida,
usuario = self.usuario)
elif isinstance(objeto, pclases.FacturaVenta):
from formularios import facturas_venta # @Reimport
v = facturas_venta.FacturasVenta( # @UnusedVariable
objeto = objeto,
usuario = self.usuario)
elif isinstance(objeto, pclases.FacturaCompra):
from formularios import facturas_compra # @Reimport
v = facturas_compra.FacturasDeEntrada( # @UnusedVariable
objeto = objeto,
usuario = self.usuario)
elif isinstance(objeto,
pclases.VencimientoValorPresupuestoAnual):
from formularios import presupuestos
v = presupuestos.Presupuestos( # @UnusedVariable
objeto = objeto,
usuario = self.usuario)
# PORASQUI: El get_info() no es buena idea. Demasiado "técnico"
def activar_widgets(self, s, chequear_permisos = True):
"""
Activa o desactiva (sensitive=True/False) todos
los widgets de la ventana que dependan del
objeto mostrado.
Entrada: s debe ser True o False. En todo caso
se evaluará como boolean.
"""
if self.objeto == None:
s = False
ws = []
for w in ws:
try:
self.wids[w].set_sensitive(s)
except Exception, msg:
print "Widget problemático:", w, "Excepción:", msg
import traceback
traceback.print_last()
if chequear_permisos:
self.check_permisos(nombre_fichero_ventana = "dynconsulta.py")
def actualizar_ventana(self, boton = None):
if self.wids['ch_datos_reales'].get_active():
self.precalc = precalcular(self.fecha_mes_actual,
self.fecha_mes_final,
self.wids['ventana'])
else:
self.precalc = MyMonthsDict()
self.rellenar_widgets()
self.wids['tv_datos'].expand_all()
def rellenar_widgets(self):
# Los únicos otros dos widgets son los de mes de inicio y ancho de
# tabla en meses, que ya se rellenan ellos solos.
self.costes = 0.0
self.ingresos = 0.0
padres = self.rellenar_tabla()
self.actualizar_totales(padres)
self.wids['e_costes'].set_text(utils.float2str(self.costes))
self.wids['e_ingresos'].set_text(utils.float2str(self.ingresos))
total = self.ingresos + self.costes
self.wids['e_total'].set_text(utils.float2str(total))
self.wids['e_total'].modify_text(gtk.STATE_NORMAL,
self.wids['e_total'].get_colormap().alloc_color(
total > 0 and "blue"
or total < 0 and "red"
or "green"))
self.wids['e_total'].modify_font(pango.FontDescription("bold"))
def actualizar_totales(self, padres):
"""
Recorre los nodos de primer nivel y actualiza los totales en
función del tipo de importe: gasto o ingreso.
"""
# Solo hay un (concepto de) presupuesto anual de tipo ingreso: ventas.
for concepto in padres:
fila = self.wids['tv_datos'].get_model()[padres[concepto]]
for valor in fila:
try:
valor_float = utils._float(valor)
except (ValueError, TypeError): # Es el PUID o descripción.
continue
if concepto.es_gasto():
self.costes += valor_float
else:
self.ingresos += valor_float
def rellenar_tabla(self):
self.tracking = {} # Aquí guardaré los objetos que componen cada valor.
self.cave = {}
# Por si acaso, algo de mantenimiento por aquí. Al turrón:
if pclases.DEBUG:
print __file__, "Eliminando posibles vencimientos de presupuesto"\
" duplicados...",
deleted = pclases.VencimientoValorPresupuestoAnual._remove_dupes()
if pclases.DEBUG:
print deleted
# Y ahora sí que sí. Al lío:
vpro = VentanaProgreso(padre = self.wids['ventana'])
vpro.mostrar()
model = self.wids['tv_datos'].get_model()
self.old_model = bak_model(model)
model.clear()
padres = self.cargar_conceptos_primer_nivel(vpro)
filas = self.cargar_conceptos_segundo_nivel(vpro)
if self.wids['ch_presupuesto'].get_active():
filas = self.montar_filas(filas, vpro)
nodos_conceptos = self.mostrar_matriz_en_treeview(filas, padres, vpro)
if self.wids['ch_datos_reales'].get_active():
self.mostrar_valores_reales_precalculados(nodos_conceptos,
padres, vpro)
# Ahora toca pasar el mes que se ha ido al final del año actual. Ciclo
# el mes si el último mes mostrado en la cuadrícula está completamente
# a cero. Uso como datos de referencia el del mismo mes pero del
# año anterior. Si también está a cero (nunca se ha presupuestado ese
# mes en la historia del programa), desisto.
vpro.ocultar()
return padres
def mostrar_valores_reales_precalculados(self,
nodos_conceptos,
padres,
vpro):
for mescol in range(self.num_meses):
fechacol = restar_mes(self.fecha_mes_actual, -mescol)
i = 0.0
try:
datos_reales = self.precalc[fechacol]
            except KeyError: # Lo que no hay, no hay ;)
datos_reales = []
for concepto in datos_reales:
vpro.set_valor(
i / len(datos_reales.keys()),
"Aplicando sustitución por valores reales en %s..."
% fechacol.strftime("%B"))
# Si había un valor previo, tengo que retirar la estimación
# y sumar lo real. En caso de granza, entonces la parte
# proporcional de las Tm.
valor_real_importe = datos_reales[concepto]['importe']
objetos = datos_reales[concepto]['objetos']
if self.wids['ch_presupuesto'].get_active():
vto_presupuestado = buscar_vencimiento_presupuestado(
fechacol,
concepto,
self.fecha_mes_actual)
else:
vto_presupuestado = None
if criterio_sustitucion(vto_presupuestado,
valor_real_importe,
self.fecha_mes_actual,
fechacol):
# Y si no, dejo lo que estaba.
if pclases.DEBUG:
print __file__, "Cambio presupuesto por real:", \
concepto.descripcion,\
vto_presupuestado, valor_real_importe
diff = self.cambiar_valor_presupuestado(valor_real_importe,
vto_presupuestado,
concepto,
fechacol,
mescol,
nodos_conceptos,
objetos)
try:
self.cave[concepto.puid][mescol+1]+=valor_real_importe
except AttributeError: # No valor real
# self.cave[concepto.puid][mescol + 1] = 0
pass
self.actualizar_sumatorio_padre(mescol, concepto, padres,
diff)
i += 1
def cambiar_valor_presupuestado(self, valor_real_importe,
valor_presupuestado, concepto, fechacol,
mescol, nodos_conceptos, objetos):
"""
Si el valor presupuestado es de granza, quita el importe
correspondiente a las toneladas del valor real y suma este valor
real a lo que quede. Deja en el cell la cantidad final.
Devuelve la diferencia entre el nuevo valor y el que había antes
para que se actualice el nodo padre únicamente sumando esa cantidad y
así evitar recalcular toda la "subcolumna".
«objetos» es una lista de objetos de los que procede el valor real.
"""
(valor_presupuestado_restante,
valor_presupuestado_importe) = self.calcular_presupuestado_restante(
valor_presupuestado,
fechacol,
concepto)
model = self.wids['tv_datos'].get_model()
nodo_concepto = nodos_conceptos[concepto]
if self.wids['ch_datos_pdtes'].get_active():
# Si los valores confirmados los ignoro, simplemente no incremento
# el valor total de la celda, pero sí que decremento el
# presupuesto. El IVA no cuenta. Eso se paga estén o no las
# facturas pendientes.
for objeto in objetos[:]:
if (not esta_pendiente(objeto)
and (valor_presupuestado
and not valor_presupuestado.es_de_iva())):
try:
importe_confirmado = objeto.get_subtotal(iva = True,
prorrateado = True)
except AttributeError: # Es factura o algo asín
importe_confirmado = objeto.calcular_importe_total()
                    if concepto.es_gasto():
importe_confirmado *= -1
valor_real_importe -= importe_confirmado
objetos.remove(objeto)
model[nodo_concepto][mescol + 1] = utils.float2str(
valor_presupuestado_restante
+ valor_real_importe)
self.actualizar_traza(objetos, concepto, fechacol, valor_presupuestado)
delta = ((valor_presupuestado_restante + valor_real_importe)
- valor_presupuestado_importe)
if pclases.DEBUG:
print __file__, ">>> cambiar_valor_presupuestado >>> ð =", delta
return delta
def actualizar_traza(self, objetos, concepto, fechacol,
valor_presupuestado):
if not fechacol.month in self.tracking:
self.tracking[fechacol.month] = defaultdict(list)
for o in objetos:
if (isinstance(o, pclases.LineaDeCompra)
and o.productoCompra in buscar_productos_granza()):
importe_objeto = o.get_subtotal(iva = True, prorrateado=True)
try:
numvtos = len(o.facturaCompra.vencimientosPago)
except AttributeError:
numvtos = max(
len(o.albaranEntrada.proveedor.get_vencimientos()), 1)
tm = o.cantidad / numvtos
if concepto.es_gasto():
trinfo = (o, -importe_objeto, -tm)
else:
trinfo = (o, importe_objeto, tm)
restar_en_traza_presupuesto(self.tracking,
fechacol.month,
self.mes_actual,
concepto,
valor_presupuestado,
importe_objeto,
tm)
else:
try:
importe_objeto = o.get_subtotal(iva = True,
prorrateado = True)
if isinstance(o, (pclases.LineaDeCompra,
pclases.ServicioTomado)):
importe_objeto = -importe_objeto
except AttributeError: # Es factura o algo así.
importe_objeto = o.calcular_importe_total(iva = True)
if isinstance(o, pclases.FacturaCompra):
# IVA es gasto, pero tiene fras de venta que deben ir en
# positivo. No puedo usar el criterio concepto.es_gasto().
importe_objeto = -importe_objeto
trinfo = (o, importe_objeto, None)
restar_en_traza_presupuesto(self.tracking,
fechacol.month,
self.mes_actual,
concepto,
valor_presupuestado,
importe_objeto)
self.tracking[fechacol.month][concepto].append(trinfo)
def calcular_presupuestado_restante(self, valor_presupuestado, fechacol,
concepto):
valor_real_toneladas = None
if valor_presupuestado:
valor_presupuestado_importe = valor_presupuestado.importe
if valor_presupuestado.es_de_granza():
precalc_concepto = self.precalc[fechacol][concepto]
valor_real_toneladas = precalc_concepto['toneladas']
valor_presupuestado_restante = (valor_presupuestado.precio
#* (valor_presupuestado.toneladas - valor_real_toneladas))
# Sumo porque las tm presupuestadas ya vienen en negativo.
* (valor_presupuestado.toneladas + valor_real_toneladas))
# Si "me como" todo lo presupuestado, parto de cero para
# mostrar el valor real completo. (Si no, acabará restando
# ese delta y falseará el resultado)
# Uso min porque las toneladas vienen en negativo al ser gasto.
valor_presupuestado_restante = min(0,
valor_presupuestado_restante)
else:
# Como voy a sustituirlo entero, el valor restante es 0.0 para
# que solo se vea el valor real que le voy a sumar.
valor_presupuestado_restante = 0.0
else:
valor_presupuestado_restante = 0.0
valor_presupuestado_importe = 0.0
return valor_presupuestado_restante, valor_presupuestado_importe
def actualizar_sumatorio_padre(self, mescol, concepto, padres, diff):
# Thanks bicycle repair man!
model = self.wids['tv_datos'].get_model()
pa = concepto.presupuestoAnual
nodo_padre = padres[pa]
try:
model[nodo_padre][mescol + 1] = (utils.float2str(
utils.parse_float(model[nodo_padre][mescol + 1])
+ diff))
except (TypeError, ValueError):
model[nodo_padre][mescol + 1] = utils.float2str(diff)
def mostrar_matriz_en_treeview(self, filas, padres, vpro):
model = self.wids['tv_datos'].get_model()
i = 0.0
nodos_conceptos = {}
for c in filas:
vpro.set_valor(i / len(filas.keys()),
"Montando matriz...")
pa = c.presupuestoAnual
nodo_padre = padres[pa]
            fila = [c.descripcion # FIXME: .replace("&", "&amp;") #
# Problemas con el tooltip.
] + [utils.float2str(w) for w in filas[c]] + [c.puid]
nodos_conceptos[c] = model.append(nodo_padre, fila)
for mes_matriz in range(1, self.num_meses + 1):
# Actualizo totales de fila padre
try:
model[nodo_padre][mes_matriz] = utils.float2str(
utils.parse_float(model[nodo_padre][mes_matriz])
+ utils.parse_float(fila[mes_matriz]))
except (TypeError, ValueError):
model[nodo_padre][mes_matriz] = utils.float2str(
fila[mes_matriz])
i += 1
return nodos_conceptos
def montar_filas(self, filas, vpro):
i = 0.0
# Estos valores se metieron en la fecha y concepto que fueran, pero
# aquí tienen que moverse a la fecha de la FDP que corresponda al
# concepto.
valores = pclases.VencimientoValorPresupuestoAnual.select(pclases.AND(
pclases.VencimientoValorPresupuestoAnual.q.fecha
>= self.fecha_mes_actual,
pclases.VencimientoValorPresupuestoAnual.q.fecha
< self.fecha_mes_final))
valores_count = valores.count()
for v in valores:
v.sync()
# CWT: En mes actual no valen valores presupuestados. Solo reales.
if (self.wids['ch_reales_mes0'].get_active() and
self.fecha_mes_actual
<= v.fecha <= final_de_mes(self.fecha_mes_actual)):
continue
# Hay valores de meses anteriores al primero de la tabla cuyos
# vencimientos caen ahora. Esos los quito. Si el mes en que se
# presupuestaron ya se ha ido, sus vencimientos no valen.
vp = v.valorPresupuestoAnual
if vp.fecha < self.fecha_mes_actual:
continue
c = v.conceptoPresupuestoAnual
mes_offset = (v.fecha.month - self.fecha_mes_actual.month) % (
self.num_meses)
try:
filas[c][mes_offset] += v.importe
except KeyError: # Que será lo normal. No debería haber dos vtos.
# en la misma fecha para un mismo concepto.
filas[c][mes_offset] = v.importe
if not v.fecha.month in self.tracking:
self.tracking[v.fecha.month] = defaultdict(list)
try:
tm = v.toneladas
except ValueError:
tm = None
self.tracking[v.fecha.month][c].append(
(v, v.importe, tm))
vpro.set_valor(i / valores_count,
"Cargando valores de dynconsulta...")
i += 1
return filas
def cargar_conceptos_primer_nivel(self, vpro):
vpro.set_valor(0, "Cargando conceptos de primer nivel...")
model = self.wids['tv_datos'].get_model()
padres = {}
pas = pclases.PresupuestoAnual.select()
pas_count = pas.count()
i = 0.0
for pa in pas:
self.cave[pa.puid] = defaultdict(old_float)
            fila = [pa.descripcion] #FIXME: .replace("&", "&amp;")]
for m in range(self.num_meses): # @UnusedVariable
fila.append("")
fila.append(pa.puid)
nodo = model.append(None, fila)
padres[pa] = nodo
vpro.set_valor(i / pas_count,
"Cargando conceptos de primer nivel...")
i += 1
return padres
def cargar_conceptos_segundo_nivel(self, vpro):
"""
Solo carga los conceptos. Con todos los valores a cero.
"""
i = 0.0
conceptos = pclases.ConceptoPresupuestoAnual.select()
conceptos_count = conceptos.count()
filas = {}
for c in conceptos:
self.cave[c.puid] = defaultdict(old_float)
filas[c] = []
for m in range(self.num_meses): # @UnusedVariable
filas[c].append(0)
vpro.set_valor(i / conceptos_count,
"Cargando conceptos de dynconsulta...")
i += 1
return filas
def buscar(self, widget):
"""
Muestra una ventana de búsqueda y a continuación los
resultados. El objeto seleccionado se hará activo
en la ventana a no ser que se pulse en Cancelar en
la ventana de resultados.
"""
# TODO: Buscar dentro de todas las filas y tracking un texto tecleado
# y pasarle el foco o algo.
pass
def imprimir(self, boton):
"""
Prepara la vista preliminar para la impresión del informe.
"""
resp = utils.dialogo(titulo = "¿IMPRIMIR DESGLOSE?",
texto = "Puede imprimir un resumen o todo el contenido de "
"la consulta\n¿Desea imprimir toda la información "
"desglosada?",
padre = self.wids['ventana'])
if resp:
tv = self.wids['tv_datos']
tv.expand_all()
while gtk.events_pending(): gtk.main_iteration(False)
cols_a_totalizar = []
else:
tv = self.wids['tv_datos']
tv.collapse_all()
while gtk.events_pending(): gtk.main_iteration(False)
from consulta_ventas_por_producto import convertir_a_listview
tv = convertir_a_listview(tv)
cols_a_totalizar = range(1, self.num_meses + 1)
strfecha = "De %s a %s" % (utils.str_fecha(self.fecha_mes_actual),
utils.str_fecha(self.fecha_mes_final - mx.DateTime.oneDay))
abrir_pdf(
treeview2pdf(tv, titulo = "Informe resumen financiero",
fecha = strfecha, apaisado = True,
numcols_a_totalizar = cols_a_totalizar))
def exportar(self, boton):
"""
Exporta el TreeView a CSV.
"""
abrir_csv(treeview2csv(self.wids['tv_datos']))
def precalcular(fecha_ini, fecha_fin, ventana_padre = None, usuario = None):
"""
Devuelve un diccionario de conceptos del mes especificado con los valores
que se puedan calcular a partir de datos reales.
Si el concepto no existe, lo crea en la base de datos
cobrados / pagados).
"""
vpro = VentanaActividad(ventana_padre, "Precalculando datos reales...")
vpro.mostrar()
# Valores que puedo conocer del ERP (de momento):
# 1.- Entradas de granza
res = calcular_entradas_de_granza(vpro, fecha_ini, fecha_fin, usuario)
# 2.- IVA (soportado - repercutido)
calcular_iva_real(res, vpro, fecha_ini, fecha_fin)
# 3.- Ventas por tipo (internacionales, geotextiles, geocompuestos...)
calcular_ventas(res, vpro, fecha_ini, fecha_fin)
# 4.- Compras que no son de granza
calcular_compras_no_granza(res, vpro, fecha_ini, fecha_fin)
if pclases.DEBUG and pclases.VERBOSE:
print __file__, res
vpro.ocultar()
return res
def calcular_iva_real(res, vpro, fechaini, fechafin):
"""
Calcula el IVA del mes de la fecha y lo almacena en el concepto
«Impuestos» de los valores precalculados.
"""
vpro.mover()
concepto = buscar_concepto_iva()
fecha = fechaini
while fecha <= fechafin:
vpro.mover()
soportado, fras_soportadas = calcular_soportado(vpro, fecha)
vpro.mover()
repercutido, fras_repercutidas = calcular_repercutido(vpro, fecha)
vpro.mover()
importe_iva = soportado - repercutido
if importe_iva:
# Paso de guardar valores nulos. La RAM es un bien escaso!
if fecha not in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_iva
res[fecha][concepto]['objetos'] += fras_soportadas
res[fecha][concepto]['objetos'] += fras_repercutidas
except KeyError:
res[fecha][concepto] = {'importe': importe_iva,
'objetos': fras_soportadas + fras_repercutidas}
# IVA a devolver se compensa el mes siguiente.
try:
importe_este_mes = res[fecha][concepto]['importe']
except KeyError:
importe_este_mes = None
if importe_este_mes > 0 and restar_mes(fecha, -1) < fechafin:
# El último mes ya no arrastro, no me quedan celdas donde acumular.
fechanext = restar_mes(fecha, -1)
if fechanext not in res:
res[fechanext] = {}
try:
res[fechanext][concepto]['importe'] += importe_este_mes
res[fechanext][concepto]['objetos'] \
= res[fecha][concepto]['objetos']
except KeyError:
res[fechanext][concepto] = {'importe': importe_este_mes,
'objetos': res[fecha][concepto]['objetos']}
res[fecha][concepto]['importe'] -= importe_este_mes # = 0.0
res[fecha][concepto]['objetos'] = []
fecha = restar_mes(fecha, -1)
# FIXME: Devuelvo en negativo o positivo, pero el resto de cifras (ventas,
# compras, salarios, etc.) va en positivo aunque sean gastos. Convertir a
# negativo automáticamente aquí y en presupuesto si es de tipo gasto.
def buscar_concepto_iva():
# OJO: Harcoded
try:
c = pclases.ConceptoPresupuestoAnual.selectBy(descripcion = "IVA")[0]
except IndexError:
try:
padre=pclases.PresupuestoAnual.selectBy(descripcion="Impuestos")[0]
except IndexError:
padre = pclases.PresupuestoAnual(descripcion = "Impuestos")
c = pclases.ConceptoPresupuestoAnual(descripcion = "IVA",
presupuestoAnual = padre)
return c
def calcular_soportado(vpro, fecha):
# Pago este mes el IVA del mes pasado. Ojo.
fini = restar_mes(fecha)
fini = mx.DateTime.DateTimeFrom(fini.year, fini.month, 1)
ffin = mx.DateTime.DateTimeFrom(fini.year, fini.month, -1)
frascompra = pclases.FacturaCompra.select(pclases.AND(
pclases.FacturaCompra.q.fecha >= fini,
pclases.FacturaCompra.q.fecha <= ffin))
iva = sum([f.calcular_importe_iva() for f in frascompra])
return iva, pclases.SQLlist(frascompra)
def calcular_repercutido(vpro, fecha):
# Pago este mes el IVA del mes pasado. Ojo.
fini = restar_mes(fecha)
fini = mx.DateTime.DateTimeFrom(fini.year, fini.month, 1)
ffin = mx.DateTime.DateTimeFrom(fini.year, fini.month, -1)
frasventa = pclases.FacturaVenta.select(pclases.AND(
pclases.FacturaVenta.q.fecha >= fini,
pclases.FacturaVenta.q.fecha <= ffin))
iva = sum([f.calcular_total_iva() for f in frasventa])
return iva, pclases.SQLlist(frasventa)
def calcular_ventas(res, vpro, fechaini, fechafin):
"""
Calcula y clasifica las ventas realizadas entre las fechas de inicio y
fin.
"""
vpro.mover()
fecha = fechaini
while fecha <= fechafin:
vpro.mover()
ldv_vencimientos_ventas, srv_vencimientos_ventas \
= buscar_vencimientos_ventas(vpro, fecha)
vpro.mover()
lineas_no_facturadas, servicios_no_facturados \
= buscar_lineas_albaranes_venta(vpro, fecha)
vpro.mover()
clasificar_ventas(res, ldv_vencimientos_ventas,
srv_vencimientos_ventas, lineas_no_facturadas,
servicios_no_facturados, fecha, vpro)
fecha = restar_mes(fecha, -1)
def buscar_vencimientos_ventas(vpro, fecha):
"""
Devuelve líneas de venta y servicios correspondientes a vencimientos de
facturas en el mes indicado por «fecha».
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
vtos_venta = pclases.VencimientoCobro.select(pclases.AND(
pclases.VencimientoCobro.q.fecha >= fini,
pclases.VencimientoCobro.q.fecha <= ffin))
ldvs = []
srvs = []
for v in vtos_venta:
vpro.mover()
f = v.factura
for ldv in f.lineasDeVenta:
if ldv not in ldvs:
ldvs.append(ldv)
vpro.mover()
for srv in f.servicios:
if srv not in srvs:
srvs.append(srv)
vpro.mover()
return ldvs, srvs
def buscar_lineas_albaranes_venta(vpro, fecha):
"""
    Devuelve las líneas de venta y los servicios correspondientes a albaranes
    no facturados del mes indicado por la fecha «fecha».
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
albs = pclases.AlbaranSalida.select(pclases.AND(
pclases.AlbaranSalida.q.fecha >= fini,
pclases.AlbaranSalida.q.fecha <= ffin))
# Filtro y me quedo con las líneas no facturadas
ldvs = []
srvs = []
for a in albs:
vpro.mover()
for ldv in a.lineasDeVenta:
vpro.mover()
if not ldv.factura:
ldvs.append(ldv)
for srv in a.servicios:
vpro.mover()
if not srv.factura:
srvs.append(srv)
return ldvs, srvs
def clasificar_ventas(res, ldv_facturadas, srv_facturados, ldv_no_facturadas,
srv_no_facturados, fecha, vpro):
"""
De los dos grupos de líneas de venta recibidos determina su importe, fecha
de vencimiento y concepto donde clasificarlas. Incrementa la celda* de la
columna de fecha de vencimiento y fila del concepto en la cantidad del
importe de la línea de venta. Si tiene varios vencimientos, prorratea la
cantidad.
* En realidad el importe real en el diccionario de la celda que ocupará si
supera el criterio de sustitución.
"""
for ldv in ldv_facturadas:
vpro.mover()
importe_prorrateado_ldv = ldv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(ldv.factura.cliente, ldv.producto)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldv
res[fecha][concepto]['objetos'].append(ldv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldv,
'objetos': [ldv]}
for srv in srv_facturados:
vpro.mover()
importe_prorrateado_srv = srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(srv.factura.cliente, None)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
for ldv in ldv_no_facturadas:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# cliente.
vpro.mover()
importe_prorrateado_ldv = ldv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(ldv.albaranSalida.cliente, ldv.producto)
fechas = ldv.albaranSalida.cliente.get_fechas_vtos_por_defecto(
ldv.albaranSalida.fecha)
if not fechas:
fechas = [fecha] # Uso la del albarán porque el cliente no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldv
res[fecha][concepto]['objetos'].append(ldv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldv,
'objetos': [ldv]}
for srv in srv_no_facturados:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# cliente.
vpro.mover()
importe_prorrateado_srv = srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(srv.albaranSalida.cliente, None)
fechas = srv.albaranSalida.cliente.get_fechas_vtos_por_defecto(
srv.albaranSalida.fecha)
if not fechas:
fechas = [fecha] # Uso la del albarán porque el cliente no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
def buscar_concepto_ldv(cliente, producto = None):
"""
Devuelve el concepto de presupuesto que corresponde al cliente y
producto recibido. Si no se recibe producto se considera que es un
servicio y devuelve el tipo de concepto "General".
"""
# Concepto por defecto, el del cliente.
if cliente.es_extranjero():
nac = "Internacionales"
else:
nac = "Nacionales"
try:
tdp = cliente.tipoDeCliente.descripcion
except AttributeError: # No está clasificado por los usuarios. Uso general.
tdp = "General"
# Ahora afino en función del tipo de producto de la línea de venta.
try:
if producto.es_fibra():
tdp = "Fibra"
elif producto.es_bigbag() or producto.es_bolsa() or producto.es_caja():
tdp = "Geocem"
elif isinstance(producto, pclases.ProductoCompra):
tdp = "Comercializado"
except AttributeError:
pass
try:
concepto = pclases.ConceptoPresupuestoAnual.selectBy(
descripcion = "%s - %s" % (nac, tdp))[0]
except IndexError:
# No existe el concepto. DEBERÍA. Lo creo.
concepto = pclases.ConceptoPresupuestoAnual(
descripcion = "%s - %s" % (nac, tdp),
presupuestoAnual = pclases.PresupuestoAnual.selectBy(
descripcion = "Clientes")[0],
proveedor = None)
return concepto
def calcular_compras_no_granza(res, vpro, fechaini, fechafin):
"""
Calcula y clasifica las compras realizadas entre las fechas de inicio y
fin.
"""
vpro.mover()
fecha = fechaini
granzas = buscar_productos_granza()
while fecha <= fechafin:
vpro.mover()
ldc_vencimientos_compras, srv_vencimientos_compras \
= buscar_vencimientos_compras_no_granza(vpro, fecha, granzas)
vpro.mover()
lineas_no_facturadas, servicios_no_facturados \
= buscar_lineas_albaranes_compra_no_granza(vpro, fecha, granzas)
vpro.mover()
clasificar_compras(res, ldc_vencimientos_compras,
srv_vencimientos_compras, lineas_no_facturadas,
servicios_no_facturados, fecha, vpro)
fecha = restar_mes(fecha, -1)
def buscar_vencimientos_compras_no_granza(vpro, fecha, granzas):
"""
Devuelve líneas de compra y servicios correspondientes a vencimientos de
facturas en el mes indicado por «fecha» que no sean de granza.
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
vtos_compra = pclases.VencimientoPago.select(pclases.AND(
pclases.VencimientoPago.q.fecha >= fini,
pclases.VencimientoPago.q.fecha <= ffin))
ldcs = []
srvs = []
for v in vtos_compra:
vpro.mover()
f = v.facturaCompra
for ldc in f.lineasDeCompra:
if ldc.productoCompra not in granzas and ldc not in ldcs:
ldcs.append(ldc)
vpro.mover()
for srv in f.serviciosTomados:
if srv not in srvs:
srvs.append(srv)
vpro.mover()
return ldcs, srvs
def buscar_lineas_albaranes_compra_no_granza(vpro, fecha, granzas):
"""
    Devuelve las líneas de compra correspondientes a albaranes no facturados
    del mes indicado por la fecha «fecha» que no sean de granza, junto con una
    lista de servicios que aquí siempre queda vacía (los albaranes de entrada
    no llevan servicios).
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
albs = pclases.AlbaranEntrada.select(pclases.AND(
pclases.AlbaranEntrada.q.fecha >= fini,
pclases.AlbaranEntrada.q.fecha <= ffin))
# Filtro y me quedo con las líneas no facturadas y que no sean de granza.
ldcs = []
srvs = []
for a in albs:
vpro.mover()
for ldc in a.lineasDeCompra:
vpro.mover()
if not ldc.facturaCompra and ldc.productoCompra not in granzas:
ldcs.append(ldc)
#for srv in a.serviciosTomados:
# vpro.mover()
# if not srv.factura:
# srvs.append(srv)
# Los albaranes de entrada no tienen servicios. Los servicios se
# facturan directamente.
return ldcs, srvs
def clasificar_compras(res, ldc_facturadas, srv_facturados, ldc_no_facturadas,
srv_no_facturados, fecha, vpro):
"""
De los dos grupos de líneas de compra recibidos determina su importe, fecha
de vencimiento y concepto donde clasificarlas. Incrementa la celda* de la
columna de fecha de vencimiento y fila del concepto en la cantidad del
importe de la línea de venta. Si tiene varios vencimientos, prorratea la
cantidad.
* En realidad el importe real en el diccionario de la celda que ocupará si
supera el criterio de sustitución.
"""
for ldc in ldc_facturadas:
vpro.mover()
# Gasto. En negativo.
importe_prorrateado_ldc = -ldc.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(ldc.facturaCompra.proveedor,
ldc.productoCompra)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldc
res[fecha][concepto]['objetos'].append(ldc)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldc,
'objetos': [ldc]}
for srv in srv_facturados:
vpro.mover()
# Gasto. Negativo
importe_prorrateado_srv = -srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(srv.facturaCompra.proveedor, None)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
for ldc in ldc_no_facturadas:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# proveedor.
vpro.mover()
# Gasto. En negativo
importe_prorrateado_ldc = -ldc.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(ldc.albaranEntrada.proveedor,
ldc.productoCompra)
try:
fechas = ldc.albaranEntrada.proveedor.get_fechas_vtos_por_defecto(
ldc.albaranEntrada.fecha)
except AttributeError: # No proveedor. Sí albarán. El objeto viene
# de una búsqueda de albaranes no facturados.
fechas = [] # fecha es similar a ldc.albaranEntrada.fecha
if not fechas:
fechas = [fecha] # Uso la del albarán porque el proveedor no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldc
res[fecha][concepto]['objetos'].append(ldc)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldc,
'objetos': [ldc]}
for srv in srv_no_facturados:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# proveedor.
vpro.mover()
# Gasto. En negativo
importe_prorrateado_srv = -srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(srv.albaranEntrada.proveedor, None)
fechas = srv.albaranEntrada.proveedor.get_fechas_vtos_por_defecto(
srv.albaranEntrada.fecha)
if not fechas:
fechas = [fecha] # Uso la del albarán porque el proveedor no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
def buscar_concepto_ldc(proveedor, producto = None):
"""
Devuelve el concepto de presupuesto que corresponde al proveedor y
producto recibido. Si no se recibe producto se considera que es un
servicio y devuelve el tipo de concepto "General".
"""
# Concepto por defecto, el del proveedor.
try:
proveedor.sync()
tdp = proveedor.tipoDeProveedor.descripcion
except AttributeError: # No está clasificado por los usuarios. Uso resto.
tdp = "Resto"
if tdp == "Granza": # Si por la ldc no puedo sacar el tipo, entonces
tdp = "Resto" # lo clasifico como general. Porque todas las
# compras de granza ya se tratan en otro sitio.
# Ahora afino en función del tipo de producto de la línea de venta.
if producto:
producto.sync()
tdm = producto.tipoDeMaterial
# OJO: HARCODED. Tipos de material conocidos. Si se crearan nuevos,
# caería en el tipo del proveedor.
iny = {'Materia Prima': None, # Usaré el del proveedor.
'Material adicional': 'Materiales',
'Mantenimiento': 'Materiales',
'Repuestos geotextiles': 'Repuestos',
'Repuestos fibra': 'Repuestos',
'Aceites y lubricantes': 'Materiales',
'Mercancía inicial Valdemoro': 'Comercializados',
'Productos comercializados': 'Comercializados',
'Comercializados': 'Comercializados'}
try:
tdpiny = iny[tdm]
except KeyError:
pass # Si no está o no tiene, uso el del proveedor.
else:
if tdpiny != None:
tdp = tdpiny
try:
concepto = pclases.ConceptoPresupuestoAnual.selectBy(
descripcion = tdp)[0]
except IndexError:
# No existe el concepto. DEBERÍA. Lo creo.
if proveedor.es_extranjero():
nac = "Internacionales"
else:
nac = "Nacionales"
concepto = pclases.ConceptoPresupuestoAnual(
descripcion = "%s - %s" % (nac, tdp),
presupuestoAnual = pclases.PresupuestoAnual.selectBy(
descripcion = "Proveedores")[0],
proveedor = None)
return concepto
def restar_mes(fecha = mx.DateTime.today(), meses = 1):
if meses > 0:
try:
return restar_mes(
mx.DateTime.DateFrom(fecha.year,
fecha.month - 1,
fecha.day),
meses - 1)
except mx.DateTime.RangeError:
return restar_mes(
mx.DateTime.DateFrom(fecha.year - 1,
12,
fecha.day),
meses - 1)
elif meses < 0:
try:
return restar_mes(
mx.DateTime.DateFrom(fecha.year,
fecha.month + 1,
fecha.day),
meses + 1)
except mx.DateTime.RangeError:
return restar_mes(
mx.DateTime.DateFrom(fecha.year + 1,
1,
fecha.day),
meses + 1)
else:
return fecha
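# Illustration of the sign convention used throughout this module (arbitrary
# dates): a positive count steps back in time, a negative one steps forward.
#     restar_mes(mx.DateTime.DateFrom(2012, 3, 15), 1)   # -> 2012-02-15
#     restar_mes(mx.DateTime.DateFrom(2012, 3, 15), -1)  # -> 2012-04-15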
def calcular_entradas_de_granza(vpro, fecha_ini, fecha_fin, usuario):
vpro.mover()
primes = fecha_ini
finmes = mx.DateTime.DateFrom(fecha_fin.year, fecha_fin.month, -1)
vpro.mover()
# Primero: productos granza:
granzas = buscar_productos_granza()
# Saco datos de facturas:
vtos = buscar_vencimientos_compra(primes, finmes)
# Filtro para quedarme con las de granza:
vpro.mover()
res = MyMonthsDict()
clasificar_vencimientos_compra(vtos, granzas, usuario, res, vpro)
vpro.mover()
# Y ahora de los albaranes no facturados.
albs = buscar_albaranes_de_entrada(primes, finmes)
vpro.mover()
# Filtro para quedarme con los de granza:
clasificar_albaranes_de_entrada(albs, granzas, usuario, res, vpro)
vpro.mover()
return res
def clasificar_albaranes_de_entrada(albs, granzas, usuario, res, vpro):
for a in albs:
for ldc in a.lineasDeCompra:
ldc.sync()
# Solo quiero lo no facturado.
if (not ldc.facturaCompraID and ldc.productoCompra in granzas
and ldc.cantidad):
# Si la línea no tiene cantidad de nada, paso. No quiero
# guardar valores nulos que me coman tiempo de proceso o RAM.
# Piensa como si siguieras programando con 640 K, old boy.
if pclases.DEBUG: # and pclases.VERBOSE:
print __file__, a.get_info(), ldc.get_info()
concepto = buscar_concepto_proveedor_granza(ldc.proveedor,
usuario)
proveedor = ldc.albaranEntrada.proveedor
fechas_vto = proveedor.get_fechas_vtos_por_defecto(
ldc.albaranEntrada.fecha)
if not fechas_vto:
fechas_vto = [ldc.albaranEntrada.fecha]
numvtos = len(fechas_vto)
for fecha_vto in fechas_vto:
fecha = primero_de_mes(fecha_vto)
if fecha not in res:
res[fecha] = {}
cantidad_prorrateada = ldc.cantidad / numvtos
try:
# Gasto. En negativo
res[fecha][concepto]['importe'] += -ldc.get_subtotal(
iva = True,
prorrateado = True)
res[fecha][concepto]['toneladas']+=cantidad_prorrateada
res[fecha][concepto]['objetos'].append(ldc)
except KeyError:
# Gasto. En negativo
res[fecha][concepto] = {
'importe': -ldc.get_subtotal(iva = True,
prorrateado = True),
'toneladas': cantidad_prorrateada,
'objetos': [ldc]}
vpro.mover()
def buscar_albaranes_de_entrada(primes, finmes):
# ¡OJO! Si el albarán es de otra fecha anterior a «primes», aunque entren
# sus "teóricos" vencimientos en los meses del TreeView, se va a ignorar.
# La consulta no lo encontrará.
albs = pclases.AlbaranEntrada.select(pclases.AND(
pclases.AlbaranEntrada.q.fecha >= primes,
pclases.AlbaranEntrada.q.fecha <= finmes))
if pclases.DEBUG:
print __file__, albs.count(), "albaranes encontrados."
return albs
def clasificar_vencimientos_compra(vtos, granzas, usuario, res, vpro):
# Me quedo solo con los vencimientos de fras. de compra de granza.
for v in vtos:
if pclases.DEBUG and pclases.VERBOSE:
print __file__, v.get_info(), v.fecha
fra = v.facturaCompra
for ldc in fra.lineasDeCompra:
            ldc.sync()
ldc.facturaCompra and ldc.facturaCompra.sync()
ldc.albaranEntrada and ldc.albaranEntrada.sync()
if ldc.productoCompra in granzas:
if pclases.DEBUG and pclases.VERBOSE:
print __file__, fra.get_info(), ldc.get_info()
concepto = buscar_concepto_proveedor_granza(ldc.proveedor,
usuario)
fechas_mes_vto = buscar_mes_vto(ldc.facturaCompra)
# Gasto. En negativo
importe = -ldc.get_subtotal(iva = True, prorrateado = True)
cantidad = ldc.cantidad / len(fechas_mes_vto)
#for fecha_mes_vto in fechas_mes_vto:
fecha_mes_vto = v.fecha
if fecha_mes_vto not in res:
res[fecha_mes_vto] = {}
try:
res[fecha_mes_vto][concepto]['importe'] += importe
res[fecha_mes_vto][concepto]['toneladas'] += cantidad
res[fecha_mes_vto][concepto]['objetos'].append(ldc)
except KeyError:
res[fecha_mes_vto][concepto] = {'importe': importe,
'toneladas': cantidad,
'objetos': [ldc]}
vpro.mover()
def buscar_mes_vto(fra_compra):
"""Devuelve las fechas de vencimiento de la factura. Si no tiene
vencimientos (algún usuario se está haciendo el remolón con su trabajo)
entonces devuelve la fecha de la factura.
Las fechas las devuelve a primero del mes que sea, ignorando el día real
de pago.
:fra_compra: pclases.FacturaCompra
:returns: mx.DateTime.Date(Time)
"""
fechas = []
for v in fra_compra.vencimientosPago:
fechas.append(primero_de_mes(v.fecha))
if not fechas:
fechas = [primero_de_mes(fra_compra.fecha)]
return fechas
def primero_de_mes(f):
return mx.DateTime.DateFrom(f.year, f.month, 1)
def final_de_mes(f):
return mx.DateTime.DateFrom(f.year, f.month, -1)
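# Quick illustration (arbitrary date): mx.DateTime resolves a day of -1 to the
# last day of the month, which is what final_de_mes relies on.
#     primero_de_mes(mx.DateTime.DateFrom(2012, 2, 17))  # -> 2012-02-01
#     final_de_mes(mx.DateTime.DateFrom(2012, 2, 17))    # -> 2012-02-29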
def buscar_vencimientos_compra(primes, finmes):
vtos = pclases.VencimientoPago.select(pclases.AND(
pclases.VencimientoPago.q.fecha >= primes,
pclases.VencimientoPago.q.fecha <= finmes))
# Filtro y saco los que ya están pagados (ver doc. requisitos)
vtos_pdtes = [v for v in vtos if v.calcular_importe_pdte() > 0]
if pclases.DEBUG:
print __file__, len(vtos_pdtes), "de", vtos.count(), \
"vencimientos encontrados."
return vtos
def buscar_productos_granza():
granzas = pclases.ProductoCompra.select(pclases.AND(
pclases.ProductoCompra.q.descripcion.contains("granza"),
pclases.ProductoCompra.q.obsoleto == False,
pclases.ProductoCompra.q.tipoDeMaterialID
== pclases.TipoDeMaterial.select(
pclases.TipoDeMaterial.q.descripcion.contains("prima")
)[0].id))
return granzas
def buscar_concepto_proveedor_granza(proveedor, usuario = None):
"""
Busca el concepto del dynconsulta anual correspondiente al proveedor. Si
no lo encuentra, lo crea.
"""
try:
concepto = pclases.ConceptoPresupuestoAnual.select(
pclases.ConceptoPresupuestoAnual.q.descripcion==proveedor.nombre)[0]
except IndexError:
concepto = pclases.ConceptoPresupuestoAnual(
descripcion = proveedor.nombre,
presupuestoAnual = pclases.PresupuestoAnual.select(
pclases.PresupuestoAnual.q.descripcion
== "Proveedores granza")[0], # EXISTE. Hay un check al
# principio que se asegura de eso.
proveedor = proveedor)
pclases.Auditoria.nuevo(concepto, usuario, __file__)
return concepto
def get_col_pos(tv, col):
"""
Devuelve la posición (índice entero comenzando por 0) de la columna en
el TreeView.
"""
return tv.get_columns().index(col)
def bak_model(model):
res = {}
for fila in model:
res[fila[0]] = {'valores': [], 'hijos': {}}
for i in range(1, len(fila)):
res[fila[0]]['valores'].append(fila[i])
for sub_fila in fila.iterchildren():
res[fila[0]]['hijos'][sub_fila[0]] = []
for j in range(1, len(sub_fila)):
res[fila[0]]['hijos'][sub_fila[0]].append(sub_fila[j])
return res
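# The snapshot built above is keyed by the text of the first column, e.g.
# (sketch with made-up labels and amounts):
#     {'Proveedores granza': {'valores': ['-1.234,56', ...],
#                             'hijos': {'Proveedor X': ['-1.234,56', ...]}}}
# colorear() later compares the current model against this structure to
# highlight the cells whose value changed since the previous refresh.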
def criterio_sustitucion(vto_presupuesto, valor_real_importe,
fecha_primera_col, fecha = None):
if not fecha:
fecha = primero_de_mes(mx.DateTime.today())
# Si ni siquiera hay valor presupuestado, está claro, ¿no? Mostrar el real:
sustituir_por_reales = True
if vto_presupuesto:
# Para el mes actual SIEMPRE valores reales.
if primero_de_mes(fecha) <= fecha_primera_col <= final_de_mes(fecha):
sustituir_por_reales = True
else:
sustituir_por_reales = False
### Caso granza
if vto_presupuesto.es_de_granza():
# DONE: Principio de realidad: Siempre que
# haya datos reales, se sustituyen las estimaciones por datos
# reales. En granza además se puede hacer de forma
# proporcional complementando al presupuesto.
# Esto significa que si han entrado 15 toneladas de granza,
# en el mes de su vencimiento se retiran 15 toneladas al
# precio estimado y se pone el importe real de esas 15
# toneladas sumadas al resto de estimado.
sustituir_por_reales = True
### Caso IVA
if (vto_presupuesto.es_de_iva()
and abs(valor_real_importe)>abs(vto_presupuesto.importe)):
# En el caso del IVA se muestra el importe calculado a partir
# de datos reales cuando sea el mes corriente (primer "if" de
# arriba) o cuando se supere la estimación.
sustituir_por_reales = True
### Caso ventas.
if (vto_presupuesto.es_de_ventas()
and valor_real_importe > vto_presupuesto.importe):
# Solo sustituyo cuando supere lo previsto.
sustituir_por_reales = True
### Caso resto proveedores.
if (vto_presupuesto.es_de_compras()
and abs(valor_real_importe)>abs(vto_presupuesto.importe)):
# Solo sustituyo cuando supere lo previsto.
sustituir_por_reales = True
if pclases.DEBUG:
print __file__, "-------->>>>", \
vto_presupuesto.conceptoPresupuestoAnual.descripcion, \
"; mes presup.:", \
vto_presupuesto.valorPresupuestoAnual.fecha.month, \
"; mes vto.:", vto_presupuesto.fecha.month, \
"; presup. en mes vto.:", vto_presupuesto.importe, \
"; real:", valor_real_importe,
if pclases.DEBUG:
print __file__, valor_real_importe, "sustituir_por_reales [", \
sustituir_por_reales, "]"
return sustituir_por_reales
def buscar_vencimiento_presupuestado(fecha, concepto, fecha_mes_actual):
"""
Devuelve el objeto VencimientoValorPresupuesto del presupuesto para la
fecha DE VENCIMIENTO y concepto especificados.
"""
try:
vtos = pclases.VencimientoValorPresupuestoAnual.select(pclases.AND(
pclases.VencimientoValorPresupuestoAnual.q.fecha
>= primero_de_mes(fecha),
pclases.VencimientoValorPresupuestoAnual.q.fecha
<= final_de_mes(fecha)))
vto = [v for v in vtos if v.conceptoPresupuestoAnual == concepto][0]
vp = vto.valorPresupuestoAnual
# No interesan los vencimientos de valores presupuestados en el pasado.
if vp.fecha < fecha_mes_actual:
return None
return vto
except IndexError:
return None
def restar_en_traza_presupuesto(dict_tracking,
mes,
mes_actual,
concepto,
valor_presupuestado,
valor_real_importe,
valor_real_toneladas = None):
"""
Del diccionario de trazabilidad, extrae el objeto del valor presupuestado
y lo sustituye de nuevo por él mismo pero con la cantidad que aporta al
valor final decrementada en «valor_real_importe» y toneladas si es el caso.
"""
for obj, importe, tm in dict_tracking[mes][concepto]:
if obj == valor_presupuestado:
dict_tracking[mes][concepto].remove((obj, importe, tm))
# Para el mes actual nunca hay valores presupuestados. No lo
# vuelvo a agregar y santas pascuas.
if mes != mes_actual:
if valor_real_toneladas != None:
tm -= -valor_real_toneladas # Real: +. En presup.: -
importe = obj.precio * tm
else:
importe -= valor_real_importe
# Quito también valores negativos. Ya no influyen. Se ha
# sustituido por completo el valor presupuestado.
if ((not concepto.es_gasto() and importe > 0) or (
concepto.es_gasto() and importe < 0)):
dict_tracking[mes][concepto].append((obj,
importe,
tm))
break
def esta_pendiente(o):
"""
Devuelve True si el objeto recibido está pendiente de cobro/pago. Ya sea
factura, línea de factura o servicio.
Si no puede determinar la naturaleza, devuelve False por defecto.
En el caso de las líneas de venta/compra y servicios mira si están
pendientes o no basándose en su factura **completa**.
"""
o.sync()
try:
o = o.factura
o.sync()
except AttributeError:
pass
try:
o = o.facturaCompra
o.sync()
except AttributeError:
pass
try:
res = o.calcular_pendiente_cobro()
except AttributeError:
try:
res = o.get_importe_primer_vencimiento_pendiente()
except AttributeError:
res = False
return res
if __name__ == "__main__":
p = DynConsulta()
| gpl-2.0 | 2,879,280,261,875,653,000 | 44.758462 | 167 | 0.531615 | false |
google-research/graph-attribution | graph_attribution/graphs.py | 1 | 11067 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Extra functions for manipulating GraphsTuple objects."""
from typing import Iterator, List, Tuple
import graph_nets
import numpy as np
import tensorflow as tf
import tree
GraphsTuple = graph_nets.graphs.GraphsTuple
# Alias to mirror the tf version.
cast_to_np = graph_nets.utils_tf.nest_to_numpy
# Numpy and tf compatible version of graph_nets.utils_tf.get_num_graphs
def get_num_graphs(graph): return graph.n_node.shape[0]
def get_input_spec(x: GraphsTuple) -> tf.TensorSpec:
"""Gets input signature for a graphstuple, useful for tf.function."""
return graph_nets.utils_tf.specs_from_graphs_tuple(
x, dynamic_num_graphs=True)
def print_graphs_tuple(graphs: GraphsTuple):
"""Print a graph tuple's shapes and contents."""
print("Shapes of GraphsTuple's fields:")
print(
graphs.map(
lambda x: x if x is None else x.shape,
fields=graph_nets.graphs.ALL_FIELDS))
def cast_to_tf(graphs: GraphsTuple) -> GraphsTuple:
"""Convert GraphsTuple numpy arrays to tf.Tensor."""
def cast_fn(x):
return tf.convert_to_tensor(x) if isinstance(x, np.ndarray) else x
return tree.map_structure(cast_fn, graphs)
def reduce_sum_edges(graphs: GraphsTuple) -> GraphsTuple:
"""Adds edge information into nodes and sets edges to None."""
if graphs.nodes.ndim > 1:
raise ValueError('Can only deal with 1D node information.')
if graphs.edges is not None and graphs.edges.ndim > 1:
raise ValueError('Can only deal with 1D edge information.')
if graphs.edges is None:
return graphs
num_nodes = tf.reduce_sum(graphs.n_node)
edge_contribution = tf.math.unsorted_segment_sum(graphs.edges,
graphs.receivers, num_nodes)
new_nodes = graphs.nodes + edge_contribution
return graphs.replace(nodes=new_nodes, edges=None)
def binarize_np_nodes(graph: GraphsTuple, tol: float) -> GraphsTuple:
"""Binarize node values based on a threshold, useful for classification."""
return graph.replace(nodes=(graph.nodes >= tol).astype(np.float32))
def make_constant_like(graphs: GraphsTuple, node_vec: np.ndarray,
edge_vec: np.ndarray) -> GraphsTuple:
"""Make a similar graph but with constant nodes and edges."""
using_tensors = isinstance(graphs.nodes, tf.Tensor)
nodes = np.tile(node_vec, (sum(graphs.n_node), 1))
edges = np.tile(edge_vec, (sum(graphs.n_edge), 1))
if using_tensors:
nodes = tf.convert_to_tensor(nodes, graphs.nodes.dtype)
edges = tf.convert_to_tensor(edges, graphs.edges.dtype)
return graphs.replace(nodes=nodes, edges=edges)
def segment_mean_stddev(
data: tf.Tensor, segment_counts: tf.Tensor) -> Tuple[tf.Tensor, tf.Tensor]:
"""Calculate mean and stddev for segmented tensor (e.g.
ragged-like).
Expects a 2D tensor for data and will return mean and std in the same shape,
  with repeats according to segment_counts.
Args:
data: 2D tensor.
segment_counts: 1D int tensor with counts for each segment. Should satisfy
sum(segment_counts) = data.shape[0].
Returns:
Segment-wise mean and std, replicated to same shape as data.
"""
segment_ids = tf.repeat(
tf.range(segment_counts.shape[0]), segment_counts, axis=0)
mean_per_segment = tf.math.segment_mean(data, segment_ids)
mean = tf.repeat(mean_per_segment, segment_counts, axis=0)
diff_squared_sum = tf.math.segment_sum(tf.square(data - mean), segment_ids)
counts = tf.reshape(tf.cast(segment_counts, tf.float32), (-1, 1))
std_per_segment = tf.sqrt(diff_squared_sum / counts)
std = tf.repeat(std_per_segment, segment_counts, axis=0)
return mean, std
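# Illustrative example (made-up values): with data = [[1.], [3.], [10.]] and
# segment_counts = [2, 1], the segments are {1, 3} and {10}, so
# mean = [[2.], [2.], [10.]] and std = [[1.], [1.], [0.]] -- per-segment statistics
# repeated back to the shape of `data`.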
def perturb_graphs_tuple(graphs: GraphsTuple, num_samples: int,
sigma: float) -> GraphsTuple:
"""Sample graphs with additive gaussian noise.
For a given collection of graphs we create noisey versions of the initial
graphs by summing random normal noise scaled by a constant factor (sigma)
and per-graph variance on node and edge information. Connectivity is the
same.
Args:
graphs: input graphs on which to add noise.
num_samples: number of times to create noisy graphs.
sigma: scaling factor for noise.
Returns:
GraphsTuple with num_samples times more graphs.
"""
_, node_stddev = segment_mean_stddev(graphs.nodes, graphs.n_node)
_, edge_stddev = segment_mean_stddev(graphs.edges, graphs.n_edge)
def add_noise(x, stddev):
return x + tf.random.normal(x.shape,
stddev=sigma * stddev, dtype=x.dtype)
graph_list = []
for _ in tf.range(num_samples):
graph = graphs.replace(
nodes=add_noise(graphs.nodes, node_stddev),
edges=add_noise(graphs.edges, edge_stddev))
graph_list.append(graph)
return graph_nets.utils_tf.concat(graph_list, axis=0)
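# Typical use (parameter values are illustrative): perturb_graphs_tuple(batch, num_samples=8, sigma=0.05)
# returns a GraphsTuple holding 8 noisy copies of every input graph, with unchanged
# connectivity and noise drawn per graph with stddev sigma times that graph's own
# node/edge feature stddev.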
def split_graphs_tuple(graphs: GraphsTuple) -> Iterator[GraphsTuple]:
"""Converts several grouped graphs into a list of single graphs."""
n = get_num_graphs(graphs)
nodes = []
node_offsets = [0] + np.cumsum(graphs.n_node).tolist()
for i, j in zip(node_offsets[:-1], node_offsets[1:]):
nodes.append(graphs.nodes[i:j])
edges = []
has_edges = graphs.edges is not None
receivers, senders = [], []
edge_offsets = [0] + np.cumsum(graphs.n_edge).tolist()
for node_offset, i, j in zip(node_offsets[:-1], edge_offsets[:-1],
edge_offsets[1:]):
if has_edges:
edges.append(graphs.edges[i:j])
else:
edges.append(None)
receivers.append(graphs.receivers[i:j] - node_offset)
senders.append(graphs.senders[i:j] - node_offset)
if graphs.globals is None:
g_globals = [None for i in range(n)]
else:
g_globals = [graphs.globals[i] for i in range(n)]
graph_list = map(GraphsTuple, nodes, edges, receivers, senders, g_globals,
graphs.n_node[:, np.newaxis], graphs.n_edge[:, np.newaxis])
return graph_list
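# Note: senders and receivers are re-based by subtracting each graph's node offset,
# so every single-graph GraphsTuple yielded above indexes its own nodes from 0.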
def get_graphs_np(graphs: GraphsTuple, indices=List[int]) -> GraphsTuple:
"""Gets a new graphstuple (numpy) based on a list of indices."""
node_indices = np.insert(np.cumsum(graphs.n_node), 0, 0)
node_slice = np.concatenate(
[np.arange(node_indices[i], node_indices[i + 1]) for i in indices])
nodes = graphs.nodes[node_slice]
edge_indices = np.insert(np.cumsum(graphs.n_edge), 0, 0)
edge_slice = np.concatenate(
[np.arange(edge_indices[i], edge_indices[i + 1]) for i in indices])
edges = graphs.edges[edge_slice] if graphs.edges is not None else None
n_edge = graphs.n_edge[indices]
n_node = graphs.n_node[indices]
offsets = np.repeat(node_indices[indices], graphs.n_edge[indices])
new_offsets = np.insert(np.cumsum(n_node), 0, 0)
senders = graphs.senders[edge_slice] - offsets
receivers = graphs.receivers[edge_slice] - offsets
senders = senders + np.repeat(new_offsets[:-1], n_edge)
receivers = receivers + np.repeat(new_offsets[:-1], n_edge)
g_globals = graphs.globals[indices] if graphs.globals is not None else None
return GraphsTuple(
nodes=nodes,
edges=edges,
globals=g_globals,
senders=senders,
receivers=receivers,
n_node=n_node,
n_edge=n_edge)
def get_graphs_tf(graphs: GraphsTuple, indices: np.ndarray) -> GraphsTuple:
"""Gets a new graphstuple (tf) based on a list of indices."""
node_indices = tf.concat(
[tf.constant([0]), tf.cumsum(graphs.n_node)], axis=0)
node_starts = tf.gather(node_indices, indices)
node_ends = tf.gather(node_indices, indices + 1)
node_slice = tf.ragged.range(node_starts, node_ends).values
nodes = tf.gather(graphs.nodes, node_slice)
edge_indices = tf.concat(
[tf.constant([0]), tf.cumsum(graphs.n_edge)], axis=0)
edge_starts = tf.gather(edge_indices, indices)
edge_ends = tf.gather(edge_indices, indices + 1)
edge_slice = tf.ragged.range(edge_starts, edge_ends).values
edges = tf.gather(graphs.edges,
edge_slice) if graphs.edges is not None else None
n_edge = tf.gather(graphs.n_edge, indices)
n_node = tf.gather(graphs.n_node, indices)
offsets = tf.repeat(node_starts, tf.gather(graphs.n_edge, indices))
senders = tf.gather(graphs.senders, edge_slice) - offsets
receivers = tf.gather(graphs.receivers, edge_slice) - offsets
new_offsets = tf.concat([tf.constant([0]), tf.cumsum(n_node)], axis=0)
senders = senders + tf.repeat(new_offsets[:-1], n_edge)
receivers = receivers + tf.repeat(new_offsets[:-1], n_edge)
g_globals = tf.gather(graphs.globals,
indices) if graphs.globals is not None else None
return GraphsTuple(
nodes=nodes,
edges=edges,
globals=g_globals,
senders=senders,
receivers=receivers,
n_node=n_node,
n_edge=n_edge)
def _interp_array(start: tf.Tensor, end: tf.Tensor,
num_steps: int) -> tf.Tensor:
"""Linearly interpolate 2D tensors, returns 3D tensors.
Args:
start: 2D tensor for start point of interpolation of shape [x,y].
end: 2D tensor as end point of interpolation of shape [x,y] (same as start).
num_steps: number of steps to interpolate.
Returns:
New tensor of shape [num_steps, x, y]
"""
alpha = tf.linspace(0., 1., num_steps)
beta = 1 - alpha
return tf.einsum('a,bc->abc', alpha, end) + tf.einsum('a,bc->abc', beta,
start)
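# Note: with alpha running from 0 to 1, the first slice of the result equals `start`
# and the last slice equals `end`; e.g. _interp_array(a, b, 5) has shape [5, x, y].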
def interpolate_graphs_tuple(
start: GraphsTuple, end: GraphsTuple,
num_steps: int) -> Tuple[GraphsTuple, tf.Tensor, tf.Tensor]:
"""Interpolate two graphs of same shape."""
nodes_interp = _interp_array(start.nodes, end.nodes, num_steps)
edges_interp = _interp_array(start.edges, end.edges, num_steps)
node_steps = tf.tile(nodes_interp[1] - nodes_interp[0], (num_steps, 1))
edge_steps = tf.tile(edges_interp[1] - edges_interp[0], (num_steps, 1))
graphs = []
for nodes, edges in zip(nodes_interp, edges_interp):
graphs.append(end.replace(nodes=nodes, edges=edges))
interp_graph = graph_nets.utils_tf.concat(graphs, axis=0)
return interp_graph, node_steps, edge_steps
| apache-2.0 | -9,002,149,192,423,218,000 | 36.900685 | 83 | 0.653384 | false |
ninjawil/weather-station | scripts/pyenergenie/energenie/Devices.py | 1 | 1064 | # Devices.py 30/09/2015 D.J.Whale
#
# Information about specific Energenie devices
MFRID_ENERGENIE = 0x04
PRODUCTID_C1_MONITOR = 0x01
PRODUCTID_R1_MONITOR_AND_CONTROL = 0x02
PRODUCTID_ETRV = 0x03
CRYPT_PID = 242
CRYPT_PIP = 0x0100
# OpenHEMS does not support a broadcast id, but Energenie added one for their
# MiHome Adaptors. This makes simple discovery possible.
BROADCAST_ID = 0xFFFFFF # energenie broadcast
# TODO put additional products in here from the Energenie directory
def getDescription(mfrid, productid):
mfr = "UNKNOWN"
product = "UNKNOWN"
if mfrid == MFRID_ENERGENIE:
mfr = "Energenie"
if productid == PRODUCTID_C1_MONITOR:
product = "C1 MONITOR"
elif productid == PRODUCTID_R1_MONITOR_AND_CONTROL:
product = "R1 MONITOR/CONTROL"
elif productid == PRODUCTID_ETRV:
product = "eTRV"
return "Manufactuer:%s Product:%s" % (mfr, product)
# END
| mit | -4,096,557,223,199,147,500 | 30.294118 | 76 | 0.611842 | false |
fkie-cad/FACT_core | src/test/acceptance/rest/test_rest_analyze_firmware.py | 1 | 4292 | # pylint: disable=wrong-import-order
import json
import time
import urllib.parse
from multiprocessing import Event, Value
from storage.db_interface_backend import BackEndDbInterface
from test.acceptance.base import TestAcceptanceBase
from test.common_helper import get_firmware_for_rest_upload_test
class TestRestFirmware(TestAcceptanceBase):
def setUp(self):
super().setUp()
self.analysis_finished_event = Event()
self.elements_finished_analyzing = Value('i', 0)
self.db_backend_service = BackEndDbInterface(config=self.config)
self._start_backend(post_analysis=self._analysis_callback)
self.test_container_uid = '418a54d78550e8584291c96e5d6168133621f352bfc1d43cf84e81187fef4962_787'
time.sleep(2) # wait for systems to start
def tearDown(self):
self._stop_backend()
self.db_backend_service.shutdown()
super().tearDown()
def _analysis_callback(self, fo):
self.db_backend_service.add_analysis(fo)
self.elements_finished_analyzing.value += 1
if self.elements_finished_analyzing.value == 4 * 3: # container including 3 files times 3 plugins
self.analysis_finished_event.set()
def _rest_upload_firmware(self):
data = get_firmware_for_rest_upload_test()
rv = self.test_client.put('/rest/firmware', json=data, follow_redirects=True)
assert b'"status": 0' in rv.data, 'rest upload not successful'
assert self.test_container_uid.encode() in rv.data, 'uid not found in rest upload reply'
def _rest_get_analysis_result(self):
rv = self.test_client.get(f'/rest/firmware/{self.test_container_uid}', follow_redirects=True)
assert b'analysis_date' in rv.data, 'rest analysis download not successful'
assert b'software_components' in rv.data, 'rest analysis not successful'
assert b'"device_part": "test_part' in rv.data, 'device part not present'
def _rest_search(self):
query = urllib.parse.quote('{"device_class": "test_class"}')
rv = self.test_client.get(f'/rest/firmware?query={query}', follow_redirects=True)
assert self.test_container_uid.encode() in rv.data, 'test firmware not found in rest search'
def _rest_search_fw_only(self):
query = json.dumps({'sha256': self.test_container_uid.split('_')[0]})
rv = self.test_client.get(f'/rest/firmware?query={urllib.parse.quote(query)}', follow_redirects=True)
assert self.test_container_uid.encode() in rv.data, 'test firmware not found in rest search'
def _rest_update_analysis_bad_analysis(self):
query = urllib.parse.quote('["unknown_system"]')
rv = self.test_client.put(f'/rest/firmware/{self.test_container_uid}?update={query}', follow_redirects=True)
assert 'Unknown analysis system'.encode() in rv.data, "rest analysis update should break on request of non existing system"
def _rest_update_analysis_success(self):
update = urllib.parse.quote(json.dumps(['crypto_material']))
rv = self.test_client.put(f'/rest/firmware/{self.test_container_uid}?update={update}', follow_redirects=True)
assert b'error_message' not in rv.data, 'Error on update request'
def _rest_check_new_analysis_exists(self):
rv = self.test_client.get(f'/rest/firmware/{self.test_container_uid}', follow_redirects=True)
response_data = json.loads(rv.data.decode())
assert response_data['firmware']['analysis']['crypto_material']
assert response_data['firmware']['analysis']['crypto_material']['analysis_date'] > response_data['firmware']['analysis']['software_components']['analysis_date']
def test_run_from_upload_to_show_analysis_and_search(self):
self._rest_upload_firmware()
self.analysis_finished_event.wait(timeout=15)
self.elements_finished_analyzing.value = 4 * 2 # only one plugin to update so we offset with 4 times 2 plugins
self.analysis_finished_event.clear()
self._rest_get_analysis_result()
self._rest_search()
self._rest_search_fw_only()
self._rest_update_analysis_bad_analysis()
self._rest_update_analysis_success()
self.analysis_finished_event.wait(timeout=10)
self._rest_check_new_analysis_exists()
| gpl-3.0 | -2,622,167,854,439,962,600 | 48.906977 | 168 | 0.687092 | false |
warner/magic-wormhole | src/wormhole/test/dilate/test_connection.py | 1 | 12789 | from __future__ import print_function, unicode_literals
import mock
from zope.interface import alsoProvides
from twisted.trial import unittest
from twisted.internet.task import Clock
from twisted.internet.interfaces import ITransport
from ...eventual import EventualQueue
from ..._interfaces import IDilationConnector
from ..._dilation.roles import LEADER, FOLLOWER
from ..._dilation.connection import (DilatedConnectionProtocol, encode_record,
KCM, Open, Ack)
from .common import clear_mock_calls
def make_con(role, use_relay=False):
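    # Test helper: builds the protocol under test plus mocked collaborators -- a fake
    # Noise object, an IDilationConnector mock, an ITransport mock and an EventualQueue
    # driven by a synchronous Clock.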
clock = Clock()
eq = EventualQueue(clock)
connector = mock.Mock()
alsoProvides(connector, IDilationConnector)
n = mock.Mock() # pretends to be a Noise object
n.write_message = mock.Mock(side_effect=[b"handshake"])
c = DilatedConnectionProtocol(eq, role, "desc", connector, n,
b"outbound_prologue\n", b"inbound_prologue\n")
if use_relay:
c.use_relay(b"relay_handshake\n")
t = mock.Mock()
alsoProvides(t, ITransport)
return c, n, connector, t, eq
class Connection(unittest.TestCase):
def test_hashable(self):
c, n, connector, t, eq = make_con(LEADER)
hash(c)
def test_bad_prologue(self):
c, n, connector, t, eq = make_con(LEADER)
c.makeConnection(t)
d = c.when_disconnected()
self.assertEqual(n.mock_calls, [mock.call.start_handshake()])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"outbound_prologue\n")])
clear_mock_calls(n, connector, t)
c.dataReceived(b"prologue\n")
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.loseConnection()])
eq.flush_sync()
self.assertNoResult(d)
c.connectionLost(b"why")
eq.flush_sync()
self.assertIdentical(self.successResultOf(d), c)
def _test_no_relay(self, role):
c, n, connector, t, eq = make_con(role)
t_kcm = KCM()
t_open = Open(seqnum=1, scid=0x11223344)
t_ack = Ack(resp_seqnum=2)
n.decrypt = mock.Mock(side_effect=[
encode_record(t_kcm),
encode_record(t_open),
])
exp_kcm = b"\x00\x00\x00\x03kcm"
n.encrypt = mock.Mock(side_effect=[b"kcm", b"ack1"])
m = mock.Mock() # Manager
c.makeConnection(t)
self.assertEqual(n.mock_calls, [mock.call.start_handshake()])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"outbound_prologue\n")])
clear_mock_calls(n, connector, t, m)
c.dataReceived(b"inbound_prologue\n")
exp_handshake = b"\x00\x00\x00\x09handshake"
if role is LEADER:
# the LEADER sends the Noise handshake message immediately upon
# receipt of the prologue
self.assertEqual(n.mock_calls, [mock.call.write_message()])
self.assertEqual(t.mock_calls, [mock.call.write(exp_handshake)])
else:
# however the FOLLOWER waits until receiving the leader's
# handshake before sending their own
self.assertEqual(n.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
clear_mock_calls(n, connector, t, m)
c.dataReceived(b"\x00\x00\x00\x0Ahandshake2")
if role is LEADER:
# we're the leader, so we don't send the KCM right away
self.assertEqual(n.mock_calls, [
mock.call.read_message(b"handshake2")])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(c._manager, None)
else:
# we're the follower, so we send our Noise handshake, then
# encrypt and send the KCM immediately
self.assertEqual(n.mock_calls, [
mock.call.read_message(b"handshake2"),
mock.call.write_message(),
mock.call.encrypt(encode_record(t_kcm)),
])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [
mock.call.write(exp_handshake),
mock.call.write(exp_kcm)])
self.assertEqual(c._manager, None)
clear_mock_calls(n, connector, t, m)
c.dataReceived(b"\x00\x00\x00\x03KCM")
# leader: inbound KCM means we add the candidate
# follower: inbound KCM means we've been selected.
# in both cases we notify Connector.add_candidate(), and the Connector
# decides if/when to call .select()
self.assertEqual(n.mock_calls, [mock.call.decrypt(b"KCM")])
self.assertEqual(connector.mock_calls, [mock.call.add_candidate(c)])
self.assertEqual(t.mock_calls, [])
clear_mock_calls(n, connector, t, m)
# now pretend this connection wins (either the Leader decides to use
# this one among all the candidates, or we're the Follower and the
# Connector is reacting to add_candidate() by recognizing we're the
# only candidate there is)
c.select(m)
self.assertIdentical(c._manager, m)
if role is LEADER:
# TODO: currently Connector.select_and_stop_remaining() is
# responsible for sending the KCM just before calling c.select()
# iff we're the LEADER, therefore Connection.select won't send
# anything. This should be moved to c.select().
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(m.mock_calls, [])
c.send_record(KCM())
self.assertEqual(n.mock_calls, [
mock.call.encrypt(encode_record(t_kcm)),
])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(exp_kcm)])
self.assertEqual(m.mock_calls, [])
else:
# follower: we already sent the KCM, do nothing
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(m.mock_calls, [])
clear_mock_calls(n, connector, t, m)
c.dataReceived(b"\x00\x00\x00\x04msg1")
self.assertEqual(n.mock_calls, [mock.call.decrypt(b"msg1")])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(m.mock_calls, [mock.call.got_record(t_open)])
clear_mock_calls(n, connector, t, m)
c.send_record(t_ack)
exp_ack = b"\x06\x00\x00\x00\x02"
self.assertEqual(n.mock_calls, [mock.call.encrypt(exp_ack)])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"\x00\x00\x00\x04ack1")])
self.assertEqual(m.mock_calls, [])
clear_mock_calls(n, connector, t, m)
c.disconnect()
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.loseConnection()])
self.assertEqual(m.mock_calls, [])
clear_mock_calls(n, connector, t, m)
def test_no_relay_leader(self):
return self._test_no_relay(LEADER)
def test_no_relay_follower(self):
return self._test_no_relay(FOLLOWER)
def test_relay(self):
c, n, connector, t, eq = make_con(LEADER, use_relay=True)
c.makeConnection(t)
self.assertEqual(n.mock_calls, [mock.call.start_handshake()])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"relay_handshake\n")])
clear_mock_calls(n, connector, t)
c.dataReceived(b"ok\n")
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"outbound_prologue\n")])
clear_mock_calls(n, connector, t)
c.dataReceived(b"inbound_prologue\n")
self.assertEqual(n.mock_calls, [mock.call.write_message()])
self.assertEqual(connector.mock_calls, [])
exp_handshake = b"\x00\x00\x00\x09handshake"
self.assertEqual(t.mock_calls, [mock.call.write(exp_handshake)])
clear_mock_calls(n, connector, t)
def test_relay_jilted(self):
c, n, connector, t, eq = make_con(LEADER, use_relay=True)
d = c.when_disconnected()
c.makeConnection(t)
self.assertEqual(n.mock_calls, [mock.call.start_handshake()])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"relay_handshake\n")])
clear_mock_calls(n, connector, t)
c.connectionLost(b"why")
eq.flush_sync()
self.assertIdentical(self.successResultOf(d), c)
def test_relay_bad_response(self):
c, n, connector, t, eq = make_con(LEADER, use_relay=True)
c.makeConnection(t)
self.assertEqual(n.mock_calls, [mock.call.start_handshake()])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"relay_handshake\n")])
clear_mock_calls(n, connector, t)
c.dataReceived(b"not ok\n")
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.loseConnection()])
clear_mock_calls(n, connector, t)
def test_follower_combined(self):
c, n, connector, t, eq = make_con(FOLLOWER)
t_kcm = KCM()
t_open = Open(seqnum=1, scid=0x11223344)
n.decrypt = mock.Mock(side_effect=[
encode_record(t_kcm),
encode_record(t_open),
])
exp_kcm = b"\x00\x00\x00\x03kcm"
n.encrypt = mock.Mock(side_effect=[b"kcm", b"ack1"])
m = mock.Mock() # Manager
c.makeConnection(t)
self.assertEqual(n.mock_calls, [mock.call.start_handshake()])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [mock.call.write(b"outbound_prologue\n")])
clear_mock_calls(n, connector, t, m)
c.dataReceived(b"inbound_prologue\n")
exp_handshake = b"\x00\x00\x00\x09handshake"
# however the FOLLOWER waits until receiving the leader's
# handshake before sending their own
self.assertEqual(n.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
clear_mock_calls(n, connector, t, m)
c.dataReceived(b"\x00\x00\x00\x0Ahandshake2")
# we're the follower, so we send our Noise handshake, then
# encrypt and send the KCM immediately
self.assertEqual(n.mock_calls, [
mock.call.read_message(b"handshake2"),
mock.call.write_message(),
mock.call.encrypt(encode_record(t_kcm)),
])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [
mock.call.write(exp_handshake),
mock.call.write(exp_kcm)])
self.assertEqual(c._manager, None)
clear_mock_calls(n, connector, t, m)
# the leader will select a connection, send the KCM, and then
# immediately send some more data
kcm_and_msg1 = (b"\x00\x00\x00\x03KCM" +
b"\x00\x00\x00\x04msg1")
c.dataReceived(kcm_and_msg1)
# follower: inbound KCM means we've been selected.
# in both cases we notify Connector.add_candidate(), and the Connector
# decides if/when to call .select()
self.assertEqual(n.mock_calls, [mock.call.decrypt(b"KCM"),
mock.call.decrypt(b"msg1")])
self.assertEqual(connector.mock_calls, [mock.call.add_candidate(c)])
self.assertEqual(t.mock_calls, [])
clear_mock_calls(n, connector, t, m)
# now pretend this connection wins (either the Leader decides to use
# this one among all the candidates, or we're the Follower and the
# Connector is reacting to add_candidate() by recognizing we're the
# only candidate there is)
c.select(m)
self.assertIdentical(c._manager, m)
# follower: we already sent the KCM, do nothing
self.assertEqual(n.mock_calls, [])
self.assertEqual(connector.mock_calls, [])
self.assertEqual(t.mock_calls, [])
self.assertEqual(m.mock_calls, [mock.call.got_record(t_open)])
clear_mock_calls(n, connector, t, m)
| mit | -7,842,375,537,309,062,000 | 40.522727 | 82 | 0.610916 | false |
recsm/SQP | sqp/views_ui_model_views.py | 1 | 1209 |
from sqp.views_ui_utils import URL
def question_base(question):
return {
"id": question.id,
"url": URL.question(question.id),
"urlCodingHistory": URL.question_coding_history(question.id),
"itemId": question.item.id,
"studyId": question.item.study.id,
"languageIso" : question.language.iso,
"countryIso" : question.country.iso,
"studyName": question.item.study.name,
"itemPart": question.item.main_or_supplementary(),
"itemCode": question.item.admin,
"itemName" : question.item.name,
"country": question.country.name,
"countryIso": question.country.iso,
"countryPrediction": question.country_prediction.name,
"countryPredictionIso": question.country_prediction.iso,
"language": question.language.name,
"itemDescription": question.item.concept,
"hasMTMM": question.rel
} | mit | -115,245,372,784,078,450 | 42.214286 | 77 | 0.490488 | false |
mesosphere/dcos-template | dcos_template/tmpl.py | 1 | 1823 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import jinja2
from . import log, mesos
class Template(object):
def __init__(self, val):
self.set_opts(val)
self.get_template()
def render(self):
tmpl = jinja2.Template(self.source).render(data={
"services": mesos.services()
})
log.debug(tmpl)
with open(self.dest_path, "w") as fobj:
fobj.write(tmpl)
def set_opts(self, opts):
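        # Example (values are made up): "web.conf.tmpl:/etc/web.conf:reload-web" splits
        # into source_path, dest_path and cmd -- presumably the command to run once the
        # template has been rendered.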
try:
self.source_path, self.dest_path, self.cmd = opts.split(":", 3)
except:
raise argparse.ArgumentTypeError(
"'{0}' is not valid. It must be of the form " \
"'source:dest:cmd'".format(opts))
def get_template(self):
try:
with open(self.source_path, 'r') as fobj:
self.source = fobj.read()
except:
raise argparse.ArgumentTypeError(
"'{0}' cannot be opened. Make sure the path is valid and " \
"you have permissions".format(self.source_path))
| apache-2.0 | 6,061,769,448,179,644,000 | 34.745098 | 76 | 0.641799 | false |
christippett/django-postmark-inbound | postmark_inbound/models.py | 1 | 3194 | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from .settings import inbound_mail_options as option
@python_2_unicode_compatible
class InboundMail(models.Model):
from_name = models.CharField(blank=True, max_length=255)
from_email = models.EmailField(max_length=254)
to_email = models.CharField(blank=True, max_length=255)
cc_email = models.CharField(blank=True, max_length=255)
bcc_email = models.CharField(blank=True, max_length=255)
original_recipient = models.CharField(blank=True, max_length=255)
subject = models.CharField(blank=True, max_length=255)
message_id = models.CharField(blank=True, max_length=255)
reply_to = models.CharField(blank=True, max_length=255)
mailbox_hash = models.CharField(blank=True, max_length=255)
date = models.DateTimeField()
text_body = models.TextField(blank=True)
html_body = models.TextField(blank=True)
stripped_text_reply = models.TextField(blank=True)
tag = models.CharField(blank=True, max_length=255)
def __str__(self):
return ('%s: %s' % (self.from_email, self.subject))
def has_attachment(self):
return self.attachments.all().count() > 0
has_attachment.boolean = True
has_attachment.short_description = 'Attachment'
@property
def from_full(self):
return self.address_details.get(address_type='FROM')
@property
def to_full(self):
return self.address_details.filter(address_type='TO')
@property
def cc_full(self):
return self.address_details.filter(address_type='CC')
@property
def bcc_full(self):
return self.address_details.filter(address_type='BCC')
@python_2_unicode_compatible
class InboundMailHeader(models.Model):
parent_mail = models.ForeignKey(InboundMail, related_name='headers', on_delete=models.CASCADE)
name = models.CharField(max_length=255)
value = models.TextField(blank=True)
def __str__(self):
return ('%s: %s' % (self.name, self.value))
@python_2_unicode_compatible
class InboundMailAttachment(models.Model):
parent_mail = models.ForeignKey(InboundMail, related_name='attachments', on_delete=models.CASCADE)
name = models.CharField(max_length=255)
content_type = models.CharField(max_length=255)
content = models.FileField(upload_to=option.ATTACHMENT_UPLOAD_TO)
content_id = models.CharField(blank=True, max_length=255)
content_length = models.IntegerField()
def __str__(self):
return ('%s (%s)' % (self.name, self.content_type))
# Declare sources of email addresses
ADDRESS_TYPES = tuple(map(lambda x: (x, x), ['FROM', 'TO', 'CC', 'BCC']))
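# i.e. ADDRESS_TYPES == (('FROM', 'FROM'), ('TO', 'TO'), ('CC', 'CC'), ('BCC', 'BCC')),
# the (value, label) pairs Django expects for a field's `choices`.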
@python_2_unicode_compatible
class InboundMailDetail(models.Model):
parent_mail = models.ForeignKey(InboundMail, related_name='address_details', on_delete=models.CASCADE)
address_type = models.CharField(choices=ADDRESS_TYPES, max_length=10)
email = models.EmailField(blank=True, max_length=254)
name = models.CharField(blank=True, max_length=255)
mailbox_hash = models.CharField(blank=True, max_length=255)
def __str__(self):
return ('%s (%s)' % (self.email, self.address_type))
| mit | 3,361,655,656,052,681,000 | 36.139535 | 106 | 0.701628 | false |
Skyscanner/pages | test/test_label.py | 1 | 2252 | ############################################################################
# Copyright 2015 Skyscanner Ltd #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
############################################################################
import unittest
from hamcrest import assert_that, equal_to
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.command import Command
from pages.standard_components.label import Label
from test.utils.mocks import MockedWebDriver
class LabelTest(unittest.TestCase):
"""
Unit test for Label class.
"""
def __init__(self, methodName='runTest'):
super(LabelTest, self).__init__(methodName)
self.driver = MockedWebDriver()
def test_get_for_attribute(self):
self.driver.set_dom_element([By.ID, 'label'], return_values=[{'name': {'for': 'attr'}}])
self.driver.set_expected_command(Command.GET_ELEMENT_ATTRIBUTE, {'sessionId': self.driver.session_id,
'id': self.driver.get_id_for_stored_element([By.ID, 'label'])})
#
image_src = Label(self.driver, 'label', [By.ID, 'label']).get_for_attribute()
#
assert_that(image_src, equal_to('attr'), 'attribute for image should match')
assert_that(self.driver.has_fulfilled_expectations(), equal_to(True))
| apache-2.0 | -4,472,129,643,436,705,000 | 52.619048 | 109 | 0.515986 | false |
polyaxon/polyaxon | sdks/python/http_client/v1/polyaxon_sdk/models/v1_matrix.py | 1 | 7755 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.10.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1Matrix(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'random': 'V1RandomSearch',
'grid': 'V1GridSearch',
'hyperband': 'V1Hyperband',
'bayes': 'V1Bayes',
'hyperopt': 'V1Hyperopt',
'iterative': 'V1Iterative',
'mapping': 'V1Mapping'
}
attribute_map = {
'random': 'random',
'grid': 'grid',
'hyperband': 'hyperband',
'bayes': 'bayes',
'hyperopt': 'hyperopt',
'iterative': 'iterative',
'mapping': 'mapping'
}
def __init__(self, random=None, grid=None, hyperband=None, bayes=None, hyperopt=None, iterative=None, mapping=None, local_vars_configuration=None): # noqa: E501
"""V1Matrix - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._random = None
self._grid = None
self._hyperband = None
self._bayes = None
self._hyperopt = None
self._iterative = None
self._mapping = None
self.discriminator = None
if random is not None:
self.random = random
if grid is not None:
self.grid = grid
if hyperband is not None:
self.hyperband = hyperband
if bayes is not None:
self.bayes = bayes
if hyperopt is not None:
self.hyperopt = hyperopt
if iterative is not None:
self.iterative = iterative
if mapping is not None:
self.mapping = mapping
@property
def random(self):
"""Gets the random of this V1Matrix. # noqa: E501
:return: The random of this V1Matrix. # noqa: E501
:rtype: V1RandomSearch
"""
return self._random
@random.setter
def random(self, random):
"""Sets the random of this V1Matrix.
:param random: The random of this V1Matrix. # noqa: E501
:type: V1RandomSearch
"""
self._random = random
@property
def grid(self):
"""Gets the grid of this V1Matrix. # noqa: E501
:return: The grid of this V1Matrix. # noqa: E501
:rtype: V1GridSearch
"""
return self._grid
@grid.setter
def grid(self, grid):
"""Sets the grid of this V1Matrix.
:param grid: The grid of this V1Matrix. # noqa: E501
:type: V1GridSearch
"""
self._grid = grid
@property
def hyperband(self):
"""Gets the hyperband of this V1Matrix. # noqa: E501
:return: The hyperband of this V1Matrix. # noqa: E501
:rtype: V1Hyperband
"""
return self._hyperband
@hyperband.setter
def hyperband(self, hyperband):
"""Sets the hyperband of this V1Matrix.
:param hyperband: The hyperband of this V1Matrix. # noqa: E501
:type: V1Hyperband
"""
self._hyperband = hyperband
@property
def bayes(self):
"""Gets the bayes of this V1Matrix. # noqa: E501
:return: The bayes of this V1Matrix. # noqa: E501
:rtype: V1Bayes
"""
return self._bayes
@bayes.setter
def bayes(self, bayes):
"""Sets the bayes of this V1Matrix.
:param bayes: The bayes of this V1Matrix. # noqa: E501
:type: V1Bayes
"""
self._bayes = bayes
@property
def hyperopt(self):
"""Gets the hyperopt of this V1Matrix. # noqa: E501
:return: The hyperopt of this V1Matrix. # noqa: E501
:rtype: V1Hyperopt
"""
return self._hyperopt
@hyperopt.setter
def hyperopt(self, hyperopt):
"""Sets the hyperopt of this V1Matrix.
:param hyperopt: The hyperopt of this V1Matrix. # noqa: E501
:type: V1Hyperopt
"""
self._hyperopt = hyperopt
@property
def iterative(self):
"""Gets the iterative of this V1Matrix. # noqa: E501
:return: The iterative of this V1Matrix. # noqa: E501
:rtype: V1Iterative
"""
return self._iterative
@iterative.setter
def iterative(self, iterative):
"""Sets the iterative of this V1Matrix.
:param iterative: The iterative of this V1Matrix. # noqa: E501
:type: V1Iterative
"""
self._iterative = iterative
@property
def mapping(self):
"""Gets the mapping of this V1Matrix. # noqa: E501
:return: The mapping of this V1Matrix. # noqa: E501
:rtype: V1Mapping
"""
return self._mapping
@mapping.setter
def mapping(self, mapping):
"""Sets the mapping of this V1Matrix.
:param mapping: The mapping of this V1Matrix. # noqa: E501
:type: V1Mapping
"""
self._mapping = mapping
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1Matrix):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1Matrix):
return True
return self.to_dict() != other.to_dict()
| apache-2.0 | 6,591,988,645,365,214,000 | 25.467577 | 165 | 0.572921 | false |
arhote/exchange | exchange/themes/migrations/0001_initial.py | 1 | 4513 | from __future__ import unicode_literals
from django.db import migrations, models
import exchange.themes.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Theme',
fields=[
('id', models.AutoField(
verbose_name='ID',
serialize=False,
auto_created=True,
primary_key=True
)),
('name', models.CharField(max_length=28)),
('description', models.CharField(max_length=64, blank=True)),
('default_theme', models.BooleanField(
default=False,
editable=False
)),
('active_theme', models.BooleanField(default=False)),
('title', models.CharField(
default=None,
max_length=32,
null=True,
verbose_name=b'Landing Page Title',
blank=True
)),
('tagline', models.CharField(
default=None,
max_length=64,
null=True,
verbose_name=b'Landing Page Tagline',
blank=True
)),
('running_hex', exchange.themes.fields.ColorField(
default=b'0F1A2C',
max_length=7,
null=True,
verbose_name=b'Header Footer Color',
blank=True
)),
('running_text_hex', exchange.themes.fields.ColorField(
default=b'FFFFFF',
max_length=7,
null=True,
verbose_name=b'Header Footer Text Color',
blank=True
)),
('hyperlink_hex', exchange.themes.fields.ColorField(
default=b'0F1A2C',
max_length=7,
null=True,
verbose_name=b'Hyperlink Color',
blank=True
)),
('pb_text', models.CharField(
default=b'Boundless Spatial',
max_length=32,
blank=True,
help_text=b'Text for the Powered by section in the footer',
null=True,
verbose_name=b'Footer Link Text'
)),
('pb_link', models.URLField(
default=b'http://boundlessgeo.com/',
blank=True,
help_text=b'Link for the Powered by section in the footer',
null=True,
verbose_name=b'Footer Link URL'
)),
('docs_link', models.URLField(
default=None,
blank=True,
help_text=b'Link for the Documentation',
null=True,
verbose_name=b'Documentation Link URL'
)),
('docs_text', models.CharField(
default=b'Documentation',
max_length=32,
blank=True,
help_text=b'Text for the documentation link',
null=True,
verbose_name=b'Documentation Text'
)),
('background_logo', models.ImageField(
default=None,
upload_to=b'theme/img/',
blank=True,
help_text=b'Note: will resize to 1440px (w) 350px (h)',
null=True,
verbose_name=b'Background Image'
)),
('primary_logo', models.ImageField(
default=None,
upload_to=b'theme/img/',
blank=True,
help_text=b'Note: will resize to height 96px',
null=True,
verbose_name=b'Primary Logo'
)),
('banner_logo', models.ImageField(
default=None,
upload_to=b'theme/img/',
blank=True,
help_text=b'Note: will resize to height 35px',
null=True,
verbose_name=b'Header Logo'
)),
],
),
]
| gpl-3.0 | -7,043,302,363,441,026,000 | 36.92437 | 79 | 0.41458 | false |
mirestrepo/voxels-at-lems | ply_util/thresh_ply.py | 1 | 3284 | #!/usr/bin/env python
# encoding: utf-8
"""
Author: Isabel Restrepo
Script to threshold a .ply file based on percentiles.
CAUTION! - This method is very memory inefficient
"""
import os
import sys
import numpy as np
from scipy import stats
from optparse import OptionParser
def write_ply(file_out, data):
#Create header
rows, cols = data.shape
header = ('ply\n' +
'format ascii 1.0\n' +
'element vertex ' + str(rows) + '\n' +
'property float x\nproperty float y\nproperty float z\n' +
'property float nx\nproperty float ny\nproperty float nz\n' +
'property float prob\nproperty float vis\nproperty float nmag\n' +
'property uchar diffuse_red\nproperty uchar diffuse_green\nproperty uchar diffuse_blue\n'+
'end_header\n');
fid = open( file_out , 'w' )
fid.write( header )
np.savetxt( fid , data , fmt='%.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f %d %d %d', delimiter=' ')
fid.close()
#Threshold using pvn and bounding box
def thresh_bbox(file_in, file_out,
min_pt, max_pt):
fid = open(file_in, 'r')
data_full = np.genfromtxt(fid, dtype=float, delimiter=' ', skip_header=16);
fid.close()
data = data_full[(data_full[:,0] > min_pt[0]), :]
data = data[(data[:,0] < max_pt[0]), :]
data = data[(data[:,1] > min_pt[1]), :]
data = data[(data[:,1] < max_pt[1]), :]
data = data[(data[:,2] > min_pt[2]), :]
data = data[(data[:,2] < max_pt[2]), :]
write_ply(file_out, data)
#Threshold using a bounding sphere
def thresh_bsphere(file_in, file_out,
centroid, max_pt):
fid = open(file_in, 'r')
data_full = np.genfromtxt(fid, dtype=float, delimiter=' ', skip_header=16);
fid.close()
rad = (max_pt - centroid) * (max_pt - centroid);
radXY = rad[0] + rad[1]
radZ = rad[2]
dx = (data_full[:,0] - centroid[0])*(data_full[:,0] - centroid[0])
dy = (data_full[:,1] - centroid[1])*(data_full[:,1] - centroid[1])
indeces = (dx + dy) < radXY
data = data_full[indeces, :]
dz = (data[:,2] - centroid[2])*(data[:,2] - centroid[2])
data = data[ dz < radZ, :]
write_ply(file_out, data)
def thresh_pvn( file_in, out_basename):
fid = open(file_in, 'r')
data = np.genfromtxt(fid, dtype=float, delimiter=' ', skip_header=16);
fid.close()
#normalize visibility
data[:,7] = data[:,7]/(data[:,7].max());
#normalize nmag
data[:,8] = data[:,8]/(data[:,8].max());
percentile = [90, 95, 99];
data_measure = data[:,6] *data[:,7] *data[:,8]
for p in percentile:
print 'Percentile: ' , p
file_out = out_basename + '_' + str(p) + ".ply"
indices = (data_measure > stats.scoreatpercentile(data_measure, p));
filtered_data = data[indices, :];
write_ply(file_out, filtered_data)
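# Note: the score is prob * vis * nmag (columns 6-8 of the .ply data, after the
# visibility and normal-magnitude columns are normalised above); points above the
# 90th/95th/99th percentile of that score end up in out_basename_90/_95/_99.ply.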
if __name__ == "__main__":
#######################################################
# handle inputs #
#######################################################
parser = OptionParser()
parser.add_option("-i", action="store", type="string", dest="file_in", default="", help=".PLY file to threshold")
parser.add_option("-o", action="store", type="string", dest="out_basename", default="", help="Output files are saved as out_basename_%.ply")
(opts, args) = parser.parse_args()
thresh_pvn(opts.file_in,opts.out_basename)
| bsd-2-clause | 4,835,252,583,100,231,000 | 30.883495 | 144 | 0.584957 | false |
termoshtt/continuate | continuate/test_misc.py | 1 | 1398 | # -*- coding: utf-8 -*-
import numpy as np
import unittest
from . import misc
@misc.array_adapter
def apply_func(func, x):
return func(x)
def sample_gen():
for i in range(10):
yield i, i*i
class TestArrayAdapter(unittest.TestCase):
def test_apply_first(self):
f = lambda x: 2*x
t = 2, 3, 4
s = misc._apply(f, t)
self.assertEqual(type(s), tuple)
self.assertEqual(s, (4, 3, 4))
def test_apply_first_gen(self):
f = lambda x: 2*x
G = misc._apply_first_gen(f, sample_gen())
for t, (a, b) in enumerate(G):
self.assertEqual(a, 2*t)
self.assertEqual(b, t*t)
def test_apply(self):
f = lambda x: 2*x
self.assertEqual(misc._apply(f, 2), 4)
s = misc._apply(f, (2, 3, 4))
self.assertEqual(type(s), tuple)
self.assertEqual(s, (4, 3, 4))
G = misc._apply(f, sample_gen())
for t, (a, b) in enumerate(G):
self.assertEqual(a, 2*t)
self.assertEqual(b, t*t)
def test_array_adapter(self):
shape = (2, 3)
def f(x):
self.assertEqual(x.shape, shape)
return 2*x
x = np.ones(shape)
ad = misc.ArrayAdapter(f, x)
y = ad.convert(x)
np.testing.assert_allclose(y, np.ones(6))
y = ad(y)
np.testing.assert_allclose(y, 2*np.ones(6))
| mit | -6,239,520,831,284,313,000 | 23.964286 | 51 | 0.527897 | false |
FenrirUnbound/kessel-run | tests/timings_test.py | 1 | 1450 | import json
import mock
import unittest
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from main import app
from models.timing import Timing
class TimingsTest(unittest.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
self.app = app.test_client()
@mock.patch('services.gmaps.googlemaps.Client')
def test_mark_route_datapoint(self, mock_gmaps):
map_payload = [
{
'summary': 'US-101 S',
'legs': [
{
'distance': { 'text': 'distance' },
'duration_in_traffic': { 'value': 1111 }
}
],
'duration': { 'value': 9999 } # default duration
}
]
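        # The assertions at the end of this test expect duration_in_traffic (1111) to
        # take precedence over the top-level default duration (9999), and the stored
        # distance to be the leg's distance text.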
# todo: assert parameters
mock_gmaps.return_value.directions.return_value = map_payload
endpoint = '/api/v1/timings/1'
response = self.app.get(endpoint)
self.assertEqual(response.status_code, 204)
key = ndb.Key('Route', 1)
query_results = Timing.query(ancestor=key).fetch(2)
self.assertEqual(len(query_results), 1)
test_data = query_results.pop()
self.assertEqual(test_data.duration, 1111)
self.assertEqual(test_data.distance, 'distance')
| apache-2.0 | 7,189,558,713,362,636,000 | 30.521739 | 69 | 0.571034 | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/ubuntuone-client/ubuntuone/syncdaemon/u1fsfsm.py | 1 | 182120 | """This is a generated python file"""
# make pylint accept this
# pylint: disable-msg=C0301
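# Structure of the generated table: state_machine['events'] maps each event name to a
# list of transition rows; every row holds the ACTION / ACTION_FUNC to run, free-form
# COMMENTS, a PARAMETERS guard, the STATE required before the event and the resulting
# STATE_OUT ('*' appears to mean "any value", '=' "unchanged" and 'NA' "not applicable").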
state_machine = {'events': {u'AQ_DIR_DELETE_ERROR': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we couldnt delete from the server. Re create.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we couldnt delete from the server. Re create.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT, recreate from deleted uuid',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'PANIC',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
u'AQ_DIR_DELETE_OK': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something the user recreated, someone else is taking care of uploading this changes',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something the user recreated, someone else is taking care of uploading this changes',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'somehow we lost ordering of stuff and we got a new file and changes since we tried to delete this file',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DIR_NEW_ERROR': [{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'filedir_error_in_creation',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DIR_NEW_OK': [{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u"it's a file now",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u'the dir was now gone',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, server_uuid=server_uuid)\nPANIC',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, server_uuid=server_uuid)\nRESCAN',
'ACTION_FUNC': u'new_local_dir_created',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DOWNLOAD_DOES_NOT_EXIST': [{'ACTION': u'',
'ACTION_FUNC': u'delete_file',
'COMMENTS': u"Directory doesn't exist anymore, remove it",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'delete_file',
'COMMENTS': u"File doesn't exist anymore, remove it",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'conflict_and_delete',
'COMMENTS': u"File doesn't exist on server, but has local changes",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DOWNLOAD_ERROR': [{'ACTION': u'md.remove_partial(uuid);',
'ACTION_FUNC': u'remove_partial',
'COMMENTS': u'error while downloading, remove the partial file',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.remove_partial(uuid);',
'ACTION_FUNC': u'remove_partial',
'COMMENTS': u'error while downloading, remove the partial file',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'PANIC',
'ACTION_FUNC': u'remove_partial',
'COMMENTS': u'Should DESPAIR, but ATM we handle this in the handle_AQ_DOWNLOAD_ERROR as we have an extra error type returned by AQ when the .partial file is deleted by the user',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
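# AQ_DOWNLOAD_FINISHED: the download is committed (commit_file) only in the
# "vanilla" case where the node still points at the server hash; stale or
# cancelled downloads are ignored, impossible hash combinations are flagged as
# fallacies, and directory downloads DESPAIR since directories are no longer
# downloaded this way.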
u'AQ_DOWNLOAD_FINISHED': [{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'file was removed while we were downloading',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'NONE, T, F is a fallacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'NONE, F, T is a fallacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'SERVER, T, T is a fallacy (SERVER implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.commit_partial(uuid, local_hash=hash)',
'ACTION_FUNC': u'commit_file',
'COMMENTS': u'this is the vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'not what we want',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'another download is already in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'LOCAL, T, T is a fallacy (LOCAL implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u"we don't download directories anymore",
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'!NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'merge_from_partial(uuid)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u"we don't download directories anymore",
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u"we don't download directories anymore",
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}}],
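# AQ_FILE_DELETE_ERROR: the server refused a delete (not authorized or not
# available). The rows below recreate the node locally, move the user's
# replacement to conflict, or DESPAIR/PANIC on combinations that should not
# occur.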
u'AQ_FILE_DELETE_ERROR': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u"the user deleted something we couldn't delete from the server. Re-create.",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u"the user deleted something we couldn't delete from the server. Re-create.",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT, recreate from deleted uuid',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re-download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re-download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re-download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'PANIC',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
u'AQ_FILE_DELETE_OK': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'/c',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something the user recreated, someone else is taking care of uploading these changes',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'somehow we lost ordering of stuff and we got a new file and changes since we tried to delete this file',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_FILE_MOVE_OK': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'deleted locally',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}}],
u'AQ_FILE_NEW_ERROR': [{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'filedir_error_in_creation',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_FILE_NEW_OK': [{'ACTION': u'md.set(mdid, server_uuid=server_uuid)',
'ACTION_FUNC': u'new_local_file_created',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, server_uuid=server_uuid)',
'ACTION_FUNC': u'new_local_file_created',
'COMMENTS': u'we got IN_FILE_CHANGED and HQ_HASH_NEW between IN_FILE_NEW and AQ_FILE_NEW_OK',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u'file deleted locally',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u"Node got node_id with a SV_FILE_NEW and now it's uploading something",
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u"it's a directory now",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_MOVE_ERROR': [{'ACTION': u'md.create(path=path, uuid=target_uuid, type=type)\naq.query(uuid=target_uuid)',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'deleted after local move and move failed on server. ',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'move file to conflict\nquery(uuid=source.parent)\nquery(uuid=dest.parent)\n',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'something bad happened. Conflict and rescan',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_MOVE_OK': [{'ACTION': u'pass',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'deleted after move',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'everything good',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_UNLINK_ERROR': [{'ACTION': u'remove the node from trash',
'ACTION_FUNC': u'remove_trash',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_UNLINK_OK': [{'ACTION': u'remove the node from trash',
'ACTION_FUNC': u'remove_trash',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_UPLOAD_ERROR': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we never try to upload directories',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'!NA',
u'not_available': u'!NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
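# AQ_UPLOAD_FINISHED: commit_upload records the new server hash while the
# upload is still current; reput_file_from_ok reschedules the upload when the
# local content changed again while it was in flight; cancel_and_commit covers
# an upload we failed to cancel; directories are never uploaded, so those rows
# DESPAIR.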
u'AQ_UPLOAD_FINISHED': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'reput_file_from_ok',
'COMMENTS': u'we finished an upload, but this upload should have been cancelled. So the real upload will conflict. Reschedule upload.',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'cancel download; commit upload',
'ACTION_FUNC': u'cancel_and_commit',
'COMMENTS': u"we couldn't cancel an upload and we overwrote what was on the server",
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'reput_file_from_ok',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we never try to upload directories',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
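# The FS_* events below come from the local filesystem watcher, as opposed to
# the AQ_* replies above, which appear to come back from the action queue
# talking to the server.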
u'FS_DIR_CREATE': [{'ACTION': u'mdid = md.create(path=path)\naq.makefile(mdid)',
'ACTION_FUNC': u'new_local_dir',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_DIRECTORY_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_DIRECTORY_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'...?',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_DIRECTORY_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}}],
u'FS_DIR_DELETE': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'this is the result of a delete we did',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.delete_file(uuid, type=type); md.remove(uuid)',
'ACTION_FUNC': u'delete_on_server',
'COMMENTS': u'when deleting files we remove the metadata, server rescan will find this again and download it if we are shut down before this delete has gone up. Generations may break this.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'delete of file when it should be a dir is bad',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'delete of file when it should be a dir is bad',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'md.cancel_download(md); md.remove(uuid)',
'ACTION_FUNC': u'deleted_dir_while_downloading',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}}],
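# FS_DIR_MOVE and FS_FILE_MOVE share the same shape: ignore the side effect of
# a move we performed ourselves, propagate a clean move with aq.move(), and
# handle moves that race with an in-flight upload (moved_dirty_local) or
# download (moved_dirty_server).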
u'FS_DIR_MOVE': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'side effect of a local move',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.move()',
'ACTION_FUNC': u'client_moved',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'cancel upload; move; restart upload',
'ACTION_FUNC': u'moved_dirty_local',
'COMMENTS': u'we got a move while we were uploading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'cancel_download; move; query',
'ACTION_FUNC': u'moved_dirty_server',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}}],
u'FS_FILE_CLOSE_WRITE': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'changes from a file that no longer exists',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u"event can't happen in directories",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'hq.insert(path)',
'ACTION_FUNC': u'calculate_hash',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
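# FS_FILE_CREATE: only a brand-new path creates metadata and a file on the
# server; creations for nodes we already track (e.g. editors that move the file
# to an ignored name and recreate it) are deliberately ignored.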
u'FS_FILE_CREATE': [{'ACTION': u'mdid = md.create(path=path)\naq.makefile(mdid)',
'ACTION_FUNC': u'new_local_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'ignore this. We created the file.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"As we're ignoring some files, we are in the situation where some editors move the file to something we ignore, and then create the file again, so we receive the FS_FILE_CREATE for a node that we actually have",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"Same as before, but we're uploading that node we already have.",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'log warning',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_FILE_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}}],
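# FS_FILE_DELETE: a local delete. Besides the plain delete_on_server case, the
# rows cancel any in-flight upload or download for the node before deleting it
# on the server, and DESPAIR when a file delete arrives for a node whose
# metadata says it is a directory.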
u'FS_FILE_DELETE': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'this is the result of a delete we did',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.delete_file(uuid, type=type); md.remove(uuid)',
'ACTION_FUNC': u'delete_on_server',
'COMMENTS': u'when deleting files we remove the metadata, server rescan will find this again and download it if we are shut down before this delete has gone up. Generations may break this.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.cancel_upload(uuid); aq.delete_file(uuid); md.remove(uuid)',
'ACTION_FUNC': u'cancel_upload_and_delete_on_server',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.cancel_download(uuid); aq.delete_file(uuid)\n md.remove(uuid)',
'ACTION_FUNC': u'cancel_download_and_delete_on_server',
'COMMENTS': u'This is policy. We could declare this to be a conflict. But we should assume that the client knows what he is doing',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}}],
u'FS_FILE_MOVE': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'side effect of a local move',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.move()',
'ACTION_FUNC': u'client_moved',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'cancel upload; move; restart upload',
'ACTION_FUNC': u'moved_dirty_local',
'COMMENTS': u'we got a move while we were uploading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'cancel_download; move; query',
'ACTION_FUNC': u'moved_dirty_server',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}}],
u'HQ_HASH_ERROR': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'it was deleted at some point',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we should never be hashing a directory',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'hq.insert(path)',
'ACTION_FUNC': u'calculate_hash',
'COMMENTS': u'we need to re-send the hash to the HQ',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
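# HQ_HASH_NEW: a freshly computed content hash arrives (presumably from the
# hasher). Depending on how it compares with the stored local and server
# hashes, the rows below just save the stat (the file was only touched), upload
# the new content, converge onto an in-flight download, or declare a conflict.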
u'HQ_HASH_NEW': [{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, stat=stat)',
'ACTION_FUNC': u'save_stat',
'COMMENTS': u'hash == local_hash == server_hash; nothing changed but the file was \u201ctouched\u201d',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'reput_file',
'COMMENTS': u'just to be on the safe side, we try to put the file again.',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, local_hash=hash)\naq.putfile(*mdid)',
'ACTION_FUNC': u'put_file',
'COMMENTS': u'plain \u201cuser modified the file on this machine\u201d case',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"we don't send directories to hq. This is old. Ignore.",
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'the directories are the same',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, local_hash=hash)\naq.putfile(*mdid)',
'ACTION_FUNC': u'reput_file',
'COMMENTS': u'another upload is in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, local_hash=hash)\naq.cancel_download(mdid)',
'ACTION_FUNC': u'converges_to_server',
'COMMENTS': u'local file already has server changes that are being downloaded',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'file_conflict',
'COMMENTS': u'local file was modified by the user while download of next version was in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"we don't send directories to hq. This is old. Ignore.",
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"we don't send directories to hq. This is old. Ignore.",
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'LR_SCAN_ERROR': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'it was deleted at some point',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we should never be local-scanning a file',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'RESCAN',
'ACTION_FUNC': u'rescan_dir',
'COMMENTS': u'we need to restart the local rescan',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'SV_DIR_NEW': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type)\naq.query(uuid=uuid)',
'ACTION_FUNC': u'new_dir',
'COMMENTS': u'good case, we send a query to see if there are any changes we need to merge',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'new_dir_on_server_with_local_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'list the dir to get new info and converge',
'ACTION_FUNC': u'new_dir_on_server_with_local_dir',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}}],
u'SV_FILE_DELETED': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something and the server did the same',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.remove(uuid)',
'ACTION_FUNC': u'delete_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'CONFLICT; md.remove(uuid)',
'ACTION_FUNC': u'conflict_and_delete',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'md.cancel_download(uuid); md.remove(uuid)',
'ACTION_FUNC': u'file_gone_wile_downloading',
'COMMENTS': u"we are still downloading some content we don't care about anymore",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'md.cancel_download(md); md.remove(uuid)',
'ACTION_FUNC': u'file_gone_wile_downloading',
'COMMENTS': u"we are still downloading some content we don't care about anymore",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}}],
u'SV_FILE_NEW': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type)\naq.query(uuid=uuid)',
'ACTION_FUNC': u'new_file',
                  'COMMENTS': u'good case, we send a query to see if there are any changes we need to merge',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u"Didn't find the node by node_id, but found it by path",
'ACTION_FUNC': u'new_server_file_having_local',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'SV_HASH_NEW': [{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'server side changes while trying to delete something',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'no news is good news',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
                  'COMMENTS': u'NONE, T, F is a fallacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
                  'COMMENTS': u'NONE, F, T is a fallacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.set(uuid, server_hash=hash)\npartial = md.create_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'get_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
                  'COMMENTS': u'SERVER, T, T is impossible (SERVER implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'A download for a content object with the same hash is already in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.remove_partial(uuid)\nmd.set(uuid, server_hash=hash)\naq.cancel_download(uuid)',
'ACTION_FUNC': u'server_file_changed_back',
'COMMENTS': u"the local file is equal to the file that is now on the server, but a download is in progress from an older version of the server. Removing the partial ensures that we never complete the download (thus we avoid the dreaded !!! state) Note that this makes it important for AQ_DOWNLOAD_FINISHED to 'pass' on downloads that aren't partials",
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_download(uuid)\nmd.set(uuid, server_hash=hash)\npartial = md.get_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'reget_file',
'COMMENTS': u'a download was in progress but the server changed again. Note that this makes it important for AQ_DOWNLOAD_FINISHED to check the server hash.',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
                  'COMMENTS': u'LOCAL, T, T is impossible (LOCAL implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'aq.upload()',
'ACTION_FUNC': u'reput_file_from_local',
'COMMENTS': u'The upload was interrupted, just try it again. ',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_upload(uuid)\nmd.set(uuid, server_hash=hash)',
'ACTION_FUNC': u'server_file_now_matches',
'COMMENTS': u"there's a small chance that the cancel fails, in which case we're simply redundant",
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'local_file_conflict',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(uuid, server_hash=hash)\npartial = md.create_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_download(uuid) \nmd.set(uuid, server_hash=hash)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_download(uuid)\nmd.set(uuid, server_hash=hash)\npartial = md.get_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'SV_MOVED': [{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.move(uuid)',
'ACTION_FUNC': u'server_moved',
'COMMENTS': u'all pending changes should arrive to the moved file\naq should remove the destination if its there',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'cancel_download; move; query',
'ACTION_FUNC': u'server_moved_dirty',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}}]},
'invalid': [{u'changed': u'NONE',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'SERVER',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'LOCAL',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'NONE',
u'has_metadata': u'F',
u'is_directory': u'F'},
{u'changed': u'SERVER',
u'has_metadata': u'F',
u'is_directory': u'F'},
{u'changed': u'LOCAL',
u'has_metadata': u'F',
u'is_directory': u'F'},
{u'changed': u'NONE',
u'has_metadata': u'F',
u'is_directory': u'NA'},
{u'changed': u'SERVER',
u'has_metadata': u'F',
u'is_directory': u'NA'},
{u'changed': u'LOCAL',
u'has_metadata': u'F',
u'is_directory': u'NA'},
{u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'NA',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'NA',
u'has_metadata': u'T',
u'is_directory': u'F'},
{u'changed': u'NA',
u'has_metadata': u'T',
u'is_directory': u'T'},
{u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'T'},
{u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'F'}],
'parameters': {u'hash_eq_local_hash': u'hash == md.local_hash',
u'hash_eq_server_hash': u'hash == md.server_hash',
u'not_authorized': u'error == not authorized',
u'not_available': u'error == not available'},
'state_vars': {u'changed': u'changed',
u'has_metadata': u'\u2203 md',
u'is_directory': u'isDirectory'}} | gpl-3.0 | 1,302,812,209,180,732,200 | 70.504122 | 381 | 0.270415 | false |
pennlabs/penn-mobile-server | server/portal/account.py | 1 | 7128 | import uuid
from datetime import datetime, timedelta
from flask import jsonify, redirect, request
from pytz import timezone
from sqlalchemy import exists
from server import app, bcrypt, sqldb
from server.models import Post, PostAccount, PostAccountEmail, PostTester
"""
Endpoint: /portal/account/new
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: name, email, password
Creates new account
If successful, returns account ID
"""
@app.route("/portal/account/new", methods=["POST"])
def create_account():
name = request.form.get("name")
email = request.form.get("email")
password = request.form.get("password")
encrypted_password = bcrypt.generate_password_hash(password)
if any(x is None for x in [name, email, encrypted_password]):
return jsonify({"error": "Parameter is missing"}), 400
account_exists = sqldb.session.query(exists().where(PostAccount.email == email)).scalar()
if account_exists:
return jsonify({"msg": "An account already exists for this email."}), 400
account = PostAccount(name=name, email=email, encrypted_password=encrypted_password)
sqldb.session.add(account)
sqldb.session.commit()
return jsonify({"account_id": account.id})
"""
Endpoint: /portal/account/login
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: email, password
Logins to existing account
If successful, returns account ID
"""
@app.route("/portal/account/login", methods=["POST"])
def login():
email = request.form.get("email")
password = request.form.get("password")
if any(x is None for x in [email, password]):
return jsonify({"error": "Parameter is missing"}), 400
    account = PostAccount.query.filter(PostAccount.email == email).first()
    # Verify the account exists before checking the password hash, so an
    # unknown email does not raise an AttributeError on a None account.
    if account and bcrypt.check_password_hash(account.encrypted_password, password):
account.sign_in_count = account.sign_in_count + 1
account.last_sign_in_at = datetime.now()
sqldb.session.commit()
return jsonify({"account_id": account.id})
else:
return jsonify({"error": "Unable to authenticate"}), 400
"""
Endpoint: /portal/account
HTTP Methods: GET
Response Formats: JSON
Parameters: account_id
Get all relevant information for an account
"""
@app.route("/portal/account", methods=["GET"])
def get_account_info():
try:
account_id = request.args.get("account_id")
account = PostAccount.get_account(account_id)
except ValueError as e:
return jsonify({"error": str(e)}), 400
if account.email == "[email protected]":
isAdmin = True
else:
isAdmin = False
verified_emails = (
sqldb.session.query(PostAccountEmail.email)
.filter_by(account=account.id, verified=True)
.all()
)
account_json = {
"id": account.id,
"name": account.name,
"email": account.email,
"verified_emails": verified_emails,
"is_admin": isAdmin,
}
return jsonify({"account": account_json})
"""
Endpoint: /portal/account/reset/request
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: email
Add password reset token to account
Sends email with link with reset token to the account's email
"""
@app.route("/portal/account/reset/request", methods=["POST"])
def request_account_password_reset_token():
email = request.form.get("email")
account = PostAccount.query.filter_by(email=email).first()
if not account:
return jsonify({"error": "Account not found."}), 400
# TODO: send verification email
token = str(uuid.uuid4())
print(token)
account.reset_password_token = token
account.reset_password_token_sent_at = datetime.now()
sqldb.session.commit()
return jsonify({"msg": "An email has been sent to reset your password."})
"""
Endpoint: /portal/account/reset
HTTP Methods: GET
Response Formats: JSON, HTML
Parameters: token
Verify a reset password token
"""
@app.route("/portal/account/reset", methods=["GET"])
def verify_account_password_reset():
token = request.args.get("token")
now = datetime.now()
account = PostAccount.query.filter_by(reset_password_token=token).first()
if not account:
return jsonify({"error": "Invalid auth token. Please try again."})
elif (
account.reset_password_token_sent_at
and account.reset_password_token_sent_at + timedelta(minutes=30) < now
):
return jsonify({"error": "This token has expired."})
else:
return redirect("https://pennlabs.org?token={}".format(token), code=302)
"""
Endpoint: /portal/account/reset
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: token, password
Reset password and remove password reset token from account
"""
@app.route("/portal/account/reset", methods=["POST"])
def reset_account_password():
token = request.form.get("token")
password = request.form.get("password")
encrypted_password = bcrypt.generate_password_hash(password)
now = datetime.now()
account = PostAccount.query.filter_by(reset_password_token=token).first()
if not account:
return jsonify({"error": "Invalid auth token. Please try again."})
elif (
account.reset_password_token_sent_at
and account.reset_password_token_sent_at + timedelta(minutes=30) < now
):
return jsonify({"error": "This token has expired."})
elif not encrypted_password:
return jsonify({"error": "Invalid password. Please try again."})
account.encrypted_password = encrypted_password
account.updated_at = datetime.now()
account.reset_password_token = None
account.reset_password_token_sent_at = None
sqldb.session.commit()
return jsonify({"msg": "Your password has been reset."})
"""
Endpoint: /portal/email/verify
HTTP Methods: GET
Response Formats: JSON, HTML
Parameters: token, account_email
Verifies a test email for an account and adds that test email to all upcoming posts
"""
@app.route("/portal/email/verify", methods=["GET"])
def verify_account_email_token():
token = request.args.get("token")
account_email = PostAccountEmail.query.filter_by(auth_token=token).first()
if not account_email:
return jsonify({"error": "Invalid auth token. Please try again."})
elif account_email.verified:
return jsonify({"error": "This email has already been verified for this account."})
else:
account_email.verified = True
est = timezone("EST")
now = datetime.now(est).replace(tzinfo=None)
upcoming_posts = (
Post.query.filter(Post.account == account_email.account)
.filter(Post.end_date >= now)
.all()
)
for post in upcoming_posts:
tester = PostTester(post=post.id, email=account_email.email)
sqldb.session.add(tester)
sqldb.session.commit()
return redirect("https://pennlabs.org", code=302)
| mit | -3,219,336,553,651,108,400 | 29.331915 | 93 | 0.682379 | false |
ulikoehler/ODBPy | ODBPy/ComponentParser.py | 1 | 3919 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ODB++ surface parser components
"""
import re
from collections import namedtuple
from .Decoder import DecoderOption, run_decoder
from .Structures import *
from .Utils import try_parse_number
__all__ = ["components_decoder_options", "parse_components",
"consolidate_component_tags", "Component", "map_components_by_name"]
_prp_re = re.compile(r"^PRP\s+(\S+)\s+'([^']+)'\s*$") # Property record
# _prp_re.search("PRP Name 'EEUFR1H470'")
_top_re = re.compile(r"^TOP\s+(\d+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(N|M|X|Y|XY)\s+(\d+)\s+(\d+)\s+(\S+)\s*$") # Toeprint record
_cmp_re = re.compile(r"^CMP\s+(\d+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(N|M|X|Y|XY)\s+(\S+)\s+(\S+)\s*(;\s*.+?)?$") # component record
ComponentRecordTag = namedtuple("ComponentRecordTag",[
"package_ref", "location", "rotation", "mirror", "name", "part_name", "attributes"])
PropertyRecordTag = namedtuple("PropertyRecord", ["key", "value"])
ToeprintRecord = namedtuple("ToeprintRecord", [
"pin_num", "location", "rotation", "mirrored", "net_num", "subnet_num", "toeprint_name"])
Component = namedtuple("Component", [
"name", "part_name", "location", "rotation", "mirror", "attributes", "properties", "toeprints"])
def consolidate_component_tags(tags):
component = None # Expect only one
properties = {}
toeprints = []
for tag in tags:
if isinstance(tag, ComponentRecordTag):
if component is not None:
raise ValueError("Multiple CMP records in section. Last one: {}".format(tag))
component = tag
if isinstance(tag, PropertyRecordTag):
properties[tag.key] = tag.value
if isinstance(tag, ToeprintRecord):
toeprints.append(tag)
if component is None:
raise ValueError("No CMP record in section")
return Component(
component.name, component.part_name, component.location,
component.rotation, component.mirror, component.attributes,
properties, toeprints
)
def _parse_prp(match):
key, value = match.groups()
return PropertyRecordTag(key, value)
def _parse_top(match):
pin_num, x, y, rot, mirror, net_num, subnet_num, toeprint_name = match.groups()
return ToeprintRecord(
int(pin_num),
Point(float(x), float(y)),
float(rot),
mirror_map[mirror],
int(net_num),
int(subnet_num),
try_parse_number(toeprint_name)
)
def _parse_cmp(match):
pkg_ref, x, y, rot, mirror, name, part_name, attributes = match.groups()
attributes = parse_attributes(attributes[1:]) \
if attributes is not None else {}
return ComponentRecordTag(
int(pkg_ref),
Point(float(x), float(y)),
float(rot),
mirror_map[mirror],
try_parse_number(name.strip()),
try_parse_number(part_name.strip()),
attributes
)
components_decoder_options = [
DecoderOption(_prp_re, _parse_prp),
DecoderOption(_top_re, _parse_top),
DecoderOption(_cmp_re, _parse_cmp)
]
def component_name_to_id(name):
"""
Convert a section header name ("CMP 0" in DipTrace)
to an identifier (e.g. 0)
"""
if name.startswith("CMP"):
return int(name[len("CMP"):].strip())
return name
def parse_components(components):
# Build rulesets
return {
component_name_to_id(name): consolidate_component_tags(
list(run_decoder(component, components_decoder_options)))
for name, component in components.items()
if name is not None
}
def map_components_by_name(components):
"""Given a dictionary or list of components, map them into a dictionary by name"""
if isinstance(components, dict):
components = components.values()
return {
component.name: component
for component in components
}
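# Minimal usage sketch. The record strings below are invented examples of the
# ODB++ CMP/PRP/TOP syntax accepted by the regexes above, and this assumes
# run_decoder() simply iterates the lines of each section:
#
#   section = {"CMP 0": [
#       "CMP 3 1.5 2.5 90.0 N R1 10k ;0=1",
#       "PRP Name 'EEUFR1H470'",
#       "TOP 0 1.5 2.5 270.0 N 12 0 r1_1",
#   ]}
#   components = parse_components(section)
#   components[0].name       # -> 'R1'
#   components[0].toeprints  # -> [ToeprintRecord(pin_num=0, ...)]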
| apache-2.0 | 1,282,930,216,420,034,800 | 33.991071 | 143 | 0.615463 | false |
kilon/sverchok | utils/sv_easing_functions.py | 1 | 7135 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
'''
original c code:
https://raw.githubusercontent.com/warrenm/AHEasing/master/AHEasing/easing.c
Copyright (c) 2011, Auerhaus Development, LLC
http://sam.zoy.org/wtfpl/COPYING for more details.
'''
from math import sqrt, pow, sin, cos
from math import pi as M_PI
M_PI_2 = M_PI / 2  # pi/2, matching M_PI_2 in the original AHEasing C code
# Modeled after the line y = x
def LinearInterpolation(p):
return p
# Modeled after the parabola y = x^2
def QuadraticEaseIn(p):
return p * p
# Modeled after the parabola y = -x^2 + 2x
def QuadraticEaseOut(p):
return -(p * (p - 2))
# Modeled after the piecewise quadratic
# y = (1/2)((2x)^2) ; [0, 0.5)
# y = -(1/2)((2x-1)*(2x-3) - 1) ; [0.5, 1]
def QuadraticEaseInOut(p):
if (p < 0.5):
return 2 * p * p
return (-2 * p * p) + (4 * p) - 1
# Modeled after the cubic y = x^3
def CubicEaseIn(p):
return p * p * p
# Modeled after the cubic y = (x - 1)^3 + 1
def CubicEaseOut(p):
f = (p - 1)
return f * f * f + 1
# Modeled after the piecewise cubic
# y = (1/2)((2x)^3) ; [0, 0.5)
# y = (1/2)((2x-2)^3 + 2) ; [0.5, 1]
def CubicEaseInOut(p):
if (p < 0.5):
return 4 * p * p * p
else:
f = ((2 * p) - 2)
return 0.5 * f * f * f + 1
# Modeled after the quartic x^4
def QuarticEaseIn(p):
return p * p * p * p
# Modeled after the quartic y = 1 - (x - 1)^4
def QuarticEaseOut(p):
f = (p - 1)
return f * f * f * (1 - p) + 1
# Modeled after the piecewise quartic
# y = (1/2)((2x)^4) ; [0, 0.5)
# y = -(1/2)((2x-2)^4 - 2) ; [0.5, 1]
def QuarticEaseInOut(p):
if (p < 0.5):
return 8 * p * p * p * p
else:
f = (p - 1)
return -8 * f * f * f * f + 1
# Modeled after the quintic y = x^5
def QuinticEaseIn(p):
return p * p * p * p * p
# Modeled after the quintic y = (x - 1)^5 + 1
def QuinticEaseOut(p):
f = (p - 1)
return f * f * f * f * f + 1
# Modeled after the piecewise quintic
# y = (1/2)((2x)^5) ; [0, 0.5)
# y = (1/2)((2x-2)^5 + 2) ; [0.5, 1]
def QuinticEaseInOut(p):
if (p < 0.5):
return 16 * p * p * p * p * p
else:
f = ((2 * p) - 2)
return 0.5 * f * f * f * f * f + 1
# Modeled after quarter-cycle of sine wave
def SineEaseIn(p):
return sin((p - 1) * M_PI_2) + 1
# Modeled after quarter-cycle of sine wave (different phase)
def SineEaseOut(p):
return sin(p * M_PI_2)
# Modeled after half sine wave
def SineEaseInOut(p):
return 0.5 * (1 - cos(p * M_PI))
# Modeled after shifted quadrant IV of unit circle
def CircularEaseIn(p):
return 1 - sqrt(1 - (p * p))
# Modeled after shifted quadrant II of unit circle
def CircularEaseOut(p):
return sqrt((2 - p) * p)
# Modeled after the piecewise circular function
# y = (1/2)(1 - sqrt(1 - 4x^2)) ; [0, 0.5)
# y = (1/2)(sqrt(-(2x - 3)*(2x - 1)) + 1) ; [0.5, 1]
def CircularEaseInOut(p):
if(p < 0.5):
return 0.5 * (1 - sqrt(1 - 4 * (p * p)))
else:
return 0.5 * (sqrt(-((2 * p) - 3) * ((2 * p) - 1)) + 1)
# Modeled after the exponential function y = 2^(10(x - 1))
def ExponentialEaseIn(p):
return p if (p == 0.0) else pow(2, 10 * (p - 1))
# Modeled after the exponential function y = -2^(-10x) + 1
def ExponentialEaseOut(p):
return p if (p == 1.0) else 1 - pow(2, -10 * p)
# Modeled after the piecewise exponential
# y = (1/2)2^(10(2x - 1)) ; [0,0.5)
# y = -(1/2)*2^(-10(2x - 1))) + 1 ; [0.5,1]
def ExponentialEaseInOut(p):
if(p == 0.0 or p == 1.0):
return p
if(p < 0.5):
return 0.5 * pow(2, (20 * p) - 10)
else:
return -0.5 * pow(2, (-20 * p) + 10) + 1
# Modeled after the damped sine wave y = sin(13pi/2*x)*pow(2, 10 * (x - 1))
def ElasticEaseIn(p):
return sin(13 * M_PI_2 * p) * pow(2, 10 * (p - 1))
# Modeled after the damped sine wave y = sin(-13pi/2*(x + 1))*pow(2, -10x) + 1
def ElasticEaseOut(p):
return sin(-13 * M_PI_2 * (p + 1)) * pow(2, -10 * p) + 1
# Modeled after the piecewise exponentially-damped sine wave:
# y = (1/2)*sin(13pi/2*(2*x))*pow(2, 10 * ((2*x) - 1)) ; [0,0.5)
# y = (1/2)*(sin(-13pi/2*((2x-1)+1))*pow(2,-10(2*x-1)) + 2) ; [0.5, 1]
def ElasticEaseInOut(p):
if (p < 0.5):
return 0.5 * sin(13 * M_PI_2 * (2 * p)) * pow(2, 10 * ((2 * p) - 1))
else:
return 0.5 * (sin(-13 * M_PI_2 * ((2 * p - 1) + 1)) * pow(2, -10 * (2 * p - 1)) + 2)
# Modeled after the overshooting cubic y = x^3-x*sin(x*pi)
def BackEaseIn(p):
return p * p * p - p * sin(p * M_PI)
# Modeled after overshooting cubic y = 1-((1-x)^3-(1-x)*sin((1-x)*pi))
def BackEaseOut(p):
f = (1 - p)
return 1 - (f * f * f - f * sin(f * M_PI))
# Modeled after the piecewise overshooting cubic function:
# y = (1/2)*((2x)^3-(2x)*sin(2*x*pi)) ; [0, 0.5)
# y = (1/2)*(1-((1-x)^3-(1-x)*sin((1-x)*pi))+1) ; [0.5, 1]
def BackEaseInOut(p):
if (p < 0.5):
f = 2 * p
return 0.5 * (f * f * f - f * sin(f * M_PI))
else:
f = (1 - (2 * p - 1))
return 0.5 * (1 - (f * f * f - f * sin(f * M_PI))) + 0.5
def BounceEaseIn(p):
return 1 - BounceEaseOut(1 - p)
def BounceEaseOut(p):
if(p < 4 / 11.0):
return (121 * p * p) / 16.0
elif(p < 8 / 11.0):
return (363 / 40.0 * p * p) - (99 / 10.0 * p) + 17 / 5.0
elif(p < 9 / 10.0):
return (4356 / 361.0 * p * p) - (35442 / 1805.0 * p) + 16061 / 1805.0
else:
return (54 / 5.0 * p * p) - (513 / 25.0 * p) + 268 / 25.0
def BounceEaseInOut(p):
if(p < 0.5):
return 0.5 * BounceEaseIn(p * 2)
else:
return 0.5 * BounceEaseOut(p * 2 - 1) + 0.5
easing_dict = {
0: LinearInterpolation,
1: QuadraticEaseIn,
2: QuadraticEaseOut,
3: QuadraticEaseInOut,
4: CubicEaseIn,
5: CubicEaseOut,
6: CubicEaseInOut,
7: QuarticEaseIn,
8: QuarticEaseOut,
9: QuarticEaseInOut,
10: QuinticEaseIn,
11: QuinticEaseOut,
12: QuinticEaseInOut,
13: SineEaseIn,
14: SineEaseOut,
15: SineEaseInOut,
16: CircularEaseIn,
17: CircularEaseOut,
18: CircularEaseInOut,
19: ExponentialEaseIn,
20: ExponentialEaseOut,
21: ExponentialEaseInOut,
22: ElasticEaseIn,
23: ElasticEaseOut,
24: ElasticEaseInOut,
25: BackEaseIn,
26: BackEaseOut,
27: BackEaseInOut,
28: BounceEaseIn,
29: BounceEaseOut,
30: BounceEaseInOut
}
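# Purely illustrative sampling of one curve from the table above:
#
#   ease = easing_dict[3]                        # QuadraticEaseInOut
#   samples = [ease(i / 10) for i in range(11)]  # 0.0 ... 1.0, eased in between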
| gpl-3.0 | -8,912,327,687,947,498,000 | 24.573477 | 92 | 0.552908 | false |
Corvia/django-tenant-users | dtu_test_project/customers/migrations/0001_initial.py | 1 | 1338 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django_tenants.postgresql_backend.base
VALIDATOR = django_tenants.postgresql_backend.base._check_schema_name
except ImportError as e:
import tenant_schemas.postgresql_backend.base
VALIDATOR = tenant_schemas.postgresql_backend.base._check_schema_name
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
('domain_url', models.CharField(max_length=128, unique=True)),
('schema_name', models.CharField(max_length=63, unique=True,
validators=[VALIDATOR])),
('slug', models.SlugField(verbose_name='Tenant URL Name', blank=True)),
('created', models.DateTimeField()),
('modified', models.DateTimeField(blank=True)),
('name', models.CharField(max_length=100)),
('description', models.TextField(max_length=200)),
],
options={
'abstract': False,
},
),
]
| mit | -8,869,141,853,439,523,000 | 33.307692 | 114 | 0.577728 | false |
rbi13/CommandServer-py | Pattern.py | 1 | 1060 | #!/usr/bin/python
## Pattern.py
import re
from PatternMatch import PatternMatch
class Pattern:
# Pattern keys
PATTERN_KEY = "pattern"
CLI_COMMAND_KEY = "cliCommand"
FUNCTION_KEY = "function"
def __init__(self, info):
self.pattern = info.get(Pattern.PATTERN_KEY)
self.cliCommand = info.get(Pattern.CLI_COMMAND_KEY)
self.function = info.get(Pattern.FUNCTION_KEY)
def match(self, compare):
match = re.search(self.pattern, compare)
if match:
return PatternMatch(self, match)
else:
return None
@staticmethod
def getMatches(pattern_list, compare):
matches = []
for pattern in pattern_list:
match = pattern.match(compare)
if match:
matches.append(match)
return matches
@staticmethod
def load(json_list):
# Pattern[]
patterns = []
# pprint(json_list)
for patternDef in json_list:
patterns.append(Pattern(patternDef))
return patterns
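# Illustrative use of the helpers above (the pattern definitions are made up):
#
#   defs = [{"pattern": r"^say (.+)$", "cliCommand": "echo"},
#           {"pattern": r"^time$", "function": "show_time"}]
#   patterns = Pattern.load(defs)
#   matches = Pattern.getMatches(patterns, "say hello")  # -> [PatternMatch(...)]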
| lgpl-3.0 | -1,920,804,193,334,539,300 | 24.238095 | 59 | 0.59434 | false |
cbun/assembly | lib/assembly/assembly.py | 1 | 7846 | #! /usr/bin/env python
"""Assembly execution drivers.
This module provides the default parameters and handling of
assembler-specific configurations.
Assembler defaults are set in the 'arast.conf' file
"""
import logging
import os
import re
import subprocess
import shutil
import glob
import metadata as meta
from ConfigParser import SafeConfigParser
def get_default(key):
"""Get assemblers default value from config file."""
return parser.get('assemblers', key)
def run(assembler, job_data):
plugin = self.pmanager.getPluginByName(assembler)
settings = plugin.details.items('Settings')
return plugin.plugin_object(settings, job_data)
def get_tar_name(job_id, suffix):
name = 'job' + str(job_id)
name += '_'
name += suffix
name += '.tar.gz'
return name
def tar(outpath, asm_data, tarname):
print "Compressing"
outfile = outpath + '/tar/'
try:
os.makedirs(outfile)
except:
pass
outfile += tarname
targs = ['tar', '-czvf', outfile, asm_data]
t = subprocess.Popen(targs)
t.wait()
return outfile
def tar_directory(outpath, directory, tarname):
outfile = outpath
try:
os.makedirs(outfile)
except:
pass
outfile += tarname
targs = ['tar', '-czvf', outfile, './']
t = subprocess.Popen(targs, cwd=directory)
t.wait()
return outfile
def tar_list(outpath, file_list, tarname):
""" Tars a file list. Attempts to find the highest common path"""
common_path = os.path.commonprefix(file_list)
outfile = outpath + '/tar/'
try: os.makedirs(outfile)
except: pass
outfile += tarname
targs = ['tar', '-czvf', outfile]
targs += [os.path.relpath(path, common_path) for path in file_list]
logging.debug("Tar command: %s: " % targs)
t = subprocess.Popen(targs, cwd=common_path)
t.wait()
return outfile
def ls_recursive(path):
""" Returns list of all files in a dir"""
allfiles = []
for root, sub_dirs, files in os.walk(path):
for f in files:
allfiles.append(os.path.join(root, f))
return allfiles
def prefix_file_move(file, prefix):
""" Adds prefix to file, returns new file name, moves file"""
if os.path.isdir(file):
return file
f = '/' + str(prefix) + '__' + os.path.basename(file)
newfile = os.path.split(file)[0] + f
os.rename(file, newfile)
return newfile
def prefix_file(file, prefix):
""" Adds prefix to file, returns new filename"""
if os.path.isdir(file):
return file
f = '/' + str(prefix) + '__' + os.path.basename(file)
newfile = os.path.split(file)[0] + f
return newfile
def rename_file_copy(filepath, newname):
""" Renames the file, keeping the file extension, copies to new file name"""
f = '/' + newname + '.' + os.path.basename(filepath).rsplit('.', 1)[1]
newfile = os.path.split(filepath)[0] + f
shutil.copy(filepath, newfile)
return newfile
def rename_file_symlink(filepath, newname):
""" Renames the file, keeping the file extension, symlinks to new file name"""
f = '/' + newname + '.' + os.path.basename(filepath).rsplit('.', 1)[1]
newfile = os.path.split(filepath)[0] + f
os.symlink(filepath, newfile)
return newfile
def get_fasta(directory):
""" Return the list of Fasta files in DIRECTORY
"""
files = os.listdir(directory)
fasta_files = [file for file in files
if re.search(r'\.fa$|\.fasta$', file, re.IGNORECASE) is not None]
return fasta_files
def get_fastq(directory):
""" Return the list of Fastq files in DIRECTORY
"""
files = os.listdir(directory)
fastq_files = [file for file in files
if re.search(r'\.fq$|\.fastq$', file, re.IGNORECASE) is not None]
return fastq_files
def get_quala(directory):
""" Return the list of Quala files in DIRECTORY
"""
files = os.listdir(directory)
quala_files = [file for file in files
if re.search(r'\.qa$|\.quala$', file, re.IGNORECASE) is not None]
    return quala_files
def read_config():
pass
def run_bwa(data_dir, ref_name, read_files, prefix):
""" Ex: run_bwa(velvet_data, 'contigs.fa', reads_list, 'velvet') """
bwa_exec = 'bwa'
samtools_exec = 'samtools'
tmp_files = []
ref_file = data_dir + ref_name
# Run the index on reference
bwa_args = [bwa_exec, 'index']
bwa_args.append(ref_file)
logging.info(bwa_args)
p_index = subprocess.Popen(bwa_args)
p_index.wait()
# Align reads to reference
bwa_args = [bwa_exec, 'aln']
bwa_args.append(ref_file)
if len(read_files) > 1:
# Concatenate read files
reads = data_dir + 'reads.fa'
destination = open(reads,'wb')
for rf in read_files:
logging.info("Concatenating read file: %s", rf)
shutil.copyfileobj(open(rf,'rb'), destination)
destination.close()
tmp_files.append(reads)
else:
reads = read_files[0]
bwa_args.append(reads)
aln_out = data_dir + prefix
aln_out += '_aln.sai'
aln_outbuffer = open(aln_out, 'wb')
tmp_files.append(aln_out)
bwa_args.append(aln_out)
logging.info(bwa_args)
p_aln = subprocess.Popen(bwa_args, stdout=aln_outbuffer)
p_aln.wait()
aln_outbuffer.close()
# Create Sam file
#bwa samse $ref $dir/aln-$refX$reads.sai $reads > $dir/aln-$refX$reads.sam
bwa_args = [bwa_exec, 'samse', ref_file, aln_out, reads]
sam_out = data_dir + prefix
sam_out += '_aln.sam'
sam_outbuffer = open(sam_out, 'wb')
tmp_files.append(sam_out)
bwa_args.append(sam_out)
logging.info(bwa_args)
p_sam = subprocess.Popen(bwa_args, stdout=sam_outbuffer)
p_sam.wait()
sam_outbuffer.close()
# Create bam file
# samtools view -S -b -o $dir/aln-$refX$reads.bam $dir/aln-$refX$reads.sam
samtools_args = [samtools_exec, 'view', '-S', '-b', '-o']
bam_out = data_dir + prefix
bam_out += '_aln.bam'
bam_outbuffer = open(bam_out, 'wb')
samtools_args.append(bam_out)
samtools_args.append(sam_out)
logging.info(samtools_args)
p_bam = subprocess.Popen(samtools_args, stdout=bam_outbuffer)
p_bam.wait()
bam_outbuffer.close()
for temp in tmp_files:
try:
os.remove(temp)
except:
logging.info("Could not remove %s" % temp)
return bam_out
def get_qual_encoding(file):
f = open(file, 'r')
while True:
bline = f.readline()
if bline.find('+') != -1: # Line before quality line
line = f.readline()
for c in line:
if ord(c) > 74:
logging.info("Detected phred64 quality encoding")
return 'phred64'
elif ord(c) < 64:
logging.info("Detected phred33 quality encoding")
return 'phred33'
if len(bline) == 0: #EOF
break
return
def tab_to_fasta(tabbed_file, outfile, threshold):
tabbed = open(tabbed_file, 'r')
fasta = open(outfile, 'w')
#prefixes = ['>_', ' len_', ' cov_', ' stdev_', ' GC_', '\n']
prefixes = ['>_', ' len_', ' cov_', ' stdev_', ' GC_', ' seed_', '\n']
for line in tabbed:
l = line.split('\t')
if int(l[1]) <= threshold:
for i in range(len(l)):
fasta.write(prefixes[i] + l[i])
tabbed.close()
fasta.close()
def arast_reads(filelist):
""" Returns a list of files into the ARAST reads dict format """
filedicts = []
for f in filelist:
filedicts.append({'type':'single', 'files':[f]})
return filedicts
parser = SafeConfigParser()
#parser.read('arast.conf')
#basepath = get_default('basepath')
#metadata = meta.MetadataConnection(parser.get('meta','mongo.remote.host'))
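# Hypothetical 'arast.conf' matching the lookups used in this module
# (values are illustrative only):
#
#   [assemblers]
#   basepath = /var/lib/arast
#
#   [meta]
#   mongo.remote.host = localhost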
| mit | -8,488,182,745,002,458,000 | 28.276119 | 84 | 0.604639 | false |
numberoverzero/bloop | tests/integ/test_inheritance.py | 1 | 6089 | import random
import uuid
from datetime import datetime, timezone
from string import ascii_letters
import pytest
from tests.integ.models import ExternalUser, MixinUser, Role
from bloop import (
UUID,
BaseModel,
Column,
DateTime,
GlobalSecondaryIndex,
Integer,
)
from bloop.exceptions import InvalidModel
def test_inheritance_simple(engine):
class NewBase(BaseModel):
class Meta:
abstract = True
uuid = Column(UUID)
class SimpleModel(NewBase):
id = Column(Integer, hash_key=True)
created_at = Column(DateTime)
model = SimpleModel()
assert len(model.Meta.columns) == 3
assert len(model.Meta.keys) == 1
assert list(model.Meta.keys)[0].name == 'id'
def test_inheritance_base_hashkey(engine):
class NewBase(BaseModel):
class Meta:
abstract = True
uuid = Column(UUID, hash_key=True)
class SimpleModel(NewBase):
id = Column(Integer)
created_at = Column(DateTime)
model = SimpleModel()
assert len(model.Meta.columns) == 3
assert len(model.Meta.keys) == 1
assert list(model.Meta.keys)[0].name == 'uuid'
def test_inheritance_mixins(engine):
model = MixinUser()
assert len(model.Meta.columns) == 8
assert len(model.Meta.keys) == 2
assert model.Meta.hash_key.name == 'id'
assert model.Meta.range_key.name == 'created'
def _create_user(cls, **extra):
now = datetime.now(timezone.utc)
first_name = "".join([random.choice(ascii_letters) for _ in range(8)])
last_name = "".join([random.choice(ascii_letters) for _ in range(12)])
email = f"{first_name}.{last_name}@example.com"
return cls(
id=uuid.uuid4(), created=now, updated=now, active=True,
first_name=first_name, last_name=last_name, email=email,
**extra
)
def gen_external_user():
extra = {'company': 'Acme', 'roles': {Role.user, Role.admin}}
return _create_user(ExternalUser, **extra)
def gen_mixin_user():
extra = {'roles': {Role.user}}
return _create_user(MixinUser, **extra)
@pytest.mark.parametrize("cls, factory", [
(MixinUser, gen_mixin_user),
(ExternalUser, gen_external_user)
])
def test_inheritance_load(engine, cls, factory):
engine.bind(BaseModel)
obj = factory()
engine.save(obj)
same_obj = cls(id=obj.id, created=obj.created)
engine.load(same_obj)
assert same_obj.Meta.model is cls
for attr in [col.name for col in obj.Meta.columns]:
assert getattr(same_obj, attr) == getattr(obj, attr)
def test_inheritance_lsi_from_baseclass(engine):
engine.bind(BaseModel)
first_group = []
for x in range(3):
user = gen_mixin_user()
engine.save(user)
first_group.append(user)
saved_date = datetime.now(timezone.utc)
second_group = []
for x in range(3):
user = gen_mixin_user()
engine.save(user)
second_group.append(user)
# ensure that we won't find a user in the first group that has a created after our saved date.
cond = (MixinUser.id == first_group[0].id) & (MixinUser.created > saved_date)
q = engine.query(MixinUser.by_created, key=cond)
assert len(list(q)) == 0
# ensure that we *do* find a user in the second group that has a created after our saved date.
cond = (MixinUser.id == second_group[-1].id) & (MixinUser.created > saved_date)
q = engine.query(MixinUser.by_created, key=cond)
items = list(q)
assert len(items) == 1
assert items[0].Meta.model is MixinUser
def test_inheritance_lsi_from_concrete_subclass(engine):
engine.bind(BaseModel)
first_group = []
for x in range(3):
user = gen_external_user()
engine.save(user)
first_group.append(user)
saved_date = datetime.now(timezone.utc)
second_group = []
for x in range(3):
user = gen_external_user()
engine.save(user)
second_group.append(user)
# ensure that we won't find a user in the first group that has a created after our saved date.
cond = (ExternalUser.id == first_group[0].id) & (ExternalUser.created > saved_date)
q = engine.query(ExternalUser.by_created, key=cond)
assert len(list(q)) == 0
# ensure that we *do* find a user in the second group that has a created after our saved date.
cond = (ExternalUser.id == second_group[-1].id) & (ExternalUser.created > saved_date)
q = engine.query(ExternalUser.by_created, key=cond)
items = list(q)
assert len(items) == 1
assert items[0].Meta.model is ExternalUser
def test_inheritance_gsi_to_baseclass(engine):
engine.bind(BaseModel)
user1 = gen_mixin_user()
engine.save(user1)
cond = MixinUser.email == user1.email
user2 = engine.query(MixinUser.by_email, key=cond).one()
assert user2.Meta.model is MixinUser
for attr in [col.name for col in user1.Meta.columns]:
assert getattr(user2, attr) == getattr(user1, attr)
def test_inheritance_gsi_from_concrete_subclass(engine):
engine.bind(BaseModel)
user1 = gen_external_user()
engine.save(user1)
cond = ExternalUser.email == user1.email
user2 = engine.query(ExternalUser.by_email, key=cond).one()
assert user2.Meta.model is ExternalUser
for attr in [col.name for col in user1.Meta.columns]:
assert getattr(user2, attr) == getattr(user1, attr)
def test_inheritance_overwrites_rangekey(engine):
class NextGenUser(MixinUser):
version = Column(Integer, range_key=True)
def test_inheritance_overwrites_hashkey(engine):
class NextGenUser(MixinUser):
version = Column(Integer, hash_key=True)
def test_inheritance_two_models_same_dynamo_index_name(engine):
class NextGenUser(MixinUser):
version = Column(Integer)
next_by_email = GlobalSecondaryIndex(projection='all', dynamo_name='email-index', hash_key='email')
def test_inheritance_two_models_same_dynamo_column_name(engine):
with pytest.raises(InvalidModel):
class NextGenUser(MixinUser):
version = Column(Integer, dynamo_name='email')
| mit | 4,920,138,609,226,647,000 | 28.133971 | 107 | 0.661028 | false |
lausser/coshsh | coshsh/datasource.py | 1 | 4889 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#
# This file belongs to coshsh.
# Copyright Gerhard Lausser.
# This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import sys
import os
import re
import imp
import inspect
import logging
import coshsh
from coshsh.util import compare_attr, substenv
logger = logging.getLogger('coshsh')
class DatasourceNotImplemented(Exception):
pass
class DatasourceNotReady(Exception):
# datasource is currently being updated
pass
class DatasourceNotCurrent(Exception):
# datasources was not updated lately.
# it makes no sense to continue.
pass
class DatasourceNotAvailable(Exception):
pass
class DatasourceCorrupt(Exception):
pass
class Datasource(object):
my_type = 'datasource'
class_factory = []
def __init__(self, **params):
#print "datasourceinit with", self.__class__
for key in [k for k in params if k.startswith("recipe_")]:
setattr(self, key, params[key])
short = key.replace("recipe_", "")
if not short in params:
params[short] = params[key]
for key in params.keys():
if isinstance(params[key], basestring):
params[key] = re.sub('%.*?%', substenv, params[key])
if self.__class__ == Datasource:
#print "generic ds", params
newcls = self.__class__.get_class(params)
if newcls:
#print "i rebless anon datasource to", newcls, params
self.__class__ = newcls
self.__init__(**params)
else:
logger.critical('datasource for %s is not implemented' % params)
#print "i raise DatasourceNotImplemented"
raise DatasourceNotImplemented
else:
setattr(self, 'name', params["name"])
self.objects = {}
pass
# i am a generic datasource
# i find a suitable class
# i rebless
# i call __init__
def open(self, **kwargs):
pass
def read(self, **kwargs):
pass
def close(self):
pass
def add(self, objtype, obj):
try:
self.objects[objtype][obj.fingerprint()] = obj
except Exception:
self.objects[objtype] = {}
self.objects[objtype][obj.fingerprint()] = obj
if objtype == 'applications':
if self.find('hosts', obj.host_name):
setattr(obj, 'host', self.get('hosts', obj.host_name))
def get(self, objtype, fingerprint):
try:
return self.objects[objtype][fingerprint]
except Exception:
# should be None
return None
return 'i do not exist. no. no!'
def getall(self, objtype):
try:
return self.objects[objtype].values()
except Exception:
return []
def find(self, objtype, fingerprint):
return objtype in self.objects and fingerprint in self.objects[objtype]
@classmethod
def init_classes(cls, classpath):
sys.dont_write_bytecode = True
for p in [p for p in reversed(classpath) if os.path.exists(p) and os.path.isdir(p)]:
for module, path in [(item, p) for item in os.listdir(p) if item[-3:] == ".py" and item.startswith('datasource_')]:
try:
#print "try ds", module, path
path = os.path.abspath(path)
fp, filename, data = imp.find_module(module.replace('.py', ''), [path])
toplevel = imp.load_source(module.replace(".py", ""), filename)
for cl in inspect.getmembers(toplevel, inspect.isfunction):
if cl[0] == "__ds_ident__":
cls.class_factory.append([path, module, cl[1]])
except Exception, exp:
logger.critical("could not load datasource %s from %s: %s" % (module, path, exp))
finally:
if fp:
fp.close()
@classmethod
def get_class(cls, params={}):
#print "get_classhoho", cls, len(cls.class_factory), cls.class_factory
for path, module, class_func in cls.class_factory:
try:
#print "try", path, module, class_func
newcls = class_func(params)
if newcls:
return newcls
            except Exception, exp:
dsname = 'INVALID' if 'name' not in params else params['name']
print 'Datasource.get_class exception while trying module "%s" for datasource "%s": %s %s' % \
(os.path.join(path, module), dsname, type(exp), exp)
pass
logger.debug("found no matching class for this datasource %s" % params)
| agpl-3.0 | 8,794,060,652,986,661,000 | 32.033784 | 127 | 0.557987 | false |
openstack/networking-plumgrid | networking_plumgrid/neutron/plugins/drivers/fake_plumlib.py | 1 | 5565 | # Copyright 2015 PLUMgrid, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_plumgrid._i18n import _LI
from neutron.extensions import providernet as provider
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class Plumlib(object):
"""Class PLUMgrid Fake Library.
This library is a by-pass implementation for the PLUMgrid Library.
This class is being used by the unit test integration in Neutron.
"""
def __init__(self):
LOG.info(_LI('Python PLUMgrid Fake Library Started '))
pass
def director_conn(self, director_plumgrid, director_port, timeout,
director_admin, director_password):
LOG.info(_LI('Fake Director: %s'),
director_plumgrid + ':' + str(director_port))
pass
def create_network(self, tenant_id, net_db, network, **kwargs):
net_db["network"] = {}
for key in (provider.NETWORK_TYPE,
provider.PHYSICAL_NETWORK,
provider.SEGMENTATION_ID):
net_db["network"][key] = network["network"][key]
return net_db
def update_network(self, tenant_id, net_id, network, orig_net_db):
pass
def delete_network(self, net_db, net_id):
pass
def create_subnet(self, sub_db, net_db, ipnet):
pass
def update_subnet(self, orig_sub_db, new_sub_db, ipnet, net_db):
pass
def delete_subnet(self, tenant_id, net_db, net_id, sub_db):
pass
def create_port(self, port_db, router_db, subnet_db):
pass
def update_port(self, port_db, router_db, subnet_db):
pass
def delete_port(self, port_db, router_db):
pass
def create_router(self, tenant_id, router_db):
pass
def update_router(self, router_db, router_id):
pass
def delete_router(self, tenant_id, router_id):
pass
def add_router_interface(self, tenant_id, router_id, port_db, ipnet,
ip_version):
pass
def remove_router_interface(self, tenant_id, net_id, router_id):
pass
def create_floatingip(self, floating_ip):
pass
def update_floatingip(self, floating_ip_orig, floating_ip, id):
pass
def delete_floatingip(self, floating_ip_orig, id):
pass
def disassociate_floatingips(self, fip, port_id):
return dict((key, fip[key]) for key in ("id", "floating_network_id",
"floating_ip_address"))
def create_security_group(self, sg_db):
pass
def update_security_group(self, sg_db):
pass
def delete_security_group(self, sg_db):
pass
def create_security_group_rule(self, sg_rule_db):
pass
def create_security_group_rule_bulk(self, sg_rule_db):
pass
def delete_security_group_rule(self, sg_rule_db):
pass
def create_l2_gateway(self, director_plumgrid,
director_admin,
director_password,
gateway_info,
vendor_type,
sw_username,
sw_password):
pass
def delete_l2_gateway(self, gw_info):
pass
def add_l2_gateway_connection(self, gw_conn_info):
pass
def delete_l2_gateway_connection(self, gw_conn_info):
pass
def create_physical_attachment_point(self, physical_attachment_point):
pass
def update_physical_attachment_point(self, physical_attachment_point):
pass
def delete_physical_attachment_point(self, pap_id):
pass
def create_transit_domain(self, transit_domain, db):
pass
def update_transit_domain(self, transit_domain, db):
pass
def delete_transit_domain(self, tvd_id):
pass
def get_available_interface(self):
return "host1", "ifc1"
def create_policy_tag(self, tenant_id, policy_tag_db):
pass
def delete_policy_tag(self, tenant_id, ptag_id):
pass
def create_endpoint_group(self, tenant_id, ep_grp, ptag_db):
pass
def delete_endpoint_group(self, tenant_id, epg_id, ptag_db):
pass
def update_endpoint_group(self, tenant_id, epg_id, epg_db, ptag_db):
pass
def create_policy_service(self, tenant_id, ps_db, ps_mac_list):
pass
def delete_policy_service(self, tenant_id, ps_id):
pass
def update_policy_service(self, tenant_id, ps_id, ps_db, ps_mac_list):
pass
def create_policy_rule(self, tenant_id, pr_db):
pass
def delete_policy_rule(self, tenant_id, pr_id, remote_target=None):
pass
def create_endpoint(self, tenant_id, ep_db, port_mac=None):
pass
def delete_endpoint(self, tenant_id, ep_id, ep_db, port_mac=None):
pass
def update_endpoint(self, tenant_id, ep_id, ep_db, port_mac=None):
pass
def get_ext_links(self, tenant_id):
pass
| apache-2.0 | -7,382,820,481,788,809,000 | 27.248731 | 78 | 0.608625 | false |
YiqunPeng/Leetcode-pyq | solutions/694NumberOfDistinctIslands.py | 1 | 1174 | class Solution:
def numDistinctIslands(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid: return 0
island_list = set()
m, n = len(grid), len(grid[0])
v = [[False for j in range(n)] for i in range(m)]
def bfs(x, y, v):
island = []
m, n = len(grid), len(grid[0])
q = [(x, y)]
while q:
i, j = q.pop(0)
for n_i, n_j in [(i+1, j), (i-1, j), (i, j+1), (i, j-1)]:
if not (0 <= n_i < m and 0 <= n_j < n) or v[n_i][n_j]:
continue
if grid[n_i][n_j] == 1:
v[n_i][n_j] = True
island.append((n_i - x, n_j - y))
q.append((n_i, n_j))
return str(island)
for i in range(m):
for j in range(n):
if v[i][j]: continue
if grid[i][j] == 1 and not v[i][j]:
v[i][j] = True
island_list.add(bfs(i, j, v))
return len(island_list)
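# Example (hypothetical input): both islands below have the same shape once
# expressed relative to their BFS starting cell, so the answer is 1.
#
#   grid = [[1, 1, 0, 0],
#           [0, 1, 0, 0],
#           [0, 0, 1, 1],
#           [0, 0, 0, 1]]
#   Solution().numDistinctIslands(grid)  # -> 1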
| gpl-3.0 | 8,159,184,933,781,888,000 | 30.756757 | 74 | 0.348382 | false |
facebookresearch/ParlAI | parlai/agents/examples/transformer_variant.py | 1 | 6891 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Example code for specifying custom transformer variants.
TransformerVariantAgent:
- Minimal changes needed to:
- Swap out a high-level component (encoder)
- Swap out a low-level component (decoder->layer->self_attention)
VerboseTransformerAgent:
- Doesn't swap out anything
- Fully specifies all components, for illustration
ConfigurableTransformerAgent:
- Swaps out components based on command line args
"""
from __future__ import annotations
import torch
from enum import Enum
from typing import Dict, Optional, Tuple, Union
from parlai.agents.transformer.modules import (
TransformerFFN,
MultiHeadAttention,
TransformerDecoder,
TransformerDecoderLayer,
TransformerEncoder,
TransformerEncoderLayer,
TransformerGeneratorModel,
)
from parlai.agents.transformer.transformer import TransformerGeneratorAgent
from parlai.core.opt import Opt
from parlai.core.params import ParlaiParser
import parlai.utils.logging as logging
###########################################
# Transformer With Two Components Swapped #
###########################################
class TransformerVariantAgent(TransformerGeneratorAgent):
"""
Swapping out two things:
1. Encoder (high-level component)
2. Decoder self attention (low-level component)
"""
def build_model(self, states=None):
wrapped_class = TransformerGeneratorModel.with_components(
encoder=MyCustomEncoder,
decoder=TransformerDecoder.with_components(
layer=TransformerDecoderLayer.with_components(
self_attention=MyCustomAttention
)
),
)
return wrapped_class(self.opt, self.dict)
class MyCustomEncoder(TransformerEncoder):
"""
For brevity this subclasses TransformerEncoder, but you could write your own
nn.Module from scratch as long as the __init__ and forward signatures match
TransformerEncoder.
"""
def forward(
self,
input: torch.LongTensor,
positions: Optional[torch.LongTensor] = None,
segments: Optional[torch.LongTensor] = None,
) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.BoolTensor]]:
logging.info("Custom encoder called!")
# Comment out the following line and write your custom `forward` instead.
return super().forward(input, positions, segments) # type: ignore
class MyCustomAttention(MultiHeadAttention):
"""
For brevity this just renames MultiHeadAttention, but ideally you'd define a new
nn.Module with the same __init__ and forward signature as MultiHeadAttention.
"""
def forward(
self,
query: torch.Tensor,
key: Optional[torch.Tensor] = None,
value: Optional[torch.Tensor] = None,
mask: torch.Tensor = None,
incr_state: Optional[Dict[str, torch.Tensor]] = None,
static_kv: bool = False,
) -> Tuple[torch.Tensor, Dict[str, torch.Tensor], torch.Tensor]:
logging.info("Custom attention called!")
# Comment out the following line and write your custom `forward` instead.
return super().forward(
query,
key=key,
value=value,
mask=mask,
incr_state=incr_state,
static_kv=static_kv,
)
#######################################
# Fully-specified Default Transformer #
#######################################
class VerboseTransformerAgent(TransformerGeneratorAgent):
"""
Doesn't make any changes to TransformerGeneratorModel, just specifies all
subcomponents explicitly.
This is meant to be a reference for how to swap any component within
TransformerGeneratorModel.
"""
def build_model(self, states=None):
wrapped_class = TransformerGeneratorModel.with_components(
encoder=TransformerEncoder.with_components(
layer=TransformerEncoderLayer.with_components(
self_attention=MultiHeadAttention, feedforward=TransformerFFN
)
),
decoder=TransformerDecoder.with_components(
layer=TransformerDecoderLayer.with_components(
encoder_attention=MultiHeadAttention,
self_attention=MultiHeadAttention,
feedforward=TransformerFFN,
)
),
)
return wrapped_class(opt=self.opt, dictionary=self.dict)
################################################
# Command-line Configurable Custom Transformer #
################################################
class DecoderFeedForwardVariant(Enum):
ONE = 'one'
TWO = 'two'
class DecoderFFNOne(TransformerFFN):
def forward(self, x: torch.Tensor) -> torch.Tensor:
logging.info("Using Decoder FFN Variant One")
return super().forward(x)
class DecoderFFNTwo(TransformerFFN):
def forward(self, x: torch.Tensor) -> torch.Tensor:
logging.info("Using Decoder FFN Variant Two")
return super().forward(x)
class ConfigurableTransformerAgent(TransformerGeneratorAgent):
"""
Illustrates swapping out components based on command line args.
Specifically, swaps out the decoder ffn between two options.
"""
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
super().add_cmdline_args(parser, partial_opt=partial_opt)
agent = parser.add_argument_group('MyCustom Transformer Arguments')
        agent.add_argument(
'--decoder-ffn-variants',
type=DecoderFeedForwardVariant,
default=DecoderFeedForwardVariant.ONE,
help='Some variants in the decoder FFN implementation',
)
return agent # type: ignore
def build_model(self, states=None):
decoder_variant: DecoderFeedForwardVariant = self.opt['decoder_ffn_variants']
if decoder_variant == DecoderFeedForwardVariant.ONE:
decoder_ffn_class = DecoderFFNOne
elif decoder_variant == DecoderFeedForwardVariant.TWO:
decoder_ffn_class = DecoderFFNTwo
else:
logging.error(
'Invalid --decoder-ffn-variants option, defaulting to original ffn implementation.'
)
decoder_ffn_class = TransformerFFN
wrapped_class = TransformerGeneratorModel.with_components(
decoder=TransformerDecoder.with_components(
layer=TransformerDecoderLayer.with_components(
feedforward=decoder_ffn_class
)
)
)
return wrapped_class(opt=self.opt, dictionary=self.dict)
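# --- Editorial usage sketch (not part of the original file) -----------------
# Assuming this module is installed where ParlAI's agent loader can find it
# (the module path, task and flag values below are illustrative assumptions),
# the configurable agent could be trained with something like:
#
#   parlai train_model \
#       -m examples/transformer_variant:ConfigurableTransformerAgent \
#       --decoder-ffn-variants two -t convai2 --model-file /tmp/variant_model
#
# The enum-typed flag parses because argparse calls
# DecoderFeedForwardVariant('two'), which resolves to DecoderFeedForwardVariant.TWO.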
| mit | -694,793,330,464,155,900 | 32.779412 | 99 | 0.643448 | false |
perryl/morph | morphlib/morphloader.py | 1 | 26505 | # Copyright (C) 2013-2016 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# =*= License: GPL-2 =*=
import os
import collections
import warnings
import yaml
import morphlib
class MorphologySyntaxError(morphlib.Error):
pass
class MorphologyNotYamlError(MorphologySyntaxError):
def __init__(self, morphology, errmsg):
self.msg = 'Syntax error in morphology %s:\n%s' % (morphology, errmsg)
class NotADictionaryError(MorphologySyntaxError):
def __init__(self, morph_filename, errmsg=None):
self.msg = 'Not a dictionary: morphology %s' % morph_filename
if errmsg:
self.msg += "\n%s" % (errmsg)
class MorphologyValidationError(morphlib.Error):
pass
class UnknownKindError(MorphologyValidationError):
def __init__(self, kind, morph_filename):
self.msg = (
'Unknown kind %s in morphology %s' % (kind, morph_filename))
class MissingFieldError(MorphologyValidationError):
def __init__(self, field, morphology_name):
self.field = field
self.morphology_name = morphology_name
self.msg = (
'Missing field %s from morphology %s' % (field, morphology_name))
class InvalidStringError(MorphologyValidationError):
def __init__(self, field, spec, morph_filename):
self.field = field
self.spec = spec
self.morph_filename = morph_filename
MorphologyValidationError.__init__(
self, "Field '%(field)s' must be a non-empty string in %(spec)s"\
" for morphology %(morph_filename)s" % locals())
class InvalidFieldError(MorphologyValidationError):
def __init__(self, field, morphology_name):
self.field = field
self.morphology_name = morphology_name
self.msg = (
'Field %s not allowed in morphology %s' % (field, morphology_name))
class InvalidTypeError(MorphologyValidationError):
def __init__(self, field, expected, actual, morphology_name):
self.field = field
self.expected = expected
self.actual = actual
self.morphology_name = morphology_name
self.msg = (
'Field %s expected type %s, got %s in morphology %s' %
(field, expected, actual, morphology_name))
class UnknownArchitectureError(MorphologyValidationError):
def __init__(self, arch, morph_filename):
self.msg = ('Unknown architecture %s in morphology %s'
% (arch, morph_filename))
class UnknownBuildSystemError(MorphologyValidationError):
def __init__(self, build_system, morph_filename):
self.msg = ('Undefined build system %s in morphology %s'
% (build_system, morph_filename))
class NoStratumBuildDependenciesError(MorphologyValidationError):
def __init__(self, stratum_name, morph_filename):
self.msg = (
'Stratum %s has no build dependencies in %s' %
(stratum_name, morph_filename))
class EmptyStratumError(MorphologyValidationError):
def __init__(self, stratum_name, morph_filename):
self.msg = (
'Stratum %s has no chunks in %s' %
(stratum_name, morph_filename))
class DuplicateChunkError(MorphologyValidationError):
def __init__(self, stratum_name, chunk_name):
self.stratum_name = stratum_name
self.chunk_name = chunk_name
MorphologyValidationError.__init__(
self, 'Duplicate chunk %(chunk_name)s '\
'in stratum %(stratum_name)s' % locals())
class ChunkSpecConflictingFieldsError(MorphologyValidationError):
def __init__(self, fields, chunk_name, stratum_name):
self.chunk_name = chunk_name
self.stratum_name = stratum_name
self.fields = fields
MorphologyValidationError.__init__(
self, 'Conflicting fields "%s" for %s in stratum %s.' % (
', and '.join(fields), chunk_name, stratum_name))
class ChunkSpecNoBuildInstructionsError(MorphologyValidationError):
def __init__(self, chunk_name, stratum_name):
self.chunk_name = chunk_name
self.stratum_name = stratum_name
self.msg = (
'Chunk %(chunk_name)s in stratum %(stratum_name)s has no '
'build-system defined, and no chunk .morph file referenced '
'either. Please specify how to build the chunk, either by setting '
'"build-system: " in the stratum, or adding a chunk .morph file '
'and setting "morph: " in the stratum.' % locals())
class SystemStrataNotListError(MorphologyValidationError):
def __init__(self, system_name, strata_type):
self.system_name = system_name
self.strata_type = strata_type
typename = strata_type.__name__
MorphologyValidationError.__init__(
self, 'System %(system_name)s has the wrong type for its strata: '\
'%(typename)s, expected list' % locals())
class DuplicateStratumError(MorphologyValidationError):
def __init__(self, system_name, stratum_name):
self.system_name = system_name
self.stratum_name = stratum_name
MorphologyValidationError.__init__(
self, 'Duplicate stratum %(stratum_name)s '\
'in system %(system_name)s' % locals())
class SystemStratumSpecsNotMappingError(MorphologyValidationError):
def __init__(self, system_name, strata):
self.system_name = system_name
self.strata = strata
MorphologyValidationError.__init__(
self, 'System %(system_name)s has stratum specs '\
'that are not mappings.' % locals())
class EmptySystemError(MorphologyValidationError):
def __init__(self, system_name):
MorphologyValidationError.__init__(
self, 'System %(system_name)s has no strata.' % locals())
class DependsOnSelfError(MorphologyValidationError):
def __init__(self, name, filename):
msg = ("Stratum %(name)s build-depends on itself (%(filename)s)"
% locals())
MorphologyValidationError.__init__(self, msg)
class MultipleValidationErrors(MorphologyValidationError):
def __init__(self, name, errors):
self.name = name
self.errors = errors
        self.msg = 'Multiple errors when validating %(name)s:' % locals()
for error in errors:
self.msg += ('\n' + str(error))
class DuplicateDeploymentNameError(MorphologyValidationError):
def __init__(self, cluster_filename, duplicates):
self.duplicates = duplicates
self.cluster_filename = cluster_filename
morphlib.Error.__init__(self,
'Cluster %s contains the following duplicate deployment names:%s'
% (cluster_filename, '\n ' + '\n '.join(duplicates)))
class MorphologyDumper(yaml.SafeDumper):
keyorder = (
'name',
'kind',
'description',
'arch',
'strata',
'configuration-extensions',
'morph',
'repo',
'ref',
'unpetrify-ref',
'build-depends',
'build-mode',
'artifacts',
'max-jobs',
'submodules',
'products',
'chunks',
'build-system',
'pre-configure-commands',
'configure-commands',
'post-configure-commands',
'pre-build-commands',
'build-commands',
'pre-test-commands',
'test-commands',
'post-test-commands',
'post-build-commands',
'pre-install-commands',
'install-commands',
'post-install-commands',
'artifact',
'include',
'systems',
'deploy-defaults',
'deploy',
'type',
'location',
)
@classmethod
def _iter_in_global_order(cls, mapping):
for key in cls.keyorder:
if key in mapping:
yield key, mapping[key]
for key in sorted(mapping.iterkeys()):
if key not in cls.keyorder:
yield key, mapping[key]
@classmethod
def _represent_dict(cls, dumper, mapping):
return dumper.represent_mapping('tag:yaml.org,2002:map',
cls._iter_in_global_order(mapping))
@classmethod
def _represent_str(cls, dumper, orig_data):
fallback_representer = yaml.representer.SafeRepresenter.represent_str
try:
data = unicode(orig_data, 'ascii')
if data.count('\n') == 0:
return fallback_representer(dumper, orig_data)
except UnicodeDecodeError:
try:
data = unicode(orig_data, 'utf-8')
if data.count('\n') == 0:
return fallback_representer(dumper, orig_data)
except UnicodeDecodeError:
return fallback_representer(dumper, orig_data)
return dumper.represent_scalar(u'tag:yaml.org,2002:str',
data, style='|')
@classmethod
def _represent_unicode(cls, dumper, data):
if data.count('\n') == 0:
return yaml.representer.SafeRepresenter.represent_unicode(dumper,
data)
return dumper.represent_scalar(u'tag:yaml.org,2002:str',
data, style='|')
def __init__(self, *args, **kwargs):
yaml.SafeDumper.__init__(self, *args, **kwargs)
self.add_representer(dict, self._represent_dict)
self.add_representer(str, self._represent_str)
self.add_representer(unicode, self._represent_unicode)
class MorphologyLoader(object):
'''Load morphologies from disk, or save them back to disk.'''
_required_fields = {
'chunk': [
'name',
],
'stratum': [
'name',
],
'system': [
'name',
'arch',
'strata',
],
'cluster': [
'name',
'systems',
],
}
_static_defaults = {
'chunk': {
'description': '',
'pre-configure-commands': None,
'configure-commands': None,
'post-configure-commands': None,
'pre-build-commands': None,
'build-commands': None,
'post-build-commands': None,
'pre-test-commands': None,
'test-commands': None,
'post-test-commands': None,
'pre-install-commands': None,
'install-commands': None,
'post-install-commands': None,
'pre-strip-commands': None,
'strip-commands': None,
'post-strip-commands': None,
'devices': [],
'submodules': {},
'products': [],
'max-jobs': None,
'build-system': 'manual',
'build-mode': 'staging',
'prefix': '/usr',
'system-integration': [],
},
'stratum': {
'chunks': [],
'description': '',
'build-depends': [],
'products': [],
},
'system': {
'description': '',
'arch': None,
'configuration-extensions': [],
},
'cluster': {
'description': '',
},
}
def __init__(self,
predefined_build_systems={}):
self._predefined_build_systems = predefined_build_systems.copy()
if 'manual' not in self._predefined_build_systems:
self._predefined_build_systems['manual'] = \
morphlib.buildsystem.ManualBuildSystem()
def load_from_string(self, string, filename='string',
set_defaults=True): # pragma: no cover
'''Load a morphology from a string.
Return the Morphology object.
'''
try:
obj = yaml.safe_load(string)
except yaml.error.YAMLError as e:
raise MorphologyNotYamlError(filename, e)
if not isinstance(obj, dict):
raise NotADictionaryError(filename)
m = morphlib.morphology.Morphology(obj)
m.filename = filename
self.validate(m)
if set_defaults:
self.set_commands(m)
self.set_defaults(m)
return m
def load_from_file(self, filename, set_defaults=True):
'''Load a morphology from a named file.
Return the Morphology object.
'''
with open(filename) as f:
text = f.read()
return self.load_from_string(text, filename=filename,
set_defaults=set_defaults)
def save_to_string(self, morphology):
'''Return normalised textual form of morphology.'''
return yaml.dump(morphology.data, Dumper=MorphologyDumper,
default_flow_style=False)
def save_to_file(self, filename, morphology):
'''Save a morphology object to a named file.'''
text = self.save_to_string(morphology)
with morphlib.savefile.SaveFile(filename, 'w') as f:
f.write(text)
def validate(self, morph):
'''Validate a morphology.'''
# Validate that the kind field is there.
self._require_field('kind', morph)
# The rest of the validation is dependent on the kind.
kind = morph['kind']
if kind not in ('system', 'stratum', 'chunk', 'cluster'):
raise UnknownKindError(morph['kind'], morph.filename)
required = ['kind'] + self._required_fields[kind]
allowed = self._static_defaults[kind].keys()
self._require_fields(required, morph)
self._deny_unknown_fields(required + allowed, morph)
getattr(self, '_validate_%s' % kind)(morph)
def _validate_cluster(self, morph):
# Deployment names must be unique within a cluster
deployments = collections.Counter()
for system in morph['systems']:
deployments.update(system['deploy'].iterkeys())
if 'subsystems' in system:
deployments.update(self._get_subsystem_names(system))
duplicates = set(deployment for deployment, count
in deployments.iteritems() if count > 1)
if duplicates:
raise DuplicateDeploymentNameError(morph.filename, duplicates)
def _get_subsystem_names(self, system): # pragma: no cover
for subsystem in system.get('subsystems', []):
for name in subsystem['deploy'].iterkeys():
yield name
for name in self._get_subsystem_names(subsystem):
yield name
def _validate_system(self, morph):
# A system must contain at least one stratum
strata = morph['strata']
if (not isinstance(strata, collections.Iterable)
or isinstance(strata, collections.Mapping)):
raise SystemStrataNotListError(morph['name'],
type(strata))
if not strata:
raise EmptySystemError(morph['name'])
if not all(isinstance(o, collections.Mapping) for o in strata):
raise SystemStratumSpecsNotMappingError(morph['name'], strata)
# All stratum names should be unique within a system.
names = set()
for spec in strata:
name = spec['morph']
if name in names:
raise DuplicateStratumError(morph['name'], name)
names.add(name)
# Architecture name must be known.
if morph['arch'] not in morphlib.valid_archs:
raise UnknownArchitectureError(morph['arch'], morph.filename)
def _validate_stratum(self, morph):
# Require at least one chunk.
if len(morph.get('chunks', [])) == 0:
raise EmptyStratumError(morph['name'], morph.filename)
# Require build-dependencies for the stratum itself, unless
# it has chunks built in bootstrap mode.
if 'build-depends' in morph:
if not isinstance(morph['build-depends'], list):
raise InvalidTypeError(
'build-depends', list, type(morph['build-depends']),
morph['name'])
for dep in morph['build-depends']:
if dep['morph'] == morph.filename:
raise DependsOnSelfError(morph['name'], morph.filename)
else:
for spec in morph['chunks']:
if spec.get('build-mode') in ['bootstrap', 'test']:
break
else:
raise NoStratumBuildDependenciesError(
morph['name'], morph.filename)
# All chunk names must be unique within a stratum.
names = set()
for spec in morph['chunks']:
name = spec['name']
if name in names:
raise DuplicateChunkError(morph['name'], name)
names.add(name)
# Check each reference to a chunk.
for spec in morph['chunks']:
chunk_name = spec['name']
# All chunks repos and refs must be strings
def validate_chunk_str_field(field, spec, morph_filename):
if field not in spec:
raise MissingFieldError('%s in %s' % (field, spec),
morph.filename)
val = spec[field]
if not val or not isinstance(val, basestring) or (
not val.strip()):
raise InvalidStringError(
field, spec, morph_filename)
validate_chunk_str_field('repo', spec, morph.filename)
validate_chunk_str_field('ref', spec, morph.filename)
# The build-depends field must be a list.
if 'build-depends' in spec:
if not isinstance(spec['build-depends'], list):
raise InvalidTypeError(
'%s.build-depends' % chunk_name, list,
type(spec['build-depends']), morph['name'])
# Either 'morph' or 'build-system' must be specified.
if 'morph' in spec and 'build-system' in spec:
raise ChunkSpecConflictingFieldsError(
['morph', 'build-system'], chunk_name, morph.filename)
if 'morph' not in spec and 'build-system' not in spec:
raise ChunkSpecNoBuildInstructionsError(
chunk_name, morph.filename)
def validate_submodules(submodules, morph_filename):
for sub_name in submodules:
validate_chunk_str_field('url', submodules[sub_name],
morph_filename)
if 'submodules' in spec:
if not isinstance(spec['submodules'], dict):
raise NotADictionaryError(
morph.filename, "The 'submodules' in chunk '%s' have "
"to be a dict" % (chunk_name))
validate_submodules(spec['submodules'], morph.filename)
@classmethod
def _validate_chunk(cls, morphology):
errors = []
if 'products' in morphology:
cls._validate_products(morphology['name'],
morphology['products'], errors)
for key in MorphologyDumper.keyorder:
if key.endswith('-commands') and key in morphology:
cls._validate_commands(morphology['name'], key,
morphology[key], errors)
if len(errors) == 1:
raise errors[0]
elif errors:
raise MultipleValidationErrors(morphology['name'], errors)
@classmethod
def _validate_commands(cls, morphology_name, key, commands, errors):
if commands is None:
return
for cmd_index, cmd in enumerate(commands): # pragma: no cover
if not isinstance(cmd, basestring):
e = InvalidTypeError('%s[%d]' % (key, cmd_index),
str, type(cmd), morphology_name)
errors.append(e)
@classmethod
def _validate_products(cls, morphology_name, products, errors):
'''Validate the products field is of the correct type.'''
if (not isinstance(products, collections.Iterable)
or isinstance(products, collections.Mapping)):
raise InvalidTypeError('products', list,
type(products), morphology_name)
for spec_index, spec in enumerate(products):
if not isinstance(spec, collections.Mapping):
e = InvalidTypeError('products[%d]' % spec_index,
dict, type(spec), morphology_name)
errors.append(e)
continue
cls._validate_products_spec_fields_exist(morphology_name,
spec_index, spec, errors)
if 'include' in spec:
cls._validate_products_specs_include(
morphology_name, spec_index, spec['include'], errors)
product_spec_required_fields = ('artifact', 'include')
@classmethod
def _validate_products_spec_fields_exist(
cls, morphology_name, spec_index, spec, errors):
given_fields = sorted(spec.iterkeys())
missing = (field for field in cls.product_spec_required_fields
if field not in given_fields)
for field in missing:
e = MissingFieldError('products[%d].%s' % (spec_index, field),
morphology_name)
errors.append(e)
unexpected = (field for field in given_fields
if field not in cls.product_spec_required_fields)
for field in unexpected:
e = InvalidFieldError('products[%d].%s' % (spec_index, field),
morphology_name)
errors.append(e)
@classmethod
def _validate_products_specs_include(cls, morphology_name, spec_index,
include_patterns, errors):
'''Validate that products' include field is a list of strings.'''
# Allow include to be most iterables, but not a mapping
# or a string, since iter of a mapping is just the keys,
# and the iter of a string is a 1 character length string,
# which would also validate as an iterable of strings.
if (not isinstance(include_patterns, collections.Iterable)
or isinstance(include_patterns, collections.Mapping)
or isinstance(include_patterns, basestring)):
e = InvalidTypeError('products[%d].include' % spec_index, list,
type(include_patterns), morphology_name)
errors.append(e)
else:
for pattern_index, pattern in enumerate(include_patterns):
pattern_path = ('products[%d].include[%d]' %
(spec_index, pattern_index))
if not isinstance(pattern, basestring):
e = InvalidTypeError(pattern_path, str,
type(pattern), morphology_name)
errors.append(e)
def _require_field(self, field, morphology):
if field not in morphology:
raise MissingFieldError(field, morphology.filename)
def _require_fields(self, fields, morphology):
for field in fields:
self._require_field(field, morphology)
def _deny_unknown_fields(self, allowed, morphology):
for field in morphology:
if field not in allowed:
raise InvalidFieldError(field, morphology.filename)
def set_defaults(self, morphology):
        '''Set all missing fields in the morphology to their defaults.
The morphology is assumed to be valid.
'''
kind = morphology['kind']
defaults = self._static_defaults[kind]
for key in defaults:
if key not in morphology:
morphology[key] = defaults[key]
getattr(self, '_set_%s_defaults' % kind)(morphology)
def _set_cluster_defaults(self, morph):
for system in morph.get('systems', []):
if 'deploy-defaults' not in system:
system['deploy-defaults'] = {}
if 'deploy' not in system:
system['deploy'] = {}
def _set_system_defaults(self, morph):
pass
def _set_stratum_defaults(self, morph):
for spec in morph['chunks']:
if 'repo' not in spec:
spec['repo'] = spec['name']
if 'build-mode' not in spec:
spec['build-mode'] = \
self._static_defaults['chunk']['build-mode']
if 'prefix' not in spec:
spec['prefix'] = \
self._static_defaults['chunk']['prefix']
if 'submodules' not in spec:
spec['submodules'] = \
self._static_defaults['chunk']['submodules']
def _set_chunk_defaults(self, morph):
if morph['max-jobs'] is not None:
morph['max-jobs'] = int(morph['max-jobs'])
def lookup_build_system(self, name):
return self._predefined_build_systems[name]
def set_commands(self, morph):
if morph['kind'] == 'chunk':
default = self._static_defaults['chunk']['build-system']
bs_name = morph.get('build-system', default)
try:
bs = self.lookup_build_system(bs_name)
except KeyError:
raise UnknownBuildSystemError(bs_name, morph['name'])
for key in self._static_defaults['chunk']:
if 'commands' not in key: continue
if key not in morph:
attr = '_'.join(key.split('-'))
morph[key] = getattr(bs, attr)
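# --- Editorial usage sketch (not part of morphlib itself) -------------------
# A minimal round trip through the loader; the morphology text is illustrative:
#
#   loader = MorphologyLoader()
#   morph = loader.load_from_string(
#       'name: hello\nkind: chunk\nbuild-system: manual\n')
#   print loader.save_to_string(morph)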
| gpl-2.0 | -5,550,887,847,934,322,000 | 34.577181 | 79 | 0.565554 | false |
bshillingford/python-torchfile | torchfile.py | 1 | 15223 | """
Mostly direct port of the Lua and C serialization implementation to
Python, depending only on `struct`, `array`, and numpy.
Supported types:
* `nil` to Python `None`
* numbers to Python floats, or by default a heuristic changes them to ints or
longs if they are integral
* booleans
* strings: read as byte strings (Python 3) or normal strings (Python 2), like
lua strings which don't support unicode, and that can contain null chars
* tables converted to a special dict (*); if they are list-like (i.e. have
numeric keys from 1 through n) they become a python list by default
* Torch classes: supports Tensors and Storages, and most classes such as
modules. Trivially extensible much like the Torch serialization code.
Trivial torch classes like most `nn.Module` subclasses become
`TorchObject`s. The `type_handlers` dict contains the mapping from class
names to reading functions.
* functions: loaded into the `LuaFunction` `namedtuple`,
which simply wraps the raw serialized data, i.e. upvalues and code.
These are mostly useless, but exist so you can deserialize anything.
(*) Since Lua allows you to index a table with a table but Python does not, we
replace dicts with a subclass that is hashable, and change its
equality comparison behaviour to compare by reference.
See `hashable_uniq_dict`.
Currently, the implementation assumes the system-dependent binary Torch
format, but minor refactoring can give support for the ascii format as well.
"""
import struct
from array import array
import numpy as np
import sys
from collections import namedtuple
TYPE_NIL = 0
TYPE_NUMBER = 1
TYPE_STRING = 2
TYPE_TABLE = 3
TYPE_TORCH = 4
TYPE_BOOLEAN = 5
TYPE_FUNCTION = 6
TYPE_RECUR_FUNCTION = 8
LEGACY_TYPE_RECUR_FUNCTION = 7
LuaFunction = namedtuple('LuaFunction',
['size', 'dumped', 'upvalues'])
class hashable_uniq_dict(dict):
"""
Subclass of dict with equality and hashing semantics changed:
equality and hashing is purely by reference/instance, to match
the behaviour of lua tables.
Supports lua-style dot indexing.
This way, dicts can be keys of other dicts.
"""
def __hash__(self):
return id(self)
def __getattr__(self, key):
if key in self:
return self[key]
if isinstance(key, (str, bytes)):
return self.get(key.encode('utf8'))
def __eq__(self, other):
return id(self) == id(other)
def __ne__(self, other):
return id(self) != id(other)
def _disabled_binop(self, other):
raise TypeError(
'hashable_uniq_dict does not support these comparisons')
__cmp__ = __ne__ = __le__ = __gt__ = __lt__ = _disabled_binop
class TorchObject(object):
"""
Simple torch object, used by `add_trivial_class_reader`.
Supports both forms of lua-style indexing, i.e. getattr and getitem.
Use the `torch_typename` method to get the object's torch class name.
Equality is by reference, as usual for lua (and the default for Python
objects).
"""
def __init__(self, typename, obj=None, version_number=0):
self._typename = typename
self._obj = obj
self._version_number = version_number
def __getattr__(self, k):
if k in self._obj:
return self._obj[k]
if isinstance(k, (str, bytes)):
return self._obj.get(k.encode('utf8'))
def __getitem__(self, k):
if k in self._obj:
return self._obj[k]
if isinstance(k, (str, bytes)):
return self._obj.get(k.encode('utf8'))
def torch_typename(self):
return self._typename
def __repr__(self):
return "TorchObject(%s, %s)" % (self._typename, repr(self._obj))
def __str__(self):
return repr(self)
def __dir__(self):
keys = list(self._obj.keys())
keys.append('torch_typename')
return keys
type_handlers = {}
def register_handler(typename):
def do_register(handler):
type_handlers[typename] = handler
return do_register
def add_tensor_reader(typename, dtype):
def read_tensor_generic(reader, version):
# https://github.com/torch/torch7/blob/1e86025/generic/Tensor.c#L1249
ndim = reader.read_int()
size = reader.read_long_array(ndim)
stride = reader.read_long_array(ndim)
storage_offset = reader.read_long() - 1 # 0-indexing
# read storage:
storage = reader.read_obj()
if storage is None or ndim == 0 or len(size) == 0 or len(stride) == 0:
# empty torch tensor
return np.empty((0), dtype=dtype)
# convert stride to numpy style (i.e. in bytes)
stride = [storage.dtype.itemsize * x for x in stride]
# create numpy array that indexes into the storage:
return np.lib.stride_tricks.as_strided(
storage[storage_offset:],
shape=size,
strides=stride)
type_handlers[typename] = read_tensor_generic
add_tensor_reader(b'torch.ByteTensor', dtype=np.uint8)
add_tensor_reader(b'torch.CharTensor', dtype=np.int8)
add_tensor_reader(b'torch.ShortTensor', dtype=np.int16)
add_tensor_reader(b'torch.IntTensor', dtype=np.int32)
add_tensor_reader(b'torch.LongTensor', dtype=np.int64)
add_tensor_reader(b'torch.FloatTensor', dtype=np.float32)
add_tensor_reader(b'torch.DoubleTensor', dtype=np.float64)
add_tensor_reader(b'torch.CudaTensor', dtype=np.float32)
add_tensor_reader(b'torch.CudaByteTensor', dtype=np.uint8)
add_tensor_reader(b'torch.CudaCharTensor', dtype=np.int8)
add_tensor_reader(b'torch.CudaShortTensor', dtype=np.int16)
add_tensor_reader(b'torch.CudaIntTensor', dtype=np.int32)
add_tensor_reader(b'torch.CudaDoubleTensor', dtype=np.float64)
def add_storage_reader(typename, dtype):
def read_storage(reader, version):
# https://github.com/torch/torch7/blob/1e86025/generic/Storage.c#L237
size = reader.read_long()
return np.fromfile(reader.f, dtype=dtype, count=size)
type_handlers[typename] = read_storage
add_storage_reader(b'torch.ByteStorage', dtype=np.uint8)
add_storage_reader(b'torch.CharStorage', dtype=np.int8)
add_storage_reader(b'torch.ShortStorage', dtype=np.int16)
add_storage_reader(b'torch.IntStorage', dtype=np.int32)
add_storage_reader(b'torch.LongStorage', dtype=np.int64)
add_storage_reader(b'torch.FloatStorage', dtype=np.float32)
add_storage_reader(b'torch.DoubleStorage', dtype=np.float64)
add_storage_reader(b'torch.CudaStorage', dtype=np.float32)
add_storage_reader(b'torch.CudaByteStorage', dtype=np.uint8)
add_storage_reader(b'torch.CudaCharStorage', dtype=np.int8)
add_storage_reader(b'torch.CudaShortStorage', dtype=np.int16)
add_storage_reader(b'torch.CudaIntStorage', dtype=np.int32)
add_storage_reader(b'torch.CudaDoubleStorage', dtype=np.float64)
def add_notimpl_reader(typename):
def read_notimpl(reader, version):
raise NotImplementedError('Reader not implemented for: ' + typename)
type_handlers[typename] = read_notimpl
add_notimpl_reader(b'torch.HalfTensor')
add_notimpl_reader(b'torch.HalfStorage')
add_notimpl_reader(b'torch.CudaHalfTensor')
add_notimpl_reader(b'torch.CudaHalfStorage')
@register_handler(b'tds.Vec')
def tds_Vec_reader(reader, version):
size = reader.read_int()
obj = []
_ = reader.read_obj()
for i in range(size):
e = reader.read_obj()
obj.append(e)
return obj
@register_handler(b'tds.Hash')
def tds_Hash_reader(reader, version):
size = reader.read_int()
obj = hashable_uniq_dict()
_ = reader.read_obj()
for i in range(size):
k = reader.read_obj()
v = reader.read_obj()
obj[k] = v
return obj
class T7ReaderException(Exception):
pass
class T7Reader:
def __init__(self,
fileobj,
use_list_heuristic=True,
use_int_heuristic=True,
utf8_decode_strings=False,
force_deserialize_classes=None,
force_8bytes_long=False):
"""
Params:
* `fileobj`: file object to read from, must be an actual file object
as it will be read by `array`, `struct`, and `numpy`. Since
it is only read sequentially, certain objects like pipes or
`sys.stdin` should work as well (untested).
* `use_list_heuristic`: automatically turn tables with only consecutive
positive integral indices into lists
(default True)
* `use_int_heuristic`: cast all whole floats into ints (default True)
* `utf8_decode_strings`: decode all strings as UTF8. By default they
remain as byte strings. Version strings always
are byte strings, but this setting affects
class names. (default False)
* `force_deserialize_classes`: deprecated.
"""
self.f = fileobj
self.objects = {} # read objects so far
if force_deserialize_classes is not None:
raise DeprecationWarning(
'force_deserialize_classes is now always '
'forced to be true, so no longer required')
self.use_list_heuristic = use_list_heuristic
self.use_int_heuristic = use_int_heuristic
self.utf8_decode_strings = utf8_decode_strings
self.force_8bytes_long = force_8bytes_long
def _read(self, fmt):
sz = struct.calcsize(fmt)
return struct.unpack(fmt, self.f.read(sz))
def read_boolean(self):
return self.read_int() == 1
def read_int(self):
return self._read('i')[0]
def read_long(self):
if self.force_8bytes_long:
return self._read('q')[0]
else:
return self._read('l')[0]
def read_long_array(self, n):
if self.force_8bytes_long:
lst = []
for i in range(n):
lst.append(self.read_long())
return lst
else:
arr = array('l')
arr.fromfile(self.f, n)
return arr.tolist()
def read_float(self):
return self._read('f')[0]
def read_double(self):
return self._read('d')[0]
def read_string(self, disable_utf8=False):
size = self.read_int()
s = self.f.read(size)
if disable_utf8 or not self.utf8_decode_strings:
return s
return s.decode('utf8')
def read_obj(self):
typeidx = self.read_int()
if typeidx == TYPE_NIL:
return None
elif typeidx == TYPE_NUMBER:
x = self.read_double()
# Extra checking for integral numbers:
if self.use_int_heuristic and x.is_integer():
return int(x)
return x
elif typeidx == TYPE_BOOLEAN:
return self.read_boolean()
elif typeidx == TYPE_STRING:
return self.read_string()
elif (typeidx == TYPE_TABLE or typeidx == TYPE_TORCH or
typeidx == TYPE_FUNCTION or typeidx == TYPE_RECUR_FUNCTION or
typeidx == LEGACY_TYPE_RECUR_FUNCTION):
# read the object reference index
index = self.read_int()
# check it is loaded already
if index in self.objects:
return self.objects[index]
# otherwise read it
if (typeidx == TYPE_FUNCTION or typeidx == TYPE_RECUR_FUNCTION or
typeidx == LEGACY_TYPE_RECUR_FUNCTION):
size = self.read_int()
dumped = self.f.read(size)
upvalues = self.read_obj()
obj = LuaFunction(size, dumped, upvalues)
self.objects[index] = obj
return obj
elif typeidx == TYPE_TORCH:
version = self.read_string(disable_utf8=True)
if version.startswith(b'V '):
version_number = int(float(version.partition(b' ')[2]))
class_name = self.read_string(disable_utf8=True)
else:
class_name = version
# created before existence of versioning
version_number = 0
if class_name in type_handlers:
# TODO: can custom readers ever be self-referential?
self.objects[index] = None # FIXME: if self-referential
obj = type_handlers[class_name](self, version)
self.objects[index] = obj
else:
# This must be performed in two steps to allow objects
# to be a property of themselves.
obj = TorchObject(
class_name, version_number=version_number)
self.objects[index] = obj
# After self.objects is populated, it's safe to read in
# case self-referential
obj._obj = self.read_obj()
return obj
else: # it is a table: returns a custom dict or a list
size = self.read_int()
# custom hashable dict, so that it can be a key, see above
obj = hashable_uniq_dict()
# For checking if keys are consecutive and positive ints;
# if so, returns a list with indices converted to 0-indices.
key_sum = 0
keys_natural = True
# bugfix: obj must be registered before reading keys and vals
self.objects[index] = obj
for _ in range(size):
k = self.read_obj()
v = self.read_obj()
obj[k] = v
if self.use_list_heuristic:
if not isinstance(k, int) or k <= 0:
keys_natural = False
elif isinstance(k, int):
key_sum += k
if self.use_list_heuristic:
# n(n+1)/2 = sum <=> consecutive and natural numbers
n = len(obj)
if keys_natural and n * (n + 1) == 2 * key_sum:
lst = []
for i in range(len(obj)):
elem = obj[i + 1]
# In case it is self-referential. This is not
# needed in lua torch since the tables are never
# modified as they are here.
if elem == obj:
elem = lst
lst.append(elem)
self.objects[index] = obj = lst
return obj
else:
raise T7ReaderException(
"unknown object type / typeidx: {}".format(typeidx))
def load(filename, **kwargs):
"""
Loads the given t7 file using default settings; kwargs are forwarded
to `T7Reader`.
"""
with open(filename, 'rb') as f:
reader = T7Reader(f, **kwargs)
return reader.read_obj()
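if __name__ == '__main__':
    # Editorial demo, not part of the original module: deserialize the .t7
    # file named on the command line and print the resulting Python object.
    print(load(sys.argv[1]))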
| bsd-3-clause | 4,489,873,722,622,736,000 | 34.903302 | 79 | 0.588649 | false |
paulross/cpip | src/cpip/TokenCss.py | 1 | 8529 | #!/usr/bin/env python
# CPIP is a C/C++ Preprocessor implemented in Python.
# Copyright (C) 2008-2017 Paul Ross
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Paul Ross: [email protected]
"""CSS Support for ITU+TU files in HTML."""
__author__ = 'Paul Ross'
__date__ = '2011-07-10'
__rights__ = 'Copyright (c) 2008-2017 Paul Ross'
import os
from cpip import ExceptionCpip
from cpip.core import ItuToTokens
#from cpip.util import XmlWrite
import string
class ExceptionTokenCss(ExceptionCpip):
pass
#: Map of {token_type : enum_int, ...}
TT_ENUM_MAP = {}
#: Reverse map of {enum_int : token_type, ...}
ENUM_TT_MAP = {}
for __i, __tt in enumerate(ItuToTokens.ITU_TOKEN_TYPES):
__enum = string.ascii_lowercase[__i]
TT_ENUM_MAP[__tt] = __enum
ENUM_TT_MAP[__enum] = __tt
ITU_CSS_LIST = [
"""/* Conditionally compiled == %s. */
span.%s {
background-color: GreenYellow;
}""" % (True, True),
"""/* Conditionally compiled == %s. */
span.%s {
background-color: Salmon;
}""" % (False, False),
"""/* Conditionally compiled == %s. */
span.%s {
background-color: yellowgreen;
}""" % ('Maybe', 'Maybe'),
"""/* %s */
span.%s {
color: Chartreuse;
font-style: italic;
}""" % ('header-name', TT_ENUM_MAP['header-name']),
"""/* %s */
span.%s {
color: BlueViolet;
font-style: normal;
}""" % ('identifier', TT_ENUM_MAP['identifier']),
"""/* %s */
span.%s {
color: HotPink;
font-style: normal;
}""" % ('pp-number', TT_ENUM_MAP['pp-number']),
"""/* %s */
span.%s {
color: orange;
font-style: italic;
}""" % ('character-literal', TT_ENUM_MAP['character-literal']),
"""/* %s */
span.%s {
color: LimeGreen;
font-style: italic;
}""" % ('string-literal', TT_ENUM_MAP['string-literal']),
"""/* %s */
span.%s {
color: black;
font-weight: bold;
font-style: normal;
}""" % ('preprocessing-op-or-punc', TT_ENUM_MAP['preprocessing-op-or-punc']),
"""/* %s */
span.%s {
color: silver;
font-style: normal;
}""" % ('non-whitespace', TT_ENUM_MAP['non-whitespace']),
"""/* %s */
span.%s {
color: black;
font-style: normal;
}""" % ('whitespace', TT_ENUM_MAP['whitespace']),
"""/* %s */
span.%s {
color: black;
font-style: normal;
}""" % ('concat', TT_ENUM_MAP['concat']),
"""/* %s */
span.%s {
color: red;
font-style: normal;
}""" % ('trigraph', TT_ENUM_MAP['trigraph']),
"""/* %s */
span.%s {
color: sienna;
font-style: normal;
}""" % ('C comment', TT_ENUM_MAP['C comment']),
"""/* %s */
span.%s {
color: peru;
font-style: normal;
}""" % ('C++ comment', TT_ENUM_MAP['C++ comment']),
"""/* %s */
span.%s {
color: red;
font-style: normal;
}""" % ('keyword', TT_ENUM_MAP['keyword']),
"""/* %s */
span.%s {
color: blue;
font-style: normal;
}""" % ('preprocessing-directive', TT_ENUM_MAP['preprocessing-directive']),
"""/* %s */
span.%s {
color: red;
font-style: italic;
}""" % ('Unknown', TT_ENUM_MAP['Unknown']),
# Other non-enumerated styles
# HTML styling
"""body {
font-size: 12px;
font-family: arial,helvetica,sans-serif;
margin: 6px;
padding: 6px;
}""",
#===============================================================================
# """h1 {
# font-family: Sans-serif;
# font-size: 1.5em;
# color: silver;
# font-style: italic;
# }""",
#===============================================================================
"""h1 {
color: darkgoldenrod;
font-family: sans-serif;
font-size: 14pt;
font-weight: bold;
}""",
"""h2 {
color: IndianRed;
font-family: sans-serif;
font-size: 14pt;
font-weight: normal;
}""",
"""h3 {
color: Black;
font-family: sans-serif;
font-size: 12pt;
font-weight: bold;
}""",
"""h4 {
color: FireBrick;
font-family: sans-serif;
font-size: 10pt;
font-weight: bold;
}""",
# Specialised classes
# Line numbers
"""span.line {
color: slategrey;
/*font-style: italic; */
}""",
# File names
"""span.file {
color: black;
font-style: italic;
}""",
# Files in tables
"""table.filetable {
border: 2px solid black;
font-family: monospace;
color: black;
}""",
"""th.filetable, td.filetable {
/* border: 1px solid black; */
border: 1px;
border-top-style:solid;
border-right-style:dotted;
border-bottom-style:none;
border-left-style:none;
vertical-align:top;
padding: 2px 6px 2px 6px;
}""",
# Monospaced tables e.g. for token counts
"""table.monospace {
border: 2px solid black;
border-collapse: collapse;
font-family: monospace;
color: black;
}""",
"""th.monospace, td.monospace {
border: 1px solid black;
vertical-align: top;
padding: 2px 6px 2px 6px;
}""",
# Macro presentation
"""span.macro_s_f_r_f_name{
color: DarkSlateGray;
font-family: monospace;
font-weight: normal;
font-style: italic;
}""",
"""span.macro_s_t_r_f_name {
color: DarkSlateGray;
font-family: monospace;
font-weight: normal;
font-style: normal;
}""",
"""span.macro_s_f_r_t_name {
color: Red; /* OrangeRed; */
font-family: monospace;
font-weight: bold;
font-style: italic;
}""",
"""span.macro_s_t_r_t_name{
color: Red; /* OrangeRed; */
font-family: monospace;
font-weight: bold;
font-style: normal;
}""",
"""span.macro_s_f_r_f_repl{
color: SlateGray;
font-family: monospace;
font-weight: normal;
font-style: italic;
}""",
"""span.macro_s_t_r_f_repl {
color: SlateGray;
font-family: monospace;
font-weight: normal;
font-style: normal;
}""",
"""span.macro_s_f_r_t_repl {
color: RosyBrown; /* Orange; */
font-family: monospace;
font-weight: bold;
font-style: italic;
}""",
"""span.macro_s_t_r_t_repl{
color: RosyBrown; /* Orange; */
font-family: monospace;
font-weight: bold;
font-style: normal;
}""",
# File declarations in the macro pages
"""span.file_decl {
color: black;
font-family: monospace;
/* font-weight: bold;
font-style: italic; */
}""",
# Conditional preprocessing directives - True
"""span.CcgNodeTrue {
color: LimeGreen;
font-family: monospace;
/* font-weight: bold; */
/* font-style: italic; */
}""",
# Conditional preprocessing directives - False
"""span.CcgNodeFalse {
color: red;
font-family: monospace;
/* font-weight: bold; */
/* font-style: italic; */
}""",
]
TT_CSS_FILE = 'cpip.css'
TT_CSS_STRING = '\n'.join(ITU_CSS_LIST)
def writeCssToDir(theDir):
"""Writes the CSS file into to the directory.
:param theDir: Directory.
:type theDir: ``str``
:returns: ``NoneType``
"""
try:
if not os.path.exists(theDir):
os.makedirs(theDir)
open(os.path.join(theDir, TT_CSS_FILE), 'w').write(TT_CSS_STRING)
except IOError as err:
raise ExceptionTokenCss('writeCssToDir(): %s' % str(err)) from err
def writeCssForFile(theFile):
"""Writes the CSS file into to the directory that the file is in."""
return writeCssToDir(os.path.dirname(theFile))
def retClass(theTt):
"""
:param theTt: Token type
:type theTt: ``str``
:returns: ``str`` -- CSS class.
:raises: ``ExceptionTokenCss`` For unknown token type.
"""
try:
return TT_ENUM_MAP[theTt]
except KeyError:
raise ExceptionTokenCss('Unknown token type %s' % theTt)
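# --- Editorial usage sketch (not part of the original module) ---------------
# Writing the stylesheet next to generated HTML and mapping a token type to
# its one-letter CSS class (the output directory is illustrative):
#
#   writeCssToDir('/tmp/cpip_html')   # creates /tmp/cpip_html/cpip.css
#   retClass('identifier')            # -> class name used in <span class="...">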
| gpl-2.0 | -6,426,611,412,637,766,000 | 25.405573 | 80 | 0.557158 | false |
Connexions/cnx-archive | cnxarchive/tests/test_routes.py | 1 | 9632 | # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, 2015 Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import unittest
from pyramid.testing import DummyRequest
class RoutingTestCase(unittest.TestCase):
def test_route_setup(self):
from .testing import integration_test_settings
settings = integration_test_settings()
from .. import main
app = main({}, **settings)
tests = {
# controller name: (path, routing args)
'content': (
('/contents/abcd-1234.html', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '',
'ext': '.html',
}),
('/contents/abcd-1234/title.html', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '/title',
'ext': '.html',
}),
('/contents/[email protected]:[email protected]', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678@3',
'separator': ':',
'ignore': '',
'ext': '.html',
}),
('/contents/[email protected]:efgh-5678@3/ignore.html', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678@3',
'separator': ':',
'ignore': '/ignore',
'ext': '.html',
}),
('/contents/abcd-1234.json', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '',
'ext': '.json',
}),
('/contents/abcd-1234/title.json', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '/title',
'ext': '.json',
}),
('/contents/[email protected]:[email protected]', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678@3',
'separator': ':',
'ignore': '',
'ext': '.json',
}),
('/contents/[email protected]:efgh-5678@3/ignore.json', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678@3',
'separator': ':',
'ignore': '/ignore',
'ext': '.json',
}),
('/contents/abcd-1234', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '',
'ext': '',
}),
('/contents/abcd-1234/', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '/',
'ext': '',
}),
('/contents/abcd-1234/title', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '/title',
'ext': '',
}),
('/contents/abcd-1234/title/', {
'ident_hash': 'abcd-1234',
'page_ident_hash': '',
'separator': '',
'ignore': '/title/',
'ext': '',
}),
('/contents/abcd-1234:efgh-5678@3/ignore', {
'ident_hash': 'abcd-1234',
'page_ident_hash': 'efgh-5678@3',
'separator': ':',
'ignore': '/ignore',
'ext': '',
}),
('/contents/[email protected]:efgh-5678', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678',
'separator': ':',
'ignore': '',
'ext': '',
}),
('/contents/[email protected]:efgh-5678/', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678',
'separator': ':',
'ignore': '/',
'ext': '',
}),
('/contents/[email protected]:efgh-5678@3/ignore', {
'ident_hash': '[email protected]',
'page_ident_hash': 'efgh-5678@3',
'separator': ':',
'ignore': '/ignore',
'ext': '',
})
),
'resource': (
('/resources/abcd1234', {
'hash': 'abcd1234',
'ignore': '',
}),
('/resources/abcd1234/', {
'hash': 'abcd1234',
'ignore': '/',
}),
('/resources/abcd1234/picture.jpg', {
'hash': 'abcd1234',
'ignore': '/picture.jpg',
}),
),
'export': (
('/exports/abcd-1234.pdf', {
'ident_hash': 'abcd-1234',
'type': 'pdf',
'ignore': '',
}),
('/exports/abcd-1234.pdf/title.pdf', {
'ident_hash': 'abcd-1234',
'type': 'pdf',
'ignore': '/title.pdf',
}),
),
'extras': (
('/extras', {
'key': ''
}),
('/extras/featured', {
'key': '/featured'
}),
('/extras/messages', {
'key': '/messages'
}),
('/extras/licenses', {
'key': '/licenses'
}),
('/extras/subjects', {
'key': '/subjects'
}),
('/extras/languages', {
'key': '/languages'
}),
),
            'content-extras': (
                ('/extras/abcd@1234', {
                    'ident_hash': 'abcd@1234',
                    'page_ident_hash': '',
                    'separator': ''
                    }),
                ('/extras/abcd@1234:efgh@5678', {
                    'ident_hash': 'abcd@1234',
                    'page_ident_hash': 'efgh@5678',
                    'separator': ':'
                    }),
                ),
'search': (
('/search', {}),
),
'sitemap': (
('/sitemap-1.xml', {
'from_id': '1',
}),
),
'sitemap-index': (
('/sitemap_index.xml', {}),
),
'legacy-redirect': (
('/content/m12345', {
'objid': 'm12345',
'ignore': '',
}),
('/content/m12345/', {
'objid': 'm12345',
'ignore': '/',
}),
),
'legacy-redirect-w-version': (
('/content/m12345/1.2', {
'objid': 'm12345',
'objver': '1.2',
'ignore': '',
'filename': '',
}),
('/content/m12345/1.2/', {
'objid': 'm12345',
'objver': '1.2',
'ignore': '/',
'filename': '',
}),
('/content/m12345/1.2/picture.jpg', {
'objid': 'm12345',
'objver': '1.2',
'ignore': '/',
'filename': 'picture.jpg',
}),
),
'legacy-redirect-latest': (
('/content/m12345/latest', {
'objid': 'm12345',
'ignore': '',
'filename': '',
}),
('/content/m12345/latest/', {
'objid': 'm12345',
'ignore': '/',
'filename': '',
}),
),
None: (
('/extras/', None),
('/contents', None),
('/contents/', None),
),
}
for controller_name, args in tests.items():
for path, routing_args in args:
req = DummyRequest(environ={'PATH_INFO': path})
routemap = app.routes_mapper(req)
route = routemap['route']
routename = getattr(route, 'name', None)
self.assertEqual(routename, controller_name)
self.assertEqual(routemap['match'], routing_args)
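# --- Editorial note (not part of the original test module) ------------------
# Given the package path above, this suite would typically be run with
# something like (the exact invocation is an assumption, not project policy):
#
#   python -m unittest cnxarchive.tests.test_routes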
| agpl-3.0 | 6,305,812,099,231,620,000 | 35.484848 | 70 | 0.310735 | false |
stefanvanwouw/najnaf | src/worker/controller.py | 1 | 1720 | from shared.app import AppState, AppCommand
import socket, pickle, conf, time
class WorkerController(object):
def __init__(self, state_obj, consumer):
self._state_obj = state_obj
self._consumer = consumer
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._sock.bind((conf.CONTROLLER_BIND_IP, conf.CONTROLLER_PORT))
self._master_ip = None
def run(self):
while True:
data, addr = self._sock.recvfrom(1024) # Receive, 1024 bytes buffer.
cmd = pickle.loads(data) # Deserialize data into command object.
if not isinstance(cmd,AppCommand):
continue
else:
if cmd.type == AppCommand.INIT and (self._state_obj.state ==
AppState.READY or self._state_obj.state ==
AppState.ERROR or (self._state_obj.state ==
AppState.RUNNING and self._master_ip != cmd.data)):
print "received init"
self._state_obj.state = AppState.INIT
self._master_ip = cmd.data
try:
self._consumer.connect(self._master_ip, conf.APOLLO_QUEUE_PORT)
self._consumer.subscribe()
self._state_obj.state = AppState.RUNNING
except:
self._state_obj.state = AppState.ERROR
elif cmd.type == AppCommand.SHUTDOWN and self._state_obj.state == AppState.RUNNING:
self._state_obj.state = AppState.SHUTTING_DOWN
# unsubscribe and join threads
self._consumer.shut_down()
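# --- Editorial sketch (not part of the original module) ---------------------
# How a master node might drive this loop over UDP; the AppCommand
# constructor signature is an assumption inferred from its use above:
#
#   sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   cmd = AppCommand(AppCommand.INIT, data=master_ip)
#   sock.sendto(pickle.dumps(cmd), (worker_ip, conf.CONTROLLER_PORT))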
| gpl-3.0 | -4,000,206,089,064,693,000 | 43.102564 | 99 | 0.538372 | false |
lago-project/lago-ost-plugin | ovirtlago/server.py | 1 | 4700 | #
# Copyright 2014-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import contextlib
import errno
import logging
import os
import threading
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import ThreadingTCPServer
import sys
import traceback
LOGGER = logging.getLogger(__name__)
class LagoThreadingTCPServer(ThreadingTCPServer):
""" A custom multi-threaded TCP server.
We use `allow_reuse_address` in order to avoid a race when opening and
closing multiple servers (at each point in time only one server is
listening).
For example, the first server has a connection in 'time_wait' state,
while the second server tries to bind its socket.
Attributes:
_allowed_exceptions(tuple of Exceptions): If an exception occurs
            and its type isn't in `_allowed_exceptions`, its traceback
will be printed to the log.
_allowed_errnos(tuple of ints): If an OSError exception occurs
            and its errno isn't in `_allowed_errnos`, its traceback
will be printed to the log.
"""
allow_reuse_address = True
def __init__(
self,
server_address,
RequestHandlerClass,
allowed_exceptions=(),
allowed_errnos=(errno.EPIPE, ),
):
# We can't use super since the superclass isn't a new style class
ThreadingTCPServer.__init__(self, server_address, RequestHandlerClass)
self._allowed_exceptions = allowed_exceptions
self._allowed_errnos = allowed_errnos
def handle_error(self, request, client_address):
""" Handle an error gracefully
Overrides the default implementation which prints
the error to stdout and stderr
"""
_, value, _ = sys.exc_info()
ignore_err_conditions = [
hasattr(value, 'errno') and value.errno in self._allowed_errnos,
isinstance(value, self._allowed_exceptions),
]
if any(ignore_err_conditions):
return
LOGGER.debug(traceback.format_exc())
def generate_request_handler(root_dir):
"""
Factory for _BetterHTTPRequestHandler classes
Args:
root_dir (path): Path to the dir to serve
Returns:
_BetterHTTPRequestHandler: A ready to be used improved http request
handler
"""
class _BetterHTTPRequestHandler(SimpleHTTPRequestHandler):
__root_dir = root_dir
_len_cwd = len(os.getcwd())
def translate_path(self, path):
return os.path.join(
self.__root_dir,
SimpleHTTPRequestHandler.translate_path(
self, path
)[self._len_cwd:].lstrip('/')
)
def log_message(self, *args, **kwargs):
pass
return _BetterHTTPRequestHandler
def _create_http_server(listen_ip, listen_port, root_dir):
"""
Starts an http server with an improved request handler
Args:
listen_ip (str): Ip to listen on
port (int): Port to register on
root_dir (str): path to the directory to serve
Returns:
BaseHTTPServer: instance of the http server, already running on a
thread
"""
server = LagoThreadingTCPServer(
(listen_ip, listen_port),
generate_request_handler(root_dir),
)
threading.Thread(target=server.serve_forever).start()
return server
@contextlib.contextmanager
def repo_server_context(gw_ip, port, root_dir):
"""
Context manager that starts a generic http server that serves `root_dir`,
and listens on `gw_ip`:`port`.
Args:
gw_ip(str): IP to listen on
port(int): Port to listen on
root_dir(str): The root directory that will be served.
"""
server = _create_http_server(
listen_ip=gw_ip,
listen_port=port,
root_dir=root_dir,
)
try:
yield
finally:
server.shutdown()
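# --- Editorial usage sketch (not part of ovirtlago) --------------------------
# Serving a local repository directory for the lifetime of a block; the IP,
# port and path are examples only:
#
#   with repo_server_context(gw_ip='192.168.200.1', port=8585,
#                            root_dir='/var/lib/lago/internal_repo'):
#       do_work_that_fetches_from_the_repo()   # hypothetical caller code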
| gpl-2.0 | -6,331,778,533,540,957,000 | 29.718954 | 79 | 0.65383 | false |
ajdiaz/mico | mico/util/switch.py | 1 | 1215 | #! /usr/bin/env python
# -*- encoding: utf-8 -*-
# vim:fenc=utf-8:
from __builtin__ import env
"""This module provide a switcher, which when is accesses toggle an internal
status.
"""
class Switcher(object):
"""Switch a global status using the environment as critical region, and
setted using class constructor.
"""
_switch = (None, None)
def __init__(self, init_value=None):
self._old_value = env.get(self._switch[0], None)
env[self._switch[0]] = self._switch[1] \
if init_value is None \
else init_value
def __enter__(self):
pass
def __exit__(self, t, v, tr):
if self._old_value is None:
del env[self._switch[0]]
else:
env[self._switch[0]] = self._old_value
@staticmethod
def getValue(key):
"""Class method to get the value of an specified switcher using key.
"""
return env.get(key, None)
@classmethod
def from_key(cls, key, value):
"""Class method to create a new switcher using key and value.
"""
        return type.__new__(type, "switch_%s" % key, (Switcher,), {"_switch": (key, value)})
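# --- Editorial usage sketch (not part of mico itself) ------------------------
# `env` is injected into __builtin__ by the mico runtime, so this only works
# inside mico; the key and value below are illustrative:
#
#   verbose = Switcher.from_key('verbose', True)
#   with verbose():                    # env['verbose'] == True inside the block
#       assert Switcher.getValue('verbose') is True
#   # the previous value of env['verbose'] (or its absence) is restored on exit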
| gpl-2.0 | -290,881,152,891,639,100 | 26.613636 | 92 | 0.558025 | false |
sailthru/stolos | stolos/examples/valid_if_or_example.py | 1 | 1515 | import datetime
def func(app_name, **parsed_job_id):
"""
This example function evaluates the identifiers in a job_id to decide
whether a particular job should be executed or not. To use it, your task
configuration must define a "valid_if_or" section that points to this
function.
This functionality is useful when you don't wish to create a new identifier
for a particular job_id or you wish to have Stolos mark specific
job_ids as "skipped"
PARAMS:
app_name - the task name. We provide this option so this func can be
generalized to more than one scheduled app
**parsed_job_id - specifies the identifiers that make up the job_id.
You could also just decide to explicitly define keyword args like:
def func(app_name, date, client_id, collection_name)
    In this particular example, this function will not let Stolos queue
    any job_ids except those where client_id is 1111 and the given
    date falls on a Friday or Sunday.  All other job_ids this function receives
will not be queued by Stolos, and will instead be marked as
"skipped"
"""
c1 = parsed_job_id['client_id'] == 1111
_day_of_week = datetime.datetime.strptime(
str(parsed_job_id['date']), '%Y%m%d').strftime("%A")
c2 = _day_of_week in ['Friday', 'Sunday']
from stolos.examples import log
log.critical('%s %s %s %s' % (app_name, parsed_job_id, c1, c2))
if c1 and c2:
return True
else:
return False
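# --- Editorial sketch (not part of the original example) ---------------------
# Roughly how a task definition might reference this function from its
# "valid_if_or" section; the exact configuration schema is an assumption and
# depends on the Stolos version in use:
#
#   "my_app": {
#       "job_id": "{date}_{client_id}_{collection_name}",
#       "valid_if_or": {"_func": "stolos.examples.valid_if_or_example.func"}
#   }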
| apache-2.0 | 8,128,489,602,805,298,000 | 37.846154 | 79 | 0.676568 | false |
PaddlePaddle/models | PaddleCV/tracking/ltr/train_settings/siamrpn/siamrpn_res50.py | 1 | 5570 | import paddle.fluid as fluid
import paddle.fluid.dygraph as dygraph
import ltr.actors as actors
import ltr.data.transforms as dltransforms
from ltr.data import processing, sampler, loader
from ltr.dataset import ImagenetVID, ImagenetDET, MSCOCOSeq, YoutubeVOS
from ltr.models.siam.siam import SiamRPN_ResNet50
from ltr.models.loss import select_softmax_with_cross_entropy_loss, weight_l1_loss
from ltr.trainers import LTRTrainer
import numpy as np
import cv2 as cv
from PIL import Image, ImageEnhance
def run(settings):
# Most common settings are assigned in the settings struct
settings.description = 'SiamRPN with ResNet-50 backbone.'
settings.print_interval = 100 # How often to print loss and other info
settings.batch_size = 32 # Batch size
settings.num_workers = 4 # Number of workers for image loading
settings.search_area_factor = {'train': 1.0, 'test': 255./127.}
settings.output_sz = {'train': 127, 'test': 255}
settings.scale_type = 'context'
settings.border_type = 'meanpad'
# Settings for the image sample and label generation
settings.center_jitter_factor = {'train': 0.1, 'test': 1.5}
settings.scale_jitter_factor = {'train': 0.05, 'test': 0.18}
settings.label_params = {
'search_size': 255,
'output_size': 25,
'anchor_stride': 8,
'anchor_ratios': [0.33, 0.5, 1, 2, 3],
'anchor_scales': [8],
'num_pos': 16,
'num_neg': 16,
'num_total': 64,
'thr_high': 0.6,
'thr_low': 0.3
}
settings.loss_weights = {'cls': 1., 'loc': 1.2}
settings.neg = 0.2
# Train datasets
vos_train = YoutubeVOS()
vid_train = ImagenetVID()
coco_train = MSCOCOSeq()
det_train = ImagenetDET()
# Validation datasets
#vid_val = ImagenetVID()
vid_val = coco_train
# The joint augmentation transform, that is applied to the pairs jointly
transform_joint = dltransforms.ToGrayscale(probability=0.25)
# The augmentation transform applied to the training set (individually to each image in the pair)
transform_exemplar = dltransforms.Transpose()
transform_instance = dltransforms.Transpose()
# Data processing to do on the training pairs
data_processing_train = processing.SiamProcessing(
search_area_factor=settings.search_area_factor,
output_sz=settings.output_sz,
center_jitter_factor=settings.center_jitter_factor,
scale_jitter_factor=settings.scale_jitter_factor,
scale_type=settings.scale_type,
border_type=settings.border_type,
mode='sequence',
label_params=settings.label_params,
train_transform=transform_exemplar,
test_transform=transform_instance,
joint_transform=transform_joint)
# Data processing to do on the validation pairs
data_processing_val = processing.SiamProcessing(
search_area_factor=settings.search_area_factor,
output_sz=settings.output_sz,
center_jitter_factor=settings.center_jitter_factor,
scale_jitter_factor=settings.scale_jitter_factor,
scale_type=settings.scale_type,
border_type=settings.border_type,
mode='sequence',
label_params=settings.label_params,
transform=transform_exemplar,
joint_transform=transform_joint)
# The sampler for training
dataset_train = sampler.MaskSampler(
[vid_train, coco_train, det_train, vos_train],
        [2, 1, 1, 2],
samples_per_epoch=5000 * settings.batch_size,
max_gap=100,
processing=data_processing_train,
neg=settings.neg)
# The loader for training
train_loader = loader.LTRLoader(
'train',
dataset_train,
training=True,
batch_size=settings.batch_size,
num_workers=settings.num_workers,
stack_dim=0)
# The sampler for validation
dataset_val = sampler.MaskSampler(
[vid_val],
[1, ],
samples_per_epoch=100 * settings.batch_size,
max_gap=100,
processing=data_processing_val)
# The loader for validation
val_loader = loader.LTRLoader(
'val',
dataset_val,
training=False,
batch_size=settings.batch_size,
num_workers=settings.num_workers,
stack_dim=0)
    # create network, set objective, create optimizer, learning rate scheduler, trainer
with dygraph.guard():
# Create network
def scale_loss(loss):
total_loss = 0
for k in settings.loss_weights:
total_loss += loss[k] * settings.loss_weights[k]
return total_loss
net = SiamRPN_ResNet50(scale_loss=scale_loss)
# Define objective
objective = {
'cls': select_softmax_with_cross_entropy_loss,
'loc': weight_l1_loss,
}
# Create actor, which wraps network and objective
actor = actors.SiamActor(net=net, objective=objective)
# Set to training mode
actor.train()
# Define optimizer and learning rate
lr_scheduler = fluid.layers.exponential_decay(
learning_rate=0.005,
decay_steps=5000,
decay_rate=0.9659,
staircase=True)
optimizer = fluid.optimizer.Adam(
parameter_list=net.rpn_head.parameters() + net.neck.parameters(),
learning_rate=lr_scheduler)
trainer = LTRTrainer(actor, [train_loader, val_loader], optimizer, settings, lr_scheduler)
trainer.train(50, load_latest=False, fail_safe=False)
| apache-2.0 | -2,883,852,501,765,936,000 | 34.031447 | 101 | 0.646499 | false |
cerebis/meta-sweeper | bin/metaART.py | 1 | 8681 | #!/usr/bin/env python
"""
meta-sweeper - for performing parametric sweeps of simulated
metagenomic sequencing experiments.
Copyright (C) 2016 "Matthew Z DeMaere"
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import atexit
import os
import subprocess
import sys
import numpy as np
from Bio import SeqIO
import abundance
import io_utils
TMP_INPUT = 'seq.tmp'
TMP_OUTPUT = 'reads.tmp'
# low-high seeds, giving 5M values
LOW_SEED_VALUE = 1000000
HIGH_SEED_VALUE = 6000000
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Simulate a metagenomic data set from an abundance profile')
parser.add_argument('-C', '--compress', choices=['gzip', 'bzip2'], default=None, help='Compress output files')
parser.add_argument('-n', '--output-name', metavar='PATH', help='Output file base name', required=True)
parser.add_argument('-P', '--profile', dest='profile', required=False,
help='Community abundance profile', metavar='FILE')
parser.add_argument('-M', '--max-coverage', metavar='INT', type=int, required=True,
                        help='Coverage of the most abundant taxon')
parser.add_argument('-S', '--seed', metavar='INT', type=int, required=True, help='Random seed')
parser.add_argument('-l', '--read-len', metavar='INT', type=int, required=True, help='Read length')
parser.add_argument('-m', '--insert-len', metavar='INT', type=int, required=True, help='Insert length')
parser.add_argument('-s', '--insert-sd', metavar='INT', type=int, required=True, help='Insert standard deviation')
parser.add_argument('--art-path', default='art_illumina', help='Path to ART executable [default: art_illumina]')
parser.add_argument('--log', default='metaART.log', type=argparse.FileType('w'), help='Log file name')
parser.add_argument('--coverage-out', metavar='FILE', default='coverage.tsv',
help='Output file for simulated genome coverage table', required=False)
parser.add_argument('-z', '--num-samples', metavar='INT', type=int, default=1, required=True,
help='Number of transect samples')
parser.add_argument('--dist', metavar='DISTNAME', choices=['equal', 'uniform', 'lognormal'],
help='Abundance profile distribution [equal, uniform, lognormal]')
parser.add_argument('--lognorm-mu', metavar='FLOAT', type=float, default=1.0, required=False,
help='Log-normal relative abundance mu parameter')
parser.add_argument('--lognorm-sigma', metavar='FLOAT', type=float, default=1.0, required=False,
help='Log-normal relative abundance sigma parameter')
parser.add_argument('fasta', metavar='MULTIFASTA',
help='Input multi-fasta of all sequences')
parser.add_argument('output_dir', metavar='DIR',
help='Output directory')
args = parser.parse_args()
@atexit.register
def close_cov():
coverage_file.close()
seq_index = SeqIO.index(args.fasta, 'fasta')
base_name = os.path.join(args.output_dir, args.output_name)
r1_tmp = os.path.join(args.output_dir, '{0}1.fq'.format(TMP_OUTPUT))
r2_tmp = os.path.join(args.output_dir, '{0}2.fq'.format(TMP_OUTPUT))
seq_tmp = os.path.join(args.output_dir, TMP_INPUT)
coverage_file = open(os.path.join(args.output_dir, args.coverage_out), 'w')
RANDOM_STATE = np.random.RandomState(args.seed)
child_seeds = RANDOM_STATE.randint(LOW_SEED_VALUE, HIGH_SEED_VALUE, args.num_samples).tolist()
profile_seeds = RANDOM_STATE.randint(LOW_SEED_VALUE, HIGH_SEED_VALUE, args.num_samples).tolist()
if args.profile:
# if specified, read the static profile table from disk rather than calculate at runtime.
        # this will mean the same abundance profile is used in each sample -- in multisample mode.
profile = abundance.read_profile(args.profile)
# generate N simulated communities
for n in xrange(0, args.num_samples):
# generate abundance profile from global seeded random state -- if not using a static table
if not args.profile:
seq_names = [si for si in seq_index]
profile = abundance.generate_profile(profile_seeds[n], seq_names, mode=args.dist,
lognorm_mu=args.lognorm_mu, lognorm_sigma=args.lognorm_sigma)
for i, chr_abn in enumerate(profile.values(), start=1):
coverage_file.write('{0}\t{1}\t{2}\t{3}\t{4}\n'.format(
n + 1, i, chr_abn.name, chr_abn.cell,
chr_abn.effective_abundance() * args.max_coverage))
print 'Sample {0} Relative Abundances:'.format(n)
profile.write_table(sys.stdout)
if args.num_samples > 1:
r1_final = '{0}.{1}.r1.fq'.format(base_name, n+1)
r2_final = '{0}.{1}.r2.fq'.format(base_name, n+1)
else:
r1_final = '{0}.r1.fq'.format(base_name)
r2_final = '{0}.r2.fq'.format(base_name)
r1_tmp = os.path.join(args.output_dir, '{0}1.fq'.format(TMP_OUTPUT, n+1))
r2_tmp = os.path.join(args.output_dir, '{0}2.fq'.format(TMP_OUTPUT, n+1))
output_R1 = io_utils.open_output(r1_final, mode='w', compress=args.compress)
output_R2 = io_utils.open_output(r2_final, mode='w', compress=args.compress)
try:
# iteratively call ART for each chromosome in profile, accumulate the results
for chr_abn in profile:
coverage = chr_abn.effective_abundance() * args.max_coverage
print '\tRequesting {0:.4f} coverage for {1}'.format(coverage, chr_abn.name)
# iteration target for ART
try:
ref_seq = seq_index[chr_abn.name]
ref_len = len(ref_seq)
SeqIO.write([ref_seq], seq_tmp, 'fasta')
subprocess.check_call([args.art_path,
'-p', # paired-end sequencing
'-na', # no alignment file
'-rs', str(child_seeds[n]),
'-m', str(args.insert_len),
'-s', str(args.insert_sd),
'-l', str(args.read_len),
'-f', str(coverage),
'-i', seq_tmp,
'-o', os.path.join(args.output_dir, TMP_OUTPUT)],
stdout=args.log, stderr=args.log)
except OSError as e:
print "There was an error executing \"art_illumina\"."
print "Check that it is either on your PATH or specify it at runtime."
raise e
except subprocess.CalledProcessError as e:
print e
raise e
# count generated reads
r1_n = 0
for seq in SeqIO.parse(r1_tmp, 'fastq'):
r1_n += 1
r2_n = 0
for seq in SeqIO.parse(r2_tmp, 'fastq'):
r2_n += 1
assert r1_n == r2_n, 'Error: failed to generate an equal number of fwd and rev reads'
effective_cov = args.read_len * (r1_n + r2_n) / float(ref_len)
print '\tGenerated {0} pairs for {1}, {2:.3f} coverage'.format(r1_n, chr_abn.name, effective_cov)
if r1_n != r2_n:
print 'Error: paired-end counts do not match {0} vs {1}'.format(r1_n, r2_n)
sys.exit(1)
io_utils.multicopy_tostream(r1_tmp, output_R1)
io_utils.multicopy_tostream(r2_tmp, output_R2)
os.remove(r1_tmp)
os.remove(r2_tmp)
os.remove(seq_tmp)
except Exception as e:
print e
print 'Warning!! -- non-zero exit'
sys.exit(1)
| gpl-3.0 | 4,648,134,745,466,227,000 | 45.672043 | 118 | 0.58173 | false |
johncfaver/iddqd | cgi-bin/molecule.py | 1 | 4991 | #!/usr/bin/env python
from atom import atom
from sys import exit
from chem import sym2mass
class molecule:
def __init__(self,filename,filetype=None):
self.filename=filename
if filetype == None:
if filename.lower()[-3:]=='pdb':
self.filetype='pdb'
elif filename.lower()[-3:]=='xyz':
self.filetype='xyz'
elif filename.lower()[-3:]=='mol':
self.filetype='mol'
if self.filetype == 'xyz':
self.charge=0
self.molweight=0.
self.atoms=self.readXYZfile(filename)
self.natoms=len(self.atoms)
if self.filetype == 'pdb':
self.charge=0
self.molweight=0.
self.atoms,self.firstatominres,self.resnames=self.readPDBfile(filename)
self.nres=len(self.resnames)
self.natoms=len(self.atoms)
self.calcCharge()
if self.filetype == 'mol':
self.charge=0
self.molweight=0
self.atoms=self.readMOLfile(filename)
self.natoms=len(self.atoms)
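    # Parse ATOM records from a PDB file; returns (atom list, first-atom index per residue, residue names).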
def readPDBfile(self,filename):
try:
f=open(filename)
except Exception:
print 'ERROR LOADING ',filename
exit()
atomlist=[]
firstatominres=[]
res='1'
firstatominres.append(1)
resnames=[]
for line in f.readlines():
if line.split()[0].strip()=='ATOM':
atomlist.append(atom(line.split()[2][0],line.split()[5],line.split()[6],line.split()[7],line.split()[2]))
if len(resnames)==0:
resnames.append(line.split()[3])
if line.split()[4] != res:
firstatominres.append(len(atomlist))
resnames.append(line.split()[3])
res=line.split()[4]
return (atomlist,firstatominres,resnames)
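    # Parse an XYZ file: atom count on line 1, charge on line 2, then one "symbol x y z" line per atom.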
def readXYZfile(self,filename):
try:
f=open(filename)
except Exception:
print 'ERROR LOADING ',filename
return 1
natoms=int(f.readline().strip())
try:
line=f.readline().strip()
if len(line.split())==1:
self.charge=int(line)
elif len(line.split())==2:
self.charge=int(line.split()[1])
except Exception:
print line.split(),filename
print 'ERROR reading XYZ file. Please put the charge on line 2.'
exit()
fl=f.readlines()
f.close()
atomlist=[]
for i in range(natoms):
try:
atomlist.append(atom(fl[i].split()[0],fl[i].split()[1],fl[i].split()[2],fl[i].split()[3]))
self.molweight+=sym2mass(atomlist[-1].atsym.upper())
except Exception:
                print 'ERROR reading XYZ file. Check line', str(i+3),' of ',filename,'.'
break
return atomlist
def readMOLfile(self,filename):
try:
f=open(filename)
except Exception:
print 'ERROR LOADING ',filename
return 1
for i in xrange(3):
f.readline()
natoms=int(f.readline().split()[0])
atomlist=[]
for i in xrange(natoms):
try:
line=f.readline()
atomlist.append(atom(line.split()[3],line.split()[0],line.split()[1],line.split()[2]))
self.molweight+=sym2mass[atomlist[-1].atsym.upper()]
except Exception:
print 'ERROR Reading MOL file at line:', line.split()
break
f.close()
return atomlist
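    # Rough net charge estimate from residue names (PDB input only): ASP/GLU contribute -1, LYS/ARG/HIS +1.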
def calcCharge(self):
if self.filetype != 'pdb':
return 0
for i in self.resnames:
if i in ['ASP','GLU']:
self.charge-=1
if i in ['LYS','ARG','HIS']:
self.charge+=1
def writeXYZfile(self,filename):
f=open(filename,'w')
f.write(str(len(self.atoms))+' \n')
f.write('comment \n')
for i in self.atoms:
f.write(i.atsym+' '+str(i.x)+' '+str(i.y)+' '+str(i.z)+' \n')
f.close()
def printInfo(self):
print self.filename,self.natoms,' atoms',self.charge,' charge'
for k in self.atoms:
k.printInfo()
def formula(self):
symbols=[]
counts=[]
for i in self.atoms:
if i.atsym in symbols:
counts[symbols.index(i.atsym)]+=1
else:
symbols.append(i.atsym)
counts.append(1)
order=['C','H','BR','CL','F','I','N','O','P','S']
fstr=''
for i in order:
if i in symbols:
j=symbols.index(i)
fstr+=symbols.pop(j)+str(counts.pop(j))
for i,j in enumerate(symbols):
fstr+=j+str(counts[i])
return fstr
| gpl-3.0 | 7,313,591,759,236,521,000 | 33.184932 | 121 | 0.495091 | false |
yakumaa/mopidy-soundcloud | tests/test_api.py | 1 | 4628 | from __future__ import unicode_literals
import unittest
import mock
import vcr
from mopidy.models import Track
from mopidy_soundcloud import SoundCloudExtension
from mopidy_soundcloud.soundcloud import SoundCloudClient, readable_url
class ApiTest(unittest.TestCase):
@vcr.use_cassette('tests/fixtures/sc-login.yaml')
def setUp(self):
config = SoundCloudExtension().get_config_schema()
config['auth_token'] = '1-35204-61921957-55796ebef403996'
config['explore_songs'] = 10
# using this user http://maildrop.cc/inbox/mopidytestuser
self.api = SoundCloudClient(config)
def test_resolves_string(self):
_id = self.api.parse_track_uri('soundcloud:song.38720262')
self.assertEquals(_id, '38720262')
@vcr.use_cassette('tests/fixtures/sc-login-error.yaml')
def test_responds_with_error(self):
with mock.patch('mopidy_soundcloud.soundcloud.logger.error') as d:
config = SoundCloudExtension().get_config_schema()
config['auth_token'] = '1-fake-token'
SoundCloudClient(config)
d.assert_called_once_with('Invalid "auth_token" used for '
'SoundCloud authentication!')
@vcr.use_cassette('tests/fixtures/sc-resolve-track.yaml')
def test_resolves_object(self):
trackc = {}
trackc[b'uri'] = 'soundcloud:song.38720262'
track = Track(**trackc)
id = self.api.parse_track_uri(track)
self.assertEquals(id, '38720262')
@vcr.use_cassette('tests/fixtures/sc-resolve-track-none.yaml')
def test_resolves_emptyTrack(self):
track = self.api.get_track('s38720262')
self.assertIsInstance(track, Track)
self.assertEquals(track.uri, None)
@vcr.use_cassette('tests/fixtures/sc-resolve-track.yaml')
def test_resolves_Track(self):
track = self.api.get_track('38720262')
self.assertIsInstance(track, Track)
self.assertEquals(
track.uri,
'soundcloud:song/Burial Four Tet - Nova.38720262'
)
@vcr.use_cassette('tests/fixtures/sc-resolve-http.yaml')
def test_resolves_http_url(self):
track = self.api.resolve_url(
'https://soundcloud.com/bbc-radio-4/m-w-cloud'
)[0]
self.assertIsInstance(track, Track)
self.assertEquals(
track.uri,
'soundcloud:song/That Mitchell and Webb Sound The Cloud.122889665'
)
@vcr.use_cassette('tests/fixtures/sc-liked.yaml')
def test_get_user_liked(self):
tracks = self.api.get_user_liked()
self.assertIsInstance(tracks, list)
@vcr.use_cassette('tests/fixtures/sc-stream.yaml')
def test_get_user_stream(self):
tracks = self.api.get_user_stream()
self.assertIsInstance(tracks, list)
@vcr.use_cassette('tests/fixtures/sc-explore.yaml')
def test_get_explore(self):
tracks = self.api.get_explore()
self.assertIsInstance(tracks, list)
self.assertEquals(tracks[0], 'Popular+Music')
@vcr.use_cassette('tests/fixtures/sc-popular.yaml')
def test_get_explore_popular_music(self):
tracks = self.api.get_explore('1')
self.assertIsInstance(tracks, list)
self.assertIsInstance(tracks[0], Track)
@vcr.use_cassette('tests/fixtures/sc-following.yaml')
def test_get_followings(self):
tracks = self.api.get_followings()
self.assertIsInstance(tracks, list)
@vcr.use_cassette('tests/fixtures/sc-sets.yaml')
def test_get_sets(self):
tracks = self.api.get_sets()
self.assertIsInstance(tracks, list)
@vcr.use_cassette('tests/fixtures/sc-groups.yaml')
def test_get_groups(self):
tracks = self.api.get_groups()
self.assertIsInstance(tracks, list)
@vcr.use_cassette('tests/fixtures/sc-tracks.yaml')
def test_get_group_tracks(self):
tracks = self.api.get_groups(136)
self.assertIsInstance(tracks[0], Track)
def test_readeble_url(self):
self.assertEquals('Barsuk Records',
readable_url('"@"Barsuk Records'))
self.assertEquals('_Barsuk Records',
readable_url('_Barsuk \'Records\''))
@vcr.use_cassette('tests/fixtures/sc-resolve-track-id.yaml')
def test_resolves_stream_Track(self):
track = self.api.get_track('38720262', True)
self.assertIsInstance(track, Track)
self.assertEquals(
track.uri,
'https://api.soundcloud.com/tracks/'
'38720262/stream?client_id=93e33e327fd8a9b77becd179652272e2'
)
| mit | 2,706,715,131,386,229,000 | 35.730159 | 78 | 0.642178 | false |
Risto-Stevcev/django-openworld | openworld/flatten_taxonomy.py | 1 | 1749 | """
Flattens the OpenTaxonomy for tagging purposes
Currently parses v2.0
Source for the taxonomy:
http://openeligibility.org
"""
__author__ = 'Risto Stevcev'
import argparse
import xml.etree.ElementTree as ET
import json
class Flattener(object):
def __init__(self, input_file, output_file):
self.input_file = input_file
self.output_file = output_file
self.services = set()
self.situations = set()
self.tags = {}
def run(self):
tree = ET.parse(self.input_file)
root = tree.getroot()
self.flatten(self.services, root.find('services'))
self.flatten(self.situations, root.find('situations'))
self.tags = {"services": sorted(list(self.services)), "situations": sorted(list(self.situations))}
json.dump(self.tags, self.output_file)
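    # Depth-first walk over taxonomy nodes, adding each node's lower-cased 'title' attribute to the given set.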
def flatten(self, tags, node):
if not node:
return
for item in node:
title = item.attrib.get('title')
if title:
tags.add(title.lower())
self.flatten(tags, item)
def main():
argparser = argparse.ArgumentParser(description='OpenTaxonomy Flattener - by %s.' % __author__)
argparser.add_argument('-i', '-input-file', type=argparse.FileType('r'), required=True,
help='taxonomy file (xml)')
argparser.add_argument('-o', '-output-file', type=argparse.FileType('w'), required=True,
help='output file (json)')
args = argparser.parse_args()
flattener = Flattener(args.i, args.o)
flattener.run()
print("Complete: the taxonomy file '%s' has been flattened into '%s'." % (args.i.name, args.o.name))
args.i.close()
args.o.close()
if __name__ == "__main__":
main()
| bsd-3-clause | 6,089,947,045,113,044,000 | 28.15 | 106 | 0.608348 | false |
spaceexperiment/forum-app | app/api/views/main.py | 1 | 1613 | from flask import request, session, g, redirect, url_for, abort
from flask.views import MethodView
from . import api
from ..decorators import api_render
@api.route('/')
@api_render
def index():
return {'welcome': 'home'}
class BaseMethodView(MethodView):
decorators = [api_render]
model = None
def get_or_404(self, id):
obj = self.model.get(id)
if not obj:
abort(404)
return obj
def is_admin(self):
if hasattr(g, 'user'):
if g.user.is_admin == 'True':
return True
return abort(401)
def is_authenticated(self):
if hasattr(g, 'user'):
return True
return abort(401)
def error(self, message, code):
return {'message': message}, code
def bad_request(self, message):
return {'message': message}, 400
def missing_data(self, data):
"""
        return None if all keys in data are present in request.json,
        else return 400 with the missing fields in the message
        param data: a list of strings of required fields
"""
missing_fields = []
for key in data:
if not key in request.json:
missing_fields.append(key)
if missing_fields:
message = 'Missing ' + ', '.join(missing_fields)
return self.bad_request(message)
return None
def clean_data(self, fields):
data = {}
        # strip away any key not in fields
for key in request.json:
if key in fields:
data[key] = request.json[key]
return data | mit | 5,932,561,269,379,271,000 | 25.032258 | 64 | 0.554867 | false |
aminotti/yameo | app/defaults_routes.py | 1 | 3243 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Anthony Minotti <[email protected]>.
#
#
# This file is part of Yameo framework.
#
# Yameo framework is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Yameo framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yameo framework. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import io
import shutil
from werkzeug import secure_filename
from flask import current_app, send_file, request, Response
from .controller import Controller
from .context import models
from .module import SmartManagement
from lib.exceptions import *
from lib.orm.binary import Binary
from .config import conf
ctl = Controller()
# TODO handle permissions on all routes
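# GET streams a stored binary attribute of a ressource; PUT/PATCH replace it with the request body wrapped in a Binary.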
@ctl.route('/binaries/<ressource>/<path:ids>/<filename>', methods=['GET', 'PUT', 'PATCH'])
def getBinariesFiles(ressource, ids, filename):
ids = ids.split('/')
attribute, extension = os.path.splitext(secure_filename(filename))
ressource = ressource.capitalize()
res = models.get(ressource)
if not res:
raise Core404Exception("Ressource '{}' not found.".format(ressource))
res = res.get(*ids)
if not res:
raise Core404Exception("Ressource with ids '{}' not found.".format(ids))
if request.method == 'GET':
field = getattr(res, attribute.lower(), None)
if not field:
raise Core404Exception("'{}' of ressource '{}' not found.".format(attribute, ressource))
if field.extension != extension[1:]:
raise Core404Exception("File {}{} not found.".format(attribute, extension))
return send_file(field.stream, field.mimetype)
else:
if attribute.lower() not in res._columns:
raise Core400Exception("Bad binary attribute : '{}'".format(attribute))
binary = Binary(ressource.lower(), attribute, request.headers['Content-Type'], extension[1:], io.BytesIO(request.data))
setattr(res, attribute.lower(), binary)
res.write()
r = Response(None)
del r.headers['content-type']
r.status_code = 204
return r
@ctl.route('/')
def yameo_hello():
return "Hello {}!".format(current_app.tenant)
"""
@ctl.route('/yameo/install/<module>/')
def yameo_install(module):
SmartManagement.install(module, current_app)
return "Install ok!!"
"""
# TESTS :
@ctl.route('/yameo/booking/<int:code>/')
def yameo_test(code):
book = models.Booking.get(code)
if book:
return book.name
# book.unlink()
else:
return "Booking {} pas trouvé, devrais ernvoyer un 404!".format(code)
| agpl-3.0 | -7,180,037,846,009,860,000 | 30.173077 | 127 | 0.649291 | false |
MarkusHackspacher/unknown-horizons | development/extract_strings_from_sqlite.py | 1 | 3600 | #!/usr/bin/env python3
# ###################################################
# Copyright (C) 2008-2017 The Unknown Horizons Team
# [email protected]
# This file is part of Unknown Horizons.
#
# Unknown Horizons is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ###################################################
###############################################################################
#
# == I18N DEV USE CASES: CHEATSHEET ==
#
# ** Refer to development/create_pot.sh for help with building or updating
# the translation files for Unknown Horizons.
#
###############################################################################
#
# THIS SCRIPT IS A HELPER SCRIPT. DO NOT INVOKE MANUALLY!
#
###############################################################################
from __future__ import print_function
import os
import sqlalchemy
import sqlalchemy.orm
import sqlalchemy.ext.declarative
import sqlite3
import sys
import tempfile
from collections import defaultdict
sys.path.append(".")
sys.path.append("./horizons")
from horizons.constants import PATHS
# sqlalchemy doesn't support importing sql files,
# therefore we work around this by using sqlite3
filename = tempfile.mkstemp(text=True)[1]
conn = sqlite3.connect(filename)
for db_file in PATHS.DB_FILES:
conn.executescript(open(db_file, "r").read())
conn.commit()
engine = sqlalchemy.create_engine('sqlite:///' + filename) # must be 4 slashes total, sqlalchemy breaks the unixoid conventions here
Session = sqlalchemy.orm.sessionmaker(bind=engine)
db_session = Session()
Base = sqlalchemy.ext.declarative.declarative_base()
#
# Classes
#
class Message(Base):
__tablename__ = 'message_text'
text = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
class Resource(Base):
__tablename__ = 'resource'
name = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
class Tier(Base):
__tablename__ = 'tier'
name = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
#
# print it
#
class MSGID_collect:
msgids = defaultdict(list)
def __init__(self):
pass
def add_to_collection(self, msgid, place):
self.msgids[msgid].append(place)
def __str__(self):
s = []
for text, locations in self.msgids.items():
comment = '#. This is a database entry: {}\n'.format(','.join(locations))
s += [comment + build_msgid(text)]
return '\n'.join(s).strip()
def build_msgid(msgid):
return 'msgid "{}"\nmsgstr ""\n'.format(msgid.replace('"', '\\"'))
def collect_all():
collector = MSGID_collect()
for message in db_session.query(Message):
collector.add_to_collection(message.text, 'a messagewidget message (left part of the screen)')
for resource in db_session.query(Resource):
collector.add_to_collection(resource.name, 'the name of a resource')
for tier in db_session.query(Tier):
collector.add_to_collection(tier.name, 'the name of an inhabitant tier (level)')
return collector
print(collect_all())
os.unlink(filename)
| gpl-2.0 | 1,333,327,644,805,271,800 | 25.865672 | 132 | 0.660833 | false |
schiermike/supervise-web | tests/test_io.py | 1 | 2216 | from tests import BaseTestCase
from supervise_web.svio import svstat, svcontrol
import datetime
import os
import time
class IoTestCase(BaseTestCase):
def test_svstat(self):
self._create_test_daemon(1)
x = svstat(self._daemon_dir(1))
self.assertFalse(x['daemon_paused'])
self.assertTrue(x['daemon_autostart'])
self.assertTrue(x['alive'])
self.assertFalse(x['daemon_once'])
self.assertTrue(x['daemon_up'])
self.assertAlmostEquals(datetime.datetime.now(), x['daemon_timestamp'], delta=datetime.timedelta(seconds=1))
self.assertTrue(0 < x['daemon_pid'] < 65536)
def test_with_down_file(self):
self._create_test_daemon(1)
with open(os.path.join(self._daemon_supervise_dir(1), 'down'), 'w'):
pass
x = svstat(self._daemon_dir(1))
self.assertFalse(x['daemon_paused'])
self.assertFalse(x['daemon_autostart'])
def test_svcontrol(self):
self.assertFalse(svcontrol('/some/nonexisting/directory', 'u'))
self._create_test_daemon(1)
p = self._daemon_dir(1)
self.assertTrue(svcontrol(p, 'u'))
self.assertFalse(svstat(p)['daemon_once'])
self.assertTrue(svstat(p)['daemon_up'])
ts1 = svstat(p)['daemon_timestamp']
svcontrol(p, 'd') # stop the daemon
time.sleep(1)
ts2 = svstat(p)['daemon_timestamp']
self.assertGreater(ts2, ts1)
self.assertFalse(svstat(p)['daemon_once'])
self.assertFalse(svstat(p)['daemon_up'])
svcontrol(p, 'o') # start daemon in 'run_once' mode
time.sleep(1)
ts3 = svstat(p)['daemon_timestamp']
self.assertGreater(ts3, ts2)
self.assertTrue(svstat(p)['daemon_once'])
self.assertTrue(svstat(p)['daemon_up'])
        svcontrol(p, 'd') # stop the daemon again
time.sleep(1)
        ts4 = svstat(p)['daemon_timestamp']
        self.assertGreater(ts4, ts3)
self.assertFalse(svstat(p)['daemon_once'])
self.assertFalse(svstat(p)['daemon_up'])
def test_many_daemons(self):
for i in range(15):
self._create_test_daemon(i)
for i in range(15):
self.assertTrue(svstat(self._daemon_dir(i))['alive'])
| mit | -5,285,409,751,470,107,000 | 34.741935 | 116 | 0.610108 | false |
suselrd/django-content-interactions | content_interactions_stats/handlers.py | 1 | 7773 | # coding=utf-8
from django.contrib.contenttypes.models import ContentType
from settings import (
CONTENT_INTERACTIONS_LIKE_PROCESSING_DELAY,
CONTENT_INTERACTIONS_RATE_PROCESSING_DELAY,
CONTENT_INTERACTIONS_FAVORITE_PROCESSING_DELAY,
CONTENT_INTERACTIONS_DENOUNCE_PROCESSING_DELAY,
CONTENT_INTERACTIONS_SHARE_PROCESSING_DELAY,
CONTENT_INTERACTIONS_COMMENT_PROCESSING_DELAY,
CONTENT_INTERACTIONS_VISIT_PROCESSING_DELAY,
)
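# Each handler below defers stats processing to an async task when the matching
# *_PROCESSING_DELAY setting is enabled and the tasks module imports cleanly;
# otherwise it falls back to the synchronous helper of the same name in utils.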
# noinspection PyUnresolvedReferences,PyUnusedLocal
def like_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_LIKE_PROCESSING_DELAY:
try:
from tasks import item_like_process
item_like_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_like_process as sync_item_like_process
sync_item_like_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def dislike_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_LIKE_PROCESSING_DELAY:
try:
from tasks import item_dislike_process
item_dislike_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_dislike_process as sync_item_dislike_process
sync_item_dislike_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def new_rating_handler(instance, rating, **kwargs):
if CONTENT_INTERACTIONS_RATE_PROCESSING_DELAY:
try:
from tasks import item_new_rating_process
item_new_rating_process.delay(instance.pk, ContentType.objects.get_for_model(instance), rating)
return
except ImportError:
pass
from utils import item_new_rating_process as sync_item_new_rating_process
sync_item_new_rating_process(instance.pk, ContentType.objects.get_for_model(instance), rating)
# noinspection PyUnresolvedReferences,PyUnusedLocal
def updated_rating_handler(instance, rating, old_rating, **kwargs):
if CONTENT_INTERACTIONS_RATE_PROCESSING_DELAY:
try:
from tasks import item_updated_rating_process
item_updated_rating_process.delay(
instance.pk, ContentType.objects.get_for_model(instance), old_rating, rating
)
return
except ImportError:
pass
from utils import item_updated_rating_process as sync_item_updated_rating_process
sync_item_updated_rating_process(instance.pk, ContentType.objects.get_for_model(instance), old_rating, rating)
# noinspection PyUnresolvedReferences,PyUnusedLocal
def update_cached_rating(instance, **kwargs):
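    # Recompute the cached average rating as the count-weighted mean of the 1-5 star buckets.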
instance.rating = (
5 * instance.rating_5_count +
4 * instance.rating_4_count +
3 * instance.rating_3_count +
2 * instance.rating_2_count +
instance.rating_1_count
)/(instance.ratings * float(1)) if instance.ratings else 0
return instance
# noinspection PyUnresolvedReferences,PyUnusedLocal
def favorite_mark_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_FAVORITE_PROCESSING_DELAY:
try:
from tasks import item_marked_favorite_process
item_marked_favorite_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_marked_favorite_process as sync_item_marked_favorite_process
sync_item_marked_favorite_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def favorite_unmark_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_FAVORITE_PROCESSING_DELAY:
try:
from tasks import item_unmarked_favorite_process
item_unmarked_favorite_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_unmarked_favorite_process as sync_item_unmarked_favorite_process
sync_item_unmarked_favorite_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def denounce_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_DENOUNCE_PROCESSING_DELAY:
try:
from tasks import item_denounced_process
item_denounced_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_denounced_process as sync_item_denounced_process
sync_item_denounced_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def denounce_remove_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_DENOUNCE_PROCESSING_DELAY:
try:
from tasks import item_denounce_removed_process
item_denounce_removed_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_denounce_removed_process as sync_item_denounce_removed_process
sync_item_denounce_removed_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def share_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_SHARE_PROCESSING_DELAY:
try:
from tasks import item_shared_process
item_shared_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_shared_process as sync_item_shared_process
sync_item_shared_process(instance.pk, ContentType.objects.get_for_model(instance))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def comment_handler(instance, user, answer_to, **kwargs):
commented_item = instance.content_object
if CONTENT_INTERACTIONS_COMMENT_PROCESSING_DELAY:
try:
from tasks import item_got_comment_process
item_got_comment_process.delay(commented_item.pk, ContentType.objects.get_for_model(commented_item))
return
except ImportError:
pass
from utils import item_got_comment_process as sync_item_got_comment_process
sync_item_got_comment_process(commented_item.pk, ContentType.objects.get_for_model(commented_item))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def comment_deleted_handler(instance, **kwargs):
commented_item = instance.content_object
if CONTENT_INTERACTIONS_COMMENT_PROCESSING_DELAY:
try:
from tasks import item_comment_deleted_process
item_comment_deleted_process.delay(commented_item.pk, ContentType.objects.get_for_model(commented_item))
return
except ImportError:
pass
from utils import item_comment_deleted_process as sync_item_comment_deleted_process
sync_item_comment_deleted_process(commented_item.pk, ContentType.objects.get_for_model(commented_item))
# noinspection PyUnresolvedReferences,PyUnusedLocal
def visit_handler(instance, **kwargs):
if CONTENT_INTERACTIONS_VISIT_PROCESSING_DELAY:
try:
from tasks import item_visited_process
item_visited_process.delay(instance.pk, ContentType.objects.get_for_model(instance))
return
except ImportError:
pass
from utils import item_visited_process as sync_item_visited_process
sync_item_visited_process(instance.pk, ContentType.objects.get_for_model(instance))
| bsd-3-clause | 8,075,730,207,214,451,000 | 41.47541 | 116 | 0.717226 | false |