repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
onshape-public/onshape-clients | python/onshape_client/oas/models/bt_torus_description1834.py | 1 | 8265 | # coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_surface_description1564
except ImportError:
bt_surface_description1564 = sys.modules[
"onshape_client.oas.models.bt_surface_description1564"
]
try:
from onshape_client.oas.models import bt_torus_description1834_all_of
except ImportError:
bt_torus_description1834_all_of = sys.modules[
"onshape_client.oas.models.bt_torus_description1834_all_of"
]
try:
from onshape_client.oas.models import bt_vector3d389
except ImportError:
bt_vector3d389 = sys.modules["onshape_client.oas.models.bt_vector3d389"]
class BTTorusDescription1834(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("type",): {
"PLANE": "PLANE",
"CYLINDER": "CYLINDER",
"CONE": "CONE",
"SPHERE": "SPHERE",
"TORUS": "TORUS",
"SPUN": "SPUN",
"SWEEP": "SWEEP",
"OFFSET": "OFFSET",
"BLEND": "BLEND",
"BSURFACE": "BSURFACE",
"OTHER": "OTHER",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"axis": (bt_vector3d389.BTVector3d389,), # noqa: E501
"bt_type": (str,), # noqa: E501
"major_radius": (float,), # noqa: E501
"minor_radius": (float,), # noqa: E501
"origin": (bt_vector3d389.BTVector3d389,), # noqa: E501
"type": (str,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"axis": "axis", # noqa: E501
"bt_type": "btType", # noqa: E501
"major_radius": "majorRadius", # noqa: E501
"minor_radius": "minorRadius", # noqa: E501
"origin": "origin", # noqa: E501
"type": "type", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""bt_torus_description1834.BTTorusDescription1834 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
axis (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
bt_type (str): [optional] # noqa: E501
major_radius (float): [optional] # noqa: E501
minor_radius (float): [optional] # noqa: E501
origin (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
type (str): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
bt_surface_description1564.BTSurfaceDescription1564,
bt_torus_description1834_all_of.BTTorusDescription1834AllOf,
],
"oneOf": [],
}
| mit | 504,170,164,720,567,740 | 34.934783 | 87 | 0.576891 | false | 4.027778 | true | false | false |
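A minimal usage sketch for the generated model above, assuming the onshape_client package is installed; keyword names follow the openapi_types()/attribute_map declarations, and the bt_type string and radius values are illustrative assumptions, not taken from the file:

# Hedged usage sketch -- not part of the generated file above.
from onshape_client.oas.models.bt_torus_description1834 import BTTorusDescription1834
# Keyword names must match openapi_types(); "type" is validated against allowed_values.
torus = BTTorusDescription1834(
    bt_type="BTTorusDescription-1834",  # assumed discriminator string, not taken from the file
    type="TORUS",
    major_radius=0.05,
    minor_radius=0.01,
)
print(torus.major_radius, torus.type)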
mazulo/simplemooc | simplemooc/courses/admin.py | 1 | 1421 | from django.contrib import admin
# Register your models here.
from .models import (
Course,
CourseTRB,
CourseRequest,
Announcement,
Comment,
Enrollment,
Lesson,
LessonTRB,
Material,
KnowledgeLevel,
ChooseKnowledgeLevel,
ChooseCategoryCognitiveProcess,
CategoryCognitiveProcess,
Verb,
)
class CourseAdmin(admin.ModelAdmin):
list_display = ['name', 'slug', 'start_date', 'created_at']
search_fields = ['name', 'slug']
prepopulated_fields = {'slug': ['name']}
class CourseTRBAdmin(CourseAdmin):
pass
class CourseRequestAdmin(admin.ModelAdmin):
list_display = ['name', 'description', 'start_date', 'professor', 'is_trb']
search_fields = ['name', 'professor']
class LessonAdmin(admin.ModelAdmin):
list_display = ['name', 'number', 'course', 'release_date']
search_fields = ['name', 'description']
list_filter = ['created_at']
admin.site.register(Course, CourseAdmin)
admin.site.register(CourseTRB, CourseTRBAdmin)
admin.site.register(CourseRequest, CourseRequestAdmin)
admin.site.register([Enrollment, Announcement, Comment, Material])
admin.site.register(Lesson, LessonAdmin)
admin.site.register(LessonTRB, LessonAdmin)
admin.site.register(KnowledgeLevel)
admin.site.register(ChooseKnowledgeLevel)
admin.site.register(CategoryCognitiveProcess)
admin.site.register(ChooseCategoryCognitiveProcess)
admin.site.register(Verb)
| mit | 7,870,221,218,341,444,000 | 25.811321 | 79 | 0.729768 | false | 3.383333 | false | false | false |
davidnk/insolater | setup.py | 1 | 1092 | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
packages = ['insolater']
requires = []
with open('README.rst') as f:
readme = f.read()
with open('LICENSE.txt') as f:
license = f.read()
setup(
name='insolater',
version='0.0.1',
description='Tool to easily switch between original and modified versions of a directory.',
long_description=readme,
author='David Karesh',
author_email='[email protected]',
url='github.com/davidnk/insolater',
download_url="https://github.com/davidnk/insolater/tarball/v0.0.1",
packages=['insolater'],
include_package_data=True,
install_requires=['argparse', 'pexpect'],
license=license,
entry_points={'console_scripts': ['inso = insolater.run:main']},
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
),
)
| mit | -6,779,866,917,094,095,000 | 29.333333 | 95 | 0.64011 | false | 3.765517 | false | true | false |
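The entry_points declaration above wires the "inso" console command to insolater.run:main. A hedged sketch of what that script resolves to after installation, assuming the package exposes run.main exactly as declared:

# Hedged sketch of the "inso" console script behaviour after "pip install .".
import sys
from insolater.run import main  # module path taken from the entry_points declaration above
sys.exit(main())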
pyfa-org/eos | tests/integration/calculator/similar_mods/test_dogma.py | 1 | 4872 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Ship
from eos.const.eos import ModAffecteeFilter
from eos.const.eos import ModDomain
from eos.const.eos import ModOperator
from eos.const.eve import EffectCategoryId
from tests.integration.calculator.testcase import CalculatorTestCase
class TestSimilarModifiersDogma(CalculatorTestCase):
def make_modifier(self, src_attr, tgt_attr):
return self.mkmod(
affectee_filter=ModAffecteeFilter.item,
affectee_domain=ModDomain.self,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.post_percent,
affector_attr_id=src_attr.id)
def test_same_item(self):
# Real scenario - capital ships boost their agility via proxy attrs
# Setup
tgt_attr = self.mkattr()
src_attr1 = self.mkattr()
src_attr2 = self.mkattr()
modifier1 = self.make_modifier(src_attr1, tgt_attr)
modifier2 = self.make_modifier(src_attr2, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1])
effect2 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier2])
item = Ship(self.mktype(
attrs={src_attr1.id: 20, src_attr2.id: 20, tgt_attr.id: 100},
effects=(effect1, effect2)).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_same_item_attr(self):
# Setup
tgt_attr = self.mkattr()
src_attr = self.mkattr()
modifier1 = self.make_modifier(src_attr, tgt_attr)
modifier2 = self.make_modifier(src_attr, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1])
effect2 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier2])
item = Ship(self.mktype(
attrs={src_attr.id: 20, tgt_attr.id: 100},
effects=(effect1, effect2)).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_same_item_effect(self):
# Setup
tgt_attr = self.mkattr()
src_attr1 = self.mkattr()
src_attr2 = self.mkattr()
modifier1 = self.make_modifier(src_attr1, tgt_attr)
modifier2 = self.make_modifier(src_attr2, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1, modifier2])
item = Ship(self.mktype(
attrs={src_attr1.id: 20, src_attr2.id: 20, tgt_attr.id: 100},
effects=[effect1]).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_same_item_effect_attr(self):
# Setup
tgt_attr = self.mkattr()
src_attr = self.mkattr()
modifier1 = self.make_modifier(src_attr, tgt_attr)
modifier2 = self.make_modifier(src_attr, tgt_attr)
effect1 = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier1, modifier2])
item = Ship(self.mktype(
attrs={src_attr.id: 20, tgt_attr.id: 100},
effects=[effect1]).id)
# Action
self.fit.ship = item
# Verification
self.assertAlmostEqual(item.attrs[tgt_attr.id], 144)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 | -3,637,831,094,017,428,500 | 37.362205 | 80 | 0.613095 | false | 3.702128 | true | false | false |
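Every assertion above expects 144 because ModOperator.post_percent applies the two 20% bonuses multiplicatively to the base attribute value of 100. A standalone arithmetic check:

# Why the tests assert 144: post_percent means value * (1 + pct / 100) per modifier.
base = 100.0
for pct in (20, 20):  # two independent 20% modifiers
    base *= 1 + pct / 100.0
print(base)  # 144.0 == 100 * 1.2 * 1.2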
r-portas/brisbane-bus-stops | analyse.py | 1 | 2506 | """Analyses datasets"""
from csv import reader
def parse_bus_stops(filename, suburb_filter=""):
"""Parses a csv file of bus stops
Returns a list of bus stops
"""
bus_stops = []
with open(filename, "rb") as bus_stop_file:
bus_csv_reader = reader(bus_stop_file)
header = bus_csv_reader.next()
# Each second line of the file is garbage
toggle = 0
for line in bus_csv_reader:
if toggle:
if suburb_filter != "":
if line[3] == suburb_filter:
bus_stops.append(BusStop(line[0], line[2], line[3], line[7], line[8]))
else:
bus_stops.append(BusStop(line[0], line[2], line[3], line[7], line[8]))
toggle = 0
else:
toggle = 1
return bus_stops
"""Finds the middle location of all stops in the list, used for centering the map on the points
Return a list of coordinates, [lat, long]
"""
def get_mid_location(bus_stops):
max_lat = 0
min_lat = 0
max_long = 0
min_long = 0
for stop in bus_stops:
# Find the lats
if max_lat == 0:
max_lat = stop.lat
else:
if max_lat < stop.lat:
max_lat = stop.lat
if min_lat == 0:
min_lat = stop.lat
else:
if min_lat > stop.lat:
min_lat = stop.lat
# Find the longs
if max_long == 0:
max_long = stop.long
else:
if max_long < stop.long:
max_long = stop.long
if min_long == 0:
min_long = stop.long
else:
if min_long > stop.long:
min_long = stop.long
mid_lat = ((max_lat - min_lat) / 2) + min_lat
mid_long = ((max_long - min_long) / 2) + min_long
return [mid_lat, mid_long]
"""Stores a bus stop"""
class BusStop:
def __init__(self, stopid, road, suburb, lat, long):
self.stopid = stopid
self.road = road
self.suburb = suburb
self.lat = float(lat)
self.long = float(long)
def __repr__(self):
return "{} - {}, {} - ({}, {})".format(self.stopid, self.road, self.suburb, self.long, self.lat)
def get_location(self):
"""Returns the location of the bus stop in a list [lat, long]"""
return [self.lat, self.long]
if __name__ == "__main__":
stops = parse_bus_stops("datasets/dataset_bus_stops.csv")
    print(stops)
| mit | -4,975,846,715,688,513,000 | 26.549451 | 104 | 0.513966 | false | 3.524613 | false | false | false |
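A short usage sketch for the helpers above, meant to run in the same module; the CSV path and suburb name are illustrative assumptions:

# Hedged usage sketch for analyse.py; path and suburb are placeholders.
stops = parse_bus_stops("datasets/dataset_bus_stops.csv", suburb_filter="St Lucia")
if stops:
    centre_lat, centre_long = get_mid_location(stops)
    print("centre the map on", centre_lat, centre_long)
    for stop in stops[:5]:
        print(stop)  # BusStop.__repr__ prints id, road, suburb and coordinates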
lyndon160/REF | openflow_bandwidth/SwitchPoll.py | 1 | 1690 | from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
import time
from ryu.lib.packet import ethernet
class SwitchPoll():
def __init__(self):
self._running = True
def terminate(self):
self._running = False
#input switch to send to
def send_port_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPPortStatsRequest(datapath, 0, ofp.OFPP_ANY)
datapath.send_msg(req)
def send_flow_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPFlowStatsRequest(datapath, 0, ofp.OFPTT_ALL,
ofp.OFPP_ANY, ofp.OFPG_ANY,
0, 0, ofp_parser.OFPMatch())
datapath.send_msg(req)
def send_meter_stats_request(self, datapath):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
req = ofp_parser.OFPMeterStatsRequest(datapath, 0, ofp.OFPM_ALL)
datapath.send_msg(req)
#input time for every request and list of switches to request to
def run(self, pollTime,datapathdict):
        while self._running:  # terminate() flips this flag and stops the polling loop
for the_key, datapath in datapathdict.iteritems():
self.send_port_stats_request(datapath)
self.send_flow_stats_request(datapath)
self.send_meter_stats_request(datapath)
time.sleep(pollTime)
| apache-2.0 | -959,407,658,754,910,100 | 34.208333 | 72 | 0.642604 | false | 3.995272 | false | false | false |
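SwitchPoll.run() blocks while it polls, so a Ryu controller would normally drive it from a green thread and keep the datapath dictionary current itself. The wiring below is a hedged sketch: the PollingApp class and its bookkeeping are assumptions, only SwitchPoll comes from the file above.

# Hypothetical Ryu app wiring for SwitchPoll; only SwitchPoll itself is from the file above.
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER, set_ev_cls
from ryu.lib import hub

class PollingApp(app_manager.RyuApp):
    def __init__(self, *args, **kwargs):
        super(PollingApp, self).__init__(*args, **kwargs)
        self.datapaths = {}
        self.poller = SwitchPoll()
        self.thread = hub.spawn(self.poller.run, 10, self.datapaths)  # poll every 10 s

    @set_ev_cls(ofp_event.EventOFPStateChange, MAIN_DISPATCHER)
    def _remember_datapath(self, ev):
        # keep the dictionary SwitchPoll.run() iterates over up to date
        self.datapaths[ev.datapath.id] = ev.datapath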
fcl-93/rootio_web | rootio/radio/views.py | 1 | 22221 | # -*- coding: utf-8 -*-
from __future__ import print_function
import string
import random
import os
import re
from datetime import datetime, timedelta
import sys
import time
import dateutil.rrule, dateutil.parser
from flask import g, current_app, Blueprint, render_template, request, flash, Response, json, url_for
from flask.ext.login import login_required, current_user
from crontab import CronTab
from flask.ext.babel import gettext as _
from sqlalchemy.exc import IntegrityError
from werkzeug.utils import redirect
from ..telephony import Message
from .models import Station, Program, ScheduledBlock, ScheduledProgram, Location, Person, StationhasBots, Language, ProgramType, MediaFiles
from .forms import StationForm, ProgramForm, BlockForm, LocationForm, ScheduleProgramForm, PersonForm, AddBotForm, MediaForm
from ..decorators import returns_json, returns_flat_json
from ..utils import error_dict, fk_lookup_form_data, allowed_audio_file, ALLOWED_AUDIO_EXTENSIONS
from ..extensions import db
from ..utils_bot import add_cron, send_mail, removeCron
from werkzeug import secure_filename
import mutagen
from ..messenger import messages
radio = Blueprint('radio', __name__, url_prefix='/radio')
@radio.route('/', methods=['GET'])
def index():
stations = Station.query.all()
return render_template('radio/index.html',stations=stations)
@radio.route('/emergency/', methods=['GET'])
def emergency():
stations = Station.query.all()
#demo, override station statuses
for s in stations:
s.status = "on"
#end demo
return render_template('radio/emergency.html',stations=stations)
@radio.route('/station/', methods=['GET'])
def stations():
stations = Station.query.order_by('name').all()
return render_template('radio/stations.html', stations=stations, active='stations')
@radio.route('/station/<int:station_id>', methods=['GET', 'POST'])
def station(station_id):
station = Station.query.filter_by(id=station_id).first_or_404()
form = StationForm(obj=station, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(station)
db.session.add(station)
db.session.commit()
flash(_('Station updated.'), 'success')
return render_template('radio/station.html', station=station, form=form)
@radio.route('/station/add/', methods=['GET', 'POST'])
@login_required
def station_add():
form = StationForm(request.form)
station = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
cleaned_data.pop('phone_inline',None) #and also inline forms
cleaned_data.pop('location_inline',None)
station = Station(**cleaned_data) #create new object from data
db.session.add(station)
db.session.commit()
flash(_('Station added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'),'error')
return render_template('radio/station.html', station=station, form=form)
@radio.route('/program/', methods=['GET'])
def programs():
programs = Program.query.all()
return render_template('radio/programs.html', programs=programs, active='programs')
@radio.route('/people/', methods=['GET'])
def people():
people = Person.query.all()
return render_template('radio/people.html', people=people, active='people')
@radio.route('/people/<int:person_id>', methods=['GET', 'POST'])
def person(person_id):
person = Person.query.filter_by(id=person_id).first_or_404()
form = PersonForm(obj=person, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(person)
db.session.add(person)
db.session.commit()
flash(_('Person updated.'), 'success')
return render_template('radio/person.html', person=person, form=form)
@radio.route('/people/add/', methods=['GET', 'POST'])
@login_required
def person_add():
form = PersonForm(request.form)
person = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
person = Person(**cleaned_data) #create new object from data
db.session.add(person)
db.session.commit()
flash(_('Person added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'),'error')
return render_template('radio/person.html', person=person, form=form)
@radio.route('/location/add/ajax/', methods=['POST'])
@login_required
@returns_json
def location_add_ajax():
data = json.loads(request.data)
#handle floats individually
float_vals = ['latitude','longitude']
for field in float_vals:
try:
data[field] = float(data[field])
except ValueError:
response = {'status':'error','errors':{field:_('Invalid ')+field},'status_code':400}
return response
form = LocationForm(None, **data) #use this format to avoid multidict-type issue
location = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
location = Location(**cleaned_data) #create new object from data
db.session.add(location)
db.session.commit()
response = {'status':'success','result':{'id':location.id,'string':unicode(location)},'status_code':200}
elif request.method == "POST":
#convert the error dictionary to something serializable
response = {'status':'error','errors':error_dict(form.errors),'status_code':400}
return response
@radio.route('/block/', methods=['GET'])
def scheduled_blocks():
scheduled_blocks = ScheduledBlock.query.all()
#TODO, display only those that are scheduled on stations the user can view
return render_template('radio/scheduled_blocks.html', scheduled_blocks=scheduled_blocks, active='blocks')
@radio.route('/block/<int:block_id>', methods=['GET', 'POST'])
def scheduled_block(block_id):
block = ScheduledBlock.query.filter_by(id=block_id).first_or_404()
form = BlockForm(obj=block, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(block)
db.session.add(block)
db.session.commit()
flash(_('Block updated.'), 'success')
return render_template('radio/scheduled_block.html', scheduled_block=block, form=form)
@radio.route('/block/add/', methods=['GET', 'POST'])
@login_required
def scheduled_block_add():
form = BlockForm(request.form)
block = None
if form.validate_on_submit():
cleaned_data = form.data #make a copy
cleaned_data.pop('submit',None) #remove submit field from list
block = ScheduledBlock(**cleaned_data) #create new object from data
db.session.add(block)
db.session.commit()
flash(_('Block added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'),'error')
return render_template('radio/scheduled_block.html', block=block, form=form)
@radio.route('/scheduleprogram/add/ajax/', methods=['POST'])
@login_required
@returns_json
def schedule_program_add_ajax():
data = json.loads(request.data)
if not 'program' in data:
return {'status':'error','errors':'program required','status_code':400}
if not 'station' in data:
return {'status':'error','errors':'station required','status_code':400}
#lookup objects from ids
fk_errors = fk_lookup_form_data({'program':Program,'station':Station}, data)
if fk_errors:
return fk_errors
program = data['program']
scheduled_program = ScheduledProgram(program=data['program'], station=data['station'])
scheduled_program.start = dateutil.parser.parse(data['start'])
scheduled_program.end = scheduled_program.start + program.duration
db.session.add(scheduled_program)
db.session.commit()
return {'status':'success','result':{'id':scheduled_program.id},'status_code':200}
@radio.route('/scheduleprogram/delete/<int:_id>/', methods=['POST'])
@login_required
def delete_program(_id):
_program = ScheduledProgram.query.get(_id)
db.session.delete(_program)
db.session.commit()
return ""
@radio.route('/scheduleprogram/edit/ajax/', methods=['POST'])
@login_required
@returns_json
def schedule_program_edit_ajax():
data = json.loads(request.data)
if not 'scheduledprogram' in data:
return {'status':'error','errors':'scheduledprogram required','status_code':400}
#lookup objects from ids
fk_errors = fk_lookup_form_data({'scheduledprogram':ScheduledProgram}, data)
if fk_errors:
return fk_errors
scheduled_program = data['scheduledprogram']
scheduled_program.start = dateutil.parser.parse(data['start'])
program = scheduled_program.program
scheduled_program.end = scheduled_program.start + program.duration
db.session.add(scheduled_program)
db.session.commit()
return {'status':'success','result':{'id':scheduled_program.id},'status_code':200}
@radio.route('/scheduleprogram/add/recurring_ajax/', methods=['POST'])
@login_required
@returns_json
def schedule_recurring_program_ajax():
"Schedule a recurring program"
data = json.loads(request.data)
#ensure specified foreign key ids are valid
fk_errors = fk_lookup_form_data({'program':Program,'station':Station}, data)
if fk_errors:
return fk_errors
form = ScheduleProgramForm(None, **data)
try:
air_time = datetime.strptime(form.data['air_time'],'%H:%M').time()
except ValueError:
response = {'status':'error','errors':{'air_time':'Invalid time'},'status_code':400}
return response
if form.validate_on_submit():
#save refs to form objects
program = form.data['program']
station = form.data['station']
#parse recurrence rule
r = dateutil.rrule.rrulestr(form.data['recurrence'])
for instance in r[:10]: #TODO: dynamically determine instance limit
scheduled_program = ScheduledProgram(program=program, station=station)
scheduled_program.start = datetime.combine(instance,air_time) #combine instance day and air_time time
scheduled_program.end = scheduled_program.start + program.duration
db.session.add(scheduled_program)
db.session.commit()
response = {'status':'success','result':{},'status_code':200}
elif request.method == "POST":
response = {'status':'error','errors':error_dict(form.errors),'status_code':400}
return response
@radio.route('/station/<int:station_id>/scheduledprograms.json', methods=['GET'])
@returns_flat_json
def scheduled_programs_json(station_id):
if request.args.get('start') and request.args.get('end'):
start = dateutil.parser.parse(request.args.get('start'))
end = dateutil.parser.parse(request.args.get('end'))
scheduled_programs = ScheduledProgram.query.filter_by(station_id=station_id)
#TODO: filter by start > start, end < end
else:
scheduled_programs = ScheduledProgram.query.filter_by(station_id=station_id)
resp = []
for s in scheduled_programs:
d = {'title':s.program.name,
'start':s.start.isoformat(),
'end':s.end.isoformat(),
'id':s.id}
resp.append(d)
return resp
@radio.route('/station/<int:station_id>/scheduledblocks.json', methods=['GET'])
@returns_flat_json
def scheduled_block_json(station_id):
scheduled_blocks = ScheduledBlock.query.filter_by(station_id=station_id)
if not ('start' in request.args and 'end' in request.args):
return {'status':'error','errors':'scheduledblocks.json requires start and end','status_code':400}
#TODO: fullcalendar updates based on these params
start = dateutil.parser.parse(request.args.get('start'))
end = dateutil.parser.parse(request.args.get('end'))
resp = []
for block in scheduled_blocks:
r = dateutil.rrule.rrulestr(block.recurrence)
for instance in r.between(start,end):
d = {'title':block.name,
'start':datetime.combine(instance,block.start_time),
'end':datetime.combine(instance,block.end_time),
'id':block.id,
'isBackground':True, #the magic flag that tells full calendar to render as block
}
resp.append(d)
return resp
@radio.route('/schedule/', methods=['GET'])
def schedule():
#TODO, if user is authorized to view only one station, redirect them there
stations = Station.query.order_by('name').all()
return render_template('radio/schedules.html',
stations=stations, active='schedule')
@radio.route('/schedule/<int:station_id>/', methods=['GET'])
def schedule_station(station_id):
station = Station.query.filter_by(id=station_id).first_or_404()
#TODO: move this logic to an ajax call, like scheduled_block_json
scheduled_blocks = ScheduledBlock.query.filter_by(station_id=station.id)
block_list = []
for block in scheduled_blocks:
r = dateutil.rrule.rrulestr(block.recurrence)
for instance in r[:10]: #TODO: dynamically determine instance limit from calendar view
d = {'title':block.name,
'start':datetime.combine(instance,block.start_time),
'end':datetime.combine(instance,block.end_time)}
block_list.append(d)
form = ScheduleProgramForm()
all_programs = Program.query.all()
#TODO: filter by language?
return render_template('radio/schedule.html',
form=form, station=station, block_list=block_list, addable_programs=all_programs,
active='schedule')
@radio.route('/bots/', methods=['GET'])
def list_bots():
"""
Presents a list with all the bots that have been created and the radios where they\'re working
:return:
"""
stations = Station.query.all()
return render_template('radio/bots.html', stations=stations)
@radio.route('/bots/add/', methods=['GET', 'POST'])
@login_required
def new_bot_add():
"""
Renders the form to insert a new bot in the database.
Add cronJobs if the state bot is active
"""
form = AddBotForm(request.form)
bot = None
type = "add"
if form.validate_on_submit():
cleaned_data = form.data # make a copy
cleaned_data.pop('submit', None) # remove submit field from list
bot = StationhasBots(**cleaned_data) # create new object from data
try:
bot = add_cron(bot,type)
db.session.add(bot)
db.session.commit()
flash(_('Bot added.'), 'success')
except Exception as e:
removeCron(bot, CronTab(user=True))
db.session.rollback()
db.session.flush()
print (str(e))
send_mail("Error happened while you're adding a bot", str(e))
flash(_('Error Bot Not Added.'), 'error')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/bot.html', bot=bot, form=form)
@radio.route('/bot/<int:radio_id>/<int:function_id>', methods=['GET', 'POST'])
@login_required
def bot_edit(radio_id, function_id):
bot = StationhasBots.query.filter_by(fk_radio_station_id=radio_id, fk_bot_function_id=function_id).first_or_404()
form = AddBotForm(obj=bot, next=request.args.get('next'))
type = "edit"
if form.validate_on_submit():
form.populate_obj(bot)
try:
bot = add_cron(bot, type)
db.session.add(bot)
db.session.commit()
flash(_('Bot updated.'), 'success')
except Exception as e:
removeCron(bot,CronTab(user=True))
db.session.rollback()
db.session.flush()
print(str(e))
send_mail("Error happened editig the bot", str(e))
flash(_('Error Bot Not Updated.'), 'error')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/bot.html', bot=bot, form=form)
@radio.route('/media', methods=['GET', 'POST'])
@login_required
def media_files():
media = MediaFiles.query.all()
return render_template('radio/media.html', media=media)
@radio.route('/media/add', methods=['GET', 'POST'])
@login_required
def media_add():
form = MediaForm(request.form)
media = None
if form.validate_on_submit():
cleaned_data = form.data # make a copy
upload_file = request.files[form.path.name]
if upload_file and allowed_audio_file(upload_file.filename):
data = upload_file.read()
path_file = os.path.join(current_app.config['UPLOAD_FOLDER'], upload_file.filename)
open(path_file, 'w').write(data)
filename, file_extension = os.path.splitext(path_file)
if file_extension == '.wav':
import wave
import contextlib
with contextlib.closing(wave.open(path_file, 'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
duration = unicode(timedelta(seconds=frames / float(rate)))
else:
audio = mutagen.File(path_file)
duration = unicode(timedelta(seconds=audio.info.length))
cleaned_data.pop('submit', None) # remove submit field from list
cleaned_data['path'] = path_file
cleaned_data['duration'] = duration
media = MediaFiles(**cleaned_data) # create new object from data
db.session.add(media)
db.session.commit()
flash(_('Media File added.'), 'success')
else:
flash("Please upload files with extensions: %s" % "/".join(ALLOWED_AUDIO_EXTENSIONS), 'error')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/mediaform.html', media=media, form=form)
@radio.route('/media/<int:media_id>', methods=['GET', 'POST'])
@login_required
def media_edit(media_id):
media = MediaFiles.query.filter_by(id=media_id).first_or_404()
form = MediaForm(obj=media, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(media)
upload_file = request.files[form.path.name]
if upload_file and allowed_audio_file(upload_file.filename):
data = upload_file.read()
path_file = os.path.join(current_app.config['UPLOAD_FOLDER'], upload_file.filename)
open(path_file, 'w').write(data)
filename, file_extension = os.path.splitext(path_file)
if file_extension == '.wav':
import wave
import contextlib
with contextlib.closing(wave.open(path_file, 'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
duration = unicode(timedelta(seconds=frames / float(rate)))
else:
audio = mutagen.File(path_file)
duration = unicode(timedelta(seconds=audio.info.length))
media.path = path_file
media.duration = duration
db.session.add(media)
db.session.commit()
flash(_('Media File updated.'), 'success')
else:
flash("Please upload files with extensions: %s" % "/".join(ALLOWED_AUDIO_EXTENSIONS), 'error')
return render_template('radio/mediaform.html', media=media, form=form)
@radio.route('/media/list', methods=['GET', 'POST'])
@login_required
def media_list():
media = dict()
for m in MediaFiles.query.all():
media[m.id] = {'media_id': m.id, 'name': m.name, 'description': m.description, 'path': m.path,
'language': unicode(m.language), 'type': m.type,
'duration': m.duration}
return json.jsonify(media)
@radio.route('/media/find', methods=['GET', 'POST'])
@login_required
def media_find():
try:
media = MediaFiles.query.filter_by(path=request.form['path'])
return media[0].name
except:
media = MediaFiles.query.filter_by(path=request.form['path[]'])
return media[0].name
@radio.route('/sms/', methods=['GET', 'POST'])
@login_required
def list_sms():
messages = dict()
for m in Message.query.all():
messages[m.id] = {'message_id':m.id,'message_uuid':m.message_uuid,'sendtime':m.sendtime,
'text': m.text,'from_phonenumber_id':m.from_phonenumber_id,
'to_phonenumber_id':m.to_phonenumber_id,'onairprogram_id': m.onairprogram_id}
return json.jsonify(messages)
@radio.route('/program/add/', methods=['GET', 'POST'])
@login_required
def program_add():
form = ProgramForm(request.form)
program = None
if form.validate_on_submit():
cleaned_data = form.data # make a copy
cleaned_data.pop('submit', None) # remove submit field from list
cleaned_data['duration'] = request.form['est_time']
cleaned_data['description'] = request.form['description']
program = Program(**cleaned_data) # create new object from data
db.session.add(program)
db.session.commit()
flash(_('Program added.'), 'success')
elif request.method == "POST":
flash(_('Validation error'), 'error')
return render_template('radio/program.html', program=program, form=form)
@radio.route('/program/<int:program_id>', methods=['GET', 'POST'])
def program(program_id):
program = Program.query.filter_by(id=program_id).first_or_404()
form = ProgramForm(obj=program, next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(program)
program.duration = request.form['est_time']
program.description = request.form['description']
db.session.add(program)
db.session.commit()
flash(_('Program updated.'), 'success')
    return render_template('radio/program.html', program=program, form=form)
| agpl-3.0 | -2,909,168,198,307,072,500 | 35.489327 | 139 | 0.64169 | false | 3.803663 | false | false | false |
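schedule_recurring_program_ajax() above expands the submitted recurrence rule with dateutil and creates one ScheduledProgram per occurrence. The standalone sketch below isolates just that expansion step; the rule string, air time and duration are illustrative values, not taken from the application:

# Standalone illustration of the rrule expansion used in schedule_recurring_program_ajax().
from datetime import datetime, timedelta
import dateutil.rrule

air_time = datetime.strptime("18:30", "%H:%M").time()
duration = timedelta(minutes=45)
rule = dateutil.rrule.rrulestr("FREQ=WEEKLY;BYDAY=MO,WE;COUNT=4", dtstart=datetime(2014, 1, 6))
for instance in rule:
    start = datetime.combine(instance, air_time)  # same combine step as the view above
    print(start, "->", start + duration)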
vincentropy/battleground | battleground/persistence/game_data.py | 1 | 5042 | # from uuid import uuid4
import pymongo
from pymongo import MongoClient
import bson
import json
from os import environ
import datetime
global_client = None
def get_client():
global global_client
if global_client is None:
if "MONGO_HOST" in environ:
host = environ["MONGO_HOST"]
global_client = MongoClient(host)
else:
global_client = MongoClient()
return global_client
def get_db_handle(name=None, client=None):
if name is None:
name = "game_states"
if client is None:
client = get_client()
db_handle = client[name]
return db_handle
def save_game_states(game_id,
game_type,
game_states,
db_handle=None):
"""
save one or more documents to the data-store.
game_states: [dict,...]
each key, value will be stord as key: json(value) in the document.
expected keys are "game_state", "last_move" and "player_ids"
"""
if db_handle is None:
db_handle = get_db_handle()
collection = db_handle.game_states
all_docs = []
for i, game_state in enumerate(game_states):
doc = {
"sequence": i,
"game_id": game_id,
"game_type": game_type
}
for key, value in game_state.items():
assert key not in doc
doc[key] = json.dumps(value)
all_docs.append(doc)
result = collection.insert_many(all_docs)
return result
def save_game_meta_data(game_type, num_states, utc_time=None, db_handle=None):
if db_handle is None:
db_handle = get_db_handle()
if utc_time is None:
utc_time = str(datetime.datetime.utcnow())
doc = {
"game_type": game_type,
"utc_time": utc_time,
"num_states": num_states}
game_id = db_handle.games.insert_one(doc).inserted_id
return game_id
def save_game_history(game_type, game_states, db_handle=None):
"""
save a sequence of documents to the data-store.
game_states: array of dict
each array element will be stored as one document in the doc-store.
each key, value in each dict will be stored as key: json(value) in the document.
expected keys are "game_state", "last_move" and "player_ids"
"""
if db_handle is None:
db_handle = get_db_handle()
game_id = save_game_meta_data(game_type=game_type,
num_states=len(game_states),
db_handle=db_handle)
save_game_states(game_id=game_id,
game_type=game_type,
game_states=game_states,
db_handle=db_handle)
return game_id
def load_game_history(game_id, db_handle=None):
"""load all states with the same game ID and return an ordered sequence"""
if db_handle is None:
db_handle = get_db_handle()
if not isinstance(game_id, bson.ObjectId):
game_id = bson.ObjectId(str(game_id))
collection = db_handle.game_states
result = collection.find({"game_id": game_id})
data = result[:]
states_in_sequence = dict()
# now decode some of the values that are json strings
for loaded_doc in data:
output_doc = {}
for data_key in loaded_doc:
if data_key in ["game_state", "last_move"]:
# decode these two keys, because they are special
output_doc[data_key] = json.loads(loaded_doc[data_key])
else:
output_doc[data_key] = loaded_doc[data_key]
states_in_sequence[output_doc["sequence"]] = output_doc
states_in_sequence = [states_in_sequence[key] for key in range(len(states_in_sequence))]
return states_in_sequence
def get_games_list(game_type=None, db_handle=None):
"""
get a list of unique game IDs
"""
if db_handle is None:
db_handle = get_db_handle()
collection = db_handle.games
if game_type is None:
result = collection.find(sort=[('utc_time', pymongo.DESCENDING)])
else:
result = collection.find(sort=[('utc_time', pymongo.DESCENDING)],
filter={"game_type": game_type})
return result
def get_ids_to_purge_(date=None, db_handle=None):
if db_handle is None:
db_handle = get_db_handle()
games_list = get_games_list(db_handle=db_handle)
ids_to_purge = []
for game in games_list:
# print(game)
game_time = datetime.datetime.strptime(game['utc_time'], "%Y-%m-%d %H:%M:%S.%f")
if game_time < date:
ids_to_purge.append(game['_id'])
return ids_to_purge
def purge_game_data(date=None, db_handle=None):
if db_handle is None:
db_handle = get_db_handle()
ids_to_purge = get_ids_to_purge_(date, db_handle)
collection = db_handle.games
collection.delete_many({'_id': {'$in': ids_to_purge}})
collection = db_handle.game_states
collection.delete_many({'game_id': {'$in': ids_to_purge}})
| mit | 1,045,269,744,107,249,800 | 27.811429 | 92 | 0.590242 | false | 3.601429 | false | false | false |
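A round-trip sketch for the helpers above, meant to run in the same module; it assumes a reachable MongoDB (MONGO_HOST or localhost) and uses the document keys named in the save_game_states docstring:

# Round-trip sketch: save a short history, then read it back in sequence order.
states = [
    {"game_state": {"board": [0, 0, 0]}, "last_move": None, "player_ids": [1, 2]},
    {"game_state": {"board": [1, 0, 0]}, "last_move": {"cell": 0}, "player_ids": [1, 2]},
]
game_id = save_game_history("tic_tac_toe", states)
for doc in load_game_history(game_id):
    print(doc["sequence"], doc["game_state"], doc["last_move"])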
hongyan0118/uiautotestmacaca | Public/MacacaServer.py | 1 | 2320 | import os
import time
import requests
from multiprocessing import Pool
class MacacaServer:
def __init__(self, runs):
self._runs = runs
self._cmd = 'macaca server -p %s --verbose'
self._url = 'http://127.0.0.1:%s/wd/hub/status'
self._file = 'macaca_server_port_%s.log'
self._kill = 'taskkill /PID %d /F'
self._pids = []
@staticmethod
def server_url(port):
server_url = {
'hostname': '127.0.0.1',
'port': port,
}
return server_url
def start_server(self):
pool = Pool(processes=len(self._runs))
for run in self._runs:
pool.apply_async(self._run_server, args=(run,))
pool.close()
# after start macaca server, macaca server process can not return, so should not join
# p.join()
for run in self._runs:
while not self.is_running(run.get_port()):
print('wait macaca server all ready...')
time.sleep(1)
print('macaca server all ready')
#for run in self._runs:
#file = str(run.get_path() + '\\' + self._file) % run.get_port()
#with open(file, 'w+') as f:
#line = f.readline()
#start = line.find('pid:')
#end = line[start:].find(' ')
#pid = line[start:][4:end]
#self._pids.append(int(pid))
def _run_server(self, run):
port = run.get_port()
cmd = str(self._cmd + ' > ' + run.get_path() + '\\' + self._file) % (port, port)
os.system(cmd)
def is_running(self, port):
url = self._url % port
response = None
try:
response = requests.get(url, timeout=0.1)
if str(response.status_code).startswith('2'):
# data = json.loads((response.content).decode("utf-8"))
# if data.get("staus") == 0:
return True
return False
except requests.exceptions.ConnectionError:
return False
except requests.exceptions.ReadTimeout:
return False
finally:
if response:
response.close()
def kill_macaca_server(self):
for pid in self._pids:
os.popen(self._kill % pid)
| gpl-3.0 | -1,112,649,500,990,466,000 | 26.951807 | 93 | 0.510345 | false | 3.860233 | false | false | false |
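MacacaServer only needs "run" objects that expose get_port() and get_path(); that class is not part of the file above, so the stand-in below is an assumption:

# The Run class is a stand-in; only get_port()/get_path() are required by MacacaServer above.
class Run(object):
    def __init__(self, port, path):
        self._port, self._path = port, path
    def get_port(self):
        return self._port
    def get_path(self):
        return self._path

runs = [Run(3456, 'C:\\macaca\\logs'), Run(3457, 'C:\\macaca\\logs')]
server = MacacaServer(runs)
server.start_server()                    # blocks until every port answers /wd/hub/status
print(MacacaServer.server_url(3456))     # {'hostname': '127.0.0.1', 'port': 3456}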
mokyue/Modia-Player | Core/AudioManager.py | 1 | 11101 | # -*- coding: utf-8 -*-
from PyQt4.QtCore import QTime, Qt, QString
from PyQt4.QtGui import QMessageBox, QTableWidgetItem, QFileDialog, QDesktopServices
from PyQt4.phonon import Phonon
from Widget.MButton import MButton
import os
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class AudioManager():
def __init__(self, window, lyric_panel):
self.__main_window = window
self.__lyric_panel = lyric_panel
self.__audio_output = Phonon.AudioOutput(Phonon.MusicCategory, self.__main_window)
self.__media_object = Phonon.MediaObject(self.__main_window)
self.__media_object.setTickInterval(1000)
self.__media_object.tick.connect(self.tick)
self.__media_object.stateChanged.connect(self.stateChanged)
self.__media_object.currentSourceChanged.connect(self.currentSourceChanged)
self.__media_object.aboutToFinish.connect(self.aboutToFinish)
self.__meta_information_resolver = Phonon.MediaObject(self.__main_window)
self.__meta_information_resolver.stateChanged.connect(self.metaStateChanged)
self.__music_table = self.__main_window.getMusicTable()
self.__list_music = list()
Phonon.createPath(self.__media_object, self.__audio_output)
self.__register_ui()
def __register_ui(self):
self.__main_window.getActionBar().get_widget('SLD_VOL').setAudioOutput(self.__audio_output)
self.__main_window.getActionBar().get_widget('SLD_SEEK').setMediaObject(self.__media_object)
def tick(self, time):
self.__main_window.getActionBar().get_widget('LBL_TIME_REMAIN').setText(
QTime(0, (time / 60000) % 60, (time / 1000) % 60).toString('mm:ss'))
self.__lyric_panel.switchLyric(time)
def play(self, media_source=None):
if media_source != None:
if not isinstance(media_source, Phonon.MediaSource):
raise ValueError('Given argument not Phonon.MediaSource type. (Phonon.MediaSource type required)')
else:
self.__media_object.setCurrentSource(media_source)
if len(self.__list_music) < 1:
self.addMusic()
if len(self.__list_music) > 0:
self.__media_object.setCurrentSource(self.__list_music[len(self.__list_music) - 1])
self.__media_object.play()
def pause(self):
self.__media_object.pause()
def stop(self):
self.__media_object.stop()
def next(self):
index_next = self.__list_music.index(self.__media_object.currentSource()) + 1
if index_next < len(self.__list_music):
self.play(self.__list_music[index_next])
else:
self.play(self.__list_music[0])
def previous(self):
index_previous = self.__list_music.index(self.__media_object.currentSource()) - 1
if index_previous > -1:
self.play(self.__list_music[index_previous])
else:
self.play(self.__list_music[len(self.__list_music) - 1])
def stateChanged(self, newState, oldState):
if newState == Phonon.ErrorState:
if self.__media_object.errorType() == Phonon.FatalError:
QMessageBox.warning(self.__main_window, "Fatal Error", self.__media_object.errorString())
self.__media_object.setCurrentSource(self.__list_music[0])
self.__list_music.remove(self.__media_object.currentSource())
else:
QMessageBox.warning(self.__main_window, "Error", self.__media_object.errorString())
self.__media_object.setCurrentSource(self.__list_music[0])
self.__list_music.remove(self.__media_object.currentSource())
elif newState == Phonon.PlayingState:
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setMStyle(MButton.Type.Pause)
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setToolTip('暂停')
self.__main_window.setSuspendStatus(False)
if self.__media_object.isSeekable():
self.__main_window.getActionBar().get_widget('SLD_SEEK').setCursor(Qt.PointingHandCursor)
self.__main_window.getActionBar().get_widget('INDICT_INFO').setText(self.__get_music_display_info())
time_total = self.__media_object.totalTime()
self.__main_window.getActionBar().get_widget('LBL_TIME_TOTAL').setText(
QTime(0, (time_total / 60000) % 60, (time_total / 1000) % 60).toString('mm:ss'))
btn_music_stop = self.__main_window.getActionBar().get_widget('BTN_STOP')
if not btn_music_stop.isEnabled():
btn_music_stop.setEnabled(True)
self.__set_lyric(self.__media_object.currentSource().fileName())
elif newState == Phonon.StoppedState:
self.__main_window.getActionBar().get_widget('SLD_SEEK').setCursor(Qt.ArrowCursor)
self.__main_window.getActionBar().get_widget('INDICT_INFO').setText(u'无音乐')
self.__main_window.getActionBar().get_widget('LBL_TIME_TOTAL').setText('00:00')
btn_music_stop = self.__main_window.getActionBar().get_widget('BTN_STOP')
if btn_music_stop.isEnabled():
btn_music_stop.setEnabled(False)
self.__lyric_panel.setNoLyric()
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setMStyle(MButton.Type.Play)
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setToolTip('播放')
self.__main_window.setSuspendStatus(True)
elif newState == Phonon.PausedState:
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setMStyle(MButton.Type.Play)
self.__main_window.getActionBar().get_widget('BTN_PLAY_PAUSE').setToolTip('播放')
self.__main_window.setSuspendStatus(True)
if newState != Phonon.StoppedState and newState != Phonon.PausedState:
return
def __set_lyric(self, music_path):
lrc_path = str(music_path.left(music_path.lastIndexOf('.'))) + u'.lrc'
if os.path.exists(lrc_path):
self.__lyric_panel.setLyricFile(lrc_path)
else:
self.__lyric_panel.setNoLyric()
def __get_music_display_info(self):
metadata = self.__media_object.metaData()
str_title = metadata.get(QString('TITLE'), [''])[0]
if str_title != '':
str_indicator = str(str_title)
else:
str_indicator = str(self.__media_object.currentSource().fileName())
str_artist = metadata.get(QString('ARTIST'), [''])[0]
if str_artist != '':
str_indicator += ' - '
str_indicator += str(str_artist)
str_description = metadata.get(QString('DESCRIPTION'), [''])[0]
if str_description != '':
str_indicator += ' '
str_indicator += str(str_description)
return str_indicator
def metaStateChanged(self, newState, oldState):
if newState == Phonon.ErrorState:
QMessageBox.warning(self.__main_window, "Error opening files",
self.__meta_information_resolver.errorString())
while self.__list_music and self.__list_music.pop() != self.__meta_information_resolver.currentSource():
pass
return
if newState != Phonon.StoppedState and newState != Phonon.PausedState:
return
if self.__meta_information_resolver.currentSource().type() == Phonon.MediaSource.Invalid:
return
metaData = self.__meta_information_resolver.metaData()
title = metaData.get(QString('TITLE'), [''])[0]
if not title:
title = self.__meta_information_resolver.currentSource().fileName()
artist = metaData.get(QString('ARTIST'), [''])[0]
if artist:
title = title + ' - ' + artist
titleItem = QTableWidgetItem(title)
titleItem.setFlags(titleItem.flags() ^ Qt.ItemIsEditable)
titleItem.setTextAlignment(Qt.AlignLeft | Qt.AlignVCenter)
long_duration = self.__meta_information_resolver.totalTime()
total_time_item = QTableWidgetItem(
QTime(0, (long_duration / 60000) % 60, (long_duration / 1000) % 60).toString('mm:ss'))
total_time_item.setFlags(total_time_item.flags() ^ Qt.ItemIsEditable)
total_time_item.setTextAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
currentRow = self.__music_table.rowCount()
self.__music_table.insertRow(currentRow)
self.__music_table.setItem(currentRow, 0, titleItem)
self.__music_table.setItem(currentRow, 1, total_time_item)
if not self.__music_table.selectedItems():
self.__music_table.selectRow(0)
self.__media_object.setCurrentSource(self.__meta_information_resolver.currentSource())
index = self.__list_music.index(self.__meta_information_resolver.currentSource()) + 1
if len(self.__list_music) > index:
self.__meta_information_resolver.setCurrentSource(self.__list_music[index])
def currentSourceChanged(self, source):
self.__music_table.selectRow(self.__list_music.index(source))
def aboutToFinish(self):
index_next = self.__list_music.index(self.__media_object.currentSource()) + 1
if index_next < len(self.__list_music):
self.__media_object.enqueue(self.__list_music[index_next])
else:
self.__media_object.enqueue(self.__list_music[0])
def addMusic(self):
if len(self.__list_music) < 1:
is_empty = True
else:
is_empty = False
sources = QFileDialog.getOpenFileNames(self.__main_window, "Select Music Files",
QDesktopServices.storageLocation(QDesktopServices.MusicLocation))
if not sources:
return
index = len(self.__list_music)
for music_file in sources:
media_source = Phonon.MediaSource(music_file)
if not self.__is_existing(media_source):
self.__list_music.append(media_source)
if is_empty:
self.__media_object.setCurrentSource(self.__list_music[len(self.__list_music) - 1])
if index == len(self.__list_music):
return
if self.__list_music:
self.__meta_information_resolver.setCurrentSource(self.__list_music[index])
def __is_existing(self, media_source):
for ms in self.__list_music:
if media_source.fileName() == ms.fileName():
return True
return False
def clearQueue(self):
self.__media_object.clearQueue()
def setCurrentSourceByIndex(self, int_index):
self.__media_object.setCurrentSource(self.__list_music[int_index])
def getMediaObjectState(self):
return self.__media_object.state()
| unlicense | -5,796,457,515,968,300,000 | 48.607306 | 116 | 0.606334 | false | 3.805975 | false | false | false |
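The constructor above builds the standard Phonon graph (a MediaObject routed to an AudioOutput via createPath) and then drives the surrounding Qt widgets from its signals. Below is a minimal standalone version of just that pipeline, independent of the MainWindow/LyricPanel classes this file expects; the media file name is a placeholder:

# Minimal Phonon playback pipeline; "example.mp3" is a placeholder path.
import sys
from PyQt4.QtGui import QApplication
from PyQt4.phonon import Phonon

app = QApplication(sys.argv)
app.setApplicationName("phonon-sketch")  # Phonon needs an application name
audio_output = Phonon.AudioOutput(Phonon.MusicCategory)
media_object = Phonon.MediaObject()
Phonon.createPath(media_object, audio_output)
media_object.setCurrentSource(Phonon.MediaSource("example.mp3"))
media_object.play()
sys.exit(app.exec_())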
TamiaLab/carnetdumaker | apps/registration/managers.py | 1 | 5803 | """
Objects managers for the registration app.
"""
import re
import uuid
from django.db import models
from django.db.models import Q
from django.contrib.auth import get_user_model
from .signals import user_registered
class UserRegistrationManager(models.Manager):
"""
``UserRegistrationProfile`` objects manager.
"""
@staticmethod
def _generate_new_activation_key():
"""
Generate a new (random) activation key of 32 alphanumeric characters.
"""
return uuid.uuid4().hex
def create_inactive_user(self, username, email, password):
"""
Create a new inactive user using the given username, email and password.
Also create an ``UserRegistrationProfile`` for the newly created user.
Once the ``User`` and ``UserRegistrationProfile`` are created, send the
``user_registered`` signal for other apps to do their jobs.
Return the created ``UserRegistrationProfile`` for any external purpose.
:param username: The user's username.
:param email: The user's email address.
:param password: The user's password (plain text).
"""
new_user = get_user_model().objects.create_user(username, email, password)
new_user.is_active = False
new_user.save(update_fields=('is_active', ))
registration_profile = self._create_profile(new_user)
user_registered.send(sender=UserRegistrationManager, user=new_user)
return registration_profile
def _create_profile(self, user):
"""
Create a new ``UserRegistrationProfile`` for the given user.
"""
activation_key = self._generate_new_activation_key()
return self.create(user=user, activation_key=activation_key)
def delete_expired_users(self, queryset=None):
"""
Remove expired instances of ``UserRegistrationProfile`` and their
associated ``User``s.
Accounts to be deleted are identified by searching for
instances of ``UserRegistrationProfile`` with NOT USED expired activation
keys, and then checking to see if their associated ``User``
instances have the field ``is_active`` set to ``False``; any
``User`` who is both inactive and has a not used expired activation
key will be deleted. If the key has been used, the ``User`` will not
be deleted. This allow administrators to disable accounts temporally.
It is recommended that this method be executed regularly as
part of your routine site maintenance; this application
provides a custom management command which will call this
method, accessible as ``manage.py cleanupregistration``.
Regularly clearing out accounts which have never been
activated serves two useful purposes:
1. It alleviates the occasional need to reset a
``UserRegistrationProfile`` and/or re-send an activation email
when a user does not receive or does not act upon the
initial activation email; since the account will be
deleted, the user will be able to simply re-register and
receive a new activation key.
2. It prevents the possibility of a malicious user registering
one or more accounts and never activating them (thus
denying the use of those usernames to anyone else); since
those accounts will be deleted, the usernames will become
available for use again.
:param queryset: If the ``queryset`` parameter is not specified the cleanup process will run on
all the ``UserRegistrationProfile`` entries currently in database.
"""
if not queryset:
queryset = self.all()
# Delete all used activation key (optimal way)
queryset.filter(activation_key_used=True).delete()
# Delete all expired (but not used) activation key
# The filter(activation_key_used=False) avoid running race
for profile in queryset.filter(activation_key_used=False):
if profile.activation_key_expired():
try:
user = profile.user
if not user.is_active:
user.delete()
except get_user_model().DoesNotExist:
pass
profile.delete()
class BannedUsernameManager(models.Manager):
"""
``BannedUsername`` objects manager.
"""
def is_username_banned(self, username):
"""
Test if the given username is banned or not.
:param username: The username to be checked.
"""
return self.filter(username__iexact=username).exists()
class BannedEmailManager(models.Manager):
"""
``BannedEmail`` objects manager.
"""
def is_email_address_banned(self, email_address):
"""
Test if the given email address is banned or not.
:param email_address: The email address to be check.
"""
email_username, email_provider = email_address.split('@')
email_provider_no_tld = email_provider.rsplit('.', 1)[0]
banned = self.filter(Q(email__iexact=email_address) |
Q(email__iexact='%s@*' % email_username) |
Q(email__iexact='*@%s' % email_provider) |
Q(email__iexact='*@%s.*' % email_provider_no_tld)).exists()
if not banned:
# Use regex to get ride of Gmail dot trick
email_username_no_dot = email_username.replace('.', '')
username_re = r'\.?'.join(re.escape(email_username_no_dot))
provider_re = re.escape(email_provider)
return self.filter(email__iregex=r'^%s@(\*|%s)$' % (username_re, provider_re)).exists()
return True
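# Illustrative sketch (assumption, not part of the original code): with banned
# rows '*@spam.com' and '[email protected]', the manager above is intended to
# behave roughly as follows (normally reached via ``BannedEmail.objects``):
#
#     mgr.is_email_address_banned('[email protected]')    # True - provider-wide ban '*@spam.com'
#     mgr.is_email_address_banned('[email protected]')  # True - dot variant of the banned address
#
# The four Q() clauses cover the exact address, a banned local part at any
# provider ('user@*'), a banned provider ('*@provider'), and the banned
# provider under any TLD; the trailing regex handles Gmail-style dot aliases.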
| agpl-3.0 | -6,189,914,938,721,366,000 | 41.985185 | 103 | 0.633293 | false | 4.537138 | false | false | false |
matpow2/cuwo | tools/convertqmo.py | 1 | 2922 | # Copyright (c) Mathias Kaerlev 2013-2017.
#
# This file is part of cuwo.
#
# cuwo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cuwo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with cuwo. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import argparse
cmd_folder = os.path.realpath(os.path.abspath('.'))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
from cuwo.bytes import ByteReader, ByteWriter
from cuwo.qmo import QubicleFile, QubicleModel
from cuwo.cub import CubModel
import os
def switch_axes(x, y, z):
return x, z, y
def to_qmo(in_file, out_file):
cub = CubModel(ByteReader(open(in_file, 'rb').read()))
qmo_file = QubicleFile()
qmo_model = QubicleModel()
x_size, y_size, z_size = switch_axes(cub.x_size, cub.y_size, cub.z_size)
qmo_model.x_size = x_size
qmo_model.y_size = y_size
qmo_model.z_size = z_size
qmo_model.x_offset = -x_size / 2
qmo_model.y_offset = 0
qmo_model.z_offset = -z_size / 2
for k, v in cub.blocks.items():
x, y, z = k
x2, y2, z2 = switch_axes(x, y, z)
qmo_model.blocks[x2, y2, z2] = v
qmo_file.models.append(qmo_model)
writer = ByteWriter()
qmo_file.write(writer)
with open(out_file, 'wb') as fp:
fp.write(writer.get())
def to_cub(in_file, out_file):
qmo_file = QubicleFile(ByteReader(open(in_file, 'rb').read()))
qmo_model = qmo_file.models[0]
cub = CubModel()
x_size, y_size, z_size = switch_axes(qmo_model.x_size,
qmo_model.y_size,
qmo_model.z_size)
cub.x_size = x_size
cub.y_size = y_size
cub.z_size = z_size
for k, v in qmo_model.blocks.items():
x, y, z = k
x2, y2, z2 = switch_axes(x, y, z)
cub.blocks[x2, y2, z2] = v
writer = ByteWriter()
cub.write(writer)
with open(out_file, 'wb') as fp:
fp.write(writer.get())
def main():
parser = argparse.ArgumentParser(
description='Convert between cub and qmo files')
parser.add_argument('files', metavar='FILE', nargs='+',
help='path to file to convert')
for path in parser.parse_args().files:
print("Converting %r" % path)
filename, ext = os.path.splitext(path)
if ext == '.cub':
to_qmo(path, filename + '.qmo')
else:
to_cub(path, filename + '.cub')
if __name__ == '__main__':
main()
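# Example invocation (illustrative; file names are placeholders):
#   python convertqmo.py creature.cub   -> writes creature.qmo
#   python convertqmo.py creature.qmo   -> writes creature.cub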
| gpl-3.0 | 5,897,666,201,858,834,000 | 30.76087 | 76 | 0.615674 | false | 3.066107 | false | false | false |
ucsd-ccbb/Oncolist | src/restLayer/app/MyGeneInfo.py | 1 | 3530 | import sys
import pymongo
import requests
import urllib2
from app.util import set_status, create_edges_index
from app.status import Status
from bson.json_util import dumps
__author__ = 'aarongary'
def get_gene_info_by_id(gene_id):
#return ["UNKNOWN"]
alt_term_id = []
if(len(gene_id) > 2):
r_json = {}
try:
url = 'http://mygene.info/v3/query?q=' + gene_id
r = requests.get(url)
r_json = r.json()
if 'hits' in r_json and len(r_json['hits']) > 0:
for alt_term in r_json['hits']:
if(isinstance(alt_term['symbol'], list)):
alt_term_id.append(alt_term['symbol'][0].upper())
else:
alt_term_id.append(alt_term['symbol'].upper())
#gene_symbol = r_json['hits'][0]['symbol'].upper()
return alt_term_id
except Exception as e:
print e.message
return {'hits': [{'symbol': gene_id, 'entrezgene': '', 'name': 'Entrez results: 0'}]}
return ["UNKNOWN"]
else :
return ["UNKNOWN"]
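# Illustrative usage sketch (assumption, not part of the original module). The
# call performs a live HTTP request against mygene.info, so the exact symbols
# returned depend on the service:
#
#     symbols = get_gene_info_by_id('1017')   # '1017' is the Entrez ID for CDK2
#     # symbols is expected to be a list of upper-cased gene symbols, e.g. ['CDK2', ...]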
def get_entrezgene_info_by_symbol(gene_id):
if(len(gene_id) > 0):
try:
url = 'http://mygene.info/v3/query?q=' + gene_id
r = requests.get(url)
r_json = r.json()
if 'hits' in r_json and len(r_json['hits']) > 0:
for alt_term in r_json['hits']:
if(isinstance(alt_term['entrezgene'], list)):
return str(alt_term['entrezgene'][0])
else:
return str(alt_term['entrezgene'])
except Exception as e:
print e.message
return {'hits': [{'symbol': gene_id, 'entrezgene': '', 'name': 'Entrez results: 0'}]}
return ["UNKNOWN"]
else :
return ["UNKNOWN"]
def getMyGeneInfoByID(gene_id):
if(len(gene_id) > 0):
try:
mir_resolved_id = get_mir_name_converter(gene_id)
            if(mir_resolved_id != "UNKNOWN"):
url = 'http://mygene.info/v3/query?q=' + mir_resolved_id
r = requests.get(url)
r_json = r.json()
if 'hits' in r_json and len(r_json['hits']) > 0:
for alt_hit in r_json['hits']:
entrezgene_id = alt_hit['entrezgene']
url2 = 'http://mygene.info/v3/gene/' + str(entrezgene_id)
r2 = requests.get(url2)
r2_json = r2.json()
return r2_json
return r
else:
return "UNKNOWN TERM"
entrez_url = "http://mygene.info/v3/query?q=" + str(gene_id)
entrez_content = "";
entrez_data = {
'hits': []
}
for line in urllib2.urlopen(entrez_url):
entrez_content += line.rstrip() + " "
hit = {
'name': entrez_content,
'_score': 0,
'symbol': gene_id,
'source': 'Entrez'
}
entrez_data['hits'].append(hit)
except Exception as e:
print e.message
return {'hits': [{'symbol': gene_id, 'name': 'Entrez results: 0'}]}
return entrez_data
else :
return {'hits': [{'symbol': gene_id, 'name': 'not vailable'}]}
def get_mir_name_converter(mirna_id):
return "UNKNOWN" | mit | 789,984,549,225,411,300 | 29.439655 | 97 | 0.470822 | false | 3.707983 | false | false | false |
eugena/django-rest-auth | rest_auth/views.py | 1 | 7437 | from django.contrib.auth import login, logout, get_user_model
from django.conf import settings
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.authtoken.models import Token
from rest_framework.generics import RetrieveUpdateAPIView
from .app_settings import (
TokenSerializer, UserDetailsSerializer, SimpleLoginSerializer, SimpleTokenLoginSerializer,
LoginSerializer, PasswordResetSerializer, PasswordResetConfirmSerializer,
PasswordChangeSerializer
)
from .utils import get_user_id_by_session_key, flush_session_by_session_key
class BaseSimpleLoginView(GenericAPIView):
permission_classes = (AllowAny,)
def login(self):
self.user = self.serializer.validated_data['user']
if getattr(settings, 'REST_SESSION_LOGIN', True):
login(self.request, self.user)
def get_error_response(self):
return Response(
self.serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
def post(self, request, *args, **kwargs):
self.serializer = self.get_serializer(data=self.request.data)
if not self.serializer.is_valid():
return self.get_error_response()
self.login()
return Response({'session_key': request.session.session_key}, status=status.HTTP_200_OK)
class SimpleLoginView(BaseSimpleLoginView):
"""
Check the credentials and authenticated if the credentials are valid.
Calls Django Auth login method to register User ID
in Django session framework
Accept the following POST parameters: username, password
"""
serializer_class = SimpleLoginSerializer
class SimpleTokenLoginView(BaseSimpleLoginView):
"""
Check the credentials and authenticated if the credentials are valid.
Calls Django Auth login method to register User ID
in Django session framework
Accept the following POST parameters: uid, token
"""
serializer_class = SimpleTokenLoginSerializer
class LoginView(GenericAPIView):
"""
Check the credentials and return the REST Token
if the credentials are valid and authenticated.
Calls Django Auth login method to register User ID
in Django session framework
Accept the following POST parameters: username, password
Return the REST Framework Token Object's key.
"""
permission_classes = (AllowAny,)
serializer_class = LoginSerializer
token_model = Token
response_serializer = TokenSerializer
def login(self):
self.user = self.serializer.validated_data['user']
self.token, created = self.token_model.objects.get_or_create(
user=self.user)
if getattr(settings, 'REST_SESSION_LOGIN', True):
login(self.request, self.user)
def get_response(self):
return Response(
self.response_serializer(self.token).data, status=status.HTTP_200_OK
)
def get_error_response(self):
return Response(
self.serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
def post(self, request, *args, **kwargs):
self.serializer = self.get_serializer(data=self.request.data)
if not self.serializer.is_valid():
return self.get_error_response()
self.login()
return self.get_response()
class LogoutView(APIView):
"""
Calls Django logout method and delete the Token object
assigned to the current User object.
Accepts/Returns nothing.
"""
permission_classes = (AllowAny,)
def post(self, request, **kwargs):
if getattr(settings, 'USING_SESSION_KEY', False):
flush_session_by_session_key(self.kwargs.get('session_key'))
else:
try:
request.user.auth_token.delete()
except:
pass
logout(request)
response = Response(
{"success": "Successfully logged out."},
status=status.HTTP_200_OK)
response.delete_cookie(settings.SESSION_COOKIE_NAME)
return response
class UserDetailsView(RetrieveUpdateAPIView):
"""
Returns User's details in JSON format.
Accepts the following GET parameters: token
Accepts the following POST parameters:
Required: token
Optional: email, first_name, last_name and UserProfile fields
Returns the updated UserProfile and/or User object.
"""
serializer_class = UserDetailsSerializer
permission_classes = (IsAuthenticated,)
def get_object(self):
if getattr(settings, 'USING_SESSION_KEY', False):
try:
                user = get_user_model()._default_manager.get(
                    pk=get_user_id_by_session_key(self.kwargs.get('session_key') or None))
except:
user = None
else:
user = self.request.user
return user
class PasswordResetView(GenericAPIView):
"""
Calls Django Auth PasswordResetForm save method.
Accepts the following POST parameters: email
Returns the success/fail message.
"""
serializer_class = PasswordResetSerializer
permission_classes = (AllowAny,)
def post(self, request, *args, **kwargs):
# Create a serializer with request.data
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
serializer.save()
# Return the success message with OK HTTP status
return Response(
{"success": "Password reset e-mail has been sent."},
status=status.HTTP_200_OK
)
class PasswordResetConfirmView(GenericAPIView):
"""
Password reset e-mail link is confirmed, therefore this resets the user's password.
Accepts the following POST parameters: new_password1, new_password2
Accepts the following Django URL arguments: token, uid
Returns the success/fail message.
"""
serializer_class = PasswordResetConfirmSerializer
permission_classes = (AllowAny,)
def post(self, request):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
serializer.save()
return Response({"success": "Password has been reset with the new password."})
class PasswordChangeView(GenericAPIView):
"""
Calls Django Auth SetPasswordForm save method.
Accepts the following POST parameters: new_password1, new_password2
Returns the success/fail message.
"""
serializer_class = PasswordChangeSerializer
def __init__(self):
if not getattr(settings, 'USING_SESSION_KEY', False):
self.permission_classes = (IsAuthenticated,)
super(PasswordChangeView, self).__init__()
def post(self, request, **kwargs):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
serializer.save()
return Response({"success": "New password has been saved."})
| mit | 4,463,591,443,124,861,400 | 30.646809 | 110 | 0.669894 | false | 4.493656 | false | false | false |
twinaphex/sdcell | ps3_osk/rect_gen.py | 1 | 1904 | def printrect(x, y, w, h):
print "{{" + str(x) + ", " + str(y) + ", " + str(w) + ", " + str(h) + "}, HID_x, KMOD_NONE, 0},"
printrect(0, 0, 60, 60)
for i in range(0, 4):
printrect(120 + i * 60, 0, 60, 60)
for i in range(0, 4):
printrect(390 + i * 60, 0, 60, 60)
for i in range(0, 4):
printrect(660 + i * 60, 0, 60, 60)
for i in range(0, 3):
printrect(910 + i * 60, 0, 60, 60)
#ROW2
for i in range(0, 13):
printrect(i * 60, 90, 60, 60)
printrect(780, 90, 120, 60)
for i in range(0, 3):
printrect(910 + 60 * i, 90, 60, 60)
for i in range(0, 4):
printrect(1095 + 60 * i, 90, 60, 60)
#ROW3
printrect(0, 150, 90, 60)
for i in range(0, 12):
printrect(90 + i * 60, 150, 60, 60)
printrect(810, 150, 90, 60)
for i in range(0, 3):
printrect(910 + 60 * i, 150, 60, 60)
for i in range(0, 3):
printrect(1095 + 60 * i, 150, 60, 60)
printrect(1095 + 60 * 3, 150, 60, 120)
#ROW4
printrect(0, 210, 105, 60)
for i in range(0, 11):
printrect(105 + i * 60, 210, 60, 60)
printrect(765, 210, 135, 60)
for i in range(0, 3):
printrect(1095 + 60 * i, 210, 60, 60)
#ROW5
printrect(0, 270, 135, 60)
for i in range(0, 10):
printrect(135 + i * 60, 270, 60, 60)
printrect(735, 270, 165, 60)
printrect(910 + 60, 270, 60, 60)
for i in range(0, 3):
printrect(1095 + 60 * i, 270, 60, 60)
printrect(1095 + 60 * 3, 270, 60, 120)
#ROW 6
cursor = 0
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
printrect(cursor, 330, 60, 60)
cursor = cursor + 60
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
printrect(cursor, 330, 360, 60)
cursor = cursor + 360
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
printrect(cursor, 330, 60, 60)
cursor = cursor + 60
printrect(cursor, 330, 60, 60)
cursor = cursor + 60
printrect(cursor, 330, 90, 60)
cursor = cursor + 90
for i in range(0, 3):
printrect(910 + 60 * i, 330, 60, 60)
printrect(1095, 330, 120, 60)
printrect(1095 + 120, 330, 60, 60)
| lgpl-2.1 | 3,855,062,236,392,920,000 | 17.666667 | 97 | 0.60084 | false | 2.245283 | false | false | false |
hiuwo/acq4 | acq4/devices/Camera/taskGUI.py | 1 | 6177 | # -*- coding: utf-8 -*-
from PyQt4 import QtCore, QtGui
from TaskTemplate import *
from acq4.devices.DAQGeneric.taskGUI import DAQGenericTaskGui
from acq4.devices.Device import TaskGui
#from acq4.pyqtgraph.WidgetGroup import WidgetGroup
import numpy as np
import acq4.pyqtgraph as pg
#from acq4.pyqtgraph.graphicsItems import InfiniteLine, VTickGroup
#from PyQt4 import Qwt5 as Qwt
class CameraTaskGui(DAQGenericTaskGui):
def __init__(self, dev, taskRunner):
DAQGenericTaskGui.__init__(self, dev, taskRunner, ownUi=False) ## When initializing superclass, make sure it knows this class is creating the ui.
self.ui = Ui_Form()
self.ui.setupUi(self)
self.stateGroup = pg.WidgetGroup(self) ## create state group before DAQ creates its own interface
self.ui.horizSplitter.setStretchFactor(0, 0)
self.ui.horizSplitter.setStretchFactor(1, 1)
DAQGenericTaskGui.createChannelWidgets(self, self.ui.ctrlSplitter, self.ui.plotSplitter)
self.ui.plotSplitter.setStretchFactor(0, 10)
self.ui.plotSplitter.setStretchFactor(1, 1)
self.ui.plotSplitter.setStretchFactor(2, 1)
## plots should not be storing more than one trace at a time.
for p in self.plots.values():
p.plotItem.ctrl.maxTracesCheck.setChecked(True)
p.plotItem.ctrl.maxTracesSpin.setValue(1)
p.plotItem.ctrl.forgetTracesCheck.setChecked(True)
#self.stateGroup = WidgetGroup([
#(self.ui.recordCheck, 'record'),
#(self.ui.triggerCheck, 'trigger'),
#(self.ui.displayCheck, 'display'),
#(self.ui.recordExposeCheck, 'recordExposeChannel'),
#(self.ui.splitter, 'splitter')
#])
conf = self.dev.camConfig
#if 'exposeChannel' not in conf:
#self.ui.exposureGroupBox.hide()
#if 'triggerInChannel' not in conf:
#self.ui.triggerGroupBox.hide()
#if 'triggerOutChannel' not in conf:
#self.ui.triggerCheck.hide()
#self.exposeCurve = None
tModes = self.dev.listParams('triggerMode')[0]
#tModes.remove('Normal')
#tModes = ['Normal'] + tModes
for m in tModes:
item = self.ui.triggerModeCombo.addItem(m)
self.vLines = []
if 'trigger' in self.plots:
l = pg.InfiniteLine()
self.vLines.append(l)
self.plots['trigger'].addItem(self.vLines[0])
if 'exposure' in self.plots:
l = pg.InfiniteLine()
self.vLines.append(l)
self.plots['exposure'].addItem(self.vLines[1])
self.frameTicks = pg.VTickGroup()
self.frameTicks.setYRange([0.8, 1.0])
self.ui.imageView.sigTimeChanged.connect(self.timeChanged)
self.taskRunner.sigTaskPaused.connect(self.taskPaused)
def timeChanged(self, i, t):
for l in self.vLines:
l.setValue(t)
def saveState(self):
s = self.currentState()
s['daqState'] = DAQGenericTaskGui.saveState(self)
return s
def restoreState(self, state):
self.stateGroup.setState(state)
if 'daqState' in state:
DAQGenericTaskGui.restoreState(self, state['daqState'])
def generateTask(self, params=None):
daqProt = DAQGenericTaskGui.generateTask(self, params)
if params is None:
params = {}
state = self.currentState()
task = {
'record': state['recordCheck'],
#'recordExposeChannel': state['recordExposeCheck'],
'triggerProtocol': state['triggerCheck'],
'params': {
'triggerMode': state['triggerModeCombo']
}
}
task['channels'] = daqProt
if state['releaseBetweenRadio']:
task['pushState'] = None
task['popState'] = None
return task
def taskSequenceStarted(self):
DAQGenericTaskGui.taskSequenceStarted(self)
if self.ui.releaseAfterRadio.isChecked():
self.dev.pushState('cam_proto_state')
def taskFinished(self):
DAQGenericTaskGui.taskFinished(self)
if self.ui.releaseAfterRadio.isChecked():
self.dev.popState('cam_proto_state')
def taskPaused(self): ## If the task is paused, return the camera to its previous state until we start again
if self.ui.releaseAfterRadio.isChecked():
self.dev.popState('cam_proto_state')
self.dev.pushState('cam_proto_state')
def currentState(self):
return self.stateGroup.state()
def handleResult(self, result, params):
#print result
state = self.stateGroup.state()
if state['displayCheck']:
if result is None or len(result.frames()) == 0:
print "No images returned from camera task."
self.ui.imageView.clear()
else:
self.ui.imageView.setImage(result.asMetaArray())
#print " frame times:", list(result['frames'].xvals('Time'))
frameTimes, precise = result.frameTimes()
if precise:
self.frameTicks.setXVals(frameTimes)
DAQGenericTaskGui.handleResult(self, result.daqResult(), params)
#if state['displayExposureCheck'] and 'expose' in result and result['expose'] is not None:
#d = result['expose']
#if self.exposeCurve is None:
#self.exposeCurve = self.ui.exposePlot.plot(d.view(ndarray), x=d.xvals('Time'), pen=QtGui.QPen(QtGui.QColor(200, 200, 200)))
#else:
#self.exposeCurve.setData(y=d.view(ndarray), x=d.xvals('Time'))
#self.ui.exposePlot.replot()
#def recordExposeClicked(self):
#daq = self.dev.config['exposeChannel'][0]
#self.task.getDevice(daq)
def quit(self):
self.ui.imageView.close()
DAQGenericTaskGui.quit(self)
| mit | 601,782,246,092,227,100 | 37.36646 | 154 | 0.593816 | false | 3.899621 | false | false | false |
Scorched-Moon/server | server/miniboa/async.py | 1 | 6429 | """
Handle Asynchronous Telnet Connections.
"""
import socket
import select
import sys
import logging
from .telnet import TelnetClient
from .telnet import ConnectionLost
# Cap sockets to 512 on Windows because winsock can only process 512 at time
# Cap sockets to 1000 on UNIX because you can only have 1024 file descriptors
MAX_CONNECTIONS = 500 if sys.platform == 'win32' else 1000
#-----------------------------------------------------Dummy Connection Handlers
def _on_connect(client):
"""
Placeholder new connection handler.
"""
logging.info("++ Opened connection to {}, sending greeting...".format(client.addrport()))
client.send("Greetings from Miniboa-py3!\n")
def _on_disconnect(client):
"""
Placeholder lost connection handler.
"""
    logging.info("-- Lost connection to {}".format(client.addrport()))
#-----------------------------------------------------------------Telnet Server
class TelnetServer(object):
"""
Poll sockets for new connections and sending/receiving data from clients.
"""
def __init__(self, port=7777, address='', on_connect=_on_connect,
on_disconnect=_on_disconnect, max_connections=MAX_CONNECTIONS,
timeout=0.05):
"""
Create a new Telnet Server.
port -- Port to listen for new connection on. On UNIX-like platforms,
you made need root access to use ports under 1025.
address -- Address of the LOCAL network interface to listen on. You
can usually leave this blank unless you want to restrict traffic
to a specific network device. This will usually NOT be the same
as the Internet address of your server.
on_connect -- function to call with new telnet connections
on_disconnect -- function to call when a client's connection dies,
either through a terminated session or client.active being set
to False.
max_connections -- maximum simultaneous the server will accept at once
timeout -- amount of time that Poll() will wait from user input
before returning. Also frees a slice of CPU time.
"""
self.port = port
self.address = address
self.on_connect = on_connect
self.on_disconnect = on_disconnect
self.max_connections = min(max_connections, MAX_CONNECTIONS)
self.timeout = timeout
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
server_socket.bind((address, port))
server_socket.listen(5)
        except socket.error as err:
logging.critical("Unable to create the server socket: " + str(err))
raise
self.server_socket = server_socket
self.server_fileno = server_socket.fileno()
# Dictionary of active clients,
# key = file descriptor, value = TelnetClient (see miniboa.telnet)
self.clients = {}
def stop(self):
"""
Disconnects the clients and shuts down the server
"""
for clients in self.client_list():
clients.sock.close()
self.server_socket.close()
def client_count(self):
"""
Returns the number of active connections.
"""
return len(self.clients)
def client_list(self):
"""
Returns a list of connected clients.
"""
return self.clients.values()
def poll(self):
"""
Perform a non-blocking scan of recv and send states on the server
and client connection sockets. Process new connection requests,
read incomming data, and send outgoing data. Sends and receives may
be partial.
"""
# Build a list of connections to test for receive data pending
recv_list = [self.server_fileno] # always add the server
del_list = [] # list of clients to delete after polling
for client in self.clients.values():
if client.active:
recv_list.append(client.fileno)
else:
self.on_disconnect(client)
del_list.append(client.fileno)
# Delete inactive connections from the dictionary
for client in del_list:
del self.clients[client]
# Build a list of connections that need to send data
send_list = []
for client in self.clients.values():
if client.send_pending:
send_list.append(client.fileno)
# Get active socket file descriptors from select.select()
try:
rlist, slist, elist = select.select(recv_list, send_list, [],
self.timeout)
except select.error as err:
# If we can't even use select(), game over man, game over
logging.critical("SELECT socket error '{}'".format(str(err)))
raise
# Process socket file descriptors with data to recieve
for sock_fileno in rlist:
# If it's coming from the server's socket then this is a new connection request.
if sock_fileno == self.server_fileno:
try:
sock, addr_tup = self.server_socket.accept()
except socket.error as err:
                    logging.error("ACCEPT socket error '{}'.".format(err))
continue
# Check for maximum connections
if self.client_count() >= self.max_connections:
logging.warning("Refusing new connection, maximum already in use.")
sock.close()
continue
# Create the client instance
new_client = TelnetClient(sock, addr_tup)
# Add the connection to our dictionary and call handler
self.clients[new_client.fileno] = new_client
self.on_connect(new_client)
else:
# Call the connection's recieve method
try:
self.clients[sock_fileno].socket_recv()
except ConnectionLost:
self.clients[sock_fileno].deactivate()
# Process sockets with data to send
for sock_fileno in slist:
# Call the connection's send method
self.clients[sock_fileno].socket_send()
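# Illustrative usage sketch (assumption, not part of the original module):
#
#     if __name__ == '__main__':
#         import logging
#         logging.basicConfig(level=logging.INFO)
#         server = TelnetServer(port=7777)   # uses the placeholder handlers above
#         try:
#             while True:
#                 server.poll()              # pumps connections roughly 20x per second
#         except KeyboardInterrupt:
#             server.stop()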
| gpl-3.0 | 1,876,447,987,906,235,600 | 34.324176 | 93 | 0.591383 | false | 4.662074 | false | false | false |
Videoclases/videoclases | fabfile.py | 1 | 5559 | from fabric.contrib import django as ddd
import django
ddd.project("project")
django.setup()
import getpass
import os
import time
from django.contrib.auth.models import User
from django.utils import timezone
from fabric.api import env, require, run, sudo, cd, local, get
from project.fabfile_secret import *
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
file_name = 'videoclases/project/settings_secret.py'
template_name = 'videoclases/project/settings_secret.py.template'
def _load_data(reboot=False):
local('python manage.py makemigrations')
local('python manage.py migrate')
if reboot:
fixtures = ['devgroups', 'devusers', 'devschool', 'devcourses', 'devstudents', 'devteachers',
'devhomeworks']
for f in fixtures:
local('python manage.py loaddata ' + f)
# fab devserver -> states that you will connect to devserver server
def devserver():
env.hosts = [env.server_name]
# activates videoclases virtualenv in server
def virtualenv(command, use_sudo=False):
if use_sudo:
func = sudo
else:
func = run
func('source %sbin/activate && %s' % (env.virtualenv_root, command))
# creates file in ~/
# usage: fab devserver test_connection
def test_connection():
require('hosts', provided_by=[devserver])
virtualenv('echo "It works!" > fabric_connection_works.txt')
# util for prompt confirmation
def _confirm():
prompt = "Please confirm you want to sync the branch 'master' in the server 'buho'"
prompt = '%s [%s/%s]: ' % (prompt, 'y', 'n')
while True:
ans = raw_input(prompt)
if not ans:
print 'Please answer Y or N.'
continue
if ans not in ['y', 'Y', 'n', 'N']:
print 'Please answer Y or N.'
continue
if ans == 'y' or ans == 'Y':
return True
if ans == 'n' or ans == 'N':
return False
# updates dev server project from git repository
def update():
require('hosts', provided_by=[devserver])
with cd(env.repo_root):
run('git pull origin master')
# installs requirements in server
def install_requirements():
require('hosts', provided_by=[devserver])
virtualenv('pip install -q -r %(requirements_file)s' % env)
# aux function for calling manage.py functions
def manage_py(command, use_sudo=False):
require('hosts', provided_by=[devserver])
with cd(env.manage_dir):
virtualenv('python manage.py %s' % command, use_sudo)
# syncs db in server
def makemigrations():
require('hosts', provided_by=[devserver])
manage_py('makemigrations')
# south migrate for db
def migrate():
require('hosts', provided_by=[devserver])
manage_py('migrate')
# collects static files
def collectstatic():
require('hosts', provided_by=[devserver])
manage_py('collectstatic --noinput')
# restarts apache in server
def reload():
require('hosts', provided_by=[devserver])
sudo('service apache2 restart')
# deploy on development server
def deploy():
require('hosts', provided_by=[devserver])
if _confirm():
update()
install_requirements()
makemigrations()
migrate()
collectstatic()
reload()
else:
        print 'Deploy cancelled'
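# Typical invocations (illustrative):
#   fab devserver deploy   # push master to the dev server and restart Apache
#   fab install            # local first-time setup; prompts for a teacher account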
# sync and migrate local db and start server
def restart(reboot=False):
_load_data(reboot)
local('python manage.py runserver 0.0.0.0:8000')
# reset local db and start server
def reboot():
try:
local('rm db.sqlite3')
except:
pass
restart(True)
def _create_teacher():
print '---------------------------------------'
print 'Now you will be asked for the necessary data to create a Professor.'
from videoclases.models.course import Course
from videoclases.models.teacher import Teacher
from videoclases.models.school import School
username = raw_input('Insert username: ')
password = getpass.getpass('Insert password: ')
password2 = getpass.getpass('Confirm password: ')
while password != password2:
print 'Passwords were not equal.'
password = getpass.getpass('Insert password again: ')
password2 = getpass.getpass('Confirm password: ')
first_name = raw_input('Insert first name: ')
last_name = raw_input('Insert last name: ')
school = raw_input('Insert school name: ')
course = raw_input('Insert course name: ')
user = User.objects.create_user(username=username, password=password)
user.first_name = first_name
user.last_name = last_name
user.save()
School.objects.create(name=school).save()
co = School.objects.get(name=school)
Course.objects.create(name=course, school=co, year=timezone.now().year).save()
cu = Course.objects.get(name=course, school=co, year=timezone.now().year)
Teacher.objects.create(user=user, school=co)
p = Teacher.objects.get(user=user, school=co)
p.courses.add(cu)
p.save()
def install():
local('cp ' + os.path.join(BASE_DIR, template_name) + ' ' + os.path.join(BASE_DIR, file_name))
_load_data()
local('python manage.py collectstatic --noinput -l')
local('python manage.py test')
local('python manage.py loaddata devgroups')
_create_teacher()
local('python manage.py runserver 0.0.0.0:8000')
def install_with_data():
local('cp ' + os.path.join(BASE_DIR, template_name) + ' ' + os.path.join(BASE_DIR, file_name))
_load_data(True)
local('python manage.py collectstatic --noinput -l')
local('python manage.py test')
local('python manage.py runserver 0.0.0.0:8000') | gpl-3.0 | 8,812,348,572,361,951,000 | 30.771429 | 101 | 0.655334 | false | 3.607398 | false | false | false |
damonmcminn/rosalind | algorithmic-heights/ddeg.py | 1 | 1230 | from sys import argv
def vertexCounts(xs):
# @xs list: edge list
# @return dict: map of vertex counts
firstLine = xs.pop(0)
vertices,edges = firstLine
counts = {}
while vertices > 0:
counts[vertices] = 0
vertices -= 1
for x in xs:
v1,v2 = x
counts[v1] += 1
counts[v2] += 1
return counts
def vertexNeighbours(xs):
# @xs list: edge list
# @return dict: map of list of vertex neighbours
vertices = xs.pop(0).pop(0)
neighbours = {}
while vertices > 0:
neighbours[vertices] = []
vertices -= 1
for pair in xs:
v1,v2 = pair
if v2 not in neighbours[v1]:
neighbours[v1].append(v2)
if v1 not in neighbours[v2]:
neighbours[v2].append(v1)
return neighbours
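# Small worked example (illustrative): for the input
#   5 4
#   1 2
#   2 3
#   1 3
#   1 4
# vertex 1 has neighbours {2, 3, 4} with degrees 2, 2 and 1, so the value
# printed for vertex 1 is 5.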
# edge list
data = open(argv[1], 'r').read().splitlines()
edgeList = [map(int, pair.split(' ')) for pair in data]
d1 = list(edgeList)
d2 = list(edgeList)
counts = vertexCounts(d1)
neighbours = vertexNeighbours(d2)
# this assumes ^ neighbours keys are sorted...
for vertex,neighbours in neighbours.iteritems():
total = 0
for neighbour in neighbours:
total += counts[neighbour]
print total,
| gpl-2.0 | -3,870,982,205,921,560,000 | 21.363636 | 55 | 0.600813 | false | 3.245383 | false | false | false |
disulfidebond/ROO | runroo.py | 1 | 12469 | #!/usr/bin/python
import argparse
import sys
from runroo import clusterSSH
from runroo import qsubSSH
def formatCommandClusterSSH(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
if 'node' in c_dict:
t = c_dict['node']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'node\' not found')
if 'nodeNM' in c_dict:
t = c_dict['nodeNM']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'nodeNM\' not found')
return l
def formatCommandQsubSSH(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
if 'allocationName' in c_dict:
t = c_dict['allocationName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'allocationName\' not found')
if 'wallTime' in c_dict:
t = c_dict['wallTime']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'wallTime\' not found')
if 'queueName' in c_dict:
t = c_dict['queueName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline , command \'queueName\' not found')
return l
def formatCommandSingleNodeSSH(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
return l
def formatCommandClusterLSF(c_dict, l):
if 'n_ct' in c_dict:
t = c_dict['n_ct']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'n_ct\' not found')
sys.exit()
if 'queueName' in c_dict:
t = c_dict['queueName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'queueName\' not found')
sys.exit()
if 'jobName' in c_dict:
t = c_dict['jobName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'jobName\' not found')
sys.exit()
if 'projectName' in c_dict:
t = c_dict['projectName']
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'projectName\' not found')
sys.exit()
if 'wallTime' in c_dict:
t = c_dict['wallTime'] # format wallTime for LSF as NN not NN:NN:NN
l.append(t)
else:
print('Error, please check the formatting in the Commandline, command \'wallTime\' not found')
sys.exit()
return l
def parseSimpleCommandList(s_list):
listlen = len(s_list)
ct = 0
commandsList = []
while ct < listlen:
s_list_row = s_list[ct]
        if s_list_row[0] != '#':
            commandsList.append(s_list_row)
        ct += 1
    return commandsList
def parseComplexCommandList(c_list):
listlen = len(c_list)
ct = 0
commandsList = []
    while ct < listlen:
        c_list_row = c_list[ct]
        c_list_row_items = c_list_row.split(',')
        if len(c_list_row_items) == 1:
            commandsList.append(c_list_row_items)
        else:
            # Assumed completion of this stub: collect the key=value pairs into a dict
            c_list_row_dict = dict()
            for item in c_list_row_items:
                key, _, value = item.partition('=')
                c_list_row_dict[key] = value
            commandsList.append(c_list_row_dict)
        ct += 1
    return commandsList
def inputStartCommandFile(f):
l = []
with open(f, 'r') as cF:
for i in cF:
i = i.rstrip('\r\n')
l.append(i)
return l
def formatDescription():
print('Options for the command file:')
    print('1) No command file: create a text document with the following\n###\nNONE ')
print('2) Typical commandFile:\n\n###\ninputFile=\n')
print('3) ###\ncommandFormat= # can be one of: \'clusterSSH, qsubSSH, clusterLSF, single-node-SSH\'\n')
print('### \n# clusterSSH:')
print('node=,n_ct=,nodeNM=\'\'')
print('### \n# qsubSSH:')
    print('n_ct=,allocationName=,wallTime=,queueName=')
print('###\n clusterLSF:')
    print('n_ct=,queueName=,jobName=,projectName=,wallTime=')
print('###\n single-node-SSH:')
    print('n_ct=')
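# Illustrative example (assumption): a minimal commands file for the qsubSSH
# format described above might look like
#
#   ###
#   commandFormat=qsubSSH
#   ###
#   n_ct=16,allocationName=myAlloc,wallTime=01:00:00,queueName=normal
#
# Lines starting with '#' act as separators; parseStartCommandFile() below
# reads the format line first and then the comma-separated key=value row.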
def parseStartCommandFile(l):
lRange = len(l)
l_1 = l[1]
parsedCommandList = []
l1_split = l_1.split('=')
try:
tmpVal = l1_split[1]
except IndexError:
if l1_split[0] == 'NONE':
return (0, [])
else:
print('Error, check formatting in commandsFile')
for i in l:
print(i)
sys.exit()
cvalue = ""
cvalue_list = []
cvalue_returnList = []
rowCt = 0
for i in xrange(0, lRange):
iRow = l[i]
if iRow[0] == '#':
continue
else: # 'clusterSSH, qsubSSH, clusterLSF, single-node-SSH'
if rowCt == 0:
iRow_split = iRow.split('=')
if iRow_split[1] == 'clusterSSH':
cvalue = iRow_split[1]
elif iRow_split[1] == 'qsubSSH':
cvalue = iRow_split[1]
elif iRow_split[1] == 'clusterLSF':
cvalue = iRow_split[1]
elif iRow_split[1] == 'single-node-SSH':
cvalue = iRow_split[1]
else:
print('Error, please check command line of commands File')
sys.exit()
rowCt += 2
elif rowCt == 2:
cvalue_tmp = dict()
                iRow_split = iRow.split(',')
cvalue_list.append(cvalue)
for v in iRow_split:
v_tmp = v.split('=')
cvalue_tmp[v_tmp[0]] = v_tmp[1]
if cvalue == 'clusterSSH': # n_ct, node, nodeNM
cvalue_returnList = formatCommandClusterSSH(cvalue_tmp, cvalue_list)
elif cvalue == 'qsubSSH': # n_ct, allocationName, wallTime, queueName
cvalue_returnList = formatCommandQsubSSH(cvalue_tmp, cvalue_list)
elif cvalue == 'clusterLSF': # n_ct, queueName, jobName, projectName, wallTime
cvalue_returnList = formatCommandClusterLSF(cvalue_tmp, cvalue_list)
elif cvalue == 'single-node-SSH': # n_ct
cvalue_returnList = formatCommandSingleNodeSSH(cvalue_tmp, cvalue_list)
else:
print('Error, action command in command file not recognized.')
sys.exit()
rowCt += 2
else:
continue
return (1, cvalue_returnList)
def main():
parser = argparse.ArgumentParser(description='Remote Organizational and Operational Tool: Root')
parser.add_argument('-a', '--action', choices=['check', 'start', 'stop', 'restart'], help='check monitors a run in progress, start begins a new run, stop halts a run, restart restarts a previously stopped run')
parser.add_argument('-i', '--inputFile', help='input file, its use is dependent on the action. Ignored for \'check\' and \'stop\' actions.')
parser.add_argument('-f', '--commandfile', help='file with formatted commands for the desired action. Note that this is REQUIRED, even if commandline arguments will be provided.')
parser.add_argument('-c', '--commandline', help='commandline arguments added directly to the program, not recommended.')
    parser.add_argument('-s', '--show', action='store_true', help='show format description for command file')
args = parser.parse_args()
if args.show:
formatDescription()
sys.exit()
if args.action == 'check':
# code stub, implementation incomplete
print(args.action)
sys.exit()
if not args.commandfile:
print('No command file found, hope you know what you\'re doing! Attempting to monitor run with the provided parameters')
else:
print('Checking command file before proceeding.')
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
            if checkRes[0] == 1:
                pass  # TODO: monitor the run using the parsed command file
            else:
                pass  # proceed with no commandsFile
elif args.action == 'stop':
# code stub, implementation incomplete
print(args.action)
sys.exit()
if not args.commandfile:
print('No command file found, hope you know what you\'re doing! Attempting to halt run with the provided parameters')
else:
print('Checking command file before proceeding.')
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
            if checkRes[0] == 1:
                pass  # TODO: halt the run using the parsed command file
            else:
                pass  # proceed with no commandsFile
elif args.action == 'restart':
# code stub, implementation incomplete
print(args.action)
sys.exit()
        if not args.commandfile:
print('No command file has been found, and a command file is required for the restart action. If you are ABSOLUTELY sure that you do not want to use a command file, create a text file with ####\nNONE as the command file.')
sys.exit()
else:
print('Using command file ')
            print(args.commandfile)
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
if not args.inputFile:
print('No input file found, please check settings.')
sys.exit()
else:
print('Using input file ')
print(args.inputFile)
            if checkRes[0] == 1:
                pass  # TODO: restart the run using the parsed command file
            elif args.commandline:
                pass  # TODO: parse commandline arguments and restart the run
else:
print('Sorry, the command file was not read, and commands were not readable via commandline. Please chack the formatting and retry.\n\nNote that a command file will always be checked first, and to force commandline use you must add the line\n\n ###\nNONE \n\n to a command file')
sys.exit()
elif args.action == 'start':
        if not args.commandfile:
print('No command file has been found, and a command file is required for the start action. If you are ABSOLUTELY sure that you do not want to use a command file, create a text file with ####\nNONE as the command file.')
sys.exit()
else:
print('Using command file ')
            print(args.commandfile)
print('for start action')
cFile = inputStartCommandFile(args.commandfile)
checkRes = parseStartCommandFile(cFile)
if not args.inputFile:
print('No input file found, please check settings.')
sys.exit()
else:
print('Using input file ')
print(args.inputFile)
print('for start action')
if checkRes[0] == 1:
                args4Commands = checkRes[1]
if args4Commands[0] == 'clusterSSH':
clusterSSH(args.inputFile, args4Commands[1], args4Commands[2],args4Commands[3])
elif args4Commands[0] == 'qsubSSH':
qsubSSH(args.inputFile, args4Commands[1], args4Commands[2], args4Commands[3], args4Commands[4])
elif args4Commands[0] == 'clusterLSF':
print('Not implemented yet')
sys.exit()
clusterLSF(args.inputFile, args4Commands[1], args4Commands[2], args4Commands[3], args4Commands[4], args4Commands[5])
elif args4Commands[0] == 'single-node-SSH':
print('Not implemented yet')
sys.exit()
singleNodeSSH(args.inputFile, args4Commands[1])
            elif args.commandline:
                pass  # TODO: parse commandline arguments, determine the action type, and start the action
else:
print('Sorry, the command file was not read, and commands were not readable via commandline. Please chack the formatting and retry.\n\nNote that a command file will always be checked first, and to force commandline use you must add the line\n\n ###\nNONE \n\n to a command file')
sys.exit()
else:
print('error, unrecognized input!')
sys.exit()
if __name__ == "__main__":
main()
| mit | 4,549,582,067,404,025,000 | 38.334385 | 292 | 0.574304 | false | 3.861567 | false | false | false |
fisele/slimta-abusix | test/test_slimta_smtp_client.py | 1 | 12950 | import unittest2 as unittest
from mox3.mox import MoxTestBase, IsA
from gevent.socket import socket
from slimta.smtp.client import Client, LmtpClient
from slimta.smtp.reply import Reply
class TestSmtpClient(unittest.TestCase, MoxTestBase):
def setUp(self):
super(TestSmtpClient, self).setUp()
self.sock = self.mox.CreateMock(socket)
self.sock.fileno = lambda: -1
self.sock.getpeername = lambda: ('test', 0)
self.tls_args = {'test': 'test'}
def test_get_reply(self):
self.sock.recv(IsA(int)).AndReturn(b'421 Test\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.get_reply(b'[TEST]')
self.assertEqual('421', reply.code)
self.assertEqual('4.0.0 Test', reply.message)
self.assertEqual(b'[TEST]', reply.command)
def test_get_banner(self):
self.sock.recv(IsA(int)).AndReturn(b'220 Go\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.get_banner()
self.assertEqual('220', reply.code)
self.assertEqual('Go', reply.message)
self.assertEqual(b'[BANNER]', reply.command)
def test_custom_command(self):
self.sock.sendall(b'cmd arg\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.custom_command(b'cmd', b'arg')
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'CMD', reply.command)
def test_ehlo(self):
self.sock.sendall(b'EHLO there\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250-Hello there\r\n250-TEST arg\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 EXTEN\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.ehlo('there')
self.assertEqual('250', reply.code)
self.assertEqual('Hello there', reply.message)
self.assertEqual(b'EHLO', reply.command)
self.assertTrue('TEST' in client.extensions)
self.assertTrue('EXTEN' in client.extensions)
self.assertEqual('arg', client.extensions.getparam('TEST'))
def test_helo(self):
self.sock.sendall(b'HELO there\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 Hello\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.helo('there')
self.assertEqual('250', reply.code)
self.assertEqual('Hello', reply.message)
self.assertEqual(b'HELO', reply.command)
def test_starttls(self):
sock = self.mox.CreateMockAnything()
sock.fileno = lambda: -1
sock.getpeername = lambda: ('test', 0)
sock.sendall(b'STARTTLS\r\n')
sock.recv(IsA(int)).AndReturn(b'220 Go ahead\r\n')
sock.tls_wrapper(sock, self.tls_args).AndReturn(sock)
self.mox.ReplayAll()
client = Client(sock, tls_wrapper=sock.tls_wrapper)
reply = client.starttls(self.tls_args)
self.assertEqual('220', reply.code)
self.assertEqual('2.0.0 Go ahead', reply.message)
self.assertEqual(b'STARTTLS', reply.command)
def test_starttls_noencrypt(self):
self.sock.sendall(b'STARTTLS\r\n')
self.sock.recv(IsA(int)).AndReturn(b'420 Nope\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.starttls({})
self.assertEqual('420', reply.code)
self.assertEqual('4.0.0 Nope', reply.message)
self.assertEqual(b'STARTTLS', reply.command)
def test_auth(self):
self.sock.sendall(b'AUTH PLAIN AHRlc3RAZXhhbXBsZS5jb20AYXNkZg==\r\n')
self.sock.recv(IsA(int)).AndReturn(b'235 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
client.extensions.add('AUTH', b'PLAIN')
reply = client.auth('[email protected]', 'asdf')
self.assertEqual('235', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'AUTH', reply.command)
def test_auth_force_mechanism(self):
self.sock.sendall(b'AUTH PLAIN AHRlc3RAZXhhbXBsZS5jb20AYXNkZg==\r\n')
self.sock.recv(IsA(int)).AndReturn(b'535 Nope!\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.auth('[email protected]', 'asdf', mechanism=b'PLAIN')
self.assertEqual('535', reply.code)
self.assertEqual('5.0.0 Nope!', reply.message)
self.assertEqual(b'AUTH', reply.command)
def test_mailfrom(self):
self.sock.sendall(b'MAIL FROM:<test>\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.mailfrom('test')
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'MAIL', reply.command)
def test_mailfrom_pipelining(self):
self.sock.sendall(b'MAIL FROM:<test>\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
client.extensions.add('PIPELINING')
reply = client.mailfrom('test')
self.assertEqual(None, reply.code)
self.assertEqual(None, reply.message)
self.assertEqual(b'MAIL', reply.command)
client._flush_pipeline()
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
def test_mailfrom_size(self):
self.sock.sendall(b'MAIL FROM:<test> SIZE=10\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
client.extensions.add('SIZE', 100)
reply = client.mailfrom('test', 10)
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'MAIL', reply.command)
def test_rcptto(self):
self.sock.sendall(b'RCPT TO:<test>\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.rcptto('test')
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'RCPT', reply.command)
def test_rcptto_pipelining(self):
self.sock.sendall(b'RCPT TO:<test>\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
client.extensions.add('PIPELINING')
reply = client.rcptto('test')
self.assertEqual(None, reply.code)
self.assertEqual(None, reply.message)
self.assertEqual(b'RCPT', reply.command)
client._flush_pipeline()
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
def test_data(self):
self.sock.sendall(b'DATA\r\n')
self.sock.recv(IsA(int)).AndReturn(b'354 Go ahead\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.data()
self.assertEqual('354', reply.code)
self.assertEqual('Go ahead', reply.message)
self.assertEqual(b'DATA', reply.command)
def test_send_empty_data(self):
self.sock.sendall(b'.\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Done\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.send_empty_data()
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Done', reply.message)
self.assertEqual(b'[SEND_DATA]', reply.command)
def test_send_data(self):
self.sock.sendall(b'One\r\nTwo\r\n..Three\r\n.\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Done\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.send_data(b'One\r\nTwo\r\n.Three')
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Done', reply.message)
self.assertEqual(b'[SEND_DATA]', reply.command)
def test_rset(self):
self.sock.sendall(b'RSET\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.rset()
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'RSET', reply.command)
def test_quit(self):
self.sock.sendall(b'QUIT\r\n')
self.sock.recv(IsA(int)).AndReturn(b'221 Bye\r\n')
self.mox.ReplayAll()
client = Client(self.sock)
reply = client.quit()
self.assertEqual('221', reply.code)
self.assertEqual('2.0.0 Bye', reply.message)
self.assertEqual(b'QUIT', reply.command)
class TestLmtpClient(unittest.TestCase, MoxTestBase):
def setUp(self):
super(TestLmtpClient, self).setUp()
self.sock = self.mox.CreateMock(socket)
self.sock.fileno = lambda: -1
self.sock.getpeername = lambda: ('test', 0)
self.tls_args = {'test': 'test'}
def test_ehlo_invalid(self):
client = LmtpClient(self.sock)
self.assertRaises(NotImplementedError, client.ehlo, 'there')
def test_helo_invalid(self):
client = LmtpClient(self.sock)
self.assertRaises(NotImplementedError, client.helo, 'there')
def test_lhlo(self):
self.sock.sendall(b'LHLO there\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250-Hello there\r\n250-TEST arg\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 EXTEN\r\n')
self.mox.ReplayAll()
client = LmtpClient(self.sock)
reply = client.lhlo('there')
self.assertEqual('250', reply.code)
self.assertEqual('Hello there', reply.message)
self.assertEqual(b'LHLO', reply.command)
self.assertTrue('TEST' in client.extensions)
self.assertTrue('EXTEN' in client.extensions)
self.assertEqual('arg', client.extensions.getparam('TEST'))
def test_rcptto(self):
self.sock.sendall(b'RCPT TO:<test>\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n')
self.mox.ReplayAll()
client = LmtpClient(self.sock)
reply = client.rcptto('test')
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'RCPT', reply.command)
self.assertEqual([('test', reply)], client.rcpttos)
def test_rset(self):
self.sock.sendall(b'RSET\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
self.mox.ReplayAll()
client = LmtpClient(self.sock)
client.rcpttos = 'testing'
reply = client.rset()
self.assertEqual('250', reply.code)
self.assertEqual('2.0.0 Ok', reply.message)
self.assertEqual(b'RSET', reply.command)
self.assertEqual([], client.rcpttos)
def test_send_data(self):
self.sock.sendall(b'One\r\nTwo\r\n..Three\r\n.\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n'
b'550 5.0.0 Not Ok\r\n')
self.mox.ReplayAll()
client = LmtpClient(self.sock)
client.rcpttos = [('test1', Reply('250')),
('test2', Reply('250')),
('test3', Reply('550'))]
replies = client.send_data(b'One\r\nTwo\r\n.Three')
self.assertEqual(2, len(replies))
self.assertEqual('test1', replies[0][0])
self.assertEqual('250', replies[0][1].code)
self.assertEqual('2.0.0 Ok', replies[0][1].message)
self.assertEqual(b'[SEND_DATA]', replies[0][1].command)
self.assertEqual('test2', replies[1][0])
self.assertEqual('550', replies[1][1].code)
self.assertEqual('5.0.0 Not Ok', replies[1][1].message)
self.assertEqual(b'[SEND_DATA]', replies[1][1].command)
def test_send_empty_data(self):
self.sock.sendall(b'.\r\n')
self.sock.recv(IsA(int)).AndReturn(b'250 2.0.0 Ok\r\n'
b'550 5.0.0 Not Ok\r\n')
self.mox.ReplayAll()
client = LmtpClient(self.sock)
client.rcpttos = [('test1', Reply('250')),
('test2', Reply('250')),
('test3', Reply('550'))]
replies = client.send_empty_data()
self.assertEqual(2, len(replies))
self.assertEqual('test1', replies[0][0])
self.assertEqual('250', replies[0][1].code)
self.assertEqual('2.0.0 Ok', replies[0][1].message)
self.assertEqual(b'[SEND_DATA]', replies[0][1].command)
self.assertEqual('test2', replies[1][0])
self.assertEqual('550', replies[1][1].code)
self.assertEqual('5.0.0 Not Ok', replies[1][1].message)
self.assertEqual(b'[SEND_DATA]', replies[1][1].command)
# vim:et:fdm=marker:sts=4:sw=4:ts=4
| mit | -1,104,638,757,514,313,500 | 39.46875 | 82 | 0.605405 | false | 3.276822 | true | false | false |
matt77hias/FingerprintCompression | src/compression.py | 1 | 12580 | '''
3 Fingerprint compression
3.1 Compression
@author Matthias Moulin & Vincent Peeters
@version 1.0
'''
import cost
import numpy as np
import pywt
import quadtree
import utils
import wsq
###############################################################################
# COMPRESSION FUNCTIONS
###############################################################################
def compress_dwt2(S, fraction, wavelet="db4", mode=pywt.MODES.ppd, level=4, stats=[]):
'''
Computes the 2D discrete wavelet transformation for the given 2D input signal.
Sets all coefficients with an absolute value below the threshold * maximum of the absolute
values of the coefficients to zero.
Returns the inverse 2D discrete wavelet transformation for the modified coefficients
of the 2D discrete wavelet transformation.
@param S: Input signal.
Both single and double precision floating-point data types are supported
and the output type depends on the input type. If the input data is not
in one of these types it will be converted to the default double precision
data format before performing computations.
    @param fraction: The fraction of the maximum absolute coefficient value used as the hard threshold.
@param wavelet: Wavelet to use in the transform.
This must be a name of the wavelet from the wavelist() list.
@param mode: Signal extension mode to deal with the border distortion problem.
The default mode is periodization.
@param level: Number of decomposition steps to perform.
@return: The inverse 2D discrete wavelet transformation for the modified coefficients
of the 2D discrete wavelet transformation.
'''
# 2D discrete wavelet transform
A = pywt.wavedec2(S, wavelet=wavelet, mode=mode, level=level)
# Compression
maximum = np.amax(abs(A[0]))
for (CH, CV, CD) in A[1:]:
maximum = max(maximum, np.amax(abs(CH)), np.amax(abs(CV)), np.amax(abs(CD)))
threshold = fraction * maximum
B = [pywt.thresholding.hard(A[0], threshold, 0)]
for (CH, CV, CD) in A[1:]:
CCH = pywt.thresholding.hard(CH, threshold, 0)
CCV = pywt.thresholding.hard(CV, threshold, 0)
CCD = pywt.thresholding.hard(CD, threshold, 0)
B.append((CCH, CCV, CCD))
n = utils.number_of_large_coeffs(utils.concat_coeffs2(B), threshold=threshold)
stats.append(n)
# 2D inverse discrete wavelet transform
return pywt.waverec2(B, wavelet=wavelet, mode=mode)
def compress_wp2(S, fraction, costf=cost.cost_shannon, wavelet="db4", mode=pywt.MODES.ppd, level=4, stats=[]):
'''
Computes the 2D discrete wavelet packet transformation, with the best basis according
to the given cost function, for the given 2D input signal.
Sets all coefficients with an absolute value below the threshold * maximum of the absolute
values of the coefficients to zero.
Returns the inverse 2D discrete wavelet packet transformation for the modified coefficients
of the 2D discrete wavelet packet transformation.
@param S: Input signal.
Both single and double precision floating-point data types are supported
and the output type depends on the input type. If the input data is not
in one of these types it will be converted to the default double precision
data format before performing computations.
    @param fraction: The fraction of the maximum absolute coefficient value used as the hard threshold.
@param costf: The (single parameter) cost function that must be used while
searching for the best basis.
@param wavelet: Wavelet to use in the transform.
This must be a name of the wavelet from the wavelist() list.
@param mode: Signal extension mode to deal with the border distortion problem.
The default mode is periodization.
@param level: Number of decomposition steps to perform.
@return: The inverse 2D discrete wavelet packet transformation for the modified coefficients
of the 2D discrete wavelet packet transformation.
'''
# 2D discrete wavelet packet transform
Nodes = quadtree.wp2(S, costf, wavelet=wavelet, mode=mode, level=level)
# Compression
maximum = -1
for Node in Nodes:
maximum = max(maximum, np.amax(abs(Node.C)))
threshold = fraction * maximum
for Node in Nodes:
Node.C = pywt.thresholding.hard(Node.C, threshold, 0)
n = 0
for Node in Nodes:
n = n + utils.number_of_large_coeffs(Node.C, threshold=threshold)
stats.append(n)
# 2D inverse discrete wavelet packet transform
return quadtree.iwp2(Nodes, wavelet=wavelet, mode=mode)
def compress_sd(S, fraction, wavelet="db4", mode=pywt.MODES.ppd, stats=[]):
'''
Computes the subband decomposition for fingerprints for the given 2D input signal.
Sets all coefficients with an absolute value below the threshold * maximum of the absolute
values of the coefficients to zero.
Returns the inverse subband decomposition for fingerprints for the modified coefficients
of the subband decomposition for fingerprints.
@param S: Input signal.
Both single and double precision floating-point data types are supported
and the output type depends on the input type. If the input data is not
in one of these types it will be converted to the default double precision
data format before performing computations.
    @param fraction: The fraction of the maximum absolute coefficient value used as the hard threshold.
@param wavelet: Wavelet to use in the transform.
This must be a name of the wavelet from the wavelist() list.
@param mode: Signal extension mode to deal with the border distortion problem.
The default mode is periodization.
@return: The inverse subband decomposition for fingerprints for the modified coefficients
of the subband decomposition for fingerprints.
'''
# 2D discrete wavelet packet transform
Nodes = wsq.sd(S, wavelet=wavelet, mode=mode)
# Compression
maximum = -1
for Node in Nodes:
maximum = max(maximum, np.amax(abs(Node.C)))
threshold = fraction * maximum
for Node in Nodes:
Node.C = pywt.thresholding.hard(Node.C, threshold, 0)
n = 0
for Node in Nodes:
n = n + utils.number_of_large_coeffs(Node.C, threshold=threshold)
stats.append(n)
# Inverse subband decomposition for fingerprints
return wsq.isd(Nodes, wavelet=wavelet, mode=mode)
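# Hedged example (added for illustration, not part of the original module):
# compress_dwt2, compress_wp2 and compress_sd all share the same hard
# thresholding rule - coefficients whose magnitude is below
# fraction * max(|coefficients|) are replaced by zero. The toy coefficient
# array below is made up for the sketch.
def _example_hard_threshold(fraction=0.5):
    C = np.array([0.1, -0.4, 2.0, -1.5])
    threshold = fraction * np.amax(abs(C))
    # Expected result for fraction=0.5: array([ 0. ,  0. ,  2. , -1.5])
    return pywt.thresholding.hard(C, threshold, 0)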
###############################################################################
# COMPRESSION UTILITIES
###############################################################################
def mse(S1, S2):
'''
Returns the mean squared error of the compressed 2D signal S2
against the original 2D signal S1.
@param S1: The original 2D signal
@param S2: The compressed 2D signal
'''
D = S1-S2
return (float(np.sum(np.multiply(D, D)))) / (D.shape[0]*D.shape[1])
def best_fit(S1, S2):
(m, n) = S1.shape
(p, q) = S2.shape
bi = bj = -1
best = np.inf
for i in range(p - m + 1):
for j in range(q - n + 1):
error = mse(S1, S2[i:i+m,j:j+n])
if error < best:
best = error
bi = i
bj = j
return (S2[bi:bi+m,bj:bj+n], best)
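# Hedged example (added for illustration, not part of the original module):
# mse() compares two equally shaped 2D arrays, while best_fit() slides the
# smaller array S1 over S2 and returns the sub-window of S2 with the lowest
# mean squared error. The toy arrays below are made up for the sketch.
def _example_mse_best_fit():
    A = np.arange(16, dtype=np.float64).reshape(4, 4)
    B = np.zeros((6, 6))
    B[1:5, 1:5] = A
    assert mse(A, A) == 0.0
    window, error = best_fit(A, B)
    # The best matching window recovered from B is A itself.
    assert error == 0.0 and window.shape == A.shape
    return window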
###############################################################################
# TESTS
###############################################################################
import configuration as c
import cv2
import pylab
write_intermediate_results = True
# Note that it would of course be cleaner to run the analysis once and only
# change the fraction between the analysis and the synthesis steps,
# but this is just a test method
def compare(fname, fractions, wavelet="db4", mode=pywt.MODES.ppd, level=4):
stats_dwt2 = []
stats_wp2_s = []
stats_wp2_t = []
S = 255 - cv2.imread(fname, 0)
E1 = np.zeros(fractions.shape)
E2 = np.zeros(fractions.shape)
E3 = np.zeros(fractions.shape)
i = 0
for f in fractions:
R1 = compress_dwt2(S, f, wavelet=wavelet, mode=mode, level=level, stats=stats_dwt2)[level:-level,level:-level]
R2 = compress_wp2(S, f, costf=cost.cost_shannon, wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_s)[level:-level,level:-level]
R3 = compress_wp2(S, f, costf=cost.cost_threshold(0.01), wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_t)[level:-level,level:-level]
R = S[level:-level,level:-level]
(R1, e1) = best_fit(R, R1)
(R2, e2) = best_fit(R, R2)
(R3, e3) = best_fit(R, R3)
if write_intermediate_results:
S1 = 255 - np.array(R1, dtype=np.uint8)
S2 = 255 - np.array(R2, dtype=np.uint8)
S3 = 255 - np.array(R3, dtype=np.uint8)
cv2.imwrite(str(i) + "_dwt_" + str(f) + " " + str(e1) + ".png", S1)
cv2.imwrite(str(i) + "_wp_s_" + str(f) + " " + str(e2) + ".png", S2)
cv2.imwrite(str(i) + "_wp_t_" + str(f) + " " + str(e3) + ".png", S3)
E1[i] = e1
E2[i] = e2
E3[i] = e3
i = i + 1
pylab.figure()
pylab.loglog(fractions, E1, label='DWT')
pylab.loglog(fractions, E2, label='WP Shannon')
pylab.loglog(fractions, E3, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Mean Square Error")
pylab.legend(loc=2)
pylab.show()
pylab.figure()
pylab.loglog(fractions, stats_dwt2, label='DWT')
pylab.loglog(fractions, stats_wp2_s, label='WP Shannon')
pylab.loglog(fractions, stats_wp2_t, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Number of large coefficients")
pylab.legend(loc=2)
pylab.show()
def compare2(fname, fractions, costf=cost.cost_shannon, wavelet="db4", mode=pywt.MODES.ppd):
stats_sd = []
stats_wp2_s = []
stats_wp2_t = []
level = 5
S = 255 - cv2.imread(fname, 0)
E1 = np.zeros(fractions.shape)
E2 = np.zeros(fractions.shape)
E3 = np.zeros(fractions.shape)
i = 0
for f in fractions:
R1 = compress_sd(S, f, wavelet=wavelet, mode=mode, stats=stats_sd)[level:-level,level:-level]
R2 = compress_wp2(S, f, costf=cost.cost_shannon, wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_s)[level:-level,level:-level]
R3 = compress_wp2(S, f, costf=cost.cost_threshold(0.01), wavelet=wavelet, mode=mode, level=level, stats=stats_wp2_t)[level:-level,level:-level]
R = S[level:-level,level:-level]
(R1, e1) = best_fit(R, R1)
(R2, e2) = best_fit(R, R2)
(R3, e3) = best_fit(R, R3)
if write_intermediate_results:
S1 = 255 - np.array(R1, dtype=np.uint8)
S2 = 255 - np.array(R2, dtype=np.uint8)
S3 = 255 - np.array(R3, dtype=np.uint8)
cv2.imwrite(str(i) + "_sd_" + str(f) + " " + str(e1) + ".png", S1)
cv2.imwrite(str(i) + "_wp_s_" + str(f) + " " + str(e2) + ".png", S2)
cv2.imwrite(str(i) + "_wp_t_" + str(f) + " " + str(e3) + ".png", S3)
E1[i] = e1
E2[i] = e2
E3[i] = e3
i = i + 1
pylab.figure()
pylab.loglog(fractions, E1, label='SD')
pylab.loglog(fractions, E2, label='WP Shannon')
pylab.loglog(fractions, E3, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Mean Square Error")
pylab.legend(loc=2)
pylab.show()
pylab.figure()
pylab.loglog(fractions, stats_sd, label='SD')
pylab.loglog(fractions, stats_wp2_s, label='WP Shannon')
pylab.loglog(fractions, stats_wp2_t, label='WP Threshold')
pylab.xlabel("Fraction")
pylab.ylabel("Number of large coefficients")
pylab.legend(loc=2)
pylab.show()
if __name__ == "__main__":
fname = c.get_dir_fingerprints() + "cmp00001.pgm"
fractions = np.append([0.0], np.power(10, np.arange(-20.0, 0.0, 0.5)))
#fractions = np.append([0.0], np.power(10, np.arange(-5.0, 0.0, 1.0)))
compare(fname, fractions)
fname = c.get_dir_fingerprints() + "cmp00002.pgm"
fractions = np.append([0.0], np.power(10, np.arange(-20.0, 0.0, 0.5)))
#fractions = np.append([0.0], np.power(10, np.arange(-5.0, 0.0, 1.0)))
#compare2(fname, fractions) | gpl-3.0 | -393,442,632,637,934,200 | 41.938567 | 151 | 0.596741 | false | 3.513966 | false | false | false |
disqus/pgshovel | src/main/python/pgshovel/replication/streams/kafka.py | 1 | 4243 | from __future__ import absolute_import
import logging
from itertools import imap
from kafka.consumer.simple import SimpleConsumer
from kafka.client import KafkaClient
from pgshovel.replication.validation import validate_state
from pgshovel.interfaces.streams_pb2 import Message
from pgshovel.streams.utilities import prime_for_batch_start
from pgshovel.utilities.protobuf import BinaryCodec
logger = logging.getLogger(__name__)
class KafkaStream(object):
def __init__(self, cluster, set, hosts, topic, prime_threshold):
self.cluster = cluster
self.set = set
self.hosts = hosts
self.topic = topic
self.codec = BinaryCodec(Message)
self.prime_threshold = prime_threshold
def consume(self, state):
"""
Starts consuming from the configured Kafka topic given a possible
existing ``pgshovel.interfaces.replication_pb2:State``.
If the provided ``state`` does not contain a
``stream_state.consumer_state`` value, the ``KafkaStream`` attempts to
start reading from the Kafka topic after first "priming" the stream.
Priming involves consuming messages from the topic looking for a
``BeginOperation``. Any message that is not a ``BeginOperation`` is
dropped, until a ``BeginOperation`` is seen or the ``prime_threshold``
is reached; the latter raises a
``pgshovel.streams.utilities:UnableToPrimeError`` error.
In general, it makes sense to set the ``prime_threshold`` to a value high
enough to exceed the maximum transaction size you expect to see in your
data. In principle the ``prime_threshold`` could be effectively infinite
(the stream could be constructed with ``float('inf')``), but the lack of a
``BeginOperation`` in the stream would then cause the stream to hang,
possibly forever, so the ``prime_threshold`` config parameter is provided
to raise an exception if this unexpected behavior occurs.
"""
consumer = SimpleConsumer(KafkaClient(self.hosts), None, self.topic)
# You can only update one offset at a time with kafka-python, plus
# dealing with reconstituting global order from a partitioned stream is
# hard, and we don't really need to deal with it right now.
assert len(consumer.offsets) == 1
decoded = imap(
lambda (offset, msg): (offset, self.codec.decode(msg.value)),
consumer
)
if state.stream_state.HasField('consumer_state'):
# Seeking to a direct offset was not in the PyPI release of
# kafka-python when this was implemented:
# https://github.com/mumrah/kafka-python/pull/412
current = consumer.offsets[0]
offset = state.stream_state.consumer_state.offset + 1
delta = offset - current
logger.debug('Moving to previous replication log offset: %s (current position: %s)...', offset, current)
consumer.seek(delta, 1)
assert consumer.offsets[0] == offset
else:
logger.info('No consumer state provided, will attempt to prime the stream to the first BeginOperation')
# The call to ``prime_for_batch_start`` "primes" the stream by
# dropping messages until it sees a message that is an instance of
# one of the types in
# ``pgshovel.replication.validation.TRANSACTION_START_EVENT_TYPES``
decoded = prime_for_batch_start(
max_messages=self.prime_threshold,
stream=decoded
)
for offset, message in decoded:
state = validate_state(state, offset, message)
# XXX: This is necessary because of a bug in protocol buffer oneof.
state = type(state).FromString(state.SerializeToString())
yield state, offset, message
@classmethod
def configure(cls, configuration, cluster, set):
topic = '{cluster}.{set}.mutations'.format(cluster=cluster.name, set=set)
return cls(
cluster,
set,
configuration['hosts'],
topic,
configuration.get('prime_threshold', 1000)
)
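# Hedged usage sketch (added for illustration, not part of the original
# module): wiring up a stream from a plain configuration dict and draining
# it. The broker address and the cluster/set/state arguments below are
# assumptions standing in for real pgshovel objects.
def _example_consume(cluster, set, state):
    stream = KafkaStream.configure(
        {'hosts': ['localhost:9092'], 'prime_threshold': 1000},
        cluster,
        set,
    )
    for state, offset, message in stream.consume(state):
        logger.debug('consumed offset %s: %r', offset, message)
    return state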
| apache-2.0 | 2,423,902,336,117,804,500 | 41.858586 | 116 | 0.649305 | false | 4.480465 | true | false | false |
Nyancoins/NyanFaucet | nyanfaucet/nyanfaucet/nyandrill.py | 1 | 1660 | import mandrill
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
"""
import logging
import django.utils.log
class MandrillLogger(django.utils.log.AdminEmailHandler):
def __init__(self, *args, **kwargs):
super(MandrillLogger, self).__init__()
self.client = mandrill.Mandrill(settings.MANDRILL_SECRET)
def send_mail(self, subject, message, *args, **kwargs):
admins = []
for name, email in settings.ADMINS:
admins.append({
'name': name,
'email': email,
})
msg = {
'to': admins,
'subject': subject,
'text': message,
}
print "sending mail", msg
self.client.messages.send(msg)
"""
class MandrillBackend(BaseEmailBackend):
def __init__(self, fail_silently = False, **kwargs):
super(MandrillBackend, self).__init__(fail_silently, **kwargs)
self.client = mandrill.Mandrill(settings.MANDRILL_SECRET)
def send_messages(self, email_messages):
if not email_messages:
return
for msg in email_messages:
"""to = []
for r in msg.recipients():
to.append({
'email': r,
})
mm = {
'to': to,
'subject': msg.subject,
'from_email': msg.from_email,
'text': msg.message().as_bytes(),
}
self.client.messages.send(mm, async=True)"""
self.client.messages.send_raw(raw_message=msg.message().as_bytes(), async=True)
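# Hedged usage sketch (added for illustration, not part of the original
# module): to route Django mail through this backend, point EMAIL_BACKEND
# at it and provide the Mandrill API key. The dotted path below assumes the
# module lives at nyanfaucet/nyandrill.py inside the project package.
#
#   # settings.py
#   EMAIL_BACKEND = 'nyanfaucet.nyandrill.MandrillBackend'
#   MANDRILL_SECRET = 'your-mandrill-api-key'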
| mit | 2,990,257,469,511,667,000 | 28.122807 | 91 | 0.536747 | false | 3.915094 | false | false | false |
anchore/anchore-engine | tests/functional/clients/standalone/test_file_list.py | 1 | 11561 | import pytest
# from result[0]['image']['imagedata']['analysis_report']['file_list']['files.all']['base']
# generated with:
# files_all_subset = [random.choice(list(files_all.items())) for i in range(20)]
files_all_subset = [
(
"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc",
"0o644",
),
(
"/usr/lib64/python3.6/lib2to3/fixes/__pycache__/fix_intern.cpython-36.opt-1.pyc",
"0o644",
),
("/usr/lib/dracut/modules.d/80lvmmerge/README.md", "0o644"),
("/usr/lib64/libip6tc.so.0.1.0", "0o755"),
(
"/usr/lib/python3.6/site-packages/setuptools/_vendor/__pycache__/six.cpython-36.opt-1.pyc",
"0o644",
),
("/usr/lib/.build-id/8e/9191dffa9f716362829472319d7834fadadc5a", "0o777"),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__",
"0o755",
),
("/usr/share/licenses/libseccomp/LICENSE", "0o644"),
("/usr/lib64/python3.6/__pycache__/copy.cpython-36.opt-1.pyc", "0o644"),
("/usr/lib64/python3.6/encodings/__pycache__/cp865.cpython-36.pyc", "0o644"),
("/usr/share/zoneinfo/iso3166.tab", "0o644"),
("/etc/host.conf", "0o644"),
("/usr/share/zoneinfo/right/America/Catamarca", "0o644"),
("/etc/libaudit.conf", "0o640"),
("/usr/lib/systemd/catalog/systemd.pt_BR.catalog", "0o644"),
("/usr/lib/systemd/system/dracut-shutdown.service", "0o777"),
("/usr/lib/.build-id/66/29051069454db7e5e097271a21c6bcc26d7f8d", "0o777"),
("/usr/share/licenses/libverto", "0o755"),
("/etc/ld.so.conf.d/bind-export-aarch64.conf", "0o644"),
("/usr/lib/systemd/system/dracut-initqueue.service", "0o777"),
]
allinfo_subset = [
(
"/usr/share/zoneinfo/posix/Australia/Currie",
'{"name": "/usr/share/zoneinfo/posix/Australia/Currie", "fullpath": '
'"/usr/share/zoneinfo/posix/Australia/Currie", "size": 2223, "mode": 33188, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"file", "othernames": {"/usr/share/zoneinfo/posix/Australia/Currie": '
"true}}",
),
(
"/usr/share/systemd/kbd-model-map",
'{"name": "/usr/share/systemd/kbd-model-map", "fullpath": '
'"/usr/share/systemd/kbd-model-map", "size": 3564, "mode": 33188, "uid": 0, '
'"gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": "file", '
'"othernames": {"/usr/share/systemd/kbd-model-map": true}}',
),
(
"/usr/share/zoneinfo/right/Etc/GMT",
'{"name": "/usr/share/zoneinfo/right/Etc/GMT", "fullpath": '
'"/usr/share/zoneinfo/right/Etc/GMT", "size": 667, "mode": 33188, "uid": 0, '
'"gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": "file", '
'"othernames": {"/usr/share/zoneinfo/right/Etc/GMT": true}}',
),
(
"/usr/share/zoneinfo/posix/Etc",
'{"name": "/usr/share/zoneinfo/posix/Etc", "fullpath": '
'"/usr/share/zoneinfo/posix/Etc", "size": 0, "mode": 16877, "uid": 0, "gid": '
'0, "linkdst": null, "linkdst_fullpath": null, "type": "dir", "othernames": '
'{"/usr/share/zoneinfo/posix/Etc": true}}',
),
(
"/usr/bin/gpgv",
'{"name": "/usr/bin/gpgv", "fullpath": "/usr/bin/gpgv", "size": 498056, '
'"mode": 33261, "uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": '
'null, "type": "file", "othernames": {"/usr/bin/gpgv": true}}',
),
(
"/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc",
'{"name": "/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc", "size": '
'8145, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib64/python3.6/encodings/__pycache__/cp737.cpython-36.pyc": true}}',
),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc", '
'"size": 11727, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc": '
"true}}",
),
(
"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc", '
'"size": 1568, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/dnf/conf/__pycache__/substitutions.cpython-36.pyc": '
"true}}",
),
(
"/usr/share/zoneinfo/America/Argentina/San_Juan",
'{"name": "/usr/share/zoneinfo/America/Argentina/San_Juan", "fullpath": '
'"/usr/share/zoneinfo/America/Argentina/San_Juan", "size": 1123, "mode": '
'33188, "uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, '
'"type": "file", "othernames": '
'{"/usr/share/zoneinfo/America/Argentina/San_Juan": true}}',
),
(
"/usr/share/tabset/vt100",
'{"name": "/usr/share/tabset/vt100", "fullpath": "/usr/share/tabset/vt100", '
'"size": 160, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/share/tabset/vt100": true}}',
),
(
"/usr/share/zoneinfo/posix/America/Dominica",
'{"name": "/usr/share/zoneinfo/posix/America/Dominica", "fullpath": '
'"/usr/share/zoneinfo/posix/America/Dominica", "size": 170, "mode": 33188, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"file", "othernames": {"/usr/share/zoneinfo/posix/America/Dominica": '
"true}}",
),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc", '
'"size": 113, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc": '
"true}}",
),
(
"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc",
'{"name": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc", '
'"fullpath": '
'"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc", '
'"size": 2539, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/python3.6/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc": '
"true}}",
),
(
"/usr/lib/systemd/system/systemd-user-sessions.service",
'{"name": "/usr/lib/systemd/system/systemd-user-sessions.service", '
'"fullpath": "/usr/lib/systemd/system/systemd-user-sessions.service", '
'"size": 636, "mode": 33188, "uid": 0, "gid": 0, "linkdst": null, '
'"linkdst_fullpath": null, "type": "file", "othernames": '
'{"/usr/lib/systemd/system/systemd-user-sessions.service": true}}',
),
(
"/usr/share/pki/ca-trust-source/anchors",
'{"name": "/usr/share/pki/ca-trust-source/anchors", "fullpath": '
'"/usr/share/pki/ca-trust-source/anchors", "size": 0, "mode": 16877, "uid": '
'0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": "dir", '
'"othernames": {"/usr/share/pki/ca-trust-source/anchors": true}}',
),
(
"/usr/lib64/python3.6/collections/__pycache__",
'{"name": "/usr/lib64/python3.6/collections/__pycache__", "fullpath": '
'"/usr/lib64/python3.6/collections/__pycache__", "size": 0, "mode": 16877, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"dir", "othernames": {"/usr/lib64/python3.6/collections/__pycache__": '
"true}}",
),
(
"/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9",
'{"name": "/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9", '
'"fullpath": "/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9", '
'"size": 40, "mode": 41471, "uid": 0, "gid": 0, "linkdst": '
'"../../../../usr/lib64/gconv/NATS-DANO.so", "linkdst_fullpath": '
'"/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9", "type": '
'"slink", "othernames": '
'{"/usr/lib/.build-id/00/769246dbd044617cffd76a6aec384c53af30d9": true, '
'"../../../../usr/lib64/gconv/NATS-DANO.so": true}}',
),
(
"/usr/share/licenses/zlib",
'{"name": "/usr/share/licenses/zlib", "fullpath": '
'"/usr/share/licenses/zlib", "size": 0, "mode": 16877, "uid": 0, "gid": 0, '
'"linkdst": null, "linkdst_fullpath": null, "type": "dir", "othernames": '
'{"/usr/share/licenses/zlib": true}}',
),
(
"/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2",
'{"name": "/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2", '
'"fullpath": "/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2", '
'"size": 25, "mode": 41471, "uid": 0, "gid": 0, "linkdst": '
'"../../../../usr/bin/ipcrm", "linkdst_fullpath": '
'"/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2", "type": '
'"slink", "othernames": '
'{"/usr/lib/.build-id/3b/142e9178a43068ee4c86e0000d3751e25688d2": true, '
'"../../../../usr/bin/ipcrm": true}}',
),
(
"/usr/lib64/python3.6/email/mime/__pycache__",
'{"name": "/usr/lib64/python3.6/email/mime/__pycache__", "fullpath": '
'"/usr/lib64/python3.6/email/mime/__pycache__", "size": 0, "mode": 16877, '
'"uid": 0, "gid": 0, "linkdst": null, "linkdst_fullpath": null, "type": '
'"dir", "othernames": {"/usr/lib64/python3.6/email/mime/__pycache__": '
"true}}",
),
]
@pytest.mark.parametrize("path,metadata", allinfo_subset)
def test_allinfo(path, metadata, analyzed_data):
report = analyzed_data()
data = report["image"]["imagedata"]["analysis_report"]["file_list"][
"files.allinfo"
]["base"]
assert data[path] == metadata
@pytest.mark.parametrize("_file,bit", files_all_subset)
def test_files_all(_file, bit, analyzed_data):
report = analyzed_data()
data = report["image"]["imagedata"]["analysis_report"]["file_list"]["files.all"][
"base"
]
assert data[_file] == bit
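# Hedged note (added for illustration): both tests above read the same nested
# analyzer report; the helper below just names that lookup path. The
# ``analyzed_data`` fixture itself is assumed to be provided by the test
# suite's conftest.py.
def _file_list_section(report, key):
    return report["image"]["imagedata"]["analysis_report"]["file_list"][key]["base"]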
| apache-2.0 | -722,782,270,653,081,500 | 48.618026 | 119 | 0.575123 | false | 2.82665 | false | false | false |
dchaplinsky/pep.org.ua | pepdb/core/model/supplementaries.py | 1 | 10343 | # coding: utf-8
from __future__ import unicode_literals
import re
import os.path
from collections import OrderedDict
from glob import glob
from decimal import Decimal
from io import BytesIO
import random
import zlib
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy
from django.core.files.base import File
from django.contrib.postgres.fields import HStoreField
import PyPDF2
from cacheops import cached
from core.model.exc import WatermarkException
WATERMARKS = {}
for f in glob(settings.WATERMARKS_PATH):
name, _ = os.path.splitext(os.path.basename(f))
WATERMARKS[name] = PyPDF2.PdfFileReader(open(f, "rb")).getPage(0)
class Document(models.Model):
DOC_TYPE_CHOICES = OrderedDict(
(
("business_registry", ugettext_lazy("Виписки з реєстру компаній")),
("court_decision", ugettext_lazy("Рішення суду")),
("declarations", ugettext_lazy("Декларації")),
("real_estate_registry", ugettext_lazy("Виписки з реєстру нерухомості")),
("order_to_dismiss", ugettext_lazy("Накази про звільнення")),
("media", ugettext_lazy("Публікація в медіа")),
("decree", ugettext_lazy("Рішення")),
("report", ugettext_lazy("Звіти")),
("ownership_structure", ugettext_lazy("Структури власності")),
("misc", ugettext_lazy("Інші документи")),
("other", ugettext_lazy("Неможливо ідентифікувати")),
)
)
DOC_TYPE_TO_WATERMARK = [
"misc",
"other",
"business_registry",
"court_decision",
"real_estate_registry",
"order_to_dismiss",
"decree",
"report",
"ownership_structure",
]
doc = models.FileField("Файл", upload_to="documents", max_length=1000)
doc_watermarked = models.FileField(
"Файл з водяним знаком", upload_to="documents/_", max_length=1000, blank=True
)
name = models.CharField("Людська назва", max_length=255)
uploaded = models.DateTimeField("Був завантажений", auto_now=True)
source = models.CharField("Першоджерело", blank=True, max_length=255)
uploader = models.ForeignKey(
User, verbose_name="Хто завантажив", related_name="pep_document"
)
hash = models.CharField("Хеш", max_length=40, blank=True)
comments = models.TextField("Коментарі", blank=True)
doc_type = models.CharField(
"Тип документу",
max_length=25,
choices=DOC_TYPE_CHOICES.items(),
default="other",
)
doc_type_set_manually = models.BooleanField(
"Тип документу був встановлений вручну", default=False
)
@staticmethod
def autocomplete_search_fields():
return ("id__iexact", "name__icontains", "source__icontains")
@property
def doc_url(self):
if self.doc_watermarked:
return self.doc_watermarked.url
else:
return self.doc.url
def guess_doc_type(self, force=False):
if not force and self.doc_type_set_manually:
return
outcome = "other"
filename = self.doc.name
PATTERNS = {
r"business?[-_\s]r?egistry": "business_registry",
r"court[-_\s]decision": "court_decision",
r"declaration": "declarations",
r"real[-_\s]property": "real_estate_registry",
r"property[-_\s]registry": "real_estate_registry",
r"land[-_\s]registry": "real_estate_registry",
r"real[-_\s]estate[-_\s]registry": "real_estate_registry",
r"order[-_\s]to[-_\s]dismiss": "order_to_dismiss",
r"звільнення": "order_to_dismiss",
r"decree": "decree",
r"report": "report",
r"raport": "report",
r"ownership[-_\s]structure": "ownership_structure",
}
for r, dtype in PATTERNS.items():
if re.search(r, filename, flags=re.I):
outcome = dtype
break
if outcome == "other":
if "_" in filename:
prefix, _ = filename.split("_", 1)
m = re.search(r"\.(\w+)$", prefix)
if m:
tld = m.group(1).lower()
if tld in ["ua", "com", "org", "info", "eu", "net", "tv"]:
outcome = "media"
self.doc_type = outcome
self.save()
def generate_watermark(self, force=False):
fname, ext = os.path.splitext(self.doc.name)
if self.doc_type not in self.DOC_TYPE_TO_WATERMARK:
return False
if self.doc_watermarked:
if not force:
return False
else:
self.doc_watermarked.delete()
watermark = WATERMARKS["a4_portrait"]
watermark_box = watermark.artBox
watermark_w = float(watermark_box[2] - watermark_box[0])
watermark_h = float(watermark_box[3] - watermark_box[1])
if ext.lower() == ".pdf":
try:
curr_file = PyPDF2.PdfFileReader(self.doc.file, strict=False)
pdf_writer = PyPDF2.PdfFileWriter()
for page_no in range(curr_file.getNumPages()):
curr_page = curr_file.getPage(page_no)
file_box = curr_page.artBox
file_w = float(file_box[2] - file_box[0])
file_h = float(file_box[3] - file_box[1])
scale = min(
file_w / (watermark_w + 0.01), file_h / (watermark_h + 0.01)
)
curr_page.mergeScaledPage(watermark, scale, expand=True)
pdf_writer.addPage(curr_page)
except IOError as e:
raise WatermarkException(
"Cannot find file {}, skipping".format(self.doc.name)
)
except (PyPDF2.utils.PdfReadError, ValueError, OSError) as e:
raise WatermarkException(
"Cannot read file {}, error was {}".format(self.doc.name, e)
)
except zlib.error as e:
raise WatermarkException(
"Cannot decompress page of {}, error was {}".format(
self.doc.name, e
)
)
with BytesIO() as fp:
pdf_writer.write(fp)
random.seed(self.pk)
try:
self.doc_watermarked.save(
"{}_{}_{}.pdf".format(
random.randrange(1000, 10000),
os.path.basename(fname)[:127],
random.randrange(1000, 10000),
),
File(fp),
)
except (OSError) as e:
raise WatermarkException(
"Cannot store watermark for file {}, error was {}".format(
self.doc.name, e
)
)
else:
return False
return True
def __unicode__(self):
return self.name
class Meta:
verbose_name = "Документ"
verbose_name_plural = "Документи"
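# Hedged usage sketch (added for illustration, not part of the original
# module): a typical maintenance pass over uploaded documents - guess the
# document type from its file name, then stamp the watermark. The filter
# below is an assumption about how pending documents would be selected.
def _example_watermark_pending_documents():
    for document in Document.objects.filter(doc_watermarked=''):
        document.guess_doc_type()
        try:
            document.generate_watermark()
        except WatermarkException:
            continue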
class FeedbackMessage(models.Model):
person = models.CharField(ugettext_lazy("Про кого"), max_length=150, blank=True)
text = models.TextField(ugettext_lazy("Інформація"), blank=False)
link = models.URLField(ugettext_lazy("Підтвердження"), max_length=512, blank=True)
email = models.EmailField(ugettext_lazy("e-mail"), max_length=512, blank=True)
contacts = models.TextField(
ugettext_lazy("Ваше ім'я та контакти"), max_length=512, blank=True
)
read = models.BooleanField(ugettext_lazy("Прочитано"), default=False)
added = models.DateTimeField("Був надісланий", auto_now=True)
answered_by = models.ForeignKey(
User, on_delete=models.SET_NULL, verbose_name="Відповів", blank=True, null=True
)
answer_added = models.DateTimeField("Була надіслана", blank=True, null=True)
short_answer = models.TextField("Суть відповіді", blank=True, null=True)
read_and_agreed = models.BooleanField(
"Користувач підтвердив що прочитав часто задаваємі питання", default=False
)
class Meta:
verbose_name = "Зворотній зв'язок"
verbose_name_plural = "Зворотній зв'язок"
class ActionLog(models.Model):
user = models.ForeignKey(User, verbose_name="Користувач")
action = models.CharField(verbose_name="Дія", max_length=30)
timestamp = models.DateTimeField(verbose_name="Дата та час", auto_now_add=True)
details = models.TextField(verbose_name="Деталі", blank=True)
class Meta:
verbose_name = "Дія користувача"
verbose_name_plural = "Дії користувачів"
index_together = [["user", "action", "timestamp"]]
class ExchangeRateManager(models.Manager):
@cached(timeout=24 * 60 * 60)
def get_annual_rates(self):
"""
This will return annual rates
"""
rates = {}
for rate in self.filter(is_annual=True):
rates[rate.dt.year] = dict(
(k, Decimal("1.0") / Decimal(v)) for k, v in rate.rates.items()
)
return rates
class ExchangeRate(models.Model):
dt = models.DateField("Дата курсу", db_index=True)
is_annual = models.BooleanField(
"Is annual exchange rate (31.12.x)", default=False, db_index=True
)
rates = HStoreField()
objects = ExchangeRateManager()
class Meta:
ordering = ("-dt",)
verbose_name = "Курс валют"
verbose_name_plural = "Курси валют"
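# Hedged usage sketch (added for illustration, not part of the original
# module): annual rates are cached for a day and returned per year as
# Decimal reciprocals of the stored HStore values. The currency code below
# is an assumption about the stored keys.
#
#   annual = ExchangeRate.objects.get_annual_rates()
#   multiplier = annual[2013]["USD"]   # Decimal(1) / stored rate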
| mit | 6,859,312,694,315,056,000 | 34.053957 | 87 | 0.566752 | false | 3.343053 | false | false | false |
mwhoffman/pygp | pygp/inference/basic.py | 1 | 2189 | """
Simple wrapper class for a Basic GP.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from ..utils.models import printable
from ..likelihoods import Gaussian
from ..kernels import SE, Matern
from .exact import ExactGP
__all__ = ['BasicGP']
@printable
class BasicGP(ExactGP):
"""
Basic GP frontend which assumes an ARD kernel and a Gaussian likelihood
(and hence performs exact inference).
"""
def __init__(self, sn, sf, ell, mu=0, ndim=None, kernel='se'):
likelihood = Gaussian(sn)
kernel = (
SE(sf, ell, ndim) if (kernel == 'se') else
Matern(sf, ell, 1, ndim) if (kernel == 'matern1') else
Matern(sf, ell, 3, ndim) if (kernel == 'matern3') else
Matern(sf, ell, 5, ndim) if (kernel == 'matern5') else None)
if kernel is None:
raise ValueError('Unknown kernel type')
super(BasicGP, self).__init__(likelihood, kernel, mu)
def _params(self):
# replace the parameters for the base GP model with a simplified
# structure and rename the likelihood's sigma parameter to sn (ie its
# the sigma corresponding to the noise).
params = [('sn', 1, True)]
params += self._kernel._params()
params += [('mu', 1, False)]
return params
@classmethod
def from_gp(cls, gp):
if not isinstance(gp._likelihood, Gaussian):
raise ValueError('BasicGP instances must have Gaussian likelihood')
if isinstance(gp._kernel, SE):
kernel = 'se'
elif isinstance(gp._kernel, Matern):
kernel = 'matern%d' % gp._kernel._d
else:
raise ValueError('BasicGP instances must have a SE/Matern kernel')
# get the relevant parameters.
sn = np.sqrt(gp._likelihood.s2)
sf = np.exp(gp._kernel._logsf)
ell = np.exp(gp._kernel._logell)
mu = gp._mean
# create the new gp and maybe add data.
newgp = cls(sn, sf, ell, mu)
if gp.ndata > 0:
X, y = gp.data
newgp.add_data(X, y)
return newgp
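# Hedged usage sketch (added for illustration, not part of the original
# module): the hyperparameters and data below are made-up toy values; only
# the constructor defined above and the add_data method inherited from
# ExactGP are assumed.
if __name__ == '__main__':
    X = np.random.rand(20, 1)
    y = np.sin(2 * np.pi * X.ravel()) + 0.1 * np.random.randn(20)
    gp = BasicGP(sn=0.1, sf=1.0, ell=0.2, ndim=1)
    gp.add_data(X, y)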
| bsd-2-clause | -7,655,531,662,997,177,000 | 30.271429 | 79 | 0.591138 | false | 3.833625 | false | false | false |
mzajac/DBPediaExtender | src/sparql_access.py | 1 | 3962 | #!/usr/bin/env python
import urllib
import json
import sys
from urllib2 import unquote
from collections import defaultdict
from config import data_source, sparql_endpoint
def full_predicate_name(name):
return '%s/property/%s' % (data_source, name.decode('utf-8'))
def full_resource_name(name):
return '%s/resource/%s' % (data_source, name.decode('utf-8'))
def full_type_name(name):
return 'http://dbpedia.org/ontology/%s' % name
def strip_url_prefix(s):
return s[len(data_source) + len('/resource/') : ]
def get_data(query):
params = {
"query": query,
"format": "application/json"
}
request = urllib.urlencode(params)
response = urllib.urlopen(sparql_endpoint, request).read()
return json.loads(response)
def get_results(query):
data = get_data(query)['results']['bindings']
return [
unquote(strip_url_prefix(line['s']['value']).encode('utf-8'))
for line in data
]
def get_pairs(query):
data = get_data(query)['results']['bindings']
return [
(unquote(strip_url_prefix(line['s']['value']).encode('utf-8')), line['o']['value'])
for line in data
]
def select_all(d):
dd = {}
for c in ['s', 'p', 'o']:
if c not in d:
dd[c] = '?%c' % c
else:
dd[c] = '<' + d[c] + '>' if c != 'p' else '<' + full_predicate_name(d[c]) + '>'
query = 'SELECT * FROM <%s> WHERE {%s %s %s} ORDER BY ?s' % (data_source, dd['s'], dd['p'], dd['o'])
data = get_data(query)['results']['bindings']
ret = []
for line in data:
t = []
for c in ['s', 'p', 'o']:
if c in line:
value = line[c]['value']
if value.startswith('%s/resource/' % data_source):
value = strip_url_prefix(value)
value = unquote(value.encode('utf-8'))
t.append(value)
ret.append(tuple(t))
return ret
def select_types(predicate, subject=True):
whose_type = '?s' if subject else '?o'
query = '''SELECT ?s, ?type FROM <%s> WHERE {
?s <%s> ?o.
%s rdf:type ?type.
}''' % (data_source, full_predicate_name(predicate), whose_type)
data = get_data(query)['results']['bindings']
types_dict = defaultdict(list)
for line in data:
types_dict[line['s']['value']].append(line['type']['value'])
return [types for entity, types in types_dict.iteritems()]
def count_entities_of_type(type):
query = '''SELECT count(*) FROM <%s> WHERE {
?s a <%s>.
}''' % (data_source, type)
return int(get_data(query)['results']['bindings'][0]['callret-0']['value'])
def select_entities_of_type(type):
query = '''SELECT * FROM <%s> WHERE {
?s a <%s>.
}''' % (data_source, type)
return get_results(query)
def select_entities_of_type_not_in_relation(type, predicate):
#Queries like the one below don't work on Virtuoso version 6.1 (on 6.4 they do).
#Therefore I use two queries and join their results manually.
'''SELECT * WHERE {
{SELECT ?s WHERE {
?s <http://pl.dbpedia.org/property/populacja> ?o.
}}
MINUS
{{SELECT ?s WHERE {
?s <http://pl.dbpedia.org/property/stolica> ?o.
}}}
}'''
entities_of_type = select_entities_of_type(type)
entities_in_relation = set([s for s, o in select_all({'p': predicate})])
return filter(lambda e: e not in entities_in_relation, entities_of_type)
def select_entities_of_type_in_relation(type, predicate):
query = '''SELECT ?s, ?o FROM <%s> WHERE {
?s a <%s>.
?s <%s> ?o.
}''' % (data_source, full_type_name(type), full_predicate_name(predicate))
return get_pairs(query)
def select_all_entities():
query = '''SELECT DISTINCT ?s FROM <%s> WHERE {
?s ?p ?o.
}''' % data_source
return get_results(query)
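# Hedged usage sketch (added for illustration, not part of the original
# module): the predicate and type names below are assumptions about the
# DBpedia dataset being queried, not values taken from this module.
def example_report(predicate='populationTotal', type_name='Settlement'):
    # Entities of the type that still lack the predicate...
    missing = select_entities_of_type_not_in_relation(
        full_type_name(type_name), predicate)
    # ...and those that already have it, together with its value.
    present = select_entities_of_type_in_relation(type_name, predicate)
    return len(missing), len(present)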
if __name__ == '__main__':
pass
| gpl-3.0 | 4,306,332,467,730,327,000 | 31.47541 | 104 | 0.56209 | false | 3.326616 | false | false | false |
stscieisenhamer/glue | glue/utils/decorators.py | 3 | 1050 | from __future__ import absolute_import, division, print_function
import traceback
__all__ = ['die_on_error', 'avoid_circular']
def die_on_error(msg):
"""
Non-GUI version of the decorator in glue.utils.qt.decorators.
In this case we just let the Python exception terminate the execution.
"""
def decorator(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
print('=' * 72)
print(msg + ' (traceback below)')
print('-' * 72)
traceback.print_exc()
print('=' * 72)
return wrapper
return decorator
def avoid_circular(meth):
def wrapper(self, *args, **kwargs):
if not hasattr(self, '_in_avoid_circular') or not self._in_avoid_circular:
self._in_avoid_circular = True
try:
return meth(self, *args, **kwargs)
finally:
self._in_avoid_circular = False
return wrapper
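# Hedged usage sketch (added for illustration, not part of the original
# module): the class below is made up to show the intent of the two
# decorators. avoid_circular stops a method from re-entering itself while
# it is already running on the same instance, which is useful for
# mutually-triggering callbacks; die_on_error prints the traceback with a
# custom message instead of letting it propagate silently.
class _Example(object):

    @avoid_circular
    def sync_a(self, value):
        self.sync_b(value)  # would recurse forever without the guard

    @avoid_circular
    def sync_b(self, value):
        self.sync_a(value)

    @die_on_error("Problem while syncing")
    def run(self):
        self.sync_a(1)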
| bsd-3-clause | -6,189,258,101,026,877,000 | 28.166667 | 82 | 0.546667 | false | 4.251012 | false | false | false |
uber/doubles | doubles/class_double.py | 1 | 1688 | from doubles.exceptions import UnallowedMethodCallError
from doubles.instance_double import InstanceDouble
from doubles.target import Target
from doubles.verification import verify_arguments
def patch_class(input_class):
"""Create a new class based on the input_class.
:param class input_class: The class to patch.
:rtype: class
"""
class Instantiator(object):
@classmethod
def _doubles__new__(self, *args, **kwargs):
pass
new_class = type(input_class.__name__, (input_class, Instantiator), {})
return new_class
class ClassDouble(InstanceDouble):
"""
A pure double representing the target class.
::
User = ClassDouble('myapp.User')
:param str path: The absolute module path to the class.
"""
is_class = True
def __init__(self, path):
super(ClassDouble, self).__init__(path)
self._doubles_target = patch_class(self._doubles_target)
self._target = Target(self._doubles_target)
def __call__(self, *args, **kwargs):
"""Verify arguments and proxy to _doubles__new__
:rtype: object
:raises VerifyingDoubleArgumentError: If args/kwargs don't match the expected arguments of
__init__ of the underlying class.
"""
verify_arguments(self._target, '_doubles__new__', args, kwargs)
return self._doubles__new__(*args, **kwargs)
def _doubles__new__(self, *args, **kwargs):
"""Raises an UnallowedMethodCallError
NOTE: This method is here only to raise if it has not been stubbed
"""
raise UnallowedMethodCallError('Cannot call __new__ on a ClassDouble without stubbing it')
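# Hedged usage sketch (added for illustration, not part of the original
# module): a ClassDouble stands in for the class itself, so construction
# must be stubbed before it is allowed; calling the double without a stub
# raises UnallowedMethodCallError. ``myapp.User`` is an assumed target and
# the stubbing helper used by callers (e.g. ``allow_constructor``) lives
# elsewhere in the doubles package.
#
#   User = ClassDouble('myapp.User')
#   User('valid', 'init', 'args')   # raises UnallowedMethodCallError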
| mit | 168,691,691,816,461,950 | 29.142857 | 98 | 0.64455 | false | 4.087167 | false | false | false |
pacoqueen/ginn | ginn/formularios/dynconsulta.py | 1 | 79919 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2013 Francisco José Rodríguez Bogado #
# <[email protected]> #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
###################################################################
## dynconsulta.py - Dynamic summary query for financial analysis
###################################################################
## Changelog:
## 8 February 2012 -> Started
###################################################################
from ventana import Ventana
from formularios import utils
import pygtk
pygtk.require('2.0')
import gtk, mx.DateTime
from framework import pclases
from framework.seeker import VentanaGenerica
old_float = float
from ventana_progreso import VentanaProgreso, VentanaActividad
from widgets import replace_widget
import pprint
from collections import defaultdict
try:
from collections import MutableMapping as transformedDictBase
except ImportError:
transformedDictBase = object
from informes.treeview2pdf import treeview2pdf
from informes.treeview2csv import treeview2csv
from formularios.reports import abrir_pdf, abrir_csv
import pango
import datetime
class TransformedDict(transformedDictBase):
"""
A dictionary which applies an arbitrary key-altering function before
accessing the keys"""
# From: http://stackoverflow.com/questions/3387691/
# python-how-to-perfectly-override-a-dict
def __init__(self, *args, **kwargs):
self.store = dict()
try:
self.update(dict(*args, **kwargs)) #use the free update to set keys
except AttributeError:
self.store.update(*args, **kwargs)
def __getitem__(self, key):
return self.store[self.__keytransform__(key)]
def __setitem__(self, key, value):
self.store[self.__keytransform__(key)] = value
def __delitem__(self, key):
del self.store[self.__keytransform__(key)]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def __keytransform__(self, key):
return key
def __str__(self):
return pprint.pformat(self.store)
def __repr__(self):
return pprint.pformat(self.store)
class MyMonthsDict(TransformedDict):
def __keytransform__(self, key):
try:
assert isinstance(key, (type(mx.DateTime.today()), datetime.date))
key = primero_de_mes(key)
except AssertionError:
anno = mx.DateTime.today().year
mes = mx.DateTime.today().month
if key < mes:
anno += 1
return mx.DateTime.DateFrom(anno, key, 1)
else:
return key
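# Hedged usage sketch (added for illustration, not part of the original
# module): MyMonthsDict normalises any date-like key to the first day of
# its month (assuming the primero_de_mes helper defined elsewhere in this
# module does exactly that), so every date inside the same month maps to
# the same entry; plain month numbers are promoted to dates as well. The
# dates below are made up.
#
#   d = MyMonthsDict()
#   d[mx.DateTime.DateFrom(2012, 2, 17)] = "february data"
#   assert d[mx.DateTime.DateFrom(2012, 2, 1)] == "february data"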
def activate(ch, ch_destino):
ch_destino.set_sensitive(ch.get_active())
class DynConsulta(Ventana, VentanaGenerica):
def __init__(self, objeto = None, usuario = None, mes_actual = None,
num_meses = 12):
"""
Constructor. objeto can be a pclases object with which to start the
window (instead of the first record of the table, which is the one
shown by default).
"""
self.mes_actual = (mes_actual != None and mes_actual
or mx.DateTime.localtime().month)
self.update_mes_actual()
self.num_meses = num_meses != None and num_meses or 12
self.update_mes_final()
self.usuario = usuario
self.clase = None
self.precalc = MyMonthsDict()
self.dic_campos = {}
self.old_model = {}
Ventana.__init__(self, 'dynconsulta.glade', objeto, usuario = usuario)
connections = {'b_salir/clicked': self.salir,
'b_nuevo/clicked': self.nuevo,
'b_borrar/clicked': self.borrar,
'b_actualizar/clicked': self.actualizar_ventana,
# 'b_guardar/clicked': self.guardar,
'b_buscar/clicked': self.buscar,
'sp_mes_actual/value-changed': self.update_mes_actual,
'sp_num_meses/value-changed': self.update_mes_final,
'tv_datos/query-tooltip': self.tooltip_query,
'b_exportar/clicked': self.exportar,
'b_imprimir/clicked': self.imprimir
}
self.wids['ch_presupuesto'].set_active(True)
self.wids['ch_datos_reales'].set_active(True)
self.wids['ch_reales_mes0'].set_active(True)
self.wids['ch_datos_pdtes'].set_active(False)
self.wids['ch_datos_reales'].connect("toggled",
lambda ch, chdest: chdest.set_sensitive(ch.get_active()),
self.wids['ch_datos_pdtes'])
self.inicializar_ventana()
self.actualizar_ventana(None)
self.wids['ventana'].resize(800, 600)
self.add_connections(connections)
gtk.main()
def tooltip_query(self, treeview, x, y, mode, tooltip):
path = treeview.get_path_at_pos(x, y)
if path:
treepath, column = path[:2] # @UnusedVariable
model = treeview.get_model()
itr = model.get_iter(treepath)
texto = model[itr][0].replace("&", "&")
tooltip.set_text(texto)
return False
def es_diferente(self):
"""
Returns True if any value in the window differs from
those of the object.
"""
return False
def update_mes_actual(self, spinbutton_mes = None):
try:
self.mes_actual = spinbutton_mes.get_value_as_int()
except AttributeError: # ¿No se ha creado el glade todavía?
glade_loaded = False
else:
glade_loaded = True
self.fecha_mes_actual = mx.DateTime.DateFrom(
mx.DateTime.localtime().year,
self.mes_actual,
1)
if glade_loaded:
self.inicializar_ventana()
self.actualizar_ventana(None)
return False # GtkEntry - did not receive focus-out-event. If you connect a handler to this signal, it must return FALSE so the entry gets the event as well
def update_mes_final(self, sp = None):
try:
self.num_meses = sp.get_value_as_int()
except AttributeError: # ¿No se ha cargado el glade todavía?
glade_loaded = False
else:
glade_loaded = True
mes_final = ((self.fecha_mes_actual.month-1 + self.num_meses) % 12) + 1
anno_final = self.fecha_mes_actual.year + (self.num_meses / 12)
while mes_final > 12:
anno_final += 1
mes_final -= 12
self.fecha_mes_final = mx.DateTime.DateFrom(anno_final,
mes_final,
1)
if self.fecha_mes_final < self.fecha_mes_actual:
self.fecha_mes_final = mx.DateTime.DateFrom(
self.fecha_mes_final.year + 1,
self.fecha_mes_final.month,
self.fecha_mes_final.day)
if glade_loaded:
self.inicializar_ventana()
self.actualizar_ventana(None)
return False # GtkEntry - did not receive focus-out-event. If you connect a handler to this signal, it must return FALSE so the entry gets the event as well
def inicializar_ventana(self):
"""
Initializes the window controls, setting their default values,
disabling the unnecessary ones, filling the combo boxes and
formatting the TreeView -if there is one-...
"""
self.wids['e_costes'].modify_text(gtk.STATE_NORMAL,
self.wids['e_costes'].get_colormap().alloc_color("red"))
self.wids['e_ingresos'].modify_text(gtk.STATE_NORMAL,
self.wids['e_ingresos'].get_colormap().alloc_color("blue"))
self.wids['e_costes'].set_property("xalign", 0.9)
self.wids['e_ingresos'].set_property("xalign", 0.9)
self.wids['e_total'].set_property("xalign", 0.9)
antiguo_tv_datos = self.wids['tv_datos']
nuevo_tv_datos = gtk.TreeView()
nuevo_tv_datos.show()
replace_widget(antiguo_tv_datos,nuevo_tv_datos)
self.wids['tv_datos'] = nuevo_tv_datos
self.wids['sp_mes_actual'].set_value(self.mes_actual)
self.wids['sp_num_meses'].set_value(self.num_meses)
self.activar_widgets(False)
self.wids['b_actualizar'].set_sensitive(True)
self.wids['b_guardar'].set_sensitive(False)
self.wids['b_buscar'].set_sensitive(True)
for b in ("b_nuevo", "b_guardar", "b_borrar"):
self.wids[b].set_property("visible", False)
# Inicialización del resto de widgets:
cols = [('Concepto', 'gobject.TYPE_STRING', False, True, True, None)]
if not self.mes_actual:
mes = mx.DateTime.localtime().month
else:
mes = self.mes_actual
for m in range(self.num_meses):
mescol = ((mes - 1 + m) % 12) + 1
fechacol = mx.DateTime.DateFrom(month = mescol,
year = mx.DateTime.localtime().year + (m > 0 and 1 or 0))
if mescol == 1:
strmes = fechacol.strftime("%B'%y")
else:
strmes = fechacol.strftime("%B")
cols.append((strmes,'gobject.TYPE_STRING',False,True,True,None))
cols += [('PUID', 'gobject.TYPE_STRING', False, False, False, None)]
utils.preparar_treeview(self.wids['tv_datos'], cols)
for n in range(1, self.num_meses + 1):
self.wids['tv_datos'].get_column(n).get_cell_renderers()[0]\
.set_property("xalign", 1)
col = self.wids['tv_datos'].get_column(0)
col.set_expand(True)
self.wids['tv_datos'].connect("row-activated", self.inspect)
self.wids['tv_datos'].set_tooltip_column(0)
self.wids['tv_datos'].connect("query-tooltip", self.tooltip_query)
self.colorear(self.wids['tv_datos'])
def colorear(self, tv):
"""
Puts in red the values that have changed since the last time
the model was refreshed.
"""
def cell_func(col, cell, model, itr, numcol):
# Extraigo valor numérico
valor = model[itr][numcol]
try:
valor_numerico = utils._float(valor)
except (TypeError, ValueError):
valor_numerico = None
# Color gradual en función de datos reales / datos precalculados
puid = model[itr][-1]
try:
real = self.cave[puid][numcol]
except KeyError: # Es defaultdict, pero por si acaso.
real = 0 # Puro presupuesto. Nada de valor real.
if valor_numerico and real:
try:
proporcion = 1.0 - (abs(real) / abs(valor_numerico))
grade = int(proporcion * 65535)
except ZeroDivisionError: # Por si acaso. XD
grade = 0
bg_color = gtk.gdk.Color(red = int(65535*0.9 + grade*0.1),
green = int(65535*0.7 + grade * 0.3),
blue = int(65535*0.1 + grade*0.9))
else:
bg_color = None # No hay valor o es otra cosa
# Extraigo valor anterior:
if not model.iter_parent(itr): # Es concepto de primer nivel
padre = model[itr][0]
try:
old_valor = self.old_model[padre]['valores'][numcol-1]
except (KeyError, IndexError):
old_valor = None
else:
padre = model[model.iter_parent(itr)][0]
hijo = model[itr][0]
try:
old_valor = self.old_model[padre]['hijos'][hijo][numcol-1]
except (KeyError, IndexError):
old_valor = None
# Color de cambio de valores respecto a "iteración" anterior
if self.old_model and old_valor != valor:
# Valor puede ser None porque es la primera vez que se muestran
# todos los datos y en ese caso no debe colorear.
cell.set_property("foreground", "dark green")
if not model.iter_parent(itr):
cell.set_property("weight", 4000)
cell.set_property("background", "gray")
else:
cell.set_property("weight", 400)
cell.set_property("background", "yellow")
else: # Coloreado de valores +/-
if not model.iter_parent(itr):
if valor_numerico != None:
if valor_numerico == 0:
color_valor = "white"
elif valor_numerico < 0:
color_valor = "red"
else:
color_valor = "blue"
else:
color_valor = "white"
cell.set_property("foreground", color_valor)
cell.set_property("weight", 4000)
cell.set_property("background", "gray")
else:
if valor_numerico != None:
if valor_numerico == 0:
color_valor = None
elif valor_numerico < 0:
color_valor = "red"
else:
color_valor = "blue"
else:
color_valor = "white"
cell.set_property("foreground", color_valor)
cell.set_property("weight", 400)
# Si no ha cambiado y no es una fila "cabecera", entonces
# coloreo el fondo según la gradación de datos reales.
cell.set_property("background", bg_color)
cols = tv.get_columns()
for i in xrange(1, len(cols)):
column = cols[i]
cells = column.get_cell_renderers()
for cell in cells:
column.set_cell_data_func(cell, cell_func, i)
def inspect(self, tv, path, col):
"""
Shows where the precalculated data comes from.
"""
indexcol = get_col_pos(tv, col)
if indexcol > 0:
mes = (self.mes_actual + indexcol - 1) % 12 #self.num_meses
model = tv.get_model()
valor = model[path][indexcol]
if utils._float(valor) == 0:
return
concepto = pclases.getObjetoPUID(model[path][-1])
if not isinstance(concepto, pclases.PresupuestoAnual):
# Los resúmenes no los muestro, que vayan al detalle.
concepto_desc = concepto.descripcion
txt_inspect = "%s (%s): %s = \n" % (
concepto_desc, col.get_property("title"), valor)
resultados = []
for o, importe, tm in self.tracking[mes][concepto]:
resultados.append((o.puid, o.get_info(), importe, tm))
to_open = utils.dialogo_resultado(resultados,
titulo = "INSPECCIONAR VALOR «%s»" % valor,
padre = self.wids['ventana'],
cabeceras = ['Cód. interno', 'Descripción',
'Importe', 'Toneladas'],
texto = txt_inspect)
if to_open > 0:
objeto = pclases.getObjetoPUID(to_open)
if isinstance(objeto, (pclases.ServicioTomado,
pclases.LineaDeCompra)):
if objeto.facturaCompra:
from formularios import facturas_compra
v = facturas_compra.FacturasDeEntrada( # @UnusedVariable
objeto = objeto.facturaCompra,
usuario = self.usuario)
elif objeto.albaranEntrada:
from formularios import albaranes_de_entrada
v = albaranes_de_entrada.AlbaranesDeEntrada( # @UnusedVariable
objeto = objeto.albaranEntrada,
usuario = self.usuario)
elif isinstance(objeto, (pclases.Servicio,
pclases.LineaDeVenta)):
if objeto.facturaVenta:
from formularios import facturas_venta
v = facturas_venta.FacturasVenta( # @UnusedVariable
objeto = objeto.facturaVenta,
usuario = self.usuario)
elif objeto.prefactura:
from formularios import prefacturas
v = prefacturas.Prefacturas( # @UnusedVariable
objeto = objeto.prefactura,
usuario = self.usuario)
elif objeto.albaranSalida:
from formularios import albaranes_de_salida
v = albaranes_de_salida.AlbaranesDeSalida( # @UnusedVariable
objeto = objeto.albaranSalida,
usuario = self.usuario)
elif isinstance(objeto, pclases.FacturaVenta):
from formularios import facturas_venta # @Reimport
v = facturas_venta.FacturasVenta( # @UnusedVariable
objeto = objeto,
usuario = self.usuario)
elif isinstance(objeto, pclases.FacturaCompra):
from formularios import facturas_compra # @Reimport
v = facturas_compra.FacturasDeEntrada( # @UnusedVariable
objeto = objeto,
usuario = self.usuario)
elif isinstance(objeto,
pclases.VencimientoValorPresupuestoAnual):
from formularios import presupuestos
v = presupuestos.Presupuestos( # @UnusedVariable
objeto = objeto,
usuario = self.usuario)
# PORASQUI: El get_info() no es buena idea. Demasiado "técnico"
def activar_widgets(self, s, chequear_permisos = True):
"""
Enables or disables (sensitive=True/False) all the window
widgets that depend on the object being displayed.
Input: s must be True or False. In any case it will be
evaluated as a boolean.
"""
if self.objeto == None:
s = False
ws = []
for w in ws:
try:
self.wids[w].set_sensitive(s)
except Exception, msg:
print "Widget problemático:", w, "Excepción:", msg
import traceback
traceback.print_last()
if chequear_permisos:
self.check_permisos(nombre_fichero_ventana = "dynconsulta.py")
def actualizar_ventana(self, boton = None):
if self.wids['ch_datos_reales'].get_active():
self.precalc = precalcular(self.fecha_mes_actual,
self.fecha_mes_final,
self.wids['ventana'])
else:
self.precalc = MyMonthsDict()
self.rellenar_widgets()
self.wids['tv_datos'].expand_all()
def rellenar_widgets(self):
# Los únicos otros dos widgets son los de mes de inicio y ancho de
# tabla en meses, que ya se rellenan ellos solos.
self.costes = 0.0
self.ingresos = 0.0
padres = self.rellenar_tabla()
self.actualizar_totales(padres)
self.wids['e_costes'].set_text(utils.float2str(self.costes))
self.wids['e_ingresos'].set_text(utils.float2str(self.ingresos))
total = self.ingresos + self.costes
self.wids['e_total'].set_text(utils.float2str(total))
self.wids['e_total'].modify_text(gtk.STATE_NORMAL,
self.wids['e_total'].get_colormap().alloc_color(
total > 0 and "blue"
or total < 0 and "red"
or "green"))
self.wids['e_total'].modify_font(pango.FontDescription("bold"))
def actualizar_totales(self, padres):
"""
Walks the first-level nodes and updates the totals according
to the kind of amount: expense or income.
"""
# Solo hay un (concepto de) presupuesto anual de tipo ingreso: ventas.
for concepto in padres:
fila = self.wids['tv_datos'].get_model()[padres[concepto]]
for valor in fila:
try:
valor_float = utils._float(valor)
except (ValueError, TypeError): # Es el PUID o descripción.
continue
if concepto.es_gasto():
self.costes += valor_float
else:
self.ingresos += valor_float
def rellenar_tabla(self):
self.tracking = {} # Aquí guardaré los objetos que componen cada valor.
self.cave = {}
# Por si acaso, algo de mantenimiento por aquí. Al turrón:
if pclases.DEBUG:
print __file__, "Eliminando posibles vencimientos de presupuesto"\
" duplicados...",
deleted = pclases.VencimientoValorPresupuestoAnual._remove_dupes()
if pclases.DEBUG:
print deleted
# Y ahora sí que sí. Al lío:
vpro = VentanaProgreso(padre = self.wids['ventana'])
vpro.mostrar()
model = self.wids['tv_datos'].get_model()
self.old_model = bak_model(model)
model.clear()
padres = self.cargar_conceptos_primer_nivel(vpro)
filas = self.cargar_conceptos_segundo_nivel(vpro)
if self.wids['ch_presupuesto'].get_active():
filas = self.montar_filas(filas, vpro)
nodos_conceptos = self.mostrar_matriz_en_treeview(filas, padres, vpro)
if self.wids['ch_datos_reales'].get_active():
self.mostrar_valores_reales_precalculados(nodos_conceptos,
padres, vpro)
# Ahora toca pasar el mes que se ha ido al final del año actual. Ciclo
# el mes si el último mes mostrado en la cuadrícula está completamente
# a cero. Uso como datos de referencia el del mismo mes pero del
# año anterior. Si también está a cero (nunca se ha presupuestado ese
# mes en la historia del programa), desisto.
vpro.ocultar()
return padres
def mostrar_valores_reales_precalculados(self,
nodos_conceptos,
padres,
vpro):
for mescol in range(self.num_meses):
fechacol = restar_mes(self.fecha_mes_actual, -mescol)
i = 0.0
try:
datos_reales = self.precalc[fechacol]
except KeyError: # Lo no nay ;)
datos_reales = []
for concepto in datos_reales:
vpro.set_valor(
i / len(datos_reales.keys()),
"Aplicando sustitución por valores reales en %s..."
% fechacol.strftime("%B"))
# Si había un valor previo, tengo que retirar la estimación
# y sumar lo real. En caso de granza, entonces la parte
# proporcional de las Tm.
valor_real_importe = datos_reales[concepto]['importe']
objetos = datos_reales[concepto]['objetos']
if self.wids['ch_presupuesto'].get_active():
vto_presupuestado = buscar_vencimiento_presupuestado(
fechacol,
concepto,
self.fecha_mes_actual)
else:
vto_presupuestado = None
if criterio_sustitucion(vto_presupuestado,
valor_real_importe,
self.fecha_mes_actual,
fechacol):
# Y si no, dejo lo que estaba.
if pclases.DEBUG:
print __file__, "Cambio presupuesto por real:", \
concepto.descripcion,\
vto_presupuestado, valor_real_importe
diff = self.cambiar_valor_presupuestado(valor_real_importe,
vto_presupuestado,
concepto,
fechacol,
mescol,
nodos_conceptos,
objetos)
try:
self.cave[concepto.puid][mescol+1]+=valor_real_importe
except AttributeError: # No valor real
# self.cave[concepto.puid][mescol + 1] = 0
pass
self.actualizar_sumatorio_padre(mescol, concepto, padres,
diff)
i += 1
def cambiar_valor_presupuestado(self, valor_real_importe,
valor_presupuestado, concepto, fechacol,
mescol, nodos_conceptos, objetos):
"""
If the budgeted value is a granza (granulate) one, removes the amount
corresponding to the tonnes of the real value and adds this real value
to whatever remains. Leaves the final amount in the cell.
Returns the difference between the new value and the previous one so
that the parent node can be updated by just adding that amount, thus
avoiding recalculating the whole "subcolumn".
«objetos» is a list of the objects the real value comes from.
"""
(valor_presupuestado_restante,
valor_presupuestado_importe) = self.calcular_presupuestado_restante(
valor_presupuestado,
fechacol,
concepto)
model = self.wids['tv_datos'].get_model()
nodo_concepto = nodos_conceptos[concepto]
if self.wids['ch_datos_pdtes'].get_active():
# Si los valores confirmados los ignoro, simplemente no incremento
# el valor total de la celda, pero sí que decremento el
# presupuesto. El IVA no cuenta. Eso se paga estén o no las
# facturas pendientes.
for objeto in objetos[:]:
if (not esta_pendiente(objeto)
and (valor_presupuestado
and not valor_presupuestado.es_de_iva())):
try:
importe_confirmado = objeto.get_subtotal(iva = True,
prorrateado = True)
                    except AttributeError:  # It's an invoice or something like that
importe_confirmado = objeto.calcular_importe_total()
if concepto.es_gasto:
importe_confirmado *= -1
valor_real_importe -= importe_confirmado
objetos.remove(objeto)
model[nodo_concepto][mescol + 1] = utils.float2str(
valor_presupuestado_restante
+ valor_real_importe)
self.actualizar_traza(objetos, concepto, fechacol, valor_presupuestado)
delta = ((valor_presupuestado_restante + valor_real_importe)
- valor_presupuestado_importe)
if pclases.DEBUG:
print __file__, ">>> cambiar_valor_presupuestado >>> ð =", delta
return delta
def actualizar_traza(self, objetos, concepto, fechacol,
valor_presupuestado):
if not fechacol.month in self.tracking:
self.tracking[fechacol.month] = defaultdict(list)
for o in objetos:
if (isinstance(o, pclases.LineaDeCompra)
and o.productoCompra in buscar_productos_granza()):
importe_objeto = o.get_subtotal(iva = True, prorrateado=True)
try:
numvtos = len(o.facturaCompra.vencimientosPago)
except AttributeError:
numvtos = max(
len(o.albaranEntrada.proveedor.get_vencimientos()), 1)
tm = o.cantidad / numvtos
if concepto.es_gasto():
trinfo = (o, -importe_objeto, -tm)
else:
trinfo = (o, importe_objeto, tm)
restar_en_traza_presupuesto(self.tracking,
fechacol.month,
self.mes_actual,
concepto,
valor_presupuestado,
importe_objeto,
tm)
else:
try:
importe_objeto = o.get_subtotal(iva = True,
prorrateado = True)
if isinstance(o, (pclases.LineaDeCompra,
pclases.ServicioTomado)):
importe_objeto = -importe_objeto
except AttributeError: # Es factura o algo así.
importe_objeto = o.calcular_importe_total(iva = True)
if isinstance(o, pclases.FacturaCompra):
                # VAT is an expense, but it includes sales invoices that must
                # stay positive. The concepto.es_gasto() criterion can't be used.
importe_objeto = -importe_objeto
trinfo = (o, importe_objeto, None)
restar_en_traza_presupuesto(self.tracking,
fechacol.month,
self.mes_actual,
concepto,
valor_presupuestado,
importe_objeto)
self.tracking[fechacol.month][concepto].append(trinfo)
def calcular_presupuestado_restante(self, valor_presupuestado, fechacol,
concepto):
valor_real_toneladas = None
if valor_presupuestado:
valor_presupuestado_importe = valor_presupuestado.importe
if valor_presupuestado.es_de_granza():
precalc_concepto = self.precalc[fechacol][concepto]
valor_real_toneladas = precalc_concepto['toneladas']
valor_presupuestado_restante = (valor_presupuestado.precio
#* (valor_presupuestado.toneladas - valor_real_toneladas))
# Sumo porque las tm presupuestadas ya vienen en negativo.
* (valor_presupuestado.toneladas + valor_real_toneladas))
# Si "me como" todo lo presupuestado, parto de cero para
# mostrar el valor real completo. (Si no, acabará restando
# ese delta y falseará el resultado)
# Uso min porque las toneladas vienen en negativo al ser gasto.
valor_presupuestado_restante = min(0,
valor_presupuestado_restante)
else:
                # Since it will be replaced entirely, the remaining value is
                # 0.0 so that only the real value to be added is visible.
valor_presupuestado_restante = 0.0
else:
valor_presupuestado_restante = 0.0
valor_presupuestado_importe = 0.0
return valor_presupuestado_restante, valor_presupuestado_importe
def actualizar_sumatorio_padre(self, mescol, concepto, padres, diff):
# Thanks bicycle repair man!
model = self.wids['tv_datos'].get_model()
pa = concepto.presupuestoAnual
nodo_padre = padres[pa]
try:
model[nodo_padre][mescol + 1] = (utils.float2str(
utils.parse_float(model[nodo_padre][mescol + 1])
+ diff))
except (TypeError, ValueError):
model[nodo_padre][mescol + 1] = utils.float2str(diff)
def mostrar_matriz_en_treeview(self, filas, padres, vpro):
model = self.wids['tv_datos'].get_model()
i = 0.0
nodos_conceptos = {}
for c in filas:
vpro.set_valor(i / len(filas.keys()),
"Montando matriz...")
pa = c.presupuestoAnual
nodo_padre = padres[pa]
            fila = [c.descripcion # FIXME: .replace("&", "&amp;") #
                    # Problems with the tooltip.
] + [utils.float2str(w) for w in filas[c]] + [c.puid]
nodos_conceptos[c] = model.append(nodo_padre, fila)
for mes_matriz in range(1, self.num_meses + 1):
# Actualizo totales de fila padre
try:
model[nodo_padre][mes_matriz] = utils.float2str(
utils.parse_float(model[nodo_padre][mes_matriz])
+ utils.parse_float(fila[mes_matriz]))
except (TypeError, ValueError):
model[nodo_padre][mes_matriz] = utils.float2str(
fila[mes_matriz])
i += 1
return nodos_conceptos
def montar_filas(self, filas, vpro):
i = 0.0
# Estos valores se metieron en la fecha y concepto que fueran, pero
# aquí tienen que moverse a la fecha de la FDP que corresponda al
# concepto.
valores = pclases.VencimientoValorPresupuestoAnual.select(pclases.AND(
pclases.VencimientoValorPresupuestoAnual.q.fecha
>= self.fecha_mes_actual,
pclases.VencimientoValorPresupuestoAnual.q.fecha
< self.fecha_mes_final))
valores_count = valores.count()
for v in valores:
v.sync()
# CWT: En mes actual no valen valores presupuestados. Solo reales.
if (self.wids['ch_reales_mes0'].get_active() and
self.fecha_mes_actual
<= v.fecha <= final_de_mes(self.fecha_mes_actual)):
continue
            # There are values from months before the first one in the table
            # whose due dates fall now. Those are dropped. If the month they
            # were budgeted in is already gone, their due dates don't count.
vp = v.valorPresupuestoAnual
if vp.fecha < self.fecha_mes_actual:
continue
c = v.conceptoPresupuestoAnual
mes_offset = (v.fecha.month - self.fecha_mes_actual.month) % (
self.num_meses)
try:
filas[c][mes_offset] += v.importe
            except KeyError:  # Which will be the usual case. There shouldn't
                              # be two due dates on the same date for a concept.
filas[c][mes_offset] = v.importe
if not v.fecha.month in self.tracking:
self.tracking[v.fecha.month] = defaultdict(list)
try:
tm = v.toneladas
except ValueError:
tm = None
self.tracking[v.fecha.month][c].append(
(v, v.importe, tm))
vpro.set_valor(i / valores_count,
"Cargando valores de dynconsulta...")
i += 1
return filas
def cargar_conceptos_primer_nivel(self, vpro):
vpro.set_valor(0, "Cargando conceptos de primer nivel...")
model = self.wids['tv_datos'].get_model()
padres = {}
pas = pclases.PresupuestoAnual.select()
pas_count = pas.count()
i = 0.0
for pa in pas:
self.cave[pa.puid] = defaultdict(old_float)
            fila = [pa.descripcion] #FIXME: .replace("&", "&amp;")]
for m in range(self.num_meses): # @UnusedVariable
fila.append("")
fila.append(pa.puid)
nodo = model.append(None, fila)
padres[pa] = nodo
vpro.set_valor(i / pas_count,
"Cargando conceptos de primer nivel...")
i += 1
return padres
def cargar_conceptos_segundo_nivel(self, vpro):
"""
        Only loads the concepts, with all values set to zero.
"""
i = 0.0
conceptos = pclases.ConceptoPresupuestoAnual.select()
conceptos_count = conceptos.count()
filas = {}
for c in conceptos:
self.cave[c.puid] = defaultdict(old_float)
filas[c] = []
for m in range(self.num_meses): # @UnusedVariable
filas[c].append(0)
vpro.set_valor(i / conceptos_count,
"Cargando conceptos de dynconsulta...")
i += 1
return filas
def buscar(self, widget):
"""
        Shows a search window followed by the results. The selected object
        becomes active in the window unless Cancel is pressed in the results
        window.
"""
        # TODO: Search all the rows and the tracking dict for a typed text
        # and give it focus or something.
pass
def imprimir(self, boton):
"""
        Prepares the print preview for the report.
"""
resp = utils.dialogo(titulo = "¿IMPRIMIR DESGLOSE?",
texto = "Puede imprimir un resumen o todo el contenido de "
"la consulta\n¿Desea imprimir toda la información "
"desglosada?",
padre = self.wids['ventana'])
if resp:
tv = self.wids['tv_datos']
tv.expand_all()
while gtk.events_pending(): gtk.main_iteration(False)
cols_a_totalizar = []
else:
tv = self.wids['tv_datos']
tv.collapse_all()
while gtk.events_pending(): gtk.main_iteration(False)
from consulta_ventas_por_producto import convertir_a_listview
tv = convertir_a_listview(tv)
cols_a_totalizar = range(1, self.num_meses + 1)
strfecha = "De %s a %s" % (utils.str_fecha(self.fecha_mes_actual),
utils.str_fecha(self.fecha_mes_final - mx.DateTime.oneDay))
abrir_pdf(
treeview2pdf(tv, titulo = "Informe resumen financiero",
fecha = strfecha, apaisado = True,
numcols_a_totalizar = cols_a_totalizar))
def exportar(self, boton):
"""
        Exports the TreeView to CSV.
"""
abrir_csv(treeview2csv(self.wids['tv_datos']))
def precalcular(fecha_ini, fecha_fin, ventana_padre = None, usuario = None):
"""
    Returns a dictionary of concepts for the specified month with the values
    that can be computed from real data.
    If a concept does not exist, it is created in the database
    (amounts collected / paid).
"""
vpro = VentanaActividad(ventana_padre, "Precalculando datos reales...")
vpro.mostrar()
    # Values we can obtain from the ERP (for now):
    # 1.- Granza goods received
    res = calcular_entradas_de_granza(vpro, fecha_ini, fecha_fin, usuario)
    # 2.- VAT (input - output)
    calcular_iva_real(res, vpro, fecha_ini, fecha_fin)
    # 3.- Sales by type (international, geotextiles, geocomposites...)
    calcular_ventas(res, vpro, fecha_ini, fecha_fin)
    # 4.- Purchases other than granza
    calcular_compras_no_granza(res, vpro, fecha_ini, fecha_fin)
if pclases.DEBUG and pclases.VERBOSE:
print __file__, res
vpro.ocultar()
return res
def calcular_iva_real(res, vpro, fechaini, fechafin):
"""
    Computes the VAT for the month of the given date and stores it under the
    «Impuestos» concept of the precalculated values.
"""
vpro.mover()
concepto = buscar_concepto_iva()
fecha = fechaini
while fecha <= fechafin:
vpro.mover()
soportado, fras_soportadas = calcular_soportado(vpro, fecha)
vpro.mover()
repercutido, fras_repercutidas = calcular_repercutido(vpro, fecha)
vpro.mover()
importe_iva = soportado - repercutido
if importe_iva:
# Paso de guardar valores nulos. La RAM es un bien escaso!
if fecha not in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_iva
res[fecha][concepto]['objetos'] += fras_soportadas
res[fecha][concepto]['objetos'] += fras_repercutidas
except KeyError:
res[fecha][concepto] = {'importe': importe_iva,
'objetos': fras_soportadas + fras_repercutidas}
# IVA a devolver se compensa el mes siguiente.
try:
importe_este_mes = res[fecha][concepto]['importe']
except KeyError:
importe_este_mes = None
if importe_este_mes > 0 and restar_mes(fecha, -1) < fechafin:
# El último mes ya no arrastro, no me quedan celdas donde acumular.
fechanext = restar_mes(fecha, -1)
if fechanext not in res:
res[fechanext] = {}
try:
res[fechanext][concepto]['importe'] += importe_este_mes
res[fechanext][concepto]['objetos'] \
= res[fecha][concepto]['objetos']
except KeyError:
res[fechanext][concepto] = {'importe': importe_este_mes,
'objetos': res[fecha][concepto]['objetos']}
res[fecha][concepto]['importe'] -= importe_este_mes # = 0.0
res[fecha][concepto]['objetos'] = []
fecha = restar_mes(fecha, -1)
    # FIXME: This is returned as negative or positive, but the rest of the
    # figures (sales, purchases, salaries, etc.) are positive even when they
    # are expenses. Automatically convert to negative here and in the budget
    # when the concept is an expense type.
def buscar_concepto_iva():
    # NOTE: Hardcoded
try:
c = pclases.ConceptoPresupuestoAnual.selectBy(descripcion = "IVA")[0]
except IndexError:
try:
padre=pclases.PresupuestoAnual.selectBy(descripcion="Impuestos")[0]
except IndexError:
padre = pclases.PresupuestoAnual(descripcion = "Impuestos")
c = pclases.ConceptoPresupuestoAnual(descripcion = "IVA",
presupuestoAnual = padre)
return c
def calcular_soportado(vpro, fecha):
# Pago este mes el IVA del mes pasado. Ojo.
fini = restar_mes(fecha)
fini = mx.DateTime.DateTimeFrom(fini.year, fini.month, 1)
ffin = mx.DateTime.DateTimeFrom(fini.year, fini.month, -1)
frascompra = pclases.FacturaCompra.select(pclases.AND(
pclases.FacturaCompra.q.fecha >= fini,
pclases.FacturaCompra.q.fecha <= ffin))
iva = sum([f.calcular_importe_iva() for f in frascompra])
return iva, pclases.SQLlist(frascompra)
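# Illustrative note (added; not in the original source): calcular_soportado()
# and its sibling calcular_repercutido() below both look at the *previous*
# month's invoices, so a hypothetical call for March 2015 would sum the VAT
# of all purchase invoices dated in February 2015:
#   iva, fras = calcular_soportado(vpro, mx.DateTime.DateFrom(2015, 3, 1))
#   # fini = 2015-02-01, ffin = last day of February 2015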
def calcular_repercutido(vpro, fecha):
# Pago este mes el IVA del mes pasado. Ojo.
fini = restar_mes(fecha)
fini = mx.DateTime.DateTimeFrom(fini.year, fini.month, 1)
ffin = mx.DateTime.DateTimeFrom(fini.year, fini.month, -1)
frasventa = pclases.FacturaVenta.select(pclases.AND(
pclases.FacturaVenta.q.fecha >= fini,
pclases.FacturaVenta.q.fecha <= ffin))
iva = sum([f.calcular_total_iva() for f in frasventa])
return iva, pclases.SQLlist(frasventa)
def calcular_ventas(res, vpro, fechaini, fechafin):
"""
    Computes and classifies the sales made between the start and end dates.
"""
vpro.mover()
fecha = fechaini
while fecha <= fechafin:
vpro.mover()
ldv_vencimientos_ventas, srv_vencimientos_ventas \
= buscar_vencimientos_ventas(vpro, fecha)
vpro.mover()
lineas_no_facturadas, servicios_no_facturados \
= buscar_lineas_albaranes_venta(vpro, fecha)
vpro.mover()
clasificar_ventas(res, ldv_vencimientos_ventas,
srv_vencimientos_ventas, lineas_no_facturadas,
servicios_no_facturados, fecha, vpro)
fecha = restar_mes(fecha, -1)
def buscar_vencimientos_ventas(vpro, fecha):
"""
    Returns sale lines and services corresponding to invoice due dates in the
    month indicated by «fecha».
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
vtos_venta = pclases.VencimientoCobro.select(pclases.AND(
pclases.VencimientoCobro.q.fecha >= fini,
pclases.VencimientoCobro.q.fecha <= ffin))
ldvs = []
srvs = []
for v in vtos_venta:
vpro.mover()
f = v.factura
for ldv in f.lineasDeVenta:
if ldv not in ldvs:
ldvs.append(ldv)
vpro.mover()
for srv in f.servicios:
if srv not in srvs:
srvs.append(srv)
vpro.mover()
return ldvs, srvs
def buscar_lineas_albaranes_venta(vpro, fecha):
"""
    Returns the sale lines belonging to uninvoiced delivery notes for the
    month indicated by «fecha».
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
albs = pclases.AlbaranSalida.select(pclases.AND(
pclases.AlbaranSalida.q.fecha >= fini,
pclases.AlbaranSalida.q.fecha <= ffin))
# Filtro y me quedo con las líneas no facturadas
ldvs = []
srvs = []
for a in albs:
vpro.mover()
for ldv in a.lineasDeVenta:
vpro.mover()
if not ldv.factura:
ldvs.append(ldv)
for srv in a.servicios:
vpro.mover()
if not srv.factura:
srvs.append(srv)
return ldvs, srvs
def clasificar_ventas(res, ldv_facturadas, srv_facturados, ldv_no_facturadas,
srv_no_facturados, fecha, vpro):
"""
    For the two groups of sale lines received, determines their amount, due
    date and the concept under which to classify them. Increments the cell*
    at the due-date column and concept row by the amount of the sale line.
    If there are several due dates, the amount is prorated.
    * Actually the real amount in the dictionary for the cell it will occupy
    if it passes the substitution criterion.
"""
for ldv in ldv_facturadas:
vpro.mover()
importe_prorrateado_ldv = ldv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(ldv.factura.cliente, ldv.producto)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldv
res[fecha][concepto]['objetos'].append(ldv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldv,
'objetos': [ldv]}
for srv in srv_facturados:
vpro.mover()
importe_prorrateado_srv = srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(srv.factura.cliente, None)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
for ldv in ldv_no_facturadas:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# cliente.
vpro.mover()
importe_prorrateado_ldv = ldv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(ldv.albaranSalida.cliente, ldv.producto)
fechas = ldv.albaranSalida.cliente.get_fechas_vtos_por_defecto(
ldv.albaranSalida.fecha)
if not fechas:
fechas = [fecha] # Uso la del albarán porque el cliente no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldv
res[fecha][concepto]['objetos'].append(ldv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldv,
'objetos': [ldv]}
for srv in srv_no_facturados:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# cliente.
vpro.mover()
importe_prorrateado_srv = srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldv(srv.albaranSalida.cliente, None)
fechas = srv.albaranSalida.cliente.get_fechas_vtos_por_defecto(
srv.albaranSalida.fecha)
if not fechas:
fechas = [fecha] # Uso la del albarán porque el cliente no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
def buscar_concepto_ldv(cliente, producto = None):
"""
    Returns the budget concept that corresponds to the customer and product
    received. If no product is given it is considered a service and the
    "General" concept type is returned.
"""
    # Default concept: the customer's.
    if cliente.es_extranjero():
        nac = "Internacionales"
    else:
        nac = "Nacionales"
    try:
        tdp = cliente.tipoDeCliente.descripcion
    except AttributeError:  # Not classified by the users. Use "General".
        tdp = "General"
    # Now refine according to the product type of the sale line.
try:
if producto.es_fibra():
tdp = "Fibra"
elif producto.es_bigbag() or producto.es_bolsa() or producto.es_caja():
tdp = "Geocem"
elif isinstance(producto, pclases.ProductoCompra):
tdp = "Comercializado"
except AttributeError:
pass
try:
concepto = pclases.ConceptoPresupuestoAnual.selectBy(
descripcion = "%s - %s" % (nac, tdp))[0]
except IndexError:
# No existe el concepto. DEBERÍA. Lo creo.
concepto = pclases.ConceptoPresupuestoAnual(
descripcion = "%s - %s" % (nac, tdp),
presupuestoAnual = pclases.PresupuestoAnual.selectBy(
descripcion = "Clientes")[0],
proveedor = None)
return concepto
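# Illustrative example (added; customer and type are hypothetical): for a
# foreign customer whose line is classified as "Fibra", buscar_concepto_ldv()
# would look up (or create) a concept named "Internacionales - Fibra" under
# the "Clientes" annual budget.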
def calcular_compras_no_granza(res, vpro, fechaini, fechafin):
"""
    Computes and classifies the purchases made between the start and end
    dates.
"""
vpro.mover()
fecha = fechaini
granzas = buscar_productos_granza()
while fecha <= fechafin:
vpro.mover()
ldc_vencimientos_compras, srv_vencimientos_compras \
= buscar_vencimientos_compras_no_granza(vpro, fecha, granzas)
vpro.mover()
lineas_no_facturadas, servicios_no_facturados \
= buscar_lineas_albaranes_compra_no_granza(vpro, fecha, granzas)
vpro.mover()
clasificar_compras(res, ldc_vencimientos_compras,
srv_vencimientos_compras, lineas_no_facturadas,
servicios_no_facturados, fecha, vpro)
fecha = restar_mes(fecha, -1)
def buscar_vencimientos_compras_no_granza(vpro, fecha, granzas):
"""
    Returns purchase lines and services corresponding to invoice due dates in
    the month indicated by «fecha» that are not granza.
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
vtos_compra = pclases.VencimientoPago.select(pclases.AND(
pclases.VencimientoPago.q.fecha >= fini,
pclases.VencimientoPago.q.fecha <= ffin))
ldcs = []
srvs = []
for v in vtos_compra:
vpro.mover()
f = v.facturaCompra
for ldc in f.lineasDeCompra:
if ldc.productoCompra not in granzas and ldc not in ldcs:
ldcs.append(ldc)
vpro.mover()
for srv in f.serviciosTomados:
if srv not in srvs:
srvs.append(srv)
vpro.mover()
return ldcs, srvs
def buscar_lineas_albaranes_compra_no_granza(vpro, fecha, granzas):
"""
    Returns the purchase lines belonging to uninvoiced delivery notes for the
    month indicated by «fecha» that are not granza.
"""
fini = primero_de_mes(fecha)
ffin = final_de_mes(fecha)
albs = pclases.AlbaranEntrada.select(pclases.AND(
pclases.AlbaranEntrada.q.fecha >= fini,
pclases.AlbaranEntrada.q.fecha <= ffin))
# Filtro y me quedo con las líneas no facturadas y que no sean de granza.
ldcs = []
srvs = []
for a in albs:
vpro.mover()
for ldc in a.lineasDeCompra:
vpro.mover()
if not ldc.facturaCompra and ldc.productoCompra not in granzas:
ldcs.append(ldc)
#for srv in a.serviciosTomados:
# vpro.mover()
# if not srv.factura:
# srvs.append(srv)
        # Incoming delivery notes have no services. Services are invoiced
        # directly.
return ldcs, srvs
def clasificar_compras(res, ldc_facturadas, srv_facturados, ldc_no_facturadas,
srv_no_facturados, fecha, vpro):
"""
    For the two groups of purchase lines received, determines their amount,
    due date and the concept under which to classify them. Increments the
    cell* at the due-date column and concept row by the amount of the
    purchase line. If there are several due dates, the amount is prorated.
    * Actually the real amount in the dictionary for the cell it will occupy
    if it passes the substitution criterion.
supera el criterio de sustitución.
"""
for ldc in ldc_facturadas:
vpro.mover()
# Gasto. En negativo.
importe_prorrateado_ldc = -ldc.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(ldc.facturaCompra.proveedor,
ldc.productoCompra)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldc
res[fecha][concepto]['objetos'].append(ldc)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldc,
'objetos': [ldc]}
for srv in srv_facturados:
vpro.mover()
# Gasto. Negativo
importe_prorrateado_srv = -srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(srv.facturaCompra.proveedor, None)
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
for ldc in ldc_no_facturadas:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# proveedor.
vpro.mover()
# Gasto. En negativo
importe_prorrateado_ldc = -ldc.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(ldc.albaranEntrada.proveedor,
ldc.productoCompra)
try:
fechas = ldc.albaranEntrada.proveedor.get_fechas_vtos_por_defecto(
ldc.albaranEntrada.fecha)
except AttributeError: # No proveedor. Sí albarán. El objeto viene
# de una búsqueda de albaranes no facturados.
fechas = [] # fecha es similar a ldc.albaranEntrada.fecha
if not fechas:
fechas = [fecha] # Uso la del albarán porque el proveedor no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_ldc
res[fecha][concepto]['objetos'].append(ldc)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_ldc,
'objetos': [ldc]}
for srv in srv_no_facturados:
# En este caso la fecha no es la fecha de vencimiento, sino la del
# albarán. Así que necesito determinar cuándo vence según el
# proveedor.
vpro.mover()
# Gasto. En negativo
importe_prorrateado_srv = -srv.get_subtotal(iva = True,
prorrateado = True)
concepto = buscar_concepto_ldc(srv.albaranEntrada.proveedor, None)
fechas = srv.albaranEntrada.proveedor.get_fechas_vtos_por_defecto(
srv.albaranEntrada.fecha)
if not fechas:
fechas = [fecha] # Uso la del albarán porque el proveedor no
# tiene información suficiente.
for fecha in fechas:
if not fecha in res:
res[fecha] = {}
try:
res[fecha][concepto]['importe'] += importe_prorrateado_srv
res[fecha][concepto]['objetos'].append(srv)
except KeyError:
res[fecha][concepto] = {'importe': importe_prorrateado_srv,
'objetos': [srv]}
def buscar_concepto_ldc(proveedor, producto = None):
"""
    Returns the budget concept that corresponds to the supplier and product
    received. If no product is given it is considered a service and the
    "General" concept type is returned.
"""
    # Default concept: the supplier's.
    try:
        proveedor.sync()
        tdp = proveedor.tipoDeProveedor.descripcion
    except AttributeError:  # Not classified by the users. Use "Resto".
        tdp = "Resto"
    if tdp == "Granza":  # If the type can't be derived from the purchase line,
        tdp = "Resto"    # classify it as general, because all granza
                         # purchases are already handled elsewhere.
    # Now refine according to the product type of the purchase line.
if producto:
producto.sync()
tdm = producto.tipoDeMaterial
        # NOTE: HARDCODED. Known material types. If new ones were created,
        # it would fall back to the supplier's type.
        iny = {'Materia Prima': None,  # Use the supplier's type.
'Material adicional': 'Materiales',
'Mantenimiento': 'Materiales',
'Repuestos geotextiles': 'Repuestos',
'Repuestos fibra': 'Repuestos',
'Aceites y lubricantes': 'Materiales',
'Mercancía inicial Valdemoro': 'Comercializados',
'Productos comercializados': 'Comercializados',
'Comercializados': 'Comercializados'}
try:
tdpiny = iny[tdm]
except KeyError:
pass # Si no está o no tiene, uso el del proveedor.
else:
if tdpiny != None:
tdp = tdpiny
try:
concepto = pclases.ConceptoPresupuestoAnual.selectBy(
descripcion = tdp)[0]
except IndexError:
# No existe el concepto. DEBERÍA. Lo creo.
if proveedor.es_extranjero():
nac = "Internacionales"
else:
nac = "Nacionales"
concepto = pclases.ConceptoPresupuestoAnual(
descripcion = "%s - %s" % (nac, tdp),
presupuestoAnual = pclases.PresupuestoAnual.selectBy(
descripcion = "Proveedores")[0],
proveedor = None)
return concepto
def restar_mes(fecha = mx.DateTime.today(), meses = 1):
if meses > 0:
try:
return restar_mes(
mx.DateTime.DateFrom(fecha.year,
fecha.month - 1,
fecha.day),
meses - 1)
except mx.DateTime.RangeError:
return restar_mes(
mx.DateTime.DateFrom(fecha.year - 1,
12,
fecha.day),
meses - 1)
elif meses < 0:
try:
return restar_mes(
mx.DateTime.DateFrom(fecha.year,
fecha.month + 1,
fecha.day),
meses + 1)
except mx.DateTime.RangeError:
return restar_mes(
mx.DateTime.DateFrom(fecha.year + 1,
1,
fecha.day),
meses + 1)
else:
return fecha
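# Illustrative examples (added; assumes mx.DateTime is available):
#   restar_mes(mx.DateTime.DateFrom(2015, 3, 15))      -> 2015-02-15
#   restar_mes(mx.DateTime.DateFrom(2015, 1, 15))      -> 2014-12-15
#   restar_mes(mx.DateTime.DateFrom(2015, 3, 15), -1)  -> 2015-04-15 (negative months move forward)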
def calcular_entradas_de_granza(vpro, fecha_ini, fecha_fin, usuario):
vpro.mover()
primes = fecha_ini
finmes = mx.DateTime.DateFrom(fecha_fin.year, fecha_fin.month, -1)
vpro.mover()
    # First: granza products:
    granzas = buscar_productos_granza()
    # Pull data from invoices:
    vtos = buscar_vencimientos_compra(primes, finmes)
    # Filter, keeping only the granza ones:
    vpro.mover()
    res = MyMonthsDict()
    clasificar_vencimientos_compra(vtos, granzas, usuario, res, vpro)
    vpro.mover()
    # And now the delivery notes that have not been invoiced yet.
    albs = buscar_albaranes_de_entrada(primes, finmes)
    vpro.mover()
    # Filter, keeping only the granza ones:
    clasificar_albaranes_de_entrada(albs, granzas, usuario, res, vpro)
vpro.mover()
return res
def clasificar_albaranes_de_entrada(albs, granzas, usuario, res, vpro):
for a in albs:
for ldc in a.lineasDeCompra:
ldc.sync()
            # Only keep what has not been invoiced.
            if (not ldc.facturaCompraID and ldc.productoCompra in granzas
                    and ldc.cantidad):
                # If the line has no quantity at all, skip it. I don't want to
                # store null values that eat processing time or RAM.
                # Think as if you were still programming with 640 K, old boy.
if pclases.DEBUG: # and pclases.VERBOSE:
print __file__, a.get_info(), ldc.get_info()
concepto = buscar_concepto_proveedor_granza(ldc.proveedor,
usuario)
proveedor = ldc.albaranEntrada.proveedor
fechas_vto = proveedor.get_fechas_vtos_por_defecto(
ldc.albaranEntrada.fecha)
if not fechas_vto:
fechas_vto = [ldc.albaranEntrada.fecha]
numvtos = len(fechas_vto)
for fecha_vto in fechas_vto:
fecha = primero_de_mes(fecha_vto)
if fecha not in res:
res[fecha] = {}
cantidad_prorrateada = ldc.cantidad / numvtos
try:
# Gasto. En negativo
res[fecha][concepto]['importe'] += -ldc.get_subtotal(
iva = True,
prorrateado = True)
res[fecha][concepto]['toneladas']+=cantidad_prorrateada
res[fecha][concepto]['objetos'].append(ldc)
except KeyError:
# Gasto. En negativo
res[fecha][concepto] = {
'importe': -ldc.get_subtotal(iva = True,
prorrateado = True),
'toneladas': cantidad_prorrateada,
'objetos': [ldc]}
vpro.mover()
def buscar_albaranes_de_entrada(primes, finmes):
    # NOTE! If the delivery note is dated before «primes», it will be ignored
    # even if its "theoretical" due dates fall within the TreeView months.
    # The query will not find it.
albs = pclases.AlbaranEntrada.select(pclases.AND(
pclases.AlbaranEntrada.q.fecha >= primes,
pclases.AlbaranEntrada.q.fecha <= finmes))
if pclases.DEBUG:
print __file__, albs.count(), "albaranes encontrados."
return albs
def clasificar_vencimientos_compra(vtos, granzas, usuario, res, vpro):
# Me quedo solo con los vencimientos de fras. de compra de granza.
for v in vtos:
if pclases.DEBUG and pclases.VERBOSE:
print __file__, v.get_info(), v.fecha
fra = v.facturaCompra
for ldc in fra.lineasDeCompra:
            ldc.sync()
ldc.facturaCompra and ldc.facturaCompra.sync()
ldc.albaranEntrada and ldc.albaranEntrada.sync()
if ldc.productoCompra in granzas:
if pclases.DEBUG and pclases.VERBOSE:
print __file__, fra.get_info(), ldc.get_info()
concepto = buscar_concepto_proveedor_granza(ldc.proveedor,
usuario)
fechas_mes_vto = buscar_mes_vto(ldc.facturaCompra)
# Gasto. En negativo
importe = -ldc.get_subtotal(iva = True, prorrateado = True)
cantidad = ldc.cantidad / len(fechas_mes_vto)
#for fecha_mes_vto in fechas_mes_vto:
fecha_mes_vto = v.fecha
if fecha_mes_vto not in res:
res[fecha_mes_vto] = {}
try:
res[fecha_mes_vto][concepto]['importe'] += importe
res[fecha_mes_vto][concepto]['toneladas'] += cantidad
res[fecha_mes_vto][concepto]['objetos'].append(ldc)
except KeyError:
res[fecha_mes_vto][concepto] = {'importe': importe,
'toneladas': cantidad,
'objetos': [ldc]}
vpro.mover()
def buscar_mes_vto(fra_compra):
"""Devuelve las fechas de vencimiento de la factura. Si no tiene
vencimientos (algún usuario se está haciendo el remolón con su trabajo)
entonces devuelve la fecha de la factura.
Las fechas las devuelve a primero del mes que sea, ignorando el día real
de pago.
:fra_compra: pclases.FacturaCompra
:returns: mx.DateTime.Date(Time)
"""
fechas = []
for v in fra_compra.vencimientosPago:
fechas.append(primero_de_mes(v.fecha))
if not fechas:
fechas = [primero_de_mes(fra_compra.fecha)]
return fechas
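# Illustrative example (added; dates are hypothetical): an invoice with due
# dates on 2015-03-10 and 2015-04-25 makes buscar_mes_vto() return
# [2015-03-01, 2015-04-01]; an invoice without due dates falls back to the
# first day of its own invoice month.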
def primero_de_mes(f):
return mx.DateTime.DateFrom(f.year, f.month, 1)
def final_de_mes(f):
return mx.DateTime.DateFrom(f.year, f.month, -1)
def buscar_vencimientos_compra(primes, finmes):
vtos = pclases.VencimientoPago.select(pclases.AND(
pclases.VencimientoPago.q.fecha >= primes,
pclases.VencimientoPago.q.fecha <= finmes))
# Filtro y saco los que ya están pagados (ver doc. requisitos)
vtos_pdtes = [v for v in vtos if v.calcular_importe_pdte() > 0]
if pclases.DEBUG:
print __file__, len(vtos_pdtes), "de", vtos.count(), \
"vencimientos encontrados."
return vtos
def buscar_productos_granza():
granzas = pclases.ProductoCompra.select(pclases.AND(
pclases.ProductoCompra.q.descripcion.contains("granza"),
pclases.ProductoCompra.q.obsoleto == False,
pclases.ProductoCompra.q.tipoDeMaterialID
== pclases.TipoDeMaterial.select(
pclases.TipoDeMaterial.q.descripcion.contains("prima")
)[0].id))
return granzas
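# Note (added for clarity): the query above keeps only non-obsolete purchase
# products whose description contains "granza" and whose material type
# description contains "prima" (i.e. raw material).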
def buscar_concepto_proveedor_granza(proveedor, usuario = None):
"""
    Looks up the annual dynconsulta concept corresponding to the supplier.
    If it is not found, it gets created.
"""
try:
concepto = pclases.ConceptoPresupuestoAnual.select(
pclases.ConceptoPresupuestoAnual.q.descripcion==proveedor.nombre)[0]
except IndexError:
concepto = pclases.ConceptoPresupuestoAnual(
descripcion = proveedor.nombre,
presupuestoAnual = pclases.PresupuestoAnual.select(
pclases.PresupuestoAnual.q.descripcion
== "Proveedores granza")[0], # EXISTE. Hay un check al
# principio que se asegura de eso.
proveedor = proveedor)
pclases.Auditoria.nuevo(concepto, usuario, __file__)
return concepto
def get_col_pos(tv, col):
"""
    Returns the position (integer index starting at 0) of the column in the
    TreeView.
"""
return tv.get_columns().index(col)
def bak_model(model):
res = {}
for fila in model:
res[fila[0]] = {'valores': [], 'hijos': {}}
for i in range(1, len(fila)):
res[fila[0]]['valores'].append(fila[i])
for sub_fila in fila.iterchildren():
res[fila[0]]['hijos'][sub_fila[0]] = []
for j in range(1, len(sub_fila)):
res[fila[0]]['hijos'][sub_fila[0]].append(sub_fila[j])
return res
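# Illustrative sketch (added; the row names are hypothetical): bak_model()
# snapshots the TreeStore into plain dicts, roughly:
#   {"Proveedores": {"valores": [col1, ..., colN],
#                    "hijos": {"Granza X": [col1, ..., colN]}}}
# where the keys are the first-column descriptions of parent and child rows.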
def criterio_sustitucion(vto_presupuesto, valor_real_importe,
fecha_primera_col, fecha = None):
if not fecha:
fecha = primero_de_mes(mx.DateTime.today())
    # If there isn't even a budgeted value it's clear, right? Show the real one:
sustituir_por_reales = True
if vto_presupuesto:
        # For the current month ALWAYS use real values.
if primero_de_mes(fecha) <= fecha_primera_col <= final_de_mes(fecha):
sustituir_por_reales = True
else:
sustituir_por_reales = False
### Caso granza
if vto_presupuesto.es_de_granza():
            # DONE: Reality principle: whenever real data is available, the
            # estimates are replaced by real data. For granza this can also
            # be done proportionally, complementing the budget.
            # This means that if 15 tonnes of granza have come in, then in
            # the month of their due date 15 tonnes at the estimated price
            # are removed and the real amount of those 15 tonnes is added to
            # the rest of the estimate.
sustituir_por_reales = True
### Caso IVA
if (vto_presupuesto.es_de_iva()
and abs(valor_real_importe)>abs(vto_presupuesto.importe)):
            # For VAT, the amount computed from real data is shown when it is
            # the current month (first "if" above) or when it exceeds the
            # estimate.
sustituir_por_reales = True
### Caso ventas.
if (vto_presupuesto.es_de_ventas()
and valor_real_importe > vto_presupuesto.importe):
# Solo sustituyo cuando supere lo previsto.
sustituir_por_reales = True
### Caso resto proveedores.
if (vto_presupuesto.es_de_compras()
and abs(valor_real_importe)>abs(vto_presupuesto.importe)):
# Solo sustituyo cuando supere lo previsto.
sustituir_por_reales = True
if pclases.DEBUG:
print __file__, "-------->>>>", \
vto_presupuesto.conceptoPresupuestoAnual.descripcion, \
"; mes presup.:", \
vto_presupuesto.valorPresupuestoAnual.fecha.month, \
"; mes vto.:", vto_presupuesto.fecha.month, \
"; presup. en mes vto.:", vto_presupuesto.importe, \
"; real:", valor_real_importe,
if pclases.DEBUG:
print __file__, valor_real_importe, "sustituir_por_reales [", \
sustituir_por_reales, "]"
return sustituir_por_reales
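# Summary of the substitution rules above (added comment):
#   - no budgeted due date          -> always show the real value
#   - column is the current month   -> always show the real value
#   - granza concepts               -> always substitute (proportionally, by tonnes)
#   - VAT and non-granza purchases  -> substitute only when |real| > |budgeted|
#   - sales                         -> substitute only when real > budgeted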
def buscar_vencimiento_presupuestado(fecha, concepto, fecha_mes_actual):
"""
    Returns the budget's VencimientoValorPresupuesto object for the DUE date
    and concept specified.
"""
try:
vtos = pclases.VencimientoValorPresupuestoAnual.select(pclases.AND(
pclases.VencimientoValorPresupuestoAnual.q.fecha
>= primero_de_mes(fecha),
pclases.VencimientoValorPresupuestoAnual.q.fecha
<= final_de_mes(fecha)))
vto = [v for v in vtos if v.conceptoPresupuestoAnual == concepto][0]
vp = vto.valorPresupuestoAnual
# No interesan los vencimientos de valores presupuestados en el pasado.
if vp.fecha < fecha_mes_actual:
return None
return vto
except IndexError:
return None
def restar_en_traza_presupuesto(dict_tracking,
mes,
mes_actual,
concepto,
valor_presupuestado,
valor_real_importe,
valor_real_toneladas = None):
"""
    From the tracking dictionary, takes the budgeted value's object and puts
    it back in again, but with the amount it contributes to the final value
    decreased by «valor_real_importe» (and tonnes, where applicable).
"""
for obj, importe, tm in dict_tracking[mes][concepto]:
if obj == valor_presupuestado:
dict_tracking[mes][concepto].remove((obj, importe, tm))
            # For the current month there are never budgeted values. Don't
            # add it back and be done with it.
if mes != mes_actual:
if valor_real_toneladas != None:
tm -= -valor_real_toneladas # Real: +. En presup.: -
importe = obj.precio * tm
else:
importe -= valor_real_importe
            # Also drop negative values. They no longer matter: the budgeted
            # value has been completely replaced.
if ((not concepto.es_gasto() and importe > 0) or (
concepto.es_gasto() and importe < 0)):
dict_tracking[mes][concepto].append((obj,
importe,
tm))
break
def esta_pendiente(o):
"""
    Returns True if the received object is pending collection/payment,
    whether it is an invoice, an invoice line or a service.
    If its nature cannot be determined, returns False by default.
    For sale/purchase lines and services, whether they are pending is
    decided from their **complete** invoice.
"""
o.sync()
try:
o = o.factura
o.sync()
except AttributeError:
pass
try:
o = o.facturaCompra
o.sync()
except AttributeError:
pass
try:
res = o.calcular_pendiente_cobro()
except AttributeError:
try:
res = o.get_importe_primer_vencimiento_pendiente()
except AttributeError:
res = False
return res
if __name__ == "__main__":
p = DynConsulta()
| gpl-2.0 | 2,879,280,261,875,653,000 | 44.758462 | 167 | 0.531615 | false | 3.413817 | false | false | false |
google-research/graph-attribution | graph_attribution/graphs.py | 1 | 11067 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Extra functions for manipulating GraphsTuple objects."""
from typing import Iterator, List, Tuple
import graph_nets
import numpy as np
import tensorflow as tf
import tree
GraphsTuple = graph_nets.graphs.GraphsTuple
# Alias to mirror the tf version.
cast_to_np = graph_nets.utils_tf.nest_to_numpy
# Numpy and tf compatible version of graph_nets.utils_tf.get_num_graphs
def get_num_graphs(graph): return graph.n_node.shape[0]
def get_input_spec(x: GraphsTuple) -> tf.TensorSpec:
"""Gets input signature for a graphstuple, useful for tf.function."""
return graph_nets.utils_tf.specs_from_graphs_tuple(
x, dynamic_num_graphs=True)
def print_graphs_tuple(graphs: GraphsTuple):
"""Print a graph tuple's shapes and contents."""
print("Shapes of GraphsTuple's fields:")
print(
graphs.map(
lambda x: x if x is None else x.shape,
fields=graph_nets.graphs.ALL_FIELDS))
def cast_to_tf(graphs: GraphsTuple) -> GraphsTuple:
"""Convert GraphsTuple numpy arrays to tf.Tensor."""
def cast_fn(x):
return tf.convert_to_tensor(x) if isinstance(x, np.ndarray) else x
return tree.map_structure(cast_fn, graphs)
def reduce_sum_edges(graphs: GraphsTuple) -> GraphsTuple:
"""Adds edge information into nodes and sets edges to None."""
if graphs.nodes.ndim > 1:
raise ValueError('Can only deal with 1D node information.')
if graphs.edges is not None and graphs.edges.ndim > 1:
raise ValueError('Can only deal with 1D edge information.')
if graphs.edges is None:
return graphs
num_nodes = tf.reduce_sum(graphs.n_node)
edge_contribution = tf.math.unsorted_segment_sum(graphs.edges,
graphs.receivers, num_nodes)
new_nodes = graphs.nodes + edge_contribution
return graphs.replace(nodes=new_nodes, edges=None)
def binarize_np_nodes(graph: GraphsTuple, tol: float) -> GraphsTuple:
"""Binarize node values based on a threshold, useful for classification."""
return graph.replace(nodes=(graph.nodes >= tol).astype(np.float32))
def make_constant_like(graphs: GraphsTuple, node_vec: np.ndarray,
edge_vec: np.ndarray) -> GraphsTuple:
"""Make a similar graph but with constant nodes and edges."""
using_tensors = isinstance(graphs.nodes, tf.Tensor)
nodes = np.tile(node_vec, (sum(graphs.n_node), 1))
edges = np.tile(edge_vec, (sum(graphs.n_edge), 1))
if using_tensors:
nodes = tf.convert_to_tensor(nodes, graphs.nodes.dtype)
edges = tf.convert_to_tensor(edges, graphs.edges.dtype)
return graphs.replace(nodes=nodes, edges=edges)
def segment_mean_stddev(
data: tf.Tensor, segment_counts: tf.Tensor) -> Tuple[tf.Tensor, tf.Tensor]:
"""Calculate mean and stddev for segmented tensor (e.g.
ragged-like).
Expects a 2D tensor for data and will return mean and std in the same shape,
with repeats acoording to segment_counts.
Args:
data: 2D tensor.
segment_counts: 1D int tensor with counts for each segment. Should satisfy
sum(segment_counts) = data.shape[0].
Returns:
Segment-wise mean and std, replicated to same shape as data.
"""
segment_ids = tf.repeat(
tf.range(segment_counts.shape[0]), segment_counts, axis=0)
mean_per_segment = tf.math.segment_mean(data, segment_ids)
mean = tf.repeat(mean_per_segment, segment_counts, axis=0)
diff_squared_sum = tf.math.segment_sum(tf.square(data - mean), segment_ids)
counts = tf.reshape(tf.cast(segment_counts, tf.float32), (-1, 1))
std_per_segment = tf.sqrt(diff_squared_sum / counts)
std = tf.repeat(std_per_segment, segment_counts, axis=0)
return mean, std
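# Minimal usage sketch (added; shapes are hypothetical):
#   data = tf.random.normal((5, 3))   # 5 rows of node features
#   counts = tf.constant([2, 3])      # two segments: 2 + 3 rows
#   mean, std = segment_mean_stddev(data, counts)
#   # mean.shape == std.shape == (5, 3); rows 0-1 share one mean/std,
#   # rows 2-4 share another.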
def perturb_graphs_tuple(graphs: GraphsTuple, num_samples: int,
sigma: float) -> GraphsTuple:
"""Sample graphs with additive gaussian noise.
  For a given collection of graphs we create noisy versions of the initial
graphs by summing random normal noise scaled by a constant factor (sigma)
and per-graph variance on node and edge information. Connectivity is the
same.
Args:
graphs: input graphs on which to add noise.
num_samples: number of times to create noisy graphs.
sigma: scaling factor for noise.
Returns:
GraphsTuple with num_samples times more graphs.
"""
_, node_stddev = segment_mean_stddev(graphs.nodes, graphs.n_node)
_, edge_stddev = segment_mean_stddev(graphs.edges, graphs.n_edge)
def add_noise(x, stddev):
return x + tf.random.normal(x.shape,
stddev=sigma * stddev, dtype=x.dtype)
graph_list = []
for _ in tf.range(num_samples):
graph = graphs.replace(
nodes=add_noise(graphs.nodes, node_stddev),
edges=add_noise(graphs.edges, edge_stddev))
graph_list.append(graph)
return graph_nets.utils_tf.concat(graph_list, axis=0)
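# Usage sketch (added; assumes `graphs` is a GraphsTuple of tf.Tensors):
#   noisy = perturb_graphs_tuple(graphs, num_samples=10, sigma=0.05)
#   # `noisy` holds 10 * get_num_graphs(graphs) graphs, each with Gaussian
#   # noise scaled by 0.05 times the per-graph std of nodes/edges.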
def split_graphs_tuple(graphs: GraphsTuple) -> Iterator[GraphsTuple]:
"""Converts several grouped graphs into a list of single graphs."""
n = get_num_graphs(graphs)
nodes = []
node_offsets = [0] + np.cumsum(graphs.n_node).tolist()
for i, j in zip(node_offsets[:-1], node_offsets[1:]):
nodes.append(graphs.nodes[i:j])
edges = []
has_edges = graphs.edges is not None
receivers, senders = [], []
edge_offsets = [0] + np.cumsum(graphs.n_edge).tolist()
for node_offset, i, j in zip(node_offsets[:-1], edge_offsets[:-1],
edge_offsets[1:]):
if has_edges:
edges.append(graphs.edges[i:j])
else:
edges.append(None)
receivers.append(graphs.receivers[i:j] - node_offset)
senders.append(graphs.senders[i:j] - node_offset)
if graphs.globals is None:
g_globals = [None for i in range(n)]
else:
g_globals = [graphs.globals[i] for i in range(n)]
graph_list = map(GraphsTuple, nodes, edges, receivers, senders, g_globals,
graphs.n_node[:, np.newaxis], graphs.n_edge[:, np.newaxis])
return graph_list
def get_graphs_np(graphs: GraphsTuple, indices=List[int]) -> GraphsTuple:
"""Gets a new graphstuple (numpy) based on a list of indices."""
node_indices = np.insert(np.cumsum(graphs.n_node), 0, 0)
node_slice = np.concatenate(
[np.arange(node_indices[i], node_indices[i + 1]) for i in indices])
nodes = graphs.nodes[node_slice]
edge_indices = np.insert(np.cumsum(graphs.n_edge), 0, 0)
edge_slice = np.concatenate(
[np.arange(edge_indices[i], edge_indices[i + 1]) for i in indices])
edges = graphs.edges[edge_slice] if graphs.edges is not None else None
n_edge = graphs.n_edge[indices]
n_node = graphs.n_node[indices]
offsets = np.repeat(node_indices[indices], graphs.n_edge[indices])
new_offsets = np.insert(np.cumsum(n_node), 0, 0)
senders = graphs.senders[edge_slice] - offsets
receivers = graphs.receivers[edge_slice] - offsets
senders = senders + np.repeat(new_offsets[:-1], n_edge)
receivers = receivers + np.repeat(new_offsets[:-1], n_edge)
g_globals = graphs.globals[indices] if graphs.globals is not None else None
return GraphsTuple(
nodes=nodes,
edges=edges,
globals=g_globals,
senders=senders,
receivers=receivers,
n_node=n_node,
n_edge=n_edge)
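# Usage sketch (added): get_graphs_np(batch, [0, 2]) returns a new numpy
# GraphsTuple containing only the first and third graphs of `batch`, with
# senders/receivers re-offset so the sub-batch is self-consistent.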
def get_graphs_tf(graphs: GraphsTuple, indices: np.ndarray) -> GraphsTuple:
"""Gets a new graphstuple (tf) based on a list of indices."""
node_indices = tf.concat(
[tf.constant([0]), tf.cumsum(graphs.n_node)], axis=0)
node_starts = tf.gather(node_indices, indices)
node_ends = tf.gather(node_indices, indices + 1)
node_slice = tf.ragged.range(node_starts, node_ends).values
nodes = tf.gather(graphs.nodes, node_slice)
edge_indices = tf.concat(
[tf.constant([0]), tf.cumsum(graphs.n_edge)], axis=0)
edge_starts = tf.gather(edge_indices, indices)
edge_ends = tf.gather(edge_indices, indices + 1)
edge_slice = tf.ragged.range(edge_starts, edge_ends).values
edges = tf.gather(graphs.edges,
edge_slice) if graphs.edges is not None else None
n_edge = tf.gather(graphs.n_edge, indices)
n_node = tf.gather(graphs.n_node, indices)
offsets = tf.repeat(node_starts, tf.gather(graphs.n_edge, indices))
senders = tf.gather(graphs.senders, edge_slice) - offsets
receivers = tf.gather(graphs.receivers, edge_slice) - offsets
new_offsets = tf.concat([tf.constant([0]), tf.cumsum(n_node)], axis=0)
senders = senders + tf.repeat(new_offsets[:-1], n_edge)
receivers = receivers + tf.repeat(new_offsets[:-1], n_edge)
g_globals = tf.gather(graphs.globals,
indices) if graphs.globals is not None else None
return GraphsTuple(
nodes=nodes,
edges=edges,
globals=g_globals,
senders=senders,
receivers=receivers,
n_node=n_node,
n_edge=n_edge)
def _interp_array(start: tf.Tensor, end: tf.Tensor,
num_steps: int) -> tf.Tensor:
"""Linearly interpolate 2D tensors, returns 3D tensors.
Args:
start: 2D tensor for start point of interpolation of shape [x,y].
end: 2D tensor as end point of interpolation of shape [x,y] (same as start).
num_steps: number of steps to interpolate.
Returns:
New tensor of shape [num_steps, x, y]
"""
alpha = tf.linspace(0., 1., num_steps)
beta = 1 - alpha
return tf.einsum('a,bc->abc', alpha, end) + tf.einsum('a,bc->abc', beta,
start)
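# Example (added; shapes are hypothetical): interpolating a (2, 3) `start`
# tensor towards a (2, 3) `end` tensor with num_steps=5 yields a (5, 2, 3)
# tensor whose first slice equals `start` and whose last slice equals `end`.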
def interpolate_graphs_tuple(
start: GraphsTuple, end: GraphsTuple,
num_steps: int) -> Tuple[GraphsTuple, tf.Tensor, tf.Tensor]:
"""Interpolate two graphs of same shape."""
nodes_interp = _interp_array(start.nodes, end.nodes, num_steps)
edges_interp = _interp_array(start.edges, end.edges, num_steps)
node_steps = tf.tile(nodes_interp[1] - nodes_interp[0], (num_steps, 1))
edge_steps = tf.tile(edges_interp[1] - edges_interp[0], (num_steps, 1))
graphs = []
for nodes, edges in zip(nodes_interp, edges_interp):
graphs.append(end.replace(nodes=nodes, edges=edges))
interp_graph = graph_nets.utils_tf.concat(graphs, axis=0)
return interp_graph, node_steps, edge_steps
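# Usage sketch (added; `ref` and `graphs` must have identical connectivity):
#   interp, node_steps, edge_steps = interpolate_graphs_tuple(ref, graphs, 50)
#   # `interp` packs 50 graphs going from `ref` to `graphs`; node_steps and
#   # edge_steps are the per-step deltas, e.g. for integrated-gradients style
#   # attribution.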
| apache-2.0 | -9,002,149,192,423,218,000 | 36.900685 | 83 | 0.653384 | false | 3.502215 | false | false | false |
ninjawil/weather-station | scripts/pyenergenie/energenie/Devices.py | 1 | 1064 | # Devices.py 30/09/2015 D.J.Whale
#
# Information about specific Energenie devices
MFRID_ENERGENIE = 0x04
PRODUCTID_C1_MONITOR = 0x01
PRODUCTID_R1_MONITOR_AND_CONTROL = 0x02
PRODUCTID_ETRV = 0x03
CRYPT_PID = 242
CRYPT_PIP = 0x0100
# OpenHEMS does not support a broadcast id, but Energenie added one for their
# MiHome Adaptors. This makes simple discovery possible.
BROADCAST_ID = 0xFFFFFF # energenie broadcast
# TODO put additional products in here from the Energenie directory
def getDescription(mfrid, productid):
mfr = "UNKNOWN"
product = "UNKNOWN"
if mfrid == MFRID_ENERGENIE:
mfr = "Energenie"
if productid == PRODUCTID_C1_MONITOR:
product = "C1 MONITOR"
elif productid == PRODUCTID_R1_MONITOR_AND_CONTROL:
product = "R1 MONITOR/CONTROL"
elif productid == PRODUCTID_ETRV:
product = "eTRV"
return "Manufactuer:%s Product:%s" % (mfr, product)
# END
| mit | -4,096,557,223,199,147,500 | 30.294118 | 76 | 0.611842 | false | 3.224242 | false | false | false |
fkie-cad/FACT_core | src/test/acceptance/rest/test_rest_analyze_firmware.py | 1 | 4292 | # pylint: disable=wrong-import-order
import json
import time
import urllib.parse
from multiprocessing import Event, Value
from storage.db_interface_backend import BackEndDbInterface
from test.acceptance.base import TestAcceptanceBase
from test.common_helper import get_firmware_for_rest_upload_test
class TestRestFirmware(TestAcceptanceBase):
def setUp(self):
super().setUp()
self.analysis_finished_event = Event()
self.elements_finished_analyzing = Value('i', 0)
self.db_backend_service = BackEndDbInterface(config=self.config)
self._start_backend(post_analysis=self._analysis_callback)
self.test_container_uid = '418a54d78550e8584291c96e5d6168133621f352bfc1d43cf84e81187fef4962_787'
time.sleep(2) # wait for systems to start
def tearDown(self):
self._stop_backend()
self.db_backend_service.shutdown()
super().tearDown()
def _analysis_callback(self, fo):
self.db_backend_service.add_analysis(fo)
self.elements_finished_analyzing.value += 1
if self.elements_finished_analyzing.value == 4 * 3: # container including 3 files times 3 plugins
self.analysis_finished_event.set()
def _rest_upload_firmware(self):
data = get_firmware_for_rest_upload_test()
rv = self.test_client.put('/rest/firmware', json=data, follow_redirects=True)
assert b'"status": 0' in rv.data, 'rest upload not successful'
assert self.test_container_uid.encode() in rv.data, 'uid not found in rest upload reply'
def _rest_get_analysis_result(self):
rv = self.test_client.get(f'/rest/firmware/{self.test_container_uid}', follow_redirects=True)
assert b'analysis_date' in rv.data, 'rest analysis download not successful'
assert b'software_components' in rv.data, 'rest analysis not successful'
assert b'"device_part": "test_part' in rv.data, 'device part not present'
def _rest_search(self):
query = urllib.parse.quote('{"device_class": "test_class"}')
rv = self.test_client.get(f'/rest/firmware?query={query}', follow_redirects=True)
assert self.test_container_uid.encode() in rv.data, 'test firmware not found in rest search'
def _rest_search_fw_only(self):
query = json.dumps({'sha256': self.test_container_uid.split('_')[0]})
rv = self.test_client.get(f'/rest/firmware?query={urllib.parse.quote(query)}', follow_redirects=True)
assert self.test_container_uid.encode() in rv.data, 'test firmware not found in rest search'
def _rest_update_analysis_bad_analysis(self):
query = urllib.parse.quote('["unknown_system"]')
rv = self.test_client.put(f'/rest/firmware/{self.test_container_uid}?update={query}', follow_redirects=True)
        assert 'Unknown analysis system'.encode() in rv.data, "rest analysis update should break on request of a non-existent system"
def _rest_update_analysis_success(self):
update = urllib.parse.quote(json.dumps(['crypto_material']))
rv = self.test_client.put(f'/rest/firmware/{self.test_container_uid}?update={update}', follow_redirects=True)
assert b'error_message' not in rv.data, 'Error on update request'
def _rest_check_new_analysis_exists(self):
rv = self.test_client.get(f'/rest/firmware/{self.test_container_uid}', follow_redirects=True)
response_data = json.loads(rv.data.decode())
assert response_data['firmware']['analysis']['crypto_material']
assert response_data['firmware']['analysis']['crypto_material']['analysis_date'] > response_data['firmware']['analysis']['software_components']['analysis_date']
def test_run_from_upload_to_show_analysis_and_search(self):
self._rest_upload_firmware()
self.analysis_finished_event.wait(timeout=15)
self.elements_finished_analyzing.value = 4 * 2 # only one plugin to update so we offset with 4 times 2 plugins
self.analysis_finished_event.clear()
self._rest_get_analysis_result()
self._rest_search()
self._rest_search_fw_only()
self._rest_update_analysis_bad_analysis()
self._rest_update_analysis_success()
self.analysis_finished_event.wait(timeout=10)
self._rest_check_new_analysis_exists()
| gpl-3.0 | -2,622,167,854,439,962,600 | 48.906977 | 168 | 0.687092 | false | 3.668376 | true | false | false |
andybondar/CloudFerry | devlab/tests/test_resource_migration.py | 1 | 16093 | import re
import config
import unittest
import subprocess
import functional_test
from time import sleep
from generate_load import Prerequisites
from filtering_utils import FilteringUtils
class ResourceMigrationTests(functional_test.FunctionalTest):
def setUp(self):
self.src_cloud = Prerequisites(cloud_prefix='SRC')
self.dst_cloud = Prerequisites(cloud_prefix='DST')
self.filtering_utils = FilteringUtils()
def validator(self, source_resources, dest_resources, resource_name):
if not source_resources <= dest_resources:
missing = source_resources - dest_resources
self.fail("Not all {resources} migrated. Missing resources on dest: {missing}".format(
resources=resource_name, missing=missing))
def validate_resource_parameter_in_dst_dict(self, src_list, dst_list,
resource_name='resource',
parameter='name'):
# Validating only uniq parameter's value
source_resources = set([x[parameter] for x in src_list])
dest_resources = set([x[parameter] for x in dst_list])
self.validator(source_resources, dest_resources, resource_name)
def validate_resource_parameter_in_dst(self, src_list, dst_list,
resource_name='resource',
parameter='name'):
# Validating only uniq parameter's value
source_resources = set([x.__dict__[parameter] for x in src_list])
dest_resources = set([x.__dict__[parameter] for x in dst_list])
self.validator(source_resources, dest_resources, resource_name)
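    # Note (added for clarity): the validate_* helpers in this class differ
    # only in how they read the compared parameter: from novaclient-style
    # objects (x.__dict__), from plain dicts, or from a neutron list response
    # keyed by resource_name. A typical call looks like:
    #   self.validate_resource_parameter_in_dst(src_flavors, dst_flavors,
    #                                           resource_name='flavor',
    #                                           parameter='name')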
def validate_neutron_resource_parameter_in_dst(self, src_list, dst_list,
resource_name='networks',
parameter='name'):
# Validating only uniq parameter's value
source_resources = set([x[parameter] for x in src_list[resource_name]])
dest_resources = set([x[parameter] for x in dst_list[resource_name]])
self.validator(source_resources, dest_resources, resource_name)
def test_migrate_keystone_users(self):
src_users = self.src_cloud.keystoneclient.users.list()
dst_users = self.dst_cloud.keystoneclient.users.list()
self.validate_resource_parameter_in_dst(src_users, dst_users,
resource_name='user',
parameter='name')
self.validate_resource_parameter_in_dst(src_users, dst_users,
resource_name='user',
parameter='email')
def test_migrate_keystone_roles(self):
src_roles = self.src_cloud.keystoneclient.roles.list()
dst_roles = self.dst_cloud.keystoneclient.roles.list()
self.validate_resource_parameter_in_dst(src_roles, dst_roles,
resource_name='role',
parameter='name')
def test_migrate_keystone_tenants(self):
src_tenants = self.src_cloud.keystoneclient.tenants.list()
dst_tenants = self.dst_cloud.keystoneclient.tenants.list()
self.validate_resource_parameter_in_dst(src_tenants, dst_tenants,
resource_name='tenant',
parameter='name')
self.validate_resource_parameter_in_dst(src_tenants, dst_tenants,
resource_name='tenant',
parameter='description')
def test_migrate_nova_keypairs(self):
src_keypairs = self.src_cloud.novaclient.keypairs.list()
dst_keypairs = self.dst_cloud.novaclient.keypairs.list()
self.validate_resource_parameter_in_dst(src_keypairs, dst_keypairs,
resource_name='keypair',
parameter='name')
self.validate_resource_parameter_in_dst(src_keypairs, dst_keypairs,
resource_name='keypair',
parameter='fingerprint')
def test_migrate_nova_flavors(self):
src_flavors = self.src_cloud.novaclient.flavors.list()
dst_flavors = self.dst_cloud.novaclient.flavors.list()
self.validate_resource_parameter_in_dst(src_flavors, dst_flavors,
resource_name='flavor',
parameter='name')
self.validate_resource_parameter_in_dst(src_flavors, dst_flavors,
resource_name='flavor',
parameter='ram')
self.validate_resource_parameter_in_dst(src_flavors, dst_flavors,
resource_name='flavor',
parameter='vcpus')
self.validate_resource_parameter_in_dst(src_flavors, dst_flavors,
resource_name='flavor',
parameter='disk')
# Id can be changed, but for now in CloudFerry we moving flavor with
# its id.
self.validate_resource_parameter_in_dst(src_flavors, dst_flavors,
resource_name='flavor',
parameter='id')
def test_migrate_nova_security_groups(self):
src_sec_gr = self.src_cloud.novaclient.security_groups.list()
dst_sec_gr = self.dst_cloud.novaclient.security_groups.list()
self.validate_resource_parameter_in_dst(src_sec_gr, dst_sec_gr,
resource_name='security_group',
parameter='name')
self.validate_resource_parameter_in_dst(src_sec_gr, dst_sec_gr,
resource_name='security_group',
parameter='description')
def test_migrate_glance_images(self):
src_images = self.src_cloud.glanceclient.images.list()
dst_images_gen = self.dst_cloud.glanceclient.images.list()
dst_images = [x.__dict__ for x in dst_images_gen]
filtering_data = self.filtering_utils.filter_images(src_images)
src_images = filtering_data[0]
self.validate_resource_parameter_in_dst_dict(src_images, dst_images,
resource_name='image',
parameter='name')
self.validate_resource_parameter_in_dst_dict(src_images, dst_images,
resource_name='image',
parameter='disk_format')
self.validate_resource_parameter_in_dst_dict(src_images, dst_images,
resource_name='image',
parameter='container_format')
self.validate_resource_parameter_in_dst_dict(src_images, dst_images,
resource_name='image',
parameter='size')
self.validate_resource_parameter_in_dst_dict(src_images, dst_images,
resource_name='image',
parameter='checksum')
def test_glance_images_not_in_filter_did_not_migrate(self):
src_images = self.src_cloud.glanceclient.images.list()
filtering_data = self.filtering_utils.filter_images(src_images)
dst_images_gen = self.dst_cloud.glanceclient.images.list()
dst_images = [x.__dict__['name'] for x in dst_images_gen]
images_filtered_out = filtering_data[1]
for image in images_filtered_out:
            self.assertTrue(image['name'] not in dst_images,
                            'Image migrated despite not being included in the '
                            'filter. Image info: \n{}'.format(image))
def test_migrate_neutron_networks(self):
src_nets = self.src_cloud.neutronclient.list_networks()
dst_nets = self.dst_cloud.neutronclient.list_networks()
self.validate_neutron_resource_parameter_in_dst(src_nets, dst_nets)
self.validate_neutron_resource_parameter_in_dst(
src_nets, dst_nets, parameter='provider:network_type')
def test_migrate_neutron_subnets(self):
src_subnets = self.src_cloud.neutronclient.list_subnets()
dst_subnets = self.dst_cloud.neutronclient.list_subnets()
self.validate_neutron_resource_parameter_in_dst(
src_subnets, dst_subnets, resource_name='subnets')
self.validate_neutron_resource_parameter_in_dst(
src_subnets, dst_subnets, resource_name='subnets',
parameter='gateway_ip')
self.validate_neutron_resource_parameter_in_dst(
src_subnets, dst_subnets, resource_name='subnets',
parameter='cidr')
def test_migrate_neutron_routers(self):
src_routers = self.src_cloud.neutronclient.list_routers()
dst_routers = self.dst_cloud.neutronclient.list_routers()
self.validate_neutron_resource_parameter_in_dst(
src_routers, dst_routers, resource_name='routers')
def test_migrate_vms_parameters(self):
src_vms_names = self.src_cloud.novaclient.servers.list(
search_opts={'all_tenants': 1})
dst_vms_names = self.dst_cloud.novaclient.servers.list(
search_opts={'all_tenants': 1})
src_vms = [x.__dict__ for x in src_vms_names]
dst_vms = [x.__dict__ for x in dst_vms_names]
filtering_data = self.filtering_utils.filter_vms(src_vms)
src_vms = filtering_data[0]
src_vms = [vm for vm in src_vms if vm['status'] != 'ERROR']
self.validate_resource_parameter_in_dst_dict(
src_vms, dst_vms, resource_name='VM', parameter='name')
self.validate_resource_parameter_in_dst_dict(
src_vms, dst_vms, resource_name='VM', parameter='config_drive')
self.validate_resource_parameter_in_dst_dict(
src_vms, dst_vms, resource_name='VM', parameter='key_name')
def test_migrate_cinder_volumes(self):
src_volume_list = self.src_cloud.cinderclient.volumes.list(
search_opts={'all_tenants': 1})
dst_volume_list = self.dst_cloud.cinderclient.volumes.list(
search_opts={'all_tenants': 1})
self.validate_resource_parameter_in_dst(
src_volume_list, dst_volume_list, resource_name='volume',
parameter='display_name')
self.validate_resource_parameter_in_dst(
src_volume_list, dst_volume_list, resource_name='volume',
parameter='size')
@unittest.skip("Temporarily disabled: snapshots doesn't implemented in "
"cinder's nfs driver")
def test_migrate_cinder_snapshots(self):
src_volume_list = self.src_cloud.cinderclient.volume_snapshots.list(
search_opts={'all_tenants': 1})
dst_volume_list = self.dst_cloud.cinderclient.volume_snapshots.list(
search_opts={'all_tenants': 1})
self.validate_resource_parameter_in_dst(
src_volume_list, dst_volume_list, resource_name='volume',
parameter='display_name')
self.validate_resource_parameter_in_dst(
src_volume_list, dst_volume_list, resource_name='volume',
parameter='size')
def test_migrate_tenant_quotas(self):
"""
        Validate that tenant quotas were migrated to the correct tenant
"""
def _delete_id_from_dict(_dict):
for key in _dict:
del _dict[key]['id']
def check_and_delete_keys(tenant):
return {key: dst_quotas[tenant][key] for key in dst_quotas[tenant]
if key in src_quotas[tenant]}
src_quotas = {i.name: self.src_cloud.novaclient.quotas.get(i.id)._info
for i in self.src_cloud.keystoneclient.tenants.list()}
dst_quotas = {i.name: self.dst_cloud.novaclient.quotas.get(i.id)._info
for i in self.dst_cloud.keystoneclient.tenants.list()}
# Delete tenant's ids
_delete_id_from_dict(src_quotas)
_delete_id_from_dict(dst_quotas)
for tenant in src_quotas:
self.assertIn(tenant, dst_quotas,
'Tenant %s is missing on dst' % tenant)
# Delete quotas which we have on dst but do not have on src
_dst_quotas = check_and_delete_keys(tenant)
self.assertDictEqual(
src_quotas[tenant], _dst_quotas,
'Quotas for tenant %s on src and dst are different' % tenant)
@unittest.skip("Temporarily disabled: test failed and should be fixed")
def test_ssh_connectivity_by_keypair(self):
def retry_cmd_execute(cmd):
            timeout = 300  # retry for up to ~300 seconds (1s between attempts)
for i in range(timeout):
try:
subprocess.check_output(cmd, shell=True)
return
except Exception:
sleep(1)
raise RuntimeError("Command %s was failed" % cmd)
dst_key_name = 'test_prv_key.pem' # random name for new key
        ip_regexp = '.+?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).+'
ip = re.match(ip_regexp, self.dst_cloud.auth_url).group(1)
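        # cmd is an ssh wrapper: '%s' is substituted with each command below,
        # which then runs on the dst cloud host extracted from auth_url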
cmd = 'ssh -i {0} root@{1} "%s"'.format(config.dst_prv_key_path, ip)
        # create private key on dst node
create_key_cmd = "echo '{0}' > {1}".format(
config.private_key['id_rsa'], dst_key_name)
subprocess.check_output(cmd % create_key_cmd, shell=True)
cmd_change_rights = 'chmod 400 %s' % dst_key_name
subprocess.check_output(cmd % cmd_change_rights, shell=True)
# find vm with valid keypair
vms = self.dst_cloud.novaclient.servers.list(
search_opts={'all_tenants': 1})
for _vm in vms:
if 'keypair_test' in _vm.name:
vm = _vm
break
else:
raise RuntimeError('VM for current test was not spawned')
# get net id for ssh through namespace
net_list = self.dst_cloud.neutronclient.list_networks()['networks']
for net in net_list:
if net['name'] in vm.networks:
net_id = net['id']
ip_address = vm.networks[net['name']].pop()
break
else:
raise RuntimeError(
"Networks for vm %s were not configured" % vm.name)
cmd_ssh_to_vm = 'sudo ip netns exec {2} ssh' \
' -o StrictHostKeyChecking=no -i {0} root@{1} date'
cmd_ssh_to_vm = cmd_ssh_to_vm.format(dst_key_name, ip_address,
'qdhcp-' + net_id)
        # make sure port 22 is open in the sec group
sec_grps = self.dst_cloud.get_sec_group_id_by_tenant_id(vm.tenant_id)
for sec_gr in sec_grps:
try:
self.dst_cloud.create_security_group_rule(
sec_gr, vm.tenant_id, protocol='tcp', port_range_max=22,
port_range_min=22, direction='ingress')
except Exception:
pass
try:
retry_cmd_execute(cmd % cmd_ssh_to_vm)
finally:
subprocess.check_output(cmd % 'rm ' + dst_key_name, shell=True)
| apache-2.0 | -5,646,999,895,222,576,000 | 48.365031 | 98 | 0.546511 | false | 4.319109 | true | false | false |
christippett/django-postmark-inbound | postmark_inbound/models.py | 1 | 3194 | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from .settings import inbound_mail_options as option
@python_2_unicode_compatible
class InboundMail(models.Model):
from_name = models.CharField(blank=True, max_length=255)
from_email = models.EmailField(max_length=254)
to_email = models.CharField(blank=True, max_length=255)
cc_email = models.CharField(blank=True, max_length=255)
bcc_email = models.CharField(blank=True, max_length=255)
original_recipient = models.CharField(blank=True, max_length=255)
subject = models.CharField(blank=True, max_length=255)
message_id = models.CharField(blank=True, max_length=255)
reply_to = models.CharField(blank=True, max_length=255)
mailbox_hash = models.CharField(blank=True, max_length=255)
date = models.DateTimeField()
text_body = models.TextField(blank=True)
html_body = models.TextField(blank=True)
stripped_text_reply = models.TextField(blank=True)
tag = models.CharField(blank=True, max_length=255)
def __str__(self):
return ('%s: %s' % (self.from_email, self.subject))
def has_attachment(self):
return self.attachments.all().count() > 0
has_attachment.boolean = True
has_attachment.short_description = 'Attachment'
@property
def from_full(self):
return self.address_details.get(address_type='FROM')
@property
def to_full(self):
return self.address_details.filter(address_type='TO')
@property
def cc_full(self):
return self.address_details.filter(address_type='CC')
@property
def bcc_full(self):
return self.address_details.filter(address_type='BCC')
@python_2_unicode_compatible
class InboundMailHeader(models.Model):
parent_mail = models.ForeignKey(InboundMail, related_name='headers', on_delete=models.CASCADE)
name = models.CharField(max_length=255)
value = models.TextField(blank=True)
def __str__(self):
return ('%s: %s' % (self.name, self.value))
@python_2_unicode_compatible
class InboundMailAttachment(models.Model):
parent_mail = models.ForeignKey(InboundMail, related_name='attachments', on_delete=models.CASCADE)
name = models.CharField(max_length=255)
content_type = models.CharField(max_length=255)
content = models.FileField(upload_to=option.ATTACHMENT_UPLOAD_TO)
content_id = models.CharField(blank=True, max_length=255)
content_length = models.IntegerField()
def __str__(self):
return ('%s (%s)' % (self.name, self.content_type))
# Declare sources of email addresses
ADDRESS_TYPES = tuple(map(lambda x: (x, x), ['FROM', 'TO', 'CC', 'BCC']))
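# Evaluates to (('FROM', 'FROM'), ('TO', 'TO'), ('CC', 'CC'), ('BCC', 'BCC'))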
@python_2_unicode_compatible
class InboundMailDetail(models.Model):
parent_mail = models.ForeignKey(InboundMail, related_name='address_details', on_delete=models.CASCADE)
address_type = models.CharField(choices=ADDRESS_TYPES, max_length=10)
email = models.EmailField(blank=True, max_length=254)
name = models.CharField(blank=True, max_length=255)
mailbox_hash = models.CharField(blank=True, max_length=255)
def __str__(self):
return ('%s (%s)' % (self.email, self.address_type))
| mit | 3,361,655,656,052,681,000 | 36.139535 | 106 | 0.701628 | false | 3.475517 | false | false | false |
arhote/exchange | exchange/themes/migrations/0001_initial.py | 1 | 4513 | from __future__ import unicode_literals
from django.db import migrations, models
import exchange.themes.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Theme',
fields=[
('id', models.AutoField(
verbose_name='ID',
serialize=False,
auto_created=True,
primary_key=True
)),
('name', models.CharField(max_length=28)),
('description', models.CharField(max_length=64, blank=True)),
('default_theme', models.BooleanField(
default=False,
editable=False
)),
('active_theme', models.BooleanField(default=False)),
('title', models.CharField(
default=None,
max_length=32,
null=True,
verbose_name=b'Landing Page Title',
blank=True
)),
('tagline', models.CharField(
default=None,
max_length=64,
null=True,
verbose_name=b'Landing Page Tagline',
blank=True
)),
('running_hex', exchange.themes.fields.ColorField(
default=b'0F1A2C',
max_length=7,
null=True,
verbose_name=b'Header Footer Color',
blank=True
)),
('running_text_hex', exchange.themes.fields.ColorField(
default=b'FFFFFF',
max_length=7,
null=True,
verbose_name=b'Header Footer Text Color',
blank=True
)),
('hyperlink_hex', exchange.themes.fields.ColorField(
default=b'0F1A2C',
max_length=7,
null=True,
verbose_name=b'Hyperlink Color',
blank=True
)),
('pb_text', models.CharField(
default=b'Boundless Spatial',
max_length=32,
blank=True,
help_text=b'Text for the Powered by section in the footer',
null=True,
verbose_name=b'Footer Link Text'
)),
('pb_link', models.URLField(
default=b'http://boundlessgeo.com/',
blank=True,
help_text=b'Link for the Powered by section in the footer',
null=True,
verbose_name=b'Footer Link URL'
)),
('docs_link', models.URLField(
default=None,
blank=True,
help_text=b'Link for the Documentation',
null=True,
verbose_name=b'Documentation Link URL'
)),
('docs_text', models.CharField(
default=b'Documentation',
max_length=32,
blank=True,
help_text=b'Text for the documentation link',
null=True,
verbose_name=b'Documentation Text'
)),
('background_logo', models.ImageField(
default=None,
upload_to=b'theme/img/',
blank=True,
help_text=b'Note: will resize to 1440px (w) 350px (h)',
null=True,
verbose_name=b'Background Image'
)),
('primary_logo', models.ImageField(
default=None,
upload_to=b'theme/img/',
blank=True,
help_text=b'Note: will resize to height 96px',
null=True,
verbose_name=b'Primary Logo'
)),
('banner_logo', models.ImageField(
default=None,
upload_to=b'theme/img/',
blank=True,
help_text=b'Note: will resize to height 35px',
null=True,
verbose_name=b'Header Logo'
)),
],
),
]
| gpl-3.0 | -7,043,302,363,441,026,000 | 36.92437 | 79 | 0.41458 | false | 5.163616 | false | false | false |
mirestrepo/voxels-at-lems | ply_util/thresh_ply.py | 1 | 3284 | #!/usr/bin/env python
# encoding: utf-8
"""
Author: Isabel Restrepo
Script to threshold a .ply file based on percentiles.
CAUTION! - This method is very memory inefficient
"""
import os
import sys
import numpy as np
from scipy import stats
from optparse import OptionParser
def write_ply(file_out, data):
#Create header
rows, cols = data.shape
header = ('ply\n' +
'format ascii 1.0\n' +
'element vertex ' + str(rows) + '\n' +
'property float x\nproperty float y\nproperty float z\n' +
'property float nx\nproperty float ny\nproperty float nz\n' +
'property float prob\nproperty float vis\nproperty float nmag\n' +
'property uchar diffuse_red\nproperty uchar diffuse_green\nproperty uchar diffuse_blue\n'+
'end_header\n');
fid = open( file_out , 'w' )
fid.write( header )
np.savetxt( fid , data , fmt='%.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f %.5f %d %d %d', delimiter=' ')
fid.close()
#Threshold using pvn and bounding box
def thresh_bbox(file_in, file_out,
min_pt, max_pt):
fid = open(file_in, 'r')
data_full = np.genfromtxt(fid, dtype=float, delimiter=' ', skip_header=16);
fid.close()
data = data_full[(data_full[:,0] > min_pt[0]), :]
data = data[(data[:,0] < max_pt[0]), :]
data = data[(data[:,1] > min_pt[1]), :]
data = data[(data[:,1] < max_pt[1]), :]
data = data[(data[:,2] > min_pt[2]), :]
data = data[(data[:,2] < max_pt[2]), :]
write_ply(file_out, data)
#Threshold using a bounding sphere
def thresh_bsphere(file_in, file_out,
centroid, max_pt):
fid = open(file_in, 'r')
data_full = np.genfromtxt(fid, dtype=float, delimiter=' ', skip_header=16);
fid.close()
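    # radii are kept squared: radXY bounds the squared XY distance from the
    # centroid and radZ bounds the squared Z offset, so no sqrt is needed below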
rad = (max_pt - centroid) * (max_pt - centroid);
radXY = rad[0] + rad[1]
radZ = rad[2]
dx = (data_full[:,0] - centroid[0])*(data_full[:,0] - centroid[0])
dy = (data_full[:,1] - centroid[1])*(data_full[:,1] - centroid[1])
indeces = (dx + dy) < radXY
data = data_full[indeces, :]
dz = (data[:,2] - centroid[2])*(data[:,2] - centroid[2])
data = data[ dz < radZ, :]
write_ply(file_out, data)
def thresh_pvn( file_in, out_basename):
fid = open(file_in, 'r')
data = np.genfromtxt(fid, dtype=float, delimiter=' ', skip_header=16);
fid.close()
#normalize visibility
data[:,7] = data[:,7]/(data[:,7].max());
#normalize nmag
data[:,8] = data[:,8]/(data[:,8].max());
percentile = [90, 95, 99];
data_measure = data[:,6] *data[:,7] *data[:,8]
for p in percentile:
print 'Percentile: ' , p
file_out = out_basename + '_' + str(p) + ".ply"
indices = (data_measure > stats.scoreatpercentile(data_measure, p));
filtered_data = data[indices, :];
write_ply(file_out, filtered_data)
if __name__ == "__main__":
#######################################################
# handle inputs #
#######################################################
parser = OptionParser()
parser.add_option("-i", action="store", type="string", dest="file_in", default="", help=".PLY file to threshold")
parser.add_option("-o", action="store", type="string", dest="out_basename", default="", help="Output files are saved as out_basename_%.ply")
(opts, args) = parser.parse_args()
thresh_pvn(opts.file_in,opts.out_basename)
| bsd-2-clause | 4,835,252,583,100,231,000 | 30.883495 | 144 | 0.584957 | false | 3.063433 | false | false | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/ubuntuone-client/ubuntuone/syncdaemon/u1fsfsm.py | 1 | 182120 | """This is a generated python file"""
# make pylint accept this
# pylint: disable-msg=C0301
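# state_machine['events'] maps each event name to a list of transition rules;
# each rule pairs the expected STATE/PARAMETERS with the resulting STATE_OUT
# and the ACTION_FUNC handler to invoke for that transition.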
state_machine = {'events': {u'AQ_DIR_DELETE_ERROR': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we couldnt delete from the server. Re create.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we couldnt delete from the server. Re create.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT, recreate from deleted uuid',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'PANIC',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
u'AQ_DIR_DELETE_OK': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something the user recreated, someone else is taking care of uploading this changes',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something the user recreated, someone else is taking care of uploading this changes',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'somehow we lost ordering of stuff and we got a new file and changes since we tried to delete this file',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DIR_NEW_ERROR': [{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'filedir_error_in_creation',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DIR_NEW_OK': [{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u"it's a file now",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u'the dir was now gone',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, server_uuid=server_uuid)\nPANIC',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, server_uuid=server_uuid)\nRESCAN',
'ACTION_FUNC': u'new_local_dir_created',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DOWNLOAD_DOES_NOT_EXIST': [{'ACTION': u'',
'ACTION_FUNC': u'delete_file',
'COMMENTS': u"Directory doesn't exist anymore, remove it",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'delete_file',
'COMMENTS': u"File doesn't exist anymore, remove it",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'conflict_and_delete',
'COMMENTS': u"File doesn't exist on server, but has local changes",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DOWNLOAD_ERROR': [{'ACTION': u'md.remove_partial(uuid);',
'ACTION_FUNC': u'remove_partial',
'COMMENTS': u'error while downloading, remove the partial file',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.remove_partial(uuid);',
'ACTION_FUNC': u'remove_partial',
'COMMENTS': u'error while downloading, remove the partial file',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'PANIC',
'ACTION_FUNC': u'remove_partial',
'COMMENTS': u'Should DESPAIR, but ATM we handle this in the handle_AQ_DOWNLOAD_ERROR as we have an extra error type returned by AQ when the .partial file is deleted by the user',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_DOWNLOAD_FINISHED': [{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'file was removed while we where downloading',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'NONE, T, F is a falacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'NONE, F, T is a falacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'SERVER, T, T is a falacy (SERVER implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.commit_partial(uuid, local_hash=hash)',
'ACTION_FUNC': u'commit_file',
'COMMENTS': u'this is the vainilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'not what we want',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'another download is already in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'LOCAL, T, T is a falacy (LOCAL implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'cancelled by e.g. SV_HASH_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we dont download directories anymore',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'!NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'merge_from_partial(uuid)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we dont download directories anymore',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we dont download directories anymore',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_FILE_DELETE_ERROR': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we couldnt delete from the server. Re create.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'md.create(path=path, uuid=uuid, type=type) aq.query(uuid=uuid)',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we couldnt delete from the server. Re create.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT, recreate from deleted uuid',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'',
'COMMENTS': u'the user deleted something we could not delete from the server and replaced it with stuff. Move user files to conflict and re download stuff from the server',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'PANIC',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
u'AQ_FILE_DELETE_OK': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'/c',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something the user recreated, someone else is taking care of uploading this changes',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'somehow we lost ordering of stuff and we got a new file and changes since we tried to delete this file',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_FILE_MOVE_OK': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'deleted locally',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}}],
u'AQ_FILE_NEW_ERROR': [{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'filedir_error_in_creation',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'release_marker_error',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_FILE_NEW_OK': [{'ACTION': u'md.set(mdid, server_uuid=server_uuid)',
'ACTION_FUNC': u'new_local_file_created',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, server_uuid=server_uuid)',
'ACTION_FUNC': u'new_local_file_created',
'COMMENTS': u'we got IN_FILE_CHANGED and HQ_HASH_NEW between IN_FILE_NEW and AQ_FILE_NEW_OK',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u'file deleted locally',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u"Node got node_id with a SV_FILE_NEW and now it's uploading something",
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.uuid_map.set(marker, new_id)',
'ACTION_FUNC': u'release_marker_ok',
'COMMENTS': u"it's a directory now",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_MOVE_ERROR': [{'ACTION': u'md.create(path=path, uuid=target_uuid, type=type)\naq.query(uuid=target_uuid)',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'deleted after local move and move failed on server. ',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'move file to conflict\nquery(uuid=source.parent)\nquery(uuid=dest.parent)\n',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'something bad happened. Conflict and rescan',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_MOVE_OK': [{'ACTION': u'pass',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'deleted after move',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'clean_move_limbo',
'COMMENTS': u'everything good',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_UNLINK_ERROR': [{'ACTION': u'remove the node from trash',
'ACTION_FUNC': u'remove_trash',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_UNLINK_OK': [{'ACTION': u'remove the node from trash',
'ACTION_FUNC': u'remove_trash',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'AQ_UPLOAD_ERROR': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'*'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'*',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'*'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'F',
u'not_available': u'T'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'T',
u'not_available': u'F'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we never try to upload directories',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'!NA',
u'not_available': u'!NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
u'AQ_UPLOAD_FINISHED': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'reput_file_from_ok',
'COMMENTS': u'we finished a download, but this download should have been cancelled. So the real upload will conflict. Reschedule upload.',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'cancel download; commit upload',
'ACTION_FUNC': u'cancel_and_commit',
'COMMENTS': u'we couldnt cancel an upload and we overwrote what was on the server',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'commit_upload',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.upload_finished(mdid, server_hash=hash)',
'ACTION_FUNC': u'reput_file_from_ok',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we never try to upload directories',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}}],
u'FS_DIR_CREATE': [{'ACTION': u'mdid = md.create(path=path)\naq.makefile(mdid)',
'ACTION_FUNC': u'new_local_dir',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_DIRECTORY_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_DIRECTORY_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'...?',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_DIRECTORY_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}}],
u'FS_DIR_DELETE': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'this is the result of a delete we did',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.delete_file(uuid, type=type); md.remove(uuid)',
'ACTION_FUNC': u'delete_on_server',
'COMMENTS': u'when deleting files we remove the metadata, server rescan will find this again and downloadi it if we are shutdown before this delete has gone up. Generations may break this. ',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'delete of file when it should be a dir is bad',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'delete of file when it should be a dir is bad',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'md.cancel_download(md); md.remove(uuid)',
'ACTION_FUNC': u'deleted_dir_while_downloading',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}}],
u'FS_DIR_MOVE': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'side efect of local move',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.move()',
'ACTION_FUNC': u'client_moved',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'cancel upload; move; restart upload',
'ACTION_FUNC': u'moved_dirty_local',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'cancel_download; move; query',
'ACTION_FUNC': u'moved_dirty_server',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}}],
u'FS_FILE_CLOSE_WRITE': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'changes from a file that no longer exists',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'event cant happen in directories',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'hq.insert(path)',
'ACTION_FUNC': u'calculate_hash',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'FS_FILE_CREATE': [{'ACTION': u'mdid = md.create(path=path)\naq.makefile(mdid)',
'ACTION_FUNC': u'new_local_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'ignore this. We created the file.',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"As we're ignoring some files, we are in the situation where some editors move the file to something we ignore, and then create the file again, so we receive the FS_FILE_CREATE for a node that we actually have",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u"Same as before, but we're uploading that node we already have.",
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'log warning',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'duplicate IN_FILE_NEW',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}}],
u'FS_FILE_DELETE': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'this is the result of a delete we did',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.delete_file(uuid, type=type); md.remove(uuid)',
'ACTION_FUNC': u'delete_on_server',
'COMMENTS': u'when deleting files we remove the metadata, server rescan will find this again and downloadi it if we are shutdown before this delete has gone up. Generations may break this. ',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.cancel_upload(uuid); aq.delete_file(uuid); md.remove(uuid)',
'ACTION_FUNC': u'cancel_upload_and_delete_on_server',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.cancel_download(uuid); aq.delete_file(uuid)\n md.remove(uuid)',
'ACTION_FUNC': u'cancel_download_and_delete_on_server',
'COMMENTS': u'This is policy. We could declare this to be a conflict. But we should assume that the client knows what he is doing',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}}],
u'FS_FILE_MOVE': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'side efect of local move',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'aq.move()',
'ACTION_FUNC': u'client_moved',
'COMMENTS': u'vanilla case',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'cancel upload; move; restart upload',
'ACTION_FUNC': u'moved_dirty_local',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u'cancel_download; move; query',
'ACTION_FUNC': u'moved_dirty_server',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'='}}],
u'HQ_HASH_ERROR': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'it was deleted at some point',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we should never be hashing a directory',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'hq.insert(path)',
'ACTION_FUNC': u'calculate_hash',
'COMMENTS': u'we need to re send the hash to the HQ',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'HQ_HASH_NEW': [{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, stat=stat)',
'ACTION_FUNC': u'save_stat',
'COMMENTS': u'hash == local_hash == server_hash; nothing changed but the file was \u201ctouched\u201d',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'reput_file',
'COMMENTS': u'just to be on the safe side, we try to put the file again.',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, local_hash=hash)\naq.putfile(*mdid)',
'ACTION_FUNC': u'put_file',
'COMMENTS': u'plain \u201cuser modified the file on this machine\u201d case',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we dont send directories to hq. This is old. Ignore.',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'the directories are the same',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, local_hash=hash)\naq.putfile(*mdid)',
'ACTION_FUNC': u'reput_file',
'COMMENTS': u'another upload is in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'LOCAL',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(mdid, local_hash=hash)\naq.cancel_download(mdid)',
'ACTION_FUNC': u'converges_to_server',
'COMMENTS': u'local file already has server changes that are being downloaded',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'file_conflict',
'COMMENTS': u'local file was modified by the user while download of next version was in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we dont send directories to hq. This is old. Ignore.',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we dont send directories to hq. This is old. Ignore.',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'LR_SCAN_ERROR': [{'ACTION': u'',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'it was deleted at some point',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'we should never be local-scanning a file',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'RESCAN',
'ACTION_FUNC': u'rescan_dir',
'COMMENTS': u'we need to re start the local rescan',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'SV_DIR_NEW': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type)\naq.query(uuid=uuid)',
'ACTION_FUNC': u'new_dir',
'COMMENTS': u'good case, we send a query to see if there are anychanges we need to merge',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'new_dir_on_server_with_local_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}},
{'ACTION': u'list the dir to get new info and converge',
'ACTION_FUNC': u'new_dir_on_server_with_local_dir',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'}}],
u'SV_FILE_DELETED': [{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'we deleted something and the server did the same',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.remove(uuid)',
'ACTION_FUNC': u'delete_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'CONFLICT; md.remove(uuid)',
'ACTION_FUNC': u'conflict_and_delete',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'md.cancel_download(uuid); md.remove(uuid)',
'ACTION_FUNC': u'file_gone_wile_downloading',
'COMMENTS': u'we are still downloading some content we dont care about anymore',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}},
{'ACTION': u'md.cancel_download(md); md.remove(uuid)',
'ACTION_FUNC': u'file_gone_wile_downloading',
'COMMENTS': u'we are still downloading some content we dont care about anymore',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'}}],
u'SV_FILE_NEW': [{'ACTION': u'md.create(path=path, uuid=uuid, type=type)\naq.query(uuid=uuid)',
'ACTION_FUNC': u'new_file',
'COMMENTS': u'good case, we send a query to see if there are anychanges we need to merge',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'}},
{'ACTION': u"Didn't find the node by node_id, but found it by path",
'ACTION_FUNC': u'new_server_file_having_local',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'SV_HASH_NEW': [{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'*',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'server side changes while trying to delete something',
'PARAMETERS': {u'hash_eq_local_hash': u'*',
u'hash_eq_server_hash': u'*',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'NA',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'no news is good news',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'NONE, T, F is a falacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'NONE, F, T is a falacy (NONE implies server_hash == local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.set(uuid, server_hash=hash)\npartial = md.create_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'get_file',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'SERVER, T, T is a impossible (SERVER implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'pass',
'ACTION_FUNC': u'nothing',
'COMMENTS': u'A download for a content object with the same hash is already in progress',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.remove_partial(uuid)\nmd.set(uuid, server_hash=hash)\naq.cancel_download(uuid)',
'ACTION_FUNC': u'server_file_changed_back',
'COMMENTS': u"the local file is equal to the file that is now on the server, but a download is in progress from an older version of the server. Removing the partial ensures that we never complete the download (thus we avoid the dreaded !!! state) Note that this makes it important for AQ_DOWNLOAD_FINISHED to 'pass' on downloads that aren't partials",
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_download(uuid)\nmd.set(uuid, server_hash=hash)\npartial = md.get_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'reget_file',
'COMMENTS': u'a download was in progress but the server changed again. Note that this makes it important for AQ_DOWNLOAD_FINISHED to check the server hash.',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'NA',
'ACTION_FUNC': u'',
'COMMENTS': u'LOCAL, T, T is a impossible (LOCAL implies server_hash != local_hash)',
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'aq.upload()',
'ACTION_FUNC': u'reput_file_from_local',
'COMMENTS': u'The upload was interrupted, just try it again. ',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_upload(uuid)\nmd.set(uuid, server_hash=hash)',
'ACTION_FUNC': u'server_file_now_matches',
'COMMENTS': u"there's a small chance that the cancel fails, in which case we're simply redundant",
'PARAMETERS': {u'hash_eq_local_hash': u'T',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'CONFLICT',
'ACTION_FUNC': u'local_file_conflict',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'F',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'F'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'pass',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'md.set(uuid, server_hash=hash)\npartial = md.create_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_download(uuid) \nmd.set(uuid, server_hash=hash)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'T',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'SERVER',
u'has_metadata': u'=',
u'is_directory': u'='}},
{'ACTION': u'aq.cancel_download(uuid)\nmd.set(uuid, server_hash=hash)\npartial = md.get_partial(uuid)\naq.getcontent(*partial)',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'cant set hash on directories',
'PARAMETERS': {u'hash_eq_local_hash': u'!NA',
u'hash_eq_server_hash': u'F',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'T'},
'STATE_OUT': {u'changed': u'NONE',
u'has_metadata': u'=',
u'is_directory': u'='}}],
u'SV_MOVED': [{'ACTION': u'DESPAIR',
'ACTION_FUNC': u'DESPAIR',
'COMMENTS': u'',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'NA'},
'STATE_OUT': {u'changed': u'*',
u'has_metadata': u'*',
u'is_directory': u'*'}},
{'ACTION': u'md.move(uuid)',
'ACTION_FUNC': u'server_moved',
'COMMENTS': u'all pending changes should arrive to the moved file\naq should remove the destination if its there',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'!SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}},
{'ACTION': u'cancel_download; move; query',
'ACTION_FUNC': u'server_moved_dirty',
'COMMENTS': u'we got a move while we were downloading it',
'PARAMETERS': {u'hash_eq_local_hash': u'NA',
u'hash_eq_server_hash': u'NA',
u'not_authorized': u'NA',
u'not_available': u'NA'},
'STATE': {u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'*'},
'STATE_OUT': {u'changed': u'=',
u'has_metadata': u'T',
u'is_directory': u'='}}]},
'invalid': [{u'changed': u'NONE',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'SERVER',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'LOCAL',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'NONE',
u'has_metadata': u'F',
u'is_directory': u'F'},
{u'changed': u'SERVER',
u'has_metadata': u'F',
u'is_directory': u'F'},
{u'changed': u'LOCAL',
u'has_metadata': u'F',
u'is_directory': u'F'},
{u'changed': u'NONE',
u'has_metadata': u'F',
u'is_directory': u'NA'},
{u'changed': u'SERVER',
u'has_metadata': u'F',
u'is_directory': u'NA'},
{u'changed': u'LOCAL',
u'has_metadata': u'F',
u'is_directory': u'NA'},
{u'changed': u'NONE',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'SERVER',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'NA',
u'has_metadata': u'T',
u'is_directory': u'NA'},
{u'changed': u'NA',
u'has_metadata': u'T',
u'is_directory': u'F'},
{u'changed': u'NA',
u'has_metadata': u'T',
u'is_directory': u'T'},
{u'changed': u'LOCAL',
u'has_metadata': u'T',
u'is_directory': u'T'},
{u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'T'},
{u'changed': u'NA',
u'has_metadata': u'F',
u'is_directory': u'F'}],
'parameters': {u'hash_eq_local_hash': u'hash == md.local_hash',
u'hash_eq_server_hash': u'hash == md.server_hash',
u'not_authorized': u'error == not authorized',
u'not_available': u'error == not available'},
'state_vars': {u'changed': u'changed',
u'has_metadata': u'\u2203 md',
u'is_directory': u'isDirectory'}} | gpl-3.0 | 1,302,812,209,180,732,200 | 70.504122 | 381 | 0.270415 | false | 5.572827 | false | false | false |
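# The table that ends above is a state-transition spec: each event name maps to a
# list of rows, and every row pairs a STATE / PARAMETERS pattern ('T'/'F' flags,
# '*' wildcard, '!X' negation, 'NA' not applicable) with the ACTION_FUNC to run
# and the STATE_OUT to move to.  A minimal lookup over a table shaped like that
# literal could look as follows (an illustrative sketch, not code from the
# original project; 'NA' is treated here as a literal value for simplicity):
def find_transition(table, event, state, parameters):
    def accepts(pattern, value):
        if pattern == u'*':
            return True
        if pattern.startswith(u'!'):
            return value != pattern[1:]
        return value == pattern

    for row in table.get(event, []):
        state_ok = all(accepts(p, state.get(k)) for k, p in row['STATE'].items())
        params_ok = all(accepts(p, parameters.get(k)) for k, p in row['PARAMETERS'].items())
        if state_ok and params_ok:
            return row['ACTION_FUNC'], row['STATE_OUT']
    return None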
ulikoehler/ODBPy | ODBPy/ComponentParser.py | 1 | 3919 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ODB++ surface parser components
"""
import re
from collections import namedtuple
from .Decoder import DecoderOption, run_decoder
from .Structures import *
from .Utils import try_parse_number
__all__ = ["components_decoder_options", "parse_components",
"consolidate_component_tags", "Component", "map_components_by_name"]
_prp_re = re.compile(r"^PRP\s+(\S+)\s+'([^']+)'\s*$") # Property record
# _prp_re.search("PRP Name 'EEUFR1H470'")
_top_re = re.compile(r"^TOP\s+(\d+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(N|M|X|Y|XY)\s+(\d+)\s+(\d+)\s+(\S+)\s*$") # Toeprint record
_cmp_re = re.compile(r"^CMP\s+(\d+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(-?[\.\d]+)\s+(N|M|X|Y|XY)\s+(\S+)\s+(\S+)\s*(;\s*.+?)?$") # component record
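# Illustrative record lines the regexes above are meant to match (assumed
# examples, not taken from a real ODB++ job):
#   _top_re: "TOP 0 51.3 40.1 270.0 N 5 0 R101-1"
#   _cmp_re: "CMP 0 51.3 40.1 270.0 N R101 10k"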
ComponentRecordTag = namedtuple("ComponentRecordTag",[
"package_ref", "location", "rotation", "mirror", "name", "part_name", "attributes"])
PropertyRecordTag = namedtuple("PropertyRecord", ["key", "value"])
ToeprintRecord = namedtuple("ToeprintRecord", [
"pin_num", "location", "rotation", "mirrored", "net_num", "subnet_num", "toeprint_name"])
Component = namedtuple("Component", [
"name", "part_name", "location", "rotation", "mirror", "attributes", "properties", "toeprints"])
def consolidate_component_tags(tags):
component = None # Expect only one
properties = {}
toeprints = []
for tag in tags:
if isinstance(tag, ComponentRecordTag):
if component is not None:
raise ValueError("Multiple CMP records in section. Last one: {}".format(tag))
component = tag
if isinstance(tag, PropertyRecordTag):
properties[tag.key] = tag.value
if isinstance(tag, ToeprintRecord):
toeprints.append(tag)
if component is None:
raise ValueError("No CMP record in section")
return Component(
component.name, component.part_name, component.location,
component.rotation, component.mirror, component.attributes,
properties, toeprints
)
def _parse_prp(match):
key, value = match.groups()
return PropertyRecordTag(key, value)
def _parse_top(match):
pin_num, x, y, rot, mirror, net_num, subnet_num, toeprint_name = match.groups()
return ToeprintRecord(
int(pin_num),
Point(float(x), float(y)),
float(rot),
mirror_map[mirror],
int(net_num),
int(subnet_num),
try_parse_number(toeprint_name)
)
def _parse_cmp(match):
pkg_ref, x, y, rot, mirror, name, part_name, attributes = match.groups()
attributes = parse_attributes(attributes[1:]) \
if attributes is not None else {}
return ComponentRecordTag(
int(pkg_ref),
Point(float(x), float(y)),
float(rot),
mirror_map[mirror],
try_parse_number(name.strip()),
try_parse_number(part_name.strip()),
attributes
)
components_decoder_options = [
DecoderOption(_prp_re, _parse_prp),
DecoderOption(_top_re, _parse_top),
DecoderOption(_cmp_re, _parse_cmp)
]
def component_name_to_id(name):
"""
Convert a section header name ("CMP 0" in DipTrace)
to an identifier (e.g. 0)
"""
if name.startswith("CMP"):
return int(name[len("CMP"):].strip())
return name
def parse_components(components):
# Build rulesets
return {
component_name_to_id(name): consolidate_component_tags(
list(run_decoder(component, components_decoder_options)))
for name, component in components.items()
if name is not None
}
def map_components_by_name(components):
"""Given a dictionary or list of components, map them into a dictionary by name"""
if isinstance(components, dict):
components = components.values()
return {
component.name: component
for component in components
}
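# Usage sketch (assumption: each parsed section maps a header name such as
# "CMP 0" to an iterable of its record lines):
#
#   sections = {"CMP 0": ["CMP 0 51.3 40.1 270.0 N R101 10k",
#                         "PRP Name 'EEUFR1H470'",
#                         "TOP 0 51.3 40.1 270.0 N 5 0 R101-1"]}
#   components = parse_components(sections)        # -> {0: Component(name='R101', ...)}
#   by_name = map_components_by_name(components)   # -> {'R101': Component(...)}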
| apache-2.0 | 1,282,930,216,420,034,800 | 33.991071 | 143 | 0.615463 | false | 3.468142 | false | false | false |
kilon/sverchok | utils/sv_easing_functions.py | 1 | 7135 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
'''
original c code:
https://raw.githubusercontent.com/warrenm/AHEasing/master/AHEasing/easing.c
Copyright (c) 2011, Auerhaus Development, LLC
http://sam.zoy.org/wtfpl/COPYING for more details.
'''
from math import sqrt, pow, sin, cos
from math import pi as M_PI
M_PI_2 = M_PI / 2
# Modeled after the line y = x
def LinearInterpolation(p):
return p
# Modeled after the parabola y = x^2
def QuadraticEaseIn(p):
return p * p
# Modeled after the parabola y = -x^2 + 2x
def QuadraticEaseOut(p):
return -(p * (p - 2))
# Modeled after the piecewise quadratic
# y = (1/2)((2x)^2) ; [0, 0.5)
# y = -(1/2)((2x-1)*(2x-3) - 1) ; [0.5, 1]
def QuadraticEaseInOut(p):
if (p < 0.5):
return 2 * p * p
return (-2 * p * p) + (4 * p) - 1
# Modeled after the cubic y = x^3
def CubicEaseIn(p):
return p * p * p
# Modeled after the cubic y = (x - 1)^3 + 1
def CubicEaseOut(p):
f = (p - 1)
return f * f * f + 1
# Modeled after the piecewise cubic
# y = (1/2)((2x)^3) ; [0, 0.5)
# y = (1/2)((2x-2)^3 + 2) ; [0.5, 1]
def CubicEaseInOut(p):
if (p < 0.5):
return 4 * p * p * p
else:
f = ((2 * p) - 2)
return 0.5 * f * f * f + 1
# Modeled after the quartic x^4
def QuarticEaseIn(p):
return p * p * p * p
# Modeled after the quartic y = 1 - (x - 1)^4
def QuarticEaseOut(p):
f = (p - 1)
return f * f * f * (1 - p) + 1
# Modeled after the piecewise quartic
# y = (1/2)((2x)^4) ; [0, 0.5)
# y = -(1/2)((2x-2)^4 - 2) ; [0.5, 1]
def QuarticEaseInOut(p):
if (p < 0.5):
return 8 * p * p * p * p
else:
f = (p - 1)
return -8 * f * f * f * f + 1
# Modeled after the quintic y = x^5
def QuinticEaseIn(p):
return p * p * p * p * p
# Modeled after the quintic y = (x - 1)^5 + 1
def QuinticEaseOut(p):
f = (p - 1)
return f * f * f * f * f + 1
# Modeled after the piecewise quintic
# y = (1/2)((2x)^5) ; [0, 0.5)
# y = (1/2)((2x-2)^5 + 2) ; [0.5, 1]
def QuinticEaseInOut(p):
if (p < 0.5):
return 16 * p * p * p * p * p
else:
f = ((2 * p) - 2)
return 0.5 * f * f * f * f * f + 1
# Modeled after quarter-cycle of sine wave
def SineEaseIn(p):
return sin((p - 1) * M_PI_2) + 1
# Modeled after quarter-cycle of sine wave (different phase)
def SineEaseOut(p):
return sin(p * M_PI_2)
# Modeled after half sine wave
def SineEaseInOut(p):
return 0.5 * (1 - cos(p * M_PI))
# Modeled after shifted quadrant IV of unit circle
def CircularEaseIn(p):
return 1 - sqrt(1 - (p * p))
# Modeled after shifted quadrant II of unit circle
def CircularEaseOut(p):
return sqrt((2 - p) * p)
# Modeled after the piecewise circular function
# y = (1/2)(1 - sqrt(1 - 4x^2)) ; [0, 0.5)
# y = (1/2)(sqrt(-(2x - 3)*(2x - 1)) + 1) ; [0.5, 1]
def CircularEaseInOut(p):
if(p < 0.5):
return 0.5 * (1 - sqrt(1 - 4 * (p * p)))
else:
return 0.5 * (sqrt(-((2 * p) - 3) * ((2 * p) - 1)) + 1)
# Modeled after the exponential function y = 2^(10(x - 1))
def ExponentialEaseIn(p):
return p if (p == 0.0) else pow(2, 10 * (p - 1))
# Modeled after the exponential function y = -2^(-10x) + 1
def ExponentialEaseOut(p):
return p if (p == 1.0) else 1 - pow(2, -10 * p)
# Modeled after the piecewise exponential
# y = (1/2)2^(10(2x - 1)) ; [0,0.5)
# y = -(1/2)*2^(-10(2x - 1))) + 1 ; [0.5,1]
def ExponentialEaseInOut(p):
if(p == 0.0 or p == 1.0):
return p
if(p < 0.5):
return 0.5 * pow(2, (20 * p) - 10)
else:
return -0.5 * pow(2, (-20 * p) + 10) + 1
# Modeled after the damped sine wave y = sin(13pi/2*x)*pow(2, 10 * (x - 1))
def ElasticEaseIn(p):
return sin(13 * M_PI_2 * p) * pow(2, 10 * (p - 1))
# Modeled after the damped sine wave y = sin(-13pi/2*(x + 1))*pow(2, -10x) + 1
def ElasticEaseOut(p):
return sin(-13 * M_PI_2 * (p + 1)) * pow(2, -10 * p) + 1
# Modeled after the piecewise exponentially-damped sine wave:
# y = (1/2)*sin(13pi/2*(2*x))*pow(2, 10 * ((2*x) - 1)) ; [0,0.5)
# y = (1/2)*(sin(-13pi/2*((2x-1)+1))*pow(2,-10(2*x-1)) + 2) ; [0.5, 1]
def ElasticEaseInOut(p):
if (p < 0.5):
return 0.5 * sin(13 * M_PI_2 * (2 * p)) * pow(2, 10 * ((2 * p) - 1))
else:
return 0.5 * (sin(-13 * M_PI_2 * ((2 * p - 1) + 1)) * pow(2, -10 * (2 * p - 1)) + 2)
# Modeled after the overshooting cubic y = x^3-x*sin(x*pi)
def BackEaseIn(p):
return p * p * p - p * sin(p * M_PI)
# Modeled after overshooting cubic y = 1-((1-x)^3-(1-x)*sin((1-x)*pi))
def BackEaseOut(p):
f = (1 - p)
return 1 - (f * f * f - f * sin(f * M_PI))
# Modeled after the piecewise overshooting cubic function:
# y = (1/2)*((2x)^3-(2x)*sin(2*x*pi)) ; [0, 0.5)
# y = (1/2)*(1-((1-x)^3-(1-x)*sin((1-x)*pi))+1) ; [0.5, 1]
def BackEaseInOut(p):
if (p < 0.5):
f = 2 * p
return 0.5 * (f * f * f - f * sin(f * M_PI))
else:
f = (1 - (2 * p - 1))
return 0.5 * (1 - (f * f * f - f * sin(f * M_PI))) + 0.5
def BounceEaseIn(p):
return 1 - BounceEaseOut(1 - p)
def BounceEaseOut(p):
if(p < 4 / 11.0):
return (121 * p * p) / 16.0
elif(p < 8 / 11.0):
return (363 / 40.0 * p * p) - (99 / 10.0 * p) + 17 / 5.0
elif(p < 9 / 10.0):
return (4356 / 361.0 * p * p) - (35442 / 1805.0 * p) + 16061 / 1805.0
else:
return (54 / 5.0 * p * p) - (513 / 25.0 * p) + 268 / 25.0
def BounceEaseInOut(p):
if(p < 0.5):
return 0.5 * BounceEaseIn(p * 2)
else:
return 0.5 * BounceEaseOut(p * 2 - 1) + 0.5
easing_dict = {
0: LinearInterpolation,
1: QuadraticEaseIn,
2: QuadraticEaseOut,
3: QuadraticEaseInOut,
4: CubicEaseIn,
5: CubicEaseOut,
6: CubicEaseInOut,
7: QuarticEaseIn,
8: QuarticEaseOut,
9: QuarticEaseInOut,
10: QuinticEaseIn,
11: QuinticEaseOut,
12: QuinticEaseInOut,
13: SineEaseIn,
14: SineEaseOut,
15: SineEaseInOut,
16: CircularEaseIn,
17: CircularEaseOut,
18: CircularEaseInOut,
19: ExponentialEaseIn,
20: ExponentialEaseOut,
21: ExponentialEaseInOut,
22: ElasticEaseIn,
23: ElasticEaseOut,
24: ElasticEaseInOut,
25: BackEaseIn,
26: BackEaseOut,
27: BackEaseInOut,
28: BounceEaseIn,
29: BounceEaseOut,
30: BounceEaseInOut
}
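# Minimal usage sketch (illustrative, not part of the original module): every
# entry of easing_dict maps a normalized time p in [0.0, 1.0] to an eased value.
if __name__ == "__main__":
    ease = easing_dict[3]  # QuadraticEaseInOut
    samples = [ease(i / 10.0) for i in range(11)]
    print(samples)  # samples[0] == 0.0 and samples[-1] == 1.0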
| gpl-3.0 | -8,912,327,687,947,498,000 | 24.573477 | 92 | 0.552908 | false | 2.440985 | false | false | false |
Corvia/django-tenant-users | dtu_test_project/customers/migrations/0001_initial.py | 1 | 1338 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django_tenants.postgresql_backend.base
VALIDATOR = django_tenants.postgresql_backend.base._check_schema_name
except ImportError as e:
import tenant_schemas.postgresql_backend.base
VALIDATOR = tenant_schemas.postgresql_backend.base._check_schema_name
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
('domain_url', models.CharField(max_length=128, unique=True)),
('schema_name', models.CharField(max_length=63, unique=True,
validators=[VALIDATOR])),
('slug', models.SlugField(verbose_name='Tenant URL Name', blank=True)),
('created', models.DateTimeField()),
('modified', models.DateTimeField(blank=True)),
('name', models.CharField(max_length=100)),
('description', models.TextField(max_length=200)),
],
options={
'abstract': False,
},
),
]
| mit | -8,869,141,853,439,523,000 | 33.307692 | 114 | 0.577728 | false | 4.597938 | false | false | false |
rbi13/CommandServer-py | Pattern.py | 1 | 1060 | #!/usr/bin/python
## Pattern.py
import re
from PatternMatch import PatternMatch
class Pattern:
# Pattern keys
PATTERN_KEY = "pattern"
CLI_COMMAND_KEY = "cliCommand"
FUNCTION_KEY = "function"
def __init__(self, info):
self.pattern = info.get(Pattern.PATTERN_KEY)
self.cliCommand = info.get(Pattern.CLI_COMMAND_KEY)
self.function = info.get(Pattern.FUNCTION_KEY)
def match(self, compare):
match = re.search(self.pattern, compare)
if match:
return PatternMatch(self, match)
else:
return None
@staticmethod
def getMatches(pattern_list, compare):
matches = []
for pattern in pattern_list:
match = pattern.match(compare)
if match:
matches.append(match)
return matches
@staticmethod
def load(json_list):
# Pattern[]
patterns = []
# pprint(json_list)
for patternDef in json_list:
patterns.append(Pattern(patternDef))
return patterns
| lgpl-3.0 | -1,920,804,193,334,539,300 | 24.238095 | 59 | 0.59434 | false | 4.156863 | false | false | false |
lausser/coshsh | coshsh/datasource.py | 1 | 4889 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#
# This file belongs to coshsh.
# Copyright Gerhard Lausser.
# This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import sys
import os
import re
import imp
import inspect
import logging
import coshsh
from coshsh.util import compare_attr, substenv
logger = logging.getLogger('coshsh')
class DatasourceNotImplemented(Exception):
pass
class DatasourceNotReady(Exception):
# datasource is currently being updated
pass
class DatasourceNotCurrent(Exception):
# datasources was not updated lately.
# it makes no sense to continue.
pass
class DatasourceNotAvailable(Exception):
pass
class DatasourceCorrupt(Exception):
pass
class Datasource(object):
my_type = 'datasource'
class_factory = []
def __init__(self, **params):
#print "datasourceinit with", self.__class__
for key in [k for k in params if k.startswith("recipe_")]:
setattr(self, key, params[key])
short = key.replace("recipe_", "")
if not short in params:
params[short] = params[key]
for key in params.keys():
if isinstance(params[key], basestring):
params[key] = re.sub('%.*?%', substenv, params[key])
if self.__class__ == Datasource:
#print "generic ds", params
newcls = self.__class__.get_class(params)
if newcls:
#print "i rebless anon datasource to", newcls, params
self.__class__ = newcls
self.__init__(**params)
else:
logger.critical('datasource for %s is not implemented' % params)
#print "i raise DatasourceNotImplemented"
raise DatasourceNotImplemented
else:
setattr(self, 'name', params["name"])
self.objects = {}
pass
# i am a generic datasource
# i find a suitable class
# i rebless
# i call __init__
def open(self, **kwargs):
pass
def read(self, **kwargs):
pass
def close(self):
pass
def add(self, objtype, obj):
try:
self.objects[objtype][obj.fingerprint()] = obj
except Exception:
self.objects[objtype] = {}
self.objects[objtype][obj.fingerprint()] = obj
if objtype == 'applications':
if self.find('hosts', obj.host_name):
setattr(obj, 'host', self.get('hosts', obj.host_name))
def get(self, objtype, fingerprint):
try:
return self.objects[objtype][fingerprint]
except Exception:
# should be None
return None
return 'i do not exist. no. no!'
def getall(self, objtype):
try:
return self.objects[objtype].values()
except Exception:
return []
def find(self, objtype, fingerprint):
return objtype in self.objects and fingerprint in self.objects[objtype]
@classmethod
def init_classes(cls, classpath):
sys.dont_write_bytecode = True
for p in [p for p in reversed(classpath) if os.path.exists(p) and os.path.isdir(p)]:
for module, path in [(item, p) for item in os.listdir(p) if item[-3:] == ".py" and item.startswith('datasource_')]:
                fp = None
                try:
#print "try ds", module, path
path = os.path.abspath(path)
fp, filename, data = imp.find_module(module.replace('.py', ''), [path])
toplevel = imp.load_source(module.replace(".py", ""), filename)
for cl in inspect.getmembers(toplevel, inspect.isfunction):
if cl[0] == "__ds_ident__":
cls.class_factory.append([path, module, cl[1]])
except Exception, exp:
logger.critical("could not load datasource %s from %s: %s" % (module, path, exp))
finally:
if fp:
fp.close()
@classmethod
def get_class(cls, params={}):
#print "get_classhoho", cls, len(cls.class_factory), cls.class_factory
for path, module, class_func in cls.class_factory:
try:
#print "try", path, module, class_func
newcls = class_func(params)
if newcls:
return newcls
            except Exception, exp:
dsname = 'INVALID' if 'name' not in params else params['name']
print 'Datasource.get_class exception while trying module "%s" for datasource "%s": %s %s' % \
(os.path.join(path, module), dsname, type(exp), exp)
pass
logger.debug("found no matching class for this datasource %s" % params)
| agpl-3.0 | 8,794,060,652,986,661,000 | 32.033784 | 127 | 0.557987 | false | 4.171502 | false | false | false |
openstack/networking-plumgrid | networking_plumgrid/neutron/plugins/drivers/fake_plumlib.py | 1 | 5565 | # Copyright 2015 PLUMgrid, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_plumgrid._i18n import _LI
from neutron.extensions import providernet as provider
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class Plumlib(object):
"""Class PLUMgrid Fake Library.
This library is a by-pass implementation for the PLUMgrid Library.
This class is being used by the unit test integration in Neutron.
"""
def __init__(self):
LOG.info(_LI('Python PLUMgrid Fake Library Started '))
pass
def director_conn(self, director_plumgrid, director_port, timeout,
director_admin, director_password):
LOG.info(_LI('Fake Director: %s'),
director_plumgrid + ':' + str(director_port))
pass
def create_network(self, tenant_id, net_db, network, **kwargs):
net_db["network"] = {}
for key in (provider.NETWORK_TYPE,
provider.PHYSICAL_NETWORK,
provider.SEGMENTATION_ID):
net_db["network"][key] = network["network"][key]
return net_db
def update_network(self, tenant_id, net_id, network, orig_net_db):
pass
def delete_network(self, net_db, net_id):
pass
def create_subnet(self, sub_db, net_db, ipnet):
pass
def update_subnet(self, orig_sub_db, new_sub_db, ipnet, net_db):
pass
def delete_subnet(self, tenant_id, net_db, net_id, sub_db):
pass
def create_port(self, port_db, router_db, subnet_db):
pass
def update_port(self, port_db, router_db, subnet_db):
pass
def delete_port(self, port_db, router_db):
pass
def create_router(self, tenant_id, router_db):
pass
def update_router(self, router_db, router_id):
pass
def delete_router(self, tenant_id, router_id):
pass
def add_router_interface(self, tenant_id, router_id, port_db, ipnet,
ip_version):
pass
def remove_router_interface(self, tenant_id, net_id, router_id):
pass
def create_floatingip(self, floating_ip):
pass
def update_floatingip(self, floating_ip_orig, floating_ip, id):
pass
def delete_floatingip(self, floating_ip_orig, id):
pass
def disassociate_floatingips(self, fip, port_id):
return dict((key, fip[key]) for key in ("id", "floating_network_id",
"floating_ip_address"))
def create_security_group(self, sg_db):
pass
def update_security_group(self, sg_db):
pass
def delete_security_group(self, sg_db):
pass
def create_security_group_rule(self, sg_rule_db):
pass
def create_security_group_rule_bulk(self, sg_rule_db):
pass
def delete_security_group_rule(self, sg_rule_db):
pass
def create_l2_gateway(self, director_plumgrid,
director_admin,
director_password,
gateway_info,
vendor_type,
sw_username,
sw_password):
pass
def delete_l2_gateway(self, gw_info):
pass
def add_l2_gateway_connection(self, gw_conn_info):
pass
def delete_l2_gateway_connection(self, gw_conn_info):
pass
def create_physical_attachment_point(self, physical_attachment_point):
pass
def update_physical_attachment_point(self, physical_attachment_point):
pass
def delete_physical_attachment_point(self, pap_id):
pass
def create_transit_domain(self, transit_domain, db):
pass
def update_transit_domain(self, transit_domain, db):
pass
def delete_transit_domain(self, tvd_id):
pass
def get_available_interface(self):
return "host1", "ifc1"
def create_policy_tag(self, tenant_id, policy_tag_db):
pass
def delete_policy_tag(self, tenant_id, ptag_id):
pass
def create_endpoint_group(self, tenant_id, ep_grp, ptag_db):
pass
def delete_endpoint_group(self, tenant_id, epg_id, ptag_db):
pass
def update_endpoint_group(self, tenant_id, epg_id, epg_db, ptag_db):
pass
def create_policy_service(self, tenant_id, ps_db, ps_mac_list):
pass
def delete_policy_service(self, tenant_id, ps_id):
pass
def update_policy_service(self, tenant_id, ps_id, ps_db, ps_mac_list):
pass
def create_policy_rule(self, tenant_id, pr_db):
pass
def delete_policy_rule(self, tenant_id, pr_id, remote_target=None):
pass
def create_endpoint(self, tenant_id, ep_db, port_mac=None):
pass
def delete_endpoint(self, tenant_id, ep_id, ep_db, port_mac=None):
pass
def update_endpoint(self, tenant_id, ep_id, ep_db, port_mac=None):
pass
def get_ext_links(self, tenant_id):
pass
| apache-2.0 | -7,382,820,481,788,809,000 | 27.248731 | 78 | 0.608625 | false | 3.680556 | false | false | false |
YiqunPeng/Leetcode-pyq | solutions/694NumberOfDistinctIslands.py | 1 | 1174 | class Solution:
def numDistinctIslands(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid: return 0
island_list = set()
m, n = len(grid), len(grid[0])
v = [[False for j in range(n)] for i in range(m)]
def bfs(x, y, v):
island = []
m, n = len(grid), len(grid[0])
q = [(x, y)]
while q:
i, j = q.pop(0)
for n_i, n_j in [(i+1, j), (i-1, j), (i, j+1), (i, j-1)]:
if not (0 <= n_i < m and 0 <= n_j < n) or v[n_i][n_j]:
continue
if grid[n_i][n_j] == 1:
v[n_i][n_j] = True
island.append((n_i - x, n_j - y))
q.append((n_i, n_j))
return str(island)
for i in range(m):
for j in range(n):
if v[i][j]: continue
if grid[i][j] == 1 and not v[i][j]:
v[i][j] = True
island_list.add(bfs(i, j, v))
return len(island_list)
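# Illustrative usage sketch (not part of the original solution); the grid below
# is an assumed test case: both islands are 2x2 squares, so only one distinct
# shape is counted.
if __name__ == '__main__':
    grid = [[1, 1, 0, 0, 0],
            [1, 1, 0, 0, 0],
            [0, 0, 0, 1, 1],
            [0, 0, 0, 1, 1]]
    print(Solution().numDistinctIslands(grid))  # expected output: 1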
| gpl-3.0 | 8,159,184,933,781,888,000 | 30.756757 | 74 | 0.348382 | false | 3.316384 | false | false | false |
facebookresearch/ParlAI | parlai/agents/examples/transformer_variant.py | 1 | 6891 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Example code for specifying custom transformer variants.
TransformerVariantAgent:
- Minimal changes needed to:
- Swap out a high-level component (encoder)
- Swap out a low-level component (decoder->layer->self_attention)
VerboseTransformerAgent:
- Doesn't swap out anything
- Fully specifies all components, for illustration
ConfigurableTransformerAgent:
- Swaps out components based on command line args
"""
from __future__ import annotations
import torch
from enum import Enum
from typing import Dict, Optional, Tuple, Union
from parlai.agents.transformer.modules import (
TransformerFFN,
MultiHeadAttention,
TransformerDecoder,
TransformerDecoderLayer,
TransformerEncoder,
TransformerEncoderLayer,
TransformerGeneratorModel,
)
from parlai.agents.transformer.transformer import TransformerGeneratorAgent
from parlai.core.opt import Opt
from parlai.core.params import ParlaiParser
import parlai.utils.logging as logging
###########################################
# Transformer With Two Components Swapped #
###########################################
class TransformerVariantAgent(TransformerGeneratorAgent):
"""
Swapping out two things:
1. Encoder (high-level component)
2. Decoder self attention (low-level component)
"""
def build_model(self, states=None):
wrapped_class = TransformerGeneratorModel.with_components(
encoder=MyCustomEncoder,
decoder=TransformerDecoder.with_components(
layer=TransformerDecoderLayer.with_components(
self_attention=MyCustomAttention
)
),
)
return wrapped_class(self.opt, self.dict)
class MyCustomEncoder(TransformerEncoder):
"""
For brevity this subclasses TransformerEncoder, but you could write your own
nn.Module from scratch as long as the __init__ and forward signatures match
TransformerEncoder.
"""
def forward(
self,
input: torch.LongTensor,
positions: Optional[torch.LongTensor] = None,
segments: Optional[torch.LongTensor] = None,
) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.BoolTensor]]:
logging.info("Custom encoder called!")
# Comment out the following line and write your custom `forward` instead.
return super().forward(input, positions, segments) # type: ignore
class MyCustomAttention(MultiHeadAttention):
"""
For brevity this just renames MultiHeadAttention, but ideally you'd define a new
nn.Module with the same __init__ and forward signature as MultiHeadAttention.
"""
def forward(
self,
query: torch.Tensor,
key: Optional[torch.Tensor] = None,
value: Optional[torch.Tensor] = None,
mask: torch.Tensor = None,
incr_state: Optional[Dict[str, torch.Tensor]] = None,
static_kv: bool = False,
) -> Tuple[torch.Tensor, Dict[str, torch.Tensor], torch.Tensor]:
logging.info("Custom attention called!")
# Comment out the following line and write your custom `forward` instead.
return super().forward(
query,
key=key,
value=value,
mask=mask,
incr_state=incr_state,
static_kv=static_kv,
)
#######################################
# Fully-specified Default Transformer #
#######################################
class VerboseTransformerAgent(TransformerGeneratorAgent):
"""
Doesn't make any changes to TransformerGeneratorModel, just specifies all
subcomponents explicitly.
This is meant to be a reference for how to swap any component within
TransformerGeneratorModel.
"""
def build_model(self, states=None):
wrapped_class = TransformerGeneratorModel.with_components(
encoder=TransformerEncoder.with_components(
layer=TransformerEncoderLayer.with_components(
self_attention=MultiHeadAttention, feedforward=TransformerFFN
)
),
decoder=TransformerDecoder.with_components(
layer=TransformerDecoderLayer.with_components(
encoder_attention=MultiHeadAttention,
self_attention=MultiHeadAttention,
feedforward=TransformerFFN,
)
),
)
return wrapped_class(opt=self.opt, dictionary=self.dict)
################################################
# Command-line Configurable Custom Transformer #
################################################
class DecoderFeedForwardVariant(Enum):
ONE = 'one'
TWO = 'two'
class DecoderFFNOne(TransformerFFN):
def forward(self, x: torch.Tensor) -> torch.Tensor:
logging.info("Using Decoder FFN Variant One")
return super().forward(x)
class DecoderFFNTwo(TransformerFFN):
def forward(self, x: torch.Tensor) -> torch.Tensor:
logging.info("Using Decoder FFN Variant Two")
return super().forward(x)
class ConfigurableTransformerAgent(TransformerGeneratorAgent):
"""
Illustrates swapping out components based on command line args.
Specifically, swaps out the decoder ffn between two options.
"""
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
super().add_cmdline_args(parser, partial_opt=partial_opt)
agent = parser.add_argument_group('MyCustom Transformer Arguments')
parser.add_argument(
'--decoder-ffn-variants',
type=DecoderFeedForwardVariant,
default=DecoderFeedForwardVariant.ONE,
help='Some variants in the decoder FFN implementation',
)
return agent # type: ignore
def build_model(self, states=None):
decoder_variant: DecoderFeedForwardVariant = self.opt['decoder_ffn_variants']
if decoder_variant == DecoderFeedForwardVariant.ONE:
decoder_ffn_class = DecoderFFNOne
elif decoder_variant == DecoderFeedForwardVariant.TWO:
decoder_ffn_class = DecoderFFNTwo
else:
logging.error(
'Invalid --decoder-ffn-variants option, defaulting to original ffn implementation.'
)
decoder_ffn_class = TransformerFFN
wrapped_class = TransformerGeneratorModel.with_components(
decoder=TransformerDecoder.with_components(
layer=TransformerDecoderLayer.with_components(
feedforward=decoder_ffn_class
)
)
)
return wrapped_class(opt=self.opt, dictionary=self.dict)
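# Illustrative note (not part of the original file): because the
# --decoder-ffn-variants flag is declared with type=DecoderFeedForwardVariant,
# plain argparse semantics build the enum member from the raw string by value,
# e.g.
#
#   assert DecoderFeedForwardVariant('two') is DecoderFeedForwardVariant.TWO
#
# which is what the comparisons against the enum members in build_model() rely on.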
| mit | -694,793,330,464,155,900 | 32.779412 | 99 | 0.643448 | false | 4.50098 | false | false | false |
perryl/morph | morphlib/morphloader.py | 1 | 26505 | # Copyright (C) 2013-2016 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# =*= License: GPL-2 =*=
import os
import collections
import warnings
import yaml
import morphlib
class MorphologySyntaxError(morphlib.Error):
pass
class MorphologyNotYamlError(MorphologySyntaxError):
def __init__(self, morphology, errmsg):
self.msg = 'Syntax error in morphology %s:\n%s' % (morphology, errmsg)
class NotADictionaryError(MorphologySyntaxError):
def __init__(self, morph_filename, errmsg=None):
self.msg = 'Not a dictionary: morphology %s' % morph_filename
if errmsg:
self.msg += "\n%s" % (errmsg)
class MorphologyValidationError(morphlib.Error):
pass
class UnknownKindError(MorphologyValidationError):
def __init__(self, kind, morph_filename):
self.msg = (
'Unknown kind %s in morphology %s' % (kind, morph_filename))
class MissingFieldError(MorphologyValidationError):
def __init__(self, field, morphology_name):
self.field = field
self.morphology_name = morphology_name
self.msg = (
'Missing field %s from morphology %s' % (field, morphology_name))
class InvalidStringError(MorphologyValidationError):
def __init__(self, field, spec, morph_filename):
self.field = field
self.spec = spec
self.morph_filename = morph_filename
MorphologyValidationError.__init__(
self, "Field '%(field)s' must be a non-empty string in %(spec)s"\
" for morphology %(morph_filename)s" % locals())
class InvalidFieldError(MorphologyValidationError):
def __init__(self, field, morphology_name):
self.field = field
self.morphology_name = morphology_name
self.msg = (
'Field %s not allowed in morphology %s' % (field, morphology_name))
class InvalidTypeError(MorphologyValidationError):
def __init__(self, field, expected, actual, morphology_name):
self.field = field
self.expected = expected
self.actual = actual
self.morphology_name = morphology_name
self.msg = (
'Field %s expected type %s, got %s in morphology %s' %
(field, expected, actual, morphology_name))
class UnknownArchitectureError(MorphologyValidationError):
def __init__(self, arch, morph_filename):
self.msg = ('Unknown architecture %s in morphology %s'
% (arch, morph_filename))
class UnknownBuildSystemError(MorphologyValidationError):
def __init__(self, build_system, morph_filename):
self.msg = ('Undefined build system %s in morphology %s'
% (build_system, morph_filename))
class NoStratumBuildDependenciesError(MorphologyValidationError):
def __init__(self, stratum_name, morph_filename):
self.msg = (
'Stratum %s has no build dependencies in %s' %
(stratum_name, morph_filename))
class EmptyStratumError(MorphologyValidationError):
def __init__(self, stratum_name, morph_filename):
self.msg = (
'Stratum %s has no chunks in %s' %
(stratum_name, morph_filename))
class DuplicateChunkError(MorphologyValidationError):
def __init__(self, stratum_name, chunk_name):
self.stratum_name = stratum_name
self.chunk_name = chunk_name
MorphologyValidationError.__init__(
self, 'Duplicate chunk %(chunk_name)s '\
'in stratum %(stratum_name)s' % locals())
class ChunkSpecConflictingFieldsError(MorphologyValidationError):
def __init__(self, fields, chunk_name, stratum_name):
self.chunk_name = chunk_name
self.stratum_name = stratum_name
self.fields = fields
MorphologyValidationError.__init__(
self, 'Conflicting fields "%s" for %s in stratum %s.' % (
', and '.join(fields), chunk_name, stratum_name))
class ChunkSpecNoBuildInstructionsError(MorphologyValidationError):
def __init__(self, chunk_name, stratum_name):
self.chunk_name = chunk_name
self.stratum_name = stratum_name
self.msg = (
'Chunk %(chunk_name)s in stratum %(stratum_name)s has no '
'build-system defined, and no chunk .morph file referenced '
'either. Please specify how to build the chunk, either by setting '
'"build-system: " in the stratum, or adding a chunk .morph file '
'and setting "morph: " in the stratum.' % locals())
class SystemStrataNotListError(MorphologyValidationError):
def __init__(self, system_name, strata_type):
self.system_name = system_name
self.strata_type = strata_type
typename = strata_type.__name__
MorphologyValidationError.__init__(
self, 'System %(system_name)s has the wrong type for its strata: '\
'%(typename)s, expected list' % locals())
class DuplicateStratumError(MorphologyValidationError):
def __init__(self, system_name, stratum_name):
self.system_name = system_name
self.stratum_name = stratum_name
MorphologyValidationError.__init__(
self, 'Duplicate stratum %(stratum_name)s '\
'in system %(system_name)s' % locals())
class SystemStratumSpecsNotMappingError(MorphologyValidationError):
def __init__(self, system_name, strata):
self.system_name = system_name
self.strata = strata
MorphologyValidationError.__init__(
self, 'System %(system_name)s has stratum specs '\
'that are not mappings.' % locals())
class EmptySystemError(MorphologyValidationError):
def __init__(self, system_name):
MorphologyValidationError.__init__(
self, 'System %(system_name)s has no strata.' % locals())
class DependsOnSelfError(MorphologyValidationError):
def __init__(self, name, filename):
msg = ("Stratum %(name)s build-depends on itself (%(filename)s)"
% locals())
MorphologyValidationError.__init__(self, msg)
class MultipleValidationErrors(MorphologyValidationError):
def __init__(self, name, errors):
self.name = name
self.errors = errors
        self.msg = 'Multiple errors when validating %(name)s:' % locals()
for error in errors:
self.msg += ('\n' + str(error))
class DuplicateDeploymentNameError(MorphologyValidationError):
def __init__(self, cluster_filename, duplicates):
self.duplicates = duplicates
self.cluster_filename = cluster_filename
morphlib.Error.__init__(self,
'Cluster %s contains the following duplicate deployment names:%s'
% (cluster_filename, '\n ' + '\n '.join(duplicates)))
class MorphologyDumper(yaml.SafeDumper):
keyorder = (
'name',
'kind',
'description',
'arch',
'strata',
'configuration-extensions',
'morph',
'repo',
'ref',
'unpetrify-ref',
'build-depends',
'build-mode',
'artifacts',
'max-jobs',
'submodules',
'products',
'chunks',
'build-system',
'pre-configure-commands',
'configure-commands',
'post-configure-commands',
'pre-build-commands',
'build-commands',
'pre-test-commands',
'test-commands',
'post-test-commands',
'post-build-commands',
'pre-install-commands',
'install-commands',
'post-install-commands',
'artifact',
'include',
'systems',
'deploy-defaults',
'deploy',
'type',
'location',
)
@classmethod
def _iter_in_global_order(cls, mapping):
for key in cls.keyorder:
if key in mapping:
yield key, mapping[key]
for key in sorted(mapping.iterkeys()):
if key not in cls.keyorder:
yield key, mapping[key]
@classmethod
def _represent_dict(cls, dumper, mapping):
return dumper.represent_mapping('tag:yaml.org,2002:map',
cls._iter_in_global_order(mapping))
@classmethod
def _represent_str(cls, dumper, orig_data):
fallback_representer = yaml.representer.SafeRepresenter.represent_str
try:
data = unicode(orig_data, 'ascii')
if data.count('\n') == 0:
return fallback_representer(dumper, orig_data)
except UnicodeDecodeError:
try:
data = unicode(orig_data, 'utf-8')
if data.count('\n') == 0:
return fallback_representer(dumper, orig_data)
except UnicodeDecodeError:
return fallback_representer(dumper, orig_data)
return dumper.represent_scalar(u'tag:yaml.org,2002:str',
data, style='|')
@classmethod
def _represent_unicode(cls, dumper, data):
if data.count('\n') == 0:
return yaml.representer.SafeRepresenter.represent_unicode(dumper,
data)
return dumper.represent_scalar(u'tag:yaml.org,2002:str',
data, style='|')
def __init__(self, *args, **kwargs):
yaml.SafeDumper.__init__(self, *args, **kwargs)
self.add_representer(dict, self._represent_dict)
self.add_representer(str, self._represent_str)
self.add_representer(unicode, self._represent_unicode)
class MorphologyLoader(object):
'''Load morphologies from disk, or save them back to disk.'''
_required_fields = {
'chunk': [
'name',
],
'stratum': [
'name',
],
'system': [
'name',
'arch',
'strata',
],
'cluster': [
'name',
'systems',
],
}
_static_defaults = {
'chunk': {
'description': '',
'pre-configure-commands': None,
'configure-commands': None,
'post-configure-commands': None,
'pre-build-commands': None,
'build-commands': None,
'post-build-commands': None,
'pre-test-commands': None,
'test-commands': None,
'post-test-commands': None,
'pre-install-commands': None,
'install-commands': None,
'post-install-commands': None,
'pre-strip-commands': None,
'strip-commands': None,
'post-strip-commands': None,
'devices': [],
'submodules': {},
'products': [],
'max-jobs': None,
'build-system': 'manual',
'build-mode': 'staging',
'prefix': '/usr',
'system-integration': [],
},
'stratum': {
'chunks': [],
'description': '',
'build-depends': [],
'products': [],
},
'system': {
'description': '',
'arch': None,
'configuration-extensions': [],
},
'cluster': {
'description': '',
},
}
def __init__(self,
predefined_build_systems={}):
self._predefined_build_systems = predefined_build_systems.copy()
if 'manual' not in self._predefined_build_systems:
self._predefined_build_systems['manual'] = \
morphlib.buildsystem.ManualBuildSystem()
def load_from_string(self, string, filename='string',
set_defaults=True): # pragma: no cover
'''Load a morphology from a string.
Return the Morphology object.
'''
try:
obj = yaml.safe_load(string)
except yaml.error.YAMLError as e:
raise MorphologyNotYamlError(filename, e)
if not isinstance(obj, dict):
raise NotADictionaryError(filename)
m = morphlib.morphology.Morphology(obj)
m.filename = filename
self.validate(m)
if set_defaults:
self.set_commands(m)
self.set_defaults(m)
return m
def load_from_file(self, filename, set_defaults=True):
'''Load a morphology from a named file.
Return the Morphology object.
'''
with open(filename) as f:
text = f.read()
return self.load_from_string(text, filename=filename,
set_defaults=set_defaults)
def save_to_string(self, morphology):
'''Return normalised textual form of morphology.'''
return yaml.dump(morphology.data, Dumper=MorphologyDumper,
default_flow_style=False)
def save_to_file(self, filename, morphology):
'''Save a morphology object to a named file.'''
text = self.save_to_string(morphology)
with morphlib.savefile.SaveFile(filename, 'w') as f:
f.write(text)
def validate(self, morph):
'''Validate a morphology.'''
# Validate that the kind field is there.
self._require_field('kind', morph)
# The rest of the validation is dependent on the kind.
kind = morph['kind']
if kind not in ('system', 'stratum', 'chunk', 'cluster'):
raise UnknownKindError(morph['kind'], morph.filename)
required = ['kind'] + self._required_fields[kind]
allowed = self._static_defaults[kind].keys()
self._require_fields(required, morph)
self._deny_unknown_fields(required + allowed, morph)
getattr(self, '_validate_%s' % kind)(morph)
def _validate_cluster(self, morph):
# Deployment names must be unique within a cluster
deployments = collections.Counter()
for system in morph['systems']:
deployments.update(system['deploy'].iterkeys())
if 'subsystems' in system:
deployments.update(self._get_subsystem_names(system))
duplicates = set(deployment for deployment, count
in deployments.iteritems() if count > 1)
if duplicates:
raise DuplicateDeploymentNameError(morph.filename, duplicates)
def _get_subsystem_names(self, system): # pragma: no cover
for subsystem in system.get('subsystems', []):
for name in subsystem['deploy'].iterkeys():
yield name
for name in self._get_subsystem_names(subsystem):
yield name
def _validate_system(self, morph):
# A system must contain at least one stratum
strata = morph['strata']
if (not isinstance(strata, collections.Iterable)
or isinstance(strata, collections.Mapping)):
raise SystemStrataNotListError(morph['name'],
type(strata))
if not strata:
raise EmptySystemError(morph['name'])
if not all(isinstance(o, collections.Mapping) for o in strata):
raise SystemStratumSpecsNotMappingError(morph['name'], strata)
# All stratum names should be unique within a system.
names = set()
for spec in strata:
name = spec['morph']
if name in names:
raise DuplicateStratumError(morph['name'], name)
names.add(name)
# Architecture name must be known.
if morph['arch'] not in morphlib.valid_archs:
raise UnknownArchitectureError(morph['arch'], morph.filename)
def _validate_stratum(self, morph):
# Require at least one chunk.
if len(morph.get('chunks', [])) == 0:
raise EmptyStratumError(morph['name'], morph.filename)
# Require build-dependencies for the stratum itself, unless
# it has chunks built in bootstrap mode.
if 'build-depends' in morph:
if not isinstance(morph['build-depends'], list):
raise InvalidTypeError(
'build-depends', list, type(morph['build-depends']),
morph['name'])
for dep in morph['build-depends']:
if dep['morph'] == morph.filename:
raise DependsOnSelfError(morph['name'], morph.filename)
else:
for spec in morph['chunks']:
if spec.get('build-mode') in ['bootstrap', 'test']:
break
else:
raise NoStratumBuildDependenciesError(
morph['name'], morph.filename)
# All chunk names must be unique within a stratum.
names = set()
for spec in morph['chunks']:
name = spec['name']
if name in names:
raise DuplicateChunkError(morph['name'], name)
names.add(name)
# Check each reference to a chunk.
for spec in morph['chunks']:
chunk_name = spec['name']
# All chunks repos and refs must be strings
def validate_chunk_str_field(field, spec, morph_filename):
if field not in spec:
raise MissingFieldError('%s in %s' % (field, spec),
morph.filename)
val = spec[field]
if not val or not isinstance(val, basestring) or (
not val.strip()):
raise InvalidStringError(
field, spec, morph_filename)
validate_chunk_str_field('repo', spec, morph.filename)
validate_chunk_str_field('ref', spec, morph.filename)
# The build-depends field must be a list.
if 'build-depends' in spec:
if not isinstance(spec['build-depends'], list):
raise InvalidTypeError(
'%s.build-depends' % chunk_name, list,
type(spec['build-depends']), morph['name'])
# Either 'morph' or 'build-system' must be specified.
if 'morph' in spec and 'build-system' in spec:
raise ChunkSpecConflictingFieldsError(
['morph', 'build-system'], chunk_name, morph.filename)
if 'morph' not in spec and 'build-system' not in spec:
raise ChunkSpecNoBuildInstructionsError(
chunk_name, morph.filename)
def validate_submodules(submodules, morph_filename):
for sub_name in submodules:
validate_chunk_str_field('url', submodules[sub_name],
morph_filename)
if 'submodules' in spec:
if not isinstance(spec['submodules'], dict):
raise NotADictionaryError(
morph.filename, "The 'submodules' in chunk '%s' have "
"to be a dict" % (chunk_name))
validate_submodules(spec['submodules'], morph.filename)
@classmethod
def _validate_chunk(cls, morphology):
errors = []
if 'products' in morphology:
cls._validate_products(morphology['name'],
morphology['products'], errors)
for key in MorphologyDumper.keyorder:
if key.endswith('-commands') and key in morphology:
cls._validate_commands(morphology['name'], key,
morphology[key], errors)
if len(errors) == 1:
raise errors[0]
elif errors:
raise MultipleValidationErrors(morphology['name'], errors)
@classmethod
def _validate_commands(cls, morphology_name, key, commands, errors):
if commands is None:
return
for cmd_index, cmd in enumerate(commands): # pragma: no cover
if not isinstance(cmd, basestring):
e = InvalidTypeError('%s[%d]' % (key, cmd_index),
str, type(cmd), morphology_name)
errors.append(e)
@classmethod
def _validate_products(cls, morphology_name, products, errors):
'''Validate the products field is of the correct type.'''
if (not isinstance(products, collections.Iterable)
or isinstance(products, collections.Mapping)):
raise InvalidTypeError('products', list,
type(products), morphology_name)
for spec_index, spec in enumerate(products):
if not isinstance(spec, collections.Mapping):
e = InvalidTypeError('products[%d]' % spec_index,
dict, type(spec), morphology_name)
errors.append(e)
continue
cls._validate_products_spec_fields_exist(morphology_name,
spec_index, spec, errors)
if 'include' in spec:
cls._validate_products_specs_include(
morphology_name, spec_index, spec['include'], errors)
product_spec_required_fields = ('artifact', 'include')
@classmethod
def _validate_products_spec_fields_exist(
cls, morphology_name, spec_index, spec, errors):
given_fields = sorted(spec.iterkeys())
missing = (field for field in cls.product_spec_required_fields
if field not in given_fields)
for field in missing:
e = MissingFieldError('products[%d].%s' % (spec_index, field),
morphology_name)
errors.append(e)
unexpected = (field for field in given_fields
if field not in cls.product_spec_required_fields)
for field in unexpected:
e = InvalidFieldError('products[%d].%s' % (spec_index, field),
morphology_name)
errors.append(e)
@classmethod
def _validate_products_specs_include(cls, morphology_name, spec_index,
include_patterns, errors):
'''Validate that products' include field is a list of strings.'''
# Allow include to be most iterables, but not a mapping
# or a string, since iter of a mapping is just the keys,
# and the iter of a string is a 1 character length string,
# which would also validate as an iterable of strings.
if (not isinstance(include_patterns, collections.Iterable)
or isinstance(include_patterns, collections.Mapping)
or isinstance(include_patterns, basestring)):
e = InvalidTypeError('products[%d].include' % spec_index, list,
type(include_patterns), morphology_name)
errors.append(e)
else:
for pattern_index, pattern in enumerate(include_patterns):
pattern_path = ('products[%d].include[%d]' %
(spec_index, pattern_index))
if not isinstance(pattern, basestring):
e = InvalidTypeError(pattern_path, str,
type(pattern), morphology_name)
errors.append(e)
def _require_field(self, field, morphology):
if field not in morphology:
raise MissingFieldError(field, morphology.filename)
def _require_fields(self, fields, morphology):
for field in fields:
self._require_field(field, morphology)
def _deny_unknown_fields(self, allowed, morphology):
for field in morphology:
if field not in allowed:
raise InvalidFieldError(field, morphology.filename)
def set_defaults(self, morphology):
        '''Set all missing fields in the morphology to their defaults.
The morphology is assumed to be valid.
'''
kind = morphology['kind']
defaults = self._static_defaults[kind]
for key in defaults:
if key not in morphology:
morphology[key] = defaults[key]
getattr(self, '_set_%s_defaults' % kind)(morphology)
def _set_cluster_defaults(self, morph):
for system in morph.get('systems', []):
if 'deploy-defaults' not in system:
system['deploy-defaults'] = {}
if 'deploy' not in system:
system['deploy'] = {}
def _set_system_defaults(self, morph):
pass
def _set_stratum_defaults(self, morph):
for spec in morph['chunks']:
if 'repo' not in spec:
spec['repo'] = spec['name']
if 'build-mode' not in spec:
spec['build-mode'] = \
self._static_defaults['chunk']['build-mode']
if 'prefix' not in spec:
spec['prefix'] = \
self._static_defaults['chunk']['prefix']
if 'submodules' not in spec:
spec['submodules'] = \
self._static_defaults['chunk']['submodules']
def _set_chunk_defaults(self, morph):
if morph['max-jobs'] is not None:
morph['max-jobs'] = int(morph['max-jobs'])
def lookup_build_system(self, name):
return self._predefined_build_systems[name]
def set_commands(self, morph):
if morph['kind'] == 'chunk':
default = self._static_defaults['chunk']['build-system']
bs_name = morph.get('build-system', default)
try:
bs = self.lookup_build_system(bs_name)
except KeyError:
raise UnknownBuildSystemError(bs_name, morph['name'])
for key in self._static_defaults['chunk']:
if 'commands' not in key: continue
if key not in morph:
attr = '_'.join(key.split('-'))
morph[key] = getattr(bs, attr)
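# Illustrative usage sketch (not part of the original module); the morphology
# path and its contents are assumptions:
#
#   loader = MorphologyLoader()
#   morph = loader.load_from_file('strata/core.morph')
#   print morph['kind'], morph['name']
#   loader.save_to_file('strata/core.morph', morph)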
| gpl-2.0 | -5,550,887,847,934,322,000 | 34.577181 | 79 | 0.565554 | false | 4.291613 | false | false | false |
bshillingford/python-torchfile | torchfile.py | 1 | 15223 | """
Mostly direct port of the Lua and C serialization implementation to
Python, depending only on `struct`, `array`, and numpy.
Supported types:
* `nil` to Python `None`
* numbers to Python floats, or by default a heuristic changes them to ints or
longs if they are integral
* booleans
* strings: read as byte strings (Python 3) or normal strings (Python 2), like
lua strings which don't support unicode, and that can contain null chars
* tables converted to a special dict (*); if they are list-like (i.e. have
numeric keys from 1 through n) they become a python list by default
* Torch classes: supports Tensors and Storages, and most classes such as
modules. Trivially extensible much like the Torch serialization code.
Trivial torch classes like most `nn.Module` subclasses become
`TorchObject`s. The `type_handlers` dict contains the mapping from class
names to reading functions.
* functions: loaded into the `LuaFunction` `namedtuple`,
which simply wraps the raw serialized data, i.e. upvalues and code.
These are mostly useless, but exist so you can deserialize anything.
(*) Since Lua allows you to index a table with a table but Python does not, we
replace dicts with a subclass that is hashable, and change its
equality comparison behaviour to compare by reference.
See `hashable_uniq_dict`.
Currently, the implementation assumes the system-dependent binary Torch
format, but minor refactoring can give support for the ascii format as well.
"""
import struct
from array import array
import numpy as np
import sys
from collections import namedtuple
TYPE_NIL = 0
TYPE_NUMBER = 1
TYPE_STRING = 2
TYPE_TABLE = 3
TYPE_TORCH = 4
TYPE_BOOLEAN = 5
TYPE_FUNCTION = 6
TYPE_RECUR_FUNCTION = 8
LEGACY_TYPE_RECUR_FUNCTION = 7
LuaFunction = namedtuple('LuaFunction',
['size', 'dumped', 'upvalues'])
class hashable_uniq_dict(dict):
"""
Subclass of dict with equality and hashing semantics changed:
equality and hashing is purely by reference/instance, to match
the behaviour of lua tables.
Supports lua-style dot indexing.
This way, dicts can be keys of other dicts.
"""
def __hash__(self):
return id(self)
def __getattr__(self, key):
if key in self:
return self[key]
if isinstance(key, (str, bytes)):
return self.get(key.encode('utf8'))
def __eq__(self, other):
return id(self) == id(other)
def __ne__(self, other):
return id(self) != id(other)
def _disabled_binop(self, other):
raise TypeError(
'hashable_uniq_dict does not support these comparisons')
__cmp__ = __ne__ = __le__ = __gt__ = __lt__ = _disabled_binop
class TorchObject(object):
"""
Simple torch object, used by `add_trivial_class_reader`.
Supports both forms of lua-style indexing, i.e. getattr and getitem.
Use the `torch_typename` method to get the object's torch class name.
Equality is by reference, as usual for lua (and the default for Python
objects).
"""
def __init__(self, typename, obj=None, version_number=0):
self._typename = typename
self._obj = obj
self._version_number = version_number
def __getattr__(self, k):
if k in self._obj:
return self._obj[k]
if isinstance(k, (str, bytes)):
return self._obj.get(k.encode('utf8'))
def __getitem__(self, k):
if k in self._obj:
return self._obj[k]
if isinstance(k, (str, bytes)):
return self._obj.get(k.encode('utf8'))
def torch_typename(self):
return self._typename
def __repr__(self):
return "TorchObject(%s, %s)" % (self._typename, repr(self._obj))
def __str__(self):
return repr(self)
def __dir__(self):
keys = list(self._obj.keys())
keys.append('torch_typename')
return keys
type_handlers = {}
def register_handler(typename):
def do_register(handler):
type_handlers[typename] = handler
return do_register
def add_tensor_reader(typename, dtype):
def read_tensor_generic(reader, version):
# https://github.com/torch/torch7/blob/1e86025/generic/Tensor.c#L1249
ndim = reader.read_int()
size = reader.read_long_array(ndim)
stride = reader.read_long_array(ndim)
storage_offset = reader.read_long() - 1 # 0-indexing
# read storage:
storage = reader.read_obj()
if storage is None or ndim == 0 or len(size) == 0 or len(stride) == 0:
# empty torch tensor
return np.empty((0), dtype=dtype)
# convert stride to numpy style (i.e. in bytes)
stride = [storage.dtype.itemsize * x for x in stride]
# create numpy array that indexes into the storage:
return np.lib.stride_tricks.as_strided(
storage[storage_offset:],
shape=size,
strides=stride)
type_handlers[typename] = read_tensor_generic
add_tensor_reader(b'torch.ByteTensor', dtype=np.uint8)
add_tensor_reader(b'torch.CharTensor', dtype=np.int8)
add_tensor_reader(b'torch.ShortTensor', dtype=np.int16)
add_tensor_reader(b'torch.IntTensor', dtype=np.int32)
add_tensor_reader(b'torch.LongTensor', dtype=np.int64)
add_tensor_reader(b'torch.FloatTensor', dtype=np.float32)
add_tensor_reader(b'torch.DoubleTensor', dtype=np.float64)
add_tensor_reader(b'torch.CudaTensor', dtype=np.float32)
add_tensor_reader(b'torch.CudaByteTensor', dtype=np.uint8)
add_tensor_reader(b'torch.CudaCharTensor', dtype=np.int8)
add_tensor_reader(b'torch.CudaShortTensor', dtype=np.int16)
add_tensor_reader(b'torch.CudaIntTensor', dtype=np.int32)
add_tensor_reader(b'torch.CudaDoubleTensor', dtype=np.float64)
def add_storage_reader(typename, dtype):
def read_storage(reader, version):
# https://github.com/torch/torch7/blob/1e86025/generic/Storage.c#L237
size = reader.read_long()
return np.fromfile(reader.f, dtype=dtype, count=size)
type_handlers[typename] = read_storage
add_storage_reader(b'torch.ByteStorage', dtype=np.uint8)
add_storage_reader(b'torch.CharStorage', dtype=np.int8)
add_storage_reader(b'torch.ShortStorage', dtype=np.int16)
add_storage_reader(b'torch.IntStorage', dtype=np.int32)
add_storage_reader(b'torch.LongStorage', dtype=np.int64)
add_storage_reader(b'torch.FloatStorage', dtype=np.float32)
add_storage_reader(b'torch.DoubleStorage', dtype=np.float64)
add_storage_reader(b'torch.CudaStorage', dtype=np.float32)
add_storage_reader(b'torch.CudaByteStorage', dtype=np.uint8)
add_storage_reader(b'torch.CudaCharStorage', dtype=np.int8)
add_storage_reader(b'torch.CudaShortStorage', dtype=np.int16)
add_storage_reader(b'torch.CudaIntStorage', dtype=np.int32)
add_storage_reader(b'torch.CudaDoubleStorage', dtype=np.float64)
def add_notimpl_reader(typename):
def read_notimpl(reader, version):
raise NotImplementedError('Reader not implemented for: ' + typename)
type_handlers[typename] = read_notimpl
add_notimpl_reader(b'torch.HalfTensor')
add_notimpl_reader(b'torch.HalfStorage')
add_notimpl_reader(b'torch.CudaHalfTensor')
add_notimpl_reader(b'torch.CudaHalfStorage')
@register_handler(b'tds.Vec')
def tds_Vec_reader(reader, version):
size = reader.read_int()
obj = []
_ = reader.read_obj()
for i in range(size):
e = reader.read_obj()
obj.append(e)
return obj
@register_handler(b'tds.Hash')
def tds_Hash_reader(reader, version):
size = reader.read_int()
obj = hashable_uniq_dict()
_ = reader.read_obj()
for i in range(size):
k = reader.read_obj()
v = reader.read_obj()
obj[k] = v
return obj
class T7ReaderException(Exception):
pass
class T7Reader:
def __init__(self,
fileobj,
use_list_heuristic=True,
use_int_heuristic=True,
utf8_decode_strings=False,
force_deserialize_classes=None,
force_8bytes_long=False):
"""
Params:
* `fileobj`: file object to read from, must be an actual file object
as it will be read by `array`, `struct`, and `numpy`. Since
it is only read sequentially, certain objects like pipes or
`sys.stdin` should work as well (untested).
* `use_list_heuristic`: automatically turn tables with only consecutive
positive integral indices into lists
(default True)
* `use_int_heuristic`: cast all whole floats into ints (default True)
* `utf8_decode_strings`: decode all strings as UTF8. By default they
remain as byte strings. Version strings always
are byte strings, but this setting affects
class names. (default False)
* `force_deserialize_classes`: deprecated.
"""
self.f = fileobj
self.objects = {} # read objects so far
if force_deserialize_classes is not None:
raise DeprecationWarning(
'force_deserialize_classes is now always '
'forced to be true, so no longer required')
self.use_list_heuristic = use_list_heuristic
self.use_int_heuristic = use_int_heuristic
self.utf8_decode_strings = utf8_decode_strings
self.force_8bytes_long = force_8bytes_long
def _read(self, fmt):
sz = struct.calcsize(fmt)
return struct.unpack(fmt, self.f.read(sz))
def read_boolean(self):
return self.read_int() == 1
def read_int(self):
return self._read('i')[0]
def read_long(self):
if self.force_8bytes_long:
return self._read('q')[0]
else:
return self._read('l')[0]
def read_long_array(self, n):
if self.force_8bytes_long:
lst = []
for i in range(n):
lst.append(self.read_long())
return lst
else:
arr = array('l')
arr.fromfile(self.f, n)
return arr.tolist()
def read_float(self):
return self._read('f')[0]
def read_double(self):
return self._read('d')[0]
def read_string(self, disable_utf8=False):
size = self.read_int()
s = self.f.read(size)
if disable_utf8 or not self.utf8_decode_strings:
return s
return s.decode('utf8')
def read_obj(self):
typeidx = self.read_int()
if typeidx == TYPE_NIL:
return None
elif typeidx == TYPE_NUMBER:
x = self.read_double()
# Extra checking for integral numbers:
if self.use_int_heuristic and x.is_integer():
return int(x)
return x
elif typeidx == TYPE_BOOLEAN:
return self.read_boolean()
elif typeidx == TYPE_STRING:
return self.read_string()
elif (typeidx == TYPE_TABLE or typeidx == TYPE_TORCH or
typeidx == TYPE_FUNCTION or typeidx == TYPE_RECUR_FUNCTION or
typeidx == LEGACY_TYPE_RECUR_FUNCTION):
# read the object reference index
index = self.read_int()
# check it is loaded already
if index in self.objects:
return self.objects[index]
# otherwise read it
if (typeidx == TYPE_FUNCTION or typeidx == TYPE_RECUR_FUNCTION or
typeidx == LEGACY_TYPE_RECUR_FUNCTION):
size = self.read_int()
dumped = self.f.read(size)
upvalues = self.read_obj()
obj = LuaFunction(size, dumped, upvalues)
self.objects[index] = obj
return obj
elif typeidx == TYPE_TORCH:
version = self.read_string(disable_utf8=True)
if version.startswith(b'V '):
version_number = int(float(version.partition(b' ')[2]))
class_name = self.read_string(disable_utf8=True)
else:
class_name = version
# created before existence of versioning
version_number = 0
if class_name in type_handlers:
# TODO: can custom readers ever be self-referential?
self.objects[index] = None # FIXME: if self-referential
obj = type_handlers[class_name](self, version)
self.objects[index] = obj
else:
# This must be performed in two steps to allow objects
# to be a property of themselves.
obj = TorchObject(
class_name, version_number=version_number)
self.objects[index] = obj
# After self.objects is populated, it's safe to read in
# case self-referential
obj._obj = self.read_obj()
return obj
else: # it is a table: returns a custom dict or a list
size = self.read_int()
# custom hashable dict, so that it can be a key, see above
obj = hashable_uniq_dict()
# For checking if keys are consecutive and positive ints;
# if so, returns a list with indices converted to 0-indices.
key_sum = 0
keys_natural = True
# bugfix: obj must be registered before reading keys and vals
self.objects[index] = obj
for _ in range(size):
k = self.read_obj()
v = self.read_obj()
obj[k] = v
if self.use_list_heuristic:
if not isinstance(k, int) or k <= 0:
keys_natural = False
elif isinstance(k, int):
key_sum += k
if self.use_list_heuristic:
# n(n+1)/2 = sum <=> consecutive and natural numbers
n = len(obj)
if keys_natural and n * (n + 1) == 2 * key_sum:
lst = []
for i in range(len(obj)):
elem = obj[i + 1]
# In case it is self-referential. This is not
# needed in lua torch since the tables are never
# modified as they are here.
if elem == obj:
elem = lst
lst.append(elem)
self.objects[index] = obj = lst
return obj
else:
raise T7ReaderException(
"unknown object type / typeidx: {}".format(typeidx))
def load(filename, **kwargs):
"""
Loads the given t7 file using default settings; kwargs are forwarded
to `T7Reader`.
"""
with open(filename, 'rb') as f:
reader = T7Reader(f, **kwargs)
return reader.read_obj()
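# Illustrative usage sketch (not part of the original module); the file name is
# an assumption:
#
#   data = load('model.t7')                           # tensors become numpy arrays
#   raw = load('model.t7', use_list_heuristic=False)  # keep list-like tables as dicts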
| bsd-3-clause | 4,489,873,722,622,736,000 | 34.903302 | 79 | 0.588649 | false | 3.895343 | false | false | false |
paulross/cpip | src/cpip/TokenCss.py | 1 | 8529 | #!/usr/bin/env python
# CPIP is a C/C++ Preprocessor implemented in Python.
# Copyright (C) 2008-2017 Paul Ross
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Paul Ross: [email protected]
"""CSS Support for ITU+TU files in HTML."""
__author__ = 'Paul Ross'
__date__ = '2011-07-10'
__rights__ = 'Copyright (c) 2008-2017 Paul Ross'
import os
from cpip import ExceptionCpip
from cpip.core import ItuToTokens
#from cpip.util import XmlWrite
import string
class ExceptionTokenCss(ExceptionCpip):
pass
#: Map of {token_type : enum_int, ...}
TT_ENUM_MAP = {}
#: Reverse map of {enum_int : token_type, ...}
ENUM_TT_MAP = {}
for __i, __tt in enumerate(ItuToTokens.ITU_TOKEN_TYPES):
__enum = string.ascii_lowercase[__i]
TT_ENUM_MAP[__tt] = __enum
ENUM_TT_MAP[__enum] = __tt
ITU_CSS_LIST = [
"""/* Conditionally compiled == %s. */
span.%s {
background-color: GreenYellow;
}""" % (True, True),
"""/* Conditionally compiled == %s. */
span.%s {
background-color: Salmon;
}""" % (False, False),
"""/* Conditionally compiled == %s. */
span.%s {
background-color: yellowgreen;
}""" % ('Maybe', 'Maybe'),
"""/* %s */
span.%s {
color: Chartreuse;
font-style: italic;
}""" % ('header-name', TT_ENUM_MAP['header-name']),
"""/* %s */
span.%s {
color: BlueViolet;
font-style: normal;
}""" % ('identifier', TT_ENUM_MAP['identifier']),
"""/* %s */
span.%s {
color: HotPink;
font-style: normal;
}""" % ('pp-number', TT_ENUM_MAP['pp-number']),
"""/* %s */
span.%s {
color: orange;
font-style: italic;
}""" % ('character-literal', TT_ENUM_MAP['character-literal']),
"""/* %s */
span.%s {
color: LimeGreen;
font-style: italic;
}""" % ('string-literal', TT_ENUM_MAP['string-literal']),
"""/* %s */
span.%s {
color: black;
font-weight: bold;
font-style: normal;
}""" % ('preprocessing-op-or-punc', TT_ENUM_MAP['preprocessing-op-or-punc']),
"""/* %s */
span.%s {
color: silver;
font-style: normal;
}""" % ('non-whitespace', TT_ENUM_MAP['non-whitespace']),
"""/* %s */
span.%s {
color: black;
font-style: normal;
}""" % ('whitespace', TT_ENUM_MAP['whitespace']),
"""/* %s */
span.%s {
color: black;
font-style: normal;
}""" % ('concat', TT_ENUM_MAP['concat']),
"""/* %s */
span.%s {
color: red;
font-style: normal;
}""" % ('trigraph', TT_ENUM_MAP['trigraph']),
"""/* %s */
span.%s {
color: sienna;
font-style: normal;
}""" % ('C comment', TT_ENUM_MAP['C comment']),
"""/* %s */
span.%s {
color: peru;
font-style: normal;
}""" % ('C++ comment', TT_ENUM_MAP['C++ comment']),
"""/* %s */
span.%s {
color: red;
font-style: normal;
}""" % ('keyword', TT_ENUM_MAP['keyword']),
"""/* %s */
span.%s {
color: blue;
font-style: normal;
}""" % ('preprocessing-directive', TT_ENUM_MAP['preprocessing-directive']),
"""/* %s */
span.%s {
color: red;
font-style: italic;
}""" % ('Unknown', TT_ENUM_MAP['Unknown']),
# Other non-enumerated styles
# HTML styling
"""body {
font-size: 12px;
font-family: arial,helvetica,sans-serif;
margin: 6px;
padding: 6px;
}""",
#===============================================================================
# """h1 {
# font-family: Sans-serif;
# font-size: 1.5em;
# color: silver;
# font-style: italic;
# }""",
#===============================================================================
"""h1 {
color: darkgoldenrod;
font-family: sans-serif;
font-size: 14pt;
font-weight: bold;
}""",
"""h2 {
color: IndianRed;
font-family: sans-serif;
font-size: 14pt;
font-weight: normal;
}""",
"""h3 {
color: Black;
font-family: sans-serif;
font-size: 12pt;
font-weight: bold;
}""",
"""h4 {
color: FireBrick;
font-family: sans-serif;
font-size: 10pt;
font-weight: bold;
}""",
# Specialised classes
# Line numbers
"""span.line {
color: slategrey;
/*font-style: italic; */
}""",
# File names
"""span.file {
color: black;
font-style: italic;
}""",
# Files in tables
"""table.filetable {
border: 2px solid black;
font-family: monospace;
color: black;
}""",
"""th.filetable, td.filetable {
/* border: 1px solid black; */
border: 1px;
border-top-style:solid;
border-right-style:dotted;
border-bottom-style:none;
border-left-style:none;
vertical-align:top;
padding: 2px 6px 2px 6px;
}""",
# Monospaced tables e.g. for token counts
"""table.monospace {
border: 2px solid black;
border-collapse: collapse;
font-family: monospace;
color: black;
}""",
"""th.monospace, td.monospace {
border: 1px solid black;
vertical-align: top;
padding: 2px 6px 2px 6px;
}""",
# Macro presentation
"""span.macro_s_f_r_f_name{
color: DarkSlateGray;
font-family: monospace;
font-weight: normal;
font-style: italic;
}""",
"""span.macro_s_t_r_f_name {
color: DarkSlateGray;
font-family: monospace;
font-weight: normal;
font-style: normal;
}""",
"""span.macro_s_f_r_t_name {
color: Red; /* OrangeRed; */
font-family: monospace;
font-weight: bold;
font-style: italic;
}""",
"""span.macro_s_t_r_t_name{
color: Red; /* OrangeRed; */
font-family: monospace;
font-weight: bold;
font-style: normal;
}""",
"""span.macro_s_f_r_f_repl{
color: SlateGray;
font-family: monospace;
font-weight: normal;
font-style: italic;
}""",
"""span.macro_s_t_r_f_repl {
color: SlateGray;
font-family: monospace;
font-weight: normal;
font-style: normal;
}""",
"""span.macro_s_f_r_t_repl {
color: RosyBrown; /* Orange; */
font-family: monospace;
font-weight: bold;
font-style: italic;
}""",
"""span.macro_s_t_r_t_repl{
color: RosyBrown; /* Orange; */
font-family: monospace;
font-weight: bold;
font-style: normal;
}""",
# File declarations in the macro pages
"""span.file_decl {
color: black;
font-family: monospace;
/* font-weight: bold;
font-style: italic; */
}""",
# Conditional preprocessing directives - True
"""span.CcgNodeTrue {
color: LimeGreen;
font-family: monospace;
/* font-weight: bold; */
/* font-style: italic; */
}""",
# Conditional preprocessing directives - False
"""span.CcgNodeFalse {
color: red;
font-family: monospace;
/* font-weight: bold; */
/* font-style: italic; */
}""",
]
TT_CSS_FILE = 'cpip.css'
TT_CSS_STRING = '\n'.join(ITU_CSS_LIST)
def writeCssToDir(theDir):
"""Writes the CSS file into to the directory.
:param theDir: Directory.
:type theDir: ``str``
:returns: ``NoneType``
"""
try:
if not os.path.exists(theDir):
os.makedirs(theDir)
open(os.path.join(theDir, TT_CSS_FILE), 'w').write(TT_CSS_STRING)
except IOError as err:
raise ExceptionTokenCss('writeCssToDir(): %s' % str(err)) from err
def writeCssForFile(theFile):
"""Writes the CSS file into to the directory that the file is in."""
return writeCssToDir(os.path.dirname(theFile))
def retClass(theTt):
"""
:param theTt: Token type
:type theTt: ``str``
:returns: ``str`` -- CSS class.
:raises: ``ExceptionTokenCss`` For unknown token type.
"""
try:
return TT_ENUM_MAP[theTt]
except KeyError:
raise ExceptionTokenCss('Unknown token type %s' % theTt)
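# Illustrative usage sketch (not part of the original module); the output
# directory is an assumption:
#
#   writeCssToDir('html_out')      # writes html_out/cpip.css
#   cls = retClass('identifier')   # CSS class used for identifier tokens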
| gpl-2.0 | -6,426,611,412,637,766,000 | 25.405573 | 80 | 0.557158 | false | 3.124176 | false | false | false |
stefanvanwouw/najnaf | src/worker/controller.py | 1 | 1720 | from shared.app import AppState, AppCommand
import socket, pickle, conf, time
class WorkerController(object):
def __init__(self, state_obj, consumer):
self._state_obj = state_obj
self._consumer = consumer
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._sock.bind((conf.CONTROLLER_BIND_IP, conf.CONTROLLER_PORT))
self._master_ip = None
def run(self):
while True:
data, addr = self._sock.recvfrom(1024) # Receive, 1024 bytes buffer.
cmd = pickle.loads(data) # Deserialize data into command object.
if not isinstance(cmd,AppCommand):
continue
else:
if cmd.type == AppCommand.INIT and (self._state_obj.state ==
AppState.READY or self._state_obj.state ==
AppState.ERROR or (self._state_obj.state ==
AppState.RUNNING and self._master_ip != cmd.data)):
print "received init"
self._state_obj.state = AppState.INIT
self._master_ip = cmd.data
try:
self._consumer.connect(self._master_ip, conf.APOLLO_QUEUE_PORT)
self._consumer.subscribe()
self._state_obj.state = AppState.RUNNING
except:
self._state_obj.state = AppState.ERROR
elif cmd.type == AppCommand.SHUTDOWN and self._state_obj.state == AppState.RUNNING:
self._state_obj.state = AppState.SHUTTING_DOWN
# unsubscribe and join threads
self._consumer.shut_down()
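# Illustrative sketch (not part of the original file): the master side is
# assumed to send a pickled AppCommand over UDP to conf.CONTROLLER_PORT; the
# AppCommand constructor shown here is only a guess at its signature:
#
#   cmd = AppCommand(AppCommand.INIT, data=master_ip)
#   sock.sendto(pickle.dumps(cmd), (worker_ip, conf.CONTROLLER_PORT))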
| gpl-3.0 | -4,000,206,089,064,693,000 | 43.102564 | 99 | 0.538372 | false | 4.410256 | false | false | false |
lago-project/lago-ost-plugin | ovirtlago/server.py | 1 | 4700 | #
# Copyright 2014-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import contextlib
import errno
import logging
import os
import threading
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import ThreadingTCPServer
import sys
import traceback
LOGGER = logging.getLogger(__name__)
class LagoThreadingTCPServer(ThreadingTCPServer):
""" A custom multi-threaded TCP server.
We use `allow_reuse_address` in order to avoid a race when opening and
closing multiple servers (at each point in time only one server is
listening).
For example, the first server has a connection in 'time_wait' state,
while the second server tries to bind its socket.
Attributes:
_allowed_exceptions(tuple of Exceptions): If an exception occurs
and its type isn't not in `_allowed_exceptions`, its traceback
will be printed to the log.
_allowed_errnos(tuple of ints): If an OSError exception occurs
and its errno isn't not in `_allowed_errnos`, its traceback
will be printed to the log.
"""
allow_reuse_address = True
def __init__(
self,
server_address,
RequestHandlerClass,
allowed_exceptions=(),
allowed_errnos=(errno.EPIPE, ),
):
# We can't use super since the superclass isn't a new style class
ThreadingTCPServer.__init__(self, server_address, RequestHandlerClass)
self._allowed_exceptions = allowed_exceptions
self._allowed_errnos = allowed_errnos
def handle_error(self, request, client_address):
""" Handle an error gracefully
Overrides the default implementation which prints
the error to stdout and stderr
"""
_, value, _ = sys.exc_info()
ignore_err_conditions = [
hasattr(value, 'errno') and value.errno in self._allowed_errnos,
isinstance(value, self._allowed_exceptions),
]
if any(ignore_err_conditions):
return
LOGGER.debug(traceback.format_exc())
def generate_request_handler(root_dir):
"""
Factory for _BetterHTTPRequestHandler classes
Args:
root_dir (path): Path to the dir to serve
Returns:
_BetterHTTPRequestHandler: A ready to be used improved http request
handler
"""
class _BetterHTTPRequestHandler(SimpleHTTPRequestHandler):
__root_dir = root_dir
_len_cwd = len(os.getcwd())
def translate_path(self, path):
return os.path.join(
self.__root_dir,
SimpleHTTPRequestHandler.translate_path(
self, path
)[self._len_cwd:].lstrip('/')
)
def log_message(self, *args, **kwargs):
pass
return _BetterHTTPRequestHandler
def _create_http_server(listen_ip, listen_port, root_dir):
"""
Starts an http server with an improved request handler
Args:
listen_ip (str): Ip to listen on
port (int): Port to register on
root_dir (str): path to the directory to serve
Returns:
BaseHTTPServer: instance of the http server, already running on a
thread
"""
server = LagoThreadingTCPServer(
(listen_ip, listen_port),
generate_request_handler(root_dir),
)
threading.Thread(target=server.serve_forever).start()
return server
@contextlib.contextmanager
def repo_server_context(gw_ip, port, root_dir):
"""
Context manager that starts a generic http server that serves `root_dir`,
and listens on `gw_ip`:`port`.
Args:
gw_ip(str): IP to listen on
port(int): Port to listen on
root_dir(str): The root directory that will be served.
"""
server = _create_http_server(
listen_ip=gw_ip,
listen_port=port,
root_dir=root_dir,
)
try:
yield
finally:
server.shutdown()
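# Illustrative usage sketch (not part of the original module); the IP, port and
# directory below are assumptions:
#
#   with repo_server_context(gw_ip='192.168.200.1', port=8585,
#                            root_dir='/var/lib/lago/internal_repo'):
#       pass  # guests can fetch files over HTTP while inside the block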
| gpl-2.0 | -6,331,778,533,540,957,000 | 29.718954 | 79 | 0.65383 | false | 4.339797 | false | false | false |
ajdiaz/mico | mico/util/switch.py | 1 | 1215 | #! /usr/bin/env python
# -*- encoding: utf-8 -*-
# vim:fenc=utf-8:
from __builtin__ import env
"""This module provide a switcher, which when is accesses toggle an internal
status.
"""
class Switcher(object):
"""Switch a global status using the environment as critical region, and
setted using class constructor.
"""
_switch = (None, None)
def __init__(self, init_value=None):
self._old_value = env.get(self._switch[0], None)
env[self._switch[0]] = self._switch[1] \
if init_value is None \
else init_value
def __enter__(self):
pass
def __exit__(self, t, v, tr):
if self._old_value is None:
del env[self._switch[0]]
else:
env[self._switch[0]] = self._old_value
@staticmethod
def getValue(key):
"""Class method to get the value of an specified switcher using key.
"""
return env.get(key, None)
@classmethod
def from_key(cls, key, value):
"""Class method to create a new switcher using key and value.
"""
return type.__new__(type,"switch_%s" % key, (Switcher,),{ "_switch": (key, value) })
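# Illustrative usage sketch (not part of the original module); the key and
# value are assumptions:
#
#   Verbose = Switcher.from_key('mode', 'verbose')
#   with Verbose():
#       assert Switcher.getValue('mode') == 'verbose'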
| gpl-2.0 | -290,881,152,891,639,100 | 26.613636 | 92 | 0.558025 | false | 3.785047 | false | false | false |
sailthru/stolos | stolos/examples/valid_if_or_example.py | 1 | 1515 | import datetime
def func(app_name, **parsed_job_id):
"""
This example function evaluates the identifiers in a job_id to decide
whether a particular job should be executed or not. To use it, your task
configuration must define a "valid_if_or" section that points to this
function.
This functionality is useful when you don't wish to create a new identifier
for a particular job_id or you wish to have Stolos mark specific
job_ids as "skipped"
PARAMS:
app_name - the task name. We provide this option so this func can be
generalized to more than one scheduled app
**parsed_job_id - specifies the identifiers that make up the job_id.
You could also just decide to explicitly define keyword args like:
def func(app_name, date, client_id, collection_name)
In this particular example, this function will not let Stolos queue
    any job_ids except those job_ids where client_id is 1111 and the given
    date is a Friday or Sunday.  All other job_ids this function receives
will not be queued by Stolos, and will instead be marked as
"skipped"
"""
c1 = parsed_job_id['client_id'] == 1111
_day_of_week = datetime.datetime.strptime(
str(parsed_job_id['date']), '%Y%m%d').strftime("%A")
c2 = _day_of_week in ['Friday', 'Sunday']
from stolos.examples import log
log.critical('%s %s %s %s' % (app_name, parsed_job_id, c1, c2))
    return c1 and c2
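# ---------------------------------------------------------------------------
# Added demonstration only (requires the stolos package to be importable):
# calling the validation function directly with a hypothetical app name and
# job_id components. 2014-06-01 was a Sunday and the client_id is 1111, so
# this prints True.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    print(func('test_app/make_report', client_id=1111, date=20140601))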
| apache-2.0 | 8,128,489,602,805,298,000 | 37.846154 | 79 | 0.676568 | false | 3.806533 | false | false | false |
PaddlePaddle/models | PaddleCV/tracking/ltr/train_settings/siamrpn/siamrpn_res50.py | 1 | 5570 | import paddle.fluid as fluid
import paddle.fluid.dygraph as dygraph
import ltr.actors as actors
import ltr.data.transforms as dltransforms
from ltr.data import processing, sampler, loader
from ltr.dataset import ImagenetVID, ImagenetDET, MSCOCOSeq, YoutubeVOS
from ltr.models.siam.siam import SiamRPN_ResNet50
from ltr.models.loss import select_softmax_with_cross_entropy_loss, weight_l1_loss
from ltr.trainers import LTRTrainer
import numpy as np
import cv2 as cv
from PIL import Image, ImageEnhance
def run(settings):
# Most common settings are assigned in the settings struct
settings.description = 'SiamRPN with ResNet-50 backbone.'
settings.print_interval = 100 # How often to print loss and other info
settings.batch_size = 32 # Batch size
settings.num_workers = 4 # Number of workers for image loading
settings.search_area_factor = {'train': 1.0, 'test': 255./127.}
settings.output_sz = {'train': 127, 'test': 255}
settings.scale_type = 'context'
settings.border_type = 'meanpad'
# Settings for the image sample and label generation
settings.center_jitter_factor = {'train': 0.1, 'test': 1.5}
settings.scale_jitter_factor = {'train': 0.05, 'test': 0.18}
settings.label_params = {
'search_size': 255,
'output_size': 25,
'anchor_stride': 8,
'anchor_ratios': [0.33, 0.5, 1, 2, 3],
'anchor_scales': [8],
'num_pos': 16,
'num_neg': 16,
'num_total': 64,
'thr_high': 0.6,
'thr_low': 0.3
}
settings.loss_weights = {'cls': 1., 'loc': 1.2}
settings.neg = 0.2
# Train datasets
vos_train = YoutubeVOS()
vid_train = ImagenetVID()
coco_train = MSCOCOSeq()
det_train = ImagenetDET()
# Validation datasets
#vid_val = ImagenetVID()
vid_val = coco_train
    # The joint augmentation transform, applied jointly to both images in a pair
transform_joint = dltransforms.ToGrayscale(probability=0.25)
# The augmentation transform applied to the training set (individually to each image in the pair)
transform_exemplar = dltransforms.Transpose()
transform_instance = dltransforms.Transpose()
# Data processing to do on the training pairs
data_processing_train = processing.SiamProcessing(
search_area_factor=settings.search_area_factor,
output_sz=settings.output_sz,
center_jitter_factor=settings.center_jitter_factor,
scale_jitter_factor=settings.scale_jitter_factor,
scale_type=settings.scale_type,
border_type=settings.border_type,
mode='sequence',
label_params=settings.label_params,
train_transform=transform_exemplar,
test_transform=transform_instance,
joint_transform=transform_joint)
# Data processing to do on the validation pairs
data_processing_val = processing.SiamProcessing(
search_area_factor=settings.search_area_factor,
output_sz=settings.output_sz,
center_jitter_factor=settings.center_jitter_factor,
scale_jitter_factor=settings.scale_jitter_factor,
scale_type=settings.scale_type,
border_type=settings.border_type,
mode='sequence',
label_params=settings.label_params,
transform=transform_exemplar,
joint_transform=transform_joint)
# The sampler for training
dataset_train = sampler.MaskSampler(
[vid_train, coco_train, det_train, vos_train],
        [2, 1, 1, 2],
samples_per_epoch=5000 * settings.batch_size,
max_gap=100,
processing=data_processing_train,
neg=settings.neg)
# The loader for training
train_loader = loader.LTRLoader(
'train',
dataset_train,
training=True,
batch_size=settings.batch_size,
num_workers=settings.num_workers,
stack_dim=0)
# The sampler for validation
dataset_val = sampler.MaskSampler(
[vid_val],
[1, ],
samples_per_epoch=100 * settings.batch_size,
max_gap=100,
processing=data_processing_val)
# The loader for validation
val_loader = loader.LTRLoader(
'val',
dataset_val,
training=False,
batch_size=settings.batch_size,
num_workers=settings.num_workers,
stack_dim=0)
    # create network, set objective, create optimizer, learning rate scheduler, trainer
with dygraph.guard():
# Create network
def scale_loss(loss):
total_loss = 0
for k in settings.loss_weights:
total_loss += loss[k] * settings.loss_weights[k]
return total_loss
net = SiamRPN_ResNet50(scale_loss=scale_loss)
# Define objective
objective = {
'cls': select_softmax_with_cross_entropy_loss,
'loc': weight_l1_loss,
}
# Create actor, which wraps network and objective
actor = actors.SiamActor(net=net, objective=objective)
# Set to training mode
actor.train()
# Define optimizer and learning rate
lr_scheduler = fluid.layers.exponential_decay(
learning_rate=0.005,
decay_steps=5000,
decay_rate=0.9659,
staircase=True)
optimizer = fluid.optimizer.Adam(
parameter_list=net.rpn_head.parameters() + net.neck.parameters(),
learning_rate=lr_scheduler)
trainer = LTRTrainer(actor, [train_loader, val_loader], optimizer, settings, lr_scheduler)
trainer.train(50, load_latest=False, fail_safe=False)
| apache-2.0 | -2,883,852,501,765,936,000 | 34.031447 | 101 | 0.646499 | false | 3.676568 | true | false | false |
cerebis/meta-sweeper | bin/metaART.py | 1 | 8681 | #!/usr/bin/env python
"""
meta-sweeper - for performing parametric sweeps of simulated
metagenomic sequencing experiments.
Copyright (C) 2016 "Matthew Z DeMaere"
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import atexit
import os
import subprocess
import sys
import numpy as np
from Bio import SeqIO
import abundance
import io_utils
TMP_INPUT = 'seq.tmp'
TMP_OUTPUT = 'reads.tmp'
# low-high seeds, giving 5M values
LOW_SEED_VALUE = 1000000
HIGH_SEED_VALUE = 6000000
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Simulate a metagenomic data set from an abundance profile')
parser.add_argument('-C', '--compress', choices=['gzip', 'bzip2'], default=None, help='Compress output files')
parser.add_argument('-n', '--output-name', metavar='PATH', help='Output file base name', required=True)
parser.add_argument('-P', '--profile', dest='profile', required=False,
help='Community abundance profile', metavar='FILE')
parser.add_argument('-M', '--max-coverage', metavar='INT', type=int, required=True,
                        help='Coverage of most abundant taxon')
parser.add_argument('-S', '--seed', metavar='INT', type=int, required=True, help='Random seed')
parser.add_argument('-l', '--read-len', metavar='INT', type=int, required=True, help='Read length')
parser.add_argument('-m', '--insert-len', metavar='INT', type=int, required=True, help='Insert length')
parser.add_argument('-s', '--insert-sd', metavar='INT', type=int, required=True, help='Insert standard deviation')
parser.add_argument('--art-path', default='art_illumina', help='Path to ART executable [default: art_illumina]')
parser.add_argument('--log', default='metaART.log', type=argparse.FileType('w'), help='Log file name')
parser.add_argument('--coverage-out', metavar='FILE', default='coverage.tsv',
help='Output file for simulated genome coverage table', required=False)
parser.add_argument('-z', '--num-samples', metavar='INT', type=int, default=1, required=True,
help='Number of transect samples')
parser.add_argument('--dist', metavar='DISTNAME', choices=['equal', 'uniform', 'lognormal'],
help='Abundance profile distribution [equal, uniform, lognormal]')
parser.add_argument('--lognorm-mu', metavar='FLOAT', type=float, default=1.0, required=False,
help='Log-normal relative abundance mu parameter')
parser.add_argument('--lognorm-sigma', metavar='FLOAT', type=float, default=1.0, required=False,
help='Log-normal relative abundance sigma parameter')
parser.add_argument('fasta', metavar='MULTIFASTA',
help='Input multi-fasta of all sequences')
parser.add_argument('output_dir', metavar='DIR',
help='Output directory')
args = parser.parse_args()
@atexit.register
def close_cov():
coverage_file.close()
seq_index = SeqIO.index(args.fasta, 'fasta')
base_name = os.path.join(args.output_dir, args.output_name)
r1_tmp = os.path.join(args.output_dir, '{0}1.fq'.format(TMP_OUTPUT))
r2_tmp = os.path.join(args.output_dir, '{0}2.fq'.format(TMP_OUTPUT))
seq_tmp = os.path.join(args.output_dir, TMP_INPUT)
coverage_file = open(os.path.join(args.output_dir, args.coverage_out), 'w')
RANDOM_STATE = np.random.RandomState(args.seed)
child_seeds = RANDOM_STATE.randint(LOW_SEED_VALUE, HIGH_SEED_VALUE, args.num_samples).tolist()
profile_seeds = RANDOM_STATE.randint(LOW_SEED_VALUE, HIGH_SEED_VALUE, args.num_samples).tolist()
if args.profile:
# if specified, read the static profile table from disk rather than calculate at runtime.
        # this will mean the same abundance profile is used in each sample -- in multisample mode.
profile = abundance.read_profile(args.profile)
# generate N simulated communities
for n in xrange(0, args.num_samples):
# generate abundance profile from global seeded random state -- if not using a static table
if not args.profile:
seq_names = [si for si in seq_index]
profile = abundance.generate_profile(profile_seeds[n], seq_names, mode=args.dist,
lognorm_mu=args.lognorm_mu, lognorm_sigma=args.lognorm_sigma)
for i, chr_abn in enumerate(profile.values(), start=1):
coverage_file.write('{0}\t{1}\t{2}\t{3}\t{4}\n'.format(
n + 1, i, chr_abn.name, chr_abn.cell,
chr_abn.effective_abundance() * args.max_coverage))
print 'Sample {0} Relative Abundances:'.format(n)
profile.write_table(sys.stdout)
if args.num_samples > 1:
r1_final = '{0}.{1}.r1.fq'.format(base_name, n+1)
r2_final = '{0}.{1}.r2.fq'.format(base_name, n+1)
else:
r1_final = '{0}.r1.fq'.format(base_name)
r2_final = '{0}.r2.fq'.format(base_name)
        r1_tmp = os.path.join(args.output_dir, '{0}1.fq'.format(TMP_OUTPUT))
        r2_tmp = os.path.join(args.output_dir, '{0}2.fq'.format(TMP_OUTPUT))
output_R1 = io_utils.open_output(r1_final, mode='w', compress=args.compress)
output_R2 = io_utils.open_output(r2_final, mode='w', compress=args.compress)
try:
# iteratively call ART for each chromosome in profile, accumulate the results
for chr_abn in profile:
coverage = chr_abn.effective_abundance() * args.max_coverage
print '\tRequesting {0:.4f} coverage for {1}'.format(coverage, chr_abn.name)
# iteration target for ART
try:
ref_seq = seq_index[chr_abn.name]
ref_len = len(ref_seq)
SeqIO.write([ref_seq], seq_tmp, 'fasta')
subprocess.check_call([args.art_path,
'-p', # paired-end sequencing
'-na', # no alignment file
'-rs', str(child_seeds[n]),
'-m', str(args.insert_len),
'-s', str(args.insert_sd),
'-l', str(args.read_len),
'-f', str(coverage),
'-i', seq_tmp,
'-o', os.path.join(args.output_dir, TMP_OUTPUT)],
stdout=args.log, stderr=args.log)
except OSError as e:
print "There was an error executing \"art_illumina\"."
print "Check that it is either on your PATH or specify it at runtime."
raise e
except subprocess.CalledProcessError as e:
print e
raise e
# count generated reads
r1_n = 0
for seq in SeqIO.parse(r1_tmp, 'fastq'):
r1_n += 1
r2_n = 0
for seq in SeqIO.parse(r2_tmp, 'fastq'):
r2_n += 1
assert r1_n == r2_n, 'Error: failed to generate an equal number of fwd and rev reads'
effective_cov = args.read_len * (r1_n + r2_n) / float(ref_len)
print '\tGenerated {0} pairs for {1}, {2:.3f} coverage'.format(r1_n, chr_abn.name, effective_cov)
if r1_n != r2_n:
print 'Error: paired-end counts do not match {0} vs {1}'.format(r1_n, r2_n)
sys.exit(1)
io_utils.multicopy_tostream(r1_tmp, output_R1)
io_utils.multicopy_tostream(r2_tmp, output_R2)
os.remove(r1_tmp)
os.remove(r2_tmp)
os.remove(seq_tmp)
except Exception as e:
print e
print 'Warning!! -- non-zero exit'
sys.exit(1)
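# Example invocation (added for illustration only; file names and parameter
# values below are made up, not defaults of this script):
#   ./metaART.py -n sim -M 20 -S 1234 -l 150 -m 300 -s 50 -z 2 \
#       --dist lognormal community.fasta output_dir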
| gpl-3.0 | 4,648,134,745,466,227,000 | 45.672043 | 118 | 0.58173 | false | 3.743424 | false | false | false |
johncfaver/iddqd | cgi-bin/molecule.py | 1 | 4991 | #!/usr/bin/env python
from atom import atom
from sys import exit
from chem import sym2mass
class molecule:
def __init__(self,filename,filetype=None):
self.filename=filename
if filetype == None:
if filename.lower()[-3:]=='pdb':
self.filetype='pdb'
elif filename.lower()[-3:]=='xyz':
self.filetype='xyz'
elif filename.lower()[-3:]=='mol':
self.filetype='mol'
if self.filetype == 'xyz':
self.charge=0
self.molweight=0.
self.atoms=self.readXYZfile(filename)
self.natoms=len(self.atoms)
if self.filetype == 'pdb':
self.charge=0
self.molweight=0.
self.atoms,self.firstatominres,self.resnames=self.readPDBfile(filename)
self.nres=len(self.resnames)
self.natoms=len(self.atoms)
self.calcCharge()
if self.filetype == 'mol':
self.charge=0
self.molweight=0
self.atoms=self.readMOLfile(filename)
self.natoms=len(self.atoms)
def readPDBfile(self,filename):
try:
f=open(filename)
except Exception:
print 'ERROR LOADING ',filename
exit()
atomlist=[]
firstatominres=[]
res='1'
firstatominres.append(1)
resnames=[]
for line in f.readlines():
if line.split()[0].strip()=='ATOM':
atomlist.append(atom(line.split()[2][0],line.split()[5],line.split()[6],line.split()[7],line.split()[2]))
if len(resnames)==0:
resnames.append(line.split()[3])
if line.split()[4] != res:
firstatominres.append(len(atomlist))
resnames.append(line.split()[3])
res=line.split()[4]
return (atomlist,firstatominres,resnames)
def readXYZfile(self,filename):
try:
f=open(filename)
except Exception:
print 'ERROR LOADING ',filename
return 1
natoms=int(f.readline().strip())
try:
line=f.readline().strip()
if len(line.split())==1:
self.charge=int(line)
elif len(line.split())==2:
self.charge=int(line.split()[1])
except Exception:
print line.split(),filename
print 'ERROR reading XYZ file. Please put the charge on line 2.'
exit()
fl=f.readlines()
f.close()
atomlist=[]
for i in range(natoms):
try:
atomlist.append(atom(fl[i].split()[0],fl[i].split()[1],fl[i].split()[2],fl[i].split()[3]))
                self.molweight += sym2mass[atomlist[-1].atsym.upper()]
except Exception:
                print 'ERROR reading XYZ file. Check line', str(i+3), ' of ', filename, '.'
break
return atomlist
def readMOLfile(self,filename):
try:
f=open(filename)
except Exception:
print 'ERROR LOADING ',filename
return 1
for i in xrange(3):
f.readline()
natoms=int(f.readline().split()[0])
atomlist=[]
for i in xrange(natoms):
try:
line=f.readline()
atomlist.append(atom(line.split()[3],line.split()[0],line.split()[1],line.split()[2]))
self.molweight+=sym2mass[atomlist[-1].atsym.upper()]
except Exception:
print 'ERROR Reading MOL file at line:', line.split()
break
f.close()
return atomlist
def calcCharge(self):
if self.filetype != 'pdb':
return 0
for i in self.resnames:
if i in ['ASP','GLU']:
self.charge-=1
if i in ['LYS','ARG','HIS']:
self.charge+=1
def writeXYZfile(self,filename):
f=open(filename,'w')
f.write(str(len(self.atoms))+' \n')
f.write('comment \n')
for i in self.atoms:
f.write(i.atsym+' '+str(i.x)+' '+str(i.y)+' '+str(i.z)+' \n')
f.close()
def printInfo(self):
print self.filename,self.natoms,' atoms',self.charge,' charge'
for k in self.atoms:
k.printInfo()
def formula(self):
symbols=[]
counts=[]
for i in self.atoms:
if i.atsym in symbols:
counts[symbols.index(i.atsym)]+=1
else:
symbols.append(i.atsym)
counts.append(1)
order=['C','H','BR','CL','F','I','N','O','P','S']
fstr=''
for i in order:
if i in symbols:
j=symbols.index(i)
fstr+=symbols.pop(j)+str(counts.pop(j))
for i,j in enumerate(symbols):
fstr+=j+str(counts[i])
return fstr
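# ---------------------------------------------------------------------------
# Added usage sketch (not part of the original module); 'example.xyz' is a
# hypothetical input file with the charge on line 2, as readXYZfile expects.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    mol = molecule('example.xyz')
    mol.printInfo()
    print mol.formula(), mol.molweight, mol.charge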
| gpl-3.0 | 7,313,591,759,236,521,000 | 33.184932 | 121 | 0.495091 | false | 3.896175 | false | false | false |
ionrock/designate | designate/tests/test_api/test_v2/test_zones.py | 1 | 26809 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mock import patch
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_log import log as logging
from designate import exceptions
from designate.central import service as central_service
from designate.tests.test_api.test_v2 import ApiV2TestCase
LOG = logging.getLogger(__name__)
class ApiV2ZonesTest(ApiV2TestCase):
def setUp(self):
super(ApiV2ZonesTest, self).setUp()
# Create the default TLDs
self.create_default_tlds()
def test_create_zone(self):
# Create a zone
fixture = self.get_domain_fixture(fixture=0)
response = self.client.post_json('/zones/', fixture)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertEqual('PRIMARY', response.json['type'])
self.assertEqual([], response.json['masters'])
self.assertIsNone(response.json['updated_at'])
for k in fixture:
self.assertEqual(fixture[k], response.json[k])
def test_create_zone_no_type(self):
# Create a zone
fixture = self.get_domain_fixture(fixture=0)
del fixture['type']
response = self.client.post_json('/zones/', fixture)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertEqual('PRIMARY', response.json['type'])
self.assertEqual([], response.json['masters'])
self.assertIsNone(response.json['updated_at'])
for k in fixture:
self.assertEqual(fixture[k], response.json[k])
def test_create_zone_validation(self):
        # NOTE: The schemas should be tested separately from the API. So we
# don't need to test every variation via the API itself.
# Fetch a fixture
fixture = self.get_domain_fixture(fixture=0)
# Add a junk field to the body
fixture['junk'] = 'Junk Field'
# Ensure it fails with a 400
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_email_too_long(self):
fixture = self.get_domain_fixture(fixture=0)
fixture.update({'email': 'a' * 255 + '@abc.com'})
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_invalid_email(self):
invalid_emails = [
'org',
'example.org',
'bla.example.org',
'org.',
'example.org.',
'bla.example.org.',
]
fixture = self.get_domain_fixture(fixture=0)
for email in invalid_emails:
fixture.update({'email': email})
body = fixture
self._assert_exception('invalid_object', 400,
self.client.post_json,
'/zones', body)
def test_create_zone_email_missing(self):
fixture = self.get_domain_fixture(fixture=0)
del fixture['email']
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_less_than_zero(self):
fixture = self.get_domain_fixture(fixture=0)
fixture['ttl'] = -1
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_zero(self):
fixture = self.get_domain_fixture(fixture=0)
fixture['ttl'] = 0
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_greater_than_max(self):
fixture = self.get_domain_fixture(fixture=0)
fixture['ttl'] = 2174483648
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_invalid(self):
fixture = self.get_domain_fixture(fixture=0)
fixture['ttl'] = "!@?>"
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_not_required_field(self):
fixture = self.get_domain_fixture(fixture=0)
body = fixture
response = self.client.post_json('/zones', body)
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
def test_create_zone_description_too_long(self):
fixture = self.get_domain_fixture(fixture=0)
fixture['description'] = "a" * 161
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_name_is_missing(self):
fixture = self.get_domain_fixture(fixture=0)
del fixture['name']
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_name_too_long(self):
fixture = self.get_domain_fixture(fixture=0)
fixture['name'] = 'x' * 255 + ".com"
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_body_validation(self):
fixture = self.get_domain_fixture(fixture=0)
# Add id to the body
fixture['id'] = '2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 400
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
fixture = self.get_domain_fixture(fixture=0)
# Add created_at to the body
fixture['created_at'] = '2014-03-12T19:07:53.000000'
# Ensure it fails with a 400
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_invalid_name(self):
# Try to create a zone with an invalid name
fixture = self.get_domain_fixture(fixture=-1)
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', fixture)
@patch.object(central_service.Service, 'create_domain',
side_effect=messaging.MessagingTimeout())
def test_create_zone_timeout(self, _):
fixture = self.get_domain_fixture(fixture=0)
body = fixture
self._assert_exception('timeout', 504, self.client.post_json,
'/zones/', body)
@patch.object(central_service.Service, 'create_domain',
side_effect=exceptions.DuplicateDomain())
def test_create_zone_duplicate(self, _):
fixture = self.get_domain_fixture(fixture=0)
body = fixture
self._assert_exception('duplicate_domain', 409, self.client.post_json,
'/zones/', body)
def test_create_zone_missing_content_type(self):
self._assert_exception('unsupported_content_type', 415,
self.client.post, '/zones')
def test_create_zone_bad_content_type(self):
self._assert_exception(
'unsupported_content_type', 415, self.client.post, '/zones',
headers={'Content-type': 'test/goat'})
def test_zone_invalid_url(self):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980/invalid'
self._assert_exception('not_found', 404, self.client.get, url,
headers={'Accept': 'application/json'})
self._assert_exception('not_found', 404, self.client.patch_json, url)
self._assert_exception('not_found', 404, self.client.delete, url)
# Pecan returns a 405 for post
response = self.client.post(url, status=405)
self.assertEqual(405, response.status_int)
def test_get_zones(self):
response = self.client.get('/zones/')
# Check the headers are what we expect
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('zones', response.json)
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# We should start with 0 zones
self.assertEqual(0, len(response.json['zones']))
data = [self.create_domain(name='x-%s.com.' % i)
for i in 'abcdefghij']
self._assert_paging(data, '/zones', key='zones')
self._assert_invalid_paging(data, '/zones', key='zones')
@patch.object(central_service.Service, 'find_domains',
side_effect=messaging.MessagingTimeout())
def test_get_zones_timeout(self, _):
self._assert_exception('timeout', 504, self.client.get, '/zones/')
def test_get_zone(self):
# Create a zone
zone = self.create_domain()
response = self.client.get('/zones/%s' % zone['id'],
headers=[('Accept', 'application/json')])
# Check the headers are what we expect
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertIsNone(response.json['updated_at'])
self.assertEqual(zone['name'], response.json['name'])
self.assertEqual(zone['email'], response.json['email'])
def test_get_zone_invalid_id(self):
self._assert_invalid_uuid(self.client.get, '/zones/%s')
@patch.object(central_service.Service, 'get_domain',
side_effect=messaging.MessagingTimeout())
def test_get_zone_timeout(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('timeout', 504, self.client.get, url,
headers={'Accept': 'application/json'})
@patch.object(central_service.Service, 'get_domain',
side_effect=exceptions.DomainNotFound())
def test_get_zone_missing(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('domain_not_found', 404, self.client.get, url,
headers={'Accept': 'application/json'})
def test_get_zone_bad_accept(self):
url = '/zones/6e2146f3-87bc-4f47-adc5-4df0a5c78218'
self.client.get(url, headers={'Accept': 'test/goat'}, status=406)
def test_update_zone(self):
# Create a zone
zone = self.create_domain()
# Prepare an update body
body = {'email': 'prefix-%s' % zone['email']}
response = self.client.patch_json('/zones/%s' % zone['id'], body,
status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
self.assertIn('status', response.json)
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIsNotNone(response.json['updated_at'])
self.assertEqual('prefix-%s' % zone['email'],
response.json['email'])
def test_update_zone_invalid_id(self):
self._assert_invalid_uuid(self.client.patch_json, '/zones/%s')
def test_update_zone_validation(self):
        # NOTE: The schemas should be tested separately from the API. So we
# don't need to test every variation via the API itself.
# Create a zone
zone = self.create_domain()
# Prepare an update body with junk in the body
body = {'email': 'prefix-%s' % zone['email'],
'junk': 'Junk Field'}
url = '/zones/%s' % zone['id']
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.patch_json,
url, body)
# Prepare an update body with negative ttl in the body
body = {'email': 'prefix-%s' % zone['email'],
'ttl': -20}
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.patch_json,
url, body)
# Prepare an update body with ttl > maximum (2147483647) in the body
body = {'email': 'prefix-%s' % zone['email'],
'ttl': 2147483648}
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.patch_json,
url, body)
@patch.object(central_service.Service, 'get_domain',
side_effect=exceptions.DuplicateDomain())
def test_update_zone_duplicate(self, _):
# Prepare an update body
body = {'email': '[email protected]'}
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 409
self._assert_exception('duplicate_domain', 409, self.client.patch_json,
url, body)
@patch.object(central_service.Service, 'get_domain',
side_effect=messaging.MessagingTimeout())
def test_update_zone_timeout(self, _):
# Prepare an update body
body = {'email': '[email protected]'}
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 504
self._assert_exception('timeout', 504, self.client.patch_json,
url, body)
@patch.object(central_service.Service, 'get_domain',
side_effect=exceptions.DomainNotFound())
def test_update_zone_missing(self, _):
# Prepare an update body
body = {'email': '[email protected]'}
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 404
self._assert_exception('domain_not_found', 404, self.client.patch_json,
url, body)
def test_delete_zone(self):
zone = self.create_domain()
response = self.client.delete('/zones/%s' % zone['id'], status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual('DELETE', response.json['action'])
self.assertEqual('PENDING', response.json['status'])
def test_delete_zone_invalid_id(self):
self._assert_invalid_uuid(self.client.delete, '/zones/%s')
@patch.object(central_service.Service, 'delete_domain',
side_effect=messaging.MessagingTimeout())
def test_delete_zone_timeout(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('timeout', 504, self.client.delete, url)
@patch.object(central_service.Service, 'delete_domain',
side_effect=exceptions.DomainNotFound())
def test_delete_zone_missing(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('domain_not_found', 404, self.client.delete,
url)
def test_post_abandon_zone(self):
zone = self.create_domain()
url = '/zones/%s/tasks/abandon' % zone.id
# Ensure that we get permission denied
self._assert_exception('forbidden', 403, self.client.post_json, url)
# Ensure that abandon zone succeeds with the right policy
self.policy({'abandon_domain': '@'})
response = self.client.post_json(url)
self.assertEqual(204, response.status_int)
def test_get_abandon_zone(self):
zone = self.create_domain()
url = '/zones/%s/tasks/abandon' % zone.id
self._assert_exception('method_not_allowed', 405, self.client.get, url)
def test_get_invalid_abandon(self):
# This is an invalid endpoint - should return 404
url = '/zones/tasks/abandon'
self._assert_exception('not_found', 404, self.client.get, url)
def test_get_zone_tasks(self):
# This is an invalid endpoint - should return 404
zone = self.create_domain()
url = '/zones/%s/tasks' % zone.id
self._assert_exception('not_found', 404, self.client.get, url)
def test_create_secondary(self):
# Create a zone
fixture = self.get_domain_fixture('SECONDARY', 0)
fixture['masters'] = ["10.0.0.1"]
response = self.client.post_json('/zones/', fixture)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertEqual(cfg.CONF['service:central'].managed_resource_email,
response.json['email'])
self.assertIsNone(response.json['updated_at'])
# Zone is not transferred yet
self.assertIsNone(response.json['transferred_at'])
# Serial defaults to 1
self.assertEqual(response.json['serial'], 1)
for k in fixture:
self.assertEqual(fixture[k], response.json[k])
def test_create_secondary_no_masters(self):
# Create a zone
fixture = self.get_domain_fixture('SECONDARY', 0)
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones/', fixture)
def test_update_secondary(self):
# Create a zone
fixture = self.get_domain_fixture('SECONDARY', 0)
fixture['email'] = cfg.CONF['service:central'].managed_resource_email
# Create a zone
zone = self.create_domain(**fixture)
masters = ['10.0.0.1', '10.0.0.2']
# Prepare an update body
body = {'masters': masters}
response = self.client.patch_json('/zones/%s' % zone['id'], body,
status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
self.assertIn('status', response.json)
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIsNotNone(response.json['updated_at'])
self.assertEqual(masters, response.json['masters'])
self.assertEqual(1, response.json['serial'])
def test_xfr_request(self):
# Create a zone
fixture = self.get_domain_fixture('SECONDARY', 0)
fixture['email'] = cfg.CONF['service:central'].managed_resource_email
fixture['attributes'] = [{"key": "master", "value": "10.0.0.10"}]
# Create a zone
zone = self.create_domain(**fixture)
response = self.client.post_json(
'/zones/%s/tasks/xfr' % zone['id'],
None, status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
def test_invalid_xfr_request(self):
        # Create a zone
zone = self.create_domain()
response = self.client.post_json(
'/zones/%s/tasks/xfr' % zone['id'],
None, status=400)
# Check the headers are what we expect
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
def test_update_secondary_email_invalid_object(self):
# Create a zone
fixture = self.get_domain_fixture('SECONDARY', 0)
fixture['email'] = cfg.CONF['service:central'].managed_resource_email
# Create a zone
zone = self.create_domain(**fixture)
body = {'email': '[email protected]'}
self._assert_exception('invalid_object', 400, self.client.patch_json,
'/zones/%s' % zone['id'], body)
# Metadata tests
def test_metadata_exists(self):
response = self.client.get('/zones/')
# Make sure the fields exist
self.assertIn('metadata', response.json)
self.assertIn('total_count', response.json['metadata'])
def test_total_count(self):
response = self.client.get('/zones/')
# There are no zones by default
self.assertEqual(0, response.json['metadata']['total_count'])
# Create a zone
fixture = self.get_domain_fixture(fixture=0)
response = self.client.post_json('/zones/', fixture)
response = self.client.get('/zones/')
# Make sure total_count picked it up
self.assertEqual(1, response.json['metadata']['total_count'])
def test_total_count_pagination(self):
# Create two zones
fixture = self.get_domain_fixture(fixture=0)
response = self.client.post_json('/zones/', fixture)
fixture = self.get_domain_fixture(fixture=1)
response = self.client.post_json('/zones/', fixture)
# Paginate so that there is only one zone returned
response = self.client.get('/zones?limit=1')
self.assertEqual(1, len(response.json['zones']))
# The total_count should know there are two
self.assertEqual(2, response.json['metadata']['total_count'])
def test_no_update_deleting(self):
# Create a zone
zone = self.create_domain()
# Prepare an update body
body = {'zone': {'email': 'prefix-%s' % zone['email']}}
self.client.delete('/zones/%s' % zone['id'], status=202)
self._assert_exception('bad_request', 400, self.client.patch_json,
'/zones/%s' % zone['id'], body)
def test_get_nameservers(self):
# Create a zone
zone = self.create_domain()
# Prepare an update body
response = self.client.get('/zones/%s/nameservers' % zone['id'],
headers=[('Accept', 'application/json')])
self.assertIn('nameservers', response.json)
self.assertEqual(1, len(response.json['nameservers']))
self.assertIn('hostname', response.json['nameservers'][0])
self.assertIn('priority', response.json['nameservers'][0])
def test_get_zones_filter(self):
# Add zones for testing
fixtures = [
self.get_domain_fixture(
'PRIMARY', fixture=0, values={
'ttl': 3600,
'description': 'test1'
}
),
self.get_domain_fixture(
'PRIMARY', fixture=1, values={
'ttl': 4000,
'description': 'test2'
}
)
]
for fixture in fixtures:
response = self.client.post_json('/zones/', fixture)
get_urls = [
# Filter by Name
'/zones?name=%s' % fixtures[0]['name'],
# Filter by Email
'/zones?email=example*',
'/zones?email=%s' % fixtures[1]['email'],
# Filter by TTL
'/zones?ttl=3600',
# Filter by Description
'/zones?description=test1',
'/zones?description=test*'
]
correct_results = [1, 2, 1, 1, 1, 2]
for get_url, correct_result in zip(get_urls, correct_results):
response = self.client.get(get_url)
# Check the headers are what we expect
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check that the correct number of zones match
self.assertEqual(correct_result, len(response.json['zones']))
def test_invalid_zones_filter(self):
invalid_url = '/zones?type=PRIMARY'
self._assert_exception(
'bad_request', 400, self.client.get, invalid_url)
| apache-2.0 | 6,416,293,826,561,550,000 | 36.865819 | 79 | 0.594017 | false | 4.00493 | true | false | false |
Risto-Stevcev/django-openworld | openworld/flatten_taxonomy.py | 1 | 1749 | """
Flattens the OpenTaxonomy for tagging purposes
Currently parses v2.0
Source for the taxonomy:
http://openeligibility.org
"""
__author__ = 'Risto Stevcev'
import argparse
import xml.etree.ElementTree as ET
import json
class Flattener(object):
def __init__(self, input_file, output_file):
self.input_file = input_file
self.output_file = output_file
self.services = set()
self.situations = set()
self.tags = {}
def run(self):
tree = ET.parse(self.input_file)
root = tree.getroot()
self.flatten(self.services, root.find('services'))
self.flatten(self.situations, root.find('situations'))
self.tags = {"services": sorted(list(self.services)), "situations": sorted(list(self.situations))}
json.dump(self.tags, self.output_file)
def flatten(self, tags, node):
if not node:
return
for item in node:
title = item.attrib.get('title')
if title:
tags.add(title.lower())
self.flatten(tags, item)
def main():
argparser = argparse.ArgumentParser(description='OpenTaxonomy Flattener - by %s.' % __author__)
argparser.add_argument('-i', '-input-file', type=argparse.FileType('r'), required=True,
help='taxonomy file (xml)')
argparser.add_argument('-o', '-output-file', type=argparse.FileType('w'), required=True,
help='output file (json)')
args = argparser.parse_args()
flattener = Flattener(args.i, args.o)
flattener.run()
print("Complete: the taxonomy file '%s' has been flattened into '%s'." % (args.i.name, args.o.name))
args.i.close()
args.o.close()
if __name__ == "__main__":
main()
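# Example invocation (added for illustration; file names are hypothetical):
#   python flatten_taxonomy.py -i taxonomy.xml -o flat_taxonomy.json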
| bsd-3-clause | 6,089,947,045,113,044,000 | 28.15 | 106 | 0.608348 | false | 3.591376 | false | false | false |
spaceexperiment/forum-app | app/api/views/main.py | 1 | 1613 | from flask import request, session, g, redirect, url_for, abort
from flask.views import MethodView
from . import api
from ..decorators import api_render
@api.route('/')
@api_render
def index():
return {'welcome': 'home'}
class BaseMethodView(MethodView):
decorators = [api_render]
model = None
def get_or_404(self, id):
obj = self.model.get(id)
if not obj:
abort(404)
return obj
def is_admin(self):
if hasattr(g, 'user'):
if g.user.is_admin == 'True':
return True
return abort(401)
def is_authenticated(self):
if hasattr(g, 'user'):
return True
return abort(401)
def error(self, message, code):
return {'message': message}, code
def bad_request(self, message):
return {'message': message}, 400
def missing_data(self, data):
"""
        Return None if all required fields are present in request.json;
        otherwise return a 400 response whose message lists the missing fields.
        param data: a list of strings of required fields
"""
missing_fields = []
for key in data:
if not key in request.json:
missing_fields.append(key)
if missing_fields:
message = 'Missing ' + ', '.join(missing_fields)
return self.bad_request(message)
return None
def clean_data(self, fields):
data = {}
        # strip away any key not in fields
for key in request.json:
if key in fields:
data[key] = request.json[key]
return data | mit | 5,932,561,269,379,271,000 | 25.032258 | 64 | 0.554867 | false | 4.200521 | false | false | false |
ygrass/handsome | payments/migrations/0001_initial.py | 1 | 9022 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Payment'
db.create_table(u'payments_payment', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('order', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orders.Order'])),
('trade_no', self.gf('django.db.models.fields.CharField')(max_length=64)),
('trade_status', self.gf('django.db.models.fields.CharField')(max_length=64)),
('buyer_id', self.gf('django.db.models.fields.CharField')(max_length=32)),
('buyer_email', self.gf('django.db.models.fields.CharField')(max_length=128)),
('full_content', self.gf('django.db.models.fields.TextField')()),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'payments', ['Payment'])
def backwards(self, orm):
# Deleting model 'Payment'
db.delete_table(u'payments_payment')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'orders.city': {
'Meta': {'object_name': 'City'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'province': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.Province']"})
},
u'orders.country': {
'Meta': {'object_name': 'Country'},
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.City']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'orders.order': {
'Meta': {'object_name': 'Order'},
'address_city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.City']", 'null': 'True', 'blank': 'True'}),
'address_country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.Country']", 'null': 'True', 'blank': 'True'}),
'address_province': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.Province']", 'null': 'True', 'blank': 'True'}),
'age_group': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'chest': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'my_orders'", 'to': u"orm['auth.User']"}),
'express_info': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'foot': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'height': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'hipline': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'house': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'preferred_designer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'designed_orders'", 'to': u"orm['auth.User']"}),
'prepayment': ('django.db.models.fields.FloatField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'default': "'created'", 'max_length': '16'}),
'style': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'total_price': ('django.db.models.fields.FloatField', [], {}),
'waistline': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'weight': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'})
},
u'orders.province': {
'Meta': {'object_name': 'Province'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'payments.payment': {
'Meta': {'object_name': 'Payment'},
'buyer_email': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'buyer_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'full_content': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orders.Order']"}),
'trade_no': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'trade_status': ('django.db.models.fields.CharField', [], {'max_length': '64'})
}
}
complete_apps = ['payments'] | unlicense | -4,652,941,033,891,106,000 | 71.766129 | 195 | 0.550876 | false | 3.570241 | false | false | false |
aminotti/yameo | app/defaults_routes.py | 1 | 3243 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Anthony Minotti <[email protected]>.
#
#
# This file is part of Yameo framework.
#
# Yameo framework is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Yameo framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yameo framework. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import io
import shutil
from werkzeug import secure_filename
from flask import current_app, send_file, request, Response
from .controller import Controller
from .context import models
from .module import SmartManagement
from lib.exceptions import *
from lib.orm.binary import Binary
from .config import conf
ctl = Controller()
# TODO: handle permissions on all routes
@ctl.route('/binaries/<ressource>/<path:ids>/<filename>', methods=['GET', 'PUT', 'PATCH'])
def getBinariesFiles(ressource, ids, filename):
ids = ids.split('/')
attribute, extension = os.path.splitext(secure_filename(filename))
ressource = ressource.capitalize()
res = models.get(ressource)
if not res:
raise Core404Exception("Ressource '{}' not found.".format(ressource))
res = res.get(*ids)
if not res:
raise Core404Exception("Ressource with ids '{}' not found.".format(ids))
if request.method == 'GET':
field = getattr(res, attribute.lower(), None)
if not field:
raise Core404Exception("'{}' of ressource '{}' not found.".format(attribute, ressource))
if field.extension != extension[1:]:
raise Core404Exception("File {}{} not found.".format(attribute, extension))
return send_file(field.stream, field.mimetype)
else:
if attribute.lower() not in res._columns:
raise Core400Exception("Bad binary attribute : '{}'".format(attribute))
binary = Binary(ressource.lower(), attribute, request.headers['Content-Type'], extension[1:], io.BytesIO(request.data))
setattr(res, attribute.lower(), binary)
res.write()
r = Response(None)
del r.headers['content-type']
r.status_code = 204
return r
@ctl.route('/')
def yameo_hello():
return "Hello {}!".format(current_app.tenant)
"""
@ctl.route('/yameo/install/<module>/')
def yameo_install(module):
SmartManagement.install(module, current_app)
return "Install ok!!"
"""
# TESTS :
@ctl.route('/yameo/booking/<int:code>/')
def yameo_test(code):
book = models.Booking.get(code)
if book:
return book.name
# book.unlink()
else:
return "Booking {} pas trouvé, devrais ernvoyer un 404!".format(code)
| agpl-3.0 | -7,180,037,846,009,860,000 | 30.173077 | 127 | 0.649291 | false | 3.924939 | false | false | false |
MarkusHackspacher/unknown-horizons | development/extract_strings_from_sqlite.py | 1 | 3600 | #!/usr/bin/env python3
# ###################################################
# Copyright (C) 2008-2017 The Unknown Horizons Team
# [email protected]
# This file is part of Unknown Horizons.
#
# Unknown Horizons is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ###################################################
###############################################################################
#
# == I18N DEV USE CASES: CHEATSHEET ==
#
# ** Refer to development/create_pot.sh for help with building or updating
# the translation files for Unknown Horizons.
#
###############################################################################
#
# THIS SCRIPT IS A HELPER SCRIPT. DO NOT INVOKE MANUALLY!
#
###############################################################################
from __future__ import print_function
import os
import sqlalchemy
import sqlalchemy.orm
import sqlalchemy.ext.declarative
import sqlite3
import sys
import tempfile
from collections import defaultdict
sys.path.append(".")
sys.path.append("./horizons")
from horizons.constants import PATHS
# sqlalchemy doesn't support importing sql files,
# therefore we work around this by using sqlite3
filename = tempfile.mkstemp(text=True)[1]
conn = sqlite3.connect(filename)
for db_file in PATHS.DB_FILES:
conn.executescript(open(db_file, "r").read())
conn.commit()
engine = sqlalchemy.create_engine('sqlite:///' + filename) # must be 4 slashes total, sqlalchemy breaks the unixoid conventions here
Session = sqlalchemy.orm.sessionmaker(bind=engine)
db_session = Session()
Base = sqlalchemy.ext.declarative.declarative_base()
#
# Classes
#
class Message(Base):
__tablename__ = 'message_text'
text = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
class Resource(Base):
__tablename__ = 'resource'
name = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
class Tier(Base):
__tablename__ = 'tier'
name = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
#
# print it
#
class MSGID_collect:
msgids = defaultdict(list)
def __init__(self):
pass
def add_to_collection(self, msgid, place):
self.msgids[msgid].append(place)
def __str__(self):
s = []
for text, locations in self.msgids.items():
comment = '#. This is a database entry: {}\n'.format(','.join(locations))
s += [comment + build_msgid(text)]
return '\n'.join(s).strip()
def build_msgid(msgid):
return 'msgid "{}"\nmsgstr ""\n'.format(msgid.replace('"', '\\"'))
def collect_all():
collector = MSGID_collect()
for message in db_session.query(Message):
collector.add_to_collection(message.text, 'a messagewidget message (left part of the screen)')
for resource in db_session.query(Resource):
collector.add_to_collection(resource.name, 'the name of a resource')
for tier in db_session.query(Tier):
collector.add_to_collection(tier.name, 'the name of an inhabitant tier (level)')
return collector
print(collect_all())
os.unlink(filename)
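# Example of one emitted gettext stanza (added for illustration; "Food" is an
# assumed resource name, not necessarily present in the database):
#   #. This is a database entry: the name of a resource
#   msgid "Food"
#   msgstr ""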
| gpl-2.0 | 1,333,327,644,805,271,800 | 25.865672 | 132 | 0.660833 | false | 3.719008 | false | false | false |
capitalk/treelearn | treelearn/regression_ensemble.py | 1 | 1162 | import numpy as np
from sklearn.linear_model import LinearRegression
from base_ensemble import BaseEnsemble
class RegressionEnsemble(BaseEnsemble):
def __init__(self,
base_model=LinearRegression(),
num_models = 50,
bagging_percent=0.5,
bagging_replacement=True,
feature_subset_percent = 1.0,
stacking_model=None,
randomize_params = {},
additive = False,
verbose=False):
BaseEnsemble.__init__(self,
base_model,
num_models,
bagging_percent,
bagging_replacement,
feature_subset_percent,
stacking_model,
randomize_params,
additive,
verbose)
def predict(self, X):
pred = self.transform(X)
if self.stacking_model:
return self.stacking_model.predict(pred)
else:
return np.dot(pred, self.weights)
def _init_fit(self, X, Y):
pass
def _created_model(self, X, Y, indices, i, model):
pass
| lgpl-3.0 | 771,531,069,544,880,400 | 27.341463 | 54 | 0.515491 | false | 4.335821 | false | false | false |
pangeo-data/rechunker | rechunker/executors/prefect.py | 1 | 1850 | import prefect
from rechunker.types import ParallelPipelines, PipelineExecutor
class PrefectPipelineExecutor(PipelineExecutor[prefect.Flow]):
"""An execution engine based on Prefect.
Supports copying between any arrays that implement ``__getitem__`` and
``__setitem__`` for tuples of ``slice`` objects. Array must also be
serializable by Prefect (i.e., with pickle).
Execution plans for PrefectExecutor are prefect.Flow objects.
"""
def pipelines_to_plan(self, pipelines: ParallelPipelines) -> prefect.Flow:
return _make_flow(pipelines)
def execute_plan(self, plan: prefect.Flow, **kwargs):
state = plan.run(**kwargs)
return state
class MappedTaskWrapper(prefect.Task):
def __init__(self, stage, **kwargs):
self.stage = stage
super().__init__(**kwargs)
def run(self, key):
return self.stage.func(key)
class SingleTaskWrapper(prefect.Task):
def __init__(self, stage, **kwargs):
self.stage = stage
super().__init__(**kwargs)
def run(self):
return self.stage.func()
def _make_flow(pipelines: ParallelPipelines) -> prefect.Flow:
with prefect.Flow("Rechunker") as flow:
# iterate over different arrays in the group
for pipeline in pipelines:
stage_tasks = []
# iterate over the different stages of the array copying
for stage in pipeline:
if stage.map_args is None:
stage_task = SingleTaskWrapper(stage)
else:
stage_task = MappedTaskWrapper(stage).map(stage.map_args)
stage_tasks.append(stage_task)
# create dependence between stages
for n in range(len(stage_tasks) - 1):
stage_tasks[n + 1].set_upstream(stage_tasks[n])
return flow
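# Minimal usage sketch (illustrative; `pipelines` is a rechunker ParallelPipelines value):
#   executor = PrefectPipelineExecutor()
#   plan = executor.pipelines_to_plan(pipelines)
#   state = executor.execute_plan(plan)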
| mit | 852,775,540,904,071,000 | 31.45614 | 78 | 0.625405 | false | 4.101996 | false | false | false |
elbow-jason/flask-meta | flask_meta/appmeta/config/cfg_generator.py | 1 | 2916 | import os
def yaml_cfg():
current_dir = os.getcwd()
yaml_text = """
SECRET_KEY: 'appmeta_dev_key'
META_CREDENTIALS: 'metaadmin,password'
ADMIN_CREDENTIALS: 'admin,password'
SQLALCHEMY_DATABASE_URI: sqlite://{}/test.db
SQLALCHEMY_BINDS:
- appmeta: sqlite://{}/appmeta.db
SERVER_URL: "http://localhost:5000"
SQLALCHEMY_ECHO: True
DEBUG: True
SECURITY_PASSWORD_HASH: bcrypt
# Upon creation I am going to make sure that the user selects a desired
# database before proceeding to ensure no hangups upon attempting
# to go to production. See comments below for further production
# directives.
# Also, using a password protected database as your
# database (i.e. MySQL or my favorite PostgreSQL) will
# make your database much more secure than using SQLite (as
# is the default). SQLite itself has no concept of users or
# permissions, and instead relies on OS permissions for
# security.
# this config should absolutely be changed before
# production. Specifically, the SECRET_KEY, META_CREDENTIALS,
# and ADMIN_CREDENTIALS.
# this line is for testing (do not remove)
""".format(current_dir, current_dir)
return str(yaml_text)
def ini_cfg():
current_dir = os.getcwd()
text = """
[appmeta_config]
SECRET_KEY = 'appmeta_dev_key'
META_CREDENTIALS = {'name':'metaadmin','password':'password'}
ADMIN_CREDENTIALS = {'name':'admin','password':'password'}
SQLALCHEMY_DATABASE_URI = 'sqlite://%s/test.db'
SQLALCHEMY_BINDS = {'appmeta' : 'sqlite://%s/appmeta.db' }
SERVER_URL: "http://localhost:5000"
SQLALCHEMY_ECHO: True
DEBUG: True
SECURITY_PASSWORD_HASH: 'bcrypt'
# Upon creation I am going to make sure that the user selects a desired
# database before proceeding to ensure no hangups upon attempting
# to go to production. See comments below for further production
# directives.
# Also, using a password protected database as your
# database (i.e. MySQL or my favorite PostgreSQL) will
# make your database much more secure than using SQLite (as
# is the default). SQLite itself has no concept of users or
# permissions, and instead relies on OS permissions for
# security.
# this config should absolutely be changed before
# production. Specifically, the SECRET_KEY, META_CREDENTIALS,
# and ADMIN_CREDENTIALS.
# this line is for testing (do not remove)
""" % (current_dir, current_dir)
return str(text)
def write_file_safe(text, file_name, overwrite=False):
if not overwrite:
if not os.path.isfile(file_name):
new_file = open(file_name, 'w+')
new_file.write(text)
new_file.close()
else:
print """The file '{}' already exists.
To overwrite '{}' pass overwrite=True as a kwarg.
No action taken.""".format(file_name, file_name)
def write_yaml_cfg():
    write_file_safe(yaml_cfg(), 'config.yaml')
def write_ini_cfg():
write_file_safe(ini_cfg(), 'config.ini')
if __name__ == '__main__':
write_ini_cfg()
| mit | 4,527,859,070,266,051,000 | 31.043956 | 71 | 0.709877 | false | 3.402567 | false | false | false |
simon-r/PyParticles | pyparticles/demo/fountain.py | 1 | 4044 | # PyParticles : Particles simulation in python
# Copyright (C) 2012 Simone Riva
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import pyparticles.pset.particles_set as ps
import pyparticles.pset.opencl_context as occ
import pyparticles.ode.euler_solver as els
import pyparticles.ode.leapfrog_solver as lps
import pyparticles.ode.runge_kutta_solver as rks
import pyparticles.ode.stormer_verlet_solver as svs
import pyparticles.ode.midpoint_solver as mds
import pyparticles.forces.const_force as cf
import pyparticles.forces.drag as dr
import pyparticles.forces.multiple_force as mf
import pyparticles.animation.animated_ogl as aogl
import pyparticles.pset.default_boundary as db
from pyparticles.utils.pypart_global import test_pyopencl
def default_pos( pset , indx ):
t = default_pos.sim_time.time
pset.X[indx,:] = 0.01 * np.random.rand( len(indx) , pset.dim ).astype( pset.dtype )
fs = 1.0 / ( 1.0 + np.exp( -( t*4.0 - 2.0 ) ) )
alpha = 2.0 * np.pi * np.random.rand( len(indx) ).astype( pset.dtype )
vel_x = 2.0 * fs * np.cos( alpha )
vel_y = 2.0 * fs * np.sin( alpha )
pset.V[indx,0] = vel_x
pset.V[indx,1] = vel_y
pset.V[indx,2] = 10.0 * fs + 1.0 * fs * ( np.random.rand( len(indx)) )
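# Note: `fs` above is a logistic ramp in simulation time, rising from roughly 0.12 at
# t = 0 toward 1.0, so re-emitted particles leave the fountain faster as the run progresses.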
def fountain():
"""
Fountain demo
"""
steps = 10000000
dt = 0.005
pcnt = 100000
fl = True
if test_pyopencl() :
print( "OpenCL is installed and enabled " )
print( " Try, at least, 200000 particles " )
while fl :
try :
print( " " )
pcnt = int( input('How many particles: ') )
except :
print( "Please insert a number! " )
else :
fl = False
pset = ps.ParticlesSet( pcnt , dtype=np.float32 )
pset.M[:] = 0.1
pset.X[:,2] = 0.7 * np.random.rand( pset.size )
grav = cf.ConstForce( pset.size , dim=pset.dim , u_force=( 0.0 , 0.0 , -10.0 ) )
occx = None
if test_pyopencl() :
occx = occ.OpenCLcontext( pset.size , pset.dim , ( occ.OCLC_X | occ.OCLC_V | occ.OCLC_A | occ.OCLC_M ) )
drag = dr.DragOCL( pset.size , dim=pset.dim , Consts=0.01 , ocl_context=occx )
else :
drag = dr.Drag( pset.size , dim=pset.dim , Consts=0.01 )
multi = mf.MultipleForce( pset.size , dim=pset.dim )
multi.append_force( grav )
multi.append_force( drag )
multi.set_masses( pset.M )
#solver = mds.MidpointSolver( multi , pset , dt )
if test_pyopencl() :
solver = els.EulerSolverOCL( multi , pset , dt , ocl_context=occx )
else :
solver = els.EulerSolver( multi , pset , dt )
solver.update_force()
default_pos.sim_time = solver.get_sim_time()
bd = ( -100.0 , 100.0 , -100.0 , 100.0 , 0.0 , 100.0 )
bound = db.DefaultBoundary( bd , dim=3 , defualt_pos=default_pos )
pset.set_boundary( bound )
a = aogl.AnimatedGl()
a.ode_solver = solver
a.pset = pset
a.steps = steps
a.draw_particles.set_draw_model( a.draw_particles.DRAW_MODEL_VECTOR )
a.init_rotation( -80 , [ 0.7 , 0.05 , 0 ] )
a.build_animation()
a.start()
return
| gpl-3.0 | 209,497,365,053,662,750 | 27.955556 | 112 | 0.589021 | false | 3.149533 | false | false | false |
Mithrilwoodrat/Tombhub | tombhub/models.py | 1 | 1363 | import datetime
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKeyConstraint
from tombhub.database import Base
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(String(50), unique=True)
passwd = Column(String(50))
def __init__(self, name=None, passwd = None):
self.name = name
self.passwd = passwd
def __repr__(self):
return '<User %r>' % (self.name)
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
class Thread(Base):
__tablename__ = 'threads'
id = Column(Integer, primary_key=True)
author_id = Column(Integer, nullable=False)
author_name = Column(String(50), nullable=False)
title = Column(String(50))
content = Column(Text)
created_date = Column(DateTime, default=datetime.datetime.utcnow)
ForeignKeyConstraint(['author_id','author_name'],['users.id','users.name'])
def __init__(self, title=None, author_id=None, content = None):
self.title = title
self.author_id = author_id
self.author_name = User.query.get(self.author_id).name
self.content = content
def __repr__(self):
        return '<Thread %r>' % (self.title)
| gpl-2.0 | 7,702,907,114,834,849,000 | 27.416667 | 84 | 0.630961 | false | 3.754821 | false | false | false |
fnoop/maverick | manifests/maverick-modules/maverick_network/facts.d/netinfo.py | 1 | 6823 | #!/usr/bin/env python3
# This fact extracts network info for network interfaces
import os, re, sys, subprocess
sys.dont_write_bytecode = True # This is to prevent .pyc files in facts.d directory
sys.path.insert(0, '/usr/local/examples')
try:
import netifaces
import pyric # pyric errors
import pyric.pyw as pyw # iw functionality
from udevnet import Udevnet
netinfo = {}
except:
print("netinfo_present=no")
print("netinfo_interfaces=")
sys.exit(1)
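# Facts are emitted on stdout as key=value lines, for example (illustrative values):
#   netinfo_present=yes
#   netinfo_interfaces=eth0,wlan0
#   netinfo_eth0_ipaddress=192.168.1.10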
class Netinfo(object):
def __init__(self, _if):
self.data = {}
self._if = str(_if)
self.udevnet = Udevnet()
self.udevnet.runall()
def getinfo(self):
try:
self.data['macaddress'] = netifaces.ifaddresses(self._if)[netifaces.AF_LINK][0]['addr']
except:
self.data['macaddress'] = None
try:
self.data['ipaddress'] = netifaces.ifaddresses(self._if)[netifaces.AF_INET][0]['addr']
except:
self.data['ipaddress'] = None
try:
self.data['vendorstr'] = self.udevnet.data[self._if+"_id_vendor_from_database"]
except:
self.data['vendorstr'] = None
try:
self.data['vendoroui'] = self.udevnet.data[self._if+"_id_oui_from_database"]
except:
self.data['vendoroui'] = None
try:
self.data['vendor'] = self.udevnet.data[self._if+"_id_vendor"]
except:
self.data['vendor'] = None
# Hack for onboard raspberry devices
if type(self.data['vendoroui']) is str:
if re.search("^Raspberry", self.data['vendoroui']):
self.data['vendor'] = "RaspberryPi"
try:
self.data['driver'] = self.udevnet.data[self._if+"_id_net_driver"]
except:
try:
self.data['driver'] = self.udevnet.data[self._if+"_id_usb_driver"]
except:
self.data['driver'] = None
try:
self.data['model'] = self.udevnet.data[self._if+"_id_model_id"]
except:
self.data['model'] = None
try:
self.data['modelstr'] = self.udevnet.data[self._if+"_id_model_from_database"]
except:
self.data['modelstr'] = None
try:
self.data['netname'] = self.udevnet.data[self._if+"_id_net_name_from_database"]
except:
try:
self.data['netname'] = self.udevnet.data[self._if+"_id_net_name_onboard"]
except:
try:
self.data['netname'] = self.udevnet.data[self._if+"_id_net_name_slot"]
except:
try:
self.data['netname'] = self.udevnet.data[self._if+"_id_net_name_path"]
except:
try:
self.data['netname'] = self.udevnet.data[self._if+"_id_net_name_mac"]
except:
self.data['netname'] = None
try:
self.data['type'] = self.udevnet.data[self._if+"_devtype"]
if self.data['type'] == "wlan": self.data['type'] = "Wireless"
except:
try:
if re.search("^en", self.data['netname']):
self.data['type'] = "Ethernet"
elif re.search("^wl", self.data['netname']):
self.data['type'] = "Wireless"
else:
self.data['type'] = None
except:
self.data['type'] = None
# Stop here if we don't have a wireless card
if self.data['type'] != "Wireless":
return
# Retrieve wireless info
try:
_ifobj = pyw.getcard(self._if)
_ifinfo = pyw.ifinfo(_ifobj)
_devinfo = pyw.devinfo(_ifobj)
_physinfo = pyw.phyinfo(_ifobj)
_linkinfo = pyw.link(_ifobj)
except:
pass
try:
self.data['isup'] = pyw.isup(_ifobj)
except:
self.data['isup'] = None
try:
self.data['blocked'] = pyw.isblocked(_ifobj)
except:
self.data['blocked'] = None
try:
self.data['mode'] = _devinfo['mode']
except:
self.data['mode'] = None
try:
self.data['modes'] = _physinfo['modes']
except:
self.data['modes'] = None
try:
self.data['bands'] = _physinfo['bands']
except:
self.data['bands'] = None
try:
self.data['standards'] = pyw.devstds(_ifobj)
except:
self.data['standards'] = None
try:
self.data['freqs'] = pyw.devfreqs(_ifobj)
except:
self.data['freqs'] = None
try:
self.data['txpower'] = pyw.txget(_ifobj)
except:
self.data['txpower'] = None
try:
self.data['chans'] = pyw.devchs(_ifobj)
except:
self.data['chans'] = None
try:
self.data['reg'] = pyw.regget(_ifobj)
except:
self.data['reg'] = None
try:
self.data['chipset'] = _ifinfo['chipset']
except:
self.data['chipset'] = None
try:
self.data['state'] = _linkinfo['stat']
except:
self.data['state'] = None
try:
self.data['ssid'] = _linkinfo['ssid']
except:
self.data['ssid'] = None
try:
self.data['chw'] = _devinfo['CHW']
except:
self.data['chw'] = None
try:
self.data['frequency'] = _devinfo['RF']
except:
self.data['frequency'] = None
try:
self.data['rss'] = _linkinfo['rss']
except:
self.data['rss'] = None
try:
self.data['wtx'] = _linkinfo['tx']
except:
self.data['wtx'] = None
try:
self.data['wrx'] = _linkinfo['rx']
except:
self.data['wrx'] = None
def runall(self):
pass
#If we're being called as a command, instantiate and report
if __name__ == '__main__':
try:
ifs = pyw.interfaces()
except pyric.error as e:
print("Error running netinfo, pyric not available")
sys.exit(1)
print("netinfo_present=yes")
with open ("/etc/hostname", "r") as etc_hostname:
data=etc_hostname.readlines()
if data:
print("netinfo_etchostname="+str(data[0].rstrip()))
print("netinfo_interfaces="+",".join(ifs))
for _if in ifs:
_netinfo = Netinfo(_if)
_netinfo.getinfo()
for key,val in sorted(_netinfo.data.items()):
print("netinfo_"+_if+"_%s=%s" % (key, val))
| gpl-3.0 | -9,157,286,942,456,641,000 | 32.446078 | 99 | 0.491866 | false | 3.688108 | false | false | false |
b29308188/MMAI_final | src/datasets.py | 1 | 4394 | import cv2
import numpy as np
#labels for each kind of tag
label_maps = {"T":0 , "F": 1, "N": 2}
inv_label_maps = {v : k for (k, v) in label_maps.items()}
class Photo:
"""
This class is a photo which contains a list of faces.
image_ID : the ID of this photo
image : the path of the image
"""
class Face:
"""
This class represents a face start in (x , y).
w : width
h : height
tag : T/F/N
label : numbers corresponding to the tag
feature : the vector of features of this face
"""
def __init__(self, x, y, w, h, tag = None):
"""
This is the Constructor of Face
"""
self.x = int(float(x))
self.y = int(float(y))
self.w = int(float(w))
self.h = int(float(h))
self.tag = tag
if tag is not None:
self.label = label_maps[tag]
else:
self.label = None
self.feature = None
def __init__(self, image_ID = None):
"""
This is the constructor of Photo.
"""
self.image_ID = image_ID
self.faces = []
self.image = None
def read_image(self, image_path):
"""
Read image from the image_path and store it in memory
"""
self.image = cv2.resize(cv2.imread(image_path), (960, 720))
assert self.image is not None
def add_face(self, x, y, w, h, tag = None):
"""
Add a face to the list of faces.
"""
self.faces.append( self.Face(x, y, w, h, tag) )
def histogram(self, img):
Orishape = img.shape
hist = []
img = img.reshape((img.shape[0]*img.shape[1]*img.shape[2]),order='F')
a = np.histogram(img[0:Orishape[0]*Orishape[1]], bins=np.arange(0,257,64))[0]
hist += list(a.astype(float)/np.sum(a))
b = np.histogram(img[Orishape[0]*Orishape[1]:2*Orishape[0]*Orishape[1]], bins=np.arange(0,257,64))[0]
hist += list(b.astype(float)/np.sum(b))
c = np.histogram(img[2*Orishape[0]*Orishape[1]:3*Orishape[0]*Orishape[1]], bins=np.arange(0,257,32))[0]
hist += list(c.astype(float)/np.sum(c))
return hist
def colorgram(self, img):
cgram = []
for i in xrange(3): # RGB
cgram += [np.mean(img[0:,0:,i]), np.std(img[0:,0:,i])]
return cgram
def get_global_features(self):
gfs = []
gfs += [len(self.faces), self.image.shape[0]*self.image.shape[1]] # number of faces in this image
gfs += [np.mean([f.x for f in self.faces]), np.var([f.x for f in self.faces])]
gfs += [np.mean([f.y for f in self.faces]), np.var([f.y for f in self.faces])]
gfs += [np.mean([f.w for f in self.faces]), np.var([f.w for f in self.faces])]
gfs += [np.mean([f.h for f in self.faces]), np.var([f.h for f in self.faces])]
average_distance = 0.
self.disMatrix = np.zeros((len(self.faces), len(self.faces)))
for i, f1 in enumerate(self.faces):
for j, f2 in enumerate(self.faces):
dis = np.sqrt(((f1.x+f1.w/2) - (f2.x+f2.w/2))**2 + ((f1.y+f1.h/2) - (f2.y+f2.h/2))**2) #l2 dis
self.disMatrix[i, j] = dis
average_distance += dis
self.global_feature = gfs
def local_features(self, f, no):
lfs = [f.x, f.y, f.w, f.h]
lfs += self.colorgram(self.image[f.y : f.y+f.h, f.x : f.x+f.w])
lfs += [np.var(self.disMatrix[no, :]), np.mean(self.disMatrix[no, :])] # average distance to other faces
lfs += [f.x+f.w/2, f.y+f.h/2] # center
NinR = 0.0
R = 0.4 * self.image.shape[0]# percentage of image's width
for i in xrange(len(self.faces)):
if self.disMatrix[no, i] < R :
NinR += 1
lfs += [NinR/len(self.faces)]
return lfs
def extract_features(self):
"""
For each face in the list of faces, extract its features.
"""
if self.image is not None:
self.get_global_features()
for i, f in enumerate(self.faces):
if self.image is not None:
f.feature = np.array(self.local_features(f, i) + self.global_feature )
else:
f.feature = np.array([float(f.w*f.h)])
| gpl-2.0 | 5,503,500,688,097,726,000 | 33.873016 | 112 | 0.516614 | false | 3.167988 | false | false | false |
Ektorus/bohrium | bridge/npbackend/setup.py | 1 | 7563 | #!/usr/bin/env python
"""
/*
This file is part of Bohrium and copyright (c) 2012 the Bohrium
http://bohrium.bitbucket.org
Bohrium is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3
of the License, or (at your option) any later version.
Bohrium is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the
GNU Lesser General Public License along with Bohrium.
If not, see <http://www.gnu.org/licenses/>.
*/
"""
from distutils.core import setup, Extension
from distutils.command.build import build
import os
import sys
import stat
import pprint
import json
import shutil
import numpy as np
from Cython.Distutils import build_ext
#We overload the setup.py with a 'buildpath=' argument that
#points to the root of the current build
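# e.g. an invocation such as (path illustrative):
#   python setup.py build buildpath=/path/to/build-root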
build_path = None
for i,arg in enumerate(sys.argv):
if arg.startswith("buildpath="):
build_path = arg[len("buildpath="):]
sys.argv.pop(i)
def buildpath(*args):
if build_path is None:
return os.path.join(*args)
else:
return os.path.join(build_path, *args)
def srcpath(*args):
prefix = os.path.abspath(os.path.dirname(__file__))
assert len(prefix) > 0
return os.path.join(prefix, *args)
def get_timestamp(f):
st = os.stat(f)
mtime = st[stat.ST_MTIME] #modification time
return mtime
def set_timestamp(f,timestamp):
os.utime(f,(timestamp,timestamp))
#Returns the numpy data type name
def dtype_bh2np(bh_type_str):
return bh_type_str[3:].lower()#Remove BH_ and convert to lower case
#Merge bhc.i.head with the bh_c.h to create our SWIG interface bhc.i
time = 0
with open(buildpath("bhc.i"), 'w') as outfile:
for fname in [srcpath("bhc.i.head"),srcpath("..","c","codegen","output","bh_c.h")]:
t = get_timestamp(fname)
if t > time:
time = t
with open(fname) as infile:
for line in infile:
outfile.write(line)
set_timestamp(buildpath("bhc.i"),time)
#Create the _info.py file
time = get_timestamp(srcpath('setup.py'))
with open(buildpath("_info.py"), 'w') as o:
#Write header
o.write("#This file is auto generated by the setup.py\n")
o.write("import numpy as np\n")
#Find number of operands and type signature for each Bohrium opcode
#that Bohrium-C supports
t = get_timestamp(srcpath('..','..','core','codegen','opcodes.json'))
if t > time:
time = t
nops = {}
type_sig = {}
ufunc = {}
with open(srcpath('..','..','core','codegen','opcodes.json'), 'r') as f:
opcodes = json.loads(f.read())
for op in opcodes:
if op['elementwise'] and not op['system_opcode']:
#Convert the type signature to bhc names
type_sig = []
for sig in op['types']:
type_sig.append([dtype_bh2np(s) for s in sig])
name = op['opcode'].lower()[3:]#Removing BH_ and we have the NumPy and bohrium name
ufunc[name] = {'name': name,
'nop': int(op['nop']),
'type_sig': type_sig}
o.write("op = ")
pp = pprint.PrettyPrinter(indent=2, stream=o)
pp.pprint(ufunc)
#Find and write all supported data types
t = get_timestamp(srcpath('..','..','core','codegen','types.json'))
if t > time:
time = t
s = "numpy_types = ["
with open(srcpath('..','..','core','codegen','types.json'), 'r') as f:
types = json.loads(f.read())
for t in types:
if t['numpy'] == "unknown":
continue
s += "np.dtype('%s'), "%t['numpy']
s = s[:-2] + "]\n"
o.write(s)
set_timestamp(buildpath("_info.py"),time)
#We need to make sure that the extensions is build before the python module because of SWIG
#Furthermore, '_info.py' and 'bhc.py' should be copied to the build dir
class CustomBuild(build):
sub_commands = [
('build_ext', build.has_ext_modules),
('build_py', build.has_pure_modules),
('build_clib', build.has_c_libraries),
('build_scripts', build.has_scripts),
]
def run(self):
if not self.dry_run:
self.copy_file(buildpath('_info.py'),buildpath(self.build_lib,'bohrium','_info.py'))
self.copy_file(buildpath('bhc.py'),buildpath(self.build_lib,'bohrium','bhc.py'))
build.run(self)
if os.path.realpath(buildpath('random123.pyx')) != os.path.realpath(srcpath('random123.pyx')):
shutil.copy2(srcpath('random123.pyx'), buildpath('random123.pyx'))
shutil.copy2(srcpath('ndarray.pyx'), buildpath('ndarray.pyx'))
shutil.copy2(srcpath('_util.pyx'), buildpath('_util.pyx'))
setup(name='Bohrium',
version='0.2',
description='Bohrium NumPy',
long_description='Bohrium NumPy',
author='The Bohrium Team',
author_email='[email protected]',
url='http://www.bh107.org',
license='LGPLv3',
platforms='Linux, OSX',
cmdclass={'build': CustomBuild, 'build_ext':build_ext},
package_dir={'bohrium': srcpath('')},
packages=['bohrium', 'bohrium.target'],
ext_package='bohrium',
ext_modules=[Extension(name='_bhmodule',
sources=[srcpath('src','_bhmodule.c')],
depends=[srcpath('src','types.c'), srcpath('src','types.h'),
srcpath('src','operator_overload.c')],
include_dirs=[srcpath('..','c','codegen','output'),
srcpath('..','..','include')],
libraries=['dl','bhc', 'bh'],
library_dirs=[buildpath('..','c'),
buildpath('..','..','core')],
),
Extension(name='_bhc',
sources=[buildpath('bhc.i')],
include_dirs=[srcpath('..','c','codegen','output'),
srcpath('..','..','include')],
libraries=['dl','bhc', 'bh'],
library_dirs=[buildpath('..','c'),
buildpath('..','..','core')],
),
Extension(name='random123',
sources=[buildpath('random123.pyx')],
include_dirs=[srcpath('.'),
srcpath('..','..','thirdparty','Random123-1.08','include')],
libraries=[],
library_dirs=[],
),
Extension(name='_util',
sources=[buildpath('_util.pyx')],
include_dirs=[srcpath('.')],
libraries=[],
library_dirs=[],
),
Extension(name='ndarray',
sources=[buildpath('ndarray.pyx')],
include_dirs=[srcpath('.')],
libraries=[],
library_dirs=[],
)
]
)
| lgpl-3.0 | -1,784,800,386,496,291,800 | 37.390863 | 103 | 0.531667 | false | 3.941115 | false | false | false |
manderelee/csc2521_final | scripts/adjust_vox.py | 1 | 1368 | import os, sys
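# Usage sketch (illustrative): python adjust_vox.py mesh.msh
# The script rewrites the given Gmsh-style file in place, shifting node and element
# indices (and each element's node references) so that numbering starts at 1 instead of 0.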
if __name__ == "__main__":
filename = sys.argv[1]
########### read ############
header = []
f = open(filename, 'r')
header.append(f.readline())
header.append(f.readline())
header.append(f.readline())
header.append(f.readline())
header.append(f.readline())
num_nodes = header[-1]
node_lines = []
for i in range(int(num_nodes)):
line = f.readline().split(" ")
line[0] = str(int(line[0]) + 1)
line = " ".join(line)
node_lines.append(line)
between = []
between.append(f.readline())
between.append(f.readline())
between.append(f.readline())
num_elements = between[-1]
element_lines = []
for i in range(int(num_elements)):
line = f.readline().split(" ")
line[0] = str(int(line[0]) + 1)
for j in range(5,13):
line[j] = str(int(line[j]) + 1)
line = " ".join(line)
element_lines.append(line + '\n')
end = ['$EndElements']
f.close()
########### write ############
f = open(filename, 'w')
for line in header:
f.write(line)
for line in node_lines:
f.write(line)
for line in between:
f.write(line)
for line in element_lines:
f.write(line)
for line in end:
f.write(line)
f.close()
| mpl-2.0 | -5,231,434,740,971,981,000 | 23.872727 | 43 | 0.500731 | false | 3.428571 | false | false | false |
kearnsw/Twitt.IR | src/alignment.py | 1 | 2873 | #This software is a free software. Thus, it is licensed under GNU General Public License.
#Python implementation of the Smith-Waterman Algorithm, for Homework 1 of a Bioinformatics class.
#Forrest Bao, Sept. 26 <http://fsbao.net> <forrest.bao aT gmail.com>
# zeros() was origianlly from NumPy.
# This version is implemented by alevchuk 2011-04-10
def zeros(shape):
retval = []
for x in range(shape[0]):
retval.append([])
for y in range(shape[1]):
retval[-1].append(0)
return retval
match_award = 10
mismatch_penalty = -5
gap_penalty = -5 # both for opening and extanding
def match_score(alpha, beta):
if alpha == beta:
return match_award
elif alpha == '-' or beta == '-':
return gap_penalty
else:
return mismatch_penalty
def finalize(align1, align2):
align1 = align1[::-1] #reverse sequence 1
align2 = align2[::-1] #reverse sequence 2
i,j = 0,0
    # calculate identity, score and aligned sequences
symbol = ''
found = 0
score = 0
identity = 0
return len(align1)
def water(seq1, seq2):
m, n = len(seq1), len(seq2) # length of two sequences
# Generate DP table and traceback path pointer matrix
score = zeros((m+1, n+1)) # the DP table
pointer = zeros((m+1, n+1)) # to store the traceback path
max_score = 0 # initial maximum score in DP table
# Calculate DP table and mark pointers
for i in range(1, m + 1):
for j in range(1, n + 1):
score_diagonal = score[i-1][j-1] + match_score(seq1[i-1], seq2[j-1])
score_up = score[i][j-1] + gap_penalty
score_left = score[i-1][j] + gap_penalty
score[i][j] = max(0,score_left, score_up, score_diagonal)
if score[i][j] == 0:
pointer[i][j] = 0 # 0 means end of the path
if score[i][j] == score_left:
pointer[i][j] = 1 # 1 means trace up
if score[i][j] == score_up:
pointer[i][j] = 2 # 2 means trace left
if score[i][j] == score_diagonal:
pointer[i][j] = 3 # 3 means trace diagonal
if score[i][j] >= max_score:
max_i = i
max_j = j
max_score = score[i][j];
align1, align2 = '', '' # initial sequences
i,j = max_i,max_j # indices of path starting point
#traceback, follow pointers
while pointer[i][j] != 0:
if pointer[i][j] == 3:
align1 += seq1[i-1]
align2 += seq2[j-1]
i -= 1
j -= 1
elif pointer[i][j] == 2:
align1 += '-'
align2 += seq2[j-1]
j -= 1
elif pointer[i][j] == 1:
align1 += seq1[i-1]
align2 += '-'
i -= 1
return finalize(align1, align2)
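# Usage sketch (illustrative sequences):
#   water("AGCACACA", "ACACACTA")
# returns the length of the best-scoring local alignment found between the two inputs.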
| gpl-3.0 | -6,357,484,585,676,314,000 | 32.022989 | 90 | 0.533241 | false | 3.428401 | false | false | false |
iamaris/xpython | login/example_edited.py | 1 | 1531 | # -*- coding: utf-8 -*-
import tornado.web
import tornado.gen
from google_oath2 import GoogleOath2Mixin
class GoogleHandler(tornado.web.RequestHandler, GoogleOath2Mixin):
@tornado.web.addslash
@tornado.web.asynchronous
@tornado.gen.coroutine
def get(self):
if self.get_argument("code", False):
user = yield self.get_authenticated_user(
redirect_uri='http://localhost:8000/oauth2callback',
client_id="796019659754-2ak2t323shp3c336pspmohfj8dhdtg4v.apps.googleusercontent.com",
client_secret="O-Nu4I0f2QcqT81MEQekgst9",
code=self.get_argument("code"),
extra_fields=['email'])
print '--------------------------------GOOGLE--------------------------------'
print user
print '----------------------------------------------------------------------'
self.set_secure_cookie("user", tornado.escape.json_encode(user))
self.redirect("/")
else:
self.authorize_redirect(
redirect_uri='http://localhost:8000/oauth2callback',
client_id="796019659754-2ak2t323shp3c336pspmohfj8dhdtg4v.apps.googleusercontent.com",
extra_params={"scope": "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile",
"state" : "profile",
"response_type": "code",})
| apache-2.0 | -1,084,315,582,388,091,800 | 50.033333 | 137 | 0.528413 | false | 4.288515 | false | false | false |
eske/seq2seq | scripts/speech/extract.py | 1 | 2752 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
import numpy as np
import yaafelib
import tarfile
import tempfile
import os
from collections import Counter
parser = argparse.ArgumentParser()
parser.add_argument('inputs', nargs='+', help='tar archive which contains all the wav files')
parser.add_argument('output', help='output file')
parser.add_argument('--derivatives', action='store_true')
args = parser.parse_args()
parameters = dict(
step_size=160, # corresponds to 10 ms (at 16 kHz)
block_size=640, # corresponds to 40 ms
mfcc_coeffs=40,
mfcc_filters=41 # more filters? (needs to be at least mfcc_coeffs+1, because first coeff is ignored)
)
# TODO: ensure that all input files use this rate
fp = yaafelib.FeaturePlan(sample_rate=16000)
mfcc_features = 'MFCC MelNbFilters={mfcc_filters} CepsNbCoeffs={mfcc_coeffs} ' \
'blockSize={block_size} stepSize={step_size}'.format(**parameters)
energy_features = 'Energy blockSize={block_size} stepSize={step_size}'.format(**parameters)
fp.addFeature('mfcc: {}'.format(mfcc_features))
if args.derivatives:
fp.addFeature('mfcc_d1: {} > Derivate DOrder=1'.format(mfcc_features))
fp.addFeature('mfcc_d2: {} > Derivate DOrder=2'.format(mfcc_features))
fp.addFeature('energy: {}'.format(energy_features))
if args.derivatives:
fp.addFeature('energy_d1: {} > Derivate DOrder=1'.format(energy_features))
fp.addFeature('energy_d2: {} > Derivate DOrder=2'.format(energy_features))
if args.derivatives:
keys = ['mfcc', 'mfcc_d1', 'mfcc_d2', 'energy', 'energy_d1', 'energy_d2']
else:
keys = ['mfcc', 'energy']
df = fp.getDataFlow()
engine = yaafelib.Engine()
engine.load(df)
afp = yaafelib.AudioFileProcessor()
frame_counter = Counter()
outfile = open(args.output, 'wb')
total = 0
for filename in args.inputs:
tar = tarfile.open(filename)
total += len([f for f in tar if f.isfile()])
_, tmp_file = tempfile.mkstemp()
for j, filename in enumerate(args.inputs):
tar = tarfile.open(filename)
files = sorted([f for f in tar if f.isfile()], key=lambda f: f.name)
for i, fileinfo in enumerate(files):
file_ = tar.extractfile(fileinfo)
with open(tmp_file, 'wb') as f:
f.write(file_.read())
afp.processFile(engine, tmp_file)
feats = engine.readAllOutputs()
feats = np.concatenate([feats[k] for k in keys], axis=1)
frames, dim = feats.shape
feats = feats.astype(np.float32)
if frames == 0:
print(frames, dim, fileinfo.name)
raise Exception
if i == 0 and j == 0:
np.save(outfile, (total, dim))
np.save(outfile, feats)
outfile.close()
os.remove(tmp_file)
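# Usage sketch (illustrative paths):
#   python extract.py wavs_part1.tar wavs_part2.tar feats.out --derivatives
# Each input tar is expected to hold 16 kHz wav files; per-file MFCC/energy features
# (plus derivatives when requested) are written sequentially to the output file via np.save.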
| apache-2.0 | -6,212,129,949,152,551,000 | 29.577778 | 105 | 0.666424 | false | 3.177829 | false | false | false |
RedhawkSDR/framework-codegen | redhawk/codegen/jinja/java/ports/frontend.py | 1 | 1943 | #
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of REDHAWK core.
#
# REDHAWK core is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# REDHAWK core is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
from redhawk.codegen.lang.idl import IDLInterface
from redhawk.codegen.jinja.ports import PortGenerator
from redhawk.codegen.jinja.ports import PortFactory
from generator import JavaPortGenerator
from redhawk.codegen.lang import java
import jinja2
class FrontendPortFactory(PortFactory):
NAMESPACE = 'FRONTEND'
def match(self, port):
return IDLInterface(port.repid()).namespace() == self.NAMESPACE
def generator(self, port):
interface = IDLInterface(port.repid()).interface()
return FrontendPortGenerator(port)
class FrontendPortGenerator(JavaPortGenerator):
def className(self):
return "frontend." + self.templateClass()
def templateClass(self):
if self.direction == 'uses':
porttype = 'Out'
else:
porttype = 'In'
porttype += self.interface + 'Port'
return porttype
def _ctorArgs(self, name):
return [java.stringLiteral(name)]
def constructor(self, name):
return '%s(%s)' % (self.className(), ', '.join(self._ctorArgs(name)))
def loader(self):
return jinja2.PackageLoader(__package__)
| lgpl-3.0 | 2,951,852,014,566,247,400 | 33.696429 | 79 | 0.712301 | false | 4.014463 | false | false | false |
dsimandl/teamsurmandl | teamsurmandl/forms.py | 1 | 2690 | from django import forms
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import AuthenticationForm
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Field, HTML, Div
from crispy_forms.bootstrap import FormActions
class SurmandlAuthForm(AuthenticationForm):
"""Form for our site login page. We are using crispy-forms here."""
username = forms.EmailField()
password = forms.PasswordInput()
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_class = 'form-signin'
#This removes all labels from the HTML
self.helper.form_show_labels = False
self.helper.form_show_errors = False
self.helper.form_method = 'post'
self.helper._form_action = ''
self.helper.layout = Layout(
HTML('<h2 class="form_signin-heading">Please sign in</h2>'),
HTML('{% if form.non_field_errors %}<div class="login-alert alert-danger">{{ form.non_field_errors.as_text }}</div>{% endif %}' ),
Field('username', css_class='form-control', placeholder="Email address", name="username", autofocus='True'),
HTML('{% if form.username.errors %} <div class="login-alert alert-danger">{{ form.username.errors.as_text }}</div>{% endif %}'),
Field('password', css_class='form-control', placeholder="Password", name="password"),
HTML('{% if form.password.errors %} <div class="login-alert alert-danger">{{ form.password.errors.as_text }}</div>{% endif %}'),
HTML('<label class="checkbox"> <input type="checkbox" value="remember-me"> Remember me</label>'),
FormActions(
Submit('submit', "Sign in", css_class="btn btn-large btn-primary btn-block")
)
)
super(SurmandlAuthForm, self).__init__(*args, **kwargs)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if not username and not password:
raise forms.ValidationError("Email address and password are required")
elif username and not password:
raise forms.ValidationError("Password is required")
elif not username and password:
raise forms.ValidationError("Email is required")
self.user_cache = authenticate(username=username, password=password)
if self.user_cache is None:
raise forms.ValidationError("Your email and password do not match")
elif not self.user_cache.is_active:
raise forms.ValidationError("The account is inactive")
return self.cleaned_data
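# Usage sketch (illustrative credentials; `request` is the current HttpRequest):
#   form = SurmandlAuthForm(data={"username": "user@example.com", "password": "secret"})
#   if form.is_valid():
#       login(request, form.user_cache)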
| mit | -713,614,621,017,927,400 | 47.035714 | 142 | 0.649442 | false | 4.183515 | false | false | false |
sebbcn/django-secure-storage | storage.py | 1 | 3497 |
from uuid import uuid4
from django.utils.timezone import now
from datetime import timedelta
from os.path import join
from django.core.files.storage import FileSystemStorage
from django.core.files.uploadedfile import UploadedFile
from .encryption import get_cipher_and_iv, padding
import app_settings as settings
from .models import EncryptedUploadedFileMetaData
class ExpiredFile(Exception):
pass
class InexistentFile(Exception):
pass
class EncryptedUploadedFile(UploadedFile):
''' Extends the django builtin UploadedFile.
The written file is encrypted using AES-256 cipher. '''
def __init__(self, *args, **kwargs):
self.passphrase = kwargs.pop('passphrase')
self.name = kwargs.get('name')
if self.name:
self._open_existing_file(*args, **kwargs)
else:
self._open_new_file(*args, **kwargs)
def _open_existing_file(self, *args, **kwargs):
self.file = self.open_file(mode='rb')
super(EncryptedUploadedFile, self).__init__(self.file, **kwargs)
EncryptedUploadedFileMetaData.load(self)
self.cipher = get_cipher_and_iv(self.passphrase, self.iv)[0]
def _open_new_file(self, *args, **kwargs):
self.cipher, self.iv = get_cipher_and_iv(self.passphrase)
self.name = EncryptedFileSystemStorage().get_available_name()
self.file = self.open_file(mode='wb')
# By default, we set an arbitrary 10 years expiration date.
expire = int(kwargs.pop('expire_date', 10 * settings.ONE_YEAR))
self.expire_date = now() + timedelta(seconds=expire)
self.clear_filename = kwargs.pop('clear_filename')
self.one_time = kwargs.pop('one_time', False)
kwargs['size'] = int(kwargs.pop('content_length', 0))
super(EncryptedUploadedFile, self).__init__(
self.file, self.name, **kwargs)
EncryptedUploadedFileMetaData.save_(self)
@property
def path(self):
return join(settings.UPLOAD_DIR, self.name)
def open_file(self, mode='rb'):
try:
return open(self.path, mode)
except IOError:
if mode == 'rb':
raise InexistentFile
raise
def encrypt_and_write(self, raw_data):
if raw_data:
block = self.cipher.encrypt(padding(raw_data))
self.write(block)
def chunks(self, chunk_size=None):
''' decrypting iterator '''
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
read = 0
while True:
block = self.read(chunk_size)
if len(block) == 0:
# EOF
break
block = self.cipher.decrypt(block)
read += len(block)
if read > self.size:
# We remove the padding at the end of the file
                trim = self.size - read
                block = block[:trim]
yield block
class EncryptedFileSystemStorage(FileSystemStorage):
''' handles encrypted files on disk with random names '''
def __init__(self, location=settings.UPLOAD_DIR):
super(EncryptedFileSystemStorage, self).__init__(location)
def open(self, *args, **kwargs):
return EncryptedUploadedFile(*args, **kwargs)
def get_available_name(self):
''' return a random id for the upload file '''
file_id = str(uuid4()).replace("-", "")
return join(self.location, file_id)
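# Minimal usage sketch (illustrative names and values; assumes the app's encryption settings):
#   storage = EncryptedFileSystemStorage()
#   f = storage.open(passphrase="secret", clear_filename="report.pdf", content_length=1024)
#   f.encrypt_and_write(raw_bytes)
#   # re-opening later needs the generated name plus the same passphrase:
#   f2 = storage.open(name=f.name, passphrase="secret")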
| gpl-2.0 | 5,682,643,343,101,651,000 | 31.082569 | 72 | 0.611667 | false | 4.090058 | false | false | false |
ryanho/ISParser | main.py | 1 | 6603 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import webapp2
import hinet
import seednet
import StringIO
import PyRSS2Gen
import urllib
import datetime
import hashlib
#from google.appengine.ext import ndb
from google.appengine.api import memcache
HTTP_DATE_FMT = '%a, %d %b %Y %H:%M:%S %Z'
def check_date_fmt(date):
date = date.strip().split(' ')
if len(date) == 5:
HTTP_DATE_FMT = '%a, %d %b %Y %H:%M:%S'
elif len(date) == 6:
HTTP_DATE_FMT = '%a, %d %b %Y %H:%M:%S %Z'
return HTTP_DATE_FMT
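# e.g. "Sun, 06 Nov 1994 08:49:37 GMT" splits into 6 tokens, so the returned format keeps
# the trailing %Z, while a value without the timezone token (5 tokens) drops it.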
# not used yet
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.headers['Cache-Control'] = 'max-age=3600, must-revalidate'
self.response.write('')
#generate hinet rss
class Hinet(webapp2.RequestHandler):
def output_content(self, content, serve=True):
if serve:
self.response.out.write(content)
else:
self.response.set_status(304)
def set_headers(self):
self.response.headers['Content-Type'] = 'application/xhtml+xml'
self.response.headers['Cache-Control'] = 'public, max-age=3600, must-revalidate'
def get_cache_data(self, rss):
output = memcache.get(rss)
mtime = memcache.get('h_mtime')
etag = memcache.get('h_etag')
if mtime is None:
mtime = datetime.datetime.utcnow().strftime(HTTP_DATE_FMT) + 'GMT'
self.response.headers['Last-Modified'] = mtime
return output, mtime, etag
def get(self):
serve = True
output, mtime, etag = self.get_cache_data('hinet_rss')
if 'If-Modified-Since' in self.request.headers:
IFMOD_DATE_FMT = check_date_fmt(self.request.headers['If-Modified-Since'])
last_seen = datetime.datetime.strptime(self.request.headers['If-Modified-Since'], IFMOD_DATE_FMT)
last_modified = datetime.datetime.strptime(mtime, HTTP_DATE_FMT)
if last_seen >= last_modified:
serve = False
if 'If-None-Match' in self.request.headers:
etags = [x.strip('" ') for x in self.request.headers['If-None-Match'].split(',')]
if etag in etags:
serve = False
if output is not None:
self.set_headers()
self.response.headers['ETag'] = '"%s"' % etag
self.output_content(output, serve)
return
items = []
parser = hinet.MyHTMLParser()
parser.feed(urllib.urlopen('http://search.hinet.net/getNotify?callback=jsonpCallback&type=0&sort=0&mobile=1').read())
for i in parser.struc_data:
items.append(PyRSS2Gen.RSSItem(title=i[1] + ' ' +i[3], link=i[2], pubDate=i[0]))
rss = PyRSS2Gen.RSS2(
title=u"Hinet系統公告",
link="http://www.hinet.net/pu/notify.htm",
description=u"此RSS內容取自Hinet網頁,依照著作權法之合理使用原則節錄部份內容。\
本RSS僅供參考,Hinet或任何人都不對內容負責",
lastBuildDate=mtime,
items=items)
output = StringIO.StringIO()
rss.write_xml(output,encoding='utf-8')
etag = hashlib.sha1(output.getvalue()).hexdigest()
memcache.set('hinet_rss', output.getvalue(), time=3600)
memcache.set('h_mtime', mtime, time=3600)
memcache.set('h_etag', etag, time=3600)
self.set_headers()
self.response.headers['ETag'] = '"%s"' % (etag,)
self.output_content(output.getvalue(), serve)
#generate seednet rss
class Seednet(webapp2.RequestHandler):
def output_content(self, content, serve=True):
if serve:
self.response.out.write(content)
else:
self.response.set_status(304)
def set_headers(self):
self.response.headers['Content-Type'] = 'application/xhtml+xml'
self.response.headers['Cache-Control'] = 'public, max-age=3600, must-revalidate'
def get_cache_data(self, rss):
output = memcache.get('seednet_rss')
mtime = memcache.get('s_mtime')
etag = memcache.get('s_etag')
if mtime is None:
mtime = datetime.datetime.utcnow().strftime(HTTP_DATE_FMT) + 'GMT'
self.response.headers['Last-Modified'] = mtime
return output, mtime, etag
def get(self):
serve = True
output, mtime, etag = self.get_cache_data('seednet_rss')
if 'If-Modified-Since' in self.request.headers:
IFMOD_DATE_FMT = check_date_fmt(self.request.headers['If-Modified-Since'])
last_seen = datetime.datetime.strptime(self.request.headers['If-Modified-Since'], IFMOD_DATE_FMT)
last_modified = datetime.datetime.strptime(mtime, HTTP_DATE_FMT)
if last_seen >= last_modified:
serve = False
if 'If-None-Match' in self.request.headers:
etags = [x.strip('" ') for x in self.request.headers['If-None-Match'].split(',')]
if etag in etags:
serve = False
if output is not None:
self.set_headers()
self.response.headers['ETag'] = '"%s"' % etag
self.output_content(output, serve)
return
items = []
parser = seednet.MyHTMLParser()
parser.feed(urllib.urlopen(
'https://service.seed.net.tw/register-cgi/service_notice?FUNC=notice_qry_more&Category=02&Start=1').read())
for i in parser.struc_data:
items.append(PyRSS2Gen.RSSItem(title=i[3], link=i[2], pubDate=i[0]))
rss = PyRSS2Gen.RSS2(
title=u"Seednet系統公告",
link="https://service.seed.net.tw/register-cgi/service_notice?FUNC=notice_qry_more&Category=02&Start=1",
description=u"此RSS內容取自Seednet網頁,依照著作權法之合理使用原則節錄部份內容。\
本RSS僅供參考,Seednet或任何人都不對內容負責",
lastBuildDate=mtime,
items=items)
output = StringIO.StringIO()
rss.write_xml(output,encoding='utf-8')
etag = hashlib.sha1(output.getvalue()).hexdigest()
memcache.set('seednet_rss', output.getvalue(), time=3600)
memcache.set('s_mtime', mtime, time=3600)
memcache.set('s_etag', etag, time=3600)
self.set_headers()
self.response.headers['ETag'] = '"%s"' % (etag,)
self.output_content(output.getvalue(), serve)
application = webapp2.WSGIApplication([
('/', MainPage),
('/hinet', Hinet),
('/seednet', Seednet),
], debug=False)
| bsd-3-clause | -7,144,110,593,311,770,000 | 35.611429 | 125 | 0.60309 | false | 3.290704 | false | false | false |
maximilianh/maxtools | lib/gumbyparser.py | 1 | 4210 | #!/usr/bin/python
import sys
from sys import *
from re import *
from optparse import OptionParser
import Fasta
import alignment
class gumbyBlock:
# easier to handle as a class
# baseseq is always first in seqs list
def __init__(self,number, score, pval, seqs):
self.number=number
self.score = score
self.pval=pval
self.seqs=seqs
def __repr__(self):
lines = []
lines.append( " * gumbyResult %d" % self.number)
lines.append("score %d, pval %e " % (self.score, self.pval))
for s in self.seqs:
lines.append( str(s))
return "\n".join(lines)
# -------- FUNCTIONS ------------------
def procData(baseSeq, exons, no, seqDict, pos, pval, length, score):
if len(seqDict)==0:
return []
if baseSeq not in seqDict:
stderr.write("error: your baseseq name is not in gumby result. remember that gumby keeps only first word of seq name\n")
sys.exit(1)
print seqDict[baseSeq]
if overlapped(pos[baseSeq], exons, baseSeq):
stderr.write("warning: dropping complete block with sequence %s:%s because baseSeq has overlapping exon annotation.\n" % (baseSeq, pos[baseSeq]))
return []
if seqDict[baseSeq].nucl.count("-")==len(seqDict[baseSeq].nucl):
stderr.write("warning: dropping complete block with sequence %s:%s because baseSeq contains only '-'-characters\n" % (baseSeq, pos[baseSeq]))
return []
if seqDict[baseSeq].nucl.count("N")==len(seqDict[baseSeq].nucl):
stderr.write("warning: dropping complete block with sequence %s:%s because baseSeq contains only N-characters\n" % (baseSeq, pos[baseSeq]))
return []
seqs = []
seqs.append(seqDict[baseSeq])
for n,s in seqDict.iteritems():
if n==baseSeq:
continue
seqs.append(s)
gb = gumbyBlock(no, score, pval, seqs)
return [gb]
def overlapped(pos, exons, baseSeq):
f1name, f1start, f1end = pos
if f1name != baseSeq:
return False
for e in exons:
f2start, f2end = e
# print "checking %d -- %d, %d" % (start, f2start, f2end)
result = (( f2start <= f1start and f2end > f1start) or \
(f2start < f1end and f2end >= f1end) or (f2start >= f1start and f2end <= f1end))
if result == True:
return True
return False
# ----- MAIN -------------------
def parseGumby(gumbyFile, exonFile, baseSeq):
# parses gumbyFile, removes things that overlap exons and gumbies that consist only of gaps on baseSeq
# returns a list of gumbyBlocks
infile = open(gumbyFile, "r")
exons = []
if exonFile!=None:
fh = open(exonFile, "r")
for l in fh:
fs = l.split()
if fs[0].lower()!=baseSeq:
continue
exons.append([ int(fs[3]), int(fs[4]) ] )
# print exons
re1 = compile("[a-z]+[ ]+[0-9]+[ ]+[0-9]+")
seqs = {}
pos = {}
i = -1
resultLst = alignment.Alignment()
for l in infile:
l = l.strip()
l = l.replace("*","-")
l = l.replace("<", "-")
l = l.replace(">", "-")
if l.startswith("start"):
if i!=-1:
resultLst.extend(procData(baseSeq, exons, i, seqs, pos, pval, length, score))
f = l.split()
pval = float(f[-1])
length = int(f[6].strip(","))
score = int(f[8].strip(","))
i+=1
seqs={}
if re1.match(l):
f = l.split()
name = f[0]
start = int(f[1])-1
end = int(f[2])-1
seq = f[3]
if name not in seqs:
faseq = Fasta.FastaSeq(name, seq)
faseq.chrom = name
faseq.start = start
faseq.end = end
seqs[name] = faseq
else:
faseq = seqs[f[0]]
faseq.nucl += f[3]
pos[name] = (name, start,end)
resultLst.extend(procData(baseSeq, exons, i, seqs, pos, pval, length, score))
return resultLst
# ---- DEBUGGING -----
#blocks = parseGumby("test/gumbyparser.gumby", "test/gumbyparser.bed", "oryzias")
#for b in blocks:
#print b
| gpl-2.0 | -6,803,909,765,453,174,000 | 31.137405 | 153 | 0.546318 | false | 3.301961 | false | false | false |
google-research/open-covid-19-data | tests/test_exported_data.py | 1 | 1437 | # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pandas as pd
import sys
import streamlit as st
PIPELINE_DIR = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '../')), 'src/pipeline')
sys.path.append(PIPELINE_DIR)
import path_utils
AGGREGATED_EXPORT_FILES = ['cc_by/aggregated_cc_by.csv',
'cc_by_sa/aggregated_cc_by_sa.csv',
'cc_by_nc/aggregated_cc_by_nc.csv']
def test_location_and_date_unique():
for f in AGGREGATED_EXPORT_FILES:
export_path = os.path.join(path_utils.path_to('export_dir'), f)
exported_df = pd.read_csv(export_path)
duplicates = exported_df[exported_df[['open_covid_region_code', 'date']].duplicated(keep=False)]
duplicate_info = duplicates[['open_covid_region_code', 'date']]
print(duplicate_info)
assert duplicates.shape[0] == 0
test_location_and_date_unique()
| apache-2.0 | -4,331,713,547,621,887,500 | 35.846154 | 108 | 0.70007 | false | 3.389151 | false | false | false |
splice/splice-server | playpen/candlepin/get_manifest_data.py | 1 | 3520 | #!/usr/bin/env python
import json
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "splice.checkin_service.settings")
from optparse import OptionParser
from splice.common import candlepin_client
def get_consumers(host, port, username, password, https):
return candlepin_client.get_consumers(host, port, username, password, https)
def get_entitlements(host, port, username, password, https):
return candlepin_client.get_entitlements(host, port, username, password, https)
def get_owners(host, port, username, password, https):
return candlepin_client.get_owners(host, port, username, password, https)
def get_pools(host, port, username, password, https):
return candlepin_client.get_pools(host, port, username, password, https)
def get_products(host, port, username, password, https):
return candlepin_client.get_products(host, port, username, password, https)
def get_rules(host, port, username, password, https):
return candlepin_client.get_rules(host, port, username, password, https)
def get_subscriptions(host, port, username, password, https):
return candlepin_client.get_subscriptions(host, port, username, password, https)
def get_accounts():
pass
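# Example invocation (illustrative host and credentials):
#   ./get_manifest_data.py --host candlepin.example.com --user admin --password admin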
if __name__ == "__main__":
# Parse Args
default_port=8443
default_user="admin"
default_password="admin"
parser = OptionParser(description="Script to fetch data from a candlepin")
parser.add_option('--host', action='store', default=None,
help="Hostname of Candlepin server")
parser.add_option('--port', action='store', default=default_port,
help="Port of Candlepin server defaults to: %s" % (default_port))
parser.add_option('--http', action='store_true', default=False, help="Use HTTP instead of HTTPs, default is False")
parser.add_option('--user', action='store', default=default_user,
help="Username, default is %s" % default_user)
parser.add_option('--password', action='store', default=default_password,
help="Password, default is %s" % default_password)
(opts, args) = parser.parse_args()
host = opts.host
port = opts.port
https = not opts.http
username = opts.user
password = opts.password
if not host:
print "Please re-run with --host"
sys.exit(1)
retval = get_consumers(host, port, username, password, https)
print "\nget_consumers = "
print json.dumps(retval, sort_keys=True, indent=4, separators=(',', ': '))
retval = get_entitlements(host, port, username, password, https)
print "\nget_entitlements = "
print json.dumps(retval, sort_keys=True, indent=4, separators=(',', ': '))
retval = get_owners(host, port, username, password, https)
print "\nget_owners = "
print json.dumps(retval, sort_keys=True, indent=4, separators=(',', ': '))
pools = get_pools(host, port, username, password, https)
print "\nget_pools = "
for p in pools:
print "%s\n" % p
products = get_products(host, port, username, password, https)
print "\nget_products = "
for p in products:
print "%s\n" % p
#TODO - Need to implement support for decoding response
#retval = get_rules(host, port, username, password, https)
#print "\nget_rules = \n%s" % (retval)
retval = get_subscriptions(host, port, username, password, https)
print "\nget_subscriptions() = "
#print json.dumps(retval, sort_keys=True, indent=4, separators=(',', ': '))
| gpl-2.0 | -4,783,654,591,121,323,000 | 35.666667 | 119 | 0.669318 | false | 3.659044 | false | false | false |
bryndin/tornado-flickr-api | tornado_flickrapi/tools.py | 1 | 2292 | import sys
import os
from tornado.gen import coroutine, Return
from method_call import call_api
#TODO(DB): update callers to expect future
@coroutine
def load_methods():
"""
Loads the list of all methods
"""
try:
r = yield call_api(method="flickr.reflection.getMethods")
except Exception as e:
raise e
raise Return(r["methods"]["method"])
__perms__ = {0: 'none', '1': 'read', '2': 'write', '3': 'delete'}
#TODO(DB): update callers to expect future
@coroutine
def methods_info():
methods = {}
    methods_list = yield load_methods()
    for m in methods_list:
try:
info = yield call_api(method="flickr.reflection.getMethodInfo", method_name=m)
except Exception as e:
raise e
info.pop("stat")
method = info.pop("method")
method["requiredperms"] = __perms__[method["requiredperms"]]
method["needslogin"] = bool(method.pop("needslogin"))
method["needssigning"] = bool(method.pop("needssigning"))
info.update(method)
info["arguments"] = info["arguments"]["argument"]
info["errors"] = info["errors"]["error"]
methods[m] = info
raise Return(methods)
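# Usage sketch (illustrative; must be awaited from inside a running Tornado coroutine):
#   methods = yield methods_info()
#   search_info = methods.get("flickr.photos.search")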
def write_reflection(path, template, methods=None):
if methods is None:
methods = methods_info()
with open(template, "r") as t:
templ = t.read()
prefix = ""
new_templ = ""
tab = " "
templ = templ % str(methods)
for c in templ:
if c == '{':
new_templ += '{\n' + prefix
prefix += tab
elif c == '}':
new_templ += '\n' + prefix + '}\n' + prefix
prefix = prefix[:-len(tab)]
else:
new_templ += c
with open(path, "w") as f:
f.write(new_templ)
def write_doc(output_path, exclude=["flickr_keys", "methods"]):
import flickr_api
exclude.append("__init__")
modules = ['flickr_api']
dir = os.path.dirname(flickr_api.__file__)
modules += [
"flickr_api." + f[:-3]
for f in os.listdir(dir)
if f.endswith(".py") and f[:-3] not in exclude]
sys.path.insert(0, dir + "../")
if not os.path.exists(output_path):
os.makedirs(output_path)
os.chdir(output_path)
for m in modules:
os.system("pydoc -w " + m)
| bsd-3-clause | -206,738,358,129,790,140 | 26.285714 | 90 | 0.557155 | false | 3.531587 | false | false | false |
avatartwo/avatar2 | avatar2/message.py | 1 | 2187 |
class AvatarMessage(object):
def __init__(self, origin):
self.origin = origin
def __str__(self):
if self.origin:
return "%s from %s" % (self.__class__.__name__, self.origin.name)
else:
return "%s from unkown origin" % self.__class__.__name__
class UpdateStateMessage(AvatarMessage):
def __init__(self, origin, new_state):
super(UpdateStateMessage, self).__init__(origin)
self.state = new_state
class BreakpointHitMessage(UpdateStateMessage):
def __init__(self, origin, breakpoint_number, address):
super(BreakpointHitMessage, self).__init__(origin, TargetStates.BREAKPOINT)
self.breakpoint_number = breakpoint_number
self.address = address
class SyscallCatchedMessage(BreakpointHitMessage):
def __init__(self, origin, breakpoint_number, address, type='entry'):
super(self.__class__, self).__init__(origin, breakpoint_number, address)
self.type = type
class RemoteMemoryReadMessage(AvatarMessage):
def __init__(self, origin, id, pc, address, size, dst=None):
super(self.__class__, self).__init__(origin)
self.id = id
self.pc = pc
self.address = address
self.size = size
self.dst = dst
self.num_words = 1
self.raw = False
class RemoteMemoryWriteMessage(AvatarMessage):
def __init__(self, origin, id, pc, address, value, size, dst=None):
super(self.__class__, self).__init__(origin)
self.id = id
self.pc = pc
self.address = address
self.value = value
self.size = size
self.dst = dst
class RemoteInterruptEnterMessage(AvatarMessage):
def __init__(self, origin, id, interrupt_num):
super(self.__class__, self).__init__(origin)
self.id = id
self.interrupt_num = interrupt_num
class RemoteInterruptExitMessage(AvatarMessage):
def __init__(self, origin, id, transition_type, interrupt_num):
super(self.__class__, self).__init__(origin)
self.id = id
self.transition_type = transition_type
self.interrupt_num = interrupt_num
from .targets.target import TargetStates
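# Minimal usage sketch (illustrative only; a real `origin` would be an avatar2 target
# object exposing a `name` attribute, and the numeric values below are made up):
#   msg = RemoteMemoryReadMessage(origin=None, id=1, pc=0x1000, address=0x20000000, size=4)
#   print(msg)  # -> "RemoteMemoryReadMessage from unknown origin"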
| apache-2.0 | 8,676,645,026,651,915,000 | 31.641791 | 83 | 0.622771 | false | 3.983607 | false | false | false |
douglaswei/stock | featProcess/stockit/FeatManager.py | 1 | 3746 | __author__ = 'wgz'
import sys
from FeatTranslator import FeatTranslator
labelName = "label"
codeName = "code"
dateName = "date"
class FeatManager:
def set_train_predict_path(self, feat_train_path, feat_predict_path):
self.trainFile = open(feat_train_path, 'w')
self.predictFile = open(feat_predict_path, 'w')
def extract(self, raw_feat_path, feat_train_path, feat_predict_path):
self.set_train_predict_path(feat_train_path, feat_predict_path)
for line in open(raw_feat_path):
line = line[:-1]
fields = [item.split(":") for item in line.split("\t")]
field_map = dict(fields)
label, code, date = field_map.get(labelName), field_map.get(codeName), field_map.get(dateName)
out_file = self.predictFile if len(label) == 0 else self.trainFile
label_res = 0 if len(label) == 0 else 0 if float(label) <= 0 else 1
out_str = "%s:%f\t%s:%s\t%s:%s" % (labelName, label_res, codeName, code, dateName, date)
translator = FeatTranslator()
feature_map = translator.extract(field_map, False)
sorted_featids = sorted(feature_map.keys())
for featId in sorted_featids:
out_str += "\t%d:%s" % (featId, feature_map[featId])
out_file.write(out_str + '\n')
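    # Input format note (illustrative): extract() expects each raw-feature line to be
    # tab-separated "name:value" pairs including label, code and date, e.g.
    #   label:1.0\tcode:600000\tdate:20140101\tfeat_a:0.5\tfeat_b:1.2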
def extract_maxs_mins(self, raw_feat_path, feat_beg_idx):
feature_max_values = []
feature_min_values = []
for line in open(raw_feat_path):
fields = [float(item.split(':')[1]) for item in line[:-1].split('\t')[feat_beg_idx:]]
if len(feature_max_values) == 0:
feature_max_values = fields[:]
if len(feature_min_values) == 0:
feature_min_values = fields[:]
feature_max_values = map(max, zip(feature_max_values, fields))
feature_min_values = map(min, zip(feature_min_values, fields))
return feature_max_values, feature_min_values
def extract_discrete_feat(self, raw_feat_path, feat_train_path, feat_predict_path, feat_beg_idx, span_num):
self.set_train_predict_path(feat_train_path, feat_predict_path)
feature_max_values, feature_min_values = self.extract_maxs_mins(raw_feat_path, feat_beg_idx)
feature_diffs = map(lambda x: x[0] - x[1], zip(feature_max_values, feature_min_values))
feature_spans = map(lambda x: x / span_num or 0.1, feature_diffs)
translator = FeatTranslator()
for line in open(raw_feat_path):
kvs = [item.split(':') for item in line[:-1].split('\t')]
raw_feature_map = dict(kvs)
label, code, date = raw_feature_map.get(labelName), raw_feature_map.get(codeName), raw_feature_map.get(
dateName)
# label_res = 0 if (len(label) == 0 or float(label) <= 0) else float(float(label) / 30)
label_res = 0 if len(label) == 0 else float(label) / 50
label_res = pow(label_res, 0.5) if label_res > 0 else label_res
feature_map = dict(map(lambda (v, s, m): [v[0], (float(v[1]) - m) / s],
zip(kvs[feat_beg_idx:], feature_spans, feature_min_values)))
feature_res_map = translator.extract(feature_map, True)
out_str = "%s:%f\t%s:%s\t%s:%s" % (labelName, label_res, codeName, code, dateName, date)
for featId in sorted(feature_res_map.keys()):
out_str += "\t%d:%s" % (featId, feature_res_map[featId])
fout = self.predictFile if len(label) == 0 else self.trainFile
fout.write(out_str + '\n')
if __name__ == "__main__":
featManager = FeatManager("test")
featManager.extract("../feats")
sys.exit(0)
| gpl-2.0 | 7,675,545,704,607,347,000 | 43.595238 | 115 | 0.584357 | false | 3.309187 | false | false | false |
Peilonrayz/instruction-follower | src/hrm_readable/__main__.py | 1 | 3562 | import re
def setup():
BASE_COMMANDS = [
("inbox", ""),
("outbox", ""),
("copyfrom", "{}"),
("copyto", "{}"),
("add", "{}"),
("sub", "{}"),
("bumpup", "{}"),
("bumpdn", "{}"),
("jump", "{}"),
("jumpz", "{}"),
("jumpn", "{}"),
]
max_len = max(len(c[0]) for c in BASE_COMMANDS) + 1
base_commands = {
command: " " + command.upper() + " " * (max_len - len(command)) + arg
for command, arg in BASE_COMMANDS
}
base_commands.update({"label": "{}:"})
additional_commands = {
"b>": (("inbox", ""),),
"b<": (("outbox", ""),),
"c>": (("copyto", "{0}"),),
"c<": (("copyfrom", "{0}"),),
"+": (("add", "{0}"),),
"-": (("sub", "{0}"),),
"u>": (("add", "{0}"),),
"u<": (("sub", "{0}"),),
"::": (("label", "{0}"),),
"~:": (("jump", "{0}"),),
"-:": (("jumpn", "{0}"),),
"0:": (("jumpz", "{0}"),),
"=>": (("jump", "{0}"),),
"->": (("jumpn", "{0}"),),
"0>": (("jumpz", "{0}"),),
"place": (("inbox", ""), ("copyto", "{0}"),),
"take": (("copyfrom", "{0}"), ("outbox", ""),),
"through": (("inbox", ""), ("outbox", ""),),
"gt": (("copyfrom", "{0}"), ("sub", "{1}"),),
"lt": (("copyfrom", "{1}"), ("sub", "{0}"),),
"move": (("copyfrom", "{0}"), ("copyto", "{1}"),),
"swap": (
("copyfrom", "{0}"),
("copyto", "{2}"),
("copyfrom", "{1}"),
("copyto", "{0}"),
("copyfrom", "{2}"),
("copyto", "{1}"),
),
"i>": (("inbox", ""), ("copyto", "{0}"),),
"i<": (("copyfrom", "{0}"), ("outbox", ""),),
">>": (("inbox", ""), ("outbox", ""),),
">": (("copyfrom", "{0}"), ("sub", "{1}"),),
"<": (("copyfrom", "{1}"), ("sub", "{0}"),),
"~>": (("copyfrom", "{0}"), ("copyto", "{1}"),),
"<>": (
("copyfrom", "{0}"),
("copyto", "{2}"),
("copyfrom", "{1}"),
("copyto", "{0}"),
("copyfrom", "{2}"),
("copyto", "{1}"),
),
}
return base_commands, additional_commands
COMMANDS, ADDITIONAL_COMMANDS = setup()
def read_commands(program):
commands = []
for line in program:
line = line.strip()
if not line or line.startswith(("#", "//", "--")):
continue
match = re.match(r"(.+):$", line)
if match:
commands.append(("label", (match.groups(1))))
continue
name, *args = line.split()
commands.append((name.lower(), args))
return commands
def to_hrm(commands):
hrm_commands = []
for name, args in commands:
additional_commands = ADDITIONAL_COMMANDS.get(name, None)
if additional_commands is None:
hrm_commands.append((name, (args[:1] or [None])[0]))
continue
for command, value in additional_commands:
hrm_commands.append((command, value.format(*args)))
return hrm_commands
def format_hrm(commands):
return "\n".join(COMMANDS[name].format(arg) for name, arg in commands)
while True:
level = input("level: ")
try:
f = open("./levels/{}".format(level))
except FileNotFoundError:
print("File doesn't exist")
continue
with f:
mhrm_commands = read_commands(f)
hrm_commands = to_hrm(mhrm_commands)
print("\n\n{}\n\n".format(format_hrm(hrm_commands)))
| mit | -2,887,195,703,688,148,500 | 29.444444 | 80 | 0.396126 | false | 3.42171 | false | false | false |
GenosResearchGroup/ContourMetrics | apps/calculator/views.py | 1 | 14052 | import itertools
import json
import numpy
import scipy.stats
from django.db.models import Q
from django.http import HttpResponse
from django.shortcuts import render
import lib
import lib.similarity
from apps.calculator import models
from apps.calculator.forms import ComparisonFormOne, ComparisonFormMultiple, AlgorithmForm, \
ComparisonFormAlgorithms
from apps.calculator.models import Contour
from lib import Contour as C
from lib.utils import print_cseg, get_first, get_last, apply_fn_to_matrix, round_tolist
# Views
def calculator(request):
args = {'algorithm_form': AlgorithmForm()}
return render(request, 'calculator.html', args)
def get_calculus(request):
if request.is_ajax():
normalized_seq = list(map(int, json.loads(request.POST.get('normalized'))))
operation = request.POST.get('operation')
query_set = Contour.objects.filter(normalized=normalized_seq)
if query_set and operation in ['Class', 'Reduction', 'IAD', 'Oscillation']:
contour_model = query_set[0]
else:
contour = C(list(map(int, json.loads(request.POST.get('request_data')))))
if operation == 'Class':
if query_set:
result = contour_model.equivalent_class
else:
result = contour.equivalent_class_prime().sequence
elif operation == 'Reduction':
if query_set:
result = contour_model.prime
else:
result = contour.reduction()[0].sequence
elif operation == 'IAD':
if query_set:
result = contour_model.direction_index
else:
result = contour.direction_index()
elif operation == 'Oscillation':
if query_set:
result = contour_model.oscillation_index
else:
result = contour.oscillation_index()
elif operation == 'Window 3':
result = contour.window_reduction(3).sequence
elif operation == 'Window 5':
result = contour.window_reduction(5).sequence
else:
return
json_out = json.dumps(result)
return HttpResponse(json_out, content_type="application/json")
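# The AJAX POST handled by get_calculus above carries (illustrative values):
#   normalized:   JSON-encoded list of ints, e.g. "[0, 2, 1]"
#   request_data: JSON-encoded list of ints, e.g. "[0, 3, 1]"
#   operation:    one of "Class", "Reduction", "IAD", "Oscillation", "Window 3", "Window 5"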
def get_memory_comparison(request):
if request.is_ajax():
algorithm_index = json.loads(request.POST.get('algorithm'))
memories = json.loads(request.POST.get('memory'))
algorithm = models.ALGORITHM_CHOICES[algorithm_index][0]
contours = {}
for k, v in memories.items():
contour = C(list(map(int, v['array'])))
if k not in contours:
contours[k] = contour
combinations = itertools.combinations(contours.keys(), 2)
data = []
for a, b in combinations:
ca = contours[a]
cb = contours[b]
labels = ' '.join([ca.__repr__(), cb.__repr__()])
comp_obj = lib.Comparison(ca, cb)
value = comp_obj.compare(algorithm)
data.append([labels, value])
json_out = json.dumps(data)
return HttpResponse(json_out, content_type="application/json")
def comparison_one(request):
form = ComparisonFormOne()
args = {'form': form}
return render(request, 'comparison_one.html', args)
def comparison_multiple(request):
form = ComparisonFormMultiple()
args = {'form': form}
return render(request, 'comparison_multiple.html', args)
def comparison_algorithms(request):
form = ComparisonFormAlgorithms()
args = {'form': form}
return render(request, 'benchmark.html', args)
def get_comparison_data_one(request):
if request.is_ajax():
main_contour_seq = list(map(int, json.loads(request.POST.get('mainContour'))))
collection_id = request.POST.get('selectedCollection')
algorithm_label = request.POST.get('selectedAlgorithm')
mode_label = request.POST.get('selectedMode')
key_label = request.POST.get('selectedKey')
time_signature_label = request.POST.get('selectedTimeSignature')
voice = request.POST.get('selectedVoice')
collection = models.Collection.objects.get(id=collection_id)
contour_query = [Q(phrase__piece__collection=collection)]
if mode_label != '':
contour_query.append(Q(phrase__piece__mode=mode_label))
if key_label != '':
contour_query.append(Q(phrase__piece__key=key_label))
if time_signature_label != '':
contour_query.append(Q(phrase__piece__time_signature=time_signature_label))
if voice != '':
contour_query.append(Q(phrase__voice=voice))
qs_contours = models.Contour.objects.filter(*contour_query).distinct()
main_contour = C(main_contour_seq)
main_contour_adjacency_series = main_contour.adjacent_series()
if algorithm_label == 'OSC':
input_data_a = main_contour.oscillation_spectrum(False)
else:
input_data_a = ''.join(map(str, main_contour_adjacency_series))
data = []
for c in qs_contours.values('normalized', 'oscillation_spectrum', 'adjacency_series'):
if algorithm_label == 'OSC':
input_data_b = list(map(get_first, c['oscillation_spectrum']))
value = lib.similarity.oscillation_spectrum_correlation(input_data_a, input_data_b)
elif algorithm_label == 'AED':
input_data_b = ''.join(map(str, c['adjacency_series']))
value = lib.similarity.adjacent_edit_distance(input_data_a, input_data_b)
elif algorithm_label == 'AGP':
input_data_b = ''.join(map(str, c['adjacency_series']))
value = lib.similarity.sequence_similarity(input_data_a, input_data_b)
data.append([print_cseg(c['normalized']), value])
arr = numpy.array(list(map(get_last, data)))
mean = numpy.nan_to_num(arr.mean())
std = numpy.nan_to_num(arr.std())
frequency = scipy.stats.itemfreq(arr)
statistics = [
['Mean', mean],
['Std', std],
['Skew', numpy.nan_to_num(scipy.stats.skew(arr))],
['Kurtosis', numpy.nan_to_num(scipy.stats.kurtosis(arr))],
['Variation coefficient', std / mean],
['Entropy', scipy.stats.entropy(frequency)[0]]
]
json_out = json.dumps({
'data': data,
'statistics': statistics
})
return HttpResponse(json_out, content_type="application/json")
def get_comparison_data_multiple(request):
if request.is_ajax():
collection_id = request.POST.get('selectedCollection')
algorithm_label = request.POST.get('selectedAlgorithm')
mode_label = request.POST.get('selectedMode')
key_label = request.POST.get('selectedKey')
time_signature_label = request.POST.get('selectedTimeSignature')
voice_label = request.POST.get('selectedVoice')
collection = models.Collection.objects.get(id=collection_id)
contour_query = [
Q(phrase__piece__collection=collection),
]
comparison_query = [
Q(collection=collection),
Q(algorithm=algorithm_label)
]
if mode_label != '':
contour_query.append(Q(phrase__piece__mode=mode_label))
comparison_query.append(Q(mode=mode_label))
else:
comparison_query.append(Q(mode=None))
if key_label != '':
contour_query.append(Q(phrase__piece__key=key_label))
comparison_query.append(Q(key=key_label))
else:
comparison_query.append(Q(key=None))
if time_signature_label != '':
contour_query.append(Q(phrase__piece__time_signature=time_signature_label))
comparison_query.append(Q(time_signature=time_signature_label))
else:
comparison_query.append(Q(time_signature=None))
if voice_label != '':
contour_query.append(Q(phrase__voice=voice_label))
comparison_query.append(Q(voice=voice_label))
else:
comparison_query.append(Q(voice=None))
qs_comparison = models.Comparison.objects.filter(*comparison_query)
# FIXME: get the comparison object
print('> Comparison', qs_comparison)
if not qs_comparison.exists():
return HttpResponse(json.dumps({'collection_exists': False}), content_type="application/json")
comp = models.Comparison.objects.filter(*comparison_query).first()
columns = ['mean', 'std', 'skew', 'kurtosis', 'variation_coefficient']
ind = ['similarity', 'direction', 'oscillation', 'size', 'diversity']
columns_size = len(columns)
ind_size = len(ind)
dic = comp.get_dict()
matrix = numpy.zeros(columns_size * ind_size).reshape(columns_size, ind_size)
for j in range(ind_size):
for i in range(columns_size):
label = '{}_{}'.format(ind[j], columns[i])
matrix[i][j] = dic[label]
seq = matrix.round(2).tolist()
for row, label in zip(seq, columns):
row.insert(0, label.capitalize().replace('_', ' '))
dic = {
'statistics': seq,
'collection_exists': True
}
dic.update(comp.get_dict())
json_out = json.dumps(dic)
return HttpResponse(json_out, content_type="application/json")
def get_comparison_algorithms_data(request):
if request.is_ajax():
collection_id = request.POST.get('selectedCollection')
algorithms_labels = request.POST.getlist('selectedAlgorithms[]')
mode_label = request.POST.get('selectedMode')
key_label = request.POST.get('selectedKey')
time_signature_label = request.POST.get('selectedTimeSignature')
voice_label = request.POST.get('selectedVoice')
collection = models.Collection.objects.get(id=collection_id)
contour_query = [
Q(phrase__piece__collection=collection),
]
if mode_label != '':
contour_query.append(Q(phrase__piece__mode=mode_label))
if key_label != '':
contour_query.append(Q(phrase__piece__key=key_label))
if time_signature_label != '':
contour_query.append(Q(phrase__piece__time_signature=time_signature_label))
if voice_label != '':
contour_query.append(Q(phrase__voice=voice_label))
qs_contour = models.Contour.objects.filter(*contour_query)
confidence_level = 95
confidence_interval = 10
sample_size = lib.utils.sample_size(qs_contour.count(), confidence_level, confidence_interval)
qs_contour_sample = qs_contour.order_by('?')[:sample_size]
contours = map(lambda d: d['normalized'], qs_contour_sample.values('normalized'))
combined_contours = itertools.combinations(contours, 2)
seq = []
aux_dic = {}
for a, b in combined_contours:
k = tuple(sorted([tuple(a), tuple(b)]))
if k not in aux_dic:
ca = C(a)
cb = C(b)
comp = lib.Comparison(ca, cb)
aux_dic[k] = [comp.compare(algorithm) for algorithm in algorithms_labels]
seq.append(aux_dic[k])
arr = numpy.array(seq)
number_of_algorithms = len(algorithms_labels)
combined_index = list(itertools.combinations(range(number_of_algorithms), 2))
scatter_data = []
scatter_labels = []
correlation_data = []
arr_t = arr.T
# get correlation scatter data
for i1, i2 in combined_index:
scatter_data.append(arr_t[[i1, i2]].T.tolist())
scatter_labels.append([algorithms_labels[j] for j in [i1, i2]])
correlation_data.append(numpy.corrcoef(arr_t[i1], arr_t[i2])[0][1])
hist_data = []
correlation = numpy.zeros(number_of_algorithms ** 2).reshape(number_of_algorithms, number_of_algorithms).tolist()
# get correlation table and histogram data
for i in range(number_of_algorithms):
hist_data.append([[str(a), b] for a, b in zip(range(len(arr_t[i])), arr_t[i])])
for j in range(i, number_of_algorithms):
if i == j:
v = 1
else:
v = numpy.corrcoef(arr_t[i], arr_t[j])[0][1].round(2)
correlation[i][j] = v
correlation[j][i] = v
for row, label in zip(correlation, algorithms_labels):
row.insert(0, label.upper())
correlation_header = algorithms_labels
correlation_header.insert(0, '')
sample_data = [
['Population size', qs_contour.count()],
['Sample size', sample_size],
['Confidence level', confidence_level],
['Confidence interval', confidence_interval],
]
# Entropy doesn't work with apply_fn_to_matrix
entropy = numpy.matrix([scipy.stats.entropy(scipy.stats.itemfreq(arr[:, i]))[1] for i in range(len(arr.T))])
std = apply_fn_to_matrix(arr, numpy.std)
mean = apply_fn_to_matrix(arr, numpy.mean)
statistics_data = [
['Mean'] + round_tolist(mean),
['Median'] + round_tolist(apply_fn_to_matrix(arr, numpy.median)), # df.median().round(2).values.tolist(),
['Std'] + round_tolist(std),
['Skew'] + round_tolist(apply_fn_to_matrix(arr, scipy.stats.skew)),
['Kurtosis'] + round_tolist(apply_fn_to_matrix(arr, scipy.stats.kurtosis)),
['Variation coefficient'] + round_tolist(std / mean), # (df.std() / df.mean()).round(2).values.tolist(),
['Entropy'] + round_tolist(entropy) # [scipy.stats.entropy(df[c].value_counts()).round(2) for c in df.columns]
]
dic = {
'sample_data': sample_data,
'correlation': correlation,
'correlation_header': correlation_header,
'scatter_data': scatter_data,
'scatter_labels': scatter_labels,
'histogram_data': hist_data,
'histogram_labels': correlation_header[1:],
'statistics_data': statistics_data
}
json_out = json.dumps(dic)
return HttpResponse(json_out, content_type="application/json")
| mit | 4,782,281,060,298,566,000 | 34.484848 | 119 | 0.607316 | false | 3.8362 | false | false | false |
edgarcosta/lmfdb-gce | transition_scripts/reimport_knowls_and_userdb.py | 1 | 3895 | import os, sys
from sage.all import load
os.chdir("/home/edgarcosta/lmfdb/")
sys.path.append("/home/edgarcosta/lmfdb/")
import lmfdb
db = lmfdb.db_backend.db
DelayCommit = lmfdb.db_backend.DelayCommit
load("/home/edgarcosta/lmfdb-gce/transition_scripts/export_special.py")
def backup():
import subprocess, datetime
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M")
userdbdump="/scratch/postgres-backup/userdb-backup-%s.tar" % timestamp
knowlsdump="/scratch/postgres-backup/knowls-backup-%s.tar" % timestamp
a = subprocess.check_call(["sudo", "-u", "postgres", "pg_dump", "--clean", "--if-exists", "--schema=userdb", "--file", userdbdump, "--format", "tar", "lmfdb"])
b = subprocess.check_call(["sudo", "-u", "postgres", "pg_dump", "--clean", "--if-exists", "--schema=public", "-t", 'kwl_knowls', "-t", "kwl_deleted", "-t", "kwl_history", "--file", knowlsdump, "--format", "tar", "lmfdb"], stderr=subprocess.STDOUT)
if a + b != 0:
print "Failed to backup users and kwl_*"
raise ValueError
print "Succeeded in backing up knowls and userdb"
return a + b
def import_knowls():
cur = db.conn.cursor()
tablenames = ['kwl_history', 'kwl_deleted', 'kwl_knowls'];
with DelayCommit(db, silence=True):
try:
# rename old tables
for name in tablenames:
cur.execute("ALTER TABLE IF EXISTS %s DROP CONSTRAINT IF EXISTS %s_pkey" % (name, name));
cur.execute("DROP TABLE IF EXISTS %s" % name);
# create tables
cur.execute("CREATE TABLE kwl_knowls (id text, cat text, title text, content text, authors jsonb, last_author text, quality text, timestamp timestamp, _keywords jsonb, history jsonb)")
cur.execute("CREATE TABLE kwl_deleted (id text, cat text, title text, content text, authors jsonb, last_author text, quality text, timestamp timestamp, _keywords jsonb, history jsonb)")
cur.execute("CREATE TABLE kwl_history (id text, title text, time timestamp, who text, state text)")
for tbl in ["kwl_knowls", "kwl_deleted", "kwl_history"]:
for action in ["INSERT", "UPDATE", "DELETE"]:
db._grant(action, tbl, ['webserver'])
db.grant_select(tbl)
with open('/scratch/importing/kwl_knowls.txt') as F:
cur.copy_from(F, 'kwl_knowls', columns=["id", "cat", "title", "content", "authors", "last_author", "quality", "timestamp", "_keywords", "history"])
with open('/scratch/importing/kwl_history.txt') as F:
cur.copy_from(F, 'kwl_history', columns=["id", "title", "time", "who", "state"])
cur.execute("ALTER TABLE kwl_knowls ADD CONSTRAINT kwl_knowls_pkey PRIMARY KEY (id)")
# no primary key on deleted
#cur.execute("ALTER TABLE kwl_deleted ADD CONSTRAINT kwl_deleted_pkey PRIMARY KEY (id)")
cur.execute("ALTER TABLE kwl_history ADD CONSTRAINT kwl_history_pkey PRIMARY KEY (id)")
except Exception:
print "Failure in importing knowls"
db.conn.rollback()
raise
print "Succeeded in importing knowls"
def import_users():
with DelayCommit(db, silence=True):
try:
conn = db.conn
cur = conn.cursor()
# delete rows of usersdb.users
cur.execute("DELETE FROM userdb.users")
with open('/scratch/importing/users.txt') as F:
cur.copy_from(F, 'userdb.users', columns=["username", "password", "bcpassword", "admin", "color_scheme", "full_name", "email", "url", "about", "created"])
except Exception:
conn.rollback()
print "Failure in importing users"
raise
print "Successfully imported users"
export_knowls()
export_users()
backup()
import_knowls()
import_users()
| mit | 8,707,786,978,296,396,000 | 47.08642 | 251 | 0.61258 | false | 3.664158 | false | false | false |
bstrebel/OxAPI | oxapi/attachment.py | 1 | 3058 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os,sys,time,json,requests
from oxapi import *
class OxAttachment(OxBean):
module_name = 'attachment'
module_type = None
map = {'folder': 800,
'attached': 801,
'module': 802,
'filename': 803,
'filesize': 804,
'file_mimetype': 805,
'rtf_flag': 806}
map.update(OxBean.map)
columns = OxBean.columns(map)
def __init__(self, data, ox=None, timestamp=None, columns=None):
self._document = None
self._module = 'attachment'
OxBean.__init__(self, data, ox, timestamp, columns)
@property
def document(self):
if not self._document:
if self._data:
params = {'module': self.module,
'attached': self.attached,
'id': self.id,
'folder': self.folder}
document = self._ox.get(self.module_name, 'document', params)
if document:
#self._timestamp = content.get('timestamp', None)
self._document = document
return self._document
def detach(self):
if self._data:
params = {'module': self.module,
'attached': self.attached,
'folder': self.folder}
body = []
body.append(self.id)
result = self._ox.put(self.module_name,'detach', params, body)
# {u'timestamp': 1449912233915L, u'data': u''}
pass
class OxAttachments(OxBeans):
module_name = 'attachment'
def __init__(self, ox):
OxBeans.__init__(self, ox)
def action(self, action, params):
if action == 'all':
params.update({'columns': ",".join(map(lambda id: str(id), OxAttachment.columns))})
self._data = []
OxBeans.action(self, OxAttachment, action, params)
if self._raw:
folder = params['folder']
id = OxAttachment.map['folder']
pos = OxAttachment.columns.index(id)
for raw in self._raw:
# workaround because of Open-Xchange bug
if raw[pos] == 0: raw[pos] = folder
self._data.append(OxAttachment(raw, self._ox))
return self
elif action == 'get':
self._data = None
OxBeans.action(self, OxAttachment, action, params)
self._data = OxAttachment(self._raw, self._ox, self._timestamp)
return self._data
elif action == 'document':
self._data = None
OxBeans.action(self, OxAttachment, action, params)
self._data = OxAttachment(self._raw, self._content, self._ox, self._timestamp)
return self._data
# region __main__
if __name__ == '__main__':
with OxHttpAPI.get_session() as ox:
task = ox.get_task('246','43806')
attachments = ox.get_attachments(task)
pass
# endregion
| gpl-2.0 | -6,618,038,992,664,991,000 | 28.980392 | 95 | 0.515697 | false | 3.915493 | false | false | false |
mrnamingo/enigma2-test | lib/python/Plugins/SystemPlugins/SoftwareManager/ImageFlasher.py | 1 | 30780 | from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier
from Components.Label import Label
from Components.Button import Button
from Components.ActionMap import ActionMap
from Components.MenuList import MenuList
from Components.FileList import FileList
from Components.Task import Task, Job, job_manager, Condition
from Components.Sources.StaticText import StaticText
from Components.SystemInfo import SystemInfo
from Screens.Console import Console
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Screens.Screen import Screen
from Screens.Console import Console
from Screens.HelpMenu import HelpableScreen
from Screens.TaskView import JobView
from Tools.Downloader import downloadWithProgress
from enigma import fbClass
import urllib2
import os
import shutil
import math
from boxbranding import getBoxType, getImageDistro, getMachineName, getMachineBrand, getImageVersion, getMachineKernelFile, getMachineRootFile
distro = getImageDistro()
ImageVersion = getImageVersion()
ROOTFSBIN = getMachineRootFile()
KERNELBIN = getMachineKernelFile()
#############################################################################################################
image = 0 # 0=openATV / 1=openMips
if distro.lower() == "openmips":
image = 1
elif distro.lower() == "openatv":
image = 0
feedurl_atv = 'http://images.mynonpublic.com/openatv/%s' %ImageVersion
if ImageVersion == '5.3':
	ImageVersion2 = '5.4'
else:
	ImageVersion2 = '5.3'
feedurl_atv2= 'http://images.mynonpublic.com/openatv/%s' %ImageVersion2
feedurl_om = 'http://image.openmips.com/5.3'
imagePath = '/media/hdd/imagebackups'
flashPath = '/media/hdd/flash1'
flashTmp = '/media/hdd/flash2'
ofgwritePath = '/usr/bin/ofgwrite'
#############################################################################################################
def Freespace(dev):
statdev = os.statvfs(dev)
space = (statdev.f_bavail * statdev.f_frsize) / 1024
print "[Flash Online] Free space on %s = %i kilobytes" %(dev, space)
return space
def ReadNewfeed():
f = open('/etc/enigma2/newfeed', 'r')
newfeed = f.readlines()
f.close()
return newfeed
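# /etc/enigma2/newfeed is read as three lines: the feed URL, then the HTTP basic-auth
# user and password used by the download code below (values here are examples only):
#   http://example.org/images
#   someuser
#   somepassword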
class FlashOnline(Screen):
skin = """
<screen position="center,center" size="560,400" title="Image Flasher">
<ePixmap position="10,368" zPosition="1" size="140,40" pixmap="skin_default/buttons/red.png" transparent="1" alphatest="on" />
<ePixmap position="140,368" zPosition="1" size="140,40" transparent="1" alphatest="on" />
<ePixmap position="280,368" zPosition="1" size="140,40" transparent="1" alphatest="on" />
<ePixmap position="425,368" zPosition="1" size="140,40" pixmap="skin_default/buttons/blue.png" transparent="1" alphatest="on" />
<widget source="key_red" render="Label" position="0,360" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_green" render="Label" position="140,360" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_yellow" render="Label" position="280,360" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_blue" render="Label" position="420,360" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget name="info-online" position="10,30" zPosition="1" size="450,100" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" halign="left" valign="top" transparent="1" />
<widget name="info-local" position="10,150" zPosition="1" size="450,200" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" halign="left" valign="top" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.selection = 0
self.devrootfs = "/dev/mmcblk0p3"
self.multi = 1
self.list = self.list_files("/boot")
Screen.setTitle(self, _("ViX4E2PROJECT Image Flasher"))
self["key_yellow"] = Button("")
self["key_green"] = Button("NEXT")
self["key_red"] = Button(_("EXIT"))
self["key_blue"] = Button(_(""))
self["info-local"] = Label(_("You will automatically be presented with existing available image .zip files (if any) which are stored in your default local folder: /media/hdd/imagebackups\n\nYou can choose to directly flash any of these images from this menu by highlighting and pressing green to flash or you can press yellow to browse to another image .zip file which is stored elsewhere on your system."))
self["info-online"] = Label(_("When on the next step of this process you will have the options of pressing green to flash highlighted images in your default folder (if any), blue to delete image, yellow to browse to another image .zip file or red to exit."))
self["press-green"] = Label(_("PRESS GREEN BUTTON TO CONTINUE TO NEXT STEP"))
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"blue": self.quit,
"yellow": self.quit,
"green": self.green,
"red": self.quit,
"cancel": self.quit,
}, -2)
if SystemInfo["HaveMultiBoot"]:
self.multi = self.read_startup("/boot/" + self.list[self.selection]).split(".",1)[1].split(" ",1)[0]
self.multi = self.multi[-1:]
print "[Flash Online] MULTI:",self.multi
def check_hdd(self):
if not os.path.exists("/media/hdd"):
self.session.open(MessageBox, _("No /hdd found !!\nPlease make sure you have a HDD mounted.\n\nExit plugin."), type = MessageBox.TYPE_ERROR)
return False
if Freespace('/media/hdd') < 300000:
self.session.open(MessageBox, _("Not enough free space on /hdd !!\nYou need at least 300Mb free space.\n\nExit plugin."), type = MessageBox.TYPE_ERROR)
return False
if not os.path.exists(ofgwritePath):
self.session.open(MessageBox, _('ofgwrite not found !!\nPlease make sure you have ofgwrite installed in /usr/bin/ofgwrite.\n\nExit plugin.'), type = MessageBox.TYPE_ERROR)
return False
if not os.path.exists(imagePath):
try:
os.mkdir(imagePath)
except:
pass
if os.path.exists(flashPath):
try:
os.system('rm -rf ' + flashPath)
except:
pass
try:
os.mkdir(flashPath)
except:
pass
return True
def quit(self):
self.close()
def green(self):
if self.check_hdd():
self.session.open(doFlashImage, online = False, list=self.list[self.selection], multi=self.multi, devrootfs=self.devrootfs)
else:
self.close()
def blue(self):
if self.check_hdd():
self.session.open(doFlashImage, online = True, list=self.list[self.selection], multi=self.multi, devrootfs=self.devrootfs)
else:
self.close()
def yellow(self):
if SystemInfo["HaveMultiBoot"]:
self.selection = self.selection + 1
if self.selection == len(self.list):
self.selection = 0
self["key_yellow"].setText(_(self.list[self.selection]))
self.multi = self.read_startup("/boot/" + self.list[self.selection]).split(".",1)[1].split(" ",1)[0]
self.multi = self.multi[-1:]
print "[Flash Online] MULTI:",self.multi
cmdline = self.read_startup("/boot/" + self.list[self.selection]).split("=",3)[3].split(" ",1)[0]
self.devrootfs = cmdline
print "[Flash Online] MULTI rootfs ", self.devrootfs
def read_startup(self, FILE):
file = FILE
with open(file, 'r') as myfile:
data=myfile.read().replace('\n', '')
myfile.close()
return data
def list_files(self, PATH):
files = []
if SystemInfo["HaveMultiBoot"]:
path = PATH
for name in os.listdir(path):
if name != 'bootname' and os.path.isfile(os.path.join(path, name)):
try:
cmdline = self.read_startup("/boot/" + name).split("=",3)[3].split(" ",1)[0]
except IndexError:
continue
cmdline_startup = self.read_startup("/boot/STARTUP").split("=",3)[3].split(" ",1)[0]
if (cmdline != cmdline_startup) and (name != "STARTUP"):
files.append(name)
files.insert(0,"STARTUP")
else:
files = "None"
return files
class doFlashImage(Screen):
skin = """
<screen position="center,center" size="560,500" title="ViX4E2PROJECT Image Flasher (select a image)">
<ePixmap position="10,467" zPosition="1" size="140,40" pixmap="skin_default/buttons/red.png" transparent="1" alphatest="on" />
<ePixmap position="140,467" zPosition="1" size="140,40" pixmap="skin_default/buttons/green.png" transparent="1" alphatest="on" />
<ePixmap position="280,467" zPosition="1" size="140,40" pixmap="skin_default/buttons/yellow.png" transparent="1" alphatest="on" />
<ePixmap position="420,467" zPosition="1" size="140,40" pixmap="skin_default/buttons/blue.png" transparent="1" alphatest="on" />
<widget source="key_red" render="Label" position="0,460" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;21" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_green" render="Label" position="140,460" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;21" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_yellow" render="Label" position="285,460" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;21" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_blue" render="Label" position="420,460" zPosition="2" size="140,40" valign="center" halign="center" font="skyreg;21" foregroundColor="skygold" backgroundColor="skydarkblue" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
<widget name="info-online" position="10,30" zPosition="1" size="450,100" font="skyreg;20" foregroundColor="skygold" backgroundColor="skydarkblue" halign="left" valign="top" transparent="1" /> <widget name="imageList" position="10,10" zPosition="1" size="450,450" font="skyreg;21" foregroundColor="skygold" backgroundColor="skydarkblue" scrollbarMode="showOnDemand" transparent="1" />
</screen>"""
def __init__(self, session, online, list=None, multi=None, devrootfs=None ):
Screen.__init__(self, session)
self.session = session
Screen.setTitle(self, _("ViX4E2PROJECT Image Flasher"))
self["key_green"] = Button(_("FLASH"))
self["key_red"] = Button(_("EXIT"))
self["key_blue"] = Button("")
self["key_yellow"] = Button("")
self["info-local"] = Label(_("Press green to flash highlighted image below, blue to delete image, yellow to browse to another image .zip file or red to exit.\n\nDefault local folder: /media/hdd/imagebackups"))
self.filename = None
self.imagelist = []
self.simulate = False
self.Online = online
self.List = list
self.multi=multi
self.devrootfs=devrootfs
self.imagePath = imagePath
self.feedurl = feedurl_atv
if image == 0:
self.feed = "atv"
else:
self.feed = "om"
self["imageList"] = MenuList(self.imagelist)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"green": self.green,
"yellow": self.yellow,
"red": self.quit,
"blue": self.blue,
"ok": self.green,
"cancel": self.quit,
}, -2)
self.onLayoutFinish.append(self.layoutFinished)
self.newfeed = None
if os.path.exists('/etc/enigma2/newfeed'):
self.newfeed = ReadNewfeed()
def quit(self):
if self.simulate or not self.List == "STARTUP":
fbClass.getInstance().unlock()
self.close()
def blue(self):
if self.Online:
if image == 1:
if self.feed == "atv":
self.feed = "om"
else:
self.feed = "atv"
else:
if self.feed == "atv":
self.feed = "atv2"
else:
self.feed = "atv"
self.layoutFinished()
return
sel = self["imageList"].l.getCurrentSelection()
if sel == None:
print"Nothing to select !!"
return
self.filename = sel
self.session.openWithCallback(self.RemoveCB, MessageBox, _("Do you really want to delete\n%s ?") % (sel), MessageBox.TYPE_YESNO)
def RemoveCB(self, ret):
if ret:
if os.path.exists(self.imagePath + "/" + self.filename):
os.remove(self.imagePath + "/" + self.filename)
self.imagelist.remove(self.filename)
self["imageList"].l.setList(self.imagelist)
def box(self):
box = getBoxType()
machinename = getMachineName()
if box in ('uniboxhd1', 'uniboxhd2', 'uniboxhd3'):
box = "ventonhdx"
elif box == 'odinm6':
box = getMachineName().lower()
elif box == "inihde" and machinename.lower() == "xpeedlx":
box = "xpeedlx"
elif box in ('xpeedlx1', 'xpeedlx2'):
box = "xpeedlx"
elif box == "inihde" and machinename.lower() == "hd-1000":
box = "sezam-1000hd"
elif box == "ventonhdx" and machinename.lower() == "hd-5000":
box = "sezam-5000hd"
elif box == "ventonhdx" and machinename.lower() == "premium twin":
box = "miraclebox-twin"
elif box == "xp1000" and machinename.lower() == "sf8 hd":
box = "sf8"
elif box.startswith('et') and not box in ('et8000', 'et8500', 'et8500s', 'et10000'):
box = box[0:3] + 'x00'
elif box == 'odinm9' and self.feed == "atv":
box = 'maram9'
return box
def green(self, ret = None):
sel = self["imageList"].l.getCurrentSelection()
if sel == None:
print"Nothing to select !!"
return
file_name = self.imagePath + "/" + sel
self.filename = file_name
self.sel = sel
box = self.box()
self.hide()
if self.Online:
url = self.feedurl + "/" + box + "/" + sel
print "[Flash Online] Download image: >%s<" % url
if self.newfeed:
self.feedurl = self.newfeed[0][:-1]
url = self.feedurl + "/" + box + "/" + sel
authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm()
authinfo.add_password(None, self.feedurl, self.newfeed[1][:-1], self.newfeed[2][:-1])
handler = urllib2.HTTPBasicAuthHandler(authinfo)
myopener = urllib2.build_opener(handler)
opened = urllib2.install_opener(myopener)
u = urllib2.urlopen(url)
total_size = int(u.info().getheaders("Content-Length")[0])
downloaded = 0
CHUNK = 256 * 1024
with open(file_name, 'wb') as fp:
while True:
chunk = u.read(CHUNK)
downloaded += len(chunk)
print "Downloading: %s Bytes of %s" % (downloaded, total_size)
if not chunk: break
fp.write(chunk)
self.ImageDownloadCB(False)
else:
try:
u = urllib2.urlopen(url)
f = open(file_name, 'wb')
f.close()
job = ImageDownloadJob(url, file_name, sel)
job.afterEvent = "close"
job_manager.AddJob(job)
job_manager.failed_jobs = []
self.session.openWithCallback(self.ImageDownloadCB, JobView, job, backgroundable = False, afterEventChangeable = False)
except urllib2.URLError as e:
print "[Flash Online] Download failed !!\n%s" % e
self.session.openWithCallback(self.ImageDownloadCB, MessageBox, _("Download Failed !!" + "\n%s" % e), type = MessageBox.TYPE_ERROR)
self.close()
else:
self.startInstallLocal(True)
def ImageDownloadCB(self, ret):
if ret:
return
if job_manager.active_job:
job_manager.active_job = None
self.close()
return
if len(job_manager.failed_jobs) == 0:
self.flashWithPostFlashActionMode = 'online'
self.flashWithPostFlashAction()
else:
self.session.open(MessageBox, _("Download Failed !!"), type = MessageBox.TYPE_ERROR)
def flashWithPostFlashAction(self, ret = True):
if ret:
print "flashWithPostFlashAction"
title =_("Please confirm if wish to continue with new flash or return to previous menu")
list = ((_("Confirmed! Flash new firmware and reboot"), "wizard"),
(_("Do not flash! Just return to previous menu"), "abort"))
self.session.openWithCallback(self.postFlashActionCallback, ChoiceBox,title=title,list=list,selection=self.SelectPrevPostFashAction())
else:
self.show()
def SelectPrevPostFashAction(self):
index = 0
Settings = False
AllPlugins = False
noPlugins = False
if os.path.exists('/media/hdd/images/config/settings'):
Settings = True
if os.path.exists('/media/hdd/images/config/plugins'):
AllPlugins = True
if os.path.exists('/media/hdd/images/config/noplugins'):
noPlugins = True
if Settings and noPlugins:
index = 1
elif Settings and not AllPlugins and not noPlugins:
index = 2
elif Settings and AllPlugins:
index = 3
return index
def postFlashActionCallback(self, answer):
print "postFlashActionCallback"
restoreSettings = False
restoreAllPlugins = False
restoreSettingsnoPlugin = False
if answer is not None:
if answer[1] == "restoresettings":
restoreSettings = True
if answer[1] == "restoresettingsnoplugin":
restoreSettings = True
restoreSettingsnoPlugin = True
if answer[1] == "restoresettingsandallplugins":
restoreSettings = True
restoreAllPlugins = True
if restoreSettings:
self.SaveEPG()
if answer[1] != "abort":
if restoreSettings:
try:
os.system('mkdir -p /media/hdd/images/config')
os.system('touch /media/hdd/images/config/settings')
except:
print "postFlashActionCallback: failed to create /media/hdd/images/config/settings"
else:
if os.path.exists('/media/hdd/images/config/settings'):
os.system('rm -f /media/hdd/images/config/settings')
if restoreAllPlugins:
try:
os.system('mkdir -p /media/hdd/images/config')
os.system('touch /media/hdd/images/config/plugins')
except:
print "postFlashActionCallback: failed to create /media/hdd/images/config/plugins"
else:
if os.path.exists('/media/hdd/images/config/plugins'):
os.system('rm -f /media/hdd/images/config/plugins')
if restoreSettingsnoPlugin:
try:
os.system('mkdir -p /media/hdd/images/config')
os.system('touch /media/hdd/images/config/noplugins')
except:
print "postFlashActionCallback: failed to create /media/hdd/images/config/noplugins"
else:
if os.path.exists('/media/hdd/images/config/noplugins'):
os.system('rm -f /media/hdd/images/config/noplugins')
if self.flashWithPostFlashActionMode == 'online':
self.unzip_image(self.filename, flashPath)
else:
self.startInstallLocalCB()
else:
self.show()
else:
self.show()
def unzip_image(self, filename, path):
print "Unzip %s to %s" %(filename,path)
self.session.openWithCallback(self.cmdFinished, Console, title = _("Unzipping files, Please wait ..."), cmdlist = ['unzip ' + filename + ' -o -d ' + path, "sleep 3"], closeOnSuccess = True)
def cmdFinished(self):
self.prepair_flashtmp(flashPath)
self.Start_Flashing()
def Start_Flashing(self):
print "Start Flashing"
cmdlist = []
if os.path.exists(ofgwritePath):
text = _("Flashing: ")
if self.simulate:
text += _("Simulate (no write)")
if SystemInfo["HaveMultiBoot"]:
cmdlist.append("%s -n -r -k -m%s %s > /dev/null 2>&1" % (ofgwritePath, self.multi, flashTmp))
else:
cmdlist.append("%s -n -r -k %s > /dev/null 2>&1" % (ofgwritePath, flashTmp))
self.close()
message = "echo -e '\n"
message += _('Show only found image and mtd partitions.\n')
message += "'"
else:
text += _("root and kernel")
if SystemInfo["HaveMultiBoot"]:
if not self.List == "STARTUP":
os.system('mkfs.ext4 -F ' + self.devrootfs)
cmdlist.append("%s -r -k -m%s %s > /dev/null 2>&1" % (ofgwritePath, self.multi, flashTmp))
if not self.List == "STARTUP":
cmdlist.append("umount -fl /oldroot_bind")
cmdlist.append("umount -fl /newroot")
else:
cmdlist.append("%s -r -k %s > /dev/null 2>&1" % (ofgwritePath, flashTmp))
message = "echo -e '\n"
if not self.List == "STARTUP" and SystemInfo["HaveMultiBoot"]:
message += _('ofgwrite flashing ready.\n')
message += _('please press exit to go back to the menu.\n')
else:
message += _('ofgwrite will stop enigma2 now to run the flash.\n')
message += _('Your STB will freeze during the flashing process.\n')
message += _('Please: DO NOT reboot your STB and turn off the power.\n')
message += _('The image or kernel will be flashing and auto booted in few minutes.\n')
if self.box() == 'gb800solo':
message += _('GB800SOLO takes about 20 mins !!\n')
message += "'"
cmdlist.append(message)
self.session.open(Console, title = text, cmdlist = cmdlist, finishedCallback = self.quit, closeOnSuccess = False)
if not self.simulate:
fbClass.getInstance().lock()
if not self.List == "STARTUP":
self.close()
def prepair_flashtmp(self, tmpPath):
if os.path.exists(flashTmp):
flashTmpold = flashTmp + 'old'
os.system('mv %s %s' %(flashTmp, flashTmpold))
os.system('rm -rf %s' %flashTmpold)
if not os.path.exists(flashTmp):
os.mkdir(flashTmp)
kernel = True
rootfs = True
for path, subdirs, files in os.walk(tmpPath):
for name in files:
if name.find('kernel') > -1 and name.endswith('.bin') and kernel:
binfile = os.path.join(path, name)
dest = flashTmp + '/%s' %KERNELBIN
shutil.copyfile(binfile, dest)
kernel = False
elif name.find('root') > -1 and (name.endswith('.bin') or name.endswith('.jffs2') or name.endswith('.bz2')) and rootfs:
binfile = os.path.join(path, name)
dest = flashTmp + '/%s' %ROOTFSBIN
shutil.copyfile(binfile, dest)
rootfs = False
elif name.find('uImage') > -1 and kernel:
binfile = os.path.join(path, name)
dest = flashTmp + '/uImage'
shutil.copyfile(binfile, dest)
kernel = False
elif name.find('e2jffs2') > -1 and name.endswith('.img') and rootfs:
binfile = os.path.join(path, name)
dest = flashTmp + '/e2jffs2.img'
shutil.copyfile(binfile, dest)
rootfs = False
def yellow(self):
if not self.Online:
self.session.openWithCallback(self.DeviceBrowserClosed, DeviceBrowser, None, matchingPattern="^.*\.(zip|bin|jffs2|img)", showDirectories=True, showMountpoints=True, inhibitMounts=["/autofs/sr0/"])
else:
from Plugins.SystemPlugins.SoftwareManager.BackupRestore import BackupScreen
self.session.openWithCallback(self.green,BackupScreen, runBackup = True)
def startInstallLocal(self, ret = None):
self.flashWithPostFlashActionMode = 'local'
self.flashWithPostFlashAction()
def startInstallLocalCB(self, ret = None):
if self.sel == str(flashTmp):
self.Start_Flashing()
else:
self.unzip_image(self.filename, flashPath)
def DeviceBrowserClosed(self, path, filename, binorzip):
if path:
print path, filename, binorzip
strPath = str(path)
if strPath[-1] == '/':
strPath = strPath[:-1]
self.imagePath = strPath
if os.path.exists(flashTmp):
os.system('rm -rf ' + flashTmp)
os.mkdir(flashTmp)
if binorzip == 0:
for files in os.listdir(self.imagePath):
if files.endswith(".bin") or files.endswith('.jffs2') or files.endswith('.img'):
self.prepair_flashtmp(strPath)
break
self.Start_Flashing()
elif binorzip == 1:
self.unzip_image(strPath + '/' + filename, flashPath)
else:
self.layoutFinished()
else:
self.imagePath = imagePath
def layoutFinished(self):
box = self.box()
self.imagelist = []
if self.Online:
self["key_yellow"].setText("Backup&Flash")
if image == 1:
if self.feed == "atv":
self.feedurl = feedurl_atv
self["key_blue"].setText("openMIPS")
else:
self.feedurl = feedurl_om
self["key_blue"].setText("openATV")
else:
if self.feed == "atv":
self.feedurl = feedurl_atv
self["key_blue"].setText("ATV %s" %ImageVersion2)
else:
self.feedurl = feedurl_atv2
self["key_blue"].setText("ATV %s" %ImageVersion)
url = '%s/index.php?open=%s' % (self.feedurl,box)
try:
req = urllib2.Request(url)
if self.newfeed:
self.feedurl = self.newfeed[0][:-1]
url = '%s/index.php?open=%s' % (self.feedurl,box)
authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm()
authinfo.add_password(None, self.feedurl, self.newfeed[1][:-1], self.newfeed[2][:-1])
handler = urllib2.HTTPBasicAuthHandler(authinfo)
myopener = urllib2.build_opener(handler)
opened = urllib2.install_opener(myopener)
response = urllib2.urlopen(url)
else:
response = urllib2.urlopen(req)
except urllib2.URLError as e:
print "URL ERROR: %s\n%s" % (e,url)
self["imageList"].l.setList(self.imagelist)
return
try:
the_page = response.read()
except urllib2.HTTPError as e:
print "HTTP download ERROR: %s" % e.code
return
lines = the_page.split('\n')
tt = len(box)
for line in lines:
if line.find("<a href='%s/" % box) > -1:
t = line.find("<a href='%s/" % box)
if self.feed == "atv" or self.feed == "atv2":
self.imagelist.append(line[t+tt+10:t+tt+tt+39])
else:
self.imagelist.append(line[t+tt+10:t+tt+tt+40])
else:
self["key_blue"].setText(_("DELETE"))
self["key_yellow"].setText(_("BROWSE"))
for name in os.listdir(self.imagePath):
if name.endswith(".zip"): # and name.find(box) > 1:
self.imagelist.append(name)
self.imagelist.sort()
if os.path.exists(flashTmp):
os.system('cd /media/hdd/flash1 && rm -rf *')
if os.path.exists(flashPath):
os.system('cd /media/hdd/flash2 && rm -rf *')
self["imageList"].l.setList(self.imagelist)
def SaveEPG(self):
from enigma import eEPGCache
epgcache = eEPGCache.getInstance()
epgcache.save()
class ImageDownloadJob(Job):
def __init__(self, url, filename, file):
Job.__init__(self, _("Downloading %s") %file)
ImageDownloadTask(self, url, filename)
class DownloaderPostcondition(Condition):
def check(self, task):
return task.returncode == 0
def getErrorMessage(self, task):
return self.error_message
class ImageDownloadTask(Task):
def __init__(self, job, url, path):
Task.__init__(self, job, _("Downloading"))
self.postconditions.append(DownloaderPostcondition())
self.job = job
self.url = url
self.path = path
self.error_message = ""
self.last_recvbytes = 0
self.error_message = None
self.download = None
self.aborted = False
def run(self, callback):
self.callback = callback
self.download = downloadWithProgress(self.url,self.path)
self.download.addProgress(self.download_progress)
self.download.start().addCallback(self.download_finished).addErrback(self.download_failed)
print "[ImageDownloadTask] downloading", self.url, "to", self.path
def abort(self):
print "[ImageDownloadTask] aborting", self.url
if self.download:
self.download.stop()
self.aborted = True
def download_progress(self, recvbytes, totalbytes):
if ( recvbytes - self.last_recvbytes ) > 100000: # anti-flicker
self.progress = int(100*(float(recvbytes)/float(totalbytes)))
self.name = _("Downloading") + ' ' + _("%d of %d kBytes") % (recvbytes/1024, totalbytes/1024)
self.last_recvbytes = recvbytes
def download_failed(self, failure_instance=None, error_message=""):
self.error_message = error_message
if error_message == "" and failure_instance is not None:
self.error_message = failure_instance.getErrorMessage()
Task.processFinished(self, 1)
def download_finished(self, string=""):
if self.aborted:
self.finish(aborted = True)
else:
Task.processFinished(self, 0)
class DeviceBrowser(Screen, HelpableScreen):
skin = """
<screen name="DeviceBrowser" position="center,center" size="520,430" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="message" render="Label" position="5,50" size="510,150" font="Regular;16" />
<widget name="filelist" position="5,210" size="510,220" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, startdir, message="", showDirectories = True, showFiles = True, showMountpoints = True, matchingPattern = "", useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
Screen.__init__(self, session)
HelpableScreen.__init__(self)
Screen.setTitle(self, _("Browse to image .zip file"))
self["key_red"] = Button(_("EXIT"))
self["key_green"] = Button(_("FLASH"))
self["message"] = Button(message)
self.filelist = FileList(startdir, showDirectories = showDirectories, showFiles = showFiles, showMountpoints = showMountpoints, matchingPattern = matchingPattern, useServiceRef = useServiceRef, inhibitDirs = inhibitDirs, inhibitMounts = inhibitMounts, isTop = isTop, enableWrapAround = enableWrapAround, additionalExtensions = additionalExtensions)
self["filelist"] = self.filelist
self["FilelistActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"green": self.use,
"red": self.exit,
"ok": self.ok,
"cancel": self.exit
})
hotplugNotifier.append(self.hotplugCB)
self.onShown.append(self.updateButton)
self.onClose.append(self.removeHotplug)
def hotplugCB(self, dev, action):
print "[hotplugCB]", dev, action
self.updateButton()
def updateButton(self):
if self["filelist"].getFilename() or self["filelist"].getCurrentDirectory():
self["key_green"].text = _("FLASH")
else:
self["key_green"].text = ""
def removeHotplug(self):
print "[removeHotplug]"
hotplugNotifier.remove(self.hotplugCB)
def ok(self):
if self.filelist.canDescent():
if self["filelist"].showMountpoints == True and self["filelist"].showDirectories == False:
self.use()
else:
self.filelist.descent()
def use(self):
print "[use]", self["filelist"].getCurrentDirectory(), self["filelist"].getFilename()
if self["filelist"].getFilename() is not None and self["filelist"].getCurrentDirectory() is not None:
if self["filelist"].getFilename().endswith(".bin") or self["filelist"].getFilename().endswith(".jffs2"):
self.close(self["filelist"].getCurrentDirectory(), self["filelist"].getFilename(), 0)
elif self["filelist"].getFilename().endswith(".zip"):
self.close(self["filelist"].getCurrentDirectory(), self["filelist"].getFilename(), 1)
else:
return
def exit(self):
self.close(False, False, -1)
| gpl-2.0 | -8,050,462,198,954,369,000 | 38.818887 | 409 | 0.6782 | false | 3.084787 | true | false | false |
Alignak-monitoring-contrib/alignak-app | alignak_app/qobjects/service/services.py | 1 | 10854 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2018:
# Matthieu Estrada, [email protected]
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
"""
Services
++++++++
Services manage creation of QWidget to display the services
"""
from logging import getLogger
from operator import itemgetter
from PyQt5.Qt import QTreeWidget, QTreeWidgetItem, QWidget, QIcon, QGridLayout, QSize, QListWidget
from PyQt5.Qt import Qt, QListWidgetItem
from alignak_app.backend.datamanager import data_manager
from alignak_app.items.item import get_icon_name
from alignak_app.utils.config import settings
from alignak_app.qobjects.common.frames import get_frame_separator
from alignak_app.qobjects.service.tree_item import ServiceTreeItem
from alignak_app.qobjects.service.services_dashboard import ServicesDashboardQWidget
from alignak_app.qobjects.service.service import ServiceDataQWidget
logger = getLogger(__name__)
class ServicesQWidget(QWidget):
"""
Class wo create services QWidget
"""
def __init__(self, parent=None):
super(ServicesQWidget, self).__init__(parent)
# Fields
self.services = None
self.services_tree_widget = QTreeWidget()
self.services_list_widget = QListWidget()
self.service_data_widget = ServiceDataQWidget()
self.services_dashboard = ServicesDashboardQWidget()
def initialize(self):
"""
Initialize QWidget
"""
layout = QGridLayout()
self.setLayout(layout)
layout.setContentsMargins(0, 0, 0, 0)
# Services dashboard
self.services_dashboard.initialize()
for state in self.services_dashboard.states_btns:
self.services_dashboard.states_btns[state].clicked.connect(
lambda _, s=state: self.filter_services(state=s)
)
layout.addWidget(self.services_dashboard, 0, 0, 1, 2)
layout.addWidget(get_frame_separator(), 1, 0, 1, 2)
# Services QTreeWidget
self.services_tree_widget.setIconSize(QSize(32, 32))
self.services_tree_widget.setAlternatingRowColors(True)
self.services_tree_widget.header().close()
layout.addWidget(self.services_tree_widget, 2, 0, 1, 1)
# Services QListWidget
self.services_list_widget.clicked.connect(self.update_service_data)
self.services_list_widget.hide()
layout.addWidget(self.services_list_widget, 2, 0, 1, 1)
# Service DataWidget
self.service_data_widget.initialize()
layout.addWidget(self.service_data_widget, 2, 1, 1, 1)
def filter_services(self, state):
"""
Filter services with the wanted state
:param state: state of service: OK, WARNING, NOT_MONITORED, DOWNTIME
:return:
"""
# Clear QListWidget and update filter buttons of services dashboard
self.services_list_widget.clear()
for btn_state in self.services_dashboard.states_btns:
if btn_state != state:
self.services_dashboard.states_btns[btn_state].setChecked(False)
# Update QWidgets
if self.sender().isChecked():
self.set_filter_items(state)
self.services_tree_widget.hide()
self.services_list_widget.show()
else:
self.services_tree_widget.show()
self.services_list_widget.hide()
def set_filter_items(self, state):
"""
Add filter items to QListWidget corresponding to "state"
:param state: state of service to filter
:type state: str
"""
services_added = False
if state in 'NOT_MONITORED':
for service in self.services:
if not service.data['active_checks_enabled'] and \
                        not service.data['passive_checks_enabled'] and \
not service.data['ls_downtimed'] and \
not service.data['ls_acknowledged']:
self.add_filter_item(service)
services_added = True
elif state in 'DOWNTIME':
for service in self.services:
if service.data['ls_downtimed']:
self.add_filter_item(service)
services_added = True
elif state in 'ACKNOWLEDGE':
for service in self.services:
if service.data['ls_acknowledged']:
self.add_filter_item(service)
services_added = True
else:
for service in self.services:
if service.data['ls_state'] in state:
self.add_filter_item(service)
services_added = True
if not services_added:
not_added_item = QListWidgetItem()
not_added_item.setData(Qt.DecorationRole, QIcon(settings.get_image('services_ok')))
not_added_item.setData(Qt.DisplayRole, _('No such services to display...'))
self.services_list_widget.addItem(not_added_item)
def add_filter_item(self, filter_item):
"""
Add filter item to QListWidget
:param filter_item: filter item (service)
:type filter_item: alignak_app.items.service.Service
"""
item = QListWidgetItem()
monitored = \
filter_item.data['passive_checks_enabled'] + filter_item.data['active_checks_enabled']
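        # "monitored" is the sum of the two boolean check flags (0, 1 or 2),
        # used by get_icon_name() to pick the matching icon.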
icon_name = get_icon_name(
filter_item.item_type,
filter_item.data['ls_state'],
filter_item.data['ls_acknowledged'],
filter_item.data['ls_downtimed'],
monitored
)
item.setData(Qt.DecorationRole, QIcon(settings.get_image(icon_name)))
item.setData(Qt.DisplayRole, filter_item.get_display_name())
item.setData(Qt.UserRole, filter_item.item_id)
item.setToolTip(filter_item.get_tooltip())
self.services_list_widget.addItem(item)
def update_widget(self, services):
"""
Update the QTreeWidget and its items
:param services: list of :class:`Services <alignak_app.items.service.Service>` items
:type services: list
"""
self.services = services
# Update services dashboard
self.services_dashboard.update_widget(self.services)
# Clear QTreeWidget
self.services_tree_widget.clear()
self.services_tree_widget.setIconSize(QSize(16, 16))
if self.services:
            # Use "Global" as the aggregation for services that have none
for service in self.services:
if not service.data['aggregation']:
service.data['aggregation'] = 'Global'
            # Sort services by state, then acknowledged flag, then aggregation
newlist = sorted(
self.services,
key=lambda s: itemgetter('ls_state', 'ls_acknowledged', 'aggregation')(s.data)
)
self.services = newlist
# Get list of aggregations
aggregations = []
for service in self.services:
if service.data['aggregation'] not in aggregations:
aggregations.append(service.data['aggregation'])
# Add QTreeWidgetItems
for aggregation in aggregations:
main_tree = QTreeWidgetItem()
main_tree.setText(0, aggregation)
main_tree.setIcon(0, QIcon(settings.get_image('tree')))
main_tree.setToolTip(0, aggregation)
for service in self.services:
if service.data['aggregation'] == aggregation:
service_tree = ServiceTreeItem()
service_tree.initialize(service)
service_tree.setToolTip(0, service.get_tooltip())
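                        # Note: this connect() is called once per service item,
                        # so update_service_data() gets connected repeatedly;
                        # connecting once outside the loop would avoid that.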
self.services_tree_widget.clicked.connect(self.update_service_data)
main_tree.addChild(service_tree)
self.services_tree_widget.addTopLevelItem(main_tree)
self.service_data_widget.hide()
else:
# If no services, reset service item to None and hide data widget
self.service_data_widget.service_item = None
self.service_data_widget.hide()
def update_service_data(self): # pragma: no cover
"""
        Update ServiceDataQWidget
"""
service_item = self.sender().currentItem()
if isinstance(service_item, (ServiceTreeItem, QListWidgetItem)):
service = None
# Get service
if isinstance(service_item, ServiceTreeItem):
service = data_manager.get_item('service', '_id', service_item.service_id)
elif isinstance(service_item, QListWidgetItem):
service = data_manager.get_item('service', '_id', service_item.data(Qt.UserRole))
if not service:
service = self.service_data_widget.service_item
# Update QWidgets
self.services_tree_widget.setMaximumWidth(self.width() * 0.5)
self.services_list_widget.setMaximumWidth(self.width() * 0.5)
self.service_data_widget.setMaximumWidth(self.width() * 0.5)
self.service_data_widget.update_widget(service)
self.services_dashboard.update_widget(self.services)
self.service_data_widget.show()
# Update Service Items (ServiceTreeItem, QListWidgetItem)
if isinstance(service_item, ServiceTreeItem):
service_item.update_item()
else:
monitored = \
service.data['passive_checks_enabled'] + service.data['active_checks_enabled']
icon_name = get_icon_name(
'service',
service.data['ls_state'],
service.data['ls_acknowledged'],
service.data['ls_downtimed'],
monitored
)
service_item.setData(Qt.DecorationRole, QIcon(settings.get_image(icon_name)))
service_item.setData(Qt.DisplayRole, service.get_display_name())
service_item.setToolTip(service.get_tooltip())
| agpl-3.0 | 7,076,940,687,448,298,000 | 37.626335 | 98 | 0.608163 | false | 4.251469 | false | false | false |
CaliOpen/CaliOpen | src/backend/tools/py.migrate/caliopen_migrate/shards.py | 1 | 3022 | import logging
from caliopen_storage.config import Configuration
from caliopen_storage.helpers.connection import get_index_connection
from caliopen_main.user.core.setups import setup_shard_index
from caliopen_main.user.core import User
log = logging.getLogger(__name__)
def delete_all_shards(dry_run=True):
"""Delete all index shards."""
client = get_index_connection()
shards = Configuration('global').get('elasticsearch.shards')
for shard in shards:
log.info('Processing shard {}'.format(shard))
if not shard.startswith('caliopen-'):
log.warn('Invalid shard name, pass')
continue
if not client.indices.exists(shard):
log.warn('Shard does not exist')
continue
if dry_run:
log.info('Delete shard but dry run do not touch')
else:
client.indices.delete(shard)
log.info('Index {} deleted'.format(shard))
def create_all_shards(dry_run=True):
"""Create all needed index shards."""
client = get_index_connection()
shards = Configuration('global').get('elasticsearch.shards')
for shard_id in shards:
if not client.indices.exists(shard_id):
log.info('Creating shard {}'.format(shard_id))
if not dry_run:
setup_shard_index(shard_id)
def recreate_user_alias(client, user, dry_run=True):
"""Create an index alias mapping user_id -> shard_id."""
if not user.shard_id:
log.error('No shard for user {}'.format(user.user_id))
return False
shards = Configuration('global').get('elasticsearch.shards')
alias_exists = False
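    # When an alias already exists, drop entries pointing at indices that are
    # no longer configured shards; keep the one pointing at a real shard.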
if client.indices.exists_alias(name=user.user_id):
alias = client.indices.get_alias(name=user.user_id)
for index, alias_infos in alias.items():
if index not in shards:
if not dry_run:
client.indices.delete_alias(index=index, name=user.user_id)
else:
log.info('Alias exist {} with index {}, should delete'.
format(user.user_id, index))
else:
log.info('Alias on shard exist, skipping')
alias_exists = True
if alias_exists:
return True
if not dry_run:
body = {'filter': {'term': {'user_id': user.user_id}}}
try:
client.indices.put_alias(index=user.shard_id,
name=user.user_id,
body=body)
except Exception as exc:
log.exception('Error during alias creation for user {} : {}'.
format(user.user_id, exc))
return False
else:
log.info('Should create alias {}'.format(user.user_id))
return True
def recreate_all_user_aliases(dry_run=True):
"""Recreate alias for all users."""
client = get_index_connection()
for user in User._model_class.all():
recreate_user_alias(client, user, dry_run)
| gpl-3.0 | 7,146,484,076,049,893,000 | 35.409639 | 79 | 0.589676 | false | 4.117166 | false | false | false |
smurfix/pybble | pybble/blueprint/_root/part/usertracker.py | 1 | 1356 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division, unicode_literals
##
## This is part of Pybble, a WMS (Whatever Management System) based on
## Jinja2/Haml, Werkzeug, Flask, and Optimism.
##
## Pybble is Copyright © 2009-2014 by Matthias Urlichs <[email protected]>,
## it is licensed under the GPLv3. See the file `README.md` for details,
## including an optimistic statement by the author.
##
## This paragraph is auto-generated and may self-destruct at any time,
## courtesy of "make update". The original is in ‘utils/_boilerplate.py’.
## Thus, please do not remove the next line, or insert any blank lines.
##BP
from flask import request, session
from pybble.core.models.tracking import UserTracker
from pybble.core.db import db
from pybble.render import render_template
from .._base import expose
expose = expose.sub("part.usertracker")
from datetime import datetime,timedelta
from time import time
###
### Tracking
###
@expose("/changes")
def view_all():
user = request.user
f = (UserTracker.user == user)
last = session.get("chg_",None)
if last and time()-last[0] < 2*60:
pass
else:
session["chg_"] = (int(time()), user.feed_read)
user.feed_read = datetime.utcnow()
return render_template("changelist.html", changes=UserTracker.q.filter(f).order_by(UserTracker.id.desc())[0:30])
| gpl-3.0 | 796,936,370,807,972,100 | 29.704545 | 113 | 0.721688 | false | 3.216667 | false | false | false |
jonathanmorgan/django_reference_data | examples/ref_domain-from_listofnewspapers-Texas.py | 1 | 6532 | # imports
# urllib
import datetime
import urllib2
# beautifulsoup 4
from bs4 import BeautifulSoup
# python_utilties
#import python_utilities.beautiful_soup.beautiful_soup_helper
# django_reference_data
import django_reference_data.models
#===============================================================================#
# declare variables
#===============================================================================#
# declare variables - tracking performance
start_dt = None
end_dt = None
domain_counter = -1
no_match_counter = -1
error_counter = -1
my_exception_helper = None
# declare variables
do_update_existing = True
# processing state list.
state_name = ""
state_url = ""
state_file_path = ""
state_file = None
# processing a state's page.
state_html = None
state_bs = None
state_paper_list = None
state_paper_li = None
paper_name = ""
paper_url = ""
current_domain_instance = None
paper_counter = -1
# fields we collect per domain.
bs_helper = None
current_domain_name = ""
slash_index = ""
current_domain_path = ""
current_description = ""
current_source = ""
current_source_details = ""
current_domain_type = ""
current_is_news = True
current_rank = -1
#===============================================================================#
# Code
#===============================================================================#
# capture start datetime, initialize counters
start_dt = datetime.datetime.now()
domain_counter = 0
no_match_counter = 0
error_counter = 0
# init beautiful soup helper
#bs_helper = python_utilities.beautiful_soup.beautiful_soup_helper.BeautifulSoupHelper()
# clean out broken texas domain rows.
'''
DELETE from `django_reference_data_reference_domain`
WHERE source_details LIKE '%in-texas%';
'''
state_name = "Texas"
state_url = "http://www.listofnewspapers.com/en/north-america/texan-newspapers-in-texas.html"
state_file_path = "texan-newspapers-in-texas-TIDY.html"
# print next state:
print( "==> processing " + state_name + ": " + state_file_path )
# load the state's HTML
state_file = open( state_file_path, "r" )
state_html = state_file.read()
# let BeautifulSoup parse it.
state_bs = BeautifulSoup( state_html, "html.parser" )
# get list of papers.
state_paper_list = state_bs.find_all( "li", "linewspapers" )
print( "- paper count: " + str( len( state_paper_list ) ) )
# loop over papers.
paper_counter = 0
for state_paper_li in state_paper_list:
paper_counter += 1
domain_counter += 1
print( "- paper " + str( paper_counter ) + ": " + str( state_paper_li ) )
# get values
paper_name = state_paper_li.get_text()
paper_url = state_paper_li.a[ 'href' ]
print( " - " + paper_name + ": " + paper_url )
# collect information - init
current_domain_name = ""
slash_index = ""
current_domain_path = ""
current_description = ""
current_source = ""
current_source_details = ""
current_domain_type = ""
current_is_news = True
current_rank = -1
# description
current_description = paper_name
# parse out domain and path
current_domain_name = django_reference_data.models.Reference_Domain.parse_URL( paper_url, django_reference_data.models.Reference_Domain.URL_PARSE_RETURN_DOMAIN )
current_domain_path = django_reference_data.models.Reference_Domain.parse_URL( paper_url, django_reference_data.models.Reference_Domain.URL_PARSE_RETURN_PATH )
# no rank
# always the same for these.
current_source = "listofnewspapers.com"
current_source_details = state_url
current_domain_type = django_reference_data.models.Reference_Domain.DOMAIN_TYPE_NEWS
current_is_news = True
# get Reference_Domain instance
# update existing?
if ( do_update_existing == True ):
try:
# first, try looking up existing domain.
#domain_rs = django_reference_data.models.Reference_Domain.objects.filter( source = current_source )
#domain_rs = domain_rs.filter( domain_name = current_domain_name )
#current_domain_instance = domain_rs.get( domain_path = current_domain_path )
# use lookup_record() method. Returns None if
# not found.
current_domain_instance = django_reference_data.models.Reference_Domain.lookup_record( source_IN = current_source, domain_name_IN = current_domain_name, domain_path_IN = current_domain_path )
# got anything?
if ( current_domain_instance == None ):
# nothing returned. Create new instance.
current_domain_instance = django_reference_data.models.Reference_Domain()
no_match_counter += 1
#-- END check to see if domain found --#
except:
# No matching row. Create new instance.
current_domain_instance = django_reference_data.models.Reference_Domain()
no_match_counter += 1
#-- END attempt to get existing row. --#
else:
# not updating. Just create new instance.
current_domain_instance = django_reference_data.models.Reference_Domain()
#-- END check to see if we update existing. --#
# set values
#current_domain_instance.domain_name = current_domain_name
#current_domain_instance.domain_path = current_domain_path
#current_domain_instance.long_name = None
# parse and store the URL information.
current_domain_instance.parse_and_store_URL( paper_url )
current_domain_instance.description = current_description
current_domain_instance.source = current_source
current_domain_instance.source_details = current_source_details
current_domain_instance.domain_type = current_domain_type
current_domain_instance.is_news = current_is_news
#current_domain_instance.is_multimedia = False
#current_domain_instance.rank = current_rank
current_domain_instance.state = state_name
#current_domain_instance.county = ""
#current_domain_instance.city = ""
#current_domain_instance.zip_code = ""
# save
current_domain_instance.save()
#-- END loop over papers. --#
# a little overview
end_dt = datetime.datetime.now()
print( "==> Started at " + str( start_dt ) )
print( "==> Finished at " + str( end_dt ) )
print( "==> Duration: " + str( end_dt - start_dt ) )
print( "==> Domains: " + str( domain_counter ) )
print( "==> No Match: " + str( no_match_counter ) )
print( "==> Errors: " + str( error_counter ) ) | gpl-3.0 | 5,729,771,921,642,360,000 | 30.109524 | 203 | 0.629822 | false | 3.692482 | false | false | false |
violarium/file-pyncoder | pyncoder.py | 1 | 1229 | import lib.files
import lib.filters
import os
import sys
import argparse
parser = argparse.ArgumentParser(description='Convert file encodings.')
# the target - file or directory
parser.add_argument('--target', '-t', action='store', type=str, required=True)
# converter options
parser.add_argument('--in-encoding', '-i', action='store', required=True)
parser.add_argument('--out-encoding', '-o', action='store', required=True)
parser.add_argument('--keep-backup', '-k', action='store_true', default=True)
# the regular expressions: to include and exclude files
parser.add_argument('--regexp', '-r', action='store')
parser.add_argument('--ng-regexp', '-nr', action='store')
# the extensions: can include or exclude extensions
group = parser.add_mutually_exclusive_group()
group.add_argument('--extensions', '-e', action='store')
group.add_argument('--ng-extensions', '-ne', action='store')
args = parser.parse_args()
# check whether file or directory
if os.path.isdir(args.target):
pass
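    # Directory targets are accepted but not handled in this snippet; only
    # single files are converted below.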
elif os.path.isfile(args.target):
lib.files.change_file_encoding(args.target, args.in_encoding, args.out_encoding, args.keep_backup)
pass
else:
print "There are no file or directory '%s'" % args.target
sys.exit(1)
| mit | 852,558,385,694,625,700 | 32.216216 | 102 | 0.716843 | false | 3.511429 | false | true | false |
ypid/series60-remote | pc/window/mainwindow.py | 1 | 51261 | # -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2010 Lukas Hetzenecker <[email protected]>
import sys
import re
import base64
import copy
import distutils.version
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import window.contacts_edit
import window.contacts_import
import window.settings
import window.history
import window.statistics
import window.export
import window.message_queue
import window.import_messages
import window.about
import window.favorites
import widget.SortedTreeWidgetItem
import widget.SortedListWidgetItem
import ui.ui_main
import ui.ui_mobileNumberSelect
import ui.ui_mobileNumberNotFound
import ui.ui_connection_failed
import ui.ui_connection_version_mismatch
import ui.ui_connection_update_version
import ui.resource_rc
import lib.update_checker
import lib.favorites
import lib.obex_handler
import lib.obex_scheduler
import lib.obex_wrapper
from lib.classes import *
LINUX= "qt_x11_wait_for_window_manager" in dir()
class MainWindow(QMainWindow, ui.ui_main.Ui_MainWindow):
def __init__(self, parent, main):
super(MainWindow, self).__init__(parent)
self.parent = parent
self.main = main
self.log = main.log
self.connection = main.connection
self.database = main.database
self.settings = main.settings
self.helper = main.helper
# Refresh all 10 minutes the device information
self.refreshTimer = QTimer(self)
self.refreshTimer.setInterval(600000)
self.setupUi(self)
# Favorites menu
self.contactMenu = self.menu_Contacts
self.favMenu = lib.favorites.FavoriteMenu(self.contactMenu, main)
# Change color of the ListWidget to a normal background color and make the highlight color lighter
pal = QPalette()
pal.setColor(QPalette.Base, self.palette().color(QPalette.Window))
pal.setColor(QPalette.Highlight, QColor(38, 136, 240))
self.listWidget.setPalette(pal)
# Add menu to "Import contacts" button
self.importMenu = QMenu(self)
self.importVcardAction = self.importMenu.addAction(QIcon(":/text-x-vcard"), self.tr("Import &Vcard file..."))
self.importLdifAction = self.importMenu.addAction(QIcon(":/text-x-ldif"), self.tr("Import &LDIF file..."))
self.contactsImportButton.setMenu(self.importMenu)
# Restore size, position and splitter states from previous saved value
windowSize = self.settings.setting("windows/main/size")
windowPosition = self.settings.setting("windows/main/position")
messagesSplitter = self.settings.setting("windows/main/messagesSplitter")
contactsSplitter = self.settings.setting("windows/main/contactsSplitter")
if windowSize.isValid():
self.resize(windowSize)
if not windowPosition.isNull():
self.move(windowPosition)
if not messagesSplitter.isNull():
self.messagesSplitter.restoreState(messagesSplitter)
if not contactsSplitter.isNull():
self.contactsSplitter.restoreState(contactsSplitter)
self.newMessagesComplete = False
self.queueMessages = 0
self.fillTypeBox = True
self.connectionAttemptByUser = True
self.connectionClosedByUser = False
self.deviceScanner = self.connection.scanner()
self.automaticConnectionTimer = QTimer()
self.emptyPixmap = QPixmap(16, 16)
self.emptyPixmap.fill(Qt.transparent)
self.emptyIcon = QIcon(self.emptyPixmap)
self.contactIsRecipientIcon = QIcon(":/dialog-apply")
self.fileWidget.updateActions()
# Load the contacts and devices when the event loop is started and all other events are handled
# This results in a faster startup (saved ~274ms)
self.loadSettings()
self.showFavorites()
QTimer.singleShot(0, self.loadUpdateChecker)
QTimer.singleShot(0, lambda : self.loadAutomaticConnection(True))
self.adjustSize()
self.connect(self.main, SIGNAL("favoriteListChanged"), self.showFavorites)
self.connect(self.main, SIGNAL("updateContact"), self.updateContact)
self.connect(self.contactMenu, SIGNAL("triggered(QAction *)"), self.favoriteClicked)
self.connect(self.settingsAction, SIGNAL("triggered()"), self.showSettings)
self.connect(self.exportAction, SIGNAL("triggered()"), self.showExportDialog)
self.connect(self.quitAction, SIGNAL("triggered()"), self.quit)
self.connect(self.aboutApplicationAction, SIGNAL("triggered()"), self.showAboutDialog)
self.connect(self.aboutQtAction, SIGNAL("triggered()"), self.main.app.aboutQt)
self.connect(self.historyAction, SIGNAL("triggered()"), self.showHistory)
self.connect(self.statisticsAction, SIGNAL("triggered()"), self.showStatistics)
self.connect(self.messageQueueAction, SIGNAL("triggered()"), self.showMessageQueue)
self.connect(self.importMessagesAction, SIGNAL("triggered()"), self.showImportMessages)
self.connect(self.logAction, SIGNAL("triggered()"), self.showLog)
self.connect(self.donateAction, SIGNAL("triggered()"), self.openDonateWebsite)
self.connect(self.connectButton, SIGNAL("clicked()"), self.connectToDevice)
self.connect(self.messageText, SIGNAL("sendMessage"), self.sendButton, SLOT("animateClick()"))
self.connect(self.sendButton, SIGNAL("clicked()"), self.sendMessage)
self.connect(self.refreshButton, SIGNAL("clicked()"), self.refreshSysinfo)
self.connect(self.refreshTimer, SIGNAL("timeout()"), self.refreshSysinfo)
self.connect(self.listWidget, SIGNAL("itemSelectionChanged()"), self.checkPosition)
self.connect(self.stackedWidget, SIGNAL("currentChanged(int)"),
lambda: self.searchLine.setSearchText() or self.searchLine_2.setSearchText())
self.connect(self.stackedWidget, SIGNAL("currentChanged(int)"), self.checkFiles)
self.connect(self.disconnectButton, SIGNAL("clicked()"), self.closeConnection)
self.connect(self.cancelButton, SIGNAL("clicked()"), self.closeConnection)
self.connect(self.messageText, SIGNAL("textChanged()"), self.textChanged)
self.connect(self.toLine, SIGNAL("textEdited(const QString &)"), self.recipientChanged)
self.connect(self.contactsTree,SIGNAL("customContextMenuRequested(QPoint)"),self.showCustomContextMenu)
self.connect(self.contactsTree,SIGNAL("itemActivated(QTreeWidgetItem *, int)"),self.contactClicked)
self.connect(self.contactsList,SIGNAL("currentItemChanged(QListWidgetItem *, QListWidgetItem *)"),self.showContact)
self.connect(self.contactsList,SIGNAL("currentItemChanged(QListWidgetItem *, QListWidgetItem *)"),
self, SLOT("checkEditContactButton()"))
self.connect(self.contactEditButton,SIGNAL("clicked()"), self.editContact)
self.connect(self.contactAddButton,SIGNAL("clicked()"), self.addContact)
self.connect(self.importVcardAction,SIGNAL("triggered()"), lambda : self.importContacts("vcard"))
self.connect(self.importLdifAction,SIGNAL("triggered()"), lambda : self.importContacts("ldif"))
self.connect(self.contactsList,SIGNAL("customContextMenuRequested(QPoint)"),self.showContactListContextMenu)
self.connect(self.searchLine,SIGNAL("textChanged(const QString &)"),self.showContacts)
self.connect(self.searchLine_2,SIGNAL("textChanged(const QString &)"),self.showContacts)
self.connect(self.typeBox,SIGNAL("currentIndexChanged(int)"),self.showContacts)
self.connect(self.connection, SIGNAL("connectionStateChanged"), lambda x: self.connectBar.setValue(x))
self.connect(self.connection, SIGNAL("sysinfoCompleted"), self.showSysinfo)
self.connect(self.connection, SIGNAL("contactsCompleted"), self.showContacts)
self.connect(self.connection, SIGNAL("contactsUpdated"), self.showContacts)
self.connect(self.connection, SIGNAL("connectionCompleted"), self.connected)
self.connect(self.connection, SIGNAL("connectionClosed"), self.connectionClosed)
self.connect(self.connection, SIGNAL("connectionAborted"), self.connectionClosed)
self.connect(self.connection, SIGNAL("connectionFailed"), self.connectionFailed)
self.connect(self.connection, SIGNAL("connectionVersionMismatchError"), self.connectionVersionMismatch)
self.connect(self.connection, SIGNAL("messagesRequest"), self.newMessages)
self.connect(self.connection, SIGNAL("messagesRequestComplete"), self.newMessagesFinished)
self.connect(self.connection, SIGNAL("messageSent"), self.messageStateChanged)
self.connect(self.connection, SIGNAL("messageQueued"), self.messageStateChanged)
self.connect(self.automaticConnectionTimer, SIGNAL("timeout()"), self.automaticConnectionTimerFired)
self.connect(self.deviceScanner, SIGNAL("scanStarted"), self.automaticConnectionScanStarted)
self.connect(self.deviceScanner, SIGNAL("foundDevice"), self.automaticConnectionFoundDevice)
self.connect(self.deviceScanner, SIGNAL("scanCompleted"), self.automaticConnectionScanFinished)
self.connect(self.deviceScanner, SIGNAL("scanFailed"), self.automaticConnectionScanFinished)
self.connect(self.settings, SIGNAL("reloadSettings"), self.loadSettings)
self.connect(self.settings, SIGNAL("reloadSettings"), self.loadAutomaticConnection)
# Also update the icons in the summary tab when the connection state has changed
self.okPixmap = QIcon(":/dialog-apply").pixmap(16, 16)
self.loadingMovie = QMovie(":/loading-2", parent=self)
self.loadingMovie.setScaledSize(QSize(20, 20))
self.loadingMovie.start()
self.connect(self.connection, SIGNAL("connectionEstablished"), lambda : self.connectionStateLabel.setPixmap(self.okPixmap))
self.connect(self.connection, SIGNAL("connectionEstablished"), lambda : self.sysinfoStateLabel.setMovie(self.loadingMovie))
self.connect(self.connection, SIGNAL("sysinfoCompleted"), lambda : self.sysinfoStateLabel.setPixmap(self.okPixmap))
self.connect(self.connection, SIGNAL("sysinfoCompleted"), lambda : self.contactStateLabel.setMovie(self.loadingMovie))
self.connect(self.connection, SIGNAL("contactsCompleted"), lambda : self.contactStateLabel.setPixmap(self.okPixmap))
self.connect(self.connection, SIGNAL("contactsCompleted"), lambda : self.calendarStateLabel.setMovie(self.loadingMovie))
self.connect(self.connection, SIGNAL("calendarCompleted"), lambda : self.calendarStateLabel.setPixmap(self.okPixmap))
if not main.minimized:
self.show()
def __str__(self):
return "\"Main-Window\""
def loadSettings(self):
self.updateDevices()
if self.connection.connected():
# Show the extended StackedWidget when there is an active connection
# after reloading the settings
self.showSysinfo()
self.showContacts()
self.connection = self.main.connection
self.messageText.setSendMessageOnReturn(self.settings.setting("general/sendMessageOnReturn"))
self.checkSendButton()
self.checkEditContactButton()
def loadUpdateChecker(self):
if self.settings.setting("updateCheck/enabled"):
lastCheck = self.settings.setting("updateCheck/lastCheck")
interval = self.settings.setting("updateCheck/interval")
if interval == 0:
return
if not lastCheck.isValid() or lastCheck.daysTo(QDate.currentDate()) >= interval:
self.updateChecker = lib.update_checker.UpdateChecker(self, self.main)
self.connect(self.updateChecker, SIGNAL("updateCheckFailed"), self.updateCheckError)
self.connect(self.updateChecker, SIGNAL("updateCheckNewVersion"), self.updateCheckNewVersion)
self.updateChecker.updateCheck()
else:
lastVersion = self.settings.setting("updateCheck/lastVersion")
if not lastVersion:
return
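            # No check is due yet: compare the version stored by the last check
            # with the running version (LooseVersion handles dotted strings).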
lastVersion = distutils.version.LooseVersion(lastVersion)
currentVersion = ".".join([str(i) for i in self.main.appVersion])
currentVersion = distutils.version.LooseVersion(currentVersion)
if lastVersion > currentVersion:
self.updateCheckNewVersion(self.settings.setting("updateCheck/lastVersion"), self.settings.setting("updateCheck/lastMessage"))
def loadAutomaticConnection(self, firstStart=False):
enabled = self.settings.setting("general/automaticConnectionEnabled")
if enabled and not self.connection.connected():
interval = self.settings.setting("general/automaticConnectionInterval")
if firstStart:
self.automaticConnectionTimerFired()
self.automaticConnectionTimer.setInterval(interval * 1000)
self.automaticConnectionTimer.start()
def showFavorites(self):
self.contactMenu.clear()
self.favMenu.menu(self.contactMenu)
def adjustSize(self):
maxSize = QSize()
for i in range(self.listWidget.count()):
itemSize = self.listWidget.sizeHintForIndex( self.listWidget.indexFromItem(self.listWidget.item(i)) )
if itemSize.width() > maxSize.width():
maxSize.setWidth(itemSize.width())
if itemSize.height() > maxSize.height():
maxSize.setHeight(itemSize.height())
# Add spacing
maxSize.setWidth(maxSize.width() + 13)
maxSize.setHeight(maxSize.height() + 10)
for i in range(self.listWidget.count()):
self.listWidget.item(i).setSizeHint(maxSize)
self.listWidget.setGridSize(maxSize)
self.listWidget.setMaximumWidth(maxSize.width() + self.listWidget.rect().width() - self.listWidget.contentsRect().width() )
self.listWidget.setMinimumWidth(maxSize.width() + self.listWidget.rect().width() - self.listWidget.contentsRect().width() )
def checkPosition(self):
# If you select the last item, hold the left mouse button move your mouse to a free space select the last item
if len(self.listWidget.selectedItems()) == 0:
self.listWidget.setCurrentRow(self.listWidget.currentRow())
def checkFiles(self, index):
if self.stackedWidget.indexOf(self.files) == index:
if lib.obex_wrapper.FOUND_OBEX and not self.fileWidget.connected() and self.connection.connected():
handler = lib.obex_handler.ObexHandler(self.connection.device().bluetoothAddress())
scheduler = lib.obex_scheduler.ObexScheduler(handler)
self.fileWidget.setScheduler(scheduler)
def updateDevices(self):
device = self.devicesBox.currentDevice()
if not isinstance(device, type(None)):
try:
try:
totalRam = self.helper.pretty_filesize(device.value("total_ram"))
except:
totalRam = self.tr("unknown")
try:
totalRom = self.helper.pretty_filesize(device.value("total_rom"))
except:
totalRom = self.tr("unknown")
self.modelLabel_3.setText(str(device.value("model")))
self.imeiLabel_3.setText(str(device.value("imei")))
self.totalRamLabel_3.setText(totalRam)
self.romLabel_3.setText(totalRom)
self.displayLabel_3.setText(self.tr("%1 pixels").arg(device.value("display") ))
self.osLabel_3.setText(device.value("s60_version")) # TODO: append to modelLabel
self.detailStack.setCurrentWidget(self.simpleWidget)
except ValueError:
# This happens when you were never connected to the device
# (e.g. when you start the application for the first time)
self.detailStack.setCurrentWidget(self.noDataWidget)
else:
self.detailStack.setCurrentWidget(self.noDataWidget)
def __connectToDevice(self, device):
if self.connection.connected():
return
if isinstance(device, type(None)):
return
self.settings.setSetting("bluetooth/lastName", device.name())
port = self.settings.setting("bluetooth/port")
if self.scanningMovie.movie():
self.scanningMovie.movie().stop()
self.scanningMovie.setMovie(QMovie())
self.scanningMovie.setToolTip("")
self.automaticConnectionTimer.stop()
# FIXME: Ugly hack
device.setPort(port)
# Reset connection state icons
self.connectionStateLabel.setMovie(self.loadingMovie)
self.sysinfoStateLabel.clear()
self.contactStateLabel.clear()
self.calendarStateLabel.clear()
self.log.info(QString("Connect to device %1 ( %2 on port %3 )").arg(device.name()).arg(device.bluetoothAddress()).arg(port))
self.statusLabel.setText(self.tr("Establish connection!"))
self.connectLabel.setText(self.tr("Connection establishment to: <b>%1</b>").arg(device.name()))
self.devicesBox.selectDevice(device)
self.devicesBox.setEnabled(False)
self.connectButton.setEnabled(False)
self.establishConnectionStack.setCurrentWidget(self.establishConnectionWidget)
self.connection.connectToDevice(device)
def connectToDevice(self):
device = self.devicesBox.currentDevice()
if isinstance(device, type(None)):
return
self.connectionAttemptByUser = True
self.__connectToDevice(device)
def showSysinfo(self):
refreshDate = QDate.currentDate().toString("dd.MM.yyyy")
refreshTime = QTime().currentTime().toString("hh:mm:ss")
try:
freeRam = self.helper.pretty_filesize(self.connection.device().value("free_ram"))
except:
freeRam = self.tr("unknown")
try:
totalRam = self.helper.pretty_filesize(self.connection.device().value("total_ram"))
except:
totalRam = self.tr("unknown")
try:
totalRom = self.helper.pretty_filesize(self.connection.device().value("total_rom"))
except:
totalRom = self.tr("unknown")
try:
signalBars = int(self.connection.device().value("signal_bars"))
except:
signalBars = 0
try:
battery = int(self.connection.device().value("battery"))
except:
battery = 0
if self.connection.device().value("signal_dbm") == u"-1":
# Mobile phone is in offline mode
signalDbm = self.tr("offline mode")
else:
signalDbm = self.tr("%1 dbM").arg(self.connection.device().value("signal_dbm"))
if signalBars == -1:
# Mobile phone is in offline mode
self.signalBar_2.setHidden(True)
self.signalBar.setHidden(True)
else:
self.signalBar_2.setHidden(False)
self.signalBar.setHidden(False)
self.refreshTimeLabel_2.setText(refreshDate + " " + refreshTime)
self.modelLabel_2.setText(str(self.connection.device().value("model")))
self.batteryLabel_2.setText(self.tr("%1% of 100%").arg(self.connection.device().value("battery")))
self.batteryBar_2.setValue(battery)
self.signalLabel_2.setText(signalDbm)
self.signalBar_2.setValue(signalBars)
self.refreshTimeLabel.setText(refreshDate + " " + refreshTime)
self.modelLabel.setText(str(self.connection.device().value("model")))
self.batteryLabel.setText(self.tr("%1% of 100%").arg(self.connection.device().value("battery")))
self.batteryBar.setValue(battery)
self.signalLabel.setText(signalDbm)
self.signalBar.setValue(signalBars)
self.profileLabel.setText(self.connection.device().value("active_profile"))
self.btAddressLabel.setText(self.connection.device().bluetoothAddress())
self.displayLabel.setText(self.tr("%1 pixels").arg(self.connection.device().value("display") ))
self.drivespaceBox.clear()
for type, value in self.connection.device().values():
            if type != "free_drivespace":
continue
drive, free = value.split(":")
free = self.helper.pretty_filesize(free)
self.drivespaceBox.addItem(QString("%1: %2").arg(drive, free))
self.imeiLabel.setText(str(self.connection.device().value("imei")))
self.freeRamLabel.setText(freeRam)
self.totalRamLabel.setText(totalRam)
self.romLabel.setText(totalRom)
self.swLabel.setText(self.connection.device().value("program_version"))
self.programLabel.setText(self.connection.device().value("pys60_version"))
self.osLabel.setText(self.connection.device().value("s60_version")) # TODO: append to modelLabel
self.detailStack.setCurrentWidget(self.extendedWidget)
def showContacts(self, search=""):
if self.fillTypeBox:
self.typeBox.addItem(self.tr("Name"), QVariant("s60remote-name"))
self.typeBox.addItem(self.tr("All fields"), QVariant("s60remote-all"))
self.typeBox.insertSeparator(2)
search = self.searchLine.searchText()
if not search:
search = self.searchLine_2.searchText()
search = unicode(search).lower()
self.contactsTree.clear()
self.contactsList.clear()
searchField = self.typeBox.itemData(self.typeBox.currentIndex()).toString()
for contact in self.database.contacts(True):
if self.fillTypeBox:
for field, value in contact.values():
if field.isPicture():
continue
if self.typeBox.findData(QVariant(field.type())) == -1:
self.typeBox.addItem(field.toString()[:-1], QVariant(field.type()))
if search:
# Search for name
if searchField == "s60remote-name":
if search not in contact.name().lower():
continue
# Search in all field
elif searchField == "s60remote-all":
found = False
for type in contact.types():
if type == "thumbnail_image":
continue
for value in contact.value(type):
if search in value.lower():
found = True
if not found:
continue
# Search in one specific field
else:
found = False
for value in contact.value(searchField):
if search in value.lower():
found = True
if not found:
continue
item = widget.SortedListWidgetItem.SortedListWidgetItem(self.contactsList)
item.setData(Roles.ContactRole, QVariant(contact))
item.setText(contact.name())
if "thumbnail_image" in contact and self.settings.setting("contacts/displayIcon"):
try:
data = base64.decodestring(contact.value("thumbnail_image")[0])
except:
pass
image = QImage().fromData(data)
icon = QIcon(QPixmap().fromImage(image))
item.setIcon(icon)
self.contactsList.setIconSize( QSize(image.size().width()/2, image.size().height()/2) )
if "mobile_number" in contact:
if self.settings.setting("contacts/hideCellnumber"):
item = widget.SortedTreeWidgetItem.SortedTreeWidgetItem(self.contactsTree)
item.setData(0, Roles.ContactRole, QVariant(contact))
item.setText(0, contact.name())
item.setIcon(0, self.contactIsRecipientIcon) if self.contactIsRecipient(contact) else item.setIcon(0, self.emptyIcon)
else:
for number in contact.value("mobile_number"):
item = widget.SortedTreeWidgetItem.SortedTreeWidgetItem(self.contactsTree)
item.setData(0, Roles.ContactRole, QVariant(contact))
item.setText(0, contact.name())
item.setText(1, number)
item.setIcon(0, self.contactIsRecipientIcon) if self.contactIsRecipient(contact) else item.setIcon(0, self.emptyIcon)
if self.contactsList.currentRow() == -1 and self.contactsList.count() > 0:
self.contactsList.setCurrentRow(0)
self.contactsTree.setColumnHidden(1, self.settings.setting("contacts/hideCellnumber"))
self.contactsTree.sortByColumn(0, Qt.AscendingOrder)
self.contactsTree.resizeColumnToContents(0)
self.contactsTree.resizeColumnToContents(1)
if self.fillTypeBox:
self.fillTypeBox = False
def updateContact(self, contact):
#TODO: Only update the changed contact...
self.showContacts()
item = self.contactsList.item(0)
for row in range(self.contactsList.count()):
data = self.contactsList.item(row).data(Roles.ContactRole).toPyObject()
if data == contact:
item = self.contactsList.item(row)
break
self.contactsList.setCurrentItem(item, QItemSelectionModel.ClearAndSelect)
def showContact(self, contact, previousContact):
try:
contact = contact.data(Roles.ContactRole).toPyObject()
except:
return
self.contactBrowser.clear()
self.nameLabel.setText("""<span style=" font-size:16pt; font-weight:600;">""" + unicode(contact.name()) + """</span>""")
if "thumbnail_image" in contact:
data = base64.decodestring(contact.value("thumbnail_image")[0])
image = QImage().fromData(data)
pixmap = QPixmap().fromImage(image)
self.pictureLabel.setPixmap(pixmap)
else:
self.pictureLabel.setPixmap(QPixmap())
for field, value in contact.values():
if field.isPicture():
continue
if field.isDate():
value = QDate.fromString(value, "yyyyMMdd").toString(Qt.DefaultLocaleLongDate)
self.contactBrowser.insertHtml("<b>" + field.toString(printLocation=True) + " </b> " + value + "<br />")
def connected(self):
self.refreshTimer.start()
self.connectionClosedByUser = False
self.connectionDate = QDate.currentDate().toString("dd.MM.yyyy")
self.connectionTime = QTime().currentTime().toString("hh:mm:ss")
self.connectionTimeLabel.setText(self.connectionDate + " " + self.connectionTime)
self.connectionTimeLabel_2.setText(self.connectionDate + " " + self.connectionTime)
self.disconnectButton.setEnabled(True)
self.refreshButton.setEnabled(True)
self.statusLabel.setText(self.tr("Connected to <b>%1</b>").arg(self.connection.device().name()))
self.connectionStack.setCurrentWidget(self.informationWidget)
self.checkEditContactButton()
self.checkSendButton()
def contactClicked(self, item, column):
contact = item.data(0, Roles.ContactRole).toPyObject()
phone = item.text(1) if not self.settings.setting("contacts/hideCellnumber") else None
if phone:
contact.addInternalValue("phone", phone)
if self.contactIsRecipient(contact):
self.takeContact(contact, phone)
item.setIcon(0, self.emptyIcon)
else:
self.insertContact(contact, phone)
item.setIcon(0, self.contactIsRecipientIcon)
self.checkSendButton()
def contactIsRecipient(self, contact):
to = self.toLine.text()
if not to:
return False
hide = self.settings.setting("contacts/hideCellnumber")
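        # Recipients in the "To" line are separated by ";"; depending on the
        # hideCellnumber setting each entry is "Name" or "Name (number)".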
for recipient in unicode(to).split(";"):
recipient = recipient.strip()
if hide:
if recipient == contact.name():
return True
else:
if recipient == contact.name() + " (" + contact.internalValue("phone") + ")":
return True
return False
def insertContact(self, contact, phone):
name = contact.name()
if phone:
name += " (" + phone + ")"
curName = unicode(self.toLine.text())
if (len(curName) == 0):
name = unicode(name)
self.toLine.setText(name)
else:
name = curName + u"; " + unicode(name)
self.toLine.setText(name)
def takeContact(self, contact, phone):
to = unicode(self.toLine.text())
name = contact.name()
if phone:
name += " (" + phone + ")"
toList = to.split(";")
toList = [entry.strip() for entry in toList]
toList.remove(name)
to = "; ".join(toList)
self.toLine.setText(to)
def textChanged(self):
len = int(self.messageText.toPlainText().length())
chars, messages = self.helper.countMessages(len)
if len >= 512:
bigsms = '***'
        else:
            bigsms = ''
self.charLabel.setText(self.tr("%1 chars left; %n message(s); total chars: %2%3", "", messages).arg(chars).arg(len).arg(bigsms))
self.checkSendButton()
def recipientChanged(self, recipients):
# This is only called if the to line is changed by the user (and NOT programmatically)
toList = recipients.split(";")
toList = [unicode(entry).strip() for entry in toList]
hideCell = self.settings.setting("contacts/hideCellnumber")
for itemPos in xrange(self.contactsTree.topLevelItemCount()):
item = self.contactsTree.topLevelItem(itemPos)
contact = item.data(0, Roles.ContactRole).toPyObject()
if (hideCell and contact.name() in toList) \
or (not hideCell and contact.name() + " (" + item.text(1) + ")" in toList):
item.setIcon(0, self.contactIsRecipientIcon)
else:
item.setIcon(0, self.emptyIcon)
self.checkSendButton()
def sendMessage(self):
to = unicode(self.toLine.text())
msg = unicode(self.messageText.toPlainText())
to = to.split(";")
for name in to:
contact = None
name = name.strip()
# Only a phone number, sth. like 06641234567 or +436641234567
if re.match(r"^[+]{0,1}\d*$", name) != None:
contact = Contact(name=name)
contact.addInternalValue("phone", name)
            # Name and phone number, sth. like foo (06641234567)
elif re.match(r".*\([+]{0,1}\d{3,15}\)$", name) != None:
search = re.search(r"(.*)\s\((.*)\)$", name)
name = search.groups()[0]
phone = search.groups()[1]
contact = Contact(name=name)
contact.addInternalValue("phone", phone)
# Only a name, sth. like foo
else:
for recipient in self.database.contacts(True):
if unicode(recipient.name()) == name:
contact = copy.deepcopy(recipient)
if len(recipient.value("mobile_number")) > 1:
self.log.info(QString("Contact %1 has more then one mobile number.").arg(name))
number = self.askMobileNumber(contact)
if number != None:
contact.addInternalValue("phone", number)
else:
continue
else:
contact.addInternalValue("phone", recipient.value("mobile_number")[0])
if not contact:
# foo must be in the contact list
dlg = QDialog(self)
dialog = ui.ui_mobileNumberNotFound.Ui_MobileNumberNotFoundDialog()
dialog.setupUi(dlg)
self.main.setupButtonBox(dialog.buttonBox)
dlg.exec_()
continue
if not "phone" in contact.internalValues():
continue
self.log.info(QString("Sending message to contact %1").arg(unicode(contact)))
message = Message()
message.setType(MessageType.Outgoing)
message.setDevice(self.connection.device())
message.setContact(contact)
message.setDateTime(QDateTime.currentDateTime())
message.setMessage(msg)
self.connection.sendMessage(message)
self.toLine.clear()
self.messageText.clear()
self.messageText.setFocus()
for itemPos in xrange(self.contactsTree.topLevelItemCount()):
item = self.contactsTree.topLevelItem(itemPos)
item.setIcon(0, self.emptyIcon)
def askMobileNumber(self, contact):
dlg = QDialog(self)
dialog = ui.ui_mobileNumberSelect.Ui_MobileNumberSelectDialog()
dialog.setupUi(dlg)
self.main.setupButtonBox(dialog.buttonBox)
dialog.contactLabel.setText(self.tr("Please choose the telephone number for contact <i>%1</i>:").arg(contact.name()))
for number in contact.value("mobile_number"):
dialog.mobileBox.addItem(number)
if not dlg.exec_():
return None
return str(dialog.mobileBox.currentText())
def showCustomContextMenu(self, pos):
index = self.contactsTree.indexAt(pos)
if not index.isValid():
return
item = self.contactsTree.itemAt(pos)
menu = QMenu(self)
# Contact as QVariant: There is no need to convert it to a PyObject,
# because it is only used to pass it to the actions
contact = item.data(0, Roles.ContactRole)
startChat = QAction(self)
startChat.setText(self.tr("Start &chat"))
startChat.setIcon(QIcon(":/message-chat"))
startChat.setProperty("type", QVariant("chat"))
startChat.setProperty("contact", contact)
menu.addAction(startChat)
if self.settings.setting("messages/saveAllMessages"):
showHistory = QAction(self)
showHistory.setText(self.tr("View &history"))
showHistory.setIcon(QIcon(":/message-history"))
showHistory.setProperty("type", QVariant("history"))
showHistory.setProperty("contact", contact)
menu.addAction(showHistory)
showStatistics = QAction(self)
showStatistics.setText(self.tr("View &statistics"))
showStatistics.setIcon(QIcon(":/view-statistics"))
showStatistics.setProperty("type", QVariant("statistics"))
showStatistics.setProperty("contact", contact)
menu.addAction(showStatistics)
menu.popup(QCursor.pos())
self.connect(menu, SIGNAL("triggered(QAction *)"), self.customContextMenuTriggered)
def customContextMenuTriggered(self, action):
type = str(action.property("type").toString())
contact = action.property("contact").toPyObject()
if type == "chat":
self.openChat(contact)
elif type == "history":
historyBrowser = window.history.History(self, self.main, contact)
elif type == "statistics":
statisticsDialog = window.statistics.Statistics(self, self.main, contact)
def showContactListContextMenu(self, pos):
menu = QMenu(self)
if self.connection.connected():
index = self.contactsList.indexAt(pos)
if not index.isValid():
return
item = self.contactsList.itemAt(pos)
# Contact as QVariant: There is no need to convert it to a PyObject,
# because it is only used to pass it to the actions
contact = item.data(Roles.ContactRole)
editAction = QAction(self)
editAction.setText(self.tr("&Edit contact"))
editAction.setIcon(QIcon(":/user-properties"))
editAction.setProperty("type", QVariant("edit"))
editAction.setProperty("contact", contact)
menu.addAction(editAction)
removeAction = QAction(self)
removeAction.setText(self.tr("&Remove contact"))
removeAction.setIcon(QIcon(":/list-remove-user"))
removeAction.setProperty("type", QVariant("remove"))
removeAction.setProperty("contact", contact)
menu.addAction(removeAction)
self.connect(menu, SIGNAL("triggered(QAction *)"), self.contactListContextMenuTriggered)
else:
notConnectedAction = QAction(self)
notConnectedAction.setText(self.tr("You aren't connected to the mobile phone."))
notConnectedAction.setIcon(QIcon(":/dialog-close"))
notConnectedAction.setEnabled(False)
menu.addAction(notConnectedAction)
menu.popup(QCursor.pos())
def contactListContextMenuTriggered(self, action):
type = str(action.property("type").toString())
contact = action.property("contact").toPyObject()
if type == "edit":
dlg = window.contacts_edit.ContactsEdit(self, self.main, contact)
elif type == "remove":
ret = QMessageBox.question(None,
self.tr("Delete contact"),
self.tr("Do you really want to remove contact \"%1\"?").arg(contact.name()),
QMessageBox.StandardButtons(\
QMessageBox.No | \
QMessageBox.Yes))
if ret == QMessageBox.Yes:
self.connection.contactRemove(contact)
self.database.contactRemove(contact.idOnPhone())
self.showContacts()
def refreshSysinfo(self):
if not self.connection.connected():
return
self.connection.refreshSysinfo()
def newMessages(self):
if not self.newMessagesComplete:
time = QTime().currentTime().toString()
self.statusBar().showMessage(self.tr("[%1] Fetching new messages...").arg(time))
def newMessagesFinished(self, num):
if not self.newMessagesComplete:
time = QTime().currentTime().toString()
if num > 0:
self.statusBar().showMessage(self.tr("[%1] %n new message(s) got saved.", "", num).arg(time), 5000)
else:
self.statusBar().showMessage(self.tr("[%1] There are no new messages.").arg(time), 5000)
self.newMessagesComplete = True
def messageStateChanged(self, message):
queue = self.connection.pendingMessages()
anz = len(queue)
time = QTime().currentTime().toString()
if anz >= 1:
self.statusBar().showMessage(self.tr("[%1] %n message(s) in queue", "", anz).arg(time))
elif anz == 0 and self.queueMessages > 0:
self.statusBar().showMessage(self.tr("[%1] All messages were sent").arg(time), 5000)
self.queueMessages = anz
def closeConnection(self):
self.connectionClosedByUser = True
self.connection.closeConnection()
self.fileWidget.closeConnection()
def connectionClosed(self):
self.refreshTimer.stop()
self.statusLabel.setText(self.tr("No active connection!"))
self.devicesBox.setEnabled(True)
self.connectButton.setEnabled(True)
self.establishConnectionStack.setCurrentWidget(self.emptyWidget)
self.connectionStack.setCurrentWidget(self.connectionWidget)
self.disconnectButton.setEnabled(False)
self.refreshButton.setEnabled(False)
self.connectBar.setValue(0)
self.updateDevices()
self.checkEditContactButton()
self.checkSendButton()
if not self.connectionClosedByUser:
self.log.debug(QString("Automatic connection establishment: connection closed by error, restarting timer..."))
self.loadAutomaticConnection()
else:
self.log.debug(QString("Automatic connection establishment: connection closed by user, timer is not started"))
def automaticConnectionTimerFired(self):
self.log.debug(QString("Timer for automatic connection establishment fired.. scanning for devices"))
self.deviceScanner.start()
def automaticConnectionScanStarted(self):
self.log.debug(QString("Automatic connection establishment: Device scan started"))
movie = QMovie(":/loading-2", "", self)
movie.setScaledSize(QSize(16, 16))
self.scanningMovie.setMovie(movie)
self.scanningMovie.setToolTip(self.tr("There is an active device scan for the automatic connection establishment"))
self.scanningMovie.movie().start()
def automaticConnectionFoundDevice(self, address, name, deviceClass):
for device in self.database.devices():
if device.bluetoothAddress() == address:
self.log.info(QString("Automatic connection establishment: Matching device found, connecting..."))
self.deviceScanner.stop()
self.connectionAttemptByUser = False
self.__connectToDevice(device)
def automaticConnectionScanFinished(self):
self.log.debug(QString("Automatic connection establishment: Device scan finished"))
if self.scanningMovie.movie():
self.scanningMovie.movie().stop()
self.scanningMovie.setMovie(QMovie())
self.scanningMovie.setToolTip("")
@pyqtSignature("")
def checkEditContactButton(self):
if self.connection.connected() and self.contactsList.selectedItems():
self.contactAddButton.setEnabled(True)
self.contactsImportButton.setEnabled(True)
self.contactEditButton.setEnabled(True)
else:
self.contactAddButton.setEnabled(False)
self.contactsImportButton.setEnabled(False)
self.contactEditButton.setEnabled(False)
def checkSendButton(self):
if self.toLine.text() and self.messageText.toPlainText() and self.connection.connected():
self.sendButton.setEnabled(True)
else:
self.sendButton.setEnabled(False)
def openChat(self, contact):
if contact:
# Close all popup windows of the contact
for popup in self.main.popups:
try:
button = popup.buttons.buttons()[0] # Chat button
popupContact = button.property("contact").toPyObject()
if contact == popupContact:
popup.close()
except:
pass
#myChat = window.chat.Chat(None, self.main, contact)
self.main.chatManager.openChat(contact)
def favoriteClicked(self, action):
type = action.property("type").toString()
if type == "contact":
contact = action.data().toPyObject()
self.openChat(contact)
elif type == "configureFavorites":
self.showFavoriteDialog()
def editContact(self):
try:
contact = self.contactsList.currentItem()
contact = contact.data(Roles.ContactRole).toPyObject()
except:
contact = None
dlg = window.contacts_edit.ContactsEdit(self, self.main, contact)
def reinstallService(self):
self.closeConnection()
dlg = QDialog(self)
dialog = ui.ui_connection_update_version.Ui_ConnectionUpdateVersionDialog()
dialog.setupUi(dlg)
if lib.obex_wrapper.FOUND_OBEX:
dialog.obexStack.setCurrentWidget(dialog.obexFoundWidget)
self.log.info(QString("OBEX support was found, trying to send installation file to device"))
else:
dialog.obexStack.setCurrentWidget(dialog.obexNotFoundWidget)
self.log.info(QString("OBEX support was not found"))
if LINUX:
dialog.operatingSystemStack.setCurrentWidget(dialog.linuxWidget)
else:
dialog.operatingSystemStack.setCurrentWidget(dialog.windowsWidget)
self.connect(dialog.sendApplicationButton, SIGNAL("clicked()"), lambda : self.sendApplicationFile(dialog.py20Box.isChecked()))
self.connect(dialog.sendPythonButton, SIGNAL("clicked()"), lambda : self.sendPythonFile(dialog.py20Box.isChecked()))
self.connect(dialog.openFolderButton, SIGNAL("clicked()"), self.helper.openFolder)
dlg.exec_()
def sendApplicationFile(self, usePy20):
if usePy20:
self.helper.sendFile(self, self.devicesBox.currentDevice(), self.main.applicationSis_Py20)
else:
self.helper.sendFile(self, self.devicesBox.currentDevice(), self.main.applicationSis_Py14)
def sendPythonFile(self, usePy20):
if usePy20:
self.helper.sendFile(self, self.devicesBox.currentDevice(), self.main.pythonSis_Py20)
else:
self.helper.sendFile(self, self.devicesBox.currentDevice(), self.main.pythonSis_Py14)
def connectionFailed(self, errno, errmsg):
self.connectionClosedByUser = False
if not self.connectionAttemptByUser:
self.statusBar().showMessage(self.tr("Connection to device failed: %1 - %2").arg(errno).arg(errmsg), 6000)
return
dlg = QDialog(self)
dialog = ui.ui_connection_failed.Ui_ConnectionFailedDialog()
dialog.setupUi(dlg)
dialog.errnoLabel.setText("<b>" + str(errno) + "</b>")
dialog.errmsgLabel.setText("<b>" + errmsg + "</b>")
self.main.setupButtonBox(dialog.buttonBox)
self.connect(dialog.reinstallButton, SIGNAL("clicked()"), self.reinstallService)
self.connect(dialog.buttonBox.button(QDialogButtonBox.Retry), SIGNAL("clicked()"), self.connectToDevice)
self.connect(dialog.buttonBox.button(QDialogButtonBox.Cancel), SIGNAL("clicked()"), self.closeConnection)
dlg.exec_()
def connectionVersionMismatch(self, deviceVersion, pcVersion):
dlg = QDialog(self)
dialog = ui.ui_connection_version_mismatch.Ui_ConnectionVersionMismatchDialog()
dialog.setupUi(dlg)
dialog.mobileVersionLabel.setText("<b>" + str(deviceVersion) + "</b>")
dialog.pcVersionLabel.setText("<b>" + str(pcVersion) + "</b>")
self.main.setupButtonBox(dialog.buttonBox)
self.connect(dialog.updateButton, SIGNAL("clicked()"), self.reinstallService)
self.connect(dialog.buttonBox.button(QDialogButtonBox.Cancel), SIGNAL("clicked()"), self.closeConnection)
dlg.exec_()
def updateCheckError(self, errorMessage):
self.statusBar().showMessage(self.tr("Update check failed: %1").arg(errorMessage), 5000)
def updateCheckNewVersion(self, version, message):
text = self.tr("The update to <b>%1</b> of Series60-Remote is available at <b>%2</b>. Would you like to get it?").arg(version, self.settings.setting("updateCheck/website").toString())
if message:
text += '<br /><br />' + self.tr("Update notes: %1").arg(message)
button = QMessageBox.information(self, self.tr("New version detected"), text, QMessageBox.Yes | QMessageBox.No | QMessageBox.Ignore, QMessageBox.Yes)
if button == QMessageBox.Yes:
QDesktopServices.openUrl(self.settings.setting("updateCheck/website"))
elif button == QMessageBox.Ignore:
self.settings.setSetting("updateCheck/interval", 0)
def openDonateWebsite(self):
QDesktopServices.openUrl(QUrl("http://sourceforge.net/donate/index.php?group_id=204429"))
def addContact(self):
dlg = window.contacts_edit.ContactsEdit(self, self.main)
def importContacts(self, format):
dlg = window.contacts_import.ContactsImport(self, self.main, format)
def showFavoriteDialog(self):
dlg = window.favorites.Favorites(self, self.main)
def showAboutDialog(self):
dlg = window.about.About(self, self.main)
def showSettings(self):
dlg = window.settings.Settings(self, self.main)
def showExportDialog(self):
dlg = window.export.Export(self, self.main)
def showHistory(self):
dlg = window.history.History(self, self.main)
def showStatistics(self):
dlg = window.statistics.Statistics(self, self.main)
def showMessageQueue(self):
dlg = window.message_queue.MessageQueue(self, self.main)
def showImportMessages(self):
dlg = window.import_messages.ImportMessages(self, self.main)
def showLog(self):
self.main.logWindow.show()
def quit(self):
self.main.app.closeAllWindows()
self.main.app.quit()
def closeEvent(self, event):
self.settings.beginGroup("windows")
self.settings.beginGroup("main")
self.settings.setSetting("size", self.size())
self.settings.setSetting("position", self.pos())
self.settings.setSetting("messagesSplitter", self.messagesSplitter.saveState())
self.settings.setSetting("contactsSplitter", self.contactsSplitter.saveState())
close = self.settings.setting("windows/main/minimizeOnClose")
if close == 0:
message = QMessageBox.question(None, self.tr("Quit"),
self.tr("Should the application stay in the system tray?"),
QMessageBox.StandardButtons( QMessageBox.No | QMessageBox.Yes), QMessageBox.Yes)
if message == QMessageBox.Yes:
self.settings.setSetting("minimizeOnClose", 1)
else:
self.settings.setSetting("minimizeOnClose", 2)
self.settings.endGroup()
self.settings.endGroup()
close = self.settings.setting("windows/main/minimizeOnClose")
if close == 1:
self.hide()
else:
for popup in self.main.popups:
popup.close()
if hasattr(self.main, "trayicon"):
self.main.trayicon.hide()
if self.connection.connected():
self.connection.closeConnection()
if self.fileWidget.connected():
self.fileWidget.closeConnection()
self.settings.sync()
self.database.close()
self.hide()
self.main.app.quit()
| gpl-2.0 | -1,246,744,118,675,140,000 | 43.114458 | 191 | 0.629816 | false | 4.198968 | false | false | false |
WillWeatherford/deliver-cute | subscribers/migrations/0001_initial.py | 1 | 1594 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-06 22:02
from __future__ import unicode_literals
from django.db import migrations, models
import subscribers.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='SubReddit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('display_name', models.CharField(max_length=21, unique=True)),
],
),
migrations.CreateModel(
name='Subscriber',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=254)),
('send_hour', models.IntegerField(choices=[(0, '12:00 AM'), (1, '1:00 AM'), (2, '2:00 AM'), (3, '3:00 AM'), (4, '4:00 AM'), (5, '5:00 AM'), (6, '6:00 AM'), (7, '7:00 AM'), (8, '8:00 AM'), (9, '9:00 AM'), (10, '10:00 AM'), (11, '11:00 AM'), (12, '12:00 PM'), (13, '1:00 PM'), (14, '2:00 PM'), (15, '3:00 PM'), (16, '4:00 PM'), (17, '5:00 PM'), (18, '6:00 PM'), (19, '7:00 PM'), (20, '8:00 PM'), (21, '9:00 PM'), (22, '10:00 PM'), (23, '11:00 PM')], default=8)),
('unsubscribe_hash', models.CharField(default=subscribers.models._hash, max_length=255, unique=True)),
('subreddits', models.ManyToManyField(related_name='subscribers', to='subscribers.SubReddit')),
],
),
]
| mit | 4,599,604,326,839,132,700 | 45.882353 | 476 | 0.54266 | false | 3.246436 | false | false | false |
Pal3love/otRebuilder | Package/otRebuilder/Dep/fontTools/varLib/__init__.py | 1 | 23435 | """
Module for dealing with 'gvar'-style font variations, also known as run-time
interpolation.
The ideas here are very similar to MutatorMath. There is even code to read
MutatorMath .designspace files in the varLib.designspace module.
For now, if you run this file on a designspace file, it tries to find
ttf-interpolatable files for the masters and build a variable-font from
them. Such ttf-interpolatable and designspace files can be generated from
a Glyphs source, eg., using noto-source as an example:
$ fontmake -o ttf-interpolatable -g NotoSansArabic-MM.glyphs
Then you can make a variable-font this way:
$ fonttools varLib master_ufo/NotoSansArabic.designspace
API *will* change in the near future.
"""
from __future__ import print_function, division, absolute_import
from __future__ import unicode_literals
from fontTools.misc.py23 import *
from fontTools.misc.arrayTools import Vector
from fontTools.ttLib import TTFont, newTable
from fontTools.ttLib.tables._n_a_m_e import NameRecord
from fontTools.ttLib.tables._f_v_a_r import Axis, NamedInstance
from fontTools.ttLib.tables._g_l_y_f import GlyphCoordinates
from fontTools.ttLib.tables.ttProgram import Program
from fontTools.ttLib.tables.TupleVariation import TupleVariation
from fontTools.ttLib.tables import otTables as ot
from fontTools.varLib import builder, designspace, models, varStore
from fontTools.varLib.merger import VariationMerger, _all_equal
from fontTools.varLib.mvar import MVAR_ENTRIES
from fontTools.varLib.iup import iup_delta_optimize
from collections import OrderedDict
import os.path
import logging
from pprint import pformat
log = logging.getLogger("fontTools.varLib")
class VarLibError(Exception):
pass
#
# Creation routines
#
def _add_fvar(font, axes, instances):
"""
Add 'fvar' table to font.
axes is an ordered dictionary of DesignspaceAxis objects.
instances is a list of dictionary objects with 'location', 'stylename',
and possibly 'postscriptfontname' entries.
"""
assert axes
assert isinstance(axes, OrderedDict)
log.info("Generating fvar")
fvar = newTable('fvar')
nameTable = font['name']
for a in axes.values():
axis = Axis()
axis.axisTag = Tag(a.tag)
# TODO Skip axes that have no variation.
axis.minValue, axis.defaultValue, axis.maxValue = a.minimum, a.default, a.maximum
axis.axisNameID = nameTable.addName(tounicode(a.labelname['en']))
# TODO:
# Replace previous line with the following when the following issues are resolved:
# https://github.com/fonttools/fonttools/issues/930
# https://github.com/fonttools/fonttools/issues/931
# axis.axisNameID = nameTable.addMultilingualName(a.labelname, font)
fvar.axes.append(axis)
for instance in instances:
coordinates = instance['location']
name = tounicode(instance['stylename'])
psname = instance.get('postscriptfontname')
inst = NamedInstance()
inst.subfamilyNameID = nameTable.addName(name)
if psname is not None:
psname = tounicode(psname)
inst.postscriptNameID = nameTable.addName(psname)
inst.coordinates = {axes[k].tag:axes[k].map_backward(v) for k,v in coordinates.items()}
#inst.coordinates = {axes[k].tag:v for k,v in coordinates.items()}
fvar.instances.append(inst)
assert "fvar" not in font
font['fvar'] = fvar
return fvar
def _add_avar(font, axes):
"""
Add 'avar' table to font.
axes is an ordered dictionary of DesignspaceAxis objects.
"""
assert axes
assert isinstance(axes, OrderedDict)
log.info("Generating avar")
avar = newTable('avar')
interesting = False
for axis in axes.values():
# Currently, some rasterizers require that the default value maps
# (-1 to -1, 0 to 0, and 1 to 1) be present for all the segment
# maps, even when the default normalization mapping for the axis
# was not modified.
# https://github.com/googlei18n/fontmake/issues/295
# https://github.com/fonttools/fonttools/issues/1011
# TODO(anthrotype) revert this (and 19c4b37) when issue is fixed
curve = avar.segments[axis.tag] = {-1.0: -1.0, 0.0: 0.0, 1.0: 1.0}
if not axis.map:
continue
items = sorted(axis.map.items())
keys = [item[0] for item in items]
vals = [item[1] for item in items]
# Current avar requirements. We don't have to enforce
# these on the designer and can deduce some ourselves,
# but for now just enforce them.
assert axis.minimum == min(keys)
assert axis.maximum == max(keys)
assert axis.default in keys
# No duplicates
assert len(set(keys)) == len(keys)
assert len(set(vals)) == len(vals)
# Ascending values
assert sorted(vals) == vals
keys_triple = (axis.minimum, axis.default, axis.maximum)
vals_triple = tuple(axis.map_forward(v) for v in keys_triple)
keys = [models.normalizeValue(v, keys_triple) for v in keys]
vals = [models.normalizeValue(v, vals_triple) for v in vals]
if all(k == v for k, v in zip(keys, vals)):
continue
interesting = True
curve.update(zip(keys, vals))
assert 0.0 in curve and curve[0.0] == 0.0
assert -1.0 not in curve or curve[-1.0] == -1.0
assert +1.0 not in curve or curve[+1.0] == +1.0
# curve.update({-1.0: -1.0, 0.0: 0.0, 1.0: 1.0})
assert "avar" not in font
if not interesting:
log.info("No need for avar")
avar = None
else:
font['avar'] = avar
return avar
def _add_stat(font, axes):
nameTable = font['name']
assert "STAT" not in font
STAT = font["STAT"] = newTable('STAT')
stat = STAT.table = ot.STAT()
stat.Version = 0x00010000
axisRecords = []
for i,a in enumerate(axes.values()):
axis = ot.AxisRecord()
axis.AxisTag = Tag(a.tag)
# Meh. Reuse fvar nameID!
axis.AxisNameID = nameTable.addName(tounicode(a.labelname['en']))
axis.AxisOrdering = i
axisRecords.append(axis)
axisRecordArray = ot.AxisRecordArray()
axisRecordArray.Axis = axisRecords
# XXX these should not be hard-coded but computed automatically
stat.DesignAxisRecordSize = 8
stat.DesignAxisCount = len(axisRecords)
stat.DesignAxisRecord = axisRecordArray
# TODO Move to glyf or gvar table proper
def _GetCoordinates(font, glyphName):
"""font, glyphName --> glyph coordinates as expected by "gvar" table
The result includes four "phantom points" for the glyph metrics,
as mandated by the "gvar" spec.
"""
glyf = font["glyf"]
if glyphName not in glyf.glyphs: return None
glyph = glyf[glyphName]
if glyph.isComposite():
coord = GlyphCoordinates([(getattr(c, 'x', 0),getattr(c, 'y', 0)) for c in glyph.components])
control = (glyph.numberOfContours,[c.glyphName for c in glyph.components])
else:
allData = glyph.getCoordinates(glyf)
coord = allData[0]
control = (glyph.numberOfContours,)+allData[1:]
# Add phantom points for (left, right, top, bottom) positions.
horizontalAdvanceWidth, leftSideBearing = font["hmtx"].metrics[glyphName]
if not hasattr(glyph, 'xMin'):
glyph.recalcBounds(glyf)
leftSideX = glyph.xMin - leftSideBearing
rightSideX = leftSideX + horizontalAdvanceWidth
# XXX these are incorrect. Load vmtx and fix.
topSideY = glyph.yMax
bottomSideY = -glyph.yMin
coord = coord.copy()
coord.extend([(leftSideX, 0),
(rightSideX, 0),
(0, topSideY),
(0, bottomSideY)])
return coord, control
# TODO Move to glyf or gvar table proper
def _SetCoordinates(font, glyphName, coord):
glyf = font["glyf"]
assert glyphName in glyf.glyphs
glyph = glyf[glyphName]
# Handle phantom points for (left, right, top, bottom) positions.
assert len(coord) >= 4
if not hasattr(glyph, 'xMin'):
glyph.recalcBounds(glyf)
leftSideX = coord[-4][0]
rightSideX = coord[-3][0]
topSideY = coord[-2][1]
bottomSideY = coord[-1][1]
for _ in range(4):
del coord[-1]
if glyph.isComposite():
assert len(coord) == len(glyph.components)
for p,comp in zip(coord, glyph.components):
if hasattr(comp, 'x'):
comp.x,comp.y = p
elif glyph.numberOfContours == 0:
assert len(coord) == 0
else:
assert len(coord) == len(glyph.coordinates)
glyph.coordinates = coord
glyph.recalcBounds(glyf)
horizontalAdvanceWidth = round(rightSideX - leftSideX)
leftSideBearing = round(glyph.xMin - leftSideX)
# XXX Handle vertical
font["hmtx"].metrics[glyphName] = horizontalAdvanceWidth, leftSideBearing
def _add_gvar(font, model, master_ttfs, tolerance=0.5, optimize=True):
assert tolerance >= 0
log.info("Generating gvar")
assert "gvar" not in font
gvar = font["gvar"] = newTable('gvar')
gvar.version = 1
gvar.reserved = 0
gvar.variations = {}
for glyph in font.getGlyphOrder():
allData = [_GetCoordinates(m, glyph) for m in master_ttfs]
allCoords = [d[0] for d in allData]
allControls = [d[1] for d in allData]
control = allControls[0]
if (any(c != control for c in allControls)):
log.warning("glyph %s has incompatible masters; skipping" % glyph)
continue
del allControls
# Update gvar
gvar.variations[glyph] = []
deltas = model.getDeltas(allCoords)
supports = model.supports
assert len(deltas) == len(supports)
# Prepare for IUP optimization
origCoords = deltas[0]
endPts = control[1] if control[0] >= 1 else list(range(len(control[1])))
for i,(delta,support) in enumerate(zip(deltas[1:], supports[1:])):
if all(abs(v) <= tolerance for v in delta.array):
continue
var = TupleVariation(support, delta)
if optimize:
delta_opt = iup_delta_optimize(delta, origCoords, endPts, tolerance=tolerance)
if None in delta_opt:
# Use "optimized" version only if smaller...
var_opt = TupleVariation(support, delta_opt)
axis_tags = sorted(support.keys()) # Shouldn't matter that this is different from fvar...?
tupleData, auxData, _ = var.compile(axis_tags, [], None)
unoptimized_len = len(tupleData) + len(auxData)
tupleData, auxData, _ = var_opt.compile(axis_tags, [], None)
optimized_len = len(tupleData) + len(auxData)
if optimized_len < unoptimized_len:
var = var_opt
gvar.variations[glyph].append(var)
def _remove_TTHinting(font):
for tag in ("cvar", "cvt ", "fpgm", "prep"):
if tag in font:
del font[tag]
for attr in ("maxTwilightPoints", "maxStorage", "maxFunctionDefs", "maxInstructionDefs", "maxStackElements", "maxSizeOfInstructions"):
setattr(font["maxp"], attr, 0)
font["maxp"].maxZones = 1
font["glyf"].removeHinting()
# TODO: Modify gasp table to deactivate gridfitting for all ranges?
def _merge_TTHinting(font, model, master_ttfs, tolerance=0.5):
log.info("Merging TT hinting")
assert "cvar" not in font
# Check that the existing hinting is compatible
# fpgm and prep table
for tag in ("fpgm", "prep"):
all_pgms = [m[tag].program for m in master_ttfs if tag in m]
if len(all_pgms) == 0:
continue
if tag in font:
font_pgm = font[tag].program
else:
font_pgm = Program()
if any(pgm != font_pgm for pgm in all_pgms):
log.warning("Masters have incompatible %s tables, hinting is discarded." % tag)
_remove_TTHinting(font)
return
# glyf table
for name, glyph in font["glyf"].glyphs.items():
all_pgms = [
m["glyf"][name].program
for m in master_ttfs
if hasattr(m["glyf"][name], "program")
]
if not any(all_pgms):
continue
glyph.expand(font["glyf"])
if hasattr(glyph, "program"):
font_pgm = glyph.program
else:
font_pgm = Program()
if any(pgm != font_pgm for pgm in all_pgms if pgm):
log.warning("Masters have incompatible glyph programs in glyph '%s', hinting is discarded." % name)
_remove_TTHinting(font)
return
# cvt table
all_cvs = [Vector(m["cvt "].values) for m in master_ttfs if "cvt " in m]
if len(all_cvs) == 0:
# There is no cvt table to make a cvar table from, we're done here.
return
if len(all_cvs) != len(master_ttfs):
log.warning("Some masters have no cvt table, hinting is discarded.")
_remove_TTHinting(font)
return
num_cvt0 = len(all_cvs[0])
if (any(len(c) != num_cvt0 for c in all_cvs)):
log.warning("Masters have incompatible cvt tables, hinting is discarded.")
_remove_TTHinting(font)
return
# We can build the cvar table now.
cvar = font["cvar"] = newTable('cvar')
cvar.version = 1
cvar.variations = []
deltas = model.getDeltas(all_cvs)
supports = model.supports
for i,(delta,support) in enumerate(zip(deltas[1:], supports[1:])):
delta = [round(d) for d in delta]
if all(abs(v) <= tolerance for v in delta):
continue
var = TupleVariation(support, delta)
cvar.variations.append(var)
def _add_HVAR(font, model, master_ttfs, axisTags):
log.info("Generating HVAR")
hAdvanceDeltas = {}
metricses = [m["hmtx"].metrics for m in master_ttfs]
for glyph in font.getGlyphOrder():
hAdvances = [metrics[glyph][0] for metrics in metricses]
# TODO move round somewhere else?
hAdvanceDeltas[glyph] = tuple(round(d) for d in model.getDeltas(hAdvances)[1:])
# We only support the direct mapping right now.
supports = model.supports[1:]
varTupleList = builder.buildVarRegionList(supports, axisTags)
varTupleIndexes = list(range(len(supports)))
n = len(supports)
items = []
zeroes = [0]*n
for glyphName in font.getGlyphOrder():
items.append(hAdvanceDeltas.get(glyphName, zeroes))
while items and items[-1] is zeroes:
del items[-1]
advanceMapping = None
# Add indirect mapping to save on duplicates
uniq = set(items)
# TODO Improve heuristic
if (len(items) - len(uniq)) * len(varTupleIndexes) > len(items):
newItems = sorted(uniq)
mapper = {v:i for i,v in enumerate(newItems)}
mapping = [mapper[item] for item in items]
while len(mapping) > 1 and mapping[-1] == mapping[-2]:
del mapping[-1]
advanceMapping = builder.buildVarIdxMap(mapping)
items = newItems
del mapper, mapping, newItems
del uniq
varData = builder.buildVarData(varTupleIndexes, items)
varstore = builder.buildVarStore(varTupleList, [varData])
assert "HVAR" not in font
HVAR = font["HVAR"] = newTable('HVAR')
hvar = HVAR.table = ot.HVAR()
hvar.Version = 0x00010000
hvar.VarStore = varstore
hvar.AdvWidthMap = advanceMapping
hvar.LsbMap = hvar.RsbMap = None
def _add_MVAR(font, model, master_ttfs, axisTags):
log.info("Generating MVAR")
store_builder = varStore.OnlineVarStoreBuilder(axisTags)
store_builder.setModel(model)
records = []
lastTableTag = None
fontTable = None
tables = None
for tag, (tableTag, itemName) in sorted(MVAR_ENTRIES.items(), key=lambda kv: kv[1]):
if tableTag != lastTableTag:
tables = fontTable = None
if tableTag in font:
# TODO Check all masters have same table set?
fontTable = font[tableTag]
tables = [master[tableTag] for master in master_ttfs]
lastTableTag = tableTag
if tables is None:
continue
# TODO support gasp entries
master_values = [getattr(table, itemName) for table in tables]
if _all_equal(master_values):
base, varIdx = master_values[0], None
else:
base, varIdx = store_builder.storeMasters(master_values)
setattr(fontTable, itemName, base)
if varIdx is None:
continue
log.info(' %s: %s.%s %s', tag, tableTag, itemName, master_values)
rec = ot.MetricsValueRecord()
rec.ValueTag = tag
rec.VarIdx = varIdx
records.append(rec)
assert "MVAR" not in font
if records:
MVAR = font["MVAR"] = newTable('MVAR')
mvar = MVAR.table = ot.MVAR()
mvar.Version = 0x00010000
mvar.Reserved = 0
mvar.VarStore = store_builder.finish()
# XXX these should not be hard-coded but computed automatically
mvar.ValueRecordSize = 8
mvar.ValueRecordCount = len(records)
mvar.ValueRecord = sorted(records, key=lambda r: r.ValueTag)
def _merge_OTL(font, model, master_fonts, axisTags):
log.info("Merging OpenType Layout tables")
merger = VariationMerger(model, axisTags, font)
merger.mergeTables(font, master_fonts, ['GPOS'])
store = merger.store_builder.finish()
try:
GDEF = font['GDEF'].table
assert GDEF.Version <= 0x00010002
except KeyError:
GDEFTable = font["GDEF"] = newTable('GDEF')
GDEF = GDEFTable.table = ot.GDEF()
GDEF.Version = 0x00010003
GDEF.VarStore = store
# Pretty much all of this file should be redesigned and moved into submodules...
# Such a mess right now, but kludging along...
class _DesignspaceAxis(object):
def __repr__(self):
return repr(self.__dict__)
@staticmethod
def _map(v, map):
keys = map.keys()
if not keys:
return v
if v in keys:
return map[v]
k = min(keys)
if v < k:
return v + map[k] - k
k = max(keys)
if v > k:
return v + map[k] - k
# Interpolate
a = max(k for k in keys if k < v)
b = min(k for k in keys if k > v)
va = map[a]
vb = map[b]
return va + (vb - va) * (v - a) / (b - a)
def map_forward(self, v):
if self.map is None: return v
return self._map(v, self.map)
def map_backward(self, v):
if self.map is None: return v
map = {v:k for k,v in self.map.items()}
return self._map(v, map)
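# A worked example of _map (a sketch, not taken from a real designspace): with
# map = {400: 100, 700: 200} (input -> output), _map(550, map) linearly
# interpolates to 150, while a value outside the mapped range is shifted by the
# nearest endpoint's offset, e.g. _map(300, map) == 300 + (100 - 400) == 0.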
def load_designspace(designspace_filename):
ds = designspace.load(designspace_filename)
axes = ds.get('axes')
masters = ds.get('sources')
if not masters:
raise VarLibError("no sources found in .designspace")
instances = ds.get('instances', [])
standard_axis_map = OrderedDict([
('weight', ('wght', {'en':'Weight'})),
('width', ('wdth', {'en':'Width'})),
('slant', ('slnt', {'en':'Slant'})),
('optical', ('opsz', {'en':'Optical Size'})),
])
# Setup axes
axis_objects = OrderedDict()
if axes is not None:
for axis_dict in axes:
axis_name = axis_dict.get('name')
if not axis_name:
axis_name = axis_dict['name'] = axis_dict['tag']
if 'map' not in axis_dict:
axis_dict['map'] = None
else:
axis_dict['map'] = {m['input']:m['output'] for m in axis_dict['map']}
if axis_name in standard_axis_map:
if 'tag' not in axis_dict:
axis_dict['tag'] = standard_axis_map[axis_name][0]
if 'labelname' not in axis_dict:
axis_dict['labelname'] = standard_axis_map[axis_name][1].copy()
axis = _DesignspaceAxis()
for item in ['name', 'tag', 'minimum', 'default', 'maximum', 'map']:
assert item in axis_dict, 'Axis does not have "%s"' % item
if 'labelname' not in axis_dict:
axis_dict['labelname'] = {'en': axis_name}
axis.__dict__ = axis_dict
axis_objects[axis_name] = axis
else:
# No <axes> element. Guess things...
base_idx = None
for i,m in enumerate(masters):
if 'info' in m and m['info']['copy']:
assert base_idx is None
base_idx = i
assert base_idx is not None, "Cannot find 'base' master; Either add <axes> element to .designspace document, or add <info> element to one of the sources in the .designspace document."
master_locs = [o['location'] for o in masters]
base_loc = master_locs[base_idx]
axis_names = set(base_loc.keys())
assert all(name in standard_axis_map for name in axis_names), "Non-standard axis found and there exist no <axes> element."
for name,(tag,labelname) in standard_axis_map.items():
if name not in axis_names:
continue
axis = _DesignspaceAxis()
axis.name = name
axis.tag = tag
axis.labelname = labelname.copy()
axis.default = base_loc[name]
axis.minimum = min(m[name] for m in master_locs if name in m)
axis.maximum = max(m[name] for m in master_locs if name in m)
axis.map = None
# TODO Fill in weight / width mapping from OS/2 table? Need loading fonts...
axis_objects[name] = axis
del base_idx, base_loc, axis_names, master_locs
axes = axis_objects
del axis_objects
log.info("Axes:\n%s", pformat(axes))
# Check all master and instance locations are valid and fill in defaults
for obj in masters+instances:
obj_name = obj.get('name', obj.get('stylename', ''))
loc = obj['location']
for axis_name in loc.keys():
assert axis_name in axes, "Location axis '%s' unknown for '%s'." % (axis_name, obj_name)
for axis_name,axis in axes.items():
if axis_name not in loc:
loc[axis_name] = axis.default
else:
v = axis.map_backward(loc[axis_name])
assert axis.minimum <= v <= axis.maximum, "Location for axis '%s' (mapped to %s) out of range for '%s' [%s..%s]" % (axis_name, v, obj_name, axis.minimum, axis.maximum)
# Normalize master locations
normalized_master_locs = [o['location'] for o in masters]
log.info("Internal master locations:\n%s", pformat(normalized_master_locs))
# TODO This mapping should ideally be moved closer to logic in _add_fvar/avar
internal_axis_supports = {}
for axis in axes.values():
triple = (axis.minimum, axis.default, axis.maximum)
internal_axis_supports[axis.name] = [axis.map_forward(v) for v in triple]
log.info("Internal axis supports:\n%s", pformat(internal_axis_supports))
normalized_master_locs = [models.normalizeLocation(m, internal_axis_supports) for m in normalized_master_locs]
log.info("Normalized master locations:\n%s", pformat(normalized_master_locs))
# Find base master
base_idx = None
for i,m in enumerate(normalized_master_locs):
if all(v == 0 for v in m.values()):
assert base_idx is None
base_idx = i
assert base_idx is not None, "Base master not found; no master at default location?"
log.info("Index of base master: %s", base_idx)
return axes, internal_axis_supports, base_idx, normalized_master_locs, masters, instances
def build(designspace_filename, master_finder=lambda s:s):
"""
Build variation font from a designspace file.
If master_finder is set, it should be a callable that takes a master
filename as found in the designspace file and maps it to the master font
binary to be opened (e.g. .ttf or .otf).
"""
axes, internal_axis_supports, base_idx, normalized_master_locs, masters, instances = load_designspace(designspace_filename)
log.info("Building variable font")
log.info("Loading master fonts")
basedir = os.path.dirname(designspace_filename)
master_ttfs = [master_finder(os.path.join(basedir, m['filename'])) for m in masters]
master_fonts = [TTFont(ttf_path) for ttf_path in master_ttfs]
# Reload base font as target font
vf = TTFont(master_ttfs[base_idx])
# TODO append masters as named-instances as well; needs .designspace change.
fvar = _add_fvar(vf, axes, instances)
_add_stat(vf, axes)
_add_avar(vf, axes)
del instances
# Map from axis names to axis tags...
normalized_master_locs = [{axes[k].tag:v for k,v in loc.items()} for loc in normalized_master_locs]
#del axes
# From here on, we use fvar axes only
axisTags = [axis.axisTag for axis in fvar.axes]
# Assume single-model for now.
model = models.VariationModel(normalized_master_locs, axisOrder=axisTags)
assert 0 == model.mapping[base_idx]
log.info("Building variations tables")
_add_MVAR(vf, model, master_fonts, axisTags)
_add_HVAR(vf, model, master_fonts, axisTags)
_merge_OTL(vf, model, master_fonts, axisTags)
if 'glyf' in vf:
_add_gvar(vf, model, master_fonts)
_merge_TTHinting(vf, model, master_fonts)
return vf, model, master_ttfs
def main(args=None):
from argparse import ArgumentParser
from fontTools import configLogger
parser = ArgumentParser(prog='varLib')
parser.add_argument('designspace')
options = parser.parse_args(args)
# TODO: allow user to configure logging via command-line options
configLogger(level="INFO")
designspace_filename = options.designspace
finder = lambda s: s.replace('master_ufo', 'master_ttf_interpolatable').replace('.ufo', '.ttf')
outfile = os.path.splitext(designspace_filename)[0] + '-VF.ttf'
vf, model, master_ttfs = build(designspace_filename, finder)
log.info("Saving variation font %s", outfile)
vf.save(outfile)
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
sys.exit(main())
import doctest
sys.exit(doctest.testmod().failed)
| mit | 5,187,638,124,208,858,000 | 30.080902 | 185 | 0.699125 | false | 2.956725 | false | false | false |
Axam/nsx-web | nailgun/nailgun/db/sqlalchemy/models/cluster.py | 2 | 4812 | # -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import Enum
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Text
from sqlalchemy import Unicode
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from nailgun import consts
from nailgun.db import db
from nailgun.db.sqlalchemy.models.base import Base
from nailgun.db.sqlalchemy.models.fields import JSON
from nailgun.db.sqlalchemy.models.node import Node
class ClusterChanges(Base):
__tablename__ = 'cluster_changes'
id = Column(Integer, primary_key=True)
cluster_id = Column(Integer, ForeignKey('clusters.id'))
node_id = Column(Integer, ForeignKey('nodes.id', ondelete='CASCADE'))
name = Column(
Enum(*consts.CLUSTER_CHANGES, name='possible_changes'),
nullable=False
)
class Cluster(Base):
__tablename__ = 'clusters'
id = Column(Integer, primary_key=True)
mode = Column(
Enum(*consts.CLUSTER_MODES, name='cluster_mode'),
nullable=False,
default=consts.CLUSTER_MODES.ha_compact
)
status = Column(
Enum(*consts.CLUSTER_STATUSES, name='cluster_status'),
nullable=False,
default=consts.CLUSTER_STATUSES.new
)
net_provider = Column(
Enum(*consts.CLUSTER_NET_PROVIDERS, name='net_provider'),
nullable=False,
default=consts.CLUSTER_NET_PROVIDERS.nova_network
)
network_config = relationship("NetworkingConfig",
backref=backref("cluster"),
cascade="all,delete",
uselist=False)
grouping = Column(
Enum(*consts.CLUSTER_GROUPING, name='cluster_grouping'),
nullable=False,
default=consts.CLUSTER_GROUPING.roles
)
name = Column(Unicode(50), unique=True, nullable=False)
release_id = Column(Integer, ForeignKey('releases.id'), nullable=False)
pending_release_id = Column(Integer, ForeignKey('releases.id'))
nodes = relationship(
"Node", backref="cluster", cascade="delete", order_by='Node.id')
tasks = relationship("Task", backref="cluster", cascade="delete")
attributes = relationship("Attributes", uselist=False,
backref="cluster", cascade="delete")
changes_list = relationship("ClusterChanges", backref="cluster",
cascade="delete")
# We must keep all notifications even if cluster is removed.
# This is because we want the user to be able to see
# the notification history, which is why we don't use
# cascade="delete" in this relationship.
# During cluster deletion the sqlalchemy engine will set null
# in the cluster foreign key column of the notification entity.
notifications = relationship("Notification", backref="cluster")
network_groups = relationship(
"NetworkGroup",
backref="cluster",
cascade="delete",
order_by="NetworkGroup.id"
)
replaced_deployment_info = Column(JSON, default={})
replaced_provisioning_info = Column(JSON, default={})
is_customized = Column(Boolean, default=False)
fuel_version = Column(Text, nullable=False)
@property
def changes(self):
return [
{"name": i.name, "node_id": i.node_id}
for i in self.changes_list
]
@changes.setter
def changes(self, value):
self.changes_list = value
@property
def is_ha_mode(self):
return self.mode in ('ha_full', 'ha_compact')
@property
def full_name(self):
return '%s (id=%s, mode=%s)' % (self.name, self.id, self.mode)
@property
def is_locked(self):
if self.status in ("new", "stopped") and not \
db().query(Node).filter_by(
cluster_id=self.id,
status="ready"
).count():
return False
return True
class Attributes(Base):
__tablename__ = 'attributes'
id = Column(Integer, primary_key=True)
cluster_id = Column(Integer, ForeignKey('clusters.id'))
editable = Column(JSON)
generated = Column(JSON)
| apache-2.0 | -4,429,185,441,734,378,000 | 34.124088 | 78 | 0.645885 | false | 4.221053 | false | false | false |
waterwoodwind/QA_web | main/save_load_func.py | 1 | 3092 | #coding=utf-8
import pandas as pd
import pickle
import json
from main.models import qa_info
from django.core import serializers
from django.http import HttpResponse
import os
#function
def list_all_data():
file_1 = file('data_all.pkl', 'rb')
updata = pickle.load(file_1)
return updata
#views
def refresh_middle_data(request):
exclude_list = []
query_data = qa_info.objects.all().order_by('-data')
json_data = serializers.serialize("json", query_data, use_natural_foreign_keys=True)
list_data = json.loads(json_data)
dict_name_verbose_name = {}
columns_set = []
colheaders = []
dataSchema = {}
for field in qa_info._meta.fields:
dict_name_verbose_name[field.name] = field.verbose_name
if not field.verbose_name in exclude_list:
print field.verbose_name
colheaders.append(field.verbose_name.encode("utf8"))
dataSchema[field.verbose_name] = ''
columns_item = {
u"title": field.verbose_name,
u"field": field.verbose_name,
# u"sortable": u"true",
}
if field.verbose_name == u"问题描述":
columns_item[u"width"] = u"20%"
columns_item[u"title"] = u"问题描述"
elif field.verbose_name == u"整改措施":
columns_item[u"width"] = u"20%"
columns_item[u"title"] = u"整改措施"
elif field.verbose_name == u"处理意见":
columns_item[u"width"] = u"6%"
columns_item[u"title"] = u"处理意见"
else:
split_list = list(field.verbose_name)
# insert a line break after every two characters
title_str = ""
for i in range(len(split_list)):
title_str = title_str + split_list[i]
if (i + 1) % 2 == 0:
title_str = title_str + u"<br>"
if field.verbose_name == u"相关附件":
columns_item[u'formatter'] = "attachment"
columns_item[u"title"] = title_str
columns_item[u"width"] = u"2%"
columns_set.append(columns_item)
json_columns = json.dumps(columns_set)
upload_data = []
for item in list_data:
single_data = item['fields']
single_data[u'id'] = item['pk']
upload_data.append(single_data)
# print upload_data
chinese_updata = []
for item in upload_data:
dict_updata = {}
for key, value in item.items():
dict_updata[dict_name_verbose_name[key]] = value
# print chinese_updata
chinese_updata.append(dict_updata)
#save list
if os.path.exists('data_all.pkl'):
os.remove('data_all.pkl')
file_1 = file('data_all.pkl', 'wb')
pickle.dump(chinese_updata, file_1, True)
#save pd file
if os.path.exists('data.h5'):
os.remove('data.h5')
df_data = pd.DataFrame(chinese_updata)
df_data.to_hdf('data.h5', 'df')
return HttpResponse(u"前端数据已刷新") | mit | -4,685,831,142,808,562,000 | 31.159574 | 88 | 0.549967 | false | 3.346622 | false | false | false |