repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
walafc0/soclib | soclib/iss/iss_profiler/bin/iss_profiler2profile.py | 1 | 2939 | #!/usr/bin/env python
from dsx.util.objdumper import *
import sys
__id__ = "$Id: iss_profiler2profile.py 917 2009-03-12 10:10:06Z nipo $"
__version__ = "$Revision: 917 $"
class SymLooker:
def __init__(self, arch, obj):
self.__syms = {}
dumper = ObjDumper(arch, obj)
for section in dumper:
for sym in section:
self.__syms[sym.addr] = sym.name
self.__addrs = self.__syms.keys()
self.__addrs.sort()
self.__addr2sym = {}
def is_entry(self, addr):
return addr in self.__syms
def lookup_sym(self, addr):
last_addr = None
for sym_addr in self.__addrs:
if sym_addr > addr:
break
last_addr = sym_addr
if last_addr is None:
print hex(addr), "not found in", self.__addrs
return self.__syms[last_addr]
def find_sym(self, addr):
try:
return self.__addr2sym[addr]
except KeyError:
sym = self.lookup_sym(addr)
self.__addr2sym[addr] = sym
return sym
def per_sym(self, ctor):
ret = {}
for k in self.syms():
ret[k] = ctor(k)
return ret
def syms(self):
return self.__syms.values()
arch = sys.argv[1]
obj = sys.argv[2]
sl = SymLooker(arch, obj)
class Counter:
def __init__(self, sym):
self.sym = sym
self.total = 0
self.frozen = 0
self.running = 0
self.runs = 0
def inc(self, running, entering):
if entering:
self.runs += 1
if running:
self.running += 1
else:
self.frozen += 1
self.total += 1
def cmp_total(self, other):
return cmp(self.total, other.total)
def cmp_running(self, other):
return cmp(self.running, other.running)
def missing(self):
if self.total:
return float(self.frozen)/float(self.total)
else:
return 0
def cmp_missing(self, other):
return cmp(self.missing(), other.missing())
def cmp_runs(self, other):
return cmp(self.runs, other.runs)
def cpr(self):
if self.runs:
return float(self.total)/float(self.runs)
else:
return 0
def cmp_cpr(self, other):
return cmp(self.cpr(), other.cpr())
def __repr__(self):
return "%s runs %04d total %06d, cpr: %06d, running time %06d, frz %06d, miss %f"%(
self.sym.ljust(30), self.runs, self.total, self.cpr(), self.running, self.frozen, self.missing())
if sys.argv[3:]:
for xaddr in sys.argv[3:]:
addr = int(xaddr, 16)
print hex(addr), sl.find_sym(addr)
else:
count = sl.per_sym(Counter)
total = 0
last_func = ''
for line in sys.stdin.readlines():
line = line.strip()
running, asked, xaddr = line.split(' ')
if asked == '+':
total += 1
running = running == 'R'
addr = int(xaddr, 16)
sym = sl.find_sym(addr)
entry = sl.is_entry(addr)
count[sym].inc(running, last_func != sym and entry)
last_func = sym
v = count.values()
v = filter(lambda x:x.runs > 15, v)
v.sort(Counter.cmp_runs)
v.reverse()
print "Most runs"
for i in v:
print i
v.sort(Counter.cmp_running)
v.reverse()
print "Most on CPU"
for i in v:
print i
v.sort(Counter.cmp_missing)
v.reverse()
print "Most missing"
for i in v:
print i
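# Example invocation (the architecture, binary and log names are only illustrative):
#   iss_profiler2profile.py mips32el soft/bin.soft < iss_profiler.log
# Each trace line read from stdin above is expected to look like "R + bfc00000":
# the running/frozen flag, a '+' when the instruction was actually fetched, and the
# program counter in hexadecimal.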
| lgpl-2.1 | -5,641,330,111,067,963,000 | 21.960938 | 100 | 0.638653 | false | 2.614769 | false | false | false |
ayepezv/GAD_ERP | openerp/addons/test_impex/models.py | 2 | 4918 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
def selection_fn(model):
return list(enumerate(["Corge", "Grault", "Wheee", "Moog"]))
def compute_fn(records):
for record in records:
record.value = 3
def inverse_fn(records):
pass
MODELS = [
('boolean', fields.Boolean()),
('integer', fields.Integer()),
('float', fields.Float()),
('decimal', fields.Float(digits=(16, 3))),
('string.bounded', fields.Char(size=16)),
('string.required', fields.Char(size=None, required=True)),
('string', fields.Char(size=None)),
('date', fields.Date()),
('datetime', fields.Datetime()),
('text', fields.Text()),
('selection', fields.Selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])),
# here use size=-1 to store the values as integers instead of strings
('selection.function', fields.Selection(selection_fn, size=-1)),
# just relate to an integer
('many2one', fields.Many2one('export.integer')),
('one2many', fields.One2many('export.one2many.child', 'parent_id')),
('many2many', fields.Many2many('export.many2many.other')),
('function', fields.Integer(compute=compute_fn, inverse=inverse_fn)),
# related: specialization of fields.function, should work the same way
# TODO: reference
]
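# The loop below generates one model per entry above: each 'export.<name>' model
# exposes the corresponding field as 'value' alongside a constant integer column.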
for name, field in MODELS:
class NewModel(models.Model):
_name = 'export.%s' % name
const = fields.Integer(default=4)
value = field
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
if isinstance(name, basestring) and name.split(':')[0] == self._name:
records = self.search([('value', operator, int(name.split(':')[1]))])
return records.name_get()
else:
return []
class One2ManyChild(models.Model):
_name = 'export.one2many.child'
# FIXME: orm.py:1161, fix to name_get on m2o field
_rec_name = 'value'
parent_id = fields.Many2one('export.one2many')
str = fields.Char()
value = fields.Integer()
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
if isinstance(name, basestring) and name.split(':')[0] == self._name:
records = self.search([('value', operator, int(name.split(':')[1]))])
return records.name_get()
else:
return []
class One2ManyMultiple(models.Model):
_name = 'export.one2many.multiple'
parent_id = fields.Many2one('export.one2many.recursive')
const = fields.Integer(default=36)
child1 = fields.One2many('export.one2many.child.1', 'parent_id')
child2 = fields.One2many('export.one2many.child.2', 'parent_id')
class One2ManyChildMultiple(models.Model):
_name = 'export.one2many.multiple.child'
# FIXME: orm.py:1161, fix to name_get on m2o field
_rec_name = 'value'
parent_id = fields.Many2one('export.one2many.multiple')
str = fields.Char()
value = fields.Integer()
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
class One2ManyChild1(models.Model):
_name = 'export.one2many.child.1'
_inherit = 'export.one2many.multiple.child'
class One2ManyChild2(models.Model):
_name = 'export.one2many.child.2'
_inherit = 'export.one2many.multiple.child'
class Many2ManyChild(models.Model):
_name = 'export.many2many.other'
# FIXME: orm.py:1161, fix to name_get on m2o field
_rec_name = 'value'
str = fields.Char()
value = fields.Integer()
@api.multi
def name_get(self):
return [(record.id, "%s:%s" % (self._name, record.value)) for record in self]
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
if isinstance(name, basestring) and name.split(':')[0] == self._name:
records = self.search([('value', operator, int(name.split(':')[1]))])
return records.name_get()
else:
return []
class SelectionWithDefault(models.Model):
_name = 'export.selection.withdefault'
const = fields.Integer(default=4)
value = fields.Selection([(1, "Foo"), (2, "Bar")], default=2)
class RecO2M(models.Model):
_name = 'export.one2many.recursive'
value = fields.Integer()
child = fields.One2many('export.one2many.multiple', 'parent_id')
class OnlyOne(models.Model):
_name = 'export.unique'
value = fields.Integer()
_sql_constraints = [
('value_unique', 'unique (value)', "The value must be unique"),
]
| gpl-3.0 | 4,212,233,521,099,419,000 | 30.729032 | 89 | 0.618544 | false | 3.417651 | false | false | false |
steven-martins/Marking | back/marks/migrations/0001_initial.py | 1 | 3773 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import datetime
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Mark',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('result', models.IntegerField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Picture',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('file', models.ImageField(max_length=150, upload_to='picture/%Y/%m/%d')),
('title', models.CharField(max_length=50)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('name', models.CharField(max_length=100)),
('description', models.TextField()),
('marks', models.ManyToManyField(to=settings.AUTH_USER_MODEL, through='marks.Mark', related_name='project_marks_student')),
('members', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
('pictures', models.ManyToManyField(to='marks.Picture', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('title', models.CharField(max_length=200)),
('detail', models.CharField(max_length=250, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Student',
fields=[
('login', models.CharField(serialize=False, max_length=10, primary_key=True)),
('last_connection', models.DateTimeField(blank=True, default=datetime.datetime.now)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Timeslot',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('title', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='project',
name='timeslot',
field=models.ForeignKey(to='marks.Timeslot', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='mark',
name='project',
field=models.ForeignKey(to='marks.Project'),
preserve_default=True,
),
migrations.AddField(
model_name='mark',
name='question',
field=models.ForeignKey(to='marks.Question'),
preserve_default=True,
),
migrations.AddField(
model_name='mark',
name='student',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
]
| mit | 3,561,885,093,581,885,000 | 34.59434 | 139 | 0.51789 | false | 4.72807 | false | false | false |
googlefonts/noto-emoji | materialize_emoji_images.py | 1 | 4076 | #!/usr/bin/env python3
#
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a copy of the emoji images that instantiates aliases, etc. as
symlinks."""
from __future__ import print_function
import argparse
import glob
import os
from os import path
import re
import shutil
import sys
from nototools import tool_utils
# copied from third_party/color_emoji/add_glyphs.py
EXTRA_SEQUENCES = {
'u1F46A': '1F468_200D_1F469_200D_1F466', # MWB
'u1F491': '1F469_200D_2764_FE0F_200D_1F468', # WHM
'u1F48F': '1F469_200D_2764_FE0F_200D_1F48B_200D_1F468', # WHKM
}
# Flag aliases - from: to
FLAG_ALIASES = {
'BV': 'NO',
'CP': 'FR',
'HM': 'AU',
'SJ': 'NO',
'UM': 'US',
}
OMITTED_FLAGS = set(
'BL BQ DG EA EH FK GF GP GS MF MQ NC PM RE TF WF XK YT'.split())
def _flag_str(ris_pair):
return '_'.join('%04x' % (ord(cp) - ord('A') + 0x1f1e6)
for cp in ris_pair)
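# e.g. _flag_str('US') -> '1f1fa_1f1f8': each letter is mapped to its regional
# indicator symbol code point.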
def _copy_files(src, dst):
"""Copies files named 'emoji_u*.png' from dst to src, and return a set of
the names with 'emoji_u' and the extension stripped."""
code_strings = set()
tool_utils.check_dir_exists(src)
dst = tool_utils.ensure_dir_exists(dst, clean=True)
for f in glob.glob(path.join(src, 'emoji_u*.png')):
shutil.copy(f, dst)
code_strings.add(path.splitext(path.basename(f))[0][7:])
return code_strings
def _alias_people(code_strings, dst):
"""Create aliases for people in dst, based on code_strings."""
for src, ali in sorted(EXTRA_SEQUENCES.items()):
if src[1:].lower() in code_strings:
src_name = 'emoji_%s.png' % src.lower()
ali_name = 'emoji_u%s.png' % ali.lower()
print('creating symlink %s -> %s' % (ali_name, src_name))
os.symlink(path.join(dst, src_name), path.join(dst, ali_name))
else:
      print('people image %s not found' % src, file=sys.stderr)
def _alias_flags(code_strings, dst):
for ali, src in sorted(FLAG_ALIASES.items()):
src_str = _flag_str(src)
if src_str in code_strings:
src_name = 'emoji_u%s.png' % src_str
ali_name = 'emoji_u%s.png' % _flag_str(ali)
print('creating symlink %s (%s) -> %s (%s)' % (ali_name, ali, src_name, src))
os.symlink(path.join(dst, src_name), path.join(dst, ali_name))
else:
      print('flag image %s (%s) not found' % (src_str, src), file=sys.stderr)
def _alias_omitted_flags(code_strings, dst):
UNKNOWN_FLAG = 'fe82b'
if UNKNOWN_FLAG not in code_strings:
    print('unknown flag missing', file=sys.stderr)
return
dst_name = 'emoji_u%s.png' % UNKNOWN_FLAG
dst_path = path.join(dst, dst_name)
for ali in sorted(OMITTED_FLAGS):
ali_str = _flag_str(ali)
if ali_str in code_strings:
      print('omitted flag %s has image %s' % (ali, ali_str), file=sys.stderr)
continue
ali_name = 'emoji_u%s.png' % ali_str
print('creating symlink %s (%s) -> unknown_flag (%s)' % (
ali_str, ali, dst_name))
os.symlink(dst_path, path.join(dst, ali_name))
def materialize_images(src, dst):
code_strings = _copy_files(src, dst)
_alias_people(code_strings, dst)
_alias_flags(code_strings, dst)
_alias_omitted_flags(code_strings, dst)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'-s', '--srcdir', help='path to input sources', metavar='dir',
default = 'build/compressed_pngs')
parser.add_argument(
'-d', '--dstdir', help='destination for output images', metavar='dir')
args = parser.parse_args()
materialize_images(args.srcdir, args.dstdir)
if __name__ == '__main__':
main()
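# Typical invocation (the destination directory is only an example):
#   materialize_emoji_images.py -s build/compressed_pngs -d build/materialized_pngs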
| apache-2.0 | 7,824,723,636,050,095,000 | 31.349206 | 83 | 0.652601 | false | 2.973012 | false | false | false |
esacosta/u-mooc | common/schema_fields.py | 1 | 5958 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapping from schema to backend properties."""
__author__ = 'Abhinav Khandelwal ([email protected])'
import collections
import json
from models.property import Property
from models.property import Registry
class SchemaField(Property):
"""SchemaField defines a solo field in REST API."""
def get_json_schema(self):
"""Get the JSCON schema for this field."""
prop = {}
prop['type'] = self._property_type
if self._optional:
prop['optional'] = self._optional
if self._description:
prop['description'] = self._description
return prop
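    # For example, an optional field whose property type is 'string' yields
    # {'type': 'string', 'optional': True}, plus a 'description' entry when one is set.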
def get_schema_dict_entry(self):
"""Get Schema annotation dictionary for this field."""
if self._extra_schema_dict_values:
schema = self._extra_schema_dict_values
else:
schema = {}
schema['label'] = self._label
schema['_type'] = self._property_type
        if 'date' == self._property_type:
schema['dateFormat'] = 'Y/m/d'
schema['valueFormat'] = 'Y/m/d'
        elif 'select' == self._property_type:
choices = []
for value, label in self._select_data:
choices.append({'value': value, 'label': label})
schema['choices'] = choices
if self._description:
schema['description'] = self._description
return schema
class FieldRegistry(Registry):
"""FieldRegistry is a collection of SchemaField's for an API."""
def add_sub_registry(
self, name, title=None, description=None, registry=None):
"""Add a sub registry to for this Registry."""
if not registry:
registry = FieldRegistry(title, description)
self._sub_registories[name] = registry
return registry
def get_json_schema_dict(self):
schema_dict = dict(self._registry)
schema_dict['properties'] = collections.OrderedDict()
for schema_field in self._properties:
schema_dict['properties'][schema_field.name] = (
schema_field.get_json_schema())
for key in self._sub_registories.keys():
schema_dict['properties'][key] = (
self._sub_registories[key].get_json_schema_dict())
return schema_dict
def get_json_schema(self):
"""Get the json schema for this API."""
return json.dumps(self.get_json_schema_dict())
def _get_schema_dict(self, prefix_key):
"""Get schema dict for this API."""
title_key = list(prefix_key)
title_key.append('title')
schema_dict = [(title_key, self._title)]
base_key = list(prefix_key)
base_key.append('properties')
for schema_field in self._properties:
field_key = list(base_key)
field_key.append(schema_field.name)
field_key.append('_inputex')
            field_tuple = field_key, schema_field.get_schema_dict_entry()
            schema_dict.append(field_tuple)
for key in self._sub_registories.keys():
sub_registry_key_prefix = list(base_key)
sub_registry_key_prefix.append(key)
sub_registry = self._sub_registories[key]
# pylint: disable-msg=protected-access
for entry in sub_registry._get_schema_dict(sub_registry_key_prefix):
schema_dict.append(entry)
# pylint: enable-msg=protected-access
return schema_dict
def get_schema_dict(self):
"""Get schema dict for this API."""
return self._get_schema_dict(list())
def _add_entry(self, key_part_list, value, entity):
if len(key_part_list) == 1:
entity[key_part_list[0]] = value
return
key = key_part_list.pop()
if not entity.has_key(key):
entity[key] = {}
else:
assert type(entity[key]) == type(dict())
self._add_entry(key_part_list, value, entity[key])
def convert_json_to_entity(self, json_entry, entity):
assert type(json_entry) == type(dict())
for key in json_entry.keys():
if type(json_entry[key]) == type(dict()):
self.convert_json_to_entity(json_entry[key], entity)
else:
key_parts = key.split(':')
key_parts.reverse()
self._add_entry(key_parts, json_entry[key], entity)
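    # e.g. a JSON key 'workflow:grader' (a hypothetical field name) with value 'human'
    # is stored as entity['workflow']['grader'] = 'human'.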
def _get_field_value(self, key_part_list, entity):
if len(key_part_list) == 1:
if entity.has_key(key_part_list[0]):
return entity[key_part_list[0]]
return None
key = key_part_list.pop()
if entity.has_key(key):
return self._get_field_value(key_part_list, entity[key])
return None
def convert_entity_to_json_entity(self, entity, json_entry):
for schema_field in self._properties:
field_name = schema_field.name
field_name_parts = field_name.split(':')
field_name_parts.reverse()
value = self._get_field_value(field_name_parts, entity)
if type(value) != type(None):
json_entry[field_name] = value
for key in self._sub_registories.keys():
json_entry[key] = {}
self._sub_registories[key].convert_entity_to_json_entity(
entity, json_entry[key])
| apache-2.0 | -4,220,608,419,550,851,600 | 36.2375 | 80 | 0.596173 | false | 3.977303 | false | false | false |
LCBRU/reporter | reporter/uhl_reports/fast/data_quality/screening_clinic_redcap_dq.py | 1 | 3307 | #!/usr/bin/env python3
from reporter.core import Schedule
from reporter.connections import RedcapInstance
from reporter.emailing import (
RECIPIENT_FAST_MANAGER,
RECIPIENT_FAST_ADMIN,
)
from reporter.application_abstract_reports.redcap.data_quality import (
RedcapMissingDataWhen,
RedcapMissingAllWhen,
RedcapInvalidNhsNumber,
RedcapImpliesCheck,
RedcapInvalidEmailAddress,
RedcapInvalidDate,
RedcapInvalidHeightInCm,
RedcapInvalidHeightInFeetAndInches,
RedcapInvalidWeightInKg,
RedcapInvalidWeightInStonesAndPounds,
RedcapInvalidPostCode,
)
REDCAP_SCREENING_PROJECT_ID = 48
REDCAP_INSTANCE = RedcapInstance.internal
class FastRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['email_add'],
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastScreeningRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastScreeningRedcapInvalidNhsNumber(
RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['nhs_no'],
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['postcode'],
recipients=[RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastRedcapMissingDataWhenRecruited(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=[
'first_name',
'last_name',
'postcode',
'gp_practice',
'clinic_date',
'invitation_group',
'patient_attend',
'patient_agree_scan',
],
indicator_field='patient_recruited',
indicator_value='1',
recipients=[RECIPIENT_FAST_MANAGER, RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
class FastRedcapMissingAddressWhenRecruited(RedcapMissingAllWhen):
def __init__(self):
super().__init__(
redcap_instance=REDCAP_INSTANCE,
project_id=REDCAP_SCREENING_PROJECT_ID,
fields=['add_1', 'add_2', 'add_3', 'add_4'],
indicator_field='patient_recruited',
indicator_value='1',
recipients=[RECIPIENT_FAST_MANAGER, RECIPIENT_FAST_ADMIN],
schedule=Schedule.never,
)
| mit | 8,472,242,262,794,943,000 | 29.198113 | 71 | 0.589054 | false | 4.003632 | false | false | false |
dholbach/snapcraft | snapcraft/_options.py | 1 | 4962 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import multiprocessing
import os
import platform
logger = logging.getLogger(__name__)
_ARCH_TRANSLATIONS = {
'armv7l': {
'kernel': 'arm',
'deb': 'armhf',
'cross-compiler-prefix': 'arm-linux-gnueabihf-',
'cross-build-packages': ['gcc-arm-linux-gnueabihf'],
'triplet': 'arm-linux-gnueabihf',
},
'aarch64': {
'kernel': 'arm64',
'deb': 'arm64',
'cross-compiler-prefix': 'aarch64-linux-gnu-',
'cross-build-packages': ['gcc-aarch64-linux-gnu'],
'triplet': 'aarch64-linux-gnu',
},
'i686': {
'kernel': 'x86',
'deb': 'i386',
'triplet': 'i386-linux-gnu',
},
'ppc64le': {
'kernel': 'powerpc',
'deb': 'ppc64el',
'cross-compiler-prefix': 'powerpc64le-linux-gnu-',
'cross-build-packages': ['gcc-powerpc64le-linux-gnu'],
'triplet': 'powerpc64le-linux-gnu',
},
'x86_64': {
'kernel': 'x86',
'deb': 'amd64',
'triplet': 'x86_64-linux-gnu',
},
's390x': {
'kernel': 's390x',
'deb': 's390x',
'cross-compiler-prefix': 's390x-linux-gnu-',
'cross-build-packages': ['gcc-s390x-linux-gnu'],
'triplet': 's390x-linux-gnu',
}
}
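# For example, cross-building for 'armhf' on an x86_64 host uses the
# 'arm-linux-gnueabihf-' compiler prefix and the 'arm-linux-gnueabihf' triplet above.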
class ProjectOptions:
@property
def use_geoip(self):
return self.__use_geoip
@property
def parallel_builds(self):
return self.__parallel_builds
@property
def parallel_build_count(self):
build_count = 1
if self.__parallel_builds:
try:
build_count = multiprocessing.cpu_count()
except NotImplementedError:
logger.warning(
'Unable to determine CPU count; disabling parallel builds')
return build_count
@property
def is_cross_compiling(self):
return self.__target_machine != self.__host_machine
@property
def cross_compiler_prefix(self):
try:
return self.__machine_info['cross-compiler-prefix']
except KeyError:
            raise EnvironmentError(
                'Cross compilation not supported for target arch {!r}'.format(
                    self.__target_machine))
@property
def additional_build_packages(self):
packages = []
if self.is_cross_compiling:
packages.extend(self.__machine_info.get(
'cross-build-packages', []))
return packages
@property
def arch_triplet(self):
return self.__machine_info['triplet']
@property
def deb_arch(self):
return self.__machine_info['deb']
@property
def kernel_arch(self):
return self.__machine_info['kernel']
@property
def local_plugins_dir(self):
return os.path.join(self.parts_dir, 'plugins')
@property
def parts_dir(self):
return os.path.join(self.__project_dir, 'parts')
@property
def stage_dir(self):
return os.path.join(self.__project_dir, 'stage')
@property
def snap_dir(self):
return os.path.join(self.__project_dir, 'prime')
@property
def debug(self):
return self.__debug
def __init__(self, use_geoip=False, parallel_builds=True,
target_deb_arch=None, debug=False):
# TODO: allow setting a different project dir and check for
# snapcraft.yaml
self.__project_dir = os.getcwd()
self.__use_geoip = use_geoip
self.__parallel_builds = parallel_builds
self._set_machine(target_deb_arch)
self.__debug = debug
def _set_machine(self, target_deb_arch):
self.__host_machine = platform.machine()
if not target_deb_arch:
self.__target_machine = self.__host_machine
else:
self.__target_machine = _find_machine(target_deb_arch)
logger.info('Setting target machine to {!r}'.format(
target_deb_arch))
self.__machine_info = _ARCH_TRANSLATIONS[self.__target_machine]
def _find_machine(deb_arch):
for machine in _ARCH_TRANSLATIONS:
if _ARCH_TRANSLATIONS[machine].get('deb', '') == deb_arch:
return machine
raise EnvironmentError(
'Cannot set machine from deb_arch {!r}'.format(deb_arch))
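# Minimal usage sketch (values shown assume an x86_64 host):
#   options = ProjectOptions(target_deb_arch='armhf')
#   options.is_cross_compiling      # True
#   options.cross_compiler_prefix   # 'arm-linux-gnueabihf-'
#   options.arch_triplet            # 'arm-linux-gnueabihf'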
| gpl-3.0 | 740,297,699,160,747,900 | 28.188235 | 79 | 0.592301 | false | 3.802299 | false | false | false |
ekaakurniawan/Bioinformatics-Tools | DnC_LocalAlignment/DnC_LocalAlignment.py | 1 | 6963 | # Copyright (C) 2012 by Eka A. Kurniawan
# eka.a.kurniawan(ta)gmail(tod)com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Local Alignment in Linear Space - Divide and Conquer
# References:
# - Neil C. Jones, Pavel A. Pevzner. An Introduction to Bioinformatics Algorithms. Cambridge: The MIT Press, 2004.
import copy
# Seq1 = "CACCC"
# Seq2 = "CATC"
# Seq1 = "CAC"
# Seq2 = "CATC"
# Seq1 = "CTTGAT"
# Seq2 = "GCAT"
# Seq1 = "TCAAATCAACCAAGATGGAAGCAAAACTGTTTGTAC"
# Seq2 = "ATGAAGGCAATACTATTAGTCTTGCTATATACATTC"
# Seq1 = "MEAKLFVLFCTFTVLKADTICVGYHANNSTDTVDTVLEKNVTVTHSVNLLEDSHNGKLCSLNGIAPLQLGKCNVAGWLLGNPECDLLLTANSWSYIIETSNSENGTCYPGEFIDYEELREQLSSVSSFEKFEIFPKANSWPNHETTKGVTAACSYSGASSFYRNLLWITKKGTSYPKLSKSYTNNKGKEVLVLWGVHHPPTTSEQQSLYQNTDAYVSVGSSKYNRRFTPEIAARPKVRGQAGRMNYYWTLLDQGDTITFEATGNLIAPWYAFALNKGSDSGIITSDAPVHNCDTRCQTPHGALNSSLPFQNVHPITIGECPKYVKSTKLRMATGLRNVPSIQSRGLFGAIAGFIEGGWTGMIDGWYGYHHQNEQGSGYAADQKSTQNAIDGITNKVNSVIEKMNTQFTAVGKEFNNLERRIENLNKKVDDGFLDVWTYNAELLVLLENERTLDFHDSNVRNLYEKVRSQLRNNAKELGNGCFEFYHKCDDECMESVKNGTYDYPKYSEESKLNREEIDGVKLESMGVYQILAIYSTVASSLVLLVSLGAISFWMCSNGSLQCRICI"
# Seq2 = "MKAILVVLLYTFATANADTLCIGYHANNSTDTVDTVLEKNVTVTHSVNLLEDKHNGKLCKLRGVAPLHLGKCNIAGWILGNPECESLSTASSWSYIVETSSSDNGTCYPGDFIDYEELREQLSSVSSFERFEIFPKTSSWPNHDSNKGVTAACPHAGAKSFYKNLIWLVKKGNSYPKLSKSYINDKGKEVLVLWGIHHPSTSADQQSLYQNADAYVFVGTSRYSKKFKPEIAIRPKVRDQEGRMNYYWTLVEPGDKITFEATGNLVVPRYAFAMERNAGSGIIISDTPVHDCNTTCQTPKGAINTSLPFQNIHPITIGKCPKYVKSTKLRLATGLRNVPSIQSRGLFGAIAGFIEGGWTGMVDGWYGYHHQNEQGSGYAADLKSTQNAIDEITNKVNSVIEKMNTQFTAVGKEFNHLEKRIENLNKKVDDGFLDIWTYNAELLVLLENERTLDYHDSNVKNLYEKVRSQLKNNAKEIGNGCFEFYHKCDNTCMESVKNGTYDYPKYSEEAKLNREEIDGVKLESTRIYQILAIYSTVASSLVLVVSLGAISFWMCSNGSLQCRICI"
# Seq1 = "TTAAG"
# Seq2 = "AAGT"
Seq1 = "TCAAATCAAAAGCA"
Seq2 = "ATGAAGGCAATACCCTA"
mu = 1
sigma = 2
# ------------------------------------ Local Alignment - Dynamic Programming ---
def getScoreLocalAlignment(i, j):
if V[i] == W[j]:
m = S[i-1][j-1] + 1
else:
m = S[i-1][j-1] - mu
return max([0,
S[i-1][j] - sigma,
S[i][j-1] - sigma,
m])
def getMaxValue(M):
maxVal = float("-inf")
maxIndx = [None, None]
for i, r in enumerate(M):
curMax = max(r)
if maxVal < curMax:
maxVal = curMax
maxIndx = [i, r.index(maxVal)]
return maxVal, maxIndx
V = "0" + Seq1
W = "0" + Seq2
lenV = len(V)
lenW = len(W)
S = [[0 for j in xrange(lenW)] for i in xrange(lenV)]
for i in xrange(1, lenV):
for j in range(1, lenW):
S[i][j] = getScoreLocalAlignment(i, j)
val, endPoint = getMaxValue(S)
endPoint = [endPoint[0] - 1, endPoint[1] - 1]
V = "0" + Seq1[::-1]
W = "0" + Seq2[::-1]
for i in xrange(1, lenV):
for j in range(1, lenW):
S[i][j] = getScoreLocalAlignment(i, j)
val, startPoint = getMaxValue(S)
startPoint = [lenV - startPoint[0] - 1, lenW - startPoint[1] - 1]
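# startPoint and endPoint now bound the highest-scoring local alignment: the forward pass
# locates where it ends, while the pass over the reversed sequences locates where it starts.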
# -------------------- Global Alignment in Linear Space - Divide and Conquer ---
oriSeq1 = Seq1
oriSeq2 = Seq2
Seq1 = Seq1[startPoint[0]:endPoint[0]+1]
Seq2 = Seq2[startPoint[1]:endPoint[1]+1]
def getScoreGlobalAlignment(i, j):
if V[i] == W[j]:
m = S[i-1][j-1] + 1
else:
m = S[i-1][j-1] - mu
scores = [S[i-1][j] - sigma,
S[i][j-1] - sigma,
m]
return max(scores)
def calculatePrefix(source, sink, i):
global V, W, S
V = "0" + Seq1[source[0]:i + 1]
W = "0" + Seq2[source[1]:sink[1]]
lenV = len(V)
lenW = len(W)
S = [[0 for j in xrange(lenW)] for i in xrange(lenV)]
for a in range(lenV): S[a][0] = a * -sigma
for b in range(lenW): S[0][b] = b * -sigma
for a in xrange(1, lenV):
for b in range(1, lenW):
S[a][b] = getScoreGlobalAlignment(a, b)
return S[lenV - 1][1:lenW + 1]
def calculateSuffix(source, sink, i):
global V, W, S
V = "0" + Seq1[i:sink[0]][::-1]
W = "0" + Seq2[source[1]:sink[1]][::-1]
lenV = len(V)
lenW = len(W)
S = [[0 for j in xrange(lenW)] for i in xrange(lenV)]
for a in range(lenV): S[a][0] = a * -sigma
for b in range(lenW): S[0][b] = b * -sigma
for a in xrange(1, lenV):
for b in range(1, lenW):
S[a][b] = getScoreGlobalAlignment(a, b)
return S[lenV - 1][1:lenW + 1][::-1]
def getPath(source, sink):
end = False
if (sink[0] - source[0]) <= 2:
if D[source[0]] == None:
mid_i = source[0]
elif D[source[0] + 1] == None:
mid_i = source[0] + 1
else:
return
end = True
else:
mid_i = source[0] + ((sink[0] - source[0]) / 2)
prefix = calculatePrefix(source, sink, mid_i)
suffix = calculateSuffix(source, sink, mid_i)
sumScore = [prefix[b] + suffix[b] for b in xrange(sink[1] - source[1])]
maxScore = max(sumScore)
mid_k = source[1] + sumScore.index(maxScore)
D[mid_i] = maxScore
K[mid_i] = mid_k
if end:
return
getPath(source, [mid_i + 1, mid_k + 1])
getPath([mid_i, mid_k], sink)
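# getPath() is the divide-and-conquer step: at the middle row of the current block it adds
# prefix and suffix alignment scores to find the column the optimal path passes through
# (recorded in D and K), then recurses on the sub-blocks before and after that point.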
def generateSequence():
indx = 0
k_indx = 0
for i in xrange(0, n):
if i in K[k_indx:]:
total = sum([1 for j in K[k_indx:] if j == i])
if total > 1:
R[0] += [indx + j + 1 for j in xrange(total)]
startIndx = k_indx + K[k_indx:].index(i)
maxVal = max(D[startIndx:startIndx+total])
R[1] += [indx + D[startIndx:startIndx+total].index(maxVal) + 1]
indx += total
k_indx += total
else:
R[0] += [indx + 1]
R[1] += [indx + 1]
indx += 1
k_indx += 1
else:
R[1] += [indx + 1]
indx += 1
def displaySequence():
V = "0" + Seq1
W = "0" + Seq2
Vseq = ""
Wseq = ""
for indx in xrange(max(R[0] + R[1])):
indx += 1
if indx in R[0]:
Vseq += V[R[0].index(indx)]
else :
Vseq += "-"
if indx in R[1]:
Wseq += W[R[1].index(indx)]
else :
Wseq += "-"
print Vseq
print Wseq
print ""
m = len(Seq1)
n = len(Seq2)
S = []
V = ""
W = ""
D = [None for i in xrange(m)]
K = copy.deepcopy(D)
R = [[0], [0]]
getPath([0,0], [m,n])
generateSequence()
print R
displaySequence()
| gpl-2.0 | -8,100,298,384,511,656,000 | 28.629787 | 577 | 0.60922 | false | 2.556167 | false | false | false |
charman2/rsas | examples/unsteady.py | 1 | 5254 | # -*- coding: utf-8 -*-
"""Storage selection (SAS) functions: example with multiple fluxes out at steady state
Runs the rSAS model for a synthetic dataset with one flux in and
multiple fluxes out and steady state flow
Theory is presented in:
Harman, C. J. (2014), Time-variable transit time distributions and transport:
Theory and application to storage-dependent transport of chloride in a watershed,
Water Resour. Res., 51, doi:10.1002/2014WR015707.
"""
from __future__ import division
import rsas
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Initializes the random number generator so we always get the same result
np.random.seed(0)
# =====================================
# Load the input data
# =====================================
data = pd.read_csv('Q1.csv', index_col=0, parse_dates=[1])
# length of the dataset
N = len(data)
# The individual timeseries can be pulled out of the dataframe
S = data['S'].values
J = data['J'].values
Q = data['Q1'].values
C_J = data['C_J'].values-2
C_Q1 = data['C_Q1'].values
ST_min = data['ST_min'].values
ST_max = data['ST_max'].values
# =========================
# Parameters needed by rsas
# =========================
# The concentration of water older than the start of observations
C_old = ((J*C_J)[J>0]).sum()/((J)[J>0]).sum()
# =========================
# Create the rsas functions
# =========================
S_dead = 10.
#lam = 0.
# Uniform
# Parameters for the rSAS function
Q_rSAS_fun_type = 'uniform'
ST_min = np.zeros(N)
ST_max = S + S_dead
Q_rSAS_fun_parameters = np.c_[ST_min, ST_max]
rSAS_fun_Q1 = rsas.create_function(Q_rSAS_fun_type, Q_rSAS_fun_parameters)
rSAS_fun = [rSAS_fun_Q1]
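# With ST_min = 0 and ST_max = S + S_dead, discharge samples uniformly from all
# age-ranked storage, including the extra 10 units of passive ("dead") storage.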
# Kumaraswami
## Parameters for the rSAS function
#Q_rSAS_fun_type = 'kumaraswami'
#ST_min = np.ones(N) * 0.
#ST_max = S + S_dead
#a = np.maximum(0.01, 2. + lam * (S - S.mean())/S.std())
#b = np.ones(N) * 5.
#Q_rSAS_fun_parameters = np.c_[a, b, ST_min, ST_max]
#rSAS_fun_Q1 = rsas.create_function(Q_rSAS_fun_type, Q_rSAS_fun_parameters)
#rSAS_fun = [rSAS_fun_Q1]
# =================
# Initial condition
# =================
# Unknown initial age distribution, so just set this to zeros
ST_init = np.zeros(N + 1)
# =============
# Run the model
# =============
# Run it
outputs = rsas.solve(J, Q, rSAS_fun, ST_init=ST_init,
mode='RK4', dt = 1., n_substeps=3, C_J=C_J, C_old=[C_old], verbose=False, debug=False)
# Let's pull these out to make the outputs from rsas crystal clear
# State variables: age-ranked storage of water and solutes
# ROWS of ST, MS are T - ages
# COLUMNS of ST, MS are t - times
# LAYERS of MS are s - solutes
ST = outputs['ST']
MS = outputs['MS'][:,:,0]
# Timestep-averaged backwards TTD
# ROWS of PQ are T - ages
# COLUMNS of PQ are t - times
# LAYERS of PQ are q - fluxes
PQ1m = outputs['PQ'][:,:,0]
# Timestep-averaged outflow concentration
# ROWS of C_Q are t - times
# COLUMNS of PQ are q - fluxes
C_Q1m1 = outputs['C_Q'][:,0,0]
# Timestep averaged solute load out
# ROWS of MQ are T - ages
# COLUMNS of MQ are t - times
# LAYERS of MQ are q - fluxes
# Last dimension of MS are s - solutes
MQ1m = outputs['MQ'][:,:,0,0]
#%%
# ==================================
# Plot the rSAS function
# ==================================
STx = np.linspace(0,S.max()+S_dead,100)
Omega = np.r_[[rSAS_fun_Q1.cdf_i(STx,i) for i in range(N)]].T
import matplotlib.cm as cm
fig = plt.figure(0)
plt.clf()
for i in range(N):
plt.plot(STx, Omega[:,i], lw=1, color=cm.jet((S[i]-S.min())/S.ptp()))
plt.ylim((0,1))
plt.ylabel('$\Omega_Q(T)$')
plt.xlabel('age-ranked storage $S_T$')
plt.title('Cumulative rSAS function')
#%%
# ==================================
# Plot the transit time distribution
# ==================================
fig = plt.figure(1)
plt.clf()
plt.plot(PQ1m, lw=1)
plt.ylim((0,1))
plt.ylabel('$P_Q(T)$')
plt.xlabel('age $T$')
plt.title('Cumulative transit time distribution')
#%%
# =====================================================================
# Outflow concentration estimated using several different TTD
# =====================================================================
# Lets get the instantaneous value of the TTD at the end of each timestep
PQ1i = np.zeros((N+1, N+1))
PQ1i[:,0] = rSAS_fun_Q1.cdf_i(ST[:,0],0)
PQ1i[:,1:] = np.r_[[rSAS_fun_Q1.cdf_i(ST[:,i+1],i) for i in range(N)]].T
# Use the transit time distribution and input timeseries to estimate
# the output timeseries for the instantaneous and timestep-averaged cases
C_Q1i, C_Q1i_raw, Q1i_observed_fraction = rsas.transport(PQ1i, C_J, C_old)
C_Q1m2, C_Q1m2_raw, Q1m2_observed_fraction = rsas.transport(PQ1m, C_J, C_old)
# Plot the results
fig = plt.figure(2)
plt.clf()
plt.step(data['datetime'], C_Q1m1, 'g', ls='--', label='mean rsas internal', lw=2, where='post')
plt.step(data['datetime'], C_Q1m2, 'b', ls=':', label='mean rsas.transport', lw=2, where='post')
plt.step(data['datetime'], C_Q1m2_raw, '0.5', ls=':', label='mean rsas.transport (obs part)', lw=2, where='post')
plt.plot(data['datetime'], C_Q1i, 'b:o', label='inst. rsas.transport', lw=1)
#plt.plot(data['datetime'], data['C_Q1'], 'r.', label='observed', lw=2)
plt.ylim((-2, 0))
plt.legend(loc=0)
plt.ylabel('Concentration [-]')
plt.xlabel('time')
plt.title('Outflow concentration')
plt.show()
| mit | 1,715,018,500,754,261,800 | 35.234483 | 113 | 0.60906 | false | 2.793195 | false | false | false |
Galarzaa90/NabBot | cogs/tracking.py | 1 | 83628 | # Copyright 2019 Allan Galarza
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import datetime as dt
import logging
import pickle
import re
import time
from collections import defaultdict
from typing import List, NamedTuple, Union, Optional, Dict
import asyncpg
import discord
import tibiapy
from discord.ext import commands
from tibiapy import Death, Guild, OnlineCharacter, OtherCharacter, World
from nabbot import NabBot
from .utils import CogUtils, EMBED_LIMIT, FIELD_VALUE_LIMIT, checks, config, get_user_avatar, is_numeric, join_list, \
online_characters, safe_delete_message, split_params
from .utils.context import NabCtx
from .utils.database import DbChar, DbDeath, DbLevelUp, get_affected_count, get_server_property, PoolConn
from .utils.errors import CannotPaginate, NetworkError
from .utils.messages import death_messages_monster, death_messages_player, format_message, level_messages, \
split_message, weighed_choice, DeathMessageCondition, LevelCondition, SIMPLE_LEVEL, SIMPLE_DEATH, SIMPLE_PVP_DEATH
from .utils.pages import Pages, VocationPages
from .utils.tibia import HIGHSCORE_CATEGORIES, NabChar, get_character, get_current_server_save_time, get_guild, \
get_highscores, get_share_range, get_voc_abb, get_voc_emoji, get_world, tibia_worlds, normalize_vocation
log = logging.getLogger("nabbot")
# Storage used to keep a cache of guilds for watchlists
GUILD_CACHE = defaultdict(dict) # type: defaultdict[str, Dict[str, Guild]]
WATCHLIST_SEPARATOR = "·"
class CharactersResult(NamedTuple):
skipped: List[OtherCharacter]
no_user: List[DbChar]
same_owner: List[DbChar]
different_user: List[DbChar]
new: List[NabChar]
all_skipped: bool
# region Database Helper classes
class Watchlist:
"""Represents a Watchlist from the database"""
def __init__(self, **kwargs):
self.server_id: int = kwargs.get("server_id")
self.channel_id: int = kwargs.get("channel_id")
self.message_id: int = kwargs.get("message_id")
self.user_id: int = kwargs.get("user_id")
self.show_count: bool = kwargs.get("show_count", True)
self.created: dt.datetime = kwargs.get("created")
# Not columns
self.entries: List['WatchlistEntry'] = []
self.world = None
self.content = ""
self.online_characters: List[OnlineCharacter] = []
self.online_guilds: List[Guild] = []
self.disbanded_guilds: List[str] = []
self.description = ""
@property
def online_count(self) -> int:
"""Total number of online characters across entries."""
return len(self.online_characters) + sum(g.online_count for g in self.online_guilds)
def __repr__(self):
return "<{0.__class__.__name__} server_id={0.server_id} channel_id={0.channel_id} message_id={0.message_id}>"\
.format(self)
async def add_entry(self, conn: PoolConn, name: str, is_guild: bool, user_id: int, reason: Optional[str]) ->\
Optional['WatchlistEntry']:
""" Adds an entry to the watchlist.
:param conn: Connection to the database.
:param name: Name of the character or guild.
:param is_guild: Whether the entry is a guild or not.
:param user_id: The user that created the entry.
:param reason: The reason for the entry.
:return: The new created entry or None if it already exists.
"""
try:
return await WatchlistEntry.insert(conn, self.channel_id, name, is_guild, user_id, reason)
except asyncpg.UniqueViolationError:
return None
async def get_entries(self, conn: PoolConn) -> List['WatchlistEntry']:
"""Gets all entries in this watchlist.
:param conn: Connection to the database.
:return: List of entries if any.
"""
return await WatchlistEntry.get_entries_by_channel(conn, self.channel_id)
async def update_message_id(self, conn: PoolConn, message_id: int):
"""Update's the message id.
:param conn: Connection to the database.
:param message_id: The new message id.
"""
await conn.execute("UPDATE watchlist SET message_id = $1 WHERE channel_id = $2", message_id, self.channel_id)
self.message_id = message_id
async def update_show_count(self, conn: PoolConn, show_count: bool):
"""Update's the show_count property.
If the property is True, the number of online entries will be shown in the channel's name.
:param conn: Connection to the database.
:param show_count: The property's new value.
"""
await conn.execute("UPDATE watchlist SET show_count = $1 WHERE channel_id = $2", show_count, self.channel_id)
self.show_count = show_count
@classmethod
async def insert(cls, conn: PoolConn, server_id: int, channel_id: int, user_id: int) -> 'Watchlist':
"""Adds a new watchlist to the database.
:param conn: Connection to the database.
:param server_id: The discord guild's id.
:param channel_id: The channel's id.
:param user_id: The user that created the watchlist.
:return: The created watchlist.
"""
row = await conn.fetchrow("INSERT INTO watchlist(server_id, channel_id, user_id) VALUES($1,$2,$3) RETURNING *",
server_id, channel_id, user_id)
return cls(**row)
@classmethod
async def get_by_channel_id(cls, conn: PoolConn, channel_id: int) -> Optional['Watchlist']:
"""Gets a watchlist corresponding to the channel id.
:param conn: Connection to the database.
:param channel_id: The id of the channel.
:return: The found watchlist, if any."""
row = await conn.fetchrow("SELECT * FROM watchlist WHERE channel_id = $1", channel_id)
if row is None:
return None
return cls(**row)
@classmethod
async def get_by_world(cls, conn: PoolConn, world: str) -> List['Watchlist']:
"""
Gets all watchlist from a Tibia world.
:param conn: Connection to the database.
:param world: The name of the world.
:return: A list of watchlists from the world.
"""
query = """SELECT t0.* FROM watchlist t0
LEFT JOIN server_property t1 ON t1.server_id = t0.server_id AND key = 'world'
WHERE value ? $1"""
rows = await conn.fetch(query, world)
return [cls(**row) for row in rows]
@classmethod
def sort_by_voc_and_level(cls):
"""Sorting function to order by vocation and then by level."""
return lambda char: (normalize_vocation(char.vocation), -char.level)
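    # e.g. characters.sort(key=Watchlist.sort_by_voc_and_level()) groups characters by
    # vocation and orders each group from highest to lowest level.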
class WatchlistEntry:
"""Represents a watchlist entry."""
def __init__(self, **kwargs):
self.channel_id: int = kwargs.get("channel_id")
self.name: str = kwargs.get("name")
self.is_guild: bool = kwargs.get("is_guild", False)
self.reason: Optional[str] = kwargs.get("reason")
self.user_id: int = kwargs.get("user_id")
self.created: dt.datetime = kwargs.get("created")
async def remove(self, conn: PoolConn):
"""Removes a watchlist entry from the database.
:param conn: Connection to the database.
"""
await self.delete(conn, self.channel_id, self.name, self.is_guild)
@classmethod
async def delete(cls, conn: PoolConn, channel_id: int, name: str, is_guild: bool):
"""
        :param conn: Connection to the database.
:param channel_id: The id of the watchlist's channel.
:param name: The name of the entry.
:param is_guild: Whether the entry is a guild or a character.
"""
await conn.execute("DELETE FROM watchlist_entry WHERE channel_id = $1 AND lower(name) = $2 AND is_guild = $3",
channel_id, name.lower().strip(), is_guild)
@classmethod
async def get_by_name(cls, conn: PoolConn, channel_id: int, name: str, is_guild: bool) -> \
Optional['WatchlistEntry']:
"""Gets an entry by its name.
:param conn: Connection to the database.
:param channel_id: The id of the channel.
:param name: Name of the entry.
:param is_guild: Whether the entry is a guild or a character.
:return: The entry if found.
"""
row = await conn.fetchrow("SELECT * FROM watchlist_entry "
"WHERE channel_id = $1 AND lower(name) = $2 AND is_guild = $3",
channel_id, name.lower().strip(), is_guild)
if row is None:
return None
return cls(**row)
@classmethod
async def get_entries_by_channel(cls, conn, channel_id) -> List['WatchlistEntry']:
"""Gets entries related to a watchlist channel.
:param conn: Connection to the database.
:param channel_id: Id of the channel.
:return: A list of entries corresponding to the channel.
"""
rows = await conn.fetch("SELECT * FROM watchlist_entry WHERE channel_id = $1", channel_id)
return [cls(**row) for row in rows]
@classmethod
async def insert(cls, conn: PoolConn, channel_id: int, name: str, is_guild: bool, user_id: int, reason=None)\
-> Optional['WatchlistEntry']:
"""Inserts a watchlist entry into the database.
:param conn: Connection to the database.
:param channel_id: The id of the watchlist's channel.
:param name: Name of the entry.
:param is_guild: Whether the entry is a guild or a character.
:param user_id: The id of the user that added the entry.
:param reason: The reason for the entry.
:return: The inserted entry.
"""
row = await conn.fetchrow("INSERT INTO watchlist_entry(channel_id, name, is_guild, reason, user_id) "
"VALUES($1, $2, $3, $4, $5) RETURNING *", channel_id, name, is_guild, reason, user_id)
if row is None:
return None
return cls(**row)
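# Rough usage sketch (run inside a coroutine; the pool, ids and character name are made up):
#   watchlist = await Watchlist.insert(pool, server_id, channel_id, user_id)
#   await watchlist.add_entry(pool, "Some Character", is_guild=False, user_id=user_id, reason=None)
#   entries = await watchlist.get_entries(pool)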
# endregion
class Tracking(commands.Cog, CogUtils):
"""Commands related to NabBot's tracking system."""
def __init__(self, bot: NabBot):
self.bot = bot
self.scan_online_chars_task = bot.loop.create_task(self.scan_online_chars())
self.scan_highscores_task = bot.loop.create_task(self.scan_highscores())
self.world_tasks = {}
self.world_times = {}
# region Tasks
async def scan_deaths(self, world):
"""Iterates through online characters, checking if they have new deaths.
This task is created for every tracked world.
On every iteration, the last element is checked and reinserted at the beginning."""
#################################################
# Nezune's cave #
# Do not touch anything, enter at your own risk #
#################################################
tag = f"{self.tag}[{world}][scan_deaths]"
await self.bot.wait_until_ready()
log.info(f"{tag} Started")
while not self.bot.is_closed():
try:
await asyncio.sleep(config.death_scan_interval)
if len(online_characters[world]) == 0:
await asyncio.sleep(0.5)
continue
skip = False
# Pop last char in queue, reinsert it at the beginning
current_char = online_characters[world].pop()
if hasattr(current_char, "last_check") and time.time() - current_char.last_check < 45:
skip = True
current_char.last_check = time.time()
online_characters[world].insert(0, current_char)
if not skip:
# Check for new death
char = await get_character(self.bot, current_char.name)
await self.compare_deaths(char)
else:
await asyncio.sleep(0.5)
except NetworkError:
await asyncio.sleep(0.3)
continue
except asyncio.CancelledError:
# Task was cancelled, so this is fine
break
except KeyError:
continue
except Exception as e:
log.exception(f"{tag} Exception: {e}")
continue
async def scan_highscores(self):
"""Scans the highscores, storing the results in the database.
The task checks if the last stored data is from the current server save or not."""
#################################################
# Nezune's cave #
# Do not touch anything, enter at your own risk #
#################################################
tag = f"{self.tag}[scan_highscores]"
await self.bot.wait_until_ready()
log.info(f"{tag} Started")
while not self.bot.is_closed():
if len(self.bot.tracked_worlds_list) == 0:
# If no worlds are tracked, just sleep, worlds might get registered later
await asyncio.sleep(10*60)
continue
for world in self.bot.tracked_worlds_list:
tag = f"{self.tag}[{world}](scan_highscores)"
world_count = 0
if world not in tibia_worlds:
log.warning(f"{tag} Tracked world is no longer a valid world.")
await asyncio.sleep(0.1)
try:
for key, values in HIGHSCORE_CATEGORIES.items():
# Check the last scan time, highscores are updated every server save
last_scan = await self.bot.pool.fetchval(
"SELECT last_scan FROM highscores WHERE world = $1 AND category = $2", world, key)
if last_scan:
last_scan_ss = get_current_server_save_time(last_scan)
current_ss = get_current_server_save_time()
# If the saved results are from the current server save, saving is skipped
if last_scan_ss >= current_ss:
log.debug(f"{tag} {values[0].name} | {values[1].name} | Already saved")
await asyncio.sleep(0.1)
continue
try:
highscores = await get_highscores(world, *values)
except NetworkError:
continue
await self.save_highscores(world, key, highscores)
except asyncio.CancelledError:
# Task was cancelled, so this is fine
break
except Exception:
log.exception(f"{tag}")
continue
if world_count:
log.info(f"{tag} {world_count:,} entries saved.")
await asyncio.sleep(5)
await asyncio.sleep(60*30)
async def scan_online_chars(self):
"""Scans tibia.com's character lists to store them locally.
        An online list per world is created, with the online registered characters.
When a character enters the online list, their deaths are checked.
On every cycle, their levels are compared.
When a character leaves the online list, their levels and deaths are compared."""
#################################################
# Nezune's cave #
# Do not touch anything, enter at your own risk #
#################################################
await self.bot.wait_until_ready()
tag = f"{self.tag}[scan_online_chars]"
log.info(f"{tag} Task started")
try:
with open("data/online_list.dat", "rb") as f:
saved_list, timestamp = pickle.load(f)
if (time.time() - timestamp) < config.online_list_expiration:
online_characters.clear()
online_characters.update(saved_list)
count = len([c for v in online_characters.values() for c in v])
log.info(f"{tag} Loaded cached online list | {count:,} players")
else:
log.info(f"{tag} Cached online list is too old, discarding")
except FileNotFoundError:
pass
except (ValueError, pickle.PickleError):
log.info(f"{tag} Couldn't read cached online list.")
while not self.bot.is_closed():
try:
# Pop last server in queue, reinsert it at the beginning
current_world = tibia_worlds.pop()
tibia_worlds.insert(0, current_world)
if current_world.capitalize() not in self.bot.tracked_worlds_list:
await asyncio.sleep(0.1)
continue
if time.time() - self.world_times.get(current_world.capitalize(), 0) < config.online_scan_interval:
await asyncio.sleep(0.2)
continue
tag = f"{self.tag}[{current_world}][scan_online_chars]"
log.debug(f"{tag} Checking online list")
# Get online list for this server
try:
world = await get_world(current_world)
if world is None:
await asyncio.sleep(0.1)
continue
log.debug(f"{tag} {world.online_count} players online")
except NetworkError:
await asyncio.sleep(0.1)
continue
current_world_online = world.online_players
if len(current_world_online) == 0:
await asyncio.sleep(0.1)
continue
self.world_times[world.name] = time.time()
self.bot.dispatch("world_scanned", world)
# Save the online list in file
with open("data/online_list.dat", "wb") as f:
pickle.dump((online_characters, time.time()), f, protocol=pickle.HIGHEST_PROTOCOL)
if current_world not in online_characters:
online_characters[current_world] = []
# List of characters that are now offline
offline_list = [c for c in online_characters[current_world] if c not in current_world_online]
for offline_char in offline_list:
# Check if characters got level ups when they went offline
log.debug(f"{tag} Character no longer online | {offline_char.name}")
online_characters[current_world].remove(offline_char)
try:
_char = await get_character(self.bot, offline_char.name)
await self.compare_levels(_char)
await self.compare_deaths(_char)
except NetworkError:
continue
# Add new online chars and announce level differences
for server_char in current_world_online:
db_char = await DbChar.get_by_name(self.bot.pool, server_char.name)
if db_char:
try:
if server_char not in online_characters[current_world]:
# If the character wasn't in the online list we add them
# (We insert them at the beginning of the list to avoid messing with the checks order)
server_char.last_check = time.time()
log.debug(f"{tag} Character added to online list | {server_char.name}")
online_characters[current_world].insert(0, server_char)
_char = await get_character(self.bot, server_char.name)
await self.compare_deaths(_char)
# Only update level up, but don't count it as a level up
await self.compare_levels(_char, True)
else:
await self.compare_levels(server_char)
# Update character in the list
_char_index = online_characters[current_world].index(server_char)
online_characters[current_world][_char_index].level = server_char.level
except NetworkError:
continue
except (ValueError, IndexError):
continue
except asyncio.CancelledError:
# Task was cancelled, so this is fine
break
except Exception:
log.exception("scan_online_chars")
continue
# endregion
# region Custom Events
@commands.Cog.listener()
async def on_world_scanned(self, scanned_world: World):
"""Event called each time a world is checked.
Updates the watchlists
:param scanned_world: The scanned world's information.
"""
# Schedule Scan Deaths task for this world
if scanned_world.name not in self.world_tasks:
self.world_tasks[scanned_world.name] = self.bot.loop.create_task(self.scan_deaths(scanned_world.name))
GUILD_CACHE[scanned_world.name].clear()
await self._run_watchlist(scanned_world)
async def _run_watchlist(self, scanned_world: World):
watchlists = await Watchlist.get_by_world(self.bot.pool, scanned_world.name)
for watchlist in watchlists:
watchlist.world = scanned_world.name
log.debug(f"{self.tag}[{scanned_world.name}] Checking entries for watchlist | "
f"Guild ID: {watchlist.server_id} | Channel ID: {watchlist.channel_id} "
f"| World: {scanned_world.name}")
guild: discord.Guild = self.bot.get_guild(watchlist.server_id)
if guild is None:
await asyncio.sleep(0.01)
continue
discord_channel: discord.TextChannel = guild.get_channel(watchlist.channel_id)
if discord_channel is None:
await asyncio.sleep(0.1)
continue
watchlist.entries = await watchlist.get_entries(self.bot.pool)
if not watchlist.entries:
await asyncio.sleep(0.1)
continue
await self._watchlist_scan_entries(watchlist, scanned_world)
await self._watchlist_build_content(watchlist)
await self._watchlist_update_content(watchlist, discord_channel)
async def _watchlist_scan_entries(self, watchlist: Watchlist, scanned_world: World):
for entry in watchlist.entries:
if entry.is_guild:
await self._watchlist_check_guild(watchlist, entry)
# If it is a character, check if he's in the online list
else:
self._watchlist_add_characters(watchlist, entry, scanned_world)
watchlist.online_characters.sort(key=Watchlist.sort_by_voc_and_level())
@classmethod
async def _watchlist_check_guild(cls, watchlist, watched_guild: WatchlistEntry):
try:
tibia_guild = await cls.cached_get_guild(watched_guild.name, watchlist.world)
except NetworkError:
return
# Save disbanded guilds separately
if tibia_guild is None:
watchlist.disbanded_guilds.append(watched_guild.name)
return
# If there's at least one member online, add guild to list
if tibia_guild.online_count:
watchlist.online_guilds.append(tibia_guild)
@staticmethod
def _watchlist_add_characters(watchlist, watched_char: WatchlistEntry, scanned_world: World):
for online_char in scanned_world.online_players:
if online_char.name == watched_char.name:
# Add to online list
watchlist.online_characters.append(online_char)
return
@staticmethod
def _watchlist_get_msg_entries(characters):
return [f"\t{char.name} - Level {char.level} {get_voc_emoji(char.vocation)}" for char in characters]
async def _watchlist_build_content(self, watchlist):
if watchlist.online_count > 0:
msg_entries = self._watchlist_get_msg_entries(watchlist.online_characters)
watchlist.content = "\n".join(msg_entries)
self._watchlist_build_guild_content(watchlist)
else:
watchlist.description = "There are no watched characters online."
def _watchlist_build_guild_content(self, watchlist):
for guild_name in watchlist.disbanded_guilds:
watchlist.content += f"\n__Guild: **{guild_name}**__\n"
watchlist.content += "\t*Guild was disbanded.*"
for tibia_guild in watchlist.online_guilds:
watchlist.content += f"\n__Guild: **{tibia_guild.name}**__\n"
online_members = tibia_guild.online_members[:]
online_members.sort(key=Watchlist.sort_by_voc_and_level())
watchlist.content += "\n".join(self._watchlist_get_msg_entries(online_members))
async def _watchlist_update_content(self, watchlist: Watchlist, channel: discord.TextChannel):
# Send new watched message or edit last one
embed = discord.Embed(description=watchlist.description, timestamp=dt.datetime.utcnow())
embed.set_footer(text="Last updated")
if watchlist.content:
if len(watchlist.content) >= EMBED_LIMIT - 50:
watchlist.content = split_message(watchlist.content, EMBED_LIMIT - 50)[0]
watchlist.content += "\n*And more...*"
fields = split_message(watchlist.content, FIELD_VALUE_LIMIT)
for s, split_field in enumerate(fields):
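                # "\u200F" (a right-to-left mark) is used as an invisible title for the continuation fields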
name = "Watchlist" if s == 0 else "\u200F"
embed.add_field(name=name, value=split_field, inline=False)
try:
await self._watchlist_update_message(self.bot.pool, watchlist, channel, embed)
await self._watchlist_update_name(watchlist, channel)
except discord.HTTPException:
# log.exception(f"{self.tag}[_watchlist_update_content] {watchlist}")
pass
@staticmethod
async def _watchlist_update_name(watchlist: Watchlist, channel: discord.TextChannel):
try:
original_name = channel.name.split(WATCHLIST_SEPARATOR, 1)[0]
if original_name != channel.name and not watchlist.show_count:
await channel.edit(name=original_name, reason="Removing online count")
elif watchlist.show_count:
new_name = f"{original_name}{WATCHLIST_SEPARATOR}{watchlist.online_count}"
# Reduce unnecessary API calls and Audit log spam
if new_name != channel.name:
await channel.edit(name=new_name, reason="Online count changed")
except discord.Forbidden:
pass
@staticmethod
async def _watchlist_update_message(conn, watchlist, channel, embed):
# We try to get the watched message, if the bot can't find it, we just create a new one
# This may be because the old message was deleted or this is the first time the list is checked
try:
message = await channel.fetch_message(watchlist.message_id)
except discord.HTTPException:
message = None
if message is None:
new_message = await channel.send(embed=embed)
await watchlist.update_message_id(conn, new_message.id)
else:
await message.edit(embed=embed)
# endregion
# region Discord Events
@commands.Cog.listener()
async def on_guild_channel_delete(self, channel: discord.abc.GuildChannel):
"""Called when a guild channel is deleted.
Deletes associated watchlist and entries."""
if not isinstance(channel, discord.TextChannel):
return
result = await self.bot.pool.execute("DELETE FROM watchlist_entry WHERE channel_id = $1", channel.id)
deleted_entries = get_affected_count(result)
result = await self.bot.pool.execute("DELETE FROM watchlist WHERE channel_id = $1", channel.id)
deleted = get_affected_count(result)
if deleted:
# Dispatch event so ServerLog cog can handle it.
log.info(f"{self.tag} Watchlist channel deleted | Channel {channel.id} | Guild {channel.guild.id}")
self.bot.dispatch("watchlist_deleted", channel, deleted_entries)
# endregion
# region Commands
@checks.server_mod_only()
@checks.tracking_world_only()
@commands.command(name="addchar", aliases=["registerchar"], usage="<user>,<character>")
async def add_char(self, ctx: NabCtx, *, params):
"""Register a character and optionally all other visible characters to a discord user.
This command can only be used by server moderators.
If a character is hidden, only that character will be added. Characters in other worlds are skipped."""
params = params.split(",")
if len(params) != 2:
raise commands.BadArgument()
target_name, char_name = params
target_name = target_name.strip()
target = self.bot.get_member(target_name, ctx.guild)
if target is None:
return await ctx.error(f"I couldn't find any users named `{target_name}`")
if target.bot:
return await ctx.error("You can't register characters to discord bots!")
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
char = await get_character(ctx.bot, char_name)
if char is None:
return await msg.edit(content="That character doesn't exist.")
except NetworkError:
return await msg.edit(content="I couldn't fetch the character, please try again.")
check_other = False
if len(char.other_characters) > 1:
message = await ctx.send("Do you want to attempt to add the other visible characters in this account?")
check_other = await ctx.react_confirm(message, timeout=60, delete_after=True)
if check_other is None:
await safe_delete_message(msg)
return await ctx.error("You ran out of time, try again."
"Remember you have to react or click on the reactions.")
if check_other:
await safe_delete_message(msg)
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
results = await self.check_char_availability(ctx, ctx.author.id, char, [ctx.world], check_other)
except NetworkError:
return await msg.edit("I'm having network issues, please try again.")
if results.all_skipped:
await safe_delete_message(msg)
await ctx.error(f"Sorry, I couldn't find any characters in **{ctx.world}**.")
return
reply = await self.process_character_assignment(ctx, results, target, ctx.author)
await safe_delete_message(msg)
await ctx.send(reply)
@commands.command()
@checks.tracking_world_somewhere()
async def claim(self, ctx: NabCtx, *, char_name: str = None):
"""Claims a character registered as yours.
Claims a character as yours, even if it is already registered to someone else.
In order for this to work, you have to put a special code in the character's comment.
You can see this code by using the command with no parameters. The code looks like this: `/NB-23FC13AC7400000/`
Once you had set the code, you can use the command with that character, if the code matches,
it will be reassigned to you.
Note that it may take some time for the code to be visible to NabBot because of caching.
This code is unique for your discord user, so the code will only work for your discord account and no one else.
No one can claim a character of yours unless you put **their** code on your character's comment.
"""
user = ctx.author
claim_pattern = re.compile(r"/NB-([^/]+)/")
user_code = hex(user.id)[2:].upper()
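        # The verification code is the user's Discord ID in uppercase hexadecimal (without the "0x" prefix)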
# List of Tibia worlds tracked in the servers the user is
if ctx.is_private:
user_tibia_worlds = [ctx.world]
else:
user_tibia_worlds = ctx.bot.get_user_worlds(user.id)
if not ctx.is_private and self.bot.tracked_worlds.get(ctx.guild.id) is None:
return await ctx.send("This server is not tracking any tibia worlds.")
if len(user_tibia_worlds) == 0:
return
if char_name is None:
await ctx.send(f"To use this command, add `/NB-{user_code}/` to the comment of the character you want to"
f"claim, and then use `/claim character_name`.")
return
msg = await ctx.send(f"{config.loading_emoji} Fetching character...")
try:
char = await get_character(ctx.bot, char_name)
if char is None:
return await msg.edit(content=f"{ctx.tick(False)} That character doesn't exist.")
except NetworkError:
return await msg.edit(content=f"{ctx.tick(False)} I couldn't fetch the character, please try again.")
match = claim_pattern.search(char.comment if char.comment is not None else "")
if not match:
await ctx.error(f"Couldn't find verification code on character's comment.\n"
f"Add `/NB-{user_code}/` to the comment to authenticate.")
return
code = match.group(1)
if code != user_code:
await ctx.error(f"The verification code on the character's comment doesn't match yours.\n"
f"Use `/NB-{user_code}/` to authenticate.")
return
check_other = False
if len(char.other_characters) > 1:
message = await ctx.send("Do you want to attempt to add the other visible characters in this account?")
check_other = await ctx.react_confirm(message, timeout=60, delete_after=True)
if check_other is None:
await safe_delete_message(msg)
return await ctx.send("You ran out of time, try again."
"Remember you have to react or click on the reactions.")
if check_other:
await safe_delete_message(msg)
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
results = await self.check_char_availability(ctx, ctx.author.id, char, user_tibia_worlds, check_other)
except NetworkError:
return await msg.edit("I'm having network issues, please try again.")
if results.all_skipped:
reply = "Sorry, I couldn't find any characters from the worlds in the context ({0})."
return await msg.edit(content=reply.format(join_list(user_tibia_worlds)))
reply = await self.process_character_assignment(ctx, results, ctx.author, claim=True)
await safe_delete_message(msg)
await ctx.send(reply)
@checks.tracking_world_somewhere()
@commands.command(aliases=["i'm", "iam"])
async def im(self, ctx: NabCtx, *, char_name: str):
"""Lets you add your tibia character(s) for the bot to track.
If there are other visible characters, the bot will ask for confirmation to add them too.
Characters in other worlds other than the currently tracked world are skipped.
If it finds a character owned by another user, the whole process will be stopped.
If a character is already registered to someone else, `claim` can be used."""
# List of Tibia worlds tracked in the servers the user is
if ctx.is_private:
user_tibia_worlds = [ctx.world]
else:
user_tibia_worlds = ctx.bot.get_user_worlds(ctx.author.id)
msg = await ctx.send(f"{config.loading_emoji} Fetching character...")
try:
char = await get_character(ctx.bot, char_name)
if char is None:
return await msg.edit(content=f"{ctx.tick(False)} That character doesn't exist.")
except NetworkError:
return await msg.edit(content=f"{ctx.tick(False)} I couldn't fetch the character, please try again.")
check_other = False
if len(char.other_characters) > 1:
await msg.edit(content="Do you want to attempt to add the other visible characters in this account?")
check_other = await ctx.react_confirm(msg, timeout=60, delete_after=True)
if check_other is None:
await safe_delete_message(msg)
return await ctx.send("You didn't reply in time, try again."
"Remember that you have to react or click on the icons.")
if check_other:
await safe_delete_message(msg)
msg = await ctx.send(f"{config.loading_emoji} Fetching characters...")
try:
results = await self.check_char_availability(ctx, ctx.author.id, char, user_tibia_worlds, check_other)
except NetworkError:
return await msg.edit("I'm having network issues, please try again.")
if results.all_skipped:
reply = "Sorry, I couldn't find any characters from the worlds in the context ({0})."
return await msg.edit(content=reply.format(join_list(user_tibia_worlds)))
reply = await self.process_character_assignment(ctx, results, ctx.author)
await safe_delete_message(msg)
await ctx.send(reply)
@checks.tracking_world_somewhere()
@commands.command(aliases=["i'mnot"])
async def imnot(self, ctx: NabCtx, *, name):
"""Removes a character assigned to you.
All registered level ups and deaths will be lost forever."""
db_char = await DbChar.get_by_name(ctx.pool, name)
if db_char is None or db_char.user_id == 0:
return await ctx.error("There's no character registered with that name.")
if db_char.user_id != ctx.author.id:
return await ctx.error(f"The character **{db_char.name}** is not registered to you.")
message = await ctx.send(f"Are you sure you want to unregister "
f"**{db_char.name}** ({abs(db_char.level)} {db_char.vocation})?")
confirm = await ctx.react_confirm(message, timeout=50)
if confirm is None:
return await ctx.send("I guess you changed your mind.")
if not confirm:
return await ctx.send("No then? Ok.")
await db_char.update_user(ctx.pool, 0)
await ctx.success(f"**{db_char.name}** is no longer registered to you.")
self.bot.dispatch("character_change", ctx.author.id)
self.bot.dispatch("character_unregistered", ctx.author, db_char)
@checks.can_embed()
@checks.tracking_world_only()
@commands.command()
async def online(self, ctx: NabCtx):
"""Tells you which users are online on Tibia.
This list gets updated based on Tibia.com online list, so it takes a couple minutes to be updated."""
world = ctx.world
per_page = 20 if await ctx.is_long() else 5
now = dt.datetime.utcnow()
uptime = (now - self.bot.start_time).total_seconds()
count = 0
entries = []
vocations = []
for char in online_characters.get(world, []):
name = char.name
db_char = await DbChar.get_by_name(ctx.pool, name)
if not db_char:
continue
# Skip characters of members not in the server
owner = ctx.guild.get_member(db_char.user_id)
if owner is None:
continue
owner = owner.display_name
emoji = get_voc_emoji(char.vocation)
vocations.append(char.vocation.value)
vocation = get_voc_abb(char.vocation)
entries.append(f"{char.name} (Lvl {char.level} {vocation}{emoji}, **@{owner}**)")
count += 1
if count == 0:
if uptime < 90:
await ctx.send("I just started, give me some time to check online lists...⌛")
else:
await ctx.send("There is no one online from Discord.")
return
pages = VocationPages(ctx, entries=entries, vocations=vocations, per_page=per_page)
pages.embed.title = "Users online"
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.send(e)
@commands.command(name="searchteam", aliases=["whereteam", "findteam"], usage="<params>")
@checks.tracking_world_only()
@checks.can_embed()
async def search_team(self, ctx: NabCtx, *, params=None):
"""Searches for a registered character that meets the criteria
There are 3 ways to use this command:
- Show characters in share range with a specific character. (`searchteam <name>`)
- Show characters in share range with a specific level. (`searchteam <level>`)
- Show characters in a level range. (`searchteam <min>,<max>`)
Online characters are shown first on the list, they also have an icon."""
permissions = ctx.bot_permissions
if not permissions.embed_links:
await ctx.send("Sorry, I need `Embed Links` permission for this command.")
return
invalid_arguments = "Invalid arguments used, examples:\n" \
"```/searchteam charname\n" \
"/searchteam level\n" \
"/searchteam minlevel,maxlevel```"
if ctx.world is None:
await ctx.send("This server is not tracking any tibia worlds.")
return
if params is None:
await ctx.send(invalid_arguments)
return
entries = []
vocations = []
online_entries = []
online_vocations = []
per_page = 20 if await ctx.is_long() else 5
char = None
params = split_params(params)
if len(params) < 1 or len(params) > 2:
await ctx.send(invalid_arguments)
return
# params[0] could be a character's name, a character's level or one of the level ranges
# If it's not a number, it should be a player's name
if not is_numeric(params[0]):
# We shouldn't have another parameter if a character name was specified
if len(params) == 2:
await ctx.send(invalid_arguments)
return
char = await get_character(ctx.bot, params[0])
if char is None:
await ctx.send("I couldn't find a character with that name.")
return
low, high = get_share_range(char.level)
title = f"Characters in share range with {char.name}({low}-{high}):"
empty = f"I didn't find anyone in share range with **{char.name}**({low}-{high})"
else:
# Check if we have another parameter, meaning this is a level range
if len(params) == 2:
try:
level1 = int(params[0])
level2 = int(params[1])
except ValueError:
await ctx.send(invalid_arguments)
return
if level1 <= 0 or level2 <= 0:
await ctx.send("You entered an invalid level.")
return
low = min(level1, level2)
high = max(level1, level2)
title = f"Characters between level {low} and {high}"
empty = f"I didn't find anyone between levels **{low}** and **{high}**"
# We only got a level, so we get the share range for it
else:
if int(params[0]) <= 0:
await ctx.send("You entered an invalid level.")
return
low, high = get_share_range(int(params[0]))
title = f"Characters in share range with level {params[0]} ({low}-{high})"
empty = f"I didn't find anyone in share range with level **{params[0]}** ({low}-{high})"
async with ctx.pool.acquire() as conn:
count = 0
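            # Flatten the per-world online character cache into a single list of names for quick membership checks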
online_list = [x.name for v in online_characters.values() for x in v]
async for db_char in DbChar.get_chars_in_range(conn, low, high, ctx.world):
if char is not None and char.name == db_char.name:
continue
owner = ctx.guild.get_member(db_char.user_id)
if owner is None:
continue
count += 1
owner = owner.display_name
emoji = get_voc_emoji(db_char.vocation)
voc_abb = get_voc_abb(db_char.vocation)
entry = f"**{db_char.name}** - Level {abs(db_char.level)} {voc_abb}{emoji} - @**{owner}**"
if db_char.name in online_list:
entry = f"{config.online_emoji}{entry}"
online_entries.append(entry)
online_vocations.append(db_char.vocation)
else:
entries.append(entry)
vocations.append(db_char.vocation)
if count < 1:
await ctx.send(empty)
return
pages = VocationPages(ctx, entries=online_entries + entries, per_page=per_page,
vocations=online_vocations + vocations)
pages.embed.title = title
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.send(e)
@checks.server_mod_only()
@checks.tracking_world_only()
@commands.command(name="removechar", aliases=["deletechar", "unregisterchar"])
async def remove_char(self, ctx: NabCtx, *, name):
"""Removes a registered character from someone.
This can only be used by server moderators.
Note that you can only remove chars if they are from users exclusively in your server.
You can't remove any characters that would alter other servers NabBot is in."""
# This could be used to remove deleted chars so we don't need to check anything
# Except if the char exists in the database...
db_char = await DbChar.get_by_name(ctx.pool, name.strip())
if db_char is None or db_char.user_id == 0:
return await ctx.error("There's no character with that name registered.")
if db_char.world != ctx.world:
return await ctx.error(f"The character **{db_char.name}** is in a different world.")
user = self.bot.get_user(db_char.user_id)
if user is not None:
user_guilds = self.bot.get_user_guilds(user.id)
# Iterating every world where the user is, to check if it wouldn't affect other admins.
for guild in user_guilds:
if guild == ctx.guild:
continue
if self.bot.tracked_worlds.get(guild.id, None) != ctx.world:
continue
author: discord.Member = guild.get_member(ctx.author.id)
if author is None or not author.guild_permissions.manage_guild:
await ctx.error(f"The user of this server is also in another server tracking "
f"**{ctx.world}**, where you are not an admin. You can't alter other servers.")
return
username = "unknown" if user is None else user.display_name
await db_char.update_user(ctx.pool, 0)
await ctx.send("**{0}** was removed successfully from **@{1}**.".format(db_char.name, username))
self.bot.dispatch("character_unregistered", user, db_char, ctx.author)
@checks.server_mod_only()
@checks.tracking_world_only()
@commands.group(invoke_without_command=True, case_insensitive=True, aliases=["huntedlist"])
async def watchlist(self, ctx: NabCtx):
"""Create or manage watchlists.
Watchlists are channels where the online status of selected characters are shown.
        You can create multiple watchlists and add characters and guilds to each one separately.
Try the subcommands."""
await ctx.send("To manage watchlists, use one of the subcommands.\n"
f"Try `{ctx.clean_prefix}help {ctx.invoked_with}`.")
@checks.tracking_world_only()
@checks.channel_mod_somewhere()
@watchlist.command(name="add", aliases=["addplayer", "addchar"], usage="<channel> <name>[,reason]")
async def watchlist_add(self, ctx: NabCtx, channel: discord.TextChannel, *, params):
"""Adds a character to a watchlist.
A reason can be specified by adding it after the character's name, separated by a comma."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist channel.")
if not channel.permissions_for(ctx.author).manage_channels:
return await ctx.error(f"You need `Manage Channel` permissions in {channel.mention} to add entries.")
params = params.split(",", 1)
name = params[0]
reason = None
if len(params) > 1:
reason = params[1]
char = await get_character(ctx.bot, name)
if char is None:
await ctx.error("A character with that name doesn't exist.")
return
world = ctx.world
if char.world != world:
await ctx.error(f"This character is not in **{world}**.")
return
message = await ctx.send(f"Do you want to add **{char.name}** (Level {char.level} {char.vocation}) "
f"to the watchlist {channel.mention}")
confirm = await ctx.react_confirm(message, delete_after=True)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
entry = await watchlist.add_entry(ctx.pool, char.name, False, ctx.author.id, reason)
if entry:
await ctx.success(f"Character **{char.name}** added to the watchlist {channel.mention}.")
else:
await ctx.error(f"**{char.name}** is already registered in {channel.mention}")
@checks.tracking_world_only()
@checks.channel_mod_somewhere()
@watchlist.command(name="addguild", usage="<channel> <name>[,reason]")
async def watchlist_addguild(self, ctx: NabCtx, channel: discord.TextChannel, *, params):
"""Adds an entire guild to a watchlist.
Guilds are displayed in the watchlist as a group."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist channel.")
if not channel.permissions_for(ctx.author).manage_channels:
return await ctx.error(f"You need `Manage Channel` permissions in {channel.mention} to add entries.")
params = params.split(",", 1)
name = params[0]
reason = None
if len(params) > 1:
reason = params[1]
guild = await get_guild(name)
if guild is None:
await ctx.error("There's no guild with that name.")
return
if guild.world != ctx.world:
await ctx.error(f"This guild is not in **{ctx.world}**.")
return
message = await ctx.send(f"Do you want to add the guild **{guild.name}** to the watchlist {channel.mention}?")
confirm = await ctx.react_confirm(message, delete_after=True)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
entry = await watchlist.add_entry(ctx.pool, guild.name, True, ctx.author.id, reason)
if entry:
await ctx.success(f"Guild **{guild.name}** added to the watchlist {channel.mention}.")
else:
await ctx.error(f"**{guild.name}** is already registered in {channel.mention}")
@checks.tracking_world_only()
@checks.channel_mod_somewhere()
@watchlist.command(name="adduser", usage="<channel> <user>[,reason]")
async def watchlist_adduser(self, ctx: NabCtx, channel: discord.TextChannel, *, params):
"""Adds the currently registered characters of a user to the watchlist.
A reason can be specified by adding it after the character's name, separated by a comma."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist channel.")
if not channel.permissions_for(ctx.author).manage_channels:
return await ctx.error(
f"You need `Manage Channel` permissions in {channel.mention} to add entries.")
params = params.split(",", 1)
name = params[0]
reason = None
if len(params) > 1:
reason = params[1]
user = ctx.bot.get_member(name, ctx.guild)
if user is None:
return await ctx.error("I don't see any users with that name or id.")
characters = await DbChar.get_chars_by_user(ctx.pool, user.id, worlds=ctx.world)
if not characters:
await ctx.error(f"This user doesn't have any registered characters in {ctx.world}.")
return
char_list = "\n".join(f"• {c.name}" for c in characters)
message = await ctx.send(f"Do you want to add currently registered characters of `{user}` to this watchlist?\n"
f"{char_list}")
confirm = await ctx.react_confirm(message)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
results = ""
for char in characters:
entry = await watchlist.add_entry(ctx.pool, char.name, False, ctx.author.id, reason)
if entry:
results += f"\n• {char.name}"
if results:
await ctx.success(f"I added the following characters to the list {channel.mention}, "
f"duplicates where skipped:{results}")
else:
await ctx.error("No characters where added, as they were all duplicates.")
@checks.server_mod_only()
@checks.tracking_world_only()
@watchlist.command(name="create")
async def watchlist_create(self, ctx: NabCtx, *, name):
"""Creates a watchlist channel.
Creates a new text channel for the watchlist to be posted.
The watch list shows which characters from it are online. Entire guilds can be added too.
The channel can be renamed at anytime. If the channel is deleted, all its entries are deleted too.
"""
if WATCHLIST_SEPARATOR in name:
await ctx.error(f"Channel name cannot contain the special character **{WATCHLIST_SEPARATOR}**")
return
if not ctx.bot_permissions.manage_channels:
return await ctx.error(f"I need `Manage Channels` permission in the server to use this command.")
message = await ctx.send(f"Do you want to create a new watchlist named `{name}`?")
confirm = await ctx.react_confirm(message, delete_after=True)
if not confirm:
return
try:
overwrites = {
ctx.guild.default_role: discord.PermissionOverwrite(send_messages=False, read_messages=True),
ctx.guild.me: discord.PermissionOverwrite(send_messages=True, read_messages=True, manage_channels=True)
}
channel = await ctx.guild.create_text_channel(name, overwrites=overwrites, category=ctx.channel.category)
except discord.Forbidden:
await ctx.error(f"Sorry, I don't have permissions to create channels.")
except discord.HTTPException:
await ctx.error(f"Something went wrong, the channel name you chose is probably invalid.")
else:
log.info(f"Watchlist created (Channel ID: {channel.id}, Guild ID: {channel.guild.id})")
await ctx.success(f"Channel created successfully: {channel.mention}\n")
await channel.send("This is where I will post a list of online watched characters.\n"
"Edit this channel's permissions to allow the roles you want.\n"
"This channel can be renamed freely.\n"
"Anyone with `Manage Channel` permission here can add entries.\n"
f"Example: {ctx.clean_prefix}{ctx.command.full_parent_name} add {channel.mention} "
f"Galarzaa Fidera\n"
"If this channel is deleted, all related entries will be lost.\n"
"**It is important to not allow anyone to write in here**\n"
"*This message can be deleted now.*")
watchlist = await Watchlist.insert(ctx.pool, ctx.guild.id, channel.id, ctx.author.id)
log.debug(f"{self.tag} Watchlist created | {watchlist}")
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="info", aliases=["details", "reason"])
async def watchlist_info(self, ctx: NabCtx, channel: discord.TextChannel, *, name: str):
"""Shows information about a watchlist entry.
This shows who added the player, when, and if there's a reason why they were added."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, False)
if not entry:
return await ctx.error(f"There's no character with that name registered to {channel.mention}.")
embed = discord.Embed(title=entry.name, url=tibiapy.Character.get_url(entry.name), timestamp=entry.created,
description=f"**Reason:** {entry.reason}" if entry.reason else "No reason provided.")
embed.set_author(name=f"In #{channel}")
author = ctx.guild.get_member(entry.user_id)
if author:
embed.set_footer(text=f"Added by {author.name}#{author.discriminator}",
icon_url=get_user_avatar(author))
await ctx.send(embed=embed)
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="infoguild", aliases=["detailsguild", "reasonguild"])
async def watchlist_infoguild(self, ctx: NabCtx, channel: discord.TextChannel, *, name: str):
""""Shows details about a guild entry in a watchlist.
This shows who added the player, when, and if there's a reason why they were added."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, True)
if not entry:
return await ctx.error(f"There's no guild with that name registered to {channel.mention}.")
embed = discord.Embed(title=entry.name, timestamp=entry.created, url=tibiapy.Guild.get_url(entry.name),
description=f"**Reason:** {entry.reason}" if entry.reason else "No reason provided.")
embed.set_author(name=f"In #{channel}")
author = ctx.guild.get_member(entry.user_id)
if author:
embed.set_footer(text=f"Added by {author.name}#{author.discriminator}",
icon_url=get_user_avatar(author))
await ctx.send(embed=embed)
@checks.tracking_world_only()
@watchlist.command(name="list")
async def watchlist_list(self, ctx: NabCtx, channel: discord.TextChannel):
"""Shows characters belonging to that watchlist.
Note that this lists all characters, not just online characters."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
if not channel.permissions_for(ctx.author).read_messages:
return await ctx.error("You can't see the list of a watchlist you can't see.")
entries = await WatchlistEntry.get_entries_by_channel(ctx.pool, channel.id)
entries = [entry for entry in entries if not entry.is_guild]
if not entries:
return await ctx.error(f"This watchlist has no registered characters.")
pages = Pages(ctx, entries=[f"[{r.name}]({NabChar.get_url(r.name)})" for r in entries])
pages.embed.title = f"Watched Characters in #{channel.name}"
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.error(e)
@checks.tracking_world_only()
@watchlist.command(name="listguilds", aliases=["guilds", "guildlist"])
async def watchlist_list_guild(self, ctx: NabCtx, channel: discord.TextChannel):
"""Shows a list of guilds in the watchlist."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entries = await WatchlistEntry.get_entries_by_channel(ctx.pool, channel.id)
entries = [entry for entry in entries if entry.is_guild]
if not channel.permissions_for(ctx.author).read_messages:
return await ctx.error("You can't see the list of a watchlist you can't see.")
if not entries:
return await ctx.error(f"This watchlist has no registered characters.")
pages = Pages(ctx, entries=[f"[{r.name}]({Guild.get_url(r.name)})" for r in entries])
pages.embed.title = f"Watched Guilds in #{channel.name}"
try:
await pages.paginate()
except CannotPaginate as e:
await ctx.error(e)
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="remove", aliases=["removeplayer", "removechar"])
async def watchlist_remove(self, ctx: NabCtx, channel: discord.TextChannel, *, name):
"""Removes a character from a watchlist."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, False)
if entry is None:
return await ctx.error(f"There's no character with that name registered in {channel.mention}.")
message = await ctx.send(f"Do you want to remove **{name}** from this watchlist?")
confirm = await ctx.react_confirm(message)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
await entry.remove(ctx.pool)
await ctx.success("Character removed from the watchlist.")
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="removeguild")
async def watchlist_removeguild(self, ctx: NabCtx, channel: discord.TextChannel, *, name):
"""Removes a guild from the watchlist."""
if not await Watchlist.get_by_channel_id(ctx.pool, channel.id):
return await ctx.error(f"{channel.mention} is not a watchlist.")
entry = await WatchlistEntry.get_by_name(ctx.pool, channel.id, name, True)
if entry is None:
return await ctx.error(f"There's no guild with that name registered in {channel.mention}.")
message = await ctx.send(f"Do you want to remove **{name}** from this watchlist?")
confirm = await ctx.react_confirm(message)
if confirm is None:
await ctx.send("You took too long!")
return
if not confirm:
await ctx.send("Ok then, guess you changed your mind.")
return
await entry.remove(ctx.pool)
await ctx.success("Guild removed from the watchlist.")
@checks.channel_mod_somewhere()
@checks.tracking_world_only()
@watchlist.command(name="showcount", usage="<channel> <yes|no>")
async def watchlist_showcount(self, ctx: NabCtx, channel: discord.TextChannel, yes_no):
"""Changes whether the online count will be displayed in the watchlist's channel's name or not."""
watchlist = await Watchlist.get_by_channel_id(ctx.pool, channel.id)
if not watchlist:
return await ctx.error(f"{channel.mention} is not a watchlist.")
if yes_no.lower().strip() in ["yes", "true"]:
await watchlist.update_show_count(ctx.pool, True)
await ctx.success("Showing online count is now enabled. The name will be updated on the next cycle.")
elif yes_no.lower().strip() in ["no", "false"]:
await watchlist.update_show_count(ctx.pool, False)
await ctx.success("Showing online count is now disabled. The name will be updated on the next cycle.")
else:
await ctx.error("That's not a valid option, try `yes` or `no`.")
# endregion
# region Methods
async def announce_death(self, char: NabChar, death: Death, levels_lost=0):
"""Announces a level up on the corresponding servers."""
log_msg = f"{self.tag}[{char.world}] announce_death: {char.name} | {death.level} | {death.killer.name}"
# Find killer article (a/an)
killer_article = ""
if not death.by_player:
killer_article = death.killer.name.split(" ", 1)
if killer_article[0] in ["a", "an"] and len(killer_article) > 1:
death.killer.name = killer_article[1]
killer_article = killer_article[0] + " "
else:
killer_article = ""
if death.killer.name.lower() in ["death", "energy", "earth", "fire", "pit battler", "pit berserker",
"pit blackling",
"pit brawler", "pit condemned", "pit demon", "pit destroyer", "pit fiend",
"pit groveller", "pit grunt", "pit lord", "pit maimer", "pit overlord",
"pit reaver",
"pit scourge"] and levels_lost == 0:
# Skip element damage deaths unless player lost a level to avoid spam from arena deaths
# This will cause a small amount of deaths to not be announced but it's probably worth the tradeoff
log.debug(f"{log_msg} | Skipping arena death")
return
guilds = [s for s, w in self.bot.tracked_worlds.items() if w == char.world]
for guild_id in guilds:
guild = self.bot.get_guild(guild_id)
if guild is None:
continue
min_level = await get_server_property(self.bot.pool, guild_id, "announce_level", config.announce_threshold)
if death.level < min_level:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Level under limit")
continue
if guild.get_member(char.owner_id) is None:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Owner not in server")
continue
simple_messages = await get_server_property(self.bot.pool, guild_id, "simple_messages", False)
condition = DeathMessageCondition(char=char, death=death, levels_lost=levels_lost, min_level=min_level)
# Select a message
            if death.by_player:
                message = weighed_choice(death_messages_player, condition) if not simple_messages else SIMPLE_PVP_DEATH
            else:
                message = weighed_choice(death_messages_monster, condition) if not simple_messages else SIMPLE_DEATH
# Format message with death information
message = message.format(**{'name': char.name, 'level': death.level, 'killer': death.killer.name,
'killer_article': killer_article, 'he_she': char.he_she.lower(),
'his_her': char.his_her.lower(), 'him_her': char.him_her.lower()})
# Format extra stylization
message = f"{config.pvpdeath_emoji if death.by_player else config.death_emoji} {format_message(message)}"
channel_id = await get_server_property(self.bot.pool, guild.id, "levels_channel")
channel = self.bot.get_channel_or_top(guild, channel_id)
try:
await channel.send(message[:1].upper() + message[1:])
log.debug(f"{log_msg} | Announced in {guild_id}")
except discord.Forbidden:
log.warning(f"{log_msg} | Forbidden error | Channel {channel.id} | Server {guild.id}")
except discord.HTTPException:
log.exception(f"{log_msg}")
async def announce_level(self, char: NabChar, level: int):
"""Announces a level up on corresponding servers."""
log_msg = f"{self.tag}[{char.world}] announce_level: : {char.name} | {level}"
guilds = [s for s, w in self.bot.tracked_worlds.items() if w == char.world]
for guild_id in guilds:
guild: discord.Guild = self.bot.get_guild(guild_id)
if guild is None:
continue
min_level = await get_server_property(self.bot.pool, guild_id, "announce_level", config.announce_threshold)
if char.level < min_level:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Level under limit")
continue
if guild.get_member(char.owner_id) is None:
log.debug(f"{log_msg} | Guild skipped {guild_id} | Owner not in server")
continue
channel_id = await get_server_property(self.bot.pool, guild.id, "levels_channel")
simple_messages = await get_server_property(self.bot.pool, guild_id, "simple_messages", False)
channel = self.bot.get_channel_or_top(guild, channel_id)
try:
# Select a message
if not simple_messages:
message = weighed_choice(level_messages, LevelCondition(char=char, level=level,
min_level=min_level))
else:
message = SIMPLE_LEVEL
# Format message with level information
message = message.format(**{'name': char.name, 'level': level, 'he_she': char.he_she.lower(),
'his_her': char.his_her.lower(), 'him_her': char.him_her.lower()})
# Format extra stylization
message = f"{config.levelup_emoji} {format_message(message)}"
await channel.send(message)
log.debug(f"{log_msg} | Announced in {guild_id}")
except discord.Forbidden:
log.warning(f"{log_msg} | Forbidden error | Channel {channel.id} | Server {guild.id}")
except discord.HTTPException:
log.exception(f"{log_msg}")
@staticmethod
async def cached_get_guild(guild_name: str, world: str) -> Optional[Guild]:
"""
Used to cache guild info, to avoid fetching the same guild multiple times if they are in multiple lists
"""
if guild_name in GUILD_CACHE[world]:
return GUILD_CACHE[world][guild_name]
guild = await get_guild(guild_name)
GUILD_CACHE[world][guild_name] = guild
return guild
@classmethod
async def check_char_availability(cls, ctx: NabCtx, user_id: int, char: NabChar, worlds: List[str],
check_other=False):
"""Checks the availability of a character and other visible characters optionally.
:param ctx: The command context where this is called.
:param user_id: The id of the user against which the characters will be checked for.
:param char: The character to be checked.
:param worlds: The worlds to filter characters from.
:param check_other: Whether other characters in the same account should be processed to or not.
:return: A named tuple containing the different categories of characters found.
"""
skipped = [] # type: List[OtherCharacter]
"""Characters that were skipped due to being in another world or scheduled for deletion."""
no_user = [] # type: List[DbChar]
"""Characters that belong to users no longer visible to NabBot, most of the time abandoned temporal users."""
same_owner = [] # type: List[DbChar]
"""Characters that already belong to the user."""
different_user = [] # type: List[DbChar]
"""Characters belonging to a different user."""
unregistered = [] # type: List[NabChar]
"""Characters that have never been registered."""
if check_other and not char.hidden:
chars: List[Union[OtherCharacter, NabChar]] = char.other_characters
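            # Replace the OtherCharacter stub matching the requested name with the fully fetched character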
_char = next((x for x in chars if x.name == char.name))
chars[chars.index(_char)] = char
else:
chars = [char]
for char in chars:
if char.world not in worlds or char.deleted:
skipped.append(char)
continue
db_char = await DbChar.get_by_name(ctx.pool, char.name)
if db_char:
owner = ctx.bot.get_user(db_char.user_id)
if owner is None:
no_user.append(db_char)
continue
elif db_char.user_id == user_id:
same_owner.append(db_char)
continue
different_user.append(db_char)
continue
if isinstance(char, OtherCharacter):
char = await get_character(ctx.bot, char.name)
unregistered.append(char)
return CharactersResult._make((skipped, no_user, same_owner, different_user, unregistered,
len(skipped) == len(chars)))
async def compare_deaths(self, char: NabChar):
"""Checks if the player has new deaths.
New deaths are announced if they are not older than 30 minutes."""
if char is None:
return
async with self.bot.pool.acquire() as conn:
db_char = await DbChar.get_by_name(conn, char.name)
if db_char is None:
return
pending_deaths = []
for death in char.deaths:
# Check if we have a death that matches the time
exists = await DbDeath.exists(conn, db_char.id, death.level, death.time)
if exists:
# We already have this death, we're assuming we already have older deaths
break
pending_deaths.append(death)
# Announce and save deaths from older to new
for death in reversed(pending_deaths):
db_death = DbDeath.from_tibiapy(death)
db_death.character_id = db_char.id
await db_death.save(conn)
log_msg = f"{self.tag}[{char.world}] Death detected: {char.name} | {death.level} |" \
f" {death.killer.name}"
                if (dt.datetime.now(dt.timezone.utc) - death.time) >= dt.timedelta(minutes=30):
log.info(f"{log_msg} | Too old to announce.")
# Only try to announce if character has an owner
elif char.owner_id:
log.info(log_msg)
await self.announce_death(char, death, max(death.level - char.level, 0))
async def compare_levels(self, char: Union[NabChar, OnlineCharacter], update_only=False):
"""Compares the character's level with the stored level in database.
This should only be used on online characters or characters that just became offline."""
if char is None:
return
async with self.bot.pool.acquire() as conn:
db_char = await DbChar.get_by_name(conn, char.name)
if not db_char:
return
# OnlineCharacter has no sex attribute, so we get it from database and convert to NabChar
if isinstance(char, OnlineCharacter):
char = NabChar.from_online(char, db_char.sex, db_char.user_id)
level_before = db_char.level
if level_before != char.level:
await db_char.update_level(conn, char.level)
log.debug(f"{self.tag}[{char.world}][compare_level] {char.name}'s level updated:"
f" {level_before} -> {char.level}")
if not (char.level > level_before > 0) or update_only:
return
# Saving level up date in database
await DbLevelUp.insert(conn, db_char.id, char.level)
# Announce the level up
log.info(f"{self.tag}[{char.world}] Level up detected: {char.name} | {char.level}")
# Only try to announce level if char has an owner.
if char.owner_id:
await self.announce_level(char, char.level)
else:
log.debug(f"{self.tag}[{char.world}] Character has no owner, skipping")
@classmethod
async def process_character_assignment(cls, ctx: NabCtx, results: CharactersResult, user: discord.User,
author: discord.User = None, claim=False):
"""Processes the results of a character check and applies the changes
:param ctx: The command context
:param results: The character results
:param user: The user that will get the characters assigned.
:param author: The user that did the action, None if it was the same user.
:param claim: Whether the operation is a claim.
:return: A summary of the applied actions.
"""
recipient = f"**@{user.display_name}**" if author else "you"
author_log = f"| By {author}" if author else ""
reply = ""
if results.different_user and not claim:
first = results.different_user[0].name
reply = f"{ctx.tick(False)} Sorry, a character in that account ({first}) is already registered to " \
f"someone else.\n" \
f"If the character really belongs to {recipient}, `{ctx.clean_prefix}claim {first}` should be used."
return reply
if results.same_owner:
existent_names = [e.name for e in results.same_owner]
reply += f"\n⚫ The following characters were already registered to {recipient}: {join_list(existent_names)}"
if results.new:
added_names = [a.name for a in results.new]
reply += f"\n🔵 The following characters were added to {recipient}: {join_list(added_names)}"
if results.no_user:
updated_names = [r.name for r in results.no_user]
reply += f"\n⚪ The following characters were reassigned to {recipient}: {join_list(updated_names)}"
if results.different_user:
reclaimed_chars = [c.name for c in results.different_user]
reply += f"\n🔴 The following characters were reclaimed by you: {join_list(reclaimed_chars)}"
async with ctx.pool.acquire() as conn:
for char in results.different_user:
await char.update_user(conn, user.id)
log.info(f"{cls.get_tag()} Character Claimed | {char.name} | {user} ({user.id}){author_log}")
for char in results.no_user:
await char.update_user(conn, user.id)
log.info(f"{cls.get_tag()} Character Reassigned | {char.name} | {user} ({user.id}){author_log}")
for char in results.new:
db_char = await DbChar.insert(conn, char.name, char.level, char.vocation.value, user.id, char.world,
char.guild_name)
char.id = db_char.id
log.info(f"{cls.get_tag()} Character Registered | {char.name} | {user} ({user.id}){author_log}")
# If we are claiming, different user characters are also passed
if claim:
results.no_user.extend(results.different_user)
ctx.bot.dispatch("characters_registered", user, results.new, results.no_user, author)
ctx.bot.dispatch("character_change", user.id)
return reply
async def save_highscores(self, world: str, key: str, highscores: tibiapy.Highscores) -> int:
"""Saves the highscores of a world and category to the database."""
if highscores is None:
return 0
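        # Build plain tuples matching the highscores_entry columns so they can be bulk copied below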
rows = [(e.rank, key, world, e.name, e.vocation.value, e.value) for e in highscores.entries]
async with self.bot.pool.acquire() as conn: # type: asyncpg.Connection
async with conn.transaction():
# Delete old records
await conn.execute("DELETE FROM highscores_entry WHERE category = $1 AND world = $2", key, world)
# Add current entries
await conn.copy_records_to_table("highscores_entry", records=rows,
columns=["rank", "category", "world", "name", "vocation", "value"])
log.debug(f"{self.tag}[{world}][save_highscores] {key} | {len(rows)} entries saved")
# Update scan times
await conn.execute("""INSERT INTO highscores(world, category, last_scan)
VALUES($1, $2, $3)
ON CONFLICT (world,category)
DO UPDATE SET last_scan = EXCLUDED.last_scan""",
world, key, dt.datetime.now(dt.timezone.utc))
return len(rows)
# endregion
def cog_unload(self):
log.info(f"{self.tag} Unloading cog")
self.scan_highscores_task.cancel()
self.scan_online_chars_task.cancel()
for k, v in self.world_tasks.items():
v.cancel()
def setup(bot):
bot.add_cog(Tracking(bot))
| apache-2.0 | 2,001,326,355,620,940,500 | 47.838201 | 120 | 0.590449 | false | 4.062533 | false | false | false |
Fiona/AreWeAlone | __main__.py | 1 | 4157 | ##########
# LD 22
# The theme is alone
# it's a dumb theme
# fiona wrote this
##########
# System and Python lib imports
import sys
sys.path += ['.']
# Game engine imports
from myrmidon.myrmidon import MyrmidonGame, MyrmidonProcess
from myrmidon.consts import *
from pygame.locals import *
# Game imports
from consts import *
from media import Media
from gui import GUI
from galaxy import Galaxy
from game_galaxy import Galaxy_background, Solar_system_star, Player_ship, Galaxy_player_ship
class Game(MyrmidonProcess):
# Current state
game_state = 0
# Player state
money = 2000000000
fuel = 0
crew = 0
current_system = "Sol"
current_object = "Earth"
fuel_cost = 1000000000
crew_cost = 500000000
actions_done = {}
home_planet_result = []
first_time = True
# Self explanitory object pointers and lists
fps_text = None
gui = None
media = None
solar_system_objects = []
player_ship = None
background = None
galaxy = None
def execute(self):
# Pre launch set-up
MyrmidonGame.current_fps = 60
self.priority = PRIORITY_MAIN_GAME
# Load all media
self.media = Media()
self.media.load_fonts()
self.media.load_graphics()
self.media.load_audio()
# Debug display
if DEBUG_SHOW_FPS:
self.fps_text = MyrmidonGame.write_text(0.0, 0.0, font = self.media.fonts['basic'], text = 0)
self.fps_text.colour = (1, 1, 1, 1)
self.fps_text.z = -2000
# Set up starting game objects
self.galaxy = Galaxy(self)
self.gui = GUI(self)
self.switch_game_state_to(GAME_STATE_SOLAR_SYSTEM)
self.media.audio['ambient'].play(loops = -1)
while True:
# update debug display
if DEBUG_SHOW_FPS:
self.fps_text.text = "fps: " + str(MyrmidonGame.fps)
yield
def quit_game(self):
sys.exit()
def switch_game_state_to(self, state, gui_state = None):
"""
Pass in a state and this will switch to it.
It will also clean up everying necessary to go out of the
previous game state.
"""
# Undo and destroy everything in the current state
self.gui.destroy_current_gui_state()
col = (1.0, 1.0, 1.0)
if self.game_state == GAME_STATE_SOLAR_SYSTEM:
for x in self.solar_system_objects:
x.signal(S_KILL)
self.solar_system_objects = []
self.player_ship.signal(S_KILL)
self.background.signal(S_KILL)
elif self.game_state == GAME_STATE_GALAXY:
self.player_ship.signal(S_KILL)
self.background.signal(S_KILL)
# Switch to new state
self.game_state = state
# Create everything we require
if state == GAME_STATE_GALAXY:
self.background = Galaxy_background(self)
self.gui.fade_toggle()
self.gui.switch_gui_state_to(GUI_STATE_GALAXY if gui_state is None else gui_state)
self.player_ship = Galaxy_player_ship(self)
elif state == GAME_STATE_SOLAR_SYSTEM:
self.background = Galaxy_background(self)
self.solar_system_objects = []
self.solar_system_objects.append(Solar_system_star(self, self.galaxy.solar_systems[self.current_system]))
self.gui.fade_toggle()
self.gui.switch_gui_state_to(GUI_STATE_SOLAR_SYSTEM if gui_state is None else gui_state)
self.player_ship = Player_ship(self)
def do_home_planet_results(self):
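        # Each queued result is a (callable, args) pair; pop the most recent one and invoke it on the game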
if len(self.home_planet_result) > 0:
result = self.home_planet_result.pop()
result[0](self, *result[1])
if __name__ == '__main__':
MyrmidonGame.screen_resolution = (1024, 768)
MyrmidonGame.lowest_resolution = (1024, 768)
MyrmidonGame.full_screen = False
Game()
| mit | 1,547,291,332,813,934,600 | 27.06993 | 117 | 0.570604 | false | 3.624237 | false | false | false |
BatedUrGonnaDie/salty_bot | modules/helpers/yt_video_link.py | 1 | 1246 | #! /usr/bin/env python3.7
import re
import isodate
import modules.extensions.regexes as regexes
import modules.commands.helpers.time_formatter as time_formatter
ON_ACTION = "PRIVMSG"
def call(salty_inst, c_msg, balancer, **kwargs):
video_ids = re.findall(regexes.YOUTUBE_URL, c_msg["message"])
if not video_ids:
return False, "No video ids"
seen_ids = set()
seen_add = seen_ids.add
video_ids = [x for x in video_ids if not (x in seen_ids or seen_add(x))]
parts = ["snippet", "statistics", "contentDetails"]
final_list = []
success, response = salty_inst.youtube_api.get_videos(video_ids, parts, **kwargs)
if not success:
return False, \
"Error retrieving info from youtube API ({0})".format(response.status_code)
if len(response["items"]) == 0:
return False, "No valid ID's found."
for i in response["items"]:
final_list.append("[{0}] {1} uploaded by {2}. Views: {3}".format(
time_formatter.format_time(isodate.parse_duration(i["contentDetails"]["duration"]).seconds),
i["snippet"]["title"],
i["snippet"]["channelTitle"],
i["statistics"]["viewCount"]
))
return True, " | ".join(final_list)
| mit | -6,777,776,183,059,641,000 | 32.675676 | 104 | 0.623596 | false | 3.509859 | false | false | false |
google/tf_mesh_renderer | mesh_renderer/rasterize_triangles_test.py | 1 | 7681 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
import test_utils
import camera_utils
import rasterize_triangles
class RenderTest(tf.test.TestCase):
def setUp(self):
self.test_data_directory = 'mesh_renderer/test_data/'
tf.reset_default_graph()
self.cube_vertex_positions = tf.constant(
[[-1, -1, 1], [-1, -1, -1], [-1, 1, -1], [-1, 1, 1], [1, -1, 1],
[1, -1, -1], [1, 1, -1], [1, 1, 1]],
dtype=tf.float32)
self.cube_triangles = tf.constant(
[[0, 1, 2], [2, 3, 0], [3, 2, 6], [6, 7, 3], [7, 6, 5], [5, 4, 7],
[4, 5, 1], [1, 0, 4], [5, 6, 2], [2, 1, 5], [7, 4, 0], [0, 3, 7]],
dtype=tf.int32)
self.tf_float = lambda x: tf.constant(x, dtype=tf.float32)
self.image_width = 640
self.image_height = 480
self.perspective = camera_utils.perspective(
self.image_width / self.image_height,
self.tf_float([40.0]), self.tf_float([0.01]),
self.tf_float([10.0]))
def runTriangleTest(self, w_vector, target_image_name):
"""Directly renders a rasterized triangle's barycentric coordinates.
Tests only the kernel (rasterize_triangles_module).
Args:
w_vector: 3 element vector of w components to scale triangle vertices.
target_image_name: image file name to compare result against.
"""
clip_init = np.array(
[[-0.5, -0.5, 0.8, 1.0], [0.0, 0.5, 0.3, 1.0], [0.5, -0.5, 0.3, 1.0]],
dtype=np.float32)
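    # Scaling each vertex by its w component exercises perspective-correct interpolation in the rasterizer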
clip_init = clip_init * np.reshape(
np.array(w_vector, dtype=np.float32), [3, 1])
clip_coordinates = tf.constant(clip_init)
triangles = tf.constant([[0, 1, 2]], dtype=tf.int32)
rendered_coordinates, _, _ = (
rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
clip_coordinates, triangles, self.image_width, self.image_height))
rendered_coordinates = tf.concat(
[rendered_coordinates,
tf.ones([self.image_height, self.image_width, 1])], axis=2)
with self.test_session() as sess:
image = rendered_coordinates.eval()
baseline_image_path = os.path.join(self.test_data_directory,
target_image_name)
test_utils.expect_image_file_and_render_are_near(
self, sess, baseline_image_path, image)
def testRendersSimpleTriangle(self):
self.runTriangleTest((1.0, 1.0, 1.0), 'Simple_Triangle.png')
def testRendersPerspectiveCorrectTriangle(self):
self.runTriangleTest((0.2, 0.5, 2.0), 'Perspective_Corrected_Triangle.png')
def testRendersTwoCubesInBatch(self):
"""Renders a simple cube in two viewpoints to test the python wrapper."""
vertex_rgb = (self.cube_vertex_positions * 0.5 + 0.5)
vertex_rgba = tf.concat([vertex_rgb, tf.ones([8, 1])], axis=1)
center = self.tf_float([[0.0, 0.0, 0.0]])
world_up = self.tf_float([[0.0, 1.0, 0.0]])
look_at_1 = camera_utils.look_at(self.tf_float([[2.0, 3.0, 6.0]]),
center, world_up)
look_at_2 = camera_utils.look_at(self.tf_float([[-3.0, 1.0, 6.0]]),
center, world_up)
projection_1 = tf.matmul(self.perspective, look_at_1)
projection_2 = tf.matmul(self.perspective, look_at_2)
projection = tf.concat([projection_1, projection_2], axis=0)
background_value = [0.0, 0.0, 0.0, 0.0]
rendered = rasterize_triangles.rasterize(
tf.stack([self.cube_vertex_positions, self.cube_vertex_positions]),
tf.stack([vertex_rgba, vertex_rgba]), self.cube_triangles, projection,
self.image_width, self.image_height, background_value)
with self.test_session() as sess:
images = sess.run(rendered, feed_dict={})
for i in (0, 1):
image = images[i, :, :, :]
baseline_image_name = 'Unlit_Cube_{}.png'.format(i)
baseline_image_path = os.path.join(self.test_data_directory,
baseline_image_name)
test_utils.expect_image_file_and_render_are_near(
self, sess, baseline_image_path, image)
def testSimpleTriangleGradientComputation(self):
"""Verifies the Jacobian matrix for a single pixel.
The pixel is in the center of a triangle facing the camera. This makes it
easy to check which entries of the Jacobian might not make sense without
worrying about corner cases.
"""
test_pixel_x = 325
test_pixel_y = 245
clip_coordinates = tf.placeholder(tf.float32, shape=[3, 4])
triangles = tf.constant([[0, 1, 2]], dtype=tf.int32)
barycentric_coordinates, _, _ = (
rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
clip_coordinates, triangles, self.image_width, self.image_height))
pixels_to_compare = barycentric_coordinates[
test_pixel_y:test_pixel_y + 1, test_pixel_x:test_pixel_x + 1, :]
with self.test_session():
ndc_init = np.array(
[[-0.5, -0.5, 0.8, 1.0], [0.0, 0.5, 0.3, 1.0], [0.5, -0.5, 0.3, 1.0]],
dtype=np.float32)
theoretical, numerical = tf.test.compute_gradient(
clip_coordinates, (3, 4),
pixels_to_compare, (1, 1, 3),
x_init_value=ndc_init,
delta=4e-2)
jacobians_match, message = (
test_utils.check_jacobians_are_nearly_equal(
theoretical, numerical, 0.01, 0.0, True))
self.assertTrue(jacobians_match, message)
def testInternalRenderGradientComputation(self):
"""Isolates and verifies the Jacobian matrix for the custom kernel."""
image_height = 21
image_width = 28
clip_coordinates = tf.placeholder(tf.float32, shape=[8, 4])
barycentric_coordinates, _, _ = (
rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
clip_coordinates, self.cube_triangles, image_width, image_height))
with self.test_session():
# Precomputed transformation of the simple cube to normalized device
# coordinates, in order to isolate the rasterization gradient.
# pyformat: disable
ndc_init = np.array(
[[-0.43889722, -0.53184521, 0.85293502, 1.0],
[-0.37635487, 0.22206162, 0.90555805, 1.0],
[-0.22849123, 0.76811147, 0.80993629, 1.0],
[-0.2805393, -0.14092168, 0.71602166, 1.0],
[0.18631913, -0.62634289, 0.88603103, 1.0],
[0.16183566, 0.08129397, 0.93020856, 1.0],
[0.44147962, 0.53497446, 0.85076219, 1.0],
[0.53008741, -0.31276882, 0.77620775, 1.0]],
dtype=np.float32)
# pyformat: enable
theoretical, numerical = tf.test.compute_gradient(
clip_coordinates, (8, 4),
barycentric_coordinates, (image_height, image_width, 3),
x_init_value=ndc_init,
delta=4e-2)
jacobians_match, message = (
test_utils.check_jacobians_are_nearly_equal(
theoretical, numerical, 0.01, 0.01))
self.assertTrue(jacobians_match, message)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -5,375,903,968,414,628,000 | 38.188776 | 80 | 0.622836 | false | 3.151826 | true | false | false |
crask/redisproxy | test/memcache/memcache.py | 1 | 15420 | # Copyright 2012 Mixpanel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
a minimal, pure python client for memcached, kestrel, etc.
Usage example::
import memcache
mc = memcache.Client("127.0.0.1", 11211, timeout=1, connect_timeout=5)
mc.set("some_key", "Some value")
value = mc.get("some_key")
mc.delete("another_key")
'''
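# Illustrative sketch (not part of the original module): besides the
# set/get/delete calls shown above, the same Client also exposes multi_get,
# getex and stats. The host/port and key names below are assumptions.
#
#   mc = Client("127.0.0.1", 11211, timeout=1, connect_timeout=5)
#   values = mc.multi_get(["key_a", "key_b"])   # one entry per key, None on miss
#   info = mc.stats()                           # dict of raw server statistics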
import errno
import re
import socket
class ClientException(Exception):
'''
Raised when the server does something we don't expect
| This does not include `socket errors <http://docs.python.org/library/socket.html#socket.error>`_
| Note that ``ValidationException`` subclasses this so, technically, this is raised on any error
'''
def __init__(self, msg, item=None):
if item is not None:
msg = '%s: %r' % (msg, item) # use repr() to better see special chars
super(ClientException, self).__init__(msg)
class ValidationException(ClientException):
'''
Raised when an invalid parameter is passed to a ``Client`` function
'''
def __init__(self, msg, item):
super(ValidationException, self).__init__(msg, item)
class Client(object):
def __init__(self, host, port, timeout=None, connect_timeout=None):
'''
If ``connect_timeout`` is None, ``timeout`` will be used instead
(for connect and everything else)
'''
self._addr = (host, port)
self._timeout = timeout
self._connect_timeout = connect_timeout
self._socket = None
def __del__(self):
self.close()
def _get_addr(self):
return self._addr
address = property(_get_addr)
''' A read-only (str, int) tuple representing the host operations are performed on '''
def _get_timeout(self):
return self._timeout
def _set_timeout(self, timeout):
# presumably this should fail rarely
# set locally before on socket
# b/c if socket fails, it will probably be closed/reopened
# and will want to use last intended value
self._timeout = timeout
if self._socket:
self._socket.settimeout(timeout)
timeout = property(_get_timeout, _set_timeout)
'''
A float representing the timeout in seconds for reads and sends on the underlying socket
(``connect_timeout`` cannot be changed once init)
Setting a timeout can raise a ``TypeError`` (non-float) or a ``ValueError`` (negative)
'''
def _connect(self):
# buffer needed since we always ask for 4096 bytes at a time
# thus, might read more than the current expected response
# cleared on every reconnect since old bytes are part of old session and can't be reused
self._buffer = ''
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect_timeout = self._connect_timeout if self._connect_timeout is not None else self._timeout
self._socket.settimeout(connect_timeout) # passing None means blocking
try:
self._socket.connect(self._addr)
self._socket.settimeout(self._timeout)
except (socket.error, socket.timeout):
self._socket = None # don't want to hang on to bad socket
raise
def _read(self, length=None):
'''
Return the next length bytes from server
Or, when length is None,
Read a response delimited by \r\n and return it (including \r\n)
(Use latter only when \r\n is unambiguous -- aka for control responses, not data)
'''
result = None
while result is None:
if length: # length = 0 is ambiguous, so don't use
if len(self._buffer) >= length:
result = self._buffer[:length]
self._buffer = self._buffer[length:]
else:
delim_index = self._buffer.find('\r\n')
if delim_index != -1:
result = self._buffer[:delim_index+2]
self._buffer = self._buffer[delim_index+2:]
if result is None:
try:
tmp = self._socket.recv(4096)
except (socket.error, socket.timeout) as e:
self.close()
raise e
if not tmp:
# we handle common close/retry cases in _send_command
# however, this can happen if server suddenly goes away
# (e.g. restarting memcache under sufficient load)
                    raise socket.error('unexpected socket close on recv')
else:
self._buffer += tmp
return result
def _send_command(self, command):
'''
Send command to server and return initial response line
Will reopen socket if it got closed (either locally or by server)
'''
if self._socket: # try to find out if the socket is still open
try:
self._socket.settimeout(0)
self._socket.recv(0)
# if recv didn't raise, then the socket was closed or there is junk
# in the read buffer, either way, close
self.close()
except socket.error as e:
if e.errno == errno.EAGAIN: # this is expected if the socket is still open
self._socket.settimeout(self._timeout)
else:
self.close()
if not self._socket:
self._connect()
self._socket.sendall(command)
return self._read()
# key supports ascii sans space and control chars
    # \x21 is !, right after space, and \x7e is ~, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile('^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, str): # avoid bugs subtle and otherwise
raise ValidationException('key must be str', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
def close(self):
'''
Closes the socket if its open
| Sockets are automatically closed when the ``Client`` object is garbage collected
| Sockets are opened the first time a command is run (such as ``get`` or ``set``)
| Raises socket errors
'''
if self._socket:
self._socket.close()
self._socket = None
def delete(self, key):
'''
Deletes a key/value pair from the server
Raises ``ClientException`` and socket errors
'''
# req - delete <key> [noreply]\r\n
# resp - DELETED\r\n
# or
# NOT_FOUND\r\n
key = self._validate_key(key)
command = 'delete %s\r\n' % key
resp = self._send_command(command)
if resp != 'DELETED\r\n' and resp != 'NOT_FOUND\r\n':
raise ClientException('delete failed', resp)
def get(self, key):
'''
Gets a single value from the server; returns None if there is no value
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
return self.multi_get([key])[0]
def multi_get(self, keys):
'''
Takes a list of keys and returns a list of values
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
if len(keys) == 0:
return []
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
keys = [self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
command = 'get %s\r\n' % ' '.join(keys)
received = {}
resp = self._send_command(command)
error = None
while resp != 'END\r\n':
terms = resp.split()
if len(terms) == 4 and terms[0] == 'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
error = ClientException('received non zero flags')
val = self._read(length+2)[:-2]
if key in received:
error = ClientException('duplicate results from server')
received[key] = val
else:
raise ClientException('get failed', resp)
resp = self._read()
if error is not None:
# this can happen if a memcached instance contains items set by a previous client
# leads to subtle bugs, so fail fast
raise error
if len(received) > len(keys):
raise ClientException('received too many responses')
# memcache client is used by other servers besides memcached.
        # In the case of kestrel, responses coming back do not necessarily
        # match the requests going out. Thus we just ignore the key name
# if there is only one key and return what we received.
if len(keys) == 1 and len(received) == 1:
response = received.values()
else:
response = [received.get(key) for key in keys]
return response
def getex(self, key):
'''
Gets a single value from the server; returns None if there is no value
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
return self.multi_getex([key])[0]
def multi_getex(self, keys):
'''
Takes a list of keys and returns a list of values
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
if len(keys) == 0:
return []
# req - getex <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> <cas unique> <expire time>\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
keys = [self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
            raise ClientException('duplicate keys passed to multi_getex')
command = 'getex %s\r\n' % ' '.join(keys)
received = {}
resp = self._send_command(command)
error = None
while resp != 'END\r\n':
terms = resp.split()
if len(terms) == 6 and terms[0] == 'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
error = ClientException('received non zero flags')
val = self._read(length+2)[:-2]
if key in received:
error = ClientException('duplicate results from server')
received[key] = val
else:
raise ClientException('get failed', resp)
resp = self._read()
if error is not None:
# this can happen if a memcached instance contains items set by a previous client
# leads to subtle bugs, so fail fast
raise error
if len(received) > len(keys):
raise ClientException('received too many responses')
# memcache client is used by other servers besides memcached.
        # In the case of kestrel, responses coming back do not necessarily
        # match the requests going out. Thus we just ignore the key name
# if there is only one key and return what we received.
if len(keys) == 1 and len(received) == 1:
response = received.values()
else:
response = [received.get(key) for key in keys]
return response
def set(self, key, val, exptime=0):
'''
Sets a key to a value on the server with an optional exptime (0 means don't auto-expire)
Raises ``ValidationException``, ``ClientException``, and socket errors
'''
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
key = self._validate_key(key)
# the problem with supporting types is it oftens leads to uneven and confused usage
# some code sites use the type support, others do manual casting to/from str
# worse yet, some sites don't even know what value they are putting in and mis-cast on get
# by uniformly requiring str, the end-use code is much more uniform and legible
if not isinstance(val, str):
raise ValidationException('value must be str', val)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
command = 'set %s 0 %d %d\r\n%s\r\n' % (key, exptime, len(val), val)
resp = self._send_command(command)
if resp != 'STORED\r\n':
raise ClientException('set failed', resp)
def stats(self, additional_args=None):
'''
Runs a stats command on the server.
``additional_args`` are passed verbatim to the server.
See `the memcached wiki <http://code.google.com/p/memcached/wiki/NewCommands#Statistics>`_ for details
or `the spec <https://github.com/memcached/memcached/blob/master/doc/protocol.txt>`_ for even more details
Raises ``ClientException`` and socket errors
'''
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if additional_args is not None:
command = 'stats %s\r\n' % additional_args
else:
command = 'stats\r\n'
resp = self._send_command(command)
result = {}
while resp != 'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == 'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == 'STAT':
result[terms[1]] = terms[2]
else:
raise ClientException('stats failed', resp)
resp = self._read()
return result
| apache-2.0 | -2,465,647,685,932,409,000 | 37.074074 | 114 | 0.569455 | false | 4.273836 | false | false | false |
sterlingbaldwin/acme_workbench | workbench-backend/index/tests.py | 1 | 3781 | import json
from django.test import TestCase
from django.test import Client
class IndexViewTests(TestCase):
    """
    Tests for the Index app views
    """
    fixtures = ['seed.json']
def test_get_index(self):
"""
Test that the index page returns success
"""
client = Client()
response = client.get('/')
self.assertEqual(response.status_code, 200)
def test_get_workbench_no_login(self):
"""
Test that the workbench redirects when not logged in
"""
client = Client()
response = client.get('/workbench')
self.assertEqual(response.status_code, 302)
def test_get_workbench_with_login(self):
"""
Test that the workbench renders when logged in
"""
client = Client()
self.assertTrue(
client.login(
username='test_user',
password='qwertyuiop'))
res = client.get('/workbench')
self.assertEqual(res.status_code, 200)
def test_valid_user_registration(self):
"""
test ability to register new users
"""
client = Client()
res = client.get('/register')
self.assertEqual(res.status_code, 200)
post_data = {
'username': 'test_user1',
'password1': 'test_pass',
'password2': 'test_pass',
'firstname': 'test',
'lastname': 'test',
'email': '[email protected]'
}
res = client.post('/register', post_data)
self.assertEqual(res.status_code, 200)
def test_invalid_user_registration(self):
"""
        test rejection of registration when the two passwords do not match
"""
client = Client()
res = client.get('/register')
self.assertEqual(res.status_code, 200)
post_data = {
'username': 'test_user1',
'password1': 'test_pass',
'password2': 'THIS IS NOT VALID',
'firstname': 'test',
'lastname': 'test',
'email': '[email protected]'
}
res = client.post('/register', post_data)
self.assertNotEqual(res.status_code, 200)
def test_valid_user_login(self):
"""
test users ability to login with valid credentials
"""
client = Client()
post_data = {
'password': 'qwertyuiop',
'username': 'test_user'
}
res = client.post('/login', post_data)
self.assertEqual(res.status_code, 200)
def test_invalid_user_login(self):
"""
Test rejection of invalid credentials
"""
client = Client()
post_data = {
'username': 'test_user',
'password': 'IM A LITTLE TEA POT'
}
res = client.post('/login', post_data)
self.assertEqual(res.status_code, 401)
def test_valid_user_logout(self):
"""
test users ability to logout
"""
client = Client()
post_data = {
'password': 'qwertyuiop',
'username': 'test_user'
}
res = client.post('/login', post_data)
self.assertEqual(res.status_code, 200)
res = client.get('/logout')
self.assertEqual(res.status_code, 200)
self.assertFalse(res.context['request'].user.is_authenticated())
def test_get_user_list(self):
"""
test of the get user list view
"""
client = Client()
url = '/get_user_list/'
expected_result = ['test_user', 'baldwin32']
res = client.get(url)
self.assertEqual(res.status_code, 200)
data = json.loads(res.content)
for user in data:
self.assertTrue(user in expected_result)
| bsd-2-clause | 3,293,972,023,252,596,000 | 28.310078 | 72 | 0.530283 | false | 4.164097 | true | false | false |
lixiangning888/whole_project | lib/cuckoo/common/demux.py | 1 | 7192 | # Copyright (C) 2015 Accuvant, Inc. ([email protected])
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import tempfile
from zipfile import ZipFile
try:
from rarfile import RarFile
HAS_RARFILE = True
except ImportError:
HAS_RARFILE = False
from lib.cuckoo.common.config import Config
from lib.cuckoo.common.objects import File
from lib.cuckoo.common.email_utils import find_attachments_in_email
from lib.cuckoo.common.office.msgextract import Message
def demux_zip(filename, options):
retlist = []
try:
# don't try to extract from office docs
magic = File(filename).get_type()
if "Microsoft" in magic or "Java Jar" in magic:
return retlist
extracted = []
password="infected"
fields = options.split(",")
for field in fields:
try:
key, value = field.split("=", 1)
if key == "password":
password = value
break
except:
pass
with ZipFile(filename, "r") as archive:
infolist = archive.infolist()
for info in infolist:
# avoid obvious bombs
if info.file_size > 100 * 1024 * 1024 or not info.file_size:
continue
# ignore directories
if info.filename.endswith("/"):
continue
base, ext = os.path.splitext(info.filename)
basename = os.path.basename(info.filename)
ext = ext.lower()
if ext == "" and len(basename) and basename[0] == ".":
continue
extensions = ["", ".exe", ".dll", ".pdf", ".doc", ".ppt", ".pptx", ".docx", ".xls", ".msi", ".bin", ".scr"]
for theext in extensions:
if ext == theext:
extracted.append(info.filename)
break
options = Config()
tmp_path = options.cuckoo.get("tmppath", "/tmp")
target_path = os.path.join(tmp_path, "cuckoo-zip-tmp")
if not os.path.exists(target_path):
os.mkdir(target_path)
tmp_dir = tempfile.mkdtemp(prefix='cuckoozip_',dir=target_path)
for extfile in extracted:
try:
retlist.append(archive.extract(extfile, path=tmp_dir, pwd=password))
except:
retlist.append(archive.extract(extfile, path=tmp_dir))
except:
pass
return retlist
def demux_rar(filename, options):
retlist = []
if not HAS_RARFILE:
return retlist
try:
# don't try to auto-extract RAR SFXes
magic = File(filename).get_type()
if "PE32" in magic or "MS-DOS executable" in magic:
return retlist
extracted = []
password="infected"
fields = options.split(",")
for field in fields:
try:
key, value = field.split("=", 1)
if key == "password":
password = value
break
except:
pass
with RarFile(filename, "r") as archive:
infolist = archive.infolist()
for info in infolist:
# avoid obvious bombs
if info.file_size > 100 * 1024 * 1024 or not info.file_size:
continue
# ignore directories
if info.filename.endswith("\\"):
continue
# add some more sanity checking since RarFile invokes an external handler
if "..\\" in info.filename:
continue
base, ext = os.path.splitext(info.filename)
basename = os.path.basename(info.filename)
ext = ext.lower()
if ext == "" and len(basename) and basename[0] == ".":
continue
extensions = ["", ".exe", ".dll", ".pdf", ".doc", ".ppt", ".pptx", ".docx", ".xls", ".msi", ".bin", ".scr"]
for theext in extensions:
if ext == theext:
extracted.append(info.filename)
break
options = Config()
tmp_path = options.cuckoo.get("tmppath", "/tmp")
target_path = os.path.join(tmp_path, "cuckoo-rar-tmp")
if not os.path.exists(target_path):
os.mkdir(target_path)
tmp_dir = tempfile.mkdtemp(prefix='cuckoorar_',dir=target_path)
for extfile in extracted:
# RarFile differs from ZipFile in that extract() doesn't return the path of the extracted file
# so we have to make it up ourselves
try:
archive.extract(extfile, path=tmp_dir, pwd=password)
retlist.append(os.path.join(tmp_dir, extfile.replace("\\", "/")))
except:
archive.extract(extfile, path=tmp_dir)
retlist.append(os.path.join(tmp_dir, extfile.replace("\\", "/")))
except:
pass
return retlist
def demux_email(filename, options):
retlist = []
try:
with open(filename, "rb") as openfile:
buf = openfile.read()
atts = find_attachments_in_email(buf, True)
if atts and len(atts):
for att in atts:
retlist.append(att[0])
except:
pass
return retlist
def demux_msg(filename, options):
retlist = []
try:
retlist = Message(filename).get_extracted_attachments()
except:
pass
return retlist
def demux_sample(filename, package, options):
"""
    If the file is a ZIP, extract its included files and return their file paths
    If the file is an email, extract its attachments and return their file paths (later we'll also extract URLs)
"""
# if a package was specified, then don't do anything special
# this will allow for the ZIP package to be used to analyze binaries with included DLL dependencies
if package:
return [ filename ]
retlist = demux_zip(filename, options)
if not retlist:
retlist = demux_rar(filename, options)
if not retlist:
retlist = demux_email(filename, options)
if not retlist:
retlist = demux_msg(filename, options)
# handle ZIPs/RARs inside extracted files
if retlist:
newretlist = []
for item in retlist:
zipext = demux_zip(item, options)
if zipext:
newretlist.extend(zipext)
else:
rarext = demux_rar(item, options)
if rarext:
newretlist.extend(rarext)
else:
newretlist.append(item)
retlist = newretlist
# if it wasn't a ZIP or an email or we weren't able to obtain anything interesting from either, then just submit the
# original file
if not retlist:
retlist.append(filename)
return retlist
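# Illustrative sketch (not part of the original module): a typical call site.
# The sample path, options string and analyze() helper are assumptions used
# only to show the expected inputs and outputs of demux_sample.
#
#   targets = demux_sample("/tmp/sample.zip", package=None, options="password=infected")
#   for target in targets:
#       analyze(target)  # hypothetical per-file analysis entry point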
| lgpl-3.0 | -6,092,322,796,231,694,000 | 33.411483 | 123 | 0.535595 | false | 4.238067 | false | false | false |
jeromecc/doctoctocbot | src/customer/forms.py | 1 | 3367 | from django import forms
from django.utils.translation import ugettext_lazy as _
from django_countries.fields import CountryField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from customer.models import Customer
from bootstrap_modal_forms.forms import BSModalForm, BSModalModelForm
class CustomerReadOnlyForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(CustomerReadOnlyForm, self).__init__(*args, **kwargs)
try:
customer = Customer.objects.get(user=self.user)
except Customer.DoesNotExist:
return
self.helper = FormHelper()
self.helper.form_id = 'customer-form'
self.helper.form_class = 'form-horizontal'
self.helper.form_method = 'post'
self.helper.form_action = '/customer/'
self.helper.form_group_wrapper_class = 'row'
self.helper.label_class = 'offset-md-1 col-md-1'
self.helper.field_class = 'col-md-8'
self.fields['country'].label = _('Country')
self.helper.add_input(Submit('submit', 'Submit', css_class='btn-primary'))
self.fields['id'].initial=customer.id
self.fields['first_name'].initial=customer.first_name
self.fields['last_name'].initial=customer.last_name
self.fields['company'].initial=customer.company
self.fields['address_1'].initial=customer.address_1
self.fields['address_2'].initial=customer.address_2
self.fields['country'].initial=customer.country
self.fields['email'].initial=customer.email
self.fields['city'].initial=customer.city
#self.fields['state'].initial=customer.state
self.fields['zip_code'].initial=customer.zip_code
id = forms.CharField(
disabled=True,
widget=forms.HiddenInput(),
)
first_name = forms.CharField(
label=_('First name'),
max_length=128,
disabled=True,
)
last_name = forms.CharField(
label=_('Last name'),
max_length=128,
disabled=True,
)
company = forms.CharField(
label=_('Company'),
max_length=128,
disabled=True,
)
address_1 = forms.CharField(
label=_('Address'),
max_length=128,
disabled=True,
)
address_2 = forms.CharField(
label=_('Address'),
max_length=128,
required=False,
disabled=True,
)
country = CountryField(
blank_label=_('(select country)')
).formfield(disabled=True,)
phone = forms.CharField(
label=_('Telephone'),
max_length=32,
required=False,
disabled=True,
)
email = forms.CharField(
label=_('Email'),
max_length=254,
disabled=True,
)
city = forms.CharField(
label=_('City'),
max_length=128,
disabled=True,
)
"""
state = forms.CharField(
label=_('State'),
max_length=128,
required=False,
disabled=True,
)
"""
zip_code = forms.CharField(
label=_('ZIP code'),
max_length=32,
disabled=True,
)
class CustomerModelForm(BSModalModelForm):
class Meta:
model = Customer
exclude = [
'silver_id',
'user',
'state',
]
| mpl-2.0 | 2,314,816,419,920,023,000 | 28.535088 | 82 | 0.591922 | false | 4.008333 | false | false | false |
rain87/pc-health | create_graph.py | 1 | 6855 | #!/usr/bin/python
# coding=utf8
import rrd_config as C
import os
import subprocess
from collections import namedtuple
import gzip
import sys
import itertools
from smart_attributes import names as smart_names
DataSource = namedtuple('DataSource', 'db_fname field legend is_area color stack')
DataSource.__new__.__defaults__ = (False, None, False)
Graph = namedtuple('Graph', 'fname title vlabel ds')
graph_colors = [ '#396AB1', '#DA7C30', '#3E9651', '#CC2529', '#535154', '#6B4C9A', '#922428', '#948B3D', '#00adb5', '#f08a5d' ]
def hdd_ds(field):
return [ DataSource('hdd_' + d + '.rrd', field, d, False) for d in C.drives ]
def traffic_ds(units, direction):
color = itertools.cycle(graph_colors[:3])
field = '_{units}_{direction}'.format(units=units, direction=direction)
return [
DataSource(db_fname='traffic_{dev}.rrd'.format(dev=dev), field=proto + field,
legend='{}-{}'.format(dev, proto.upper()), is_area=True, color=color.next())
for dev, proto in itertools.product(C.network_devices[:-1], ['tcp', 'udp', 'all'])
] + [
DataSource('traffic_eth0.rrd', 'tcp' + field, '', False, ''),
DataSource('traffic_eth0.rrd', 'udp' + field, '', False, '', True),
DataSource('traffic_eth0.rrd', 'all' + field, 'eth0', False, '#000000', True)
]
def connections_ds(direction):
color = itertools.cycle(graph_colors[:2])
return [
DataSource(db_fname='traffic_{dev}.rrd'.format(dev=dev),
field='{proto}_new_{direction}'.format(proto=proto, direction=direction),
legend='{}-{}'.format(dev, proto),
is_area=True, color=color.next())
for dev, proto in itertools.product(C.network_devices, ['tcp', 'udp'])
]
def smart_graph(attr, field, label=None):
sattr = str(attr).zfill(3)
return Graph('smart_' + sattr, '{} ({}-{})'.format(smart_names[attr], sattr, field), label,
[ DataSource('smart_' + hdd + '.rrd', 'a{}_{}'.format(sattr, field), hdd, False) for hdd in C.drives ])
graphs = [
Graph('hdd_rrqm_s', 'Read requests merged per second that were queued to the device', 'rrqm/s', hdd_ds('rrqm_s')),
Graph('hdd_wrqm_s', 'Write requests merged per second that were queued to the device', 'wrqm/s ', hdd_ds('wrqm_s')),
Graph('hdd_r_s', 'Read requests that were issued to the device per second', 'r/s', hdd_ds('r_s')),
Graph('hdd_w_s', 'Write requests that were issued to the device per second', 'w/s', hdd_ds('w_s')),
Graph('hdd_rkB_s', 'Kilobytes read from the device per second', 'rkB/s ', hdd_ds('rkB_s')),
Graph('hdd_wkB_s', 'Kilobytes written to the device per second', 'wkB/s ', hdd_ds('wkB_s')),
Graph('hdd_avgrq_sz', 'Avg size of the requests that were issued to the device', 'sectors', hdd_ds('avgrq_sz')),
Graph('hdd_avgqu_sz', 'Avg queue length of the requests that were issued to the device', 'requests', hdd_ds('avgqu_sz')),
Graph('hdd_await', 'Avg time for I/O requests issued to the device to be served', 'milliseconds', hdd_ds('await')),
Graph('hdd_r_await', 'Avg time for READ requests issued to the device to be served', 'milliseconds', hdd_ds('r_await')),
Graph('hdd_w_await', 'Avg time for WRITE requests issued to the device to be served', 'milliseconds', hdd_ds('w_await')),
Graph('hdd_svctm', '(OBSOLETE) Avg service time for I/O requests that were issued to the device', 'milliseconds', hdd_ds('svctm')),
Graph('hdd_util', 'Percentage of CPU time during which I/O requests were issued to the device', '%', hdd_ds('util')),
Graph('cpu_load', 'CPU loads', '%', [ DataSource('cpu.rrd', field, field, True) for field in C.CpuStat._fields if field != 'idle']),
Graph('cpu_la', 'CPU load averages', None, [ DataSource('cpu_la.rrd', field, field, False) for field in C.CpuLa._fields]),
Graph('traffic_in_bytes', 'Incoming bytes', 'bytes/s', traffic_ds('bytes', 'in')),
Graph('traffic_out_bytes', 'Outgoing bytes', 'bytes/s', traffic_ds('bytes', 'out')),
Graph('traffic_in_pckts', 'Incoming packets', 'packets/s', traffic_ds('pckts', 'in')),
Graph('traffic_out_pckts', 'Outgoing packets', 'packets/s', traffic_ds('pckts', 'out')),
Graph('incoming_connections', 'Incoming connections', 'count', connections_ds('in')),
Graph('outgoing_connections', 'Outgoing connections', 'count', connections_ds('out')),
Graph('sockets', 'Sockets', 'sockets',
[ DataSource('sockets.rrd', field, field, True) for field in 'estab closed orphaned synrecv tw tw2'.split(' ') ] +\
[ DataSource('sockets.rrd', field, field, False) for field in 'total tcp ports'.split(' ') ]),
Graph('ups_v', 'Voltages', 'volts', [ DataSource('ups.rrd', 'LINEV', 'AC line', False), DataSource('ups.rrd', 'BATTV', 'UPS battery', False)]),
Graph('ups_load', 'Load and charge', '%', [ DataSource('ups.rrd', 'LOADPCT', 'UPS load', False), DataSource('ups.rrd', 'BCHARGE', 'Battery charge', False) ]),
Graph('ups_misc', 'Misc UPS stats', None, [ DataSource('ups.rrd', 'TIMELEFT', 'Time on battery left', False),
DataSource('ups.rrd', 'NUMXFERS', 'Number of transfers', False), DataSource('ups.rrd', 'TONBATT', 'Time on battery', False),
DataSource('ups.rrd', 'CUMONBATT', 'CUMONBATT', False) ]),
smart_graph(194, 'raw', '°C'),
smart_graph(1, 'cur'),
smart_graph(3, 'raw', 'msec'),
smart_graph(4, 'raw'),
smart_graph(7, 'cur'),
smart_graph(9, 'raw'),
smart_graph(11, 'raw'),
smart_graph(12, 'raw'),
smart_graph(195, 'cur'),
]
graph_intervals = {
'hourly': 'now-1h',
'optimal': 'now-400m',
'daily': 'now-1d',
'weekly': 'now-1w',
'monthly': 'now-30d',
'yearly': 'now-1y'
}
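# Illustrative note (not in the original script): the interval name is read
# from the command line at the bottom of this file, so an assumed invocation
#
#   python create_graph.py daily
#
# would render every graph as <fname>_daily.svgz under C.graph_path; the
# argument must be one of the graph_intervals keys above.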
def plot(graph, interval):
assert interval in graph_intervals
cmd = ['rrdtool', 'graph', '-' , '--start', graph_intervals[interval], '--title', graph.title, '--imgformat', 'SVG',
'--lower-limit', '0' ]
if graph.vlabel:
cmd += ['--vertical-label', graph.vlabel]
ds_list = graph.ds if isinstance(graph.ds, list) else [graph.ds]
color = itertools.cycle(graph_colors)
for i in range(0, len(ds_list)):
ds = ds_list[i]
cmd.append('DEF:v{i}={db}:{field}:AVERAGE'.format(i=i, db=os.path.join(C.rrd_path, ds.db_fname), field=ds.field))
cmd.append('{type}:v{i}{color}:{legend}{stack}'.format(
type='AREA' if ds.is_area else 'LINE1', i=i, color=color.next() if ds.color is None else ds.color,
legend=ds.legend, stack=':STACK' if ds.is_area or ds.stack else ''))
#print(' '.join(cmd))
rrd = subprocess.Popen(cmd, stdout=subprocess.PIPE)
gz = gzip.open(os.path.join(C.graph_path, graph.fname + '_' + interval + '.svgz'), 'wb')
while rrd.poll() is None:
gz.write(rrd.stdout.read())
gz.close()
assert rrd.poll() == 0
for graph in graphs:
plot(graph, sys.argv[1])
| mit | -2,728,556,869,338,496,500 | 54.723577 | 162 | 0.626204 | false | 3.101357 | false | false | false |
dendory/chartjs | sample3.py | 1 | 5887 | # This script parses the CSV files gathered from the Canadian Weather Service and makes charts
import chartjs
import csv
# We will cover these years
startyear = 1981
endyear = 2012
# We will make charts for 3 major Canadian cities
cities = [
{'name': "Montreal", 'fillColor': "rgba(100,50,200,0.25)", 'strokeColor': "rgba(100,50,200,0.75)", 'pointColor': "rgba(100,50,200,0.75)"},
{'name': "Toronto", 'fillColor': "rgba(200,100,100,0.25)", 'strokeColor': "rgba(200,100,100,0.75)", 'pointColor': "rgba(200,100,100,0.75)"},
{'name': "Vancouver", 'fillColor': "rgba(100,200,100,0.25)", 'strokeColor': "rgba(100,200,100,0.75)", 'pointColor': "rgba(100,200,100,0.75)"},
]
# 3 of the charts will cover all 12 months
months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
# The first chart will show median temperatures over the years
global_chart = chartjs.chart("Temperature medians for 1981 - 2012 in Celsius<br><font color='#6432C8'>Montreal</font>, <font color='#B1846B'>Toronto</font>, <font color='#6CCB6C'>Vancouver</font>", "Line", 1200, 600)
global_chart.set_params(JSinline = False)
# Each city will have a chart showing each month's median temperature
montreal_chart = chartjs.chart("Montreal temperatures for 2012 in Celsius", "Line", 390, 200)
montreal_chart.canvas = "montreal"
montreal_chart.set_labels(months)
toronto_chart = chartjs.chart("Toronto temperatures for 2012 in Celsius", "Line", 390, 200)
toronto_chart.canvas = "toronto"
toronto_chart.set_labels(months)
vancouver_chart = chartjs.chart("Vancouver temperatures for 2012 in Celsius", "Line", 390, 200)
vancouver_chart.canvas = "vancouver"
vancouver_chart.set_labels(months)
_startyear = startyear
# Loop one city at a time
for city in cities:
city_data = []
years = []
medians = []
# Loop one year at a time
while startyear < endyear+1:
# Open CSV file for the city and year
f = open("data/" + city['name'] + "/" + str(startyear) + ".csv", 'r', newline='')
next(f)
csvreader = csv.reader(f, delimiter=',')
totalvalues = 0
values = 0
monthly_values = 0
monthly_totalvalues = 0
current_month = '01'
# Parse the CSV line by line
for line in csvreader:
try:
# For each line, we add the value and the number of values
values += float(line[9])
totalvalues += 1
except:
pass
try:
# For year 2012, we also record monthly medians for the city charts
if startyear == 2012:
# If the month column changed, that means we must compute the median for last month
if str(line[2]) != str(current_month):
# All the added values, divided by the number of values
median = "{0:.2f}".format(float(monthly_values / monthly_totalvalues))
# Append the median to the current city's list
city_data.append(median)
# Set the current month to the new value
current_month = str(line[2])
# Reset variables to 0
monthly_values = 0
monthly_totalvalues = 0
# For each line in this month, add the value and add the number of values
monthly_values += float(line[9])
monthly_totalvalues += 1
except:
pass
# For the last month, we need to calculate the median one last time
if monthly_totalvalues > 0:
median = "{0:.2f}".format(float(monthly_values / monthly_totalvalues))
city_data.append(median)
# After reading all the lines in the file, calculate the median for the year
if totalvalues > 0:
median = "{0:.2f}".format(float(values / totalvalues))
medians.append(median)
else:
medians.append(0)
# Append the current year to the labels
years.append(startyear)
# Create all of the city charts
if startyear == 2012:
if city['name'] == "Montreal":
montreal_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
montreal_chart.add_dataset(city_data)
if city['name'] == "Toronto":
toronto_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
toronto_chart.add_dataset(city_data)
if city['name'] == "Vancouver":
vancouver_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
vancouver_chart.add_dataset(city_data)
startyear += 1
# Create the global chart
global_chart.set_labels(years)
global_chart.set_params(fillColor = city['fillColor'], strokeColor = city['strokeColor'], pointColor = city['pointColor'])
global_chart.add_dataset(medians)
startyear = _startyear
f.close()
# Create the HTML page and the 4 charts individually
f = open("sample3.html", 'w')
output = """<!doctype html>
<html>
<head>
<title>Temperature charts</title>
{1}
</head>
<body>
<div style="width: {2}px; height: {3}px; max-width: 99%" class="chartjs">
<center><h2>{0}</h2></center>
""".format(global_chart.title, global_chart.js, str(global_chart.width), str(global_chart.height))
output += global_chart.make_chart_canvas()
output += " <table width='99%'><tr><td><center><h4>" + montreal_chart.title + "</h4></center>"
output += montreal_chart.make_chart_canvas()
output += " </td><td><center><h4>" + toronto_chart.title + "</h4></center>"
output += toronto_chart.make_chart_canvas()
output += " </td><td><center><h4>" + vancouver_chart.title + "</h4></center>"
output += vancouver_chart.make_chart_canvas()
output += """ </td></tr></table>
<script>
window.onload = function()
{"""
output += global_chart.make_chart_onload()
output += montreal_chart.make_chart_onload()
output += toronto_chart.make_chart_onload()
output += vancouver_chart.make_chart_onload()
output += """ }
</script>
</div>
</body>
</html>
"""
f.write(output)
f.close()
| mit | 831,402,048,602,100,100 | 39.167832 | 216 | 0.660608 | false | 2.950877 | false | false | false |
MSHallOpenSoft/plotter | GUI_final.py | 1 | 59129 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'GUI_final.ui'
#
# Created: Thu Mar 19 22:03:17 2015
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1396, 727)
MainWindow.setStyleSheet(_fromUtf8("QFrame{\n"
"border:none;\n"
"}\n"
"QStatusBar{ \n"
"background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.33, stop:0 rgba(255, 255, 255, 255), stop:0.125 rgba(155, 174, 198, 255), stop:0.318182 rgba(104, 117, 133, 255), stop:0.534091 rgba(65, 73, 83, 255), stop:0.875 rgba(42, 47, 54, 255)); }\n"
" QMainWindow{\n"
" background-image: url(:/img/Icons/rsz_back1.jpg); border:none;\n"
" background-color:qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255));\n"
" text-align: center; }\n"
" QGroupBox{ \n"
"background-color: qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255)); }\n"
" QTabWidget{\n"
" background-color: qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255)); }\n"
" QDockWidget{\n"
" background-color:#737373;\n"
" border:none;\n"
" padding:0px; \n"
"}\n"
" QSlider::groove:horizontal {\n"
" background:red;\n"
" height: 15px;\n"
" position: absolute; \n"
"left: 4px; \n"
"right: 4px; }\n"
" QSlider::handle:horizontal {\n"
" height:20px;\n"
" width: 10px; \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.477, x2:0, y2:0, stop:0.125 rgba(42, 47, 54, 255), stop:0.465909 rgba(65, 73, 83, 255), stop:0.681818 rgba(104, 117, 133, 255), stop:0.875 rgba(155, 174, 198, 255), stop:1 rgba(255, 255, 255, 255));\n"
" margin: -4px; }\n"
" QSlider::handle:hover:horizontal { \n"
"height:20px;\n"
" width: 10px;\n"
" background:qlineargradient(spread:pad, x1:0, y1:0.477, x2:0, y2:0, stop:0.125 rgba(91, 95, 100, 255), stop:0.465909 rgba(122, 132, 146, 255), stop:0.681818 rgba(141, 153, 167, 255), stop:0.875 rgba(181, 195, 212, 255), stop:1 rgba(255, 255, 255, 255));\n"
" margin: -4px;\n"
" }\n"
" QSlider::add-page:horizontal { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" }\n"
" QSlider::sub-page:horizontal { \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255)) ;\n"
" }\n"
" QToolButton{ \n"
"position: relative;\n"
" border: none; \n"
"outline:none;\n"
" color: black;\n"
" padding: 0px;\n"
" border-radius: 2px;\n"
" font-size: 22px;\n"
" }\n"
" QToolButton:hover:!pressed{ \n"
"position: relative;\n"
" border: none; \n"
"outline:none; \n"
"background-color:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" color: white;\n"
" padding: 0px;\n"
" border-radius: 2px;\n"
" font-size: 22px; \n"
"}\n"
" QPushButton{ \n"
"position: relative;\n"
" border:none;\n"
" outline:none; \n"
"background-color: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" color: white;\n"
" padding: 6px 20px; \n"
"border-radius: 2px;\n"
" font-size: 20px;\n"
" }\n"
" QPushButton:hover:!pressed{ \n"
"position: relative;\n"
" border: none; \n"
"outline:none;\n"
" background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" color: white; \n"
"padding: 6px 20px; \n"
"border-radius: 2px;\n"
" font-size:20px; \n"
"} \n"
"QComboBox { \n"
"border: none; \n"
"padding: 1px 18px 1px 3px; \n"
"min-width: 6em;\n"
" }\n"
" QComboBox, QComboBox:drop-down \n"
"{\n"
" background:qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" }\n"
" QComboBox:on, QComboBox:drop-down:on { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255)); \n"
"}\n"
" QComboBox:on {\n"
" padding-top: 3px;\n"
" padding-left: 4px; \n"
"} \n"
"QComboBox::drop-down{\n"
" subcontrol-origin: padding; \n"
"subcontrol-position: top right;\n"
" width: 15px; \n"
"border-left-width: 1px; \n"
"border-left-color: darkgray; \n"
"border-left-style: solid;\n"
" }\n"
" QComboBox::down-arrow { \n"
"image:url(:/arrow/Icons/arrow-new.png);\n"
" } \n"
"QComboBox::down-arrow:on {\n"
" top: 1px;\n"
" left: 1px;\n"
" }\n"
" QMenu {\n"
" background-color: qlineargradient(spread:pad, x1:1, y1:1, x2:0.483136, y2:0.466, stop:0 rgba(219, 219, 219, 255), stop:1 rgba(255, 255, 255, 255)); \n"
"border: none; \n"
"}\n"
" QMenu::item {\n"
" background-color: transparent;\n"
" }\n"
" QMenu::item:selected {\n"
" background-color:rgb(120, 255, 13);\n"
" }\n"
" QMenuBar { \n"
"background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:1, stop:0 #DBDBDB, stop:1 rgba(255, 255, 255, 255)) } QMenuBar::item {\n"
" spacing: 3px;\n"
" padding: 1px 4px; \n"
"background: transparent; \n"
"border-radius: 2px;\n"
" }\n"
" QMenuBar::item:selected {\n"
" background:#737373;\n"
" }\n"
" QMenuBar::item:pressed \n"
"{ background: #414953; \n"
"} \n"
"QTableWidget{ \n"
"background:qlineargradient(spread:pad, x1:1, y1:1, x2:0, y2:0, stop:0 #DBDBDB, stop:1 rgba(255, 255, 255, 255));\n"
" border:1px solid rgb(171, 173, 179);\n"
" }\n"
" QTextEdit{ \n"
"background:qlineargradient(spread:pad, x1:1, y1:1, x2:0, y2:0, stop:0 #DBDBDB, stop:1 rgba(255, 255, 255, 255)); \n"
"}\n"
" QScrollBar:horizontal {\n"
" border: none; background: #DBDBDB; height: 15px; margin: 0px 20px 0 20px; \n"
"}\n"
" QScrollBar::handle:horizontal { background:qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" min-width: 20px;\n"
" }\n"
" QScrollBar::handle:horizontal:hover { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" min-width: 20px;\n"
" } \n"
"QScrollBar::add-line:horizontal {\n"
" border: none;\n"
" background:#DBDBDB; \n"
"width: 20px;\n"
" subcontrol-position: right;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::sub-line:horizontal {\n"
" border:none; \n"
"background:#DBDBDB; \n"
"width: 20px;\n"
" subcontrol-position: left;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::add-line:horizontal:hover:!pressed { \n"
"border: none;\n"
" background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255)); \n"
"width: 20px;\n"
" subcontrol-position: right; \n"
"subcontrol-origin: margin; \n"
"}\n"
" QScrollBar::sub-line:horizontal:hover:!pressed { \n"
"border:none;\n"
" background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" width: 20px; \n"
"subcontrol-position: left;\n"
" subcontrol-origin: margin; \n"
"}\n"
" QScrollBar::left-arrow:horizontal{\n"
" image: url(:/arrow/Icons/left-arrow.png);\n"
" }\n"
" QScrollBar::right-arrow:horizontal{\n"
" image: url(:/arrow/Icons/right-arrow.png);\n"
" }\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: #DBDBDB;\n"
" width: 15px; \n"
"margin: 0px 20px 0 20px; \n"
"} \n"
"QScrollBar::handle:vertical { background:qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" min-height: 20px; }\n"
" QScrollBar::handle:vertical:hover { background:qlineargradient(spread:pad, x1:0, y1:1, x2:0, y2:0.0802727, stop:0 rgba(255, 255, 255, 255), stop:0.0397727 rgba(222, 255, 196, 255), stop:0.176136 rgba(168, 255, 99, 255), stop:0.642045 rgba(127, 200, 70, 255));\n"
" min-height: 15px;\n"
" }\n"
" QScrollBar::add-line:vertical {\n"
" border: none;\n"
" background:#DBDBDB; \n"
"height: 20px;\n"
" subcontrol-position: bottom; \n"
"subcontrol-origin: margin; \n"
"}\n"
" QScrollBar::sub-line:vertical {\n"
" border:none; \n"
"background:#DBDBDB; \n"
"height: 20px;\n"
" subcontrol-position: top;\n"
" subcontrol-origin: margin;\n"
" } \n"
"QScrollBar::add-line:vertical:hover:!pressed { \n"
"border: none; \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" height: 20px;\n"
" subcontrol-position:bottom; \n"
"subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::sub-line:vertical:hover:!pressed { b\n"
"order:none; \n"
"background: qlineargradient(spread:pad, x1:0, y1:0.664, x2:0, y2:0, stop:0.357955 rgba(89, 189, 9, 255), stop:0.801136 rgba(120, 255, 13, 255), stop:0.9375 rgba(175, 255, 111, 255), stop:1 rgba(255, 255, 255, 255));\n"
" height: 20px; \n"
"subcontrol-position:top;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::up-arrow:vertical{ \n"
"image: url(:/arrow/Icons/up-arrow.png); \n"
"} \n"
"QScrollBar::down-arrow:vertical{\n"
" image: url(:/arrow/Icons/down-arrow.png);\n"
" }"))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout_3 = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.frame_2 = QtGui.QFrame(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_2.sizePolicy().hasHeightForWidth())
self.frame_2.setSizePolicy(sizePolicy)
self.frame_2.setMinimumSize(QtCore.QSize(20, 0))
self.frame_2.setStyleSheet(_fromUtf8(""))
self.frame_2.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtGui.QFrame.Raised)
self.frame_2.setObjectName(_fromUtf8("frame_2"))
self.horizontalLayout_4 = QtGui.QHBoxLayout(self.frame_2)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.pushButton = QtGui.QPushButton(self.frame_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton.sizePolicy().hasHeightForWidth())
self.pushButton.setSizePolicy(sizePolicy)
self.pushButton.setMaximumSize(QtCore.QSize(20, 50))
self.pushButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/arrow/Icons/double-right.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton.setIcon(icon)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.verticalLayout_5.addWidget(self.pushButton)
self.horizontalLayout_4.addLayout(self.verticalLayout_5)
self.horizontalLayout_3.addWidget(self.frame_2)
self.frame = QtGui.QFrame(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
self.frame.setSizePolicy(sizePolicy)
self.frame.setMaximumSize(QtCore.QSize(320, 16777215))
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.frame)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setSpacing(6)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.pushButton_3 = QtGui.QPushButton(self.frame)
self.pushButton_3.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_3.sizePolicy().hasHeightForWidth())
self.pushButton_3.setSizePolicy(sizePolicy)
self.pushButton_3.setMinimumSize(QtCore.QSize(50, 0))
self.pushButton_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_3.setStyleSheet(_fromUtf8(""))
self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
self.horizontalLayout_5.addWidget(self.pushButton_3)
self.toolButton_7 = QtGui.QToolButton(self.frame)
self.toolButton_7.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_7.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_7.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_7.setStyleSheet(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Add-New-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_7.setIcon(icon1)
self.toolButton_7.setIconSize(QtCore.QSize(40, 30))
self.toolButton_7.setObjectName(_fromUtf8("toolButton_7"))
self.horizontalLayout_5.addWidget(self.toolButton_7)
self.toolButton_9 = QtGui.QToolButton(self.frame)
self.toolButton_9.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_9.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_9.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_9.setStyleSheet(_fromUtf8(""))
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Minus-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_9.setIcon(icon2)
self.toolButton_9.setIconSize(QtCore.QSize(40, 30))
self.toolButton_9.setObjectName(_fromUtf8("toolButton_9"))
self.horizontalLayout_5.addWidget(self.toolButton_9)
self.toolButton_8 = QtGui.QToolButton(self.frame)
self.toolButton_8.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_8.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_8.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_8.setStyleSheet(_fromUtf8(""))
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Folder-Open-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_8.setIcon(icon3)
self.toolButton_8.setIconSize(QtCore.QSize(40, 30))
self.toolButton_8.setObjectName(_fromUtf8("toolButton_8"))
self.horizontalLayout_5.addWidget(self.toolButton_8)
self.toolButton_5 = QtGui.QToolButton(self.frame)
self.toolButton_5.setMinimumSize(QtCore.QSize(10, 0))
self.toolButton_5.setMaximumSize(QtCore.QSize(35, 16777215))
self.toolButton_5.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_5.setStyleSheet(_fromUtf8(""))
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Save-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_5.setIcon(icon4)
self.toolButton_5.setIconSize(QtCore.QSize(40, 30))
self.toolButton_5.setObjectName(_fromUtf8("toolButton_5"))
self.horizontalLayout_5.addWidget(self.toolButton_5)
spacerItem = QtGui.QSpacerItem(20, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem)
self.verticalLayout_3.addLayout(self.horizontalLayout_5)
self.tableWidget = QtGui.QTableWidget(self.frame)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidget.sizePolicy().hasHeightForWidth())
self.tableWidget.setSizePolicy(sizePolicy)
self.tableWidget.setMinimumSize(QtCore.QSize(300, 0))
self.tableWidget.setStyleSheet(_fromUtf8(""))
self.tableWidget.setObjectName(_fromUtf8("tableWidget"))
self.tableWidget.setColumnCount(3)
self.tableWidget.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, item)
self.verticalLayout_3.addWidget(self.tableWidget)
self.pushButton_21 = QtGui.QPushButton(self.frame)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_21.sizePolicy().hasHeightForWidth())
self.pushButton_21.setSizePolicy(sizePolicy)
self.pushButton_21.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_21.setStyleSheet(_fromUtf8(""))
self.pushButton_21.setObjectName(_fromUtf8("pushButton_21"))
self.verticalLayout_3.addWidget(self.pushButton_21)
self.horizontalLayout_3.addWidget(self.frame)
self.verticalLayout_6 = QtGui.QVBoxLayout()
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.widget = QtGui.QWidget(self.centralwidget)
self.widget.setStyleSheet(_fromUtf8(""))
self.widget.setObjectName(_fromUtf8("widget"))
self.verticalLayout_6.addWidget(self.widget)
self.horizontalLayout_3.addLayout(self.verticalLayout_6)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1396, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuEdit = QtGui.QMenu(self.menubar)
self.menuEdit.setObjectName(_fromUtf8("menuEdit"))
self.menuView = QtGui.QMenu(self.menubar)
self.menuView.setObjectName(_fromUtf8("menuView"))
self.menuAbout = QtGui.QMenu(self.menubar)
self.menuAbout.setObjectName(_fromUtf8("menuAbout"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.dockWidget = QtGui.QDockWidget(MainWindow)
self.dockWidget.setMinimumSize(QtCore.QSize(320, 91))
self.dockWidget.setObjectName(_fromUtf8("dockWidget"))
self.dockWidgetContents = QtGui.QWidget()
self.dockWidgetContents.setObjectName(_fromUtf8("dockWidgetContents"))
self.gridLayout = QtGui.QGridLayout(self.dockWidgetContents)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.comboBox_5 = QtGui.QComboBox(self.dockWidgetContents)
self.comboBox_5.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_5.setObjectName(_fromUtf8("comboBox_5"))
self.comboBox_5.addItem(_fromUtf8(""))
self.comboBox_5.addItem(_fromUtf8(""))
self.comboBox_5.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBox_5, 0, 0, 1, 1)
self.textEdit = QtGui.QTextEdit(self.dockWidgetContents)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit.sizePolicy().hasHeightForWidth())
self.textEdit.setSizePolicy(sizePolicy)
self.textEdit.setMinimumSize(QtCore.QSize(0, 20))
self.textEdit.setObjectName(_fromUtf8("textEdit"))
self.gridLayout.addWidget(self.textEdit, 0, 1, 1, 1)
self.comboBox_6 = QtGui.QComboBox(self.dockWidgetContents)
self.comboBox_6.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_6.setObjectName(_fromUtf8("comboBox_6"))
self.comboBox_6.addItem(_fromUtf8(""))
self.comboBox_6.addItem(_fromUtf8(""))
self.comboBox_6.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBox_6, 1, 0, 1, 1)
self.textEdit_2 = QtGui.QTextEdit(self.dockWidgetContents)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit_2.sizePolicy().hasHeightForWidth())
self.textEdit_2.setSizePolicy(sizePolicy)
self.textEdit_2.setMinimumSize(QtCore.QSize(0, 20))
self.textEdit_2.setObjectName(_fromUtf8("textEdit_2"))
self.gridLayout.addWidget(self.textEdit_2, 1, 1, 1, 1)
self.dockWidget.setWidget(self.dockWidgetContents)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget)
self.dockWidget_2 = QtGui.QDockWidget(MainWindow)
self.dockWidget_2.setMinimumSize(QtCore.QSize(427, 324))
self.dockWidget_2.setStyleSheet(_fromUtf8(""))
self.dockWidget_2.setObjectName(_fromUtf8("dockWidget_2"))
self.dockWidgetContents_2 = QtGui.QWidget()
self.dockWidgetContents_2.setObjectName(_fromUtf8("dockWidgetContents_2"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.dockWidgetContents_2)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.groupBox_2 = QtGui.QGroupBox(self.dockWidgetContents_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setMinimumSize(QtCore.QSize(0, 50))
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.horizontalLayout_12 = QtGui.QHBoxLayout(self.groupBox_2)
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.comboBox = QtGui.QComboBox(self.groupBox_2)
self.comboBox.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox.setStyleSheet(_fromUtf8(""))
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.horizontalLayout_7.addWidget(self.comboBox)
self.comboBox_3 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_3.setObjectName(_fromUtf8("comboBox_3"))
self.comboBox_3.addItem(_fromUtf8(""))
self.comboBox_3.addItem(_fromUtf8(""))
self.comboBox_3.addItem(_fromUtf8(""))
self.horizontalLayout_7.addWidget(self.comboBox_3)
self.comboBox_2 = QtGui.QComboBox(self.groupBox_2)
self.comboBox_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_2.setLayoutDirection(QtCore.Qt.LeftToRight)
self.comboBox_2.setAutoFillBackground(False)
self.comboBox_2.setFrame(True)
self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.addItem(_fromUtf8(""))
self.comboBox_2.addItem(_fromUtf8(""))
self.horizontalLayout_7.addWidget(self.comboBox_2)
self.horizontalLayout_12.addLayout(self.horizontalLayout_7)
self.verticalLayout_4.addWidget(self.groupBox_2)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.tabWidget_2 = QtGui.QTabWidget(self.dockWidgetContents_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tabWidget_2.sizePolicy().hasHeightForWidth())
self.tabWidget_2.setSizePolicy(sizePolicy)
self.tabWidget_2.setMinimumSize(QtCore.QSize(310, 0))
self.tabWidget_2.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.tabWidget_2.setAutoFillBackground(False)
self.tabWidget_2.setStyleSheet(_fromUtf8(""))
self.tabWidget_2.setTabPosition(QtGui.QTabWidget.South)
self.tabWidget_2.setTabShape(QtGui.QTabWidget.Rounded)
self.tabWidget_2.setIconSize(QtCore.QSize(16, 25))
self.tabWidget_2.setElideMode(QtCore.Qt.ElideNone)
self.tabWidget_2.setTabsClosable(False)
self.tabWidget_2.setMovable(True)
self.tabWidget_2.setObjectName(_fromUtf8("tabWidget_2"))
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.verticalLayout_8 = QtGui.QVBoxLayout(self.tab_3)
self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8"))
self.groupBox = QtGui.QGroupBox(self.tab_3)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout_7 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.label = QtGui.QLabel(self.groupBox)
self.label.setObjectName(_fromUtf8("label"))
        self.horizontalLayout_8.addWidget(self.label)
self.horizontalSlider = QtGui.QSlider(self.groupBox)
self.horizontalSlider.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.horizontalSlider.setStyleSheet(_fromUtf8(""))
self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider.setObjectName(_fromUtf8("horizontalSlider"))
self.horizontalLayout_8.addWidget(self.horizontalSlider)
self.verticalLayout_7.addLayout(self.horizontalLayout_8)
self.horizontalLayout_9 = QtGui.QHBoxLayout()
self.horizontalLayout_9.setSizeConstraint(QtGui.QLayout.SetNoConstraint)
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
self.label_2 = QtGui.QLabel(self.groupBox)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_9.addWidget(self.label_2)
self.label_3 = QtGui.QLabel(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
self.label_3.setSizePolicy(sizePolicy)
self.label_3.setMinimumSize(QtCore.QSize(20, 20))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_9.addWidget(self.label_3)
self.label_4 = QtGui.QLabel(self.groupBox)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.horizontalLayout_9.addWidget(self.label_4)
self.radioButton = QtGui.QRadioButton(self.groupBox)
self.radioButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.radioButton.setObjectName(_fromUtf8("radioButton"))
self.horizontalLayout_9.addWidget(self.radioButton)
self.radioButton_3 = QtGui.QRadioButton(self.groupBox)
self.radioButton_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.radioButton_3.setObjectName(_fromUtf8("radioButton_3"))
self.horizontalLayout_9.addWidget(self.radioButton_3)
self.radioButton_2 = QtGui.QRadioButton(self.groupBox)
self.radioButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.radioButton_2.setObjectName(_fromUtf8("radioButton_2"))
self.horizontalLayout_9.addWidget(self.radioButton_2)
self.verticalLayout_7.addLayout(self.horizontalLayout_9)
self.horizontalLayout_10 = QtGui.QHBoxLayout()
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.label_5 = QtGui.QLabel(self.groupBox)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.horizontalLayout_10.addWidget(self.label_5)
self.comboBox_4 = QtGui.QComboBox(self.groupBox)
self.comboBox_4.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.comboBox_4.setObjectName(_fromUtf8("comboBox_4"))
self.horizontalLayout_10.addWidget(self.comboBox_4)
self.label_6 = QtGui.QLabel(self.groupBox)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout_10.addWidget(self.label_6)
self.horizontalSlider_2 = QtGui.QSlider(self.groupBox)
self.horizontalSlider_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.horizontalSlider_2.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_2.setObjectName(_fromUtf8("horizontalSlider_2"))
self.horizontalLayout_10.addWidget(self.horizontalSlider_2)
self.verticalLayout_7.addLayout(self.horizontalLayout_10)
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.label_7 = QtGui.QLabel(self.groupBox)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.horizontalLayout_11.addWidget(self.label_7)
self.horizontalSlider_3 = QtGui.QSlider(self.groupBox)
self.horizontalSlider_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.horizontalSlider_3.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_3.setObjectName(_fromUtf8("horizontalSlider_3"))
self.horizontalLayout_11.addWidget(self.horizontalSlider_3)
self.verticalLayout_7.addLayout(self.horizontalLayout_11)
self.verticalLayout_8.addWidget(self.groupBox)
self.tabWidget_2.addTab(self.tab_3, _fromUtf8(""))
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.horizontalLayout_13 = QtGui.QHBoxLayout(self.tab_4)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.tabWidget_3 = QtGui.QTabWidget(self.tab_4)
self.tabWidget_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.tabWidget_3.setStyleSheet(_fromUtf8(""))
self.tabWidget_3.setUsesScrollButtons(False)
self.tabWidget_3.setMovable(True)
self.tabWidget_3.setObjectName(_fromUtf8("tabWidget_3"))
self.tab_5 = QtGui.QWidget()
self.tab_5.setObjectName(_fromUtf8("tab_5"))
self.tabWidget_3.addTab(self.tab_5, _fromUtf8(""))
self.tab_6 = QtGui.QWidget()
self.tab_6.setObjectName(_fromUtf8("tab_6"))
self.tabWidget_3.addTab(self.tab_6, _fromUtf8(""))
self.horizontalLayout_13.addWidget(self.tabWidget_3)
self.tabWidget_2.addTab(self.tab_4, _fromUtf8(""))
self.verticalLayout_2.addWidget(self.tabWidget_2)
self.verticalLayout_4.addLayout(self.verticalLayout_2)
self.dockWidget_2.setWidget(self.dockWidgetContents_2)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget_2)
self.dockWidget_3 = QtGui.QDockWidget(MainWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dockWidget_3.sizePolicy().hasHeightForWidth())
self.dockWidget_3.setSizePolicy(sizePolicy)
self.dockWidget_3.setMinimumSize(QtCore.QSize(489, 70))
self.dockWidget_3.setMaximumSize(QtCore.QSize(524287, 524287))
self.dockWidget_3.setObjectName(_fromUtf8("dockWidget_3"))
self.dockWidgetContents_3 = QtGui.QWidget()
self.dockWidgetContents_3.setObjectName(_fromUtf8("dockWidgetContents_3"))
self.horizontalLayout = QtGui.QHBoxLayout(self.dockWidgetContents_3)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.toolButton_17 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_17.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_17.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_17.setStyleSheet(_fromUtf8(""))
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Item-New-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_17.setIcon(icon5)
self.toolButton_17.setIconSize(QtCore.QSize(30, 30))
self.toolButton_17.setObjectName(_fromUtf8("toolButton_17"))
self.horizontalLayout.addWidget(self.toolButton_17)
self.toolButton_10 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_10.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_10.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_10.setStyleSheet(_fromUtf8(""))
self.toolButton_10.setIcon(icon3)
self.toolButton_10.setIconSize(QtCore.QSize(30, 30))
self.toolButton_10.setObjectName(_fromUtf8("toolButton_10"))
self.horizontalLayout.addWidget(self.toolButton_10)
self.toolButton_20 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_20.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_20.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_20.setStyleSheet(_fromUtf8(""))
self.toolButton_20.setIcon(icon4)
self.toolButton_20.setIconSize(QtCore.QSize(30, 30))
self.toolButton_20.setObjectName(_fromUtf8("toolButton_20"))
self.horizontalLayout.addWidget(self.toolButton_20)
self.toolButton_18 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_18.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_18.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_18.setStyleSheet(_fromUtf8(""))
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Open-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_18.setIcon(icon6)
self.toolButton_18.setIconSize(QtCore.QSize(30, 30))
self.toolButton_18.setObjectName(_fromUtf8("toolButton_18"))
self.horizontalLayout.addWidget(self.toolButton_18)
self.line_4 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_4.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_4.setFrameShape(QtGui.QFrame.VLine)
self.line_4.setFrameShadow(QtGui.QFrame.Sunken)
self.line_4.setObjectName(_fromUtf8("line_4"))
self.horizontalLayout.addWidget(self.line_4)
self.toolButton_4 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_4.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_4.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_4.setStyleSheet(_fromUtf8(""))
self.toolButton_4.setIcon(icon1)
self.toolButton_4.setIconSize(QtCore.QSize(30, 30))
self.toolButton_4.setObjectName(_fromUtf8("toolButton_4"))
self.horizontalLayout.addWidget(self.toolButton_4)
self.toolButton_3 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_3.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_3.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_3.setStyleSheet(_fromUtf8(""))
self.toolButton_3.setIcon(icon2)
self.toolButton_3.setIconSize(QtCore.QSize(30, 30))
self.toolButton_3.setObjectName(_fromUtf8("toolButton_3"))
self.horizontalLayout.addWidget(self.toolButton_3)
self.line_5 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_5.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_5.setFrameShape(QtGui.QFrame.VLine)
self.line_5.setFrameShadow(QtGui.QFrame.Sunken)
self.line_5.setObjectName(_fromUtf8("line_5"))
self.horizontalLayout.addWidget(self.line_5)
self.checkBox = QtGui.QCheckBox(self.dockWidgetContents_3)
self.checkBox.setMaximumSize(QtCore.QSize(20, 25))
self.checkBox.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.checkBox.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox.setText(_fromUtf8(""))
self.checkBox.setObjectName(_fromUtf8("checkBox"))
self.horizontalLayout.addWidget(self.checkBox)
self.Example = QtGui.QToolButton(self.dockWidgetContents_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Example.sizePolicy().hasHeightForWidth())
self.Example.setSizePolicy(sizePolicy)
self.Example.setMaximumSize(QtCore.QSize(16777215, 25))
self.Example.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.Example.setStyleSheet(_fromUtf8("QToolButton{\n"
"font-size: 15px;\n"
"color:rgb(255, 255, 255);\n"
"}"))
self.Example.setIconSize(QtCore.QSize(24, 24))
self.Example.setObjectName(_fromUtf8("Example"))
self.horizontalLayout.addWidget(self.Example)
self.line_6 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_6.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_6.setFrameShape(QtGui.QFrame.VLine)
self.line_6.setFrameShadow(QtGui.QFrame.Sunken)
self.line_6.setObjectName(_fromUtf8("line_6"))
self.horizontalLayout.addWidget(self.line_6)
self.toolButton = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton.setStyleSheet(_fromUtf8(""))
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Board-Pin-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton.setIcon(icon7)
self.toolButton.setIconSize(QtCore.QSize(30, 30))
self.toolButton.setObjectName(_fromUtf8("toolButton"))
self.horizontalLayout.addWidget(self.toolButton)
self.toolButton_25 = QtGui.QToolButton(self.dockWidgetContents_3)
self.toolButton_25.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_25.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_25.setStyleSheet(_fromUtf8(""))
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Table-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_25.setIcon(icon8)
self.toolButton_25.setIconSize(QtCore.QSize(30, 30))
self.toolButton_25.setObjectName(_fromUtf8("toolButton_25"))
self.horizontalLayout.addWidget(self.toolButton_25)
self.line_8 = QtGui.QFrame(self.dockWidgetContents_3)
self.line_8.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_8.setFrameShape(QtGui.QFrame.VLine)
self.line_8.setFrameShadow(QtGui.QFrame.Sunken)
self.line_8.setObjectName(_fromUtf8("line_8"))
self.horizontalLayout.addWidget(self.line_8)
self.dockWidget_3.setWidget(self.dockWidgetContents_3)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(4), self.dockWidget_3)
self.dockWidget_4 = QtGui.QDockWidget(MainWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dockWidget_4.sizePolicy().hasHeightForWidth())
self.dockWidget_4.setSizePolicy(sizePolicy)
self.dockWidget_4.setMinimumSize(QtCore.QSize(624, 70))
self.dockWidget_4.setMaximumSize(QtCore.QSize(524287, 70))
self.dockWidget_4.setObjectName(_fromUtf8("dockWidget_4"))
self.dockWidgetContents_4 = QtGui.QWidget()
self.dockWidgetContents_4.setObjectName(_fromUtf8("dockWidgetContents_4"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.dockWidgetContents_4)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.line_7 = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line_7.sizePolicy().hasHeightForWidth())
self.line_7.setSizePolicy(sizePolicy)
self.line_7.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_7.setLineWidth(1)
self.line_7.setMidLineWidth(1)
self.line_7.setFrameShape(QtGui.QFrame.VLine)
self.line_7.setFrameShadow(QtGui.QFrame.Sunken)
self.line_7.setObjectName(_fromUtf8("line_7"))
self.horizontalLayout_2.addWidget(self.line_7)
self.toolButton_19 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_19.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_19.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_19.setStyleSheet(_fromUtf8(""))
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Keyboard-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_19.setIcon(icon9)
self.toolButton_19.setIconSize(QtCore.QSize(35, 35))
self.toolButton_19.setObjectName(_fromUtf8("toolButton_19"))
self.horizontalLayout_2.addWidget(self.toolButton_19)
self.toolButton_23 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_23.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_23.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_23.setStyleSheet(_fromUtf8(""))
icon10 = QtGui.QIcon()
icon10.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Printer-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_23.setIcon(icon10)
self.toolButton_23.setIconSize(QtCore.QSize(35, 35))
self.toolButton_23.setObjectName(_fromUtf8("toolButton_23"))
self.horizontalLayout_2.addWidget(self.toolButton_23)
self.toolButton_2 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_2.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_2.setIcon(icon4)
self.toolButton_2.setIconSize(QtCore.QSize(35, 35))
self.toolButton_2.setObjectName(_fromUtf8("toolButton_2"))
self.horizontalLayout_2.addWidget(self.toolButton_2)
self.toolButton_24 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_24.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_24.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_24.setStyleSheet(_fromUtf8(""))
icon11 = QtGui.QIcon()
icon11.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Camera-02-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_24.setIcon(icon11)
self.toolButton_24.setIconSize(QtCore.QSize(35, 35))
self.toolButton_24.setObjectName(_fromUtf8("toolButton_24"))
self.horizontalLayout_2.addWidget(self.toolButton_24)
self.toolButton_22 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_22.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_22.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_22.setStyleSheet(_fromUtf8(""))
icon12 = QtGui.QIcon()
icon12.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Facebook-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_22.setIcon(icon12)
self.toolButton_22.setIconSize(QtCore.QSize(35, 35))
self.toolButton_22.setObjectName(_fromUtf8("toolButton_22"))
self.horizontalLayout_2.addWidget(self.toolButton_22)
self.line_3 = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line_3.sizePolicy().hasHeightForWidth())
self.line_3.setSizePolicy(sizePolicy)
self.line_3.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_3.setFrameShape(QtGui.QFrame.VLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName(_fromUtf8("line_3"))
self.horizontalLayout_2.addWidget(self.line_3)
self.toolButton_21 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_21.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_21.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_21.setStyleSheet(_fromUtf8(""))
icon13 = QtGui.QIcon()
icon13.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Media-Play-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_21.setIcon(icon13)
self.toolButton_21.setIconSize(QtCore.QSize(35, 35))
self.toolButton_21.setObjectName(_fromUtf8("toolButton_21"))
self.horizontalLayout_2.addWidget(self.toolButton_21)
self.toolButton_16 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_16.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_16.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_16.setStyleSheet(_fromUtf8(""))
icon14 = QtGui.QIcon()
icon14.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Stop-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_16.setIcon(icon14)
self.toolButton_16.setIconSize(QtCore.QSize(35, 35))
self.toolButton_16.setObjectName(_fromUtf8("toolButton_16"))
self.horizontalLayout_2.addWidget(self.toolButton_16)
self.line_2 = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line_2.sizePolicy().hasHeightForWidth())
self.line_2.setSizePolicy(sizePolicy)
self.line_2.setMaximumSize(QtCore.QSize(16777215, 20))
self.line_2.setFrameShape(QtGui.QFrame.VLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
self.horizontalLayout_2.addWidget(self.line_2)
self.toolButton_15 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_15.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_15.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_15.setStyleSheet(_fromUtf8(""))
icon15 = QtGui.QIcon()
icon15.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Column-Selection-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_15.setIcon(icon15)
self.toolButton_15.setIconSize(QtCore.QSize(35, 35))
self.toolButton_15.setObjectName(_fromUtf8("toolButton_15"))
self.horizontalLayout_2.addWidget(self.toolButton_15)
self.toolButton_14 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_14.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_14.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_14.setStyleSheet(_fromUtf8(""))
icon16 = QtGui.QIcon()
icon16.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Slash-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_14.setIcon(icon16)
self.toolButton_14.setIconSize(QtCore.QSize(35, 35))
self.toolButton_14.setObjectName(_fromUtf8("toolButton_14"))
self.horizontalLayout_2.addWidget(self.toolButton_14)
self.line = QtGui.QFrame(self.dockWidgetContents_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.line.sizePolicy().hasHeightForWidth())
self.line.setSizePolicy(sizePolicy)
self.line.setMaximumSize(QtCore.QSize(16777215, 20))
self.line.setFrameShape(QtGui.QFrame.VLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.horizontalLayout_2.addWidget(self.line)
self.toolButton_13 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_13.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_13.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_13.setStyleSheet(_fromUtf8(""))
icon17 = QtGui.QIcon()
icon17.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Magnifying-Glass-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_13.setIcon(icon17)
self.toolButton_13.setIconSize(QtCore.QSize(35, 35))
self.toolButton_13.setObjectName(_fromUtf8("toolButton_13"))
self.horizontalLayout_2.addWidget(self.toolButton_13)
self.toolButton_12 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_12.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_12.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_12.setStyleSheet(_fromUtf8(""))
icon18 = QtGui.QIcon()
icon18.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Zoom-In-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_12.setIcon(icon18)
self.toolButton_12.setIconSize(QtCore.QSize(35, 35))
self.toolButton_12.setObjectName(_fromUtf8("toolButton_12"))
self.horizontalLayout_2.addWidget(self.toolButton_12)
self.toolButton_11 = QtGui.QToolButton(self.dockWidgetContents_4)
self.toolButton_11.setMaximumSize(QtCore.QSize(16777215, 25))
self.toolButton_11.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.toolButton_11.setAutoFillBackground(False)
self.toolButton_11.setStyleSheet(_fromUtf8(""))
icon19 = QtGui.QIcon()
icon19.addPixmap(QtGui.QPixmap(_fromUtf8("Icons/Zoom-Out-48.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_11.setIcon(icon19)
self.toolButton_11.setIconSize(QtCore.QSize(35, 35))
self.toolButton_11.setObjectName(_fromUtf8("toolButton_11"))
self.horizontalLayout_2.addWidget(self.toolButton_11)
self.dockWidget_4.setWidget(self.dockWidgetContents_4)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(4), self.dockWidget_4)
self.dockWidget_5 = QtGui.QDockWidget(MainWindow)
self.dockWidget_5.setObjectName(_fromUtf8("dockWidget_5"))
self.dockWidgetContents_5 = QtGui.QWidget()
self.dockWidgetContents_5.setObjectName(_fromUtf8("dockWidgetContents_5"))
self.verticalLayout = QtGui.QVBoxLayout(self.dockWidgetContents_5)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.pushButton_2 = QtGui.QPushButton(self.dockWidgetContents_5)
self.pushButton_2.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.verticalLayout.addWidget(self.pushButton_2)
self.dockWidget_5.setWidget(self.dockWidgetContents_5)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget_5)
self.retranslateUi(MainWindow)
self.tabWidget_2.setCurrentIndex(1)
self.tabWidget_3.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.pushButton_3.setText(_translate("MainWindow", "Hide", None))
self.toolButton_7.setToolTip(_translate("MainWindow", "Add", None))
self.toolButton_7.setText(_translate("MainWindow", "...", None))
self.toolButton_9.setToolTip(_translate("MainWindow", "Remove", None))
self.toolButton_9.setText(_translate("MainWindow", "...", None))
self.toolButton_8.setToolTip(_translate("MainWindow", "Import Coordinates", None))
self.toolButton_8.setText(_translate("MainWindow", "...", None))
self.toolButton_5.setToolTip(_translate("MainWindow", "Export Coordinates", None))
self.toolButton_5.setText(_translate("MainWindow", "...", None))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "x", None))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "y", None))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "z", None))
self.pushButton_21.setText(_translate("MainWindow", "Redraw", None))
self.toolButton_17.setToolTip(_translate("MainWindow", "Create New", None))
self.toolButton_17.setText(_translate("MainWindow", "...", None))
self.toolButton_10.setToolTip(_translate("MainWindow", "Open Existing", None))
self.toolButton_10.setText(_translate("MainWindow", "...", None))
self.toolButton_20.setToolTip(_translate("MainWindow", "Save to Drive", None))
self.toolButton_20.setText(_translate("MainWindow", "...", None))
self.toolButton_18.setToolTip(_translate("MainWindow", "Load New", None))
self.toolButton_18.setText(_translate("MainWindow", "...", None))
self.toolButton_4.setToolTip(_translate("MainWindow", "Add new Equation", None))
self.toolButton_4.setText(_translate("MainWindow", "...", None))
self.toolButton_3.setToolTip(_translate("MainWindow", "Remove this Equation", None))
self.toolButton_3.setText(_translate("MainWindow", "...", None))
self.checkBox.setToolTip(_translate("MainWindow", "Show on Graph", None))
self.Example.setToolTip(_translate("MainWindow", "Illustrate with an Example", None))
self.Example.setWhatsThis(_translate("MainWindow", "Example", None))
self.Example.setText(_translate("MainWindow", "Example", None))
self.toolButton.setToolTip(_translate("MainWindow", "Always on Top", None))
self.toolButton.setText(_translate("MainWindow", "...", None))
self.toolButton_25.setToolTip(_translate("MainWindow", "Show/Hide Table", None))
self.toolButton_25.setText(_translate("MainWindow", "...", None))
self.toolButton_19.setToolTip(_translate("MainWindow", "Keyboard", None))
self.toolButton_19.setText(_translate("MainWindow", "...", None))
self.toolButton_23.setToolTip(_translate("MainWindow", "Print graph", None))
self.toolButton_23.setText(_translate("MainWindow", "...", None))
self.toolButton_2.setToolTip(_translate("MainWindow", "Save Graph", None))
self.toolButton_2.setText(_translate("MainWindow", "...", None))
self.toolButton_24.setToolTip(_translate("MainWindow", "Take a screenshot", None))
self.toolButton_24.setText(_translate("MainWindow", "...", None))
self.toolButton_22.setToolTip(_translate("MainWindow", "Go to our FaceBook page", None))
self.toolButton_22.setText(_translate("MainWindow", "...", None))
self.toolButton_21.setToolTip(_translate("MainWindow", "Play", None))
self.toolButton_21.setText(_translate("MainWindow", "...", None))
self.toolButton_16.setToolTip(_translate("MainWindow", "Stop", None))
self.toolButton_16.setText(_translate("MainWindow", "...", None))
self.toolButton_15.setToolTip(_translate("MainWindow", "Disable Anti-Aliasing", None))
self.toolButton_15.setText(_translate("MainWindow", "...", None))
self.toolButton_14.setToolTip(_translate("MainWindow", "Enable Anti-Aliasing", None))
self.toolButton_14.setText(_translate("MainWindow", "...", None))
self.toolButton_13.setToolTip(_translate("MainWindow", "Zoom All", None))
self.toolButton_13.setText(_translate("MainWindow", "...", None))
self.toolButton_12.setToolTip(_translate("MainWindow", "Zoom in", None))
self.toolButton_12.setText(_translate("MainWindow", "...", None))
self.toolButton_11.setToolTip(_translate("MainWindow", "Zoom out", None))
self.toolButton_11.setText(_translate("MainWindow", "...", None))
self.pushButton_2.setText(_translate("MainWindow", "PushButton", None))
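# --- Hedged usage sketch (not part of the pyuic4-generated code above) ---
# Assuming the enclosing class is the usual Ui_MainWindow produced by pyuic4,
# a generated UI module like this is normally driven by a small launcher:
#
#     if __name__ == "__main__":
#         import sys
#         app = QtGui.QApplication(sys.argv)
#         window = QtGui.QMainWindow()
#         ui = Ui_MainWindow()
#         ui.setupUi(window)
#         window.show()
#         sys.exit(app.exec_())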
| gpl-2.0 | -4,134,462,761,231,112,000 | 56.969608 | 268 | 0.698557 | false | 3.313477 | false | false | false |
ifp-uiuc/do-neural-networks-learn-faus-iccvw-2015 | ck_plus/cnn_ad/train.py | 1 | 3697 |
import argparse
import os
import sys
sys.path.append('..')
import numpy
from anna import util
from anna.datasets import supervised_dataset
#from anna.datasets.supervised_data_loader import SupervisedDataLoaderCrossVal
import data_fold_loader
import data_paths
from model import SupervisedModel
parser = argparse.ArgumentParser(prog='train_cnn_with_dropout_\
data_augmentation',
description='Script to train convolutional \
network from random initialization with \
dropout and data augmentation.')
parser.add_argument("-s", "--split", default='0', help='Testing split of CK+ \
to use. (0-9)')
parser.add_argument("--checkpoint_dir", default='./', help='Location to save \
model checkpoint files.')
args = parser.parse_args()
print('Start')
test_split = int(args.split)
if test_split < 0 or test_split > 9:
raise Exception("Testing Split must be in range 0-9.")
print('Using CK+ testing split: {}'.format(test_split))
checkpoint_dir = os.path.join(args.checkpoint_dir, 'checkpoints_'+str(test_split))
print('Checkpoint dir: {}'.format(checkpoint_dir))
pid = os.getpid()
print('PID: {}'.format(pid))
f = open('pid_'+str(test_split), 'wb')
f.write(str(pid)+'\n')
f.close()
# Load model
model = SupervisedModel('experiment', './', learning_rate=1e-2)
monitor = util.Monitor(model,
checkpoint_directory=checkpoint_dir,
save_steps=1000)
# Add dropout to fully-connected layer
model.fc4.dropout = 0.5
model._compile()
# Loading CK+ dataset
print('Loading Data')
#supervised_data_loader = SupervisedDataLoaderCrossVal(
# data_paths.ck_plus_data_path)
#train_data_container = supervised_data_loader.load('train', train_split)
#test_data_container = supervised_data_loader.load('test', train_split)
train_folds, val_fold, _ = data_fold_loader.load_fold_assignment(test_fold=test_split)
X_train, y_train = data_fold_loader.load_folds(data_paths.ck_plus_data_path, train_folds)
X_val, y_val = data_fold_loader.load_folds(data_paths.ck_plus_data_path, [val_fold])
X_test, y_test = data_fold_loader.load_folds(data_paths.ck_plus_data_path, [test_split])
X_train = numpy.float32(X_train)
X_train /= 255.0
X_train *= 2.0
X_val = numpy.float32(X_val)
X_val /= 255.0
X_val *= 2.0
X_test = numpy.float32(X_test)
X_test /= 255.0
X_test *= 2.0
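# Pixel intensities are rescaled from [0, 255] into [0.0, 2.0] before augmentation.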
train_dataset = supervised_dataset.SupervisedDataset(X_train, y_train)
val_dataset = supervised_dataset.SupervisedDataset(X_val, y_val)
train_iterator = train_dataset.iterator(
mode='random_uniform', batch_size=64, num_batches=31000)
val_iterator = val_dataset.iterator(
mode='random_uniform', batch_size=64, num_batches=31000)
# Do data augmentation (crops, flips, rotations, scales, intensity)
data_augmenter = util.DataAugmenter2(crop_shape=(96, 96),
flip=True, gray_on=True)
normer = util.Normer3(filter_size=5, num_channels=1)
module_list_train = [data_augmenter, normer]
module_list_val = [normer]
preprocessor_train = util.Preprocessor(module_list_train)
preprocessor_val = util.Preprocessor(module_list_val)
print('Training Model')
for x_batch, y_batch in train_iterator:
x_batch = preprocessor_train.run(x_batch)
monitor.start()
log_prob, accuracy = model.train(x_batch, y_batch)
monitor.stop(1-accuracy)
if monitor.test:
monitor.start()
x_val_batch, y_val_batch = val_iterator.next()
x_val_batch = preprocessor_val.run(x_val_batch)
val_accuracy = model.eval(x_val_batch, y_val_batch)
monitor.stop_test(1-val_accuracy)
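# --- Hedged sketch: evaluating the held-out test fold ---
# X_test / y_test are loaded and rescaled above but never used in this script.
# Something along these lines (same model.eval / preprocessor API as above;
# the 'sequential' iterator mode name is a guess) would report test accuracy:
#
#     test_dataset = supervised_dataset.SupervisedDataset(X_test, y_test)
#     test_iterator = test_dataset.iterator(mode='sequential', batch_size=64)
#     accuracies = []
#     for x_test_batch, y_test_batch in test_iterator:
#         x_test_batch = preprocessor_val.run(x_test_batch)
#         accuracies.append(model.eval(x_test_batch, y_test_batch))
#     print('Test accuracy: {}'.format(numpy.mean(accuracies)))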
| bsd-3-clause | -8,914,378,663,641,386,000 | 33.551402 | 89 | 0.675683 | false | 3.242982 | true | false | false |
radjkarl/imgProcessor | imgProcessor/imgSignal.py | 1 | 10847 |
from __future__ import division
from __future__ import print_function
import numpy as np
import cv2
from imgProcessor.imgIO import imread
from imgProcessor.measure.FitHistogramPeaks import FitHistogramPeaks
from fancytools.math.findXAt import findXAt
# from scipy.optimize.minpack import curve_fit
MAX_SIZE = 700
def scaleSignalCut(img, ratio, nbins=100):
'''
    scale img, cutting the top and bottom 'ratio' fraction (e.g. 0.01 = 1 %) of its histogram
'''
start, stop = scaleSignalCutParams(img, ratio, nbins)
img = img - start
img /= (stop - start)
return img
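# Hedged usage sketch for the percentile-style clipping above (hypothetical data):
#
#     import numpy as np
#     arr = np.random.rand(512, 512) * 1000          # made-up intensity image
#     scaled = scaleSignalCut(arr, ratio=0.01)        # roughly rescaled to [0, 1]
#     lo, hi = scaleSignalCutParams(arr, ratio=0.01)  # the cut points themselves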
def _toSize(img):
fac = MAX_SIZE / max(img.shape)
if fac < 1:
try:
return cv2.resize(img, (0, 0), fx=fac, fy=fac,
interpolation=cv2.INTER_AREA)
except cv2.error:
# cv2.error: ..\..\..\modules\imgproc\src\imgwarp.cpp:3235: error:
# (-215) dsize.area() > 0 in function cv::resize
return cv2.resize(img.T, (0, 0), fx=fac, fy=fac,
interpolation=cv2.INTER_AREA).T
return img
def _histogramAndCorrBinPos(img, nbins=100):
try:
h, bins = np.histogram(img, nbins)
except ValueError: # img contains NaN
h, bins = np.histogram(img[np.isfinite(img)], nbins)
b0 = bins[0]
bins = bins[1:]
bins += 0.5 * (bins[0] - b0)
return h, bins
def scaleSignalCutParams(img, ratio=0.01, nbins=100, return_img=False):
img = _toSize(img)
h, bins = _histogramAndCorrBinPos(img, nbins)
h = np.cumsum(h).astype(float)
h -= h.min()
h /= h[-1]
try:
start = findXAt(bins, h, ratio)
except IndexError:
start = bins[0]
try:
stop = findXAt(bins, h, 1 - ratio)
except IndexError:
stop = bins[-1]
if return_img:
return start, stop, img
return start, stop
def scaleSignal(img, fitParams=None,
backgroundToZero=False, reference=None):
'''
scale the image between...
backgroundToZero=True -> 0 (average background) and 1 (maximum signal)
backgroundToZero=False -> signal+-3std
reference -> reference image -- scale image to fit this one
returns:
scaled image
'''
img = imread(img)
if reference is not None:
# def fn(ii, m,n):
# return ii*m+n
# curve_fit(fn, img[::10,::10], ref[::10,::10])
low, high = signalRange(img, fitParams)
low2, high2 = signalRange(reference)
img = np.asfarray(img)
ampl = (high2 - low2) / (high - low)
img -= low
img *= ampl
img += low2
return img
else:
offs, div = scaleParams(img, fitParams, backgroundToZero)
img = np.asfarray(img) - offs
img /= div
        print('offset: %s, divisor: %s' % (offs, div))
return img
def getBackgroundRange(fitParams):
'''
return minimum, average, maximum of the background peak
'''
smn, _, _ = getSignalParameters(fitParams)
bg = fitParams[0]
_, avg, std = bg
bgmn = max(0, avg - 3 * std)
if avg + 4 * std < smn:
bgmx = avg + 4 * std
if avg + 3 * std < smn:
bgmx = avg + 3 * std
if avg + 2 * std < smn:
bgmx = avg + 2 * std
else:
bgmx = avg + std
return bgmn, avg, bgmx
def hasBackground(fitParams):
'''
compare the height of putative bg and signal peak
if ratio if too height assume there is no background
'''
signal = getSignalPeak(fitParams)
bg = getBackgroundPeak(fitParams)
if signal == bg:
return False
r = signal[0] / bg[0]
if r < 1:
r = 1 / r
return r < 100
def backgroundPeakValue(img, bins=500):
f = FitHistogramPeaks(img, bins=bins, bins2=300)
bgp = getBackgroundPeak(f.fitParams)
ind = int(bgp[1])
if ind < 0:
ind = 0
# y = f.yvals[ind:]
# i = np.argmax(np.diff(y) > 0)
# bgmaxpos = ind # + i
# print(f.xvals[bgmaxpos], bgmaxpos)
# import pylab as plt
# plt.plot(f.xvals, f.yvals)
# plt.show()
return f.xvals[ind]
def signalMinimum2(img, bins=None):
'''
minimum position between signal and background peak
'''
f = FitHistogramPeaks(img, bins=bins)
i = signalPeakIndex(f.fitParams)
spos = f.fitParams[i][1]
# spos = getSignalPeak(f.fitParams)[1]
# bpos = getBackgroundPeak(f.fitParams)[1]
bpos = f.fitParams[i - 1][1]
ind = np.logical_and(f.xvals > bpos, f.xvals < spos)
try:
i = np.argmin(f.yvals[ind])
return f.xvals[ind][i]
except ValueError as e:
if bins is None:
return signalMinimum2(img, bins=400)
else:
raise e
def signalMinimum(img, fitParams=None, n_std=3):
'''
intersection between signal and background peak
'''
if fitParams is None:
fitParams = FitHistogramPeaks(img).fitParams
assert len(fitParams) > 1, 'need 2 peaks so get minimum signal'
i = signalPeakIndex(fitParams)
signal = fitParams[i]
bg = getBackgroundPeak(fitParams)
smn = signal[1] - n_std * signal[2]
bmx = bg[1] + n_std * bg[2]
if smn > bmx:
return smn
# peaks are overlapping
# define signal min. as intersection between both Gaussians
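    # The fitted peaks behave like scaled normal densities
    #     p_i(x) = s_i / (std_i * sqrt(2*pi)) * exp(-(x - m_i)**2 / (2 * std_i**2)),
    # so setting p1(x) = p2(x) and taking logs gives a quadratic a*x**2 + b*x + c = 0
    # whose coefficients are exactly what solve() below hands to np.roots(); the root
    # lying between the background mean and the signal mean is used as the cut point.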
def solve(p1, p2):
s1, m1, std1 = p1
s2, m2, std2 = p2
a = (1 / (2 * std1**2)) - (1 / (2 * std2**2))
b = (m2 / (std2**2)) - (m1 / (std1**2))
c = (m1**2 / (2 * std1**2)) - (m2**2 / (2 * std2**2)) - \
np.log(((std2 * s1) / (std1 * s2)))
return np.roots([a, b, c])
i = solve(bg, signal)
try:
return i[np.logical_and(i > bg[1], i < signal[1])][0]
except IndexError:
# this error shouldn't occur... well
return max(smn, bmx)
def getSignalMinimum(fitParams, n_std=3):
assert len(fitParams) > 0, 'need min. 1 peak so get minimum signal'
if len(fitParams) == 1:
signal = fitParams[0]
return signal[1] - n_std * signal[2]
i = signalPeakIndex(fitParams)
signal = fitParams[i]
bg = fitParams[i - 1]
#bg = getBackgroundPeak(fitParams)
smn = signal[1] - n_std * signal[2]
bmx = bg[1] + n_std * bg[2]
if smn > bmx:
return smn
# peaks are overlapping
# define signal min. as intersection between both Gaussians
def solve(p1, p2):
s1, m1, std1 = p1
s2, m2, std2 = p2
a = (1 / (2 * std1**2)) - (1 / (2 * std2**2))
b = (m2 / (std2**2)) - (m1 / (std1**2))
c = (m1**2 / (2 * std1**2)) - (m2**2 / (2 * std2**2)) - \
np.log(((std2 * s1) / (std1 * s2)))
return np.roots([a, b, c])
i = solve(bg, signal)
try:
return i[np.logical_and(i > bg[1], i < signal[1])][0]
except IndexError:
        # something didn't work out - fallback
return smn
def getSignalParameters(fitParams, n_std=3):
'''
return minimum, average, maximum of the signal peak
'''
signal = getSignalPeak(fitParams)
mx = signal[1] + n_std * signal[2]
mn = signal[1] - n_std * signal[2]
if mn < fitParams[0][1]:
mn = fitParams[0][1] # set to bg
return mn, signal[1], mx
def signalStd(img):
fitParams = FitHistogramPeaks(img).fitParams
signal = getSignalPeak(fitParams)
return signal[2]
def backgroundMean(img, fitParams=None):
try:
if fitParams is None:
fitParams = FitHistogramPeaks(img).fitParams
bg = getBackgroundPeak(fitParams)
return bg[1]
except Exception as e:
print(e)
# in case peaks were not found:
return img.mean()
def signalRange(img, fitParams=None, nSigma=3):
try:
if fitParams is None:
fitParams = FitHistogramPeaks(img).fitParams
signPeak = getSignalPeak(fitParams)
return (signalMinimum(img, fitParams, nSigma),
signPeak[1] + nSigma * signPeak[2])
# return (signPeak[1] - nSigma*signPeak[2],signPeak[1] +
# nSigma*signPeak[2])
except Exception as e:
print(e)
# in case peaks were not found:
s = img.std()
m = img.mean()
return m - nSigma * s, m + nSigma * s
def scaleParamsFromReference(img, reference):
# saving startup time:
from scipy.optimize import curve_fit
def ff(arr):
arr = imread(arr, 'gray')
if arr.size > 300000:
arr = arr[::10, ::10]
m = np.nanmean(arr)
s = np.nanstd(arr)
r = m - 3 * s, m + 3 * s
b = (r[1] - r[0]) / 5
return arr, r, b
img, imgr, imgb = ff(img)
reference, refr, refb = ff(reference)
nbins = np.clip(15, max(imgb, refb), 50)
refh = np.histogram(reference, bins=nbins, range=refr)[
0].astype(np.float32)
imgh = np.histogram(img, bins=nbins, range=imgr)[0].astype(np.float32)
import pylab as plt
plt.figure(1)
plt.plot(refh)
plt.figure(2)
plt.plot(imgh)
plt.show()
def fn(x, offs, div):
return (x - offs) / div
params, fitCovariances = curve_fit(fn, refh, imgh, p0=(0, 1))
perr = np.sqrt(np.diag(fitCovariances))
print('error scaling to reference image: %s' % perr[0])
# if perr[0] < 0.1:
return params[0], params[1]
def scaleParams(img, fitParams=None):
low, high = signalRange(img, fitParams)
offs = low
div = high - low
return offs, div
def getBackgroundPeak(fitParams):
return fitParams[0]
def getSignalPeak(fitParams):
i = signalPeakIndex(fitParams)
return fitParams[i]
def signalPeakIndex(fitParams):
if len(fitParams) == 1:
i = 0
else:
# find categorical signal peak as max(peak height*standard deviation):
sizes = [pi[0] * pi[2] for pi in fitParams[1:]]
# signal peak has to have positive avg:
for n, p in enumerate(fitParams[1:]):
if p[1] < 0:
sizes[n] = 0
i = np.argmax(sizes) + 1
return i
if __name__ == '__main__':
import sys
import pylab as plt
from fancytools.os.PathStr import PathStr
import imgProcessor
img = imread(PathStr(imgProcessor.__file__).dirname().join(
'media', 'electroluminescence', 'EL_module_orig.PNG'), 'gray')
print('EL signal within range of %s' % str(signalRange(img)))
print('EL signal minimum = %s' % signalMinimum(img))
if 'no_window' not in sys.argv:
plt.imshow(img)
plt.colorbar()
plt.show()
| gpl-3.0 | 7,949,265,973,686,205,000 | 26.028424 | 78 | 0.553425 | false | 3.283984 | false | false | false |
Osmose/normandy | recipe-server/normandy/recipes/migrations/0021_migrate_to_single_actions.py | 1 | 1111 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-16 19:55
# flake8: noqa
from __future__ import unicode_literals
from django.db import migrations
def multiple_to_single(apps, schema_editor):
"""
Take the first action in a recipe and set it as the single action.
"""
Recipe = apps.get_model('recipes', 'Recipe')
for recipe in Recipe.objects.all():
if recipe.recipeaction_set.count() < 1:
raise ValueError('Cannot migrate recipe pk={0} as it has no actions. Delete it manually'
' or add an action and re-run this migration.'.format(recipe.pk))
recipe_action = recipe.recipeaction_set.order_by('order')[0]
recipe.action = recipe_action.action
recipe.arguments_json = recipe_action.arguments_json
recipe.save()
def noop(apps, schema_editor):
pass # Not too concerned about going backwards here.
class Migration(migrations.Migration):
dependencies = [
('recipes', '0020_auto_20160316_1947'),
]
operations = [
migrations.RunPython(multiple_to_single, noop)
]
| mpl-2.0 | -8,590,091,028,667,492,000 | 30.742857 | 100 | 0.647165 | false | 3.844291 | false | false | false |
HoussemCharf/FunUtils | pythonMergeSort.py | 1 | 2025 |
"""
This is a pure python implementation of the merge sort algorithm
For doctests run following command:
python -m doctest -v merge_sort.py
or
python3 -m doctest -v merge_sort.py
For manual testing run:
python merge_sort.py
"""
from __future__ import print_function
def merge_sort(collection):
"""Pure implementation of the merge sort algorithm in Python
:param collection: some mutable ordered collection with heterogeneous
comparable items inside
:return: the same collection ordered by ascending
Examples:
>>> merge_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> merge_sort([])
[]
>>> merge_sort([-2, -5, -45])
[-45, -5, -2]
"""
length = len(collection)
if length > 1:
midpoint = length // 2
left_half = merge_sort(collection[:midpoint])
right_half = merge_sort(collection[midpoint:])
i = 0
j = 0
k = 0
left_length = len(left_half)
right_length = len(right_half)
while i < left_length and j < right_length:
if left_half[i] < right_half[j]:
collection[k] = left_half[i]
i += 1
else:
collection[k] = right_half[j]
j += 1
k += 1
while i < left_length:
collection[k] = left_half[i]
i += 1
k += 1
while j < right_length:
collection[k] = right_half[j]
j += 1
k += 1
return collection
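# merge_sort runs in O(n log n) time and uses O(n) auxiliary space; the doctests
# in the docstring above double as usage examples.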
if __name__ == '__main__':
import sys
# For python 2.x and 3.x compatibility: 3.x has no raw_input builtin
# otherwise 2.x's input builtin function is too "smart"
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
user_input = input_function('Enter numbers separated by a comma:\n')
unsorted = [int(item) for item in user_input.split(',')]
    print(merge_sort(unsorted))
| mit | -4,175,166,591,996,086,300 | 26.957143 | 73 | 0.545185 | false | 3.835227 | false | false | false |
walterbender/locosugar | toolbar_utils.py | 1 | 5547 |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Walter Bender
# Copyright (c) 2012, Ignacio Rodriguez
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# You should have received a copy of the GNU General Public License
# along with this library; if not, write to the Free Software
# Foundation, 51 Franklin Street, Suite 500 Boston, MA 02110-1335 USA
from gi.repository import Gtk
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.graphics.combobox import ComboBox
def combo_factory(combo_array, toolbar, callback, cb_arg=None,
tooltip=None, default=None):
'''Factory for making a toolbar combo box'''
combo = ComboBox()
if tooltip is not None and hasattr(combo, 'set_tooltip_text'):
combo.set_tooltip_text(tooltip)
if cb_arg is not None:
combo.connect('changed', callback, cb_arg)
else:
combo.connect('changed', callback)
for i, selection in enumerate(combo_array):
combo.append_item(i, selection, None)
combo.show()
toolitem = Gtk.ToolItem()
toolitem.add(combo)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
if default is not None:
combo.set_active(combo_array.index(default))
return combo
def entry_factory(default_string, toolbar, tooltip=None, max=3):
''' Factory for adding a text box to a toolbar '''
entry = Gtk.Entry()
entry.set_text(default_string)
if tooltip is not None and hasattr(entry, 'set_tooltip_text'):
entry.set_tooltip_text(tooltip)
entry.set_width_chars(max)
entry.show()
toolitem = Gtk.ToolItem()
toolitem.add(entry)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
return entry
def button_factory(icon_name, toolbar, callback, cb_arg=None, tooltip=None,
accelerator=None):
    '''Factory for making toolbar buttons'''
button = ToolButton(icon_name)
if tooltip is not None:
button.set_tooltip(tooltip)
button.props.sensitive = True
if accelerator is not None:
button.props.accelerator = accelerator
if cb_arg is not None:
button.connect('clicked', callback, cb_arg)
else:
button.connect('clicked', callback)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(button, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(button, -1)
button.show()
return button
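# Hedged usage sketch (assumes a sugar3 ToolbarBox named `toolbar_box` and a
# callback defined on the activity; the icon name is a placeholder):
#
#     new_game_button = button_factory('new-game', toolbar_box.toolbar,
#                                      self._new_game_cb,
#                                      tooltip='Start a new game',
#                                      accelerator='<Ctrl>n')
#     separator_factory(toolbar_box.toolbar, expand=True, visible=False)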
def radio_factory(name, toolbar, callback, cb_arg=None, tooltip=None,
group=None):
''' Add a radio button to a toolbar '''
button = RadioToolButton(group=group)
button.set_icon_name(name)
if callback is not None:
if cb_arg is None:
button.connect('clicked', callback)
else:
button.connect('clicked', callback, cb_arg)
if hasattr(toolbar, 'insert'): # Add button to the main toolbar...
toolbar.insert(button, -1)
else: # ...or a secondary toolbar.
toolbar.props.page.insert(button, -1)
button.show()
if tooltip is not None:
button.set_tooltip(tooltip)
return button
def label_factory(toolbar, label_text, width=None):
''' Factory for adding a label to a toolbar '''
label = Gtk.Label(label_text)
label.set_line_wrap(True)
if width is not None:
label.set_size_request(width, -1) # doesn't work on XOs
label.show()
toolitem = Gtk.ToolItem()
toolitem.add(label)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
return label
def separator_factory(toolbar, expand=False, visible=True):
''' add a separator to a toolbar '''
separator = Gtk.SeparatorToolItem()
separator.props.draw = visible
separator.set_expand(expand)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(separator, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(separator, -1)
separator.show()
def image_factory(image, toolbar, tooltip=None):
''' Add an image to the toolbar '''
img = Gtk.Image()
img.set_from_pixbuf(image)
img_tool = Gtk.ToolItem()
img_tool.add(img)
if tooltip is not None:
img.set_tooltip_text(tooltip)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(img_tool, -1)
else: # or a secondary toolbar
toolbar.props.page.insert(img_tool, -1)
img_tool.show()
return img
def spin_factory(default, min, max, callback, toolbar):
spin_adj = Gtk.Adjustment(default, min, max, 1, 32, 0)
spin = Gtk.SpinButton(spin_adj, 0, 0)
spin_id = spin.connect('value-changed', callback)
spin.set_numeric(True)
spin.show()
toolitem = Gtk.ToolItem()
toolitem.add(spin)
if hasattr(toolbar, 'insert'): # the main toolbar
toolbar.insert(toolitem, -1)
else:
toolbar.props.page.insert(toolitem, -1)
toolitem.show()
return spin
| gpl-3.0 | -9,041,999,313,022,829,000 | 33.240741 | 75 | 0.658194 | false | 3.601948 | false | false | false |
ionux/bitforge | tests/unit.py | 1 | 1117 |
from bitforge.unit import Unit
class TestUnit:
def test_btc_accessors(self):
u = Unit(btc = 1.2)
assert u.btc == 1.2
assert u.mbtc == 1200
assert u.bits == 1200000
assert u.satoshis == 120000000
def test_btc_conversion(self):
u = Unit(btc = 1.3)
assert u.mbtc == 1300
assert u.bits == 1300000
assert u.satoshis == 130000000
u = Unit(mbtc = 1.3)
assert u.btc == 0.0013
assert u.bits == 1300
assert u.satoshis == 130000
u = Unit(bits = 1.3)
assert u.btc == 0.0000013
assert u.mbtc == 0.0013
assert u.satoshis == 130
u = Unit(satoshis = 3)
assert u.btc == 0.00000003
assert u.mbtc == 0.00003
assert u.bits == 0.03
    # TODO: Review precision
# def test_unit_rates(self):
# u = Unit.from_fiat(1.3, 350)
# assert u.at_rate(350) == 1.3
# u = Unit(btc = 0.0123)
# assert u.at_rate(10) == 0.12
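    # A hedged alternative for the precision-sensitive checks above, assuming the
    # suite runs under pytest (from_fiat/at_rate as in the commented lines):
    #
    #     import pytest
    #     def test_unit_rates(self):
    #         assert Unit.from_fiat(1.3, 350).at_rate(350) == pytest.approx(1.3)
    #         assert Unit(btc=0.0123).at_rate(10) == pytest.approx(0.12, abs=0.01)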
def test_repr(self):
u = Unit(btc = 1.3)
assert repr(u) == '<Unit: 130000000 satoshis>'
| mit | 5,227,069,257,949,842,000 | 24.386364 | 54 | 0.522829 | false | 3.002688 | false | false | false |
markbenvenuto/buildbaron | bfg_analyzer.py | 1 | 24487 |
#!/usr/bin/env python3
"""
Script to analyze the Jira Build Baron Queue
"""
import argparse
import binascii
import datetime
import dateutil
import dateutil.relativedelta
import hashlib
import json
import os
import pprint
import re
import requests
import stat
import sys
import concurrent.futures  # used by thread_map below
from multiprocessing import cpu_count  # used by thread_map below
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))
import buildbaron.analyzer.analyzer_config
import buildbaron.analyzer.evergreen
import buildbaron.analyzer.evg_log_file_analyzer
import buildbaron.analyzer.faultinfo
import buildbaron.analyzer.jira_client
import buildbaron.analyzer.log_file_analyzer
import buildbaron.analyzer.logkeeper
import buildbaron.analyzer.mongo_client
import buildbaron.analyzer.parallel_failure_analyzer
import buildbaron.analyzer.timeout_file_analyzer
# URL of the default Jira server.
# If you use .com, it breaks horribly
def ParseJiraTicket(issue, summary, description):
# Parse summary
if "System Failure:" in summary:
type = "system_failure"
elif "Timed Out:" in summary:
type = "timed_out"
elif "Failures" in summary:
type = "test_failure"
elif "Failure" in summary:
type = "test_failure"
elif "Failed" in summary:
type = "task_failure"
else:
raise ValueError("Unknown summary " + str(summary))
suite, build_variant, project, githash = ("unknown", "unknown", "unknown", "unknown")
summary_match = re.match(".*?: (.*) on (.*) \[(.*) @ ([a-zA-Z0-9]+)\]", summary)
if summary_match:
suite, build_variant, project, githash = summary_match.groups()
# Parse Body of description
lines = description.split("\n")
tests = []
for line in lines:
if line.startswith('h2.'):
url_match = re.search("\|(.*)\]", line)
task_url = url_match.group(1)
elif "[Logs|" in line:
log_line_match = re.match("\*(.*)\* - \[Logs\|(.*?)\]", line)
if log_line_match:
test_name = log_line_match.group(1)
log_file = log_line_match.group(2)
tests.append({'name': test_name, 'log_file': log_file})
else:
pass
return bfg_fault_description(issue,
summary,
type,
project,
githash,
task_url,
suite,
build_variant,
tests)
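# Illustrative note (the values below are made up): a summary such as
#   "Timed Out: replica_sets on Linux 64-bit [mongodb-mongo-master @ 1a2b3c4d]"
# is classified as type "timed_out", with suite "replica_sets", build variant
# "Linux 64-bit", project "mongodb-mongo-master" and the trailing hex string as
# the githash, before the description body is scanned for task and log links.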
class bfg_fault_description:
"""Parse a fault description into type"""
def __init__(self,
issue,
summary,
type,
project,
githash,
task_url,
suite,
build_variant,
tests):
self.issue = issue
self.summary = summary
self.type = type
self.project = project
self.githash = githash
self.task_url = task_url
self.suite = suite
self.build_variant = build_variant
self.tests = tests
def to_json(self):
return json.dumps(self, cls=BFGCustomEncoder)
class BFGCustomEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bfg_fault_description):
return {
"issue": obj.issue,
"summary": obj.summary,
"type": obj.type,
"task_url": obj.task_url,
"project": obj.project,
"githash": obj.githash,
"suite": obj.suite,
"build_variant": obj.build_variant,
"tests": obj.tests
}
# Let the base class default method raise the TypeError
return json.JSONEncoder.default(self, obj)
class BFGCustomDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
def object_hook(self, obj):
if 'task_url' not in obj and "project" not in obj:
return obj
        return bfg_fault_description(obj['issue'], obj['summary'], obj['type'], obj['project'],
                                     obj['githash'], obj['task_url'], obj['suite'],
                                     obj['build_variant'], obj['tests'])
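# Round-trip sketch (illustrative): bfg_fault_description objects serialize with
# BFGCustomEncoder and come back via BFGCustomDecoder; `bf` stands for any
# bfg_fault_description instance.
#
#   text = json.dumps(bf, cls=BFGCustomEncoder)
#   restored = json.loads(text, cls=BFGCustomDecoder)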
class bfg_analyzer(object):
"""description of class"""
__STACK_FRAME_EXTRACTING_REGEX = re.compile(
"([a-zA-Z0-9\./]*)@((?:[a-zA-Z0-9_()]+/?)+\.js):(\d+)(?::\d+)?$")
def __init__(self, jira_client):
self.jira_client = jira_client
self.evg_client = buildbaron.analyzer.evergreen.client()
self.pp = pprint.PrettyPrinter()
def query(self, query_str):
results = self.jira_client.search_issues(query_str, maxResults=100)
print("Result Count %d" % len(results))
bfs = []
for result in results:
bfs.append(ParseJiraTicket(
result.key,
result.fields.summary,
result.fields.description
))
# Save to disk to help investigation of bad results
bfs_str = json.dumps(bfs, cls=BFGCustomEncoder, indent="\t")
with open("bfs.json", "wb") as sjh:
sjh.write(bfs_str.encode())
# Return a list of dictionaries instead of a list of bfg_fault_description
return json.loads(bfs_str)
def check_logs(self, bfs):
summaries = []
for bf in bfs:
summaries.append(self.process_bf(bf))
jira_issue = self.jira_client.get_bfg_issue(bf["issue"])
jira_issue.fields.labels.append("bot-analyzed")
jira_issue.add_field_value("labels", "bot-analyzed")
return summaries
# TODO: parallelize the check_logs function with this since we are network bound
# builds = thread_map( lambda item : process_bf(base_url, item), commits)
    @staticmethod
    def thread_map(func, items):
# We can use a with statement to ensure threads are cleaned up promptly
with concurrent.futures.ThreadPoolExecutor(max_workers=cpu_count() * 2) as executor:
# Start the load operations and mark each future with its URL
future_to_item = {executor.submit(func, item): item for item in items}
results = []
for future in concurrent.futures.as_completed(future_to_item):
item = future_to_item[future]
try:
nf = future.result()
if nf:
results += nf
except Exception as exc:
print('%r generated an exception: %s' % (item, exc))
return results
def create_bf_cache(self, bf):
"""Create a directory to cache the log file in"""
if not os.path.exists("cache"):
os.mkdir("cache")
if not os.path.exists(os.path.join("cache", "bf")):
os.mkdir(os.path.join("cache", "bf"))
m = hashlib.sha1()
m.update(bf["task_url"].encode())
digest = m.digest()
digest64 = binascii.b2a_hex(digest).decode()
bf["hash"] = digest64
path = os.path.join("cache", "bf", digest64)
bf["bf_cache"] = path
if not os.path.exists(path):
os.mkdir(path)
def create_test_cache(self, bf, test):
"""Create a directory to cache the log file in"""
m = hashlib.sha1()
m.update(test["name"].encode())
digest = m.digest()
digest64 = binascii.b2a_hex(digest).decode()
test["hash"] = digest64
path = os.path.join(bf['bf_cache'], digest64)
test["cache"] = path
if not os.path.exists(path):
os.mkdir(path)
def process_bf(self, bf):
"""
Process a log through the log file analyzer
        Saves analysis information in cache/XXX/summary.json
"""
self.create_bf_cache(bf)
print("BF: " + str(bf))
summary_json_file = os.path.join(bf["bf_cache"], "summary.json")
# If we've already analyzed this failure, don't do it again.
if os.path.exists(summary_json_file):
with open(summary_json_file, "rb") as summary_file:
return json.loads(summary_file.read().decode('utf-8'))
system_log_url = buildbaron.analyzer.evergreen.task_get_system_raw_log(bf['task_url'])
task_log_file_url = buildbaron.analyzer.evergreen.task_get_task_raw_log(bf["task_url"])
bf['system_log_url'] = system_log_url
bf['task_log_file_url'] = task_log_file_url
# Will be populated with objects like {"test": <test name>, "faults": [...]}
tests_fault_info = []
# Will be populated with fault objects.
extracted_faults = self.process_task_failure(bf)
if bf['type'] == 'test_failure':
# Go through each test
for test in bf['tests']:
tests_fault_info.append({
"test": test["name"],
"faults": self.process_test(bf, test)
})
elif bf['type'] == 'system_failure':
extracted_faults.extend(self.process_system_failure(bf))
elif bf['type'] == 'timed_out':
task_faults, test_faults = self.process_time_out(bf)
extracted_faults.extend(task_faults)
tests_fault_info.extend(test_faults)
try:
summary_obj = {
"bfg_info": bf,
"faults": [fault.to_json() for fault in extracted_faults],
"test_faults": [
{"test": info["test"], "faults": [fault.to_json() for fault in info["faults"]]}
for info in tests_fault_info
],
"backtraces": [],
}
except TypeError:
summary_obj = {
"bfg_info": bf,
"faults": [fault.to_json() for fault in extracted_faults],
"backtraces": [],
}
summary_str = json.dumps(summary_obj)
def flatten(a):
flattened = []
for elem in a:
if type(elem) == list:
flattened.extend(elem)
else:
flattened.append(elem)
return flattened
# Update jira tickets to include new information.
try:
all_faults = (extracted_faults
+ flatten([testinfo["faults"] for testinfo in tests_fault_info]))
except:
all_faults = extracted_faults
for fault in all_faults:
self.jira_client.add_fault_comment(bf["issue"], fault)
if fault.category == "js backtrace":
backtrace = self.build_backtrace(fault, bf["githash"])
self.jira_client.add_github_backtrace_context(bf["issue"], backtrace)
summary_obj["backtraces"].append(backtrace)
with open(summary_json_file, "wb") as sjh:
sjh.write(summary_str.encode())
return summary_obj
def build_backtrace(self, fault, githash):
"""
returns a list of strings representing a backtrace, as well as a parsed version represented
as a list of objects of the form
{
"github_url": "https://github.com/mongodb/mongo/blob/deadbeef/jstests/core/test.js#L42",
"first_line_number": 37,
"line_number": 42,
"frame_number": 0,
"file_path": "jstests/core/test.js",
"file_name": "test.js",
"lines": ["line 37", "line 38", ..., "line 47"]
}
"""
trace = []
# Also populate a plain-text style backtrace, with github links to frames.
n_lines_of_context = 5
stack_lines = fault.context.splitlines()
# Traverse the stack frames in reverse.
for i in range(len(stack_lines) - 1, -1, -1):
line = stack_lines[i].replace("\\", "/") # Normalize separators.
stack_match = bfg_analyzer.__STACK_FRAME_EXTRACTING_REGEX.search(line)
if stack_match is None:
if re.search("failed to load", line) is not None:
continue # skip that line, it's expected.
break # any other line should be the end of the backtrace
(func_name, file_path, line_number) = stack_match.groups()
gui_github_url = (
"https://github.com/mongodb/mongo/blob/{githash}/{file_path}#L{line_number}".format(
githash=githash,
file_path=file_path,
line_number=line_number))
line_number = int(line_number)
# add a {code} frame to the comment, showing the line involved in the stack trace, with
# some context of surrounding lines. Don't do this for the stack frames within
# src/mongo/shell, since they tend not to be as interesting.
if "src/mongo/shell" in file_path:
continue
raw_github_url = (
"https://raw.githubusercontent.com/mongodb/mongo/{githash}/{file_path}".format(
githash=githash,
file_path=file_path))
raw_code = requests.get(raw_github_url).text
start_line = max(0, line_number - n_lines_of_context)
end_line = line_number + n_lines_of_context
code_context = raw_code.splitlines()[start_line:end_line]
file_name = file_path[file_path.rfind("/") + 1:]
trace.append({
"github_url": gui_github_url,
"first_line_number": start_line,
"line_number": line_number,
"frame_number": i,
"file_path": file_path,
"file_name": file_name,
"lines": code_context
})
return trace
def process_system_failure(self, bf):
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
bf['log_file_url'] = bf['task_log_file_url']
bf['name'] = 'task'
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['task_log_file_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
analyzer = buildbaron.analyzer.evg_log_file_analyzer.EvgLogFileAnalyzer(log_file_str)
analyzer.analyze()
faults = analyzer.get_faults()
if len(faults) == 0:
print("===========================")
print("No system failure faults detected: " + self.pp.pformat(bf))
print("To Debug: python analyzer" + os.path.sep + "log_file_analyzer.py " + log_file)
print("===========================")
return faults
def process_task_failure(self, bf):
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
bf['log_file_url'] = bf['task_log_file_url']
bf['name'] = 'task'
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['task_log_file_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
extracted_faults = []
analyzer = buildbaron.analyzer.evg_log_file_analyzer.EvgLogFileAnalyzer(log_file_str)
analyzer.analyze()
extracted_faults.extend(analyzer.get_faults())
oom_analyzer = self.check_for_oom_killer(bf)
if oom_analyzer is not None:
extracted_faults.extend(oom_analyzer.get_faults())
return extracted_faults
def process_time_out(self, bf):
"""
Returns a list of faults at the task level, and also a list of faults at the test level,
which is populated with test faults if any are determined to have timed out.
"""
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
bf['log_file_url'] = bf['task_log_file_url']
bf['name'] = 'task'
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['task_log_file_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
task_faults = []
test_faults = []
print("Checking " + log_file)
analyzer = buildbaron.analyzer.timeout_file_analyzer.TimeOutAnalyzer(log_file_str)
analyzer.analyze()
task_faults.extend(analyzer.get_faults())
incomplete_tests = analyzer.get_incomplete_tests()
if len(incomplete_tests) == 0:
if len(task_faults) == 0:
print("===========================")
print("No faults found for task: " + self.pp.pformat(bf))
print("To Debug: python analyzer" + os.path.sep + "timeout_file_analyzer.py " +
log_file)
print("===========================")
for incomplete_test in incomplete_tests:
jira_issue = self.jira_client.get_bfg_issue(bf["issue"])
timeout_comment = (
"*" +
incomplete_test["name"] +
" timed out* - [Logs|" +
incomplete_test["log_file"] +
"]"
)
try:
if "bot-analyzed" not in jira_issue.fields.labels:
jira_issue.update(
description=jira_issue.fields.description +
"\n{0}\n".format(timeout_comment))
except buildbaron.analyzer.jira_client.JIRAError as e:
print("Error updating jira: " + str(e))
test_faults.extend(self.process_test(bf, incomplete_test))
return task_faults, test_faults
def process_test(self, bf, test):
self.create_test_cache(bf, test)
cache_dir = test["cache"]
log_file = os.path.join(cache_dir, "test.log")
# TODO(CWS) what is this?
nested_test = test
for key in bf.keys():
if key != 'tests' and key != 'name':
nested_test[key] = bf[key]
faults = []
# If logkeeper is down, we will not have a log file :-(
if test["log_file"] is not None and test["log_file"] != "" and "test/None" not in test[
"log_file"] and "log url not available" not in test["log_file"]:
if not os.path.exists(log_file):
buildbaron.analyzer.logkeeper.retieve_raw_log(test["log_file"], log_file)
test["log_file_url"] = buildbaron.analyzer.logkeeper.get_raw_log_url(
test["log_file"])
log_file_stat = os.stat(log_file)
if log_file_stat[stat.ST_SIZE] > 50 * 1024 * 1024:
print("Skipping Large File : " + str(log_file_stat[stat.ST_SIZE]))
return []
else:
test["log_file_url"] = "none"
with open(log_file, "wb") as lfh:
lfh.write("Logkeeper was down\n".encode())
log_file_stat = os.stat(log_file)
if log_file_stat[stat.ST_SIZE] > 50 * 1024 * 1024:
print("Skipping Large File : " + str(log_file_stat[stat.ST_SIZE]) + " at " + str(
log_file))
return []
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
print("Checking Log File")
LFS = buildbaron.analyzer.log_file_analyzer.LogFileSplitter(log_file_str)
analyzer = buildbaron.analyzer.log_file_analyzer.LogFileAnalyzer(LFS.get_streams())
analyzer.analyze()
faults.extend(analyzer.get_faults())
if test["name"].startswith("basic") and test["name"].endswith(".js"):
print("Anlyzing basic.js or basicPlus.js failure")
parallel_analyzer = \
buildbaron.analyzer.parallel_failure_analyzer.ParallelTestFailureAnalyzer(
log_file_str)
parallel_analyzer.analyze()
faults.extend(parallel_analyzer.get_faults())
if len(faults) == 0:
print("===========================")
print("No faults found for test: " + self.pp.pformat(bf))
print("To Debug: python analyzer" + os.path.sep + "log_file_analyzer.py " +
log_file)
print("===========================")
return faults
def check_for_oom_killer(self, bf):
cache_dir = bf["bf_cache"]
log_file = os.path.join(cache_dir, "test.log")
if not os.path.exists(log_file):
self.evg_client.retrieve_file(bf['system_log_url'], log_file)
with open(log_file, "rb") as lfh:
log_file_str = lfh.read().decode('utf-8')
analyzer = buildbaron.analyzer.evg_log_file_analyzer.EvgLogFileAnalyzer(log_file_str)
analyzer.analyze_oom()
if len(analyzer.get_faults()) > 0:
return analyzer
return None
def query_bfg_str(start, end):
# Dates should be formatted as 2017-01-25
return ('project = bfg'
' AND resolution is EMPTY'
' AND created > {createdStart}'
' AND created <= {createdEnd}'
' AND summary !~ "System Failure:"'
' ORDER BY created DESC'.format(
createdStart=start.strftime("%Y-%m-%d"),
createdEnd=end.strftime("%Y-%m-%d")))
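# Example (illustrative): query_bfg_str(datetime.date(2017, 1, 18), datetime.date(2017, 1, 25))
# produces the single JQL string (wrapped here for readability)
#   project = bfg AND resolution is EMPTY AND created > 2017-01-18
#   AND created <= 2017-01-25 AND summary !~ "System Failure:" ORDER BY created DESC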
def get_last_week_query():
today = datetime.date.today()
    # The start of build baron: if today is Wednesday, the prior Wednesday; otherwise the
    # Wednesday before that
last_wednesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(-2))
# The end of build baron
last_tuesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(-1))
return query_bfg_str(last_wednesday, last_tuesday)
def get_this_week_query():
today = datetime.date.today()
# The start of build baron - last Wednesday (or today if today is Wednesday)
next_wednesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(-1))
# The end of build baron - this Wednesday
this_tuesday = today + dateutil.relativedelta.relativedelta(
weekday=dateutil.relativedelta.WE(2))
return query_bfg_str(next_wednesday, this_tuesday)
def main():
parser = argparse.ArgumentParser(description='Analyze test failure in jira.')
group = parser.add_argument_group("Jira options")
group.add_argument(
'--jira_server',
type=str,
help="Jira Server to query",
default=buildbaron.analyzer.analyzer_config.jira_server())
group.add_argument(
'--jira_user',
type=str,
help="Jira user name",
default=buildbaron.analyzer.analyzer_config.jira_user())
group = parser.add_mutually_exclusive_group()
group.add_argument(
'--last_week', action='store_true', help="Query of Last week's build baron queue")
group.add_argument(
'--this_week', action='store_true', help="Query of This week's build baron queue")
group.add_argument('--query_str', type=str, help="Any query against implicitly the BFG project")
args = parser.parse_args()
if args.query_str:
query_str = "(PROJECT = BFG) AND (%s)" % args.query_str
elif args.last_week:
query_str = get_last_week_query()
else:
query_str = get_this_week_query()
print("Query: %s" % query_str)
# Connect to mongod
print("Initializing local MongoDB server...")
buildbaron.analyzer.mongo_client.reinit_db()
# Connect to jira
jira_client = buildbaron.analyzer.jira_client.jira_client(args.jira_server, args.jira_user)
# Create our analyzer
bfa = bfg_analyzer(jira_client)
# Fetch desired BFG tickets
bfs = bfa.query(query_str)
# Analyze for failure
failed_bfs = bfa.check_logs(bfs)
print("Total BFs to investigate %d\n" % len(failed_bfs))
failed_bfs_root = {
'query': query_str,
'date': datetime.datetime.now().isoformat(' '),
'bfs': failed_bfs
}
with open("failed_bfs.json", "w", encoding="utf8") as sjh:
json.dump(failed_bfs_root, sjh, indent="\t")
buildbaron.analyzer.mongo_client.load_bfs(failed_bfs)
if __name__ == '__main__':
main()
| apache-2.0 | -1,692,208,233,766,159,000 | 34.283862 | 100 | 0.556663 | false | 3.79879 | true | false | false |
jjscarafia/CUPS-Cloud-Print | reportissues.py | 2 | 2412 | #! /bin/sh
"true" '''\'
if command -v python2 > /dev/null; then
exec python2 "$0" "$@"
else
exec python "$0" "$@"
fi
exit $?
'''
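# The block above is a sh/Python polyglot: a shell executing this file runs the
# wrapper and re-execs it with python2 (falling back to python), while the Python
# interpreter itself only sees harmless string literals before the real code.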
# CUPS Cloudprint - Print via Google Cloud Print
# Copyright (C) 2013 Simon Cadman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
if __name__ == '__main__': # pragma: no cover
import sys
import os
import subprocess
libpath = "/usr/local/share/cloudprint-cups/"
if not os.path.exists(libpath):
libpath = "/usr/share/cloudprint-cups"
sys.path.insert(0, libpath)
from auth import Auth
from printermanager import PrinterManager
from ccputils import Utils
Utils.SetupLogging()
# line below is replaced on commit
CCPVersion = "20140814.2 000000"
Utils.ShowVersion(CCPVersion)
requestors, storage = Auth.SetupAuth(True)
printer_manager = PrinterManager(requestors)
printers = printer_manager.getPrinters()
if printers is None:
print "ERROR: No Printers Found"
sys.exit(1)
for printer in printers:
print printer.getCUPSDriverDescription()
print ""
print printer.getFields()
print printer['capabilities']
print "\n"
ppdname = printer.getPPDName()
p1 = subprocess.Popen(
(os.path.join(libpath, 'dynamicppd.py'), 'cat', ppdname.lstrip('-')),
stdout=subprocess.PIPE)
ppddata = p1.communicate()[0]
p = subprocess.Popen(['cupstestppd', '-'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
testdata = p.communicate(ppddata)[0]
result = p.returncode
print "Result of cupstestppd was " + str(result)
print "".join(testdata)
if result != 0:
print "cupstestppd errored: "
print ppddata
print "\n"
| gpl-3.0 | -9,007,118,623,980,421,000 | 32.5 | 97 | 0.648425 | false | 3.716487 | false | false | false |
gzamboni/sdnResilience | loxi/of14/queue_stats_prop.py | 1 | 3603 | # Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
import util
import loxi.generic_util
import sys
ofp = sys.modules['loxi.of14']
class queue_stats_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = queue_stats_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = queue_stats_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("queue_stats_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class experimenter(queue_stats_prop):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, exp_type=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if exp_type != None:
self.exp_type = exp_type
else:
self.exp_type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.exp_type))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.exp_type = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.exp_type != other.exp_type: return False
return True
def pretty_print(self, q):
q.text("experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("exp_type = ");
q.text("%#x" % self.exp_type)
q.breakable()
q.text('}')
queue_stats_prop.subtypes[65535] = experimenter
| gpl-2.0 | 4,146,537,638,850,166,000 | 27.824 | 88 | 0.566472 | false | 3.800633 | false | false | false |
virtualopensystems/nova | nova/virt/hyperv/driver.py | 1 | 9838 | # Copyright (c) 2010 Cloud.com, Inc
# Copyright (c) 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A Hyper-V Nova Compute driver.
"""
from nova.i18n import _
from nova.openstack.common import log as logging
from nova.virt import driver
from nova.virt.hyperv import hostops
from nova.virt.hyperv import livemigrationops
from nova.virt.hyperv import migrationops
from nova.virt.hyperv import rdpconsoleops
from nova.virt.hyperv import snapshotops
from nova.virt.hyperv import vmops
from nova.virt.hyperv import volumeops
LOG = logging.getLogger(__name__)
class HyperVDriver(driver.ComputeDriver):
def __init__(self, virtapi):
super(HyperVDriver, self).__init__(virtapi)
self._hostops = hostops.HostOps()
self._volumeops = volumeops.VolumeOps()
self._vmops = vmops.VMOps()
self._snapshotops = snapshotops.SnapshotOps()
self._livemigrationops = livemigrationops.LiveMigrationOps()
self._migrationops = migrationops.MigrationOps()
self._rdpconsoleops = rdpconsoleops.RDPConsoleOps()
def init_host(self, host):
pass
def list_instances(self):
return self._vmops.list_instances()
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
self._vmops.spawn(context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
self._vmops.reboot(instance, network_info, reboot_type)
def destroy(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None):
self._vmops.destroy(instance, network_info, block_device_info,
destroy_disks)
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup after instance being destroyed by Hypervisor."""
pass
def get_info(self, instance):
return self._vmops.get_info(instance)
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
return self._volumeops.attach_volume(connection_info,
instance['name'])
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
return self._volumeops.detach_volume(connection_info,
instance['name'])
def get_volume_connector(self, instance):
return self._volumeops.get_volume_connector(instance)
def get_available_resource(self, nodename):
return self._hostops.get_available_resource()
def get_host_stats(self, refresh=False):
return self._hostops.get_host_stats(refresh)
def host_power_action(self, host, action):
return self._hostops.host_power_action(host, action)
def snapshot(self, context, instance, image_id, update_task_state):
self._snapshotops.snapshot(context, instance, image_id,
update_task_state)
def pause(self, instance):
self._vmops.pause(instance)
def unpause(self, instance):
self._vmops.unpause(instance)
def suspend(self, instance):
self._vmops.suspend(instance)
def resume(self, context, instance, network_info, block_device_info=None):
self._vmops.resume(instance)
def power_off(self, instance, timeout=0, retry_interval=0):
# TODO(PhilDay): Add support for timeout (clean shutdown)
self._vmops.power_off(instance)
def power_on(self, context, instance, network_info,
block_device_info=None):
self._vmops.power_on(instance)
def live_migration(self, context, instance, dest, post_method,
recover_method, block_migration=False,
migrate_data=None):
self._livemigrationops.live_migration(context, instance, dest,
post_method, recover_method,
block_migration, migrate_data)
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
self.destroy(context, instance, network_info, block_device_info)
def pre_live_migration(self, context, instance, block_device_info,
network_info, disk_info, migrate_data=None):
self._livemigrationops.pre_live_migration(context, instance,
block_device_info,
network_info)
def post_live_migration_at_destination(self, context, instance,
network_info,
block_migration=False,
block_device_info=None):
self._livemigrationops.post_live_migration_at_destination(
context,
instance,
network_info,
block_migration)
def check_can_live_migrate_destination(self, context, instance,
src_compute_info, dst_compute_info,
block_migration=False,
disk_over_commit=False):
return self._livemigrationops.check_can_live_migrate_destination(
context, instance, src_compute_info, dst_compute_info,
block_migration, disk_over_commit)
def check_can_live_migrate_destination_cleanup(self, context,
dest_check_data):
self._livemigrationops.check_can_live_migrate_destination_cleanup(
context, dest_check_data)
def check_can_live_migrate_source(self, context, instance,
dest_check_data):
return self._livemigrationops.check_can_live_migrate_source(
context, instance, dest_check_data)
def get_instance_disk_info(self, instance_name, block_device_info=None):
pass
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks."""
msg = _("VIF plugging is not supported by the Hyper-V driver.")
raise NotImplementedError(msg)
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks."""
msg = _("VIF unplugging is not supported by the Hyper-V driver.")
raise NotImplementedError(msg)
def ensure_filtering_rules_for_instance(self, instance, network_info):
LOG.debug("ensure_filtering_rules_for_instance called",
instance=instance)
def unfilter_instance(self, instance, network_info):
LOG.debug("unfilter_instance called", instance=instance)
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
# TODO(PhilDay): Add support for timeout (clean shutdown)
return self._migrationops.migrate_disk_and_power_off(context,
instance, dest,
flavor,
network_info,
block_device_info)
def confirm_migration(self, migration, instance, network_info):
self._migrationops.confirm_migration(migration, instance, network_info)
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
self._migrationops.finish_revert_migration(context, instance,
network_info,
block_device_info, power_on)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
self._migrationops.finish_migration(context, migration, instance,
disk_info, network_info,
image_meta, resize_instance,
block_device_info, power_on)
def get_host_ip_addr(self):
return self._hostops.get_host_ip_addr()
def get_host_uptime(self, host):
return self._hostops.get_host_uptime()
def get_rdp_console(self, context, instance):
return self._rdpconsoleops.get_rdp_console(instance)
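# Usage note (not from the original file): this driver is normally selected via
# Nova configuration rather than imported directly, e.g. a nova.conf entry along
# the lines of `compute_driver = hyperv.HyperVDriver` (the exact value depends on
# the Nova release in use).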
| apache-2.0 | 4,809,147,517,281,367,000 | 43.116592 | 79 | 0.582639 | false | 4.523218 | false | false | false |
MediaMath/qasino | lib/zmq_requestor.py | 1 | 2310 | # Copyright (C) 2014 MediaMath, Inc. <http://www.mediamath.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from txzmq import ZmqFactory, ZmqEndpoint, ZmqEndpointType, ZmqREQConnection
import logging
import json
from util import Identity
class ZmqRequestor(ZmqREQConnection):
def __init__(self, remote_host, port, zmq_factory, data_manager=None):
self.data_manager = data_manager
self.remote_host = remote_host
endpoint = ZmqEndpoint(ZmqEndpointType.connect, "tcp://%s:%d" % (remote_host, port))
ZmqREQConnection.__init__(self, zmq_factory, endpoint)
def request_metadata(self):
msg = { "op" : "get_table_list", "identity" : Identity.get_identity() }
#logging.info("ZmqRequestor: Requesting table list from %s.", self.remote_host)
deferred = self.sendMsg(json.dumps(msg))
deferred.callback = self.message_received
def send_table(self, table):
deferred = self.sendMsg(table.get_json(op="add_table_data", identity=Identity.get_identity()))
deferred.callback = self.message_received
def message_received(self, msg):
response_meta = json.loads(msg[0])
if response_meta == None or response_meta["response_op"] == None:
logging.error("ZmqRequestor: bad message response received")
elif response_meta["response_op"] == "tables_list":
logging.info("ZmqRequestor: Table list response: %s", json.loads(msg[1]))
elif response_meta["response_op"] == "ok":
logging.info("ZmqRequestor: request OK")
elif response_meta["response_op"] == "error":
logging.info("ZmqRequestor: request ERROR: " + response_meta["error_message"])
else:
logging.error("ZmqRequestor: unknown response: ", response_meta)
| apache-2.0 | 5,312,493,586,066,515,000 | 38.827586 | 102 | 0.679654 | false | 3.799342 | false | false | false |
scionrep/scioncc | src/pyon/util/containers.py | 1 | 14330 | """ General purpose util classes and functions """
__author__ = 'Adam R. Smith, Michael Meisinger'
import collections
import datetime
import importlib
import string
import time
import simplejson
import base64
import uuid
import os
import re
from types import NoneType
from copy import deepcopy
DICT_LOCKING_ATTR = "__locked__"
class DotNotationGetItem(object):
""" Drive the behavior for DotList and DotDict lookups by dot notation, JSON-style. """
def _convert(self, val):
""" Convert the type if necessary and return if a conversion happened. """
if isinstance(val, dict) and not isinstance(val, DotDict):
return DotDict(val), True
elif isinstance(val, list) and not isinstance(val, DotList):
return DotList(val), True
return val, False
def __getitem__(self, key):
val = super(DotNotationGetItem, self).__getitem__(key)
val, converted = self._convert(val)
if converted: self[key] = val
return val
def __contains__(self, item):
return hasattr(self, item)
class DotList(DotNotationGetItem, list):
""" Partner class for DotDict; see that for docs. Both are needed to fully support JSON/YAML blocks. """
#def DotListIterator(list.)
def __iter__(self):
""" Monkey-patch the "next" iterator method to return modified versions. This will be slow. """
#it = super(DotList, self).__iter__()
#it_next = getattr(it, 'next')
#setattr(it, 'next', lambda: it_next(it))
#return it
for val in super(DotList, self).__iter__():
val, converted = self._convert(val)
yield val
class DotDict(DotNotationGetItem, dict):
"""
Subclass of dict that will recursively look up attributes with dot notation.
This is primarily for working with JSON-style data in a cleaner way like javascript.
Note that this will instantiate a number of child DotDicts when you first access attributes;
do not use in performance-critical parts of your code.
"""
def __dir__(self):
return [k for k in self.__dict__.keys() + self.keys() if k != DICT_LOCKING_ATTR]
def __getattr__(self, key):
""" Make attempts to lookup by nonexistent attributes also attempt key lookups. """
if self.has_key(key):
return self[key]
if not self.__dict__.has_key(DICT_LOCKING_ATTR):
import sys
import dis
frame = sys._getframe(1)
if '\x00%c' % dis.opmap['STORE_ATTR'] in frame.f_code.co_code:
self[key] = DotDict()
return self[key]
raise AttributeError(key)
def __setattr__(self, key, value):
if key in dir(dict):
raise AttributeError('%s conflicts with builtin.' % key)
if self.__dict__.has_key(DICT_LOCKING_ATTR):
raise AttributeError('Setting %s on a locked DotDict' % key)
if isinstance(value, dict):
self[key] = DotDict(value)
else:
self[key] = value
def copy(self):
return deepcopy(self)
def get_safe(self, qual_key, default=None):
"""
@brief Returns value of qualified key, such as "system.name" or None if not exists.
If default is given, returns the default. No exception thrown.
"""
value = get_safe(self, qual_key)
if value is None:
value = default
return value
def lock(self):
self.__dict__[DICT_LOCKING_ATTR] = True
def clear(self):
if self.__dict__.has_key(DICT_LOCKING_ATTR):
del self.__dict__[DICT_LOCKING_ATTR]
super(DotDict, self).clear()
def pop(self, *args, **kwargs):
if self.__dict__.has_key(DICT_LOCKING_ATTR):
raise AttributeError('Cannot pop on a locked DotDict')
return super(DotDict, self).pop(*args, **kwargs)
def popitem(self):
if self.__dict__.has_key(DICT_LOCKING_ATTR):
raise AttributeError('Cannot popitem on a locked DotDict')
return super(DotDict, self).popitem()
def as_dict(self):
return simple_deepcopy(self)
@classmethod
def fromkeys(cls, seq, value=None):
return DotDict(dict.fromkeys(seq, value))
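# Example (illustrative) of attribute-style access on DotDict:
#
#   d = DotDict({"system": {"name": "ion", "port": 5672}})
#   d.system.name                   # -> "ion"
#   d.get_safe("system.port", 0)    # -> 5672
#   d.lock()                        # subsequent attribute writes raise AttributeError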
class DictDiffer(object):
"""
Calculate the difference between two dictionaries as:
(1) items added
(2) items removed
(3) keys same in both but changed values
(4) keys same in both and unchanged values
"""
def __init__(self, current_dict, past_dict):
self.current_dict, self.past_dict = current_dict, past_dict
self.set_current, self.set_past = set(current_dict.keys()), set(past_dict.keys())
self.intersect = self.set_current.intersection(self.set_past)
def added(self):
return self.set_current - self.intersect
def removed(self):
return self.set_past - self.intersect
def changed(self):
return set(o for o in self.intersect if self.past_dict[o] != self.current_dict[o])
def unchanged(self):
return set(o for o in self.intersect if self.past_dict[o] == self.current_dict[o])
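# Example (illustrative):
#
#   diff = DictDiffer({"a": 1, "b": 2}, {"b": 3, "c": 4})
#   diff.added()      # -> {'a'}
#   diff.removed()    # -> {'c'}
#   diff.changed()    # -> {'b'}
#   diff.unchanged()  # -> set()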
def simple_deepcopy(coll):
""" Performs a recursive deep copy on given collection, only using dict, list and set
collection types and not checking for cycles. """
if isinstance(coll, dict):
return {k: simple_deepcopy(v) for k, v in coll.iteritems()}
elif isinstance(coll, set):
return {simple_deepcopy(v) for v in coll}
elif hasattr(coll, "__iter__"):
return [simple_deepcopy(v) for v in coll]
else:
return coll
# dict_merge from: http://appdelegateinc.com/blog/2011/01/12/merge-deeply-nested-dicts-in-python/
def quacks_like_dict(object):
""" Check if object is dict-like """
return isinstance(object, collections.Mapping)
def dict_merge(base, upd, inplace=False):
""" Merge two deep dicts non-destructively.
Uses a stack to avoid maximum recursion depth exceptions.
@param base the dict to merge into
@param upd the content to merge
@param inplace change base if True, otherwise deepcopy base
@retval the merged dict (base if inplace else a merged deepcopy)
"""
assert quacks_like_dict(base), quacks_like_dict(upd)
dst = base if inplace else deepcopy(base)
stack = [(dst, upd)]
while stack:
current_dst, current_src = stack.pop()
for key in current_src:
if key not in current_dst:
current_dst[key] = current_src[key]
else:
if quacks_like_dict(current_src[key]) and quacks_like_dict(current_dst[key]) :
stack.append((current_dst[key], current_src[key]))
else:
current_dst[key] = current_src[key]
return dst
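# Example (illustrative): values from `upd` win on conflicts, and `base` is left
# untouched unless inplace=True.
#
#   base = {"server": {"host": "localhost", "port": 5672}}
#   upd = {"server": {"port": 5673}, "debug": True}
#   dict_merge(base, upd)
#   # -> {"server": {"host": "localhost", "port": 5673}, "debug": True}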
def get_safe(dict_instance, keypath, default=None):
"""
Returns a value with in a nested dict structure from a dot separated
path expression such as "system.server.host" or a list of key entries
@retval Value if found or None
"""
try:
obj = dict_instance
keylist = keypath if type(keypath) is list else keypath.split('.')
for key in keylist:
obj = obj[key]
return obj
except Exception as ex:
return default
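# Example (illustrative):
#
#   conf = {"system": {"server": {"host": "example.org"}}}
#   get_safe(conf, "system.server.host")                 # -> "example.org"
#   get_safe(conf, ["system", "server", "port"], 5672)   # -> 5672 (default)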
def named_any(name):
"""
Retrieve a Python object by its fully qualified name from the global Python
module namespace. The first part of the name, that describes a module,
will be discovered and imported. Each subsequent part of the name is
treated as the name of an attribute of the object specified by all of the
name which came before it.
@param name: The name of the object to return.
@return: the Python object identified by 'name'.
"""
if not name:
raise Exception("Empty module name")
names = name.split('.')
module = None
    mod_names = names[:]
    obj_names = []
    while not module:
        if mod_names:
            trialname = '.'.join(mod_names)
            try:
                module = importlib.import_module(trialname)
            except Exception as ex:
                obj_names.append(mod_names.pop())
else:
if len(names) == 1:
raise Exception("No module named %r" % (name,))
else:
raise Exception('%r does not name an object' % (name,))
obj = module
for n in reversed(obj_names):
obj = getattr(obj, n)
return obj
def for_name(modpath, classname):
"""
Returns a class of "classname" from module "modname".
"""
module = __import__(modpath, fromlist=[classname])
classobj = getattr(module, classname)
return classobj()
def current_time_millis():
return int(round(time.time() * 1000))
get_ion_ts_millis = current_time_millis
def get_ion_ts():
"""
Returns standard ION representation of a global timestamp.
It is defined as a str representing an integer number, the millis in UNIX epoch,
which started 1970-01-01 midnight UTC
"""
return str(current_time_millis())
def get_datetime(ts, local_time=True):
"""
Returns a naive datetime object in either local time or UTC time based on the given ION
timestamp
@param ts ION timestamp (str with millis in epoch)
@param local_time if True, returns local time (default), otherwise UTC
@retval datetime instance, naive
"""
tsf = float(ts) / 1000
return datetime.datetime.fromtimestamp(tsf) if local_time else datetime.datetime.utcfromtimestamp(tsf)
def get_datetime_str(ts, show_millis=False, local_time=True):
"""
Returns a string with date and time representation from an ION timestamp
@param ts ION timestamp (str with millis in epoch)
@param show_millis If True, appends the milli seconds
@param local_time if True, returns local time (default), otherwise UTC
@retval str with ION standard date and time representation
"""
dt = get_datetime(ts, local_time)
dts = str(dt)
period_idx = dts.rfind(".")
if period_idx != -1:
dts = dts[:period_idx+4] if show_millis else dts[:period_idx]
return dts
def parse_ion_ts(ts):
""" Returns a Python timestamp from an ION ts """
return float(ts) / 1000
def is_valid_ts(ts):
""" Check if given ts is string with only digits and length of 13 """
# We assume no timestamps before 2001-09
return isinstance(ts, basestring) and len(ts) == 13 and ts.isdigit() and ts[0] != "0"
def itersubclasses(cls, _seen=None):
"""
itersubclasses(cls)
http://code.activestate.com/recipes/576949-find-all-subclasses-of-a-given-class/
Generator over all subclasses of a given class, in depth first order.
"""
if not isinstance(cls, type):
raise TypeError('itersubclasses must be called with '
'new-style classes, not %.100r' % cls)
if _seen is None: _seen = set()
try:
subs = cls.__subclasses__()
except TypeError: # fails only when cls is type
subs = cls.__subclasses__(cls)
for sub in subs:
if sub not in _seen:
_seen.add(sub)
yield sub
for sub in itersubclasses(sub, _seen):
yield sub
def getleafsubclasses(cls):
"""
Returns all subclasses that have no further subclasses, for the given class
"""
scls = itersubclasses(cls)
return [s for s in scls if not s.__subclasses__()]
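# Example (illustrative):
#
#   class Base(object): pass
#   class Mid(Base): pass
#   class Leaf(Mid): pass
#
#   list(itersubclasses(Base))   # -> [Mid, Leaf] (depth first)
#   getleafsubclasses(Base)      # -> [Leaf]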
# _abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
BASIC_VALID = "_%s%s" % (string.ascii_letters, string.digits)
# -_.()abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
NORMAL_VALID = "-_.() %s%s" % (string.ascii_letters, string.digits)
def create_valid_identifier(name, valid_chars=BASIC_VALID, dot_sub=None, ws_sub=None):
if dot_sub:
name = name.replace('.', dot_sub)
if ws_sub:
name = name.replace(' ', ws_sub)
return str(''.join(c for c in name if c in valid_chars))
def create_basic_identifier(name):
return create_valid_identifier(name, dot_sub='_', ws_sub='_')
def is_basic_identifier(name):
return name == create_basic_identifier(name)
def is_valid_identifier(name, valid_chars=BASIC_VALID, dot_sub=None, ws_sub=None):
return name == create_valid_identifier(name, valid_chars=valid_chars, dot_sub=dot_sub, ws_sub=ws_sub)
#Used by json encoder
def ion_object_encoder(obj):
return obj.__dict__
def make_json(data):
result = simplejson.dumps(data, default=ion_object_encoder, indent=2)
return result
#Global utility functions for generating unique names and UUIDs
# get a UUID - URL safe, Base64
def get_a_Uuid():
r_uuid = base64.urlsafe_b64encode(uuid.uuid4().bytes)
return r_uuid.replace('=', '')
# generate a unique identifier based on a UUID and optional information
def create_unique_identifier(prefix=''):
return prefix + '_' + get_a_Uuid()
def get_default_sysname():
return 'ion_%s' % os.uname()[1].replace('.', '_')
def get_default_container_id():
return string.replace('%s_%d' % (os.uname()[1], os.getpid()), ".", "_")
BASIC_TYPE_SET = {str, bool, int, float, long, NoneType}
def recursive_encode(obj, encoding="utf8"):
"""Recursively walks a dict/list collection and in-place encodes any unicode keys and values in
dicts and lists to UTF-8 encoded str"""
if isinstance(obj, dict):
fix_list = None
for k, v in obj.iteritems():
if type(k) is unicode:
if fix_list is None:
fix_list = []
fix_list.append(k)
if type(v) in BASIC_TYPE_SET:
continue
if type(v) is unicode:
obj[k] = v.encode(encoding)
continue
recursive_encode(v, encoding=encoding)
if fix_list:
for k in fix_list:
v = obj.pop(k)
newk = k.encode(encoding)
obj[newk] = v
elif isinstance(obj, list):
for i, v in enumerate(obj):
if type(v) in BASIC_TYPE_SET:
continue
if type(v) is unicode:
obj[i] = v.encode(encoding)
continue
recursive_encode(v, encoding=encoding)
else:
raise RuntimeError("unknown type: %s" % type(obj))
return obj
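if __name__ == "__main__":
    # Minimal self-check added for illustration only; it is not part of the
    # library API. Unicode keys and values are converted in place to UTF-8 str.
    sample = {u"key": [u"value", 1, None]}
    recursive_encode(sample)
    print(sample)   # {'key': ['value', 1, None]} with plain str entries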
| bsd-2-clause | 2,506,083,733,855,044,600 | 33.200477 | 108 | 0.625611 | false | 3.858374 | false | false | false |
uweschmitt/emzed | libms/WebserviceClients/Metlin.py | 1 | 3159 | import pdb
#encoding:latin-1
import requests
import urllib2
import userConfig
from collections import OrderedDict
from ..DataStructures.Table import Table
class MetlinMatcher(object):
ws_col_names = [ "formula", "mass", "name", "molid"]
ws_col_types = [ str, float, str, int]
ws_col_formats = [ "%s", "%.5f", "%s", "%d" ]
url = "http://metlin.scripps.edu/REST/search/index.php"
info_url = "http://metlin.scripps.edu/metabo_info.php?molid=%d"
batch_size = 90 # should be 500 as metlin promises, but this is false
    # the REST webservice of METLIN returns a result set which does not explain
# which combination of theoretical mass and adduct results in a match,
# which is not what we want. eg one gets the same result set for
# masses=[195.0877, 194.07904], adducts=["M"] and for masses=[195.0877],
# adducts = ["M", "M+H"]
# so we start a separate query for mass and each adduct !
@staticmethod
def _query(masses, adduct, ppm):
token = userConfig.getMetlinToken()
if not token:
raise Exception("need metlin token in user config file")
params = OrderedDict()
params["token"] = token # "DqeN7qBNEAzVNm9n"
params["mass[]"] = masses
params["adduct[]"] = [adduct]
params["tolunits"] = "ppm"
params["tolerance"] = ppm
r = requests.get(MetlinMatcher.url, params=params)
if r.status_code != 200:
raise Exception("matlin query %s failed: %s" %
(urllib2.unquote(r.url), r.text))
try:
j = r.json()
except:
raise Exception("invalid answer from %s" % r.url)
ws_col_names = MetlinMatcher.ws_col_names
ws_col_types = MetlinMatcher.ws_col_types
ws_col_formats = MetlinMatcher.ws_col_formats
info_url = MetlinMatcher.info_url
tables = []
for m_z, ji in zip(masses, j):
rows = []
if isinstance(ji, dict):
ji = ji.values()
for jii in ji:
if jii:
rows.append([t(jii[n])\
for t, n in zip(ws_col_types, ws_col_names)])
if rows:
ti = Table(ws_col_names, ws_col_types, ws_col_formats, rows[:])
ti.addColumn("m_z", m_z, insertBefore=0)
ti.addColumn("adduct", adduct, insertBefore=1)
ti.addColumn("link", ti.molid.apply(lambda d: info_url % d))
tables.append(ti)
return tables
@staticmethod
def query(masses, adducts, ppm):
all_tables = []
for adduct in adducts:
for i0 in range(0, len(masses), MetlinMatcher.batch_size):
mass_slice = masses[i0:i0 + MetlinMatcher.batch_size]
tables = MetlinMatcher._query(mass_slice, adduct, ppm)
all_tables.extend(tables)
result_table = all_tables[0]
result_table.append(all_tables[1:])
return result_table
if 0:
t = MetlinMatcher.query(["282.222813", "292.229272"], 50, "-")
t.info()
t._print()
| gpl-3.0 | -3,001,720,266,134,262,300 | 32.967742 | 79 | 0.566318 | false | 3.467618 | false | false | false |
KatiRG/flyingpigeon | flyingpigeon/processes/wps_subset_continents.py | 1 | 5476 | import os
import tarfile
from flyingpigeon.subset import clipping
from flyingpigeon.subset import _CONTINENTS_
from pywps.Process import WPSProcess
from flyingpigeon.log import init_process_logger
import logging
logger = logging.getLogger(__name__)
class subset_continentsProcess(WPSProcess):
def __init__(self):
WPSProcess.__init__(
self,
identifier="subset_continents",
title="Subset continents",
version="0.9",
abstract="Returns only the selected polygon for each input dataset",
metadata=[
{"title": "LSCE", "href": "http://www.lsce.ipsl.fr/en/index.php"},
{"title": "Documentation", "href": "http://flyingpigeon.readthedocs.io/en/latest/"},
],
statusSupported=True,
storeSupported=True
)
self.resource = self.addComplexInput(
identifier="resource",
title="Resource",
abstract="NetCDF Files or archive (tar/zip) containing netCDF files",
minOccurs=1,
maxOccurs=1000,
maxmegabites=5000,
formats=[{"mimeType": "application/x-netcdf"},
{"mimeType": "application/x-tar"},
{"mimeType": "application/zip"}],
)
self.region = self.addLiteralInput(
identifier="region",
title="Region",
default='Africa',
type=type(''),
minOccurs=1,
maxOccurs=len(_CONTINENTS_),
allowedValues=_CONTINENTS_ # REGION_EUROPE #COUNTRIES #
)
# self.dimension_map = self.addLiteralInput(
# identifier="dimension_map",
# title="Dimension Map",
# abstract= 'if not ordered in lon/lat a dimension map has to be provided',
# type=type(''),
# minOccurs=0,
# maxOccurs=1
# )
self.mosaic = self.addLiteralInput(
identifier="mosaic",
title="Mosaic",
abstract="If Mosaic is checked, selected polygons will be merged to one Mosaic for each input file",
default=False,
type=type(False),
minOccurs=0,
maxOccurs=1,
)
# self.variable = self.addLiteralInput(
# identifier="variable",
# title="Variable",
# abstract="Variable to be expected in the input files (Variable will be detected if not set)",
# default=None,
# type=type(''),
# minOccurs=0,
# maxOccurs=1,
# )
self.output = self.addComplexOutput(
title="Subsets",
abstract="Tar archive containing the netCDF files",
formats=[{"mimeType": "application/x-tar"}],
asReference=True,
identifier="output",
)
self.output_netcdf = self.addComplexOutput(
title="Subsets for one dataset",
abstract="NetCDF file with subsets of one dataset.",
formats=[{"mimeType": "application/x-netcdf"}],
asReference=True,
identifier="ncout",
)
self.output_log = self.addComplexOutput(
identifier="output_log",
title="Logging information",
abstract="Collected logs during process run.",
formats=[{"mimeType": "text/plain"}],
asReference=True,
)
def execute(self):
from ast import literal_eval
from flyingpigeon.utils import archive, archiveextract
init_process_logger('log.txt')
self.output_log.setValue('log.txt')
ncs = archiveextract(self.getInputValues(identifier='resource'))
mosaic = self.mosaic.getValue()
regions = self.region.getValue()
# variable = self.variable.getValue()
# logger.info('regions: %s' % regions)
# dimension_map = self.dimension_map.getValue()
# if dimension_map != None:
# dimension_map = literal_eval(dimension_map)
logger.info('ncs = %s', ncs)
logger.info('regions = %s', regions)
logger.info('mosaic = %s', mosaic)
# logger.info('dimension_map = %s', dimension_map)
self.status.set('Arguments set for subset process', 10)
logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(ncs)))
try:
results = clipping(
resource=ncs,
polygons=regions, # self.region.getValue(),
mosaic=mosaic,
spatial_wrapping='wrap',
# variable=variable,
dir_output=os.path.abspath(os.curdir),
# dimension_map=dimension_map,
)
except Exception as e:
msg = 'clipping failed'
logger.exception(msg)
raise Exception(msg)
if not results:
raise Exception('no results produced.')
# prepare tar file
try:
tarf = archive(results)
logger.info('Tar file prepared')
except Exception as e:
msg = 'Tar file preparation failed'
logger.exception(msg)
raise Exception(msg)
self.output.setValue(tarf)
i = next((i for i, x in enumerate(results) if x), None)
self.output_netcdf.setValue(results[i])
self.status.set('done', 100)
| apache-2.0 | -3,787,420,159,431,396,400 | 33.658228 | 112 | 0.546567 | false | 4.23839 | false | false | false |
dthain/cctools | chirp/src/bindings/python3/chirp.binding.py | 1 | 22700 | ## @package ChirpPython
#
# Python Chirp bindings.
#
# The objects and methods provided by this package correspond to the native
# C API in @ref chirp_reli.h and chirp_swig_wrap.h
#
# The SWIG-based Python bindings provide a higher-level interface that
# revolves around:
#
# - @ref Chirp.Client
# - @ref Chirp.Stat
import os
import time
import json
##
# \class Chirp.Client
# Python Client object
#
# This class is used to create a chirp client
class Client(object):
##
# Create a new chirp client
#
# @param self Reference to the current task object.
# @param hostport The host:port of the server.
# @param timeout The time to wait for a server response on every request.
    # @param authentication   A list of preferred authentication methods. E.g., ['ticket', 'unix']
# @param tickets A list of ticket filenames.
# @param debug Generate client debug output.
def __init__(self, hostport, timeout=60, authentication=None, tickets=None, debug=False):
self.hostport = hostport
self.timeout = timeout
if debug:
cctools_debug_config('chirp_python_client')
cctools_debug_flags_set('chirp')
if tickets and (authentication is None):
authentication = ['ticket']
self.__set_tickets(tickets)
if authentication is None:
auth_register_all()
else:
for auth in authentication:
auth_register_byname(auth)
self.identity = self.whoami()
if self.identity == '':
raise AuthenticationFailure(authentication)
def __exit__(self, exception_type, exception_value, traceback):
chirp_reli_disconnect(self.hostport)
def __del__(self):
chirp_reli_disconnect(self.hostport)
def __stoptime(self, absolute_stop_time=None, timeout=None):
if timeout is None:
timeout = self.timeout
if absolute_stop_time is None:
absolute_stop_time = time.time() + timeout
return absolute_stop_time
def __set_tickets(self, tickets):
tickets_str = None
if tickets is None:
try:
tickets_str = os.environ['CHIRP_CLIENT_TICKETS']
except KeyError:
tickets_str = None
else:
tickets_str = ','.join(tickets)
if tickets_str is not None:
auth_ticket_load(tickets_str)
##
# Returns a string with identity of the client according to the server.
#
# @param self Reference to the current task object.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def whoami(self, absolute_stop_time=None, timeout=None):
return chirp_wrap_whoami(self.hostport, self.__stoptime(absolute_stop_time, timeout))
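    # Example (illustrative; the host:port and authentication method are
    # hypothetical):
    #
    #   client = Client('chirp.example.org:9094', authentication=['unix'])
    #   print(client.whoami())
    #   listing = client.ls('/')    # list of Chirp.Stat objects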
##
    # Returns a list of strings with the ACL entries of the given directory.
# Throws an IOError on error (no such directory).
#
# @param self Reference to the current task object.
# @param path Target directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def listacl(self, path='/', absolute_stop_time=None, timeout=None):
acls = chirp_wrap_listacl(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
if acls is None:
raise IOError(path)
return acls.split('\n')
##
    # Grants the given rights to the given subject on the target directory.
    # Throws a GeneralFailure on error.
#
# @param self Reference to the current task object.
# @param path Target directory.
# @param subject Target subject.
# @param rights Permissions to be granted.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def setacl(self, path, subject, rights, absolute_stop_time=None, timeout=None):
result = chirp_reli_setacl(self.hostport, path, subject, rights, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise GeneralFailure('setacl', result, [path, subject, rights])
return result
##
    # Resets the ACL of the given directory so that only the calling user holds the given rights.
    # Throws a GeneralFailure on error.
#
# @param self Reference to the current task object.
# @param path Target directory.
# @param rights Permissions to be granted.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def resetacl(self, path, rights, absolute_stop_time=None, timeout=None):
result = chirp_wrap_resetacl(self.hostport, path, rights, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise GeneralFailure('resetacl', result, [path, rights])
return result
##
    # Returns a list of Stat objects for the entries in the path.
# Throws an IOError on error (no such directory).
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def ls(self, path, absolute_stop_time=None, timeout=None):
dr = chirp_reli_opendir(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
files = []
        if dr is None:
raise IOError(path)
while True:
d = chirp_reli_readdir(dr)
if d is None: break
files.append(Stat(d.name, d.info))
return files
##
# Returns a Chirp.Stat object with information on path.
# Throws an IOError on error (e.g., no such path or insufficient permissions).
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def stat(self, path, absolute_stop_time=None, timeout=None):
info = chirp_wrap_stat(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
if info is None:
raise IOError(path)
return Stat(path, info)
##
# Changes permissions on path.
# Throws a GeneralFailure on error (e.g., no such path or insufficient permissions).
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param mode Desired permissions (e.g., 0755)
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def chmod(self, path, mode, absolute_stop_time=None, timeout=None):
result = chirp_reli_chmod(self.hostport, path, mode, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise GeneralFailure('chmod', result, [path, mode])
return result
##
# Copies local file/directory source to the chirp server as file/directory destination.
# If destination is not given, source name is used.
# Raises Chirp.TransferFailure on error.
#
# @param self Reference to the current task object.
# @param source A local file or directory.
# @param destination File or directory name to use in the server (defaults to source).
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
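    #
    # Illustrative example (added sketch; paths are hypothetical):
    # @code
    # client.put('/tmp/a.txt', '/users/magrat/a.txt')
    # @endcode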
def put(self, source, destination=None, absolute_stop_time=None, timeout=None):
if destination is None:
destination = source
result = chirp_recursive_put(self.hostport,
source, destination,
self.__stoptime(absolute_stop_time, timeout))
if result > -1:
return result
raise TransferFailure('put', result, source, destination)
##
# Copies server file/directory source to the local file/directory destination.
# If destination is not given, source name is used.
# Raises Chirp.TransferFailure on error.
#
# @param self Reference to the current task object.
# @param source A server file or directory.
# @param destination File or directory name to be used locally (defaults to source).
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def get(self, source, destination=None, absolute_stop_time=None, timeout=None):
if destination is None:
destination = source
result = chirp_recursive_get(self.hostport,
source, destination,
self.__stoptime(absolute_stop_time, timeout))
if result > -1:
return result
raise TransferFailure('get', result, source, destination)
##
# Removes the given file or directory from the server.
# Raises OSError on error.
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def rm(self, path, absolute_stop_time=None, timeout=None):
status = chirp_reli_rmall(self.hostport, path, self.__stoptime(absolute_stop_time, timeout))
if status < 0:
raise OSError
##
# Recursively create the directories in path.
# Raises OSError on error.
#
# @param self Reference to the current task object.
# @param path Target file/directory.
# @param mode Unix permissions for the created directory.
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def mkdir(self, path, mode=493, absolute_stop_time=None, timeout=None):
result = chirp_reli_mkdir_recursive(self.hostport, path, mode, self.__stoptime(absolute_stop_time, timeout))
if result < 0:
raise OSError
return result
##
# Computes the checksum of path.
# Raises IOError on error.
#
# @param self Reference to the current task object.
# @param path Target file.
# @param algorithm One of 'md5' or 'sha1' (default).
# @param absolute_stop_time If given, maximum number of seconds since
# epoch to wait for a server response.
# (Overrides any timeout.)
# @param timeout If given, maximum number of seconds to
# wait for a server response.
def hash(self, path, algorithm='sha1', absolute_stop_time=None, timeout=None):
hash_hex = chirp_wrap_hash(self.hostport, path, algorithm, self.__stoptime(absolute_stop_time, timeout))
if hash_hex is None:
raise IOError
return hash_hex
##
# Creates a chirp job. See http://ccl.cse.nd.edu/software/manuals/chirp.html for details.
#
# @param job_description A dictionary with a job chirp description.
#
# @code
    # job_description = {
    #     'executable': '/bin/tar',
    #     'arguments':  ['tar', '-cf', 'archive.tar', 'a', 'b'],
    #     'files':      [{'task_path': 'a',
    #                     'serv_path': '/users/magrat/a.txt',
    #                     'type': 'INPUT'},
    #                    {'task_path': 'b',
    #                     'serv_path': '/users/magrat/b.txt',
    #                     'type': 'INPUT'},
    #                    {'task_path': 'archive.tar',
    #                     'serv_path': '/users/magrat/archive.tar',
    #                     'type': 'OUTPUT'}]
    # }
    # job_id = client.job_create(job_description)
# @endcode
def job_create(self, job_description):
job_json = json.dumps(job_description)
job_id = chirp_wrap_job_create(self.hostport, job_json, self.__stoptime())
if job_id < 0:
raise ChirpJobError('create', job_id, job_json)
return job_id
##
# Kills the jobs identified with the different job ids.
#
# @param job_ids Job ids of the chirp jobs to be killed.
#
def job_kill(self, *job_ids):
ids_str = json.dumps(job_ids)
result = chirp_wrap_job_kill(self.hostport, ids_str, self.__stoptime())
if result < 0:
raise ChirpJobError('kill', result, ids_str)
return result
##
# Commits (starts running) the jobs identified with the different job ids.
#
# @param job_ids Job ids of the chirp jobs to be committed.
#
def job_commit(self, *job_ids):
ids_str = json.dumps(job_ids)
result = chirp_wrap_job_commit(self.hostport, ids_str, self.__stoptime())
if result < 0:
raise ChirpJobError('commit', result, ids_str)
return result
##
# Reaps the jobs identified with the different job ids.
#
# @param job_ids Job ids of the chirp jobs to be reaped.
#
def job_reap(self, *job_ids):
ids_str = json.dumps(job_ids)
result = chirp_wrap_job_reap(self.hostport, ids_str, self.__stoptime())
if result < 0:
raise ChirpJobError('reap', result, ids_str)
return result
##
# Obtains the current status for each job id. The value returned is a
# list which contains a dictionary reference per job id.
#
    # @param job_ids   Job ids of the chirp jobs to query.
#
def job_status(self, *job_ids):
ids_str = json.dumps(job_ids)
status = chirp_wrap_job_status(self.hostport, ids_str, self.__stoptime())
if status is None:
raise ChirpJobError('status', None, ids_str)
return json.loads(status)
##
# Waits waiting_time seconds for the job_id to terminate. Return value is
    # the same as job_status. If the call times out, an empty string is
    # returned. If job_id is missing, job_wait waits for any of the user's jobs.
#
# @param waiting_time maximum number of seconds to wait for a job to finish.
# @param job_id id of the job to wait.
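    #
    # Illustrative example (added sketch; job_id comes from a prior job_create call):
    # @code
    # state = client.job_wait(30, job_id)
    # @endcode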
def job_wait(self, waiting_time, job_id=0):
status = chirp_wrap_job_wait(self.hostport, job_id, waiting_time, self.__stoptime())
if status is None:
raise ChirpJobError('status', None, job_id)
return json.loads(status)
##
# Python Stat object
#
# This class is used to record stat information for files/directories of a chirp server.
class Stat(object):
def __init__(self, path, cstat):
self._path = path
self._info = cstat
##
# Target path.
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.path
# @endcode
@property
def path(self):
return self._path
##
# ID of device containing file.
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.device
# @endcode
@property
def device(self):
return self._info.cst_dev
##
# inode number
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.inode
# @endcode
@property
def inode(self):
return self._info.cst_ino
##
# file mode permissions
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.mode
# @endcode
@property
def mode(self):
return self._info.cst_mode
##
# number of hard links
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.nlink
# @endcode
@property
def nlink(self):
return self._info.cst_nlink
##
# user ID of owner
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.uid
# @endcode
@property
def uid(self):
return self._info.cst_uid
##
# group ID of owner
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.gid
# @endcode
@property
def gid(self):
return self._info.cst_gid
##
# device ID if special file
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.rdev
# @endcode
@property
def rdev(self):
return self._info.cst_rdev
##
# total size, in bytes
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.size
# @endcode
@property
def size(self):
return self._info.cst_size
##
# block size for file system I/O
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.block_size
# @endcode
@property
def block_size(self):
return self._info.cst_blksize
##
# number of 512B blocks allocated
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.blocks
# @endcode
@property
def blocks(self):
return self._info.cst_blocks
##
# number of seconds since epoch since last access
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.atime
# @endcode
@property
def atime(self):
return self._info.cst_atime
##
# number of seconds since epoch since last modification
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.mtime
# @endcode
@property
def mtime(self):
return self._info.cst_mtime
##
# number of seconds since epoch since last status change
#
# @a Note: This is defined using property decorator. So it must be called without parentheses
# (). For example:
# @code
# >>> print s.ctime
# @endcode
@property
def ctime(self):
return self._info.cst_ctime
def __repr__(self):
return "%s uid:%d gid:%d size:%d" % (self.path, self.uid, self.gid, self.size)
class AuthenticationFailure(Exception):
pass
class GeneralFailure(Exception):
def __init__(self, action, status, value):
message = "Error with %s(%s) %s" % (action, status, value)
super(GeneralFailure, self).__init__(message)
self.action = action
self.status = status
self.value = value
class TransferFailure(Exception):
def __init__(self, action, status, source, dest):
message = "Error with %s(%s) %s %s" % (action, status, source, dest)
super(TransferFailure, self).__init__(message)
self.action = action
self.status = status
self.source = source
self.dest = dest
class ChirpJobError(Exception):
def __init__(self, action, status, value):
message = "Error with %s(%s) %s" % (action, status, value)
super(ChirpJobError, self).__init__(message)
self.action = action
self.status = status
self.value = value
# @endcode
| gpl-2.0 | -854,934,521,074,259,500 | 33.869432 | 118 | 0.574758 | false | 4.145362 | false | false | false |
RudolfCardinal/crate | crate_anon/nlp_manager/cloud_request_sender.py | 1 | 12012 | #!/usr/bin/env python
"""
crate_anon/nlp_manager/cloud_request_sender.py
===============================================================================
Copyright (C) 2015-2021 Rudolf Cardinal ([email protected]).
This file is part of CRATE.
CRATE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CRATE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CRATE. If not, see <http://www.gnu.org/licenses/>.
===============================================================================
**CloudRequestSender class.**
"""
# =============================================================================
# Imports
# =============================================================================
from enum import auto, Enum
import logging
from typing import (
Any, Dict, List, Optional, Tuple, Generator, TYPE_CHECKING,
)
from crate_anon.nlp_manager.constants import (
DEFAULT_REPORT_EVERY_NLP,
)
from crate_anon.nlp_manager.input_field_config import (
InputFieldConfig,
FN_SRCDB,
FN_SRCTABLE,
FN_SRCPKFIELD,
FN_SRCPKVAL,
FN_SRCPKSTR,
FN_SRCFIELD,
)
from crate_anon.nlp_manager.models import FN_SRCHASH
from crate_anon.nlp_manager.cloud_request import (
CloudRequestProcess,
RecordNotPrintable,
RecordsPerRequestExceeded,
RequestTooLong,
)
from crate_anon.nlp_manager.cloud_run_info import CloudRunInfo
if TYPE_CHECKING:
from http.cookiejar import CookieJar
log = logging.getLogger(__name__)
# =============================================================================
# CloudRequestSender
# =============================================================================
class CloudRequestSender(object):
"""
    Class to encapsulate an NLP request outbound to a cloud NLP server.
"""
class State(Enum):
"""
Request state.
"""
BUILDING_REQUEST = auto()
SENDING_REQUEST = auto()
FINISHED = auto()
def __init__(
self,
text_generator: Generator[Tuple[str, Dict[str, Any]], None, None],
crinfo: CloudRunInfo,
ifconfig: InputFieldConfig,
report_every: int = DEFAULT_REPORT_EVERY_NLP,
incremental: bool = False,
queue: bool = True) -> None:
"""
Args:
text_generator:
Generator that generates text strings from the source
database. See
:meth:`crate_anon.nlp_manager.input_field_config.InputFieldConfig.gen_text`.
crinfo:
A :class:`crate_anon.nlp_manager.cloud_run_info.CloudRunInfo`
object.
ifconfig:
An
:class:`crate_anon.nlp_manager.input_field_config.InputFieldConfig`
object.
report_every:
Report to the log every *n* requests.
incremental:
Process in incremental mode (ignoring source records that have
not changed since last time)?
queue:
Queue the requests for back-end processing (rather than waiting
for an immediate reply)?
"""
self._text_generator = text_generator
self._crinfo = crinfo
self._ifconfig = ifconfig
self._report_every = report_every
self._incremental = incremental
self._queue = queue
self._global_recnum = -1
self._requests = [] # type: List[CloudRequestProcess]
self._cookies = None # type: Optional[CookieJar]
self._request_count = 0 # number of requests sent
self._text = None # type: Optional[str]
self._other_values = None # type: Optional[Dict[str, Any]]
self._request_is_empty = True
self._need_new_record = True
self._need_new_request = True
self._num_recs_processed = 0
self._state = self.State.BUILDING_REQUEST
self._request = None # type: Optional[CloudRequestProcess]
def send_requests(
self,
global_recnum: int) -> Tuple[List[CloudRequestProcess], bool, int]:
"""
Sends off a series of cloud requests and returns them as a list.
``self._queue`` determines whether these are queued requests or not.
Also returns whether the generator for the text is empty.
Return tuple is: ``requests, some_records_processed, global_recnum``.
"""
self._global_recnum = global_recnum
self._requests = []
self._cookies = None
self._request_count = 1
self._text = None
self._other_values = None
self._request_is_empty = True
self._need_new_record = True
self._need_new_request = True
# Check processors are available
available_procs = self._crinfo.get_remote_processors()
if not available_procs:
return [], False, self._global_recnum
self._num_recs_processed = 0
self._state = self.State.BUILDING_REQUEST
# If we've reached the limit of records before commit, return to
# outer function in order to process and commit (or write to file if
# it's a queued request)
while self._state != self.State.FINISHED:
if self._state == self.State.BUILDING_REQUEST:
self._build_request()
if self._state == self.State.SENDING_REQUEST:
self._send_request()
return self._requests, self._num_recs_processed > 0, self._global_recnum # noqa
def _build_request(self) -> None:
"""
Adds another record to the outbound request, until the request is
fully built. Updates our state to reflect what needs to happen next.
"""
if self._need_new_record:
try:
self._get_next_record()
except StopIteration:
self._update_state_for_no_more_records()
return
hasher = self._crinfo.nlpdef.hash
srchash = hasher(self._text)
if self._incremental and self._record_already_processed(srchash):
return
self._num_recs_processed += 1
self._other_values[FN_SRCHASH] = srchash
if self._need_new_request:
self._request = self._get_new_cloud_request()
self._request_is_empty = True
self._need_new_request = False
self._need_new_record = True
# Add the text to the cloud request with the appropriate metadata
try:
self._request.add_text(
self._text, self._other_values
)
# added OK, request now has some text
self._request_is_empty = False
except RecordNotPrintable:
# Text contained no printable characters. Skip it.
pass
except (RecordsPerRequestExceeded, RequestTooLong) as e:
if isinstance(e, RequestTooLong) and self._request_is_empty:
# Get some new text next time
log.warning("Skipping text that's too long to send")
else:
# Try same text again with a fresh request
self._need_new_record = False
self._state = self.State.SENDING_REQUEST
if self._record_limit_reached():
self._state = self.State.SENDING_REQUEST
def _get_new_cloud_request(self) -> CloudRequestProcess:
"""
Creates and returns a new
:class:`crate_anon.nlp_manager.cloud_request.CloudRequestProcess`
object.
"""
return CloudRequestProcess(self._crinfo)
def _update_state_for_no_more_records(self) -> None:
"""
No more input records are available. This means either (a) we've sent
all our requests and have finished, or (b) we're building our last
request and we need to send it. Set the state accordingly.
"""
if self._request_is_empty or self._need_new_request:
# Nothing more to send
self._state = self.State.FINISHED
return
# Send last request
self._state = self.State.SENDING_REQUEST
def _record_already_processed(self, srchash: str) -> bool:
"""
Has this source record (identified by its PK and its hash) already been
processed? (If so, then in incremental mode, we can skip it.)
"""
pkval = self._other_values[FN_SRCPKVAL]
pkstr = self._other_values[FN_SRCPKSTR]
progrec = self._ifconfig.get_progress_record(pkval, pkstr)
if progrec is not None:
if progrec.srchash == srchash:
log.debug("Record previously processed; skipping")
return True
log.debug("Record has changed")
else:
log.debug("Record is new")
return False
def _record_limit_reached(self) -> bool:
"""
Have we processed as many records as we're allowed before we should
COMMIT to the database?
"""
limit_before_commit = self._crinfo.cloudcfg.limit_before_commit
return self._num_recs_processed >= limit_before_commit
def _get_next_record(self) -> None:
"""
Reads the next text record and metadata into ``self._text`` and
``self._other_values``.
Raises:
:exc:`StopIteration` if there are no more records
"""
self._text, self._other_values = next(self._text_generator)
self._global_recnum += 1
pkval = self._other_values[FN_SRCPKVAL]
pkstr = self._other_values[FN_SRCPKSTR]
# 'ifconfig.get_progress_record' expects pkstr to be None if it's
# empty
if not pkstr:
pkstr = None
if self._report_every and self._global_recnum % self._report_every == 0: # noqa
# total number of records in table
totalcount = self._ifconfig.get_count()
log.info(
"Processing {db}.{t}.{c}, PK: {pkf}={pkv} "
"(record {g_recnum}/{totalcount})".format(
db=self._other_values[FN_SRCDB],
t=self._other_values[FN_SRCTABLE],
c=self._other_values[FN_SRCFIELD],
pkf=self._other_values[FN_SRCPKFIELD],
pkv=pkstr if pkstr else pkval,
g_recnum=self._global_recnum,
totalcount=totalcount
)
)
def _send_request(self) -> None:
"""
Send a pending request to the remote NLP server.
Update the state afterwards.
"""
self._request.send_process_request(
queue=self._queue,
cookies=self._cookies,
include_text_in_reply=self._crinfo.cloudcfg.has_gate_processors
)
# If there's a connection error, we only get this far if we
# didn't choose to stop at failure
if self._request.request_failed:
log.warning("Continuing after failed request.")
else:
if self._request.cookies:
self._cookies = self._request.cookies
log.info(f"Sent request to be processed: #{self._request_count} "
f"of this block")
self._request_count += 1
self._requests.append(self._request)
if self._record_limit_reached():
self._state = self.State.FINISHED
return
self._state = self.State.BUILDING_REQUEST
self._need_new_request = True
| gpl-3.0 | -5,249,729,148,744,573,000 | 34.75 | 93 | 0.565851 | false | 4.297674 | true | false | false |
tchellomello/home-assistant | homeassistant/components/vesync/switch.py | 1 | 3309 | """Support for VeSync switches."""
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .common import VeSyncDevice
from .const import DOMAIN, VS_DISCOVERY, VS_DISPATCHERS, VS_SWITCHES
_LOGGER = logging.getLogger(__name__)
DEV_TYPE_TO_HA = {
"wifi-switch-1.3": "outlet",
"ESW03-USA": "outlet",
"ESW01-EU": "outlet",
"ESW15-USA": "outlet",
"ESWL01": "switch",
"ESWL03": "switch",
"ESO15-TB": "outlet",
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches."""
async def async_discover(devices):
"""Add new devices to platform."""
_async_setup_entities(devices, async_add_entities)
disp = async_dispatcher_connect(
hass, VS_DISCOVERY.format(VS_SWITCHES), async_discover
)
hass.data[DOMAIN][VS_DISPATCHERS].append(disp)
_async_setup_entities(hass.data[DOMAIN][VS_SWITCHES], async_add_entities)
return True
@callback
def _async_setup_entities(devices, async_add_entities):
"""Check if device is online and add entity."""
dev_list = []
for dev in devices:
if DEV_TYPE_TO_HA.get(dev.device_type) == "outlet":
dev_list.append(VeSyncSwitchHA(dev))
elif DEV_TYPE_TO_HA.get(dev.device_type) == "switch":
dev_list.append(VeSyncLightSwitch(dev))
else:
_LOGGER.warning(
"%s - Unknown device type - %s", dev.device_name, dev.device_type
)
continue
async_add_entities(dev_list, update_before_add=True)
class VeSyncBaseSwitch(VeSyncDevice, SwitchEntity):
"""Base class for VeSync switch Device Representations."""
def turn_on(self, **kwargs):
"""Turn the device on."""
self.device.turn_on()
class VeSyncSwitchHA(VeSyncBaseSwitch, SwitchEntity):
"""Representation of a VeSync switch."""
def __init__(self, plug):
"""Initialize the VeSync switch device."""
super().__init__(plug)
self.smartplug = plug
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = {}
if hasattr(self.smartplug, "weekly_energy_total"):
attr["voltage"] = self.smartplug.voltage
attr["weekly_energy_total"] = self.smartplug.weekly_energy_total
attr["monthly_energy_total"] = self.smartplug.monthly_energy_total
attr["yearly_energy_total"] = self.smartplug.yearly_energy_total
return attr
@property
def current_power_w(self):
"""Return the current power usage in W."""
return self.smartplug.power
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
return self.smartplug.energy_today
def update(self):
"""Update outlet details and energy usage."""
self.smartplug.update()
self.smartplug.update_energy()
class VeSyncLightSwitch(VeSyncBaseSwitch, SwitchEntity):
"""Handle representation of VeSync Light Switch."""
def __init__(self, switch):
"""Initialize Light Switch device class."""
super().__init__(switch)
self.switch = switch
| apache-2.0 | -3,332,572,542,167,740,000 | 29.925234 | 81 | 0.642188 | false | 3.768793 | false | false | false |
FreeOpcUa/python-opcua | opcua/common/instantiate.py | 1 | 5606 | """
Instantiate a new node and its child nodes from a node type.
"""
import logging
from opcua import Node
from opcua import ua
from opcua.common import ua_utils
from opcua.common.copy_node import _rdesc_from_node, _read_and_copy_attrs
logger = logging.getLogger(__name__)
def instantiate(parent, node_type, nodeid=None, bname=None, dname=None, idx=0, instantiate_optional=True):
"""
instantiate a node type under a parent node.
nodeid and browse name of new node can be specified, or just namespace index
    If the node type has children, such as components, variables and
    properties, they are also instantiated
"""
rdesc = _rdesc_from_node(parent, node_type)
rdesc.TypeDefinition = node_type.nodeid
if nodeid is None:
nodeid = ua.NodeId(namespaceidx=idx) # will trigger automatic node generation in namespace idx
if bname is None:
bname = rdesc.BrowseName
elif isinstance(bname, str):
bname = ua.QualifiedName.from_string(bname)
nodeids = _instantiate_node(
parent.server,
Node(parent.server, rdesc.NodeId),
parent.nodeid,
rdesc,
nodeid,
bname,
dname=dname,
instantiate_optional=instantiate_optional)
return [Node(parent.server, nid) for nid in nodeids]
def _instantiate_node(server,
node_type,
parentid,
rdesc,
nodeid,
bname,
dname=None,
recursive=True,
instantiate_optional=True):
"""
instantiate a node type under parent
"""
addnode = ua.AddNodesItem()
addnode.RequestedNewNodeId = nodeid
addnode.BrowseName = bname
addnode.ParentNodeId = parentid
addnode.ReferenceTypeId = rdesc.ReferenceTypeId
addnode.TypeDefinition = rdesc.TypeDefinition
if rdesc.NodeClass in (ua.NodeClass.Object, ua.NodeClass.ObjectType):
addnode.NodeClass = ua.NodeClass.Object
_read_and_copy_attrs(node_type, ua.ObjectAttributes(), addnode)
elif rdesc.NodeClass in (ua.NodeClass.Variable, ua.NodeClass.VariableType):
addnode.NodeClass = ua.NodeClass.Variable
_read_and_copy_attrs(node_type, ua.VariableAttributes(), addnode)
elif rdesc.NodeClass in (ua.NodeClass.Method, ):
addnode.NodeClass = ua.NodeClass.Method
_read_and_copy_attrs(node_type, ua.MethodAttributes(), addnode)
elif rdesc.NodeClass in (ua.NodeClass.DataType, ):
addnode.NodeClass = ua.NodeClass.DataType
_read_and_copy_attrs(node_type, ua.DataTypeAttributes(), addnode)
else:
logger.error("Instantiate: Node class not supported: %s", rdesc.NodeClass)
raise RuntimeError("Instantiate: Node class not supported")
if dname is not None:
addnode.NodeAttributes.DisplayName = dname
res = server.add_nodes([addnode])[0]
res.StatusCode.check()
added_nodes = [res.AddedNodeId]
if recursive:
parents = ua_utils.get_node_supertypes(node_type, includeitself=True)
node = Node(server, res.AddedNodeId)
for parent in parents:
descs = parent.get_children_descriptions(includesubtypes=False)
for c_rdesc in descs:
                # skip items that already exist; prefer the 'lowest' one in the object hierarchy
if not ua_utils.is_child_present(node, c_rdesc.BrowseName):
c_node_type = Node(server, c_rdesc.NodeId)
refs = c_node_type.get_referenced_nodes(refs=ua.ObjectIds.HasModellingRule)
if not refs:
# spec says to ignore nodes without modelling rules
logger.info("Instantiate: Skip node without modelling rule %s as part of %s", c_rdesc.BrowseName, addnode.BrowseName)
continue
# exclude nodes with optional ModellingRule if requested
if not instantiate_optional and refs[0].nodeid in (ua.NodeId(ua.ObjectIds.ModellingRule_Optional), ua.NodeId(ua.ObjectIds.ModellingRule_OptionalPlaceholder)):
logger.info("Instantiate: Skip optional node %s as part of %s", c_rdesc.BrowseName, addnode.BrowseName)
continue
# if root node being instantiated has a String NodeId, create the children with a String NodeId
if res.AddedNodeId.NodeIdType is ua.NodeIdType.String:
inst_nodeid = res.AddedNodeId.Identifier + "." + c_rdesc.BrowseName.Name
nodeids = _instantiate_node(
server,
c_node_type,
res.AddedNodeId,
c_rdesc,
nodeid=ua.NodeId(identifier=inst_nodeid, namespaceidx=res.AddedNodeId.NamespaceIndex),
bname=c_rdesc.BrowseName,
instantiate_optional=instantiate_optional)
else:
nodeids = _instantiate_node(
server,
c_node_type,
res.AddedNodeId,
c_rdesc,
nodeid=ua.NodeId(namespaceidx=res.AddedNodeId.NamespaceIndex),
bname=c_rdesc.BrowseName,
instantiate_optional=instantiate_optional)
added_nodes.extend(nodeids)
return added_nodes
| lgpl-3.0 | -3,443,361,974,097,699,300 | 42.457364 | 178 | 0.600428 | false | 4.224567 | false | false | false |
anselmobd/fo2 | src/estoque/models.py | 1 | 3782 | import datetime
from pprint import pprint
from django.contrib.auth.models import User
from django.db import models
from produto.models import ProdutoItem
class EstoquePermissions(models.Model):
class Meta:
verbose_name = 'Permissões de estoque'
managed = False
permissions = (
("can_transferencia", "Pode fazer transferência entre depósitos"),
)
class TipoMovStq(models.Model):
codigo = models.CharField(
'Código',
max_length=100, unique=True, default="-")
descricao = models.CharField(
'Descrição',
max_length=100)
trans_saida = models.IntegerField(
'Transação de saída',
default=0)
trans_entrada = models.IntegerField(
'Transação de entrada',
default=0)
menu = models.BooleanField(
'Aparece no menu',
default=False)
ordem = models.IntegerField(
default=0)
renomeia = models.BooleanField(
'Renomeia',
default=False)
CHOICES = (
('1', '1 para 1'),
('M', 'Monta Kit'),
('D', 'Desmonta Kit'),
)
unidade = models.CharField(
max_length=1, choices = CHOICES, default='1')
def __str__(self):
return self.descricao
class Meta:
db_table = "fo2_est_tipo_mov"
verbose_name = "Tipo de movimento de estoque"
verbose_name_plural = "Tipos de movimentos de estoque"
_doc_mov_stq_start_range = 802000000
class DocMovStqManager(models.Manager):
def get_queryset(self):
return super(
DocMovStqManager,
self).get_queryset().annotate(
num_doc=models.F('id') + _doc_mov_stq_start_range).all()
class DocMovStq(models.Model):
descricao = models.CharField(
'Descrição',
max_length=100)
data = models.DateField()
usuario = models.ForeignKey(
User, models.PROTECT,
verbose_name='usuário')
objects = DocMovStqManager()
@property
def get_num_doc(self):
return self.id + _doc_mov_stq_start_range
def __str__(self):
return f'{self.num_doc} - {self.descricao}'
def save(self, *args, **kwargs):
if not self.id:
self.data = datetime.date.today()
super(DocMovStq, self).save(*args, **kwargs)
class Meta:
db_table = "fo2_est_doc_mov"
verbose_name = "Documento de movimento de estoque"
verbose_name_plural = "Documentos de movimentos de estoque"
class MovStq(models.Model):
tipo_mov = models.ForeignKey(
TipoMovStq, models.PROTECT,
verbose_name='Tipo de movimento')
item = models.ForeignKey(
ProdutoItem, models.PROTECT)
quantidade = models.IntegerField(
default=0)
deposito_origem = models.IntegerField(
'Depósito de origem')
deposito_destino = models.IntegerField(
'Depósito de destino')
novo_item = models.ForeignKey(
ProdutoItem, models.PROTECT, related_name='movstqdest', null=True)
documento = models.ForeignKey(
DocMovStq, models.PROTECT,
verbose_name='Documento de movimento de estoque')
usuario = models.ForeignKey(
User, models.PROTECT,
verbose_name='usuário')
obs = models.CharField(
'Observação', default='',
max_length=100)
hora = models.DateTimeField(
null=True, auto_now_add=True)
itens_extras = models.CharField(
default='', max_length=200)
def __str__(self):
return (f'{self.documento.get_num_doc}, {self.item} '
f'{self.deposito_origem}->{self.deposito_destino}')
class Meta:
db_table = "fo2_est_mov"
verbose_name = "Movimento de estoque"
verbose_name_plural = "Movimentos de estoque"
| mit | 4,410,419,019,604,873,000 | 27.293233 | 78 | 0.612809 | false | 3.341918 | false | false | false |
maas/maas | src/maasserver/models/space.py | 1 | 6013 | # Copyright 2015-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Space objects."""
import datetime
import re
from django.core.exceptions import PermissionDenied, ValidationError
from django.db.models import CharField, Manager, TextField
from django.db.models.query import QuerySet
from maasserver import DefaultMeta
from maasserver.models.cleansave import CleanSave
from maasserver.models.timestampedmodel import TimestampedModel
from maasserver.utils.orm import MAASQueriesMixin
def validate_space_name(value):
"""Django validator: `value` must be either `None`, or valid."""
if value is None:
return
namespec = re.compile(r"^[\w-]+$")
if not namespec.search(value):
raise ValidationError("Invalid space name: %s." % value)
# Name of the special, default space. This space cannot be deleted.
DEFAULT_SPACE_NAME = "space-0"
class SpaceQueriesMixin(MAASQueriesMixin):
def get_specifiers_q(self, specifiers, separator=":", **kwargs):
# Circular imports.
from maasserver.models import Subnet
# This dict is used by the constraints code to identify objects
# with particular properties. Please note that changing the keys here
# can impact backward compatibility, so use caution.
specifier_types = {
None: self._add_default_query,
"name": "__name",
"subnet": (Subnet.objects, "vlan__space"),
}
return super().get_specifiers_q(
specifiers,
specifier_types=specifier_types,
separator=separator,
**kwargs
)
class SpaceQuerySet(QuerySet, SpaceQueriesMixin):
"""Custom QuerySet which mixes in some additional queries specific to
this object. This needs to be a mixin because an identical method is needed
on both the Manager and all QuerySets which result from calling the
manager.
"""
class SpaceManager(Manager, SpaceQueriesMixin):
"""Manager for :class:`Space` model."""
def get_queryset(self):
queryset = SpaceQuerySet(self.model, using=self._db)
return queryset
def get_default_space(self):
"""Return the default space."""
now = datetime.datetime.now()
space, _ = self.get_or_create(
id=0,
defaults={"id": 0, "name": None, "created": now, "updated": now},
)
return space
def get_space_or_404(self, specifiers, user, perm):
"""Fetch a `Space` by its id. Raise exceptions if no `Space` with
this id exists or if the provided user has not the required permission
to access this `Space`.
:param specifiers: The space specifiers.
:type specifiers: string
:param user: The user that should be used in the permission check.
:type user: django.contrib.auth.models.User
:param perm: The permission to assert that the user has on the node.
:type perm: unicode
:raises: django.http.Http404_,
:class:`maasserver.exceptions.PermissionDenied`.
.. _django.http.Http404: https://
docs.djangoproject.com/en/dev/topics/http/views/
#the-http404-exception
"""
space = self.get_object_by_specifiers_or_raise(specifiers)
if user.has_perm(perm, space):
return space
else:
raise PermissionDenied()
class Space(CleanSave, TimestampedModel):
"""A `Space`.
:ivar name: The short-human-identifiable name for this space.
:ivar objects: An instance of the class :class:`SpaceManager`.
"""
# Name of the undefined space.
UNDEFINED = "undefined"
class Meta(DefaultMeta):
"""Needed for South to recognize this model."""
verbose_name = "Space"
verbose_name_plural = "Spaces"
objects = SpaceManager()
# We don't actually allow blank or null name, but that is enforced in
# clean() and save().
name = CharField(
max_length=256,
editable=True,
null=True,
blank=True,
unique=True,
validators=[validate_space_name],
)
description = TextField(null=False, blank=True)
def __str__(self):
return "name=%s" % self.get_name()
def is_default(self):
"""Is this the default space?"""
return self.id == 0
def get_name(self):
"""Return the name of the space."""
if self.name:
return self.name
else:
return "space-%s" % self.id
def clean_name(self):
reserved = re.compile(r"^space-\d+$")
if self.name is not None and self.name != "":
if self.name == Space.UNDEFINED:
raise ValidationError({"name": ["Reserved space name."]})
if reserved.search(self.name):
if self.id is None or self.name != "space-%d" % self.id:
raise ValidationError({"name": ["Reserved space name."]})
elif self.id is not None:
# Since we are not creating the space, force the (null or empty)
# name to be the default name.
self.name = "space-%d" % self.id
def save(self, *args, **kwargs):
# Name will get set by clean_name() if None or empty, and there is an
# id. We just need to handle names here for creation.
super().save(*args, **kwargs)
if self.name is None or self.name == "":
# If we got here, then we have a newly created space that needs a
# default name.
self.name = "space-%d" % self.id
self.save()
def clean(self, *args, **kwargs):
super().clean(*args, **kwargs)
self.clean_name()
@property
def subnet_set(self):
"""Backward compatibility shim to get the subnets on this space."""
# Circular imports.
from maasserver.models import Subnet
return Subnet.objects.filter(vlan__space=self)
| agpl-3.0 | -8,030,477,149,762,342,000 | 32.220994 | 79 | 0.620323 | false | 4.204895 | false | false | false |
vmware/pyvmomi-community-samples | samples/getvmsbycluster.py | 1 | 4534 | #!/usr/bin/env python
"""
Written by Chris Hupman
Github: https://github.com/chupman/
Example: Get guest info with folder and host placement
"""
import json
from tools import cli, service_instance
data = {}
def get_nics(guest):
nics = {}
for nic in guest.net:
if nic.network: # Only return adapter backed interfaces
if nic.ipConfig is not None and nic.ipConfig.ipAddress is not None:
nics[nic.macAddress] = {} # Use mac as uniq ID for nic
nics[nic.macAddress]['netlabel'] = nic.network
ipconf = nic.ipConfig.ipAddress
i = 0
nics[nic.macAddress]['ipv4'] = {}
for ip in ipconf:
if ":" not in ip.ipAddress: # Only grab ipv4 addresses
nics[nic.macAddress]['ipv4'][i] = ip.ipAddress
nics[nic.macAddress]['prefix'] = ip.prefixLength
nics[nic.macAddress]['connected'] = nic.connected
i = i+1
return nics
def vmsummary(summary, guest):
vmsum = {}
config = summary.config
net = get_nics(guest)
vmsum['mem'] = str(config.memorySizeMB / 1024)
vmsum['diskGB'] = str("%.2f" % (summary.storage.committed / 1024**3))
vmsum['cpu'] = str(config.numCpu)
vmsum['path'] = config.vmPathName
vmsum['ostype'] = config.guestFullName
vmsum['state'] = summary.runtime.powerState
vmsum['annotation'] = config.annotation if config.annotation else ''
vmsum['net'] = net
return vmsum
def vm2dict(datacenter, cluster, host, vm, summary):
# If nested folder path is required, split into a separate function
vmname = vm.summary.config.name
data[datacenter][cluster][host][vmname]['folder'] = vm.parent.name
data[datacenter][cluster][host][vmname]['mem'] = summary['mem']
data[datacenter][cluster][host][vmname]['diskGB'] = summary['diskGB']
data[datacenter][cluster][host][vmname]['cpu'] = summary['cpu']
data[datacenter][cluster][host][vmname]['path'] = summary['path']
data[datacenter][cluster][host][vmname]['net'] = summary['net']
data[datacenter][cluster][host][vmname]['ostype'] = summary['ostype']
data[datacenter][cluster][host][vmname]['state'] = summary['state']
data[datacenter][cluster][host][vmname]['annotation'] = summary['annotation']
def data2json(raw_data, args):
with open(args.jsonfile, 'w') as json_file:
json.dump(raw_data, json_file)
def main():
"""
Iterate through all datacenters and list VM info.
"""
parser = cli.Parser()
parser.add_custom_argument('--json', required=False, action='store_true',
help='Write out to json file')
parser.add_custom_argument('--jsonfile', required=False, action='store',
default='getvmsbycluster.json',
help='Filename and path of json file')
parser.add_custom_argument('--silent', required=False, action='store_true',
                               help='suppress output to screen')
args = parser.get_args()
si = service_instance.connect(args)
outputjson = True if args.json else False
content = si.RetrieveContent()
children = content.rootFolder.childEntity
for child in children: # Iterate though DataCenters
datacenter = child
data[datacenter.name] = {} # Add data Centers to data dict
clusters = datacenter.hostFolder.childEntity
for cluster in clusters: # Iterate through the clusters in the DC
# Add Clusters to data dict
data[datacenter.name][cluster.name] = {}
hosts = cluster.host # Variable to make pep8 compliance
for host in hosts: # Iterate through Hosts in the Cluster
hostname = host.summary.config.name
# Add VMs to data dict by config name
data[datacenter.name][cluster.name][hostname] = {}
vms = host.vm
for vm in vms: # Iterate through each VM on the host
vmname = vm.summary.config.name
data[datacenter.name][cluster.name][hostname][vmname] = {}
summary = vmsummary(vm.summary, vm.guest)
vm2dict(datacenter.name, cluster.name, hostname, vm, summary)
if not args.silent:
print(json.dumps(data, sort_keys=True, indent=4))
if outputjson:
data2json(data, args)
# Start program
if __name__ == "__main__":
main()
| apache-2.0 | 2,445,535,360,479,337,000 | 38.426087 | 81 | 0.60322 | false | 3.970228 | true | false | false |
ASoftTech/Scons-Tools-Grbd | scons_tools_grbd/Tools/MSBuild/VC/Dll2Lib.py | 1 | 3979 | """
Dll2Lib
This tool will generate a .lib file under windows for a given .dll file
    This uses dumpbin to export a list of symbols
dumpbin /exports C:\yourpath\yourlib.dll
The list of symbols is then written to a .def file
The lib command is then used to generate the .lib file from the .def file
lib /def:C:\mypath\mylib.def /OUT:C:\mypath\mylib.lib
    A side effect of this is an .exp file which also requires cleanup
We can then use the .lib file for linking with the compiler under Windows
"""
import os, sys, os.path as path, subprocess
import SCons.Script
from SCons.Environment import Environment
from SCons.Script import Builder
from SCons.Tool.MSCommon import msvc_exists, msvc_setup_env_once
def exists(env):
return msvc_exists()
def generate(env):
"""Called when the tool is loaded into the environment at startup of script"""
assert(exists(env))
# Set-up ms tools paths
msvc_setup_env_once(env)
env.SetDefault(
# Location of the dumpbin executable
DUMPBIN = 'dumpbin',
)
# Register the builder
bld = Builder(action = __Dll2Lib_func, emitter = __Dll2Lib_emitter)
env.Append(BUILDERS = {'Dll2Lib' : bld})
def __Dll2Lib_emitter(target, source, env):
"""Add the generated .def and .exp files to the list of targerts for cleanup"""
addfiles = []
for item in target:
libfile = item.abspath
deffile = path.splitext(libfile)[0] + '.def'
expfile = path.splitext(libfile)[0] + '.exp'
        addfiles.append(env.File(deffile))
        addfiles.append(env.File(expfile))
target = target + addfiles
return target, source
def __Dll2Lib_func(target, source, env):
"""Actual builder that does the work after the Sconscript file is parsed"""
index = 0
for srcitem in source:
srcfile = str(srcitem)
filename = str(target[index])
libfile = path.splitext(filename)[0] + '.lib'
deffile = path.splitext(filename)[0] + '.def'
if path.splitext(srcfile)[1] != '.dll':
continue
dumpbin_exp = __dumpbin_run_exports(env, srcfile)
exportlist = __dumpbin_parse_exports(dumpbin_exp)
__write_deffile(deffile, exportlist)
__generate_lib(env, deffile, libfile)
index = index + 1
def __dumpbin_run_exports(env, dllfile):
"""Run dumpbin /exports against the input dll"""
cmdopts = [env['DUMPBIN'], '/exports', str(dllfile)]
print("Calling '%s'" % env['DUMPBIN'])
stdout, stderr = __runcmd_mbcs(env, cmdopts)
return stdout
def __dumpbin_parse_exports(input):
"""Parse thr output from dumpbin as a list of symbols"""
ret = []
lines = input.split('\n')
for line in lines:
arr1 = line.split()
if len(arr1) == 4 and arr1[1] != 'number' and arr1[1] != 'hint':
ret.append(arr1[3])
return ret
def __write_deffile(outfile, lines):
"""Write the list of symbols to a .def file"""
with open(outfile, 'w') as f:
f.write('EXPORTS\n')
for line in lines:
f.write(line + '\n')
def __generate_lib(env, deffile, libfile):
"""Generate the .lib file"""
cmdopts = [env['AR'], '/def:' + deffile, '/OUT:' + libfile]
stdout, stderr = __runcmd_mbcs(env, cmdopts)
return stdout
def __runcmd_mbcs(env, cmdopts):
"""Run command while capturing the output"""
popen = SCons.Action._subproc(env, cmdopts, stdin='devnull',
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = popen.stdout.read()
stderr = popen.stderr.read()
if not isinstance(stderr, str):
stderr = stderr.decode("mbcs")
if not isinstance(stdout, str):
stdout = stdout.decode("mbcs")
if stderr:
import sys
sys.stderr.write(stderr)
if popen.wait() != 0:
raise IOError(stderr)
return stdout, stderr
| mit | -6,572,437,492,332,772,000 | 30.614754 | 83 | 0.617994 | false | 3.63379 | false | false | false |
liliasapurina/python_training | test/test_contacts.py | 1 | 2349 | __author__ = '1'
import re
def test_contacts_on_home_page(app):
address_from_home_page = app.address.get_address_list()[0]
address_from_edit_page = app.address.get_address_info_from_edit_page(0)
assert address_from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(address_from_edit_page)
assert address_from_home_page.name == address_from_edit_page.name
assert address_from_home_page.lastname == address_from_edit_page.lastname
assert address_from_home_page.address == address_from_edit_page.address
assert address_from_home_page.all_emails_from_home_page == merge_emails_like_on_home_page(address_from_edit_page)
def test_contacts_on_address_view_page(app):
address_from_view_page = app.address.get_address_from_view_page(0)
address_from_edit_page = app.address.get_address_info_from_edit_page(0)
assert address_from_view_page.all_fields == merge_fields_like_on_view_page(address_from_edit_page)
def clear(s):
return re.sub("[() -]","",s)
def merge_phones_like_on_home_page(address):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[address.phone, address.mobilephone, address.workphone, address.secondaryphone]))))
def merge_emails_like_on_home_page(address):
return "\n".join(filter(lambda x: x != "",
filter(lambda x: x is not None,
[address.email, address.email2, address.email3])))
def merge_fields_like_on_view_page(address):
return str(address.name)+" "+str(address.middlename)\
+" "+str(address.lastname)+"\n"+str(address.nickname)\
+"\n"+str(address.company)+"\n"+str(address.address)\
+"\nH: "+str(address.phone)\
+"\nM: "+str(address.mobilephone)+"\nW: "+str(address.workphone)\
+"\n"+"\n"\
+create_view_for_email(str(address.email))\
+create_view_for_email(str(address.email2))\
+create_view_for_email(str(address.email3))+'\n'\
+"\nP: "+str(address.secondaryphone)
def create_view_for_email(email):
if email != "":
dog_index = email.find("@")
return email+" (www."+email[dog_index+1:len(email)]+")"
else:
return "\n" | apache-2.0 | -2,452,055,065,751,226,400 | 46.959184 | 121 | 0.62069 | false | 3.271588 | false | false | false |
robozman/pymumblegui | pymumble/pymumble_py3/mumble.py | 1 | 24841 | # -*- coding: utf-8 -*-
import threading
import logging
import time
import select
import socket
import ssl
import struct
from .errors import *
from .constants import *
from . import users
from . import channels
from . import blobs
from . import commands
from . import callbacks
from . import tools
from . import soundoutput
from . import mumble_pb2
class Mumble(threading.Thread):
"""
Mumble client library main object.
basically a thread
"""
def __init__(self, host, user, port=64738, password='', certfile=None, keyfile=None, reconnect=False, tokens=[], debug=False):
"""
host=mumble server hostname or address
port=mumble server port
user=user to use for the connection
password=password for the connection
certfile=client certificate to authenticate the connection
        keyfile=private key coming with client certificate
reconnect=if True, try to reconnect if disconnected
tokens=channel access tokens as a list of strings
debug=if True, send debugging messages (lot of...) to the stdout
"""
# TODO: use UDP audio
threading.Thread.__init__(self)
self.Log = logging.getLogger("PyMumble") # logging object for errors and debugging
if debug:
self.Log.setLevel(logging.DEBUG)
else:
self.Log.setLevel(logging.ERROR)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s-%(name)s-%(levelname)s-%(message)s')
ch.setFormatter(formatter)
self.Log.addHandler(ch)
self.parent_thread = threading.current_thread() # main thread of the calling application
self.mumble_thread = None # thread of the mumble client library
self.host = host
self.port = port
self.user = user
self.password = password
self.certfile = certfile
self.keyfile = keyfile
self.reconnect = reconnect
self.ping_stats = {"last_rcv": 0, "time_send": 0, "nb": 0, "avg": 40.0, "var": 0.0}
self.tokens = tokens
self.__opus_profile = PYMUMBLE_AUDIO_TYPE_OPUS_PROFILE
self.receive_sound = False # set to True to treat incoming audio, otherwise it is simply ignored
self.loop_rate = PYMUMBLE_LOOP_RATE
self.application = PYMUMBLE_VERSION_STRING
self.callbacks = callbacks.CallBacks() # callbacks management
self.ready_lock = threading.Lock() # released when the connection is fully established with the server
self.ready_lock.acquire()
def init_connection(self):
"""Initialize variables that are local to a connection, (needed if the client automatically reconnect)"""
self.ready_lock.acquire(False) # reacquire the ready-lock in case of reconnection
self.connected = PYMUMBLE_CONN_STATE_NOT_CONNECTED
self.control_socket = None
self.media_socket = None # Not implemented - for UDP media
        self.bandwidth = PYMUMBLE_BANDWIDTH  # reset the outgoing bandwidth to its default before connecting
self.server_max_bandwidth = None
self.udp_active = False
        self.users = users.Users(self, self.callbacks)  # contains the server's connected users information
        self.channels = channels.Channels(self, self.callbacks)  # contains the server's channels information
self.blobs = blobs.Blobs(self) # manage the blob objects
self.sound_output = soundoutput.SoundOutput(self, PYMUMBLE_AUDIO_PER_PACKET, self.bandwidth, opus_profile=self.__opus_profile) # manage the outgoing sounds
self.commands = commands.Commands() # manage commands sent between the main and the mumble threads
self.receive_buffer = bytes() # initialize the control connection input buffer
def run(self):
"""Connect to the server and start the loop in its thread. Retry if requested"""
self.mumble_thread = threading.current_thread()
# loop if auto-reconnect is requested
while True:
self.init_connection() # reset the connection-specific object members
            if self.connect() >= PYMUMBLE_CONN_STATE_FAILED:  # some error occurred, exit here
self.ready_lock.release()
break
try:
self.loop()
except socket.error:
self.connected = PYMUMBLE_CONN_STATE_NOT_CONNECTED
if not self.reconnect or not self.parent_thread.is_alive():
break
time.sleep(PYMUMBLE_CONNECTION_RETRY_INTERVAL)
def connect(self):
"""Connect to the server"""
# Connect the SSL tunnel
self.Log.debug("connecting to %s on port %i.", self.host, self.port)
std_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.control_socket = ssl.wrap_socket(std_sock, certfile=self.certfile, keyfile=self.keyfile, ssl_version=ssl.PROTOCOL_TLS)
self.control_socket.connect((self.host, self.port))
self.control_socket.setblocking(0)
# Perform the Mumble authentication
version = mumble_pb2.Version()
version.version = (PYMUMBLE_PROTOCOL_VERSION[0] << 16) + (PYMUMBLE_PROTOCOL_VERSION[1] << 8) + PYMUMBLE_PROTOCOL_VERSION[2]
version.release = self.application
version.os = PYMUMBLE_OS_STRING
version.os_version = PYMUMBLE_OS_VERSION_STRING
self.Log.debug("sending: version: %s", version)
self.send_message(PYMUMBLE_MSG_TYPES_VERSION, version)
authenticate = mumble_pb2.Authenticate()
authenticate.username = self.user
authenticate.password = self.password
authenticate.tokens.extend(self.tokens)
authenticate.opus = True
self.Log.debug("sending: authenticate: %s", authenticate)
self.send_message(PYMUMBLE_MSG_TYPES_AUTHENTICATE, authenticate)
except socket.error:
self.connected = PYMUMBLE_CONN_STATE_FAILED
return self.connected
self.connected = PYMUMBLE_CONN_STATE_AUTHENTICATING
return self.connected
def loop(self):
"""
Main loop
waiting for a message from the server for maximum self.loop_rate time
take care of sending the ping
take care of sending the queued commands to the server
check on every iteration for outgoing sound
check for disconnection
"""
self.Log.debug("entering loop")
last_ping = time.time() # keep track of the last ping time
# loop as long as the connection and the parent thread are alive
while self.connected not in (PYMUMBLE_CONN_STATE_NOT_CONNECTED, PYMUMBLE_CONN_STATE_FAILED) and self.parent_thread.is_alive():
if last_ping + PYMUMBLE_PING_DELAY <= time.time(): # when it is time, send the ping
self.ping()
last_ping = time.time()
if self.connected == PYMUMBLE_CONN_STATE_CONNECTED:
while self.commands.is_cmd():
self.treat_command(self.commands.pop_cmd()) # send the commands coming from the application to the server
self.sound_output.send_audio() # send outgoing audio if available
(rlist, wlist, xlist) = select.select([self.control_socket], [], [self.control_socket], self.loop_rate) # wait for a socket activity
if self.control_socket in rlist: # something to be read on the control socket
self.read_control_messages()
elif self.control_socket in xlist: # socket was closed
self.control_socket.close()
self.connected = PYMUMBLE_CONN_STATE_NOT_CONNECTED
def ping(self):
"""Send the keepalive through available channels"""
ping = mumble_pb2.Ping()
ping.timestamp = int(time.time())
ping.tcp_ping_avg = self.ping_stats['avg']
ping.tcp_ping_var = self.ping_stats['var']
ping.tcp_packets = self.ping_stats['nb']
self.Log.debug("sending: ping: %s", ping)
self.send_message(PYMUMBLE_MSG_TYPES_PING, ping)
self.ping_stats['time_send'] = int(time.time() * 1000)
self.Log.debug(self.ping_stats['last_rcv'])
if self.ping_stats['last_rcv'] != 0 and int(time.time() * 1000) > self.ping_stats['last_rcv'] + (60 * 1000):
self.Log.debug("Ping too long ! Disconnected ?")
self.connected = PYMUMBLE_CONN_STATE_NOT_CONNECTED
def ping_response(self, mess):
self.ping_stats['last_rcv'] = int(time.time() * 1000)
ping = int(time.time() * 1000) - self.ping_stats['time_send']
old_avg = self.ping_stats['avg']
nb = self.ping_stats['nb']
new_avg = ((self.ping_stats['avg'] * nb) + ping) / (nb + 1)
try:
self.ping_stats['var'] = self.ping_stats['var'] + pow(old_avg - new_avg, 2) + (1 / nb) * pow(ping - new_avg, 2)
except ZeroDivisionError:
pass
self.ping_stats['avg'] = new_avg
self.ping_stats['nb'] += 1
def send_message(self, type, message):
"""Send a control message to the server"""
packet = struct.pack("!HL", type, message.ByteSize()) + message.SerializeToString()
while len(packet) > 0:
self.Log.debug("sending message")
sent = self.control_socket.send(packet)
if sent < 0:
raise socket.error("Server socket error")
packet = packet[sent:]
def read_control_messages(self):
"""Read control messages coming from the server"""
# from tools import tohex # for debugging
try:
buffer = self.control_socket.recv(PYMUMBLE_READ_BUFFER_SIZE)
self.receive_buffer += buffer
except socket.error:
pass
while len(self.receive_buffer) >= 6: # header is present (type + length)
self.Log.debug("read control connection")
header = self.receive_buffer[0:6]
if len(header) < 6:
break
(type, size) = struct.unpack("!HL", header) # decode header
            if len(self.receive_buffer) < size+6:  # full payload not received yet, wait for more data
break
# self.Log.debug("message received : " + tohex(self.receive_buffer[0:size+6])) # for debugging
message = self.receive_buffer[6:size+6] # get the control message
self.receive_buffer = self.receive_buffer[size+6:] # remove from the buffer the read part
self.dispatch_control_message(type, message)
def dispatch_control_message(self, type, message):
"""Dispatch control messages based on their type"""
self.Log.debug("dispatch control message")
if type == PYMUMBLE_MSG_TYPES_UDPTUNNEL: # audio encapsulated in control message
self.sound_received(message)
elif type == PYMUMBLE_MSG_TYPES_VERSION:
mess = mumble_pb2.Version()
mess.ParseFromString(message)
self.Log.debug("message: Version : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_AUTHENTICATE:
mess = mumble_pb2.Authenticate()
mess.ParseFromString(message)
self.Log.debug("message: Authenticate : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_PING:
mess = mumble_pb2.Ping()
mess.ParseFromString(message)
self.Log.debug("message: Ping : %s", mess)
self.ping_response(mess)
elif type == PYMUMBLE_MSG_TYPES_REJECT:
mess = mumble_pb2.Reject()
mess.ParseFromString(message)
self.Log.debug("message: reject : %s", mess)
self.connected = PYMUMBLE_CONN_STATE_FAILED
self.ready_lock.release()
raise ConnectionRejectedError(mess.reason)
elif type == PYMUMBLE_MSG_TYPES_SERVERSYNC: # this message finish the connection process
mess = mumble_pb2.ServerSync()
mess.ParseFromString(message)
self.Log.debug("message: serversync : %s", mess)
self.users.set_myself(mess.session)
self.server_max_bandwidth = mess.max_bandwidth
self.set_bandwidth(mess.max_bandwidth)
if self.connected == PYMUMBLE_CONN_STATE_AUTHENTICATING:
self.connected = PYMUMBLE_CONN_STATE_CONNECTED
self.callbacks(PYMUMBLE_CLBK_CONNECTED)
self.ready_lock.release() # release the ready-lock
elif type == PYMUMBLE_MSG_TYPES_CHANNELREMOVE:
mess = mumble_pb2.ChannelRemove()
mess.ParseFromString(message)
self.Log.debug("message: ChannelRemove : %s", mess)
self.channels.remove(mess.channel_id)
elif type == PYMUMBLE_MSG_TYPES_CHANNELSTATE:
mess = mumble_pb2.ChannelState()
mess.ParseFromString(message)
self.Log.debug("message: channelstate : %s", mess)
self.channels.update(mess)
elif type == PYMUMBLE_MSG_TYPES_USERREMOVE:
mess = mumble_pb2.UserRemove()
mess.ParseFromString(message)
self.Log.debug("message: UserRemove : %s", mess)
self.users.remove(mess)
elif type == PYMUMBLE_MSG_TYPES_USERSTATE:
mess = mumble_pb2.UserState()
mess.ParseFromString(message)
self.Log.debug("message: userstate : %s", mess)
self.users.update(mess)
elif type == PYMUMBLE_MSG_TYPES_BANLIST:
mess = mumble_pb2.BanList()
mess.ParseFromString(message)
self.Log.debug("message: BanList : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_TEXTMESSAGE:
mess = mumble_pb2.TextMessage()
mess.ParseFromString(message)
self.Log.debug("message: TextMessage : %s", mess)
self.callbacks(PYMUMBLE_CLBK_TEXTMESSAGERECEIVED, mess)
elif type == PYMUMBLE_MSG_TYPES_PERMISSIONDENIED:
mess = mumble_pb2.PermissionDenied()
mess.ParseFromString(message)
self.Log.debug("message: PermissionDenied : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_ACL:
mess = mumble_pb2.ACL()
mess.ParseFromString(message)
self.Log.debug("message: ACL : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_QUERYUSERS:
mess = mumble_pb2.QueryUsers()
mess.ParseFromString(message)
self.Log.debug("message: QueryUsers : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_CRYPTSETUP:
mess = mumble_pb2.CryptSetup()
mess.ParseFromString(message)
self.Log.debug("message: CryptSetup : %s", mess)
self.ping()
elif type == PYMUMBLE_MSG_TYPES_CONTEXTACTIONMODIFY:
mess = mumble_pb2.ContextActionModify()
mess.ParseFromString(message)
self.Log.debug("message: ContextActionModify : %s", mess)
self.callbacks(PYMUMBLE_CLBK_CONTEXTACTIONRECEIVED, mess)
elif type == PYMUMBLE_MSG_TYPES_CONTEXTACTION:
mess = mumble_pb2.ContextAction()
mess.ParseFromString(message)
self.Log.debug("message: ContextAction : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_USERLIST:
mess = mumble_pb2.UserList()
mess.ParseFromString(message)
self.Log.debug("message: UserList : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_VOICETARGET:
mess = mumble_pb2.VoiceTarget()
mess.ParseFromString(message)
self.Log.debug("message: VoiceTarget : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_PERMISSIONQUERY:
mess = mumble_pb2.PermissionQuery()
mess.ParseFromString(message)
self.Log.debug("message: PermissionQuery : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_CODECVERSION:
mess = mumble_pb2.CodecVersion()
mess.ParseFromString(message)
self.Log.debug("message: CodecVersion : %s", mess)
self.sound_output.set_default_codec(mess)
elif type == PYMUMBLE_MSG_TYPES_USERSTATS:
mess = mumble_pb2.UserStats()
mess.ParseFromString(message)
self.Log.debug("message: UserStats : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_REQUESTBLOB:
mess = mumble_pb2.RequestBlob()
mess.ParseFromString(message)
self.Log.debug("message: RequestBlob : %s", mess)
elif type == PYMUMBLE_MSG_TYPES_SERVERCONFIG:
mess = mumble_pb2.ServerConfig()
mess.ParseFromString(message)
self.Log.debug("message: ServerConfig : %s", mess)
def set_bandwidth(self, bandwidth):
"""Set the total allowed outgoing bandwidth"""
if self.server_max_bandwidth is not None and bandwidth > self.server_max_bandwidth:
self.bandwidth = self.server_max_bandwidth
else:
self.bandwidth = bandwidth
self.sound_output.set_bandwidth(self.bandwidth) # communicate the update to the outgoing audio manager
def sound_received(self, message):
"""Manage a received sound message"""
# from tools import tohex # for debugging
pos = 0
# self.Log.debug("sound packet : " + tohex(message)) # for debugging
(header, ) = struct.unpack("!B", bytes([message[pos]])) # extract the header
type = (header & 0b11100000) >> 5
target = header & 0b00011111
pos += 1
if type == PYMUMBLE_AUDIO_TYPE_PING:
return
session = tools.VarInt() # decode session id
pos += session.decode(message[pos:pos+10])
sequence = tools.VarInt() # decode sequence number
pos += sequence.decode(message[pos:pos+10])
self.Log.debug("audio packet received from %i, sequence %i, type:%i, target:%i, lenght:%i", session.value, sequence.value, type, target, len(message))
terminator = False # set to true if it's the last 10 ms audio frame for the packet (used with CELT codec)
while (pos < len(message)) and not terminator: # get the audio frames one by one
if type == PYMUMBLE_AUDIO_TYPE_OPUS:
size = tools.VarInt() # OPUS use varint for the frame length
pos += size.decode(message[pos:pos+10])
size = size.value
if not (size & 0x2000): # terminator is 0x2000 in the resulting int.
terminator = True # should actually always be 0 as OPUS can use variable length audio frames
size &= 0x1fff # isolate the size from the terminator
else:
(header, ) = struct.unpack("!B", message[pos]) # CELT length and terminator is encoded in a 1 byte int
if not (header & 0b10000000):
terminator = True
size = header & 0b01111111
pos += 1
self.Log.debug("Audio frame : time:%f, last:%s, size:%i, type:%i, target:%i, pos:%i", time.time(), str(terminator), size, type, target, pos-1)
if size > 0 and self.receive_sound: # if audio must be treated
try:
newsound = self.users[session.value].sound.add(message[pos:pos+size],
sequence.value,
type,
target) # add the sound to the user's sound queue
self.callbacks(PYMUMBLE_CLBK_SOUNDRECEIVED, self.users[session.value], newsound)
sequence.value += int(round(newsound.duration / 1000 * 10)) # add 1 sequence per 10ms of audio
self.Log.debug("Audio frame : time:%f last:%s, size:%i, uncompressed:%i, type:%i, target:%i", time.time(), str(terminator), size, newsound.size, type, target)
except CodecNotSupportedError as msg:
print(msg)
except KeyError: # sound received after user removed
pass
# if len(message) - pos < size:
# raise InvalidFormatError("Invalid audio frame size")
pos += size # go further in the packet, after the audio frame
# TODO: get position info
def set_application_string(self, string):
"""Set the application name, that can be viewed by other clients on the server"""
self.application = string
def set_loop_rate(self, rate):
"""Set the current main loop rate (pause per iteration)"""
self.loop_rate = rate
def get_loop_rate(self):
"""Get the current main loop rate (pause per iteration)"""
return self.loop_rate
def set_codec_profile(self, profile):
"""set the audio profile"""
if profile in ["audio", "voip"]:
self.__opus_profile = profile
else:
raise ValueError("Unknown profile: " + str(profile))
def get_codec_profile(self):
"""return the audio profile string"""
return self.__opus_profile
def set_receive_sound(self, value):
"""Enable or disable the management of incoming sounds"""
if value:
self.receive_sound = True
else:
self.receive_sound = False
def is_ready(self):
"""Wait for the connection to be fully completed. To be used in the main thread"""
self.ready_lock.acquire()
self.ready_lock.release()
def execute_command(self, cmd, blocking=True):
"""Create a command to be sent to the server. To be used in the main thread"""
self.is_ready()
lock = self.commands.new_cmd(cmd)
if blocking and self.mumble_thread is not threading.current_thread():
lock.acquire()
lock.release()
return lock
# TODO: manage a timeout for blocking commands. Currently, no command actually waits for the server to execute
# The result of these commands should actually be checked against incoming server updates
def treat_command(self, cmd):
"""Send the awaiting commands to the server. Used in the pymumble thread."""
if cmd.cmd == PYMUMBLE_CMD_MOVE:
userstate = mumble_pb2.UserState()
userstate.session = cmd.parameters["session"]
userstate.channel_id = cmd.parameters["channel_id"]
self.Log.debug("Moving to channel")
self.send_message(PYMUMBLE_MSG_TYPES_USERSTATE, userstate)
cmd.response = True
self.commands.answer(cmd)
elif cmd.cmd == PYMUMBLE_CMD_TEXTMESSAGE:
textmessage = mumble_pb2.TextMessage()
textmessage.session.append(cmd.parameters["session"])
textmessage.channel_id.append(cmd.parameters["channel_id"])
textmessage.message = cmd.parameters["message"]
self.send_message(PYMUMBLE_MSG_TYPES_TEXTMESSAGE, textmessage)
cmd.response = True
self.commands.answer(cmd)
elif cmd.cmd == PYMUMBLE_CMD_TEXTPRIVATEMESSAGE:
textprivatemessage = mumble_pb2.TextMessage()
textprivatemessage.session.append(cmd.parameters["session"])
textprivatemessage.message = cmd.parameters["message"]
self.send_message(PYMUMBLE_MSG_TYPES_TEXTMESSAGE, textprivatemessage)
cmd.response = True
self.commands.answer(cmd)
elif cmd.cmd == PYMUMBLE_CMD_MODUSERSTATE:
userstate = mumble_pb2.UserState()
userstate.session = cmd.parameters["session"]
if "mute" in cmd.parameters:
userstate.mute = cmd.parameters["mute"]
if "self_mute" in cmd.parameters:
userstate.self_mute = cmd.parameters["self_mute"]
if "deaf" in cmd.parameters:
userstate.deaf = cmd.parameters["deaf"]
if "self_deaf" in cmd.parameters:
userstate.self_deaf = cmd.parameters["self_deaf"]
if "suppress" in cmd.parameters:
userstate.suppress = cmd.parameters["suppress"]
if "recording" in cmd.parameters:
userstate.recording = cmd.parameters["recording"]
if "comment" in cmd.parameters:
userstate.comment = cmd.parameters["comment"]
if "texture" in cmd.parameters:
userstate.texture = cmd.parameters["texture"]
self.send_message(PYMUMBLE_MSG_TYPES_USERSTATE, userstate)
cmd.response = True
self.commands.answer(cmd)
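# --- Added illustrative sketch (not part of the original module) ---
# The control channel used by send_message() and read_control_messages()
# above frames every message as a 6-byte big-endian header (unsigned short
# protobuf message type + unsigned long payload length) followed by the
# serialized protobuf payload. The helper below only mirrors that framing;
# its name is made up for this example.
def _frame_control_packet(message_type, payload):
    """Frame an already-serialized payload the way send_message() does."""
    import struct
    return struct.pack("!HL", message_type, len(payload)) + payload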
| gpl-3.0 | -3,118,328,671,455,202,300 | 41.318569 | 178 | 0.605088 | false | 4.005321 | false | false | false |
evgenybf/pyXLWriter | pyXLWriter/utilites.py | 1 | 4897 | # pyXLWriter: A library for generating Excel Spreadsheets
# Copyright (c) 2004 Evgeny Filatov <[email protected]>
# Copyright (c) 2002-2004 John McNamara (Perl Spreadsheet::WriteExcel)
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#----------------------------------------------------------------------------
# This module was written/ported from PERL Spreadsheet::WriteExcel module
# The author of the PERL Spreadsheet::WriteExcel module is John McNamara
# <[email protected]>
#----------------------------------------------------------------------------
# See the README.txt distributed with pyXLWriter for more details.
"""pyXLWriter.utilites
Utilities for working with cell references
"""
__revision__ = """$Id: utilites.py,v 1.23 2004/08/20 05:16:17 fufff Exp $"""
#
# TODO: Optimization - I must use re.compile everywhere.
#
import re
__all__ = ["cell_to_rowcol", "cell_to_rowcol2", "rowcol_to_cell",
"cellrange_to_rowcol_pair"]
_re_cell_ex = re.compile(r"(\$?)([A-I]?[A-Z])(\$?)(\d+)")
_re_row_range = re.compile(r"\$?(\d+):\$?(\d+)")
_re_col_range = re.compile(r"\$?([A-I]?[A-Z]):\$?([A-I]?[A-Z])")
_re_cell_range = re.compile(r"\$?([A-I]?[A-Z]\$?\d+):\$?([A-I]?[A-Z]\$?\d+)")
_re_cell_ref = re.compile(r"\$?([A-I]?[A-Z]\$?\d+)")
def _col_by_name(colname):
"""
"""
col = 0
pow = 1
for i in xrange(len(colname)-1, -1, -1):
ch = colname[i]
col += (ord(ch) - ord('A') + 1) * pow
pow *= 26
return col - 1
def cell_to_rowcol(cell):
"""Convert an Excel cell reference string in A1 notation
to numeric row/col notation.
Returns: row, col, row_abs, col_abs
"""
m = _re_cell_ex.match(cell)
if not m:
raise Exception("Error in cell format")
col_abs, col, row_abs, row = m.groups()
row_abs = bool(row_abs)
col_abs = bool(col_abs)
row = int(row) - 1
col = _col_by_name(col)
return row, col, row_abs, col_abs
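# Added usage sketch (not part of the original module): expected results for
# _col_by_name() and cell_to_rowcol() on a few references.
def _example_cell_to_rowcol():
    assert _col_by_name("A") == 0
    assert _col_by_name("AB") == 27
    assert cell_to_rowcol("$B$7") == (6, 1, True, True)
    assert cell_to_rowcol("AB12") == (11, 27, False, False)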
def cell_to_rowcol2(cell):
"""Convert an Excel cell reference string in A1 notation
to numeric row/col notation.
Returns: row, col
"""
m = _re_cell_ex.match(cell)
if not m:
raise Exception("Error in cell format")
col_abs, col, row_abs, row = m.groups()
# Convert base26 column string to number
# All your Base are belong to us.
row = int(row) - 1
col = _col_by_name(col)
return row, col
def rowcol_to_cell(row, col, row_abs=False, col_abs=False):
"""Convert numeric row/col notation to an Excel cell reference string in
A1 notation.
"""
d = col // 26
m = col % 26
chr1 = "" # Most significant character in AA1
if row_abs:
row_abs = '$'
else:
row_abs = ''
if col_abs:
col_abs = '$'
else:
col_abs = ''
if d > 0:
chr1 = chr(ord('A') + d - 1)
chr2 = chr(ord('A') + m)
# Zero index to 1-index
return col_abs + chr1 + chr2 + row_abs + str(row + 1)
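# Added usage sketch (not part of the original module): rowcol_to_cell() is
# the inverse of cell_to_rowcol() for the row/col part.
def _example_rowcol_to_cell():
    assert rowcol_to_cell(0, 0) == "A1"
    assert rowcol_to_cell(11, 27) == "AB12"
    assert rowcol_to_cell(6, 1, row_abs=True, col_abs=True) == "$B$7"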
def cellrange_to_rowcol_pair(cellrange):
"""Convert cell range string in A1 notation to numeric row/col
pair.
Returns: row1, col1, row2, col2
"""
cellrange = cellrange.upper()
# Convert a row range: '1:3'
res = _re_row_range.match(cellrange)
if res:
row1 = int(res.group(1)) - 1
col1 = 0
row2 = int(res.group(2)) - 1
col2 = -1
return row1, col1, row2, col2
# Convert a column range: 'A:A' or 'B:G'.
# A range such as A:A is equivalent to A1:A16384, so add rows as required
res = _re_col_range.match(cellrange)
if res:
col1 = _col_by_name(res.group(1))
row1 = 0
col2 = _col_by_name(res.group(2))
row2 = -1
return row1, col1, row2, col2
# Convert a cell range: 'A1:B7'
res = _re_cell_range.match(cellrange)
if res:
row1, col1 = cell_to_rowcol2(res.group(1))
row2, col2 = cell_to_rowcol2(res.group(2))
return row1, col1, row2, col2
# Convert a cell reference: 'A1' or 'AD2000'
res = _re_cell_ref.match(cellrange)
if res:
row1, col1 = cell_to_rowcol2(res.group(1))
return row1, col1, row1, col1
raise Exception("Unknown cell reference %s" % (cell))
| lgpl-2.1 | -7,616,251,451,061,417,000 | 30.391026 | 77 | 0.590157 | false | 3.09154 | false | false | false |
ngokevin/zamboni | settings_test.py | 1 | 2958 | import atexit
import os
import tempfile
from mkt.settings import ROOT
_tmpdirs = set()
def _cleanup():
try:
import sys
import shutil
except ImportError:
return
tmp = None
try:
for tmp in _tmpdirs:
shutil.rmtree(tmp)
except Exception, exc:
sys.stderr.write("\n** shutil.rmtree(%r): %s\n" % (tmp, exc))
atexit.register(_cleanup)
def _polite_tmpdir():
tmp = tempfile.mkdtemp()
_tmpdirs.add(tmp)
return tmp
# See settings.py for documentation:
IN_TEST_SUITE = True
NETAPP_STORAGE = _polite_tmpdir()
ADDONS_PATH = _polite_tmpdir()
GUARDED_ADDONS_PATH = _polite_tmpdir()
SIGNED_APPS_PATH = _polite_tmpdir()
SIGNED_APPS_REVIEWER_PATH = _polite_tmpdir()
UPLOADS_PATH = _polite_tmpdir()
TMP_PATH = _polite_tmpdir()
COLLECTIONS_ICON_PATH = _polite_tmpdir()
REVIEWER_ATTACHMENTS_PATH = _polite_tmpdir()
DUMPED_APPS_PATH = _polite_tmpdir()
AUTHENTICATION_BACKENDS = (
'django_browserid.auth.BrowserIDBackend',
)
# We won't actually send an email.
SEND_REAL_EMAIL = True
# Turn off search engine indexing.
USE_ELASTIC = False
# Ensure all validation code runs in tests:
VALIDATE_ADDONS = True
PAYPAL_PERMISSIONS_URL = ''
ENABLE_API_ERROR_SERVICE = False
SITE_URL = 'http://testserver'
BROWSERID_AUDIENCES = [SITE_URL]
STATIC_URL = SITE_URL + '/'
MEDIA_URL = '/media/'
CACHES = {
'default': {
'BACKEND': 'caching.backends.locmem.LocMemCache',
}
}
# COUNT() caching can't be invalidated, it just expires after x seconds. This
# is just too annoying for tests, so disable it.
CACHE_COUNT_TIMEOUT = -1
# Overrides whatever storage you might have put in local settings.
DEFAULT_FILE_STORAGE = 'amo.utils.LocalFileStorage'
VIDEO_LIBRARIES = ['lib.video.dummy']
ALLOW_SELF_REVIEWS = True
# Make sure debug toolbar output is disabled so it doesn't interfere with any
# html tests.
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TOOLBAR_CALLBACK': lambda r: False,
'HIDE_DJANGO_SQL': True,
'TAG': 'div',
'ENABLE_STACKTRACES': False,
}
MOZMARKET_VENDOR_EXCLUDE = []
TASK_USER_ID = '4043307'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
SQL_RESET_SEQUENCES = False
GEOIP_URL = ''
GEOIP_DEFAULT_VAL = 'restofworld'
GEOIP_DEFAULT_TIMEOUT = .2
ES_DEFAULT_NUM_REPLICAS = 0
ES_DEFAULT_NUM_SHARDS = 3
IARC_MOCK = True
# Ensure that exceptions aren't re-raised.
DEBUG_PROPAGATE_EXCEPTIONS = False
PAYMENT_PROVIDERS = ['bango']
# When not testing this specific feature, make sure it's off.
PRE_GENERATE_APKS = False
# This is a precaution in case something isn't mocked right.
PRE_GENERATE_APK_URL = 'http://you-should-never-load-this.com/'
# A sample key for signing receipts.
WEBAPPS_RECEIPT_KEY = os.path.join(ROOT, 'mkt/webapps/tests/sample.key')
# A sample key for signing preverified-account assertions.
PREVERIFIED_ACCOUNT_KEY = os.path.join(ROOT, 'mkt/account/tests/sample.key')
| bsd-3-clause | 2,432,286,399,915,075,000 | 22.291339 | 77 | 0.701826 | false | 3.065285 | true | false | false |
yfpeng/pengyifan-leetcode | src/main/python/pyleetcode/next_greater_element.py | 1 | 1874 | """
You are given two arrays (without duplicates) nums1 and nums2 where nums1's elements are subset of nums2. Find all the
next greater numbers for nums1's elements in the corresponding places of nums2.
The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist,
output -1 for this number.
Example 1:
Input: nums1 = [4,1,2], nums2 = [1,3,4,2].
Output: [-1,3,-1]
Explanation:
For number 4 in the first array, you cannot find the next greater number for it in the second array, so output -1.
For number 1 in the first array, the next greater number for it in the second array is 3.
For number 2 in the first array, there is no next greater number for it in the second array, so output -1.
Example 2:
Input: nums1 = [2,4], nums2 = [1,2,3,4].
Output: [3,-1]
Explanation:
For number 2 in the first array, the next greater number for it in the second array is 3.
For number 4 in the first array, there is no next greater number for it in the second array, so output -1.
Note:
- All elements in nums1 and nums2 are unique.
- The length of both nums1 and nums2 would not exceed 1000.
"""
def next_greater_element(findNums, nums):
"""
:type findNums: List[int]
:type nums: List[int]
:rtype: List[int]
"""
output = []
for num1 in findNums:
index = nums.index(num1) + 1
o = -1
while index < len(nums):
if num1 < nums[index]:
o = nums[index]
break
index += 1
output.append(o)
return output
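# Added illustrative alternative (not part of the original solution): the
# classic monotonic-stack approach answers the same queries in
# O(len(nums) + len(findNums)) time instead of the nested scan above.
def next_greater_element_stack(findNums, nums):
    greater = {}  # value in nums -> its next greater element to the right
    stack = []
    for n in nums:
        while stack and stack[-1] < n:
            greater[stack.pop()] = n
        stack.append(n)
    return [greater.get(n, -1) for n in findNums]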
def test_next_greater_element():
assert next_greater_element([4, 1, 2], [1, 3, 4, 2]) == [-1, 3, -1]
assert next_greater_element([2, 4], [1, 2, 3, 4]) == [3, -1]
if __name__ == '__main__':
test_next_greater_element() | bsd-3-clause | -2,137,404,932,578,816,800 | 30.25 | 122 | 0.623799 | false | 3.476809 | false | false | false |
ProgrammingRobotsStudyGroup/robo_magellan | scripts/kill_switch.py | 1 | 3880 | #!/usr/bin/env python
#
# Copyright 2017 Robot Garden, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#------------------------------------------------------------------------------
# Monitors state of the kill switch by monitoring an UP GPIO input, which
# is connected to a contact closure from the kill switch module.
#
# Topics subscribed: None
# Topics published:
# kill_sw_enabled: std_msgs/Bool
#------------------------------------------------------------------------------
#
import RPi.GPIO as GPIO
import time
import rospy
from std_msgs.msg import Bool
from std_msgs.msg import String
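# Added illustrative sketch (not part of the original node): a minimal
# subscriber for the kill_sw_enabled topic documented in the header above.
# The function and node names are made up for this example.
def example_kill_sw_listener():
    def on_kill_sw(msg):
        rospy.loginfo("kill switch enabled: %s" % msg.data)
    rospy.init_node('kill_sw_listener_example', anonymous=True)
    rospy.Subscriber('kill_sw_enabled', Bool, on_kill_sw)
    rospy.spin()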
def kill_sw_mon():
rospy.init_node('kill_sw_mon', anonymous=True)
pub = rospy.Publisher('kill_sw_enabled', Bool, queue_size=10)
# first push of kill switch is going to start the state machine
global pub_exec_simple
pub_exec_simple = rospy.Publisher("exec_cmd_simple", String, queue_size = 10)
global once
once = False
rate = rospy.Rate(20)
gpio_pin = 7
GPIO.setmode(GPIO.BOARD)
    # assumes the pin is pulled up by an external resistor
GPIO.setup(gpio_pin, GPIO.IN)
# find the starting state of the input pin
n_qual = 5 # number of successive readings that must match to qualify
count = 0
last_val = False
start = time.time()
while count < n_qual:
if (time.time() - start) > 10:
break
val = GPIO.input(gpio_pin) == 0
if val == last_val:
count += 1
last_val = val
else:
count = 0 # reset
last_val = val
time.sleep(0.1) # pause between reads
if count >= n_qual:
kill_sw_ok = val # the value of n_qual consecutive reads
# print "Initial value is: %s" % val
else:
kill_sw_ok = False;
print "Initial value not found; count = %s" % count
# TODO need better error handling?
if not rospy.is_shutdown():
time.sleep(0.2)
pub.publish(kill_sw_ok) # publish initial state
rospy.loginfo(kill_sw_ok)
while not rospy.is_shutdown():
try:
if not kill_sw_ok:
# Use falling edge detection to see if pin is pulled
# low to avoid repeated polling
GPIO.wait_for_edge(gpio_pin, GPIO.FALLING)
time.sleep(0.1) # wait for sw bounce
if not GPIO.input(gpio_pin): # re-read to confirm
kill_sw_ok = True
pub.publish(kill_sw_ok)
rospy.loginfo(kill_sw_ok)
pub_exec_simple.publish("START_EXEC")
rospy.loginfo("kill_sw_mon: Pubishing START_EXEC")
else:
GPIO.wait_for_edge(gpio_pin, GPIO.RISING)
time.sleep(0.1) # wait for sw bounce
if GPIO.input(gpio_pin):
kill_sw_ok = False
pub.publish(kill_sw_ok)
rospy.loginfo(kill_sw_ok)
rate.sleep()
except Exception, e:
#Revert all GPIO pins to their normal states (i.e. input = safe)
GPIO.cleanup()
if __name__ == '__main__':
try:
kill_sw_mon()
except rospy.ROSInterruptException:
#Revert all GPIO pins to their normal states (i.e. input = safe)
GPIO.cleanup()
pass
| apache-2.0 | 8,239,543,852,723,142,000 | 33.954955 | 81 | 0.569072 | false | 3.923155 | false | false | false |
fossfreedom/coverart-browser | coverart_controllers.py | 1 | 28816 | # -*- Mode: python; coding: utf-8; tab-width: 4; indent-tabs-mode: nil; -*-
#
# Copyright (C) 2012 - fossfreedom
# Copyright (C) 2012 - Agustin Carrasco
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
from datetime import date
from collections import OrderedDict
from collections import namedtuple
from gi.repository import GObject
from gi.repository import Gdk
from gi.repository import RB
from gi.repository import Gio
from gi.repository import GLib
from coverart_browser_prefs import CoverLocale
from coverart_browser_prefs import GSetting
from coverart_utils import create_pixbuf_from_file_at_size
from coverart_utils import GenreConfiguredSpriteSheet
from coverart_utils import ConfiguredSpriteSheet
from coverart_utils import get_stock_size
from coverart_utils import CaseInsensitiveDict
from coverart_utils import Theme
import rb
MenuNodeT = namedtuple('MenuNode', 'label menutype typevalue')
def MenuNode(label, menutype=None, typevalue=None):
return MenuNodeT(label, menutype, typevalue)
class OptionsController(GObject.Object):
# properties
options = GObject.property(type=object, default=None)
current_key = GObject.property(type=str, default=None)
update_image = GObject.property(type=bool, default=False)
enabled = GObject.property(type=bool, default=True)
def __init__(self):
super(OptionsController, self).__init__()
# connect the variations on the current key to the controllers action
self.connect('notify::current-key', self._do_action)
def get_current_key_index(self):
return self.options.index(self.current_key)
def option_selected(self, key):
if key != self.current_key:
# update the current value
self.current_key = key
def _do_action(self, *args):
self.do_action()
def do_action(self):
pass
def get_current_image(self):
return None
def get_current_description(self):
return self.current_key
def update_images(self, *args):
pass
def create_spritesheet(self, plugin, sheet, typestr):
'''
helper function to create a specific spritesheet
'''
if sheet:
del sheet
return ConfiguredSpriteSheet(plugin, typestr, get_stock_size())
def create_button_image(self, plugin, image, icon_name):
'''
helper function to create a button image
'''
if image:
del image
path = 'img/' + Theme(self.plugin).current + '/'
return create_pixbuf_from_file_at_size(
rb.find_plugin_file(self.plugin, path + icon_name),
*get_stock_size())
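# Added illustrative sketch (not part of the original plugin): the smallest
# possible OptionsController subclass, showing the pattern every controller
# below follows -- fill self.values/self.options, then react to changes of
# current_key inside do_action().
class _ExampleToggleController(OptionsController):
    def __init__(self):
        super(_ExampleToggleController, self).__init__()
        self.values = OrderedDict([('Off', False), ('On', True)])
        self.options = list(self.values.keys())
        self.current_key = 'Off'
    def do_action(self):
        print('example controller switched to %s' % self.values[self.current_key])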
class PlaylistPopupController(OptionsController):
def __init__(self, plugin, album_model):
super(PlaylistPopupController, self).__init__()
self._album_model = album_model
shell = plugin.shell
self.plugin = plugin
# get the library name and initialize the superclass with it
self._library_name = shell.props.library_source.props.name
# get the queue name
self._queue_name = shell.props.queue_source.props.name
if " (" in self._queue_name:
self._queue_name = self._queue_name[0:self._queue_name.find(" (")]
self._spritesheet = None
self._update_options(shell)
# get the playlist model so we can monitor changes
playlist_model = shell.props.display_page_model
# connect signals to update playlists
playlist_model.connect('row-inserted', self._update_options, shell)
playlist_model.connect('row-deleted', self._update_options, shell)
playlist_model.connect('row-changed', self._update_options, shell)
def update_images(self, *args):
self._spritesheet = self.create_spritesheet(self.plugin,
self._spritesheet, 'playlist')
if args[-1]:
self.update_image = True
def _update_options(self, *args):
shell = args[-1]
self.update_images(False)
playlist_manager = shell.props.playlist_manager
still_exists = self.current_key == self._library_name or \
self.current_key == self._queue_name
# retrieve the options
values = OrderedDict()
# library and play queue sources
values[self._library_name] = None
values[self._queue_name] = shell.props.queue_source
# playlists
playlists_entries = playlist_manager.get_playlists()
for playlist in playlists_entries:
if playlist.props.is_local:
name = playlist.props.name
values[name] = playlist
still_exists = still_exists or name == self.current_key
self.values = values
self.options = list(values.keys())
self.current_key = self.current_key if still_exists else \
self._library_name
def do_action(self):
playlist = self.values[self.current_key]
if not playlist:
self._album_model.remove_filter('model')
else:
self._album_model.replace_filter('model',
playlist.get_query_model())
def get_current_image(self):
playlist = self.values[self.current_key]
if self.current_key == self._library_name:
image = self._spritesheet['music']
elif self._queue_name in self.current_key:
image = self._spritesheet['queue']
elif isinstance(playlist, RB.StaticPlaylistSource):
image = self._spritesheet['playlist']
else:
image = self._spritesheet['smart']
return image
class GenrePopupController(OptionsController):
# properties
new_genre_icon = GObject.property(type=bool, default=False)
def __init__(self, plugin, album_model):
super(GenrePopupController, self).__init__()
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
self._album_model = album_model
shell = plugin.shell
self.plugin = plugin
# create a new property model for the genres
genres_model = RB.RhythmDBPropertyModel.new(shell.props.db,
RB.RhythmDBPropType.GENRE)
query = shell.props.library_source.props.base_query_model
genres_model.props.query_model = query
# initial genre
self._initial_genre = _('All Genres')
self._spritesheet = None
self._default_image = None
self._unrecognised_image = None
self._connect_properties()
self._connect_signals(query, genres_model)
# generate initial popup
self._update_options(genres_model)
def update_images(self, *args):
if self._spritesheet:
del self._spritesheet
self._spritesheet = GenreConfiguredSpriteSheet(self.plugin,
'genre', get_stock_size())
self._default_image = self.create_button_image(self.plugin,
self._default_image, 'default_genre.png')
self._unrecognised_image = self.create_button_image(self.plugin,
self._unrecognised_image, 'unrecognised_genre.png')
if args[-1]:
self.update_image = True
def _connect_signals(self, query, genres_model):
# connect signals to update genres
self.connect('notify::new-genre-icon', self._update_options, genres_model)
query.connect('row-inserted', self._update_options, genres_model)
query.connect('row-deleted', self._update_options, genres_model)
query.connect('row-changed', self._update_options, genres_model)
def _connect_properties(self):
gs = GSetting()
setting = gs.get_setting(gs.Path.PLUGIN)
setting.bind(gs.PluginKey.NEW_GENRE_ICON, self, 'new_genre_icon',
Gio.SettingsBindFlags.GET)
def _update_options(self, *args):
genres_model = args[-1]
self.update_images(False)
still_exists = False
# retrieve the options
options = []
row_num = 0
for row in genres_model:
if row_num == 0:
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
genre = _('All Genres')
row_num = row_num + 1
else:
genre = row[0]
options.append(genre)
still_exists = still_exists or genre == self.current_key
self.options = options
self.current_key = self.current_key if still_exists else \
self._initial_genre
def do_action(self):
'''
        called when a genre popup menu item is chosen
        removes the genre filter when the first entry ('All Genres') is selected
'''
if self.current_key == self._initial_genre:
self._album_model.remove_filter('genre')
else:
self._album_model.replace_filter('genre', self.current_key)
def get_current_image(self):
test_genre = self.current_key.lower()
if test_genre == self._initial_genre.lower():
image = self._default_image
else:
image = self._find_alternates(test_genre)
if image == self._unrecognised_image and \
test_genre in self._spritesheet:
image = self._spritesheet[test_genre]
return image
def _find_alternates(self, test_genre):
# the following genre checks are required
# 1. if we have user defined genres
# 2. then check locale specific system genres
# 3. then check local specific alternates
# 4. then check if we system genres
# where necessary check if any of the genres are a substring
# of test_genre - check in reverse order so that we
# test largest strings first (prevents spurious matches with
# short strings)
# N.B. we use RB.search_fold since the strings can be
# in a mixture of cases, both unicode (normalized or not) and str
# and as usual python cannot mix and match these types.
test_genre = RB.search_fold(test_genre)
ret, sprite = self._match_genres(test_genre, self._spritesheet.GENRE_USER)
if ret:
return sprite
for genre in sorted(self._spritesheet.locale_names,
key=lambda b: (-len(b), b)):
if RB.search_fold(genre) in test_genre:
return self._spritesheet[self._spritesheet.locale_names[genre]]
# next check locale alternates
ret, sprite = self._match_genres(test_genre, self._spritesheet.GENRE_LOCALE)
if ret:
return sprite
ret, sprite = self._match_genres(test_genre, self._spritesheet.GENRE_SYSTEM)
if ret:
return sprite
# check if any of the default genres are a substring
# of test_genre - check in reverse order so that we
# test largest strings first (prevents spurious matches with
# short strings)
for genre in sorted(self._spritesheet.names,
key=lambda b: (-len(b), b)):
if RB.search_fold(genre) in test_genre:
return self._spritesheet[genre]
# if no matches then default to unrecognised image
return self._unrecognised_image
def _match_genres(self, test_genre, genre_type):
case_search = CaseInsensitiveDict(
dict((k.name, v) for k, v in self._spritesheet.genre_alternate.items()
if k.genre_type == genre_type))
if test_genre in case_search:
return (True, self._spritesheet[case_search[test_genre]])
else:
return (False, None)
def get_current_description(self):
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
if self.current_key == self._initial_genre:
return _('All Genres')
else:
return self.current_key
class SortPopupController(OptionsController):
def __init__(self, plugin, viewmgr):
super(SortPopupController, self).__init__()
self._viewmgr = viewmgr
self.plugin = plugin
# sorts dictionary
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
self.values = OrderedDict([(_('Sort by album name'), 'name'),
(_('Sort by album artist'), 'artist'),
(_('Sort by year'), 'year'),
(_('Sort by rating'), 'rating')])
self.options = list(self.values.keys())
# get the current sort key and initialise the superclass
gs = GSetting()
source_settings = gs.get_setting(gs.Path.PLUGIN)
value = source_settings[gs.PluginKey.SORT_BY]
self._spritesheet = None
self.update_images(False)
self.current_key = list(self.values.keys())[
list(self.values.values()).index(value)]
def update_images(self, *args):
self._spritesheet = self.create_spritesheet(self.plugin,
self._spritesheet, 'sort')
if args[-1]:
self.update_image = True
def do_action(self):
sort = self.values[self.current_key]
gs = GSetting()
settings = gs.get_setting(gs.Path.PLUGIN)
settings[gs.PluginKey.SORT_BY] = sort
self._viewmgr.current_view.get_default_manager().emit('sort', "album")
def get_current_image(self):
sort = self.values[self.current_key]
return self._spritesheet[sort]
class ArtistSortPopupController(OptionsController):
def __init__(self, plugin, viewmgr):
super(ArtistSortPopupController, self).__init__()
self._viewmgr = viewmgr
self.plugin = plugin
# sorts dictionary
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
self.values = OrderedDict([(_('Sort by album name'), 'name_artist'),
(_('Sort by year'), 'year_artist'),
(_('Sort by rating'), 'rating_artist')])
self.options = list(self.values.keys())
# get the current sort key and initialise the superclass
gs = GSetting()
source_settings = gs.get_setting(gs.Path.PLUGIN)
value = source_settings[gs.PluginKey.SORT_BY_ARTIST]
if value not in list(self.values.values()):
print("here")
value = 'name_artist'
source_settings[gs.PluginKey.SORT_BY_ARTIST] = value
self._spritesheet = None
self.update_images(False)
self.current_key = list(self.values.keys())[
list(self.values.values()).index(value)]
print(self.current_key)
def update_images(self, *args):
self._spritesheet = self.create_spritesheet(self.plugin,
self._spritesheet, 'sort_artist')
if args[-1]:
self.update_image = True
def do_action(self):
sort = self.values[self.current_key]
gs = GSetting()
settings = gs.get_setting(gs.Path.PLUGIN)
settings[gs.PluginKey.SORT_BY_ARTIST] = sort
self._viewmgr.current_view.get_default_manager().emit('sort', "artist")
def get_current_image(self):
sort = self.values[self.current_key]
return self._spritesheet[sort]
class PropertiesMenuController(OptionsController):
favourites = GObject.property(type=bool, default=False)
follow = GObject.property(type=bool, default=False)
def __init__(self, plugin, source):
super(PropertiesMenuController, self).__init__()
self._source = source
self.plugin = plugin
self._connect_properties()
# sorts dictionary
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
# options
self.values = OrderedDict()
self.values[MenuNode(_('Download all covers'))] = 'download'
self.values[MenuNode(_('Play random album'))] = 'random'
self.values[MenuNode(_('Follow playing song'), 'check',
(True if self.follow else False))] = 'follow'
self.values[MenuNode('separator1', 'separator')] = ''
self.values[MenuNode(_('Use favourites only'), 'check',
(True if self.favourites else False))] = 'favourite'
self.values[MenuNode('separator2', 'separator')] = ''
self.values[MenuNode(_('Browser Preferences'))] = 'browser prefs'
self.values[MenuNode(_('Search Preferences'))] = 'search prefs'
self.options = list(self.values.keys())
self.update_images(False)
if self.favourites:
self._source.propertiesbutton_callback('favourite')
if self.follow:
self._source.propertiesbutton_callback('follow')
self.current_key = None
def _connect_properties(self):
gs = GSetting()
setting = gs.get_setting(gs.Path.PLUGIN)
setting.bind(
gs.PluginKey.USE_FAVOURITES,
self,
'favourites',
Gio.SettingsBindFlags.DEFAULT)
setting.bind(
gs.PluginKey.FOLLOWING,
self,
'follow',
Gio.SettingsBindFlags.DEFAULT)
def _change_key(self, dict, old, new):
for i in range(len(dict)):
k, v = dict.popitem(False)
dict[new if old == k else k] = v
def update_images(self, *args):
self._image = self.create_button_image(self.plugin,
None, 'properties.png')
if args[-1]:
self.update_image = True
def do_action(self):
if self.current_key:
key = [node for node in self.values if node.label == self.current_key]
if self.current_key == _('Use favourites only'):
self.favourites = not self.favourites
if self.current_key == _('Follow playing song'):
self.follow = not self.follow
self._source.propertiesbutton_callback(self.values[key[0]])
self.current_key = None
def get_current_image(self):
return self._image
def get_current_description(self):
return _('Properties')
class DecadePopupController(OptionsController):
def __init__(self, plugin, album_model):
super(DecadePopupController, self).__init__()
self._album_model = album_model
self.plugin = plugin
self._spritesheet = None
# decade options
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
self.values = OrderedDict()
self.values[_('All Decades')] = [-1, 'All Decades']
        # '20s' as in the decade 2020
self.values[_('20s')] = [2020, '20s']
#'10s' as in the decade 2010
self.values[_('10s')] = [2010, '10s']
#'00s' as in the decade 2000
self.values[_('00s')] = [2000, '00s']
#'90s' as in the decade 1990
self.values[_('90s')] = [1990, '90s']
#'80s' as in the decade 1980
self.values[_('80s')] = [1980, '80s']
#'70s' as in the decade 1970
self.values[_('70s')] = [1970, '70s']
#'60s' as in the decade 1960
self.values[_('60s')] = [1960, '60s']
#'50s' as in the decade 1950
self.values[_('50s')] = [1950, '50s']
#'40s' as in the decade 1940
self.values[_('40s')] = [1940, '40s']
#'30s' as in the decade 1930
self.values[_('30s')] = [1930, '30s']
#'Older' as in 'older than the year 1930'
self.values[_('Older')] = [-1, 'Older']
self.options = list(self.values.keys())
# if we aren't on the 20s yet, remove it
if date.today().year < 2020:
self.options.remove(_('20s'))
        # define an initial decade and set the initial key
self._initial_decade = self.options[0]
self.update_images(False)
self.current_key = self._initial_decade
def update_images(self, *args):
self._spritesheet = self.create_spritesheet(self.plugin,
self._spritesheet, 'decade')
if args[-1]:
self.update_image = True
def do_action(self):
if self.current_key == self._initial_decade:
self._album_model.remove_filter('decade')
else:
self._album_model.replace_filter('decade',
self.values[self.current_key][0])
def get_current_image(self):
decade = self.values[self.current_key][1]
return self._spritesheet[decade]
def get_current_description(self):
return self.current_key
class SortOrderToggleController(OptionsController):
toolbar_type = "album"
def __init__(self, plugin, viewmgr):
super(SortOrderToggleController, self).__init__()
self._viewmgr = viewmgr
self.plugin = plugin
# options
self.values = OrderedDict([(_('Sort in descending order'), False),
(_('Sort in ascending order'), True)])
self.options = list(self.values.keys())
self._images = []
# set the current key
self.gs = GSetting()
self.settings = self.gs.get_setting(self.gs.Path.PLUGIN)
self.key = self.get_key()
sort_order = self.settings[self.key]
self.current_key = list(self.values.keys())[
list(self.values.values()).index(sort_order)]
self.update_images(False)
def get_key(self):
return self.gs.PluginKey.SORT_ORDER
def update_images(self, *args):
# initialize images
if len(self._images) > 0:
del self._images[:]
self._images.append(self.create_button_image(self.plugin,
None, 'arrow_down.png'))
self._images.append(self.create_button_image(self.plugin,
None, 'arrow_up.png'))
if args[-1]:
self.update_image = True
def do_action(self):
sort_order = self.values[self.current_key]
self.settings[self.key] = sort_order
self._viewmgr.current_view.get_default_manager().emit('sort', self.toolbar_type)
def get_current_image(self):
return self._images[self.get_current_key_index()]
class ArtistSortOrderToggleController(SortOrderToggleController):
toolbar_type = "artist"
def __init__(self, plugin, model):
super(ArtistSortOrderToggleController, self).__init__(plugin, model)
def get_key(self):
return self.gs.PluginKey.SORT_ORDER_ARTIST
class AlbumSearchEntryController(OptionsController):
# properties
search_text = GObject.property(type=str, default='')
def __init__(self, album_model):
super(AlbumSearchEntryController, self).__init__()
self._album_model = album_model
self._filter_type = 'all'
# options
self.values = OrderedDict()
self.values[_('Search all fields')] = 'all'
self.values[_('Search album artists')] = 'album_artist'
self.values[_('Search track artists')] = 'artist'
self.values[_('Search composers')] = 'composers'
self.values[_('Search albums')] = 'album_name'
self.values[_('Search titles')] = 'track'
self.options = list(self.values.keys())
self.current_key = list(self.values.keys())[0]
self._typing = False
self._typing_counter = 0
self._current_search_text = ""
def do_action(self):
# remove old filter
self._album_model.remove_filter(self._filter_type, False)
# assign the new filter
self._filter_type = self.values[self.current_key]
self.do_search(self.search_text, True)
def _search_typing(self, *args):
self._typing_counter = self._typing_counter + 1
if self._typing_counter >= 4 and self._typing:
self._typing = False
self._change_filter(self._current_search_text, False)
return self._typing
def _change_filter(self, search_text, force):
# self.search_text = search_text
self._current_search_text = search_text
if search_text:
self._album_model.replace_filter(self._filter_type,
search_text)
elif not force:
self._album_model.remove_filter(self._filter_type)
def do_search(self, search_text, force=False):
'''
if self.search_text != search_text or force:
self.search_text = search_text
if search_text:
self._album_model.replace_filter(self._filter_type,
search_text)
elif not force:
self._album_model.remove_filter(self._filter_type)
'''
# self.search_text = search_text
if force:
self._typing_counter = 99
self._typing = False
self._change_filter(search_text, force)
return
if self._current_search_text != search_text:
#self.search_text = search_text
self._current_search_text = search_text
self._typing_counter = 0
if not self._typing:
self._typing = True
Gdk.threads_add_timeout(GLib.PRIORITY_DEFAULT_IDLE, 100,
self._search_typing)
class AlbumQuickSearchController(object):
def __init__(self, album_manager):
self._album_manager = album_manager
def connect_quick_search(self, quick_search):
quick_search.connect('quick-search', self._on_quick_search)
quick_search.connect('arrow-pressed', self._on_arrow_pressed)
quick_search.connect('hide', self._on_hide)
def _on_quick_search(self, quick_search, search_text, *args):
album = self._album_manager.model.find_first_visible('album_name',
search_text)
if album:
path = self._album_manager.model.get_path(album)
self._album_manager.current_view.select_and_scroll_to_path(path)
def _on_arrow_pressed(self, quick_search, key, *args):
current = self._album_manager.current_view.get_selected_objects()[0]
search_text = quick_search.get_text()
album = None
if key == Gdk.KEY_Up:
album = self._album_manager.model.find_first_visible(
'album_name', search_text, current, True)
elif key == Gdk.KEY_Down:
album = self._album_manager.model.find_first_visible(
'album_name', search_text, current)
if album:
path = self._album_manager.model.get_path(album)
self._album_manager.current_view.select_and_scroll_to_path(path)
def _on_hide(self, quick_search, *args):
self._album_manager.current_view.grab_focus()
class ViewController(OptionsController):
def __init__(self, shell, viewmgr):
super(ViewController, self).__init__()
self._viewmgr = viewmgr
from coverart_browser_source import Views
views = Views(shell)
self.values = OrderedDict()
for view_name in views.get_view_names():
self.values[views.get_menu_name(view_name)] = view_name
print(view_name)
self.options = list(self.values.keys())
viewmgr.connect('new-view', self.on_notify_view_name)
def on_notify_view_name(self, *args):
for key in self.options:
if self.values[key] == self._viewmgr.view_name:
self.current_key = key
def do_action(self):
if self._viewmgr.view_name != self.values[self.current_key]:
self._viewmgr.view_name = self.values[self.current_key]
| gpl-3.0 | -712,676,144,686,856,600 | 32.941107 | 111 | 0.58877 | false | 4.040381 | true | false | false |
neo900/skyline | src/horizon/listen.py | 1 | 4999 | import socket
from os import kill, getpid
from Queue import Full
from multiprocessing import Process
from struct import Struct, unpack
from msgpack import unpackb
from cPickle import loads
import logging
import settings
logger = logging.getLogger("HorizonLog")
class Listen(Process):
"""
The listener is responsible for listening on a port.
"""
def __init__(self, port, queue, parent_pid, type="pickle"):
super(Listen, self).__init__()
try:
self.ip = settings.HORIZON_IP
except AttributeError:
# Default for backwards compatibility
self.ip = socket.gethostname()
self.port = port
self.q = queue
self.daemon = True
self.parent_pid = parent_pid
self.current_pid = getpid()
self.type = type
def gen_unpickle(self, infile):
"""
        Yield the object unpickled from a raw buffer
"""
try:
bunch = loads(infile)
yield bunch
except EOFError:
return
def read_all(self, sock, n):
"""
Read n bytes from a stream
"""
data = ''
while n > 0:
buf = sock.recv(n)
n -= len(buf)
data += buf
return data
def check_if_parent_is_alive(self):
"""
Self explanatory
"""
try:
kill(self.current_pid, 0)
kill(self.parent_pid, 0)
except:
exit(0)
def listen_pickle(self):
"""
Listen for pickles over tcp
"""
while 1:
try:
# Set up the TCP listening socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((self.ip, self.port))
s.setblocking(1)
s.listen(5)
logger.info('listening over tcp for pickles on %s' % self.port)
(conn, address) = s.accept()
logger.info('connection from %s:%s' % (address[0], self.port))
chunk = []
while 1:
self.check_if_parent_is_alive()
try:
length = Struct('!I').unpack(self.read_all(conn, 4))
body = self.read_all(conn, length[0])
# Iterate and chunk each individual datapoint
for bunch in self.gen_unpickle(body):
for metric in bunch:
chunk.append(metric)
# Queue the chunk and empty the variable
if len(chunk) > settings.CHUNK_SIZE:
try:
self.q.put(list(chunk), block=False)
chunk[:] = []
# Drop chunk if queue is full
except Full:
logger.info('queue is full, dropping datapoints')
chunk[:] = []
except Exception as e:
logger.info(e)
logger.info('incoming connection dropped, attempting to reconnect')
break
except Exception as e:
logger.info('can\'t connect to socket: ' + str(e))
break
def listen_udp(self):
"""
Listen over udp for MessagePack strings
"""
while 1:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind((self.ip, self.port))
logger.info('listening over udp for messagepack on %s' % self.port)
chunk = []
while 1:
self.check_if_parent_is_alive()
data, addr = s.recvfrom(1024)
metric = unpackb(data)
chunk.append(metric)
# Queue the chunk and empty the variable
if len(chunk) > settings.CHUNK_SIZE:
try:
self.q.put(list(chunk), block=False)
chunk[:] = []
# Drop chunk if queue is full
except Full:
logger.info('queue is full, dropping datapoints')
chunk[:] = []
except Exception as e:
logger.info('can\'t connect to socket: ' + str(e))
break
def run(self):
"""
        Called when process initializes.
"""
logger.info('started listener')
if self.type == 'pickle':
self.listen_pickle()
elif self.type == 'udp':
self.listen_udp()
else:
logging.error('unknown listener format')
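# Added illustrative client sketch (not part of the original module): how a
# sender could feed listen_pickle() above -- a 4-byte big-endian length prefix
# followed by a pickled batch of datapoints. The shape of each datapoint is an
# assumption here; only the framing mirrors the listener code.
def _send_pickle_example(host, port, datapoints):
    import socket as _socket
    from struct import pack
    from cPickle import dumps
    payload = dumps(datapoints)
    sock = _socket.create_connection((host, port))
    try:
        sock.sendall(pack('!I', len(payload)) + payload)
    finally:
        sock.close()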
| mit | -8,091,358,596,528,935,000 | 31.044872 | 91 | 0.455091 | false | 4.891389 | false | false | false |
bavla/Graph | PsCoresTQ.py | 1 | 2315 | gdir = 'c:/users/batagelj/work/python/graph/graph'
# wdir = 'c:/users/batagelj/work/python/graph/JSON/test'
wdir = 'c:/users/batagelj/work/python/graph/JSON/SN5'
import sys, os, datetime, json
sys.path = [gdir]+sys.path; os.chdir(wdir)
import GraphNew as Graph
import TQ
# fJSON = 'ConnectivityWeighted.json'
# fJSON = "violenceE.json"
# fJSON = 'stem.json'
# fJSON = 'CcCtest.json'
# fJSON = 'Terror news 50.json'
fJSON = 'CcCSN5.json'
# S = Graph.Graph.loadNetJSON(fJSON); G = S.pairs2edges()
G = Graph.Graph.loadNetJSON(fJSON)
# G.saveNetJSON(file="Terror50E",indent=1)
# fJSON = 'ConnectivityTest.json'
# fJSON = 'ExampleB.json'
# fJSON = 'PathfinderTest.json'
# G = Graph.Graph.loadNetJSON(fJSON)
G.delLoops()
print("Temporal Ps cores in: ",fJSON)
t1 = datetime.datetime.now()
print("started: ",t1.ctime(),"\n")
Tmin,Tmax = G._graph['time']
D = { u: G.TQnetSum(u) for u in G._nodes }
# print("Sum =",D,"\n")
Core = { u: [d for d in D[u] if d[2]==0] for u in G.nodes() }
# core number = 0
D = { u: [d for d in D[u] if d[2]>0] for u in G.nodes() }
D = { u: d for u,d in D.items() if d!=[] }
Dmin = { u: min([e[2] for e in d]) for u,d in D.items() }
step = 0
while len(D)>0:
step += 1
dmin,u = min( (v,k) for k,v in Dmin.items() )
if step % 100 == 1:
print("{0:3d}. dmin={1:10.4f} node={2:4d}".format(step,dmin,u))
cCore = TQ.TQ.complement(Core[u],Tmin,Tmax+1)
core = TQ.TQ.extract(cCore,[d for d in D[u] if d[2] == dmin])
if core!=[]:
Core[u] = TQ.TQ.sum(Core[u],core)
D[u] = TQ.TQ.cutGE(TQ.TQ.sum(D[u],TQ.TQ.minus(core)),dmin)
for link in G.star(u):
v = G.twin(u,link)
if not(v in D): continue
chLink = TQ.TQ.minus(TQ.TQ.extract(core,G.getLink(link,'tq')))
if chLink==[]: continue
diff = TQ.TQ.cutGE(TQ.TQ.sum(D[v],chLink),0)
D[v] = [ (sd,fd,max(vd,dmin)) for sd,fd,vd in diff ]
if len(D[v])==0: del D[v]; del Dmin[v]
else: Dmin[v] = min([e[2] for e in D[v]])
if len(D[u])==0: del D[u]; del Dmin[u]
else: Dmin[u] = min([e[2] for e in D[u]])
print("{0:3d}. dmin={1:10.4f} node={2:4d}".format(step,dmin,u))
# print("\n-----\nCore =",Core)
t2 = datetime.datetime.now()
print("\nfinished: ",t2.ctime(),"\ntime used: ", t2-t1)
| gpl-3.0 | 3,479,323,956,564,014,600 | 36.583333 | 71 | 0.577538 | false | 2.232401 | false | false | false |
HarmonyEnterpriseSolutions/harmony-platform | src/gnue/common/datasources/drivers/ldap/Connection.py | 1 | 5525 | # GNU Enterprise Common Library - Generic DBSIG2 database driver - Connection
#
# Copyright 2000-2007 Free Software Foundation
#
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: Connection.py,v 1.2 2008/11/04 20:14:03 oleg Exp $
"""
Generic Connection class for DBSIG2 based database driver plugins.
"""
from src.gnue.common.datasources.drivers.ldap import ResultSet
__all__ = ['Connection']
import ldap
from gnue.common.datasources import Exceptions
from gnue.common.datasources.drivers import Base
# =============================================================================
# Connection class
# =============================================================================
class Connection (Base.Connection):
"""
"""
_resultSetClass_ = ResultSet
# ---------------------------------------------------------------------------
# Constructor
# ---------------------------------------------------------------------------
def __init__ (self, connections, name, parameters):
Base.Connection.__init__ (self, connections, name, parameters)
# ---------------------------------------------------------------------------
# Implementations of virtual methods
# ---------------------------------------------------------------------------
def _getLoginFields_ (self):
return [(u_('User Name'), '_username', 'string', None, None, []),
(u_('Password'), '_password', 'password', None, None, [])]
# ---------------------------------------------------------------------------
def _connect_ (self, connectData):
print "_connect_", connectData
try:
self._ldapObject = ldap.open(connectData['host'], int(connectData.get('port', 389)))
self._ldapObject.simple_bind_s(connectData['_username'], connectData['_password'])
except ldap.LDAPError, e:
raise self.decorateError(
Exceptions.LoginError("%s: %s" % tuple(errors.getException()[1:3]))
)
# ---------------------------------------------------------------------------
def _insert_ (self, table, newfields):
raise NotImplementedError
# ---------------------------------------------------------------------------
def _update_ (self, table, oldfields, newfields):
raise NotImplementedError
# ---------------------------------------------------------------------------
def _delete_ (self, table, oldfields):
raise NotImplementedError
# ---------------------------------------------------------------------------
def _requery_ (self, table, oldfields, fields, parameters):
raise NotImplementedError
# ---------------------------------------------------------------------------
def _commit_ (self):
pass
# ---------------------------------------------------------------------------
def _rollback_ (self):
pass
# ---------------------------------------------------------------------------
def _close_ (self):
pass
# ---------------------------------------------------------------------------
# Virtual methods to be implemented by descendants
# ---------------------------------------------------------------------------
def _getConnectParams_ (self, connectData):
"""
Return a tuple with a list and a dictionary, being the parameters and
keyword arguments to be passed to the connection function of the DBSIG2
driver.
This method must be overwritten by all descendants.
"""
return ([], {})
# ---------------------------------------------------------------------------
def _createTimestamp_ (self, year, month, day, hour, minute, secs, msec = 0):
"""
Create a timestamp object for the given point in time.
This function doesn't have to be overwritten unless the handling of time
values is weird.
@param year: Year number
@param month: Month number (1 - 12)
@param day: Day of the month (1 - 31)
@param hour: Hour (0 - 23)
@param minute: Minute (0 - 59)
@param secs: Whole seconds (integer)
@param msec: Microseconds (integer)
returns: a timestamp object created by the driver's Timestamp constructor
"""
raise NotImplementedError
# ---------------------------------------------------------------------------
# Create an apropriate time object for the given values
# ---------------------------------------------------------------------------
def _createTime_ (self, hour, minute, second, msec = 0):
"""
Create a time object for the given point in time.
This function doesn't have to be overwritten unless the handling of time
values is weird.
@param hour: Hour (0 - 23)
@param minute: Minute (0 - 59)
@param second: Whole seconds (integer)
@param msec: Microseconds (integer)
returns: a time object created by the driver's Time constructor
"""
raise NotImplementedError
def decorateError(self, error):
"""
This function is used to make database related errors user friendly
"""
return error
| gpl-2.0 | 5,706,516,712,830,585,000 | 31.122093 | 87 | 0.520543 | false | 4.638959 | false | false | false |
doraemonext/wechat-platform | wechat_platform/system/response/models.py | 1 | 7805 | # -*- coding: utf-8 -*-
import logging
from time import time
from django.db import models
from wechat_sdk.messages import EventMessage
from system.official_account.models import OfficialAccount
logger_response = logging.getLogger(__name__)
class ResponseManager(models.Manager):
"""
Manager for the WeChat server response message records table
"""
def get(self, official_account, msgid):
return super(ResponseManager, self).get_queryset().filter(
official_account=official_account
).filter(
msgid=msgid
).exclude(
type=Response.TYPE_WAITING
)
def get_latest(self, official_account, wechat_instance):
"""
Get the latest reply for the given OpenID under the given official account
:param official_account: WeChat official account instance (OfficialAccount)
:param wechat_instance: WeChat request instance (WechatBasic)
:return: response instance (Response)
"""
message = wechat_instance.get_message()
return super(ResponseManager, self).get_queryset().filter(
official_account=official_account
).filter(
target=message.source
).exclude(
type=Response.TYPE_WAITING
).order_by(
'-time'
)[:1]
def add(self, official_account, wechat_instance, type, pattern, raw, plugin_dict):
"""
Add a new response message record
:param official_account: WeChat official account instance (OfficialAccount)
:param wechat_instance: WeChat request instance (WechatBasic)
:param type: message type
:param pattern: response pattern
:param raw: raw message content
:param plugin_dict: plugin dict in use, exp: {'iden': 'text', 'reply_id': 54}
"""
message = wechat_instance.get_message()
if isinstance(message, EventMessage):
msgid = message.target + str(message.time)
else:
msgid = message.id
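# (Illustrative) event pushes carry no MsgId, so FromUserName + CreateTime is
# used instead, e.g. target 'oABC123' at time 1420000000 -> 'oABC1231420000000';
# ordinary messages keep their original MsgId.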
# First look up a pending (waiting) reply for this message in the database
response = super(ResponseManager, self).get_queryset().filter(
official_account=official_account
).filter(
msgid=msgid
).filter(
type=Response.TYPE_WAITING
)
if response:
response = response[0]
response.time = int(time())
response.type = type
response.pattern = pattern
response.raw = raw
response.plugin_iden = plugin_dict['iden']
response.reply_id = plugin_dict['reply_id']
response.save()
logger_response.debug('Response has been updated [Detail: %s]' % response.__dict__)
else:
response = super(ResponseManager, self).create(
official_account=official_account,
msgid=msgid,
target=message.source,
source=message.target,
time=int(time()),
type=type,
pattern=pattern,
raw=raw,
plugin_iden=plugin_dict['iden'],
reply_id=plugin_dict['reply_id'],
)
logger_response.debug('New response created [Detail: %s]' % response.__dict__)
return response
def is_waiting(self, official_account, wechat_instance):
"""
Check whether the reply for this message is still pending
:param official_account: WeChat official account instance (OfficialAccount)
:param wechat_instance: WeChat request instance (WechatBasic)
:return: True if the reply is still pending
"""
message = wechat_instance.get_message()
if isinstance(message, EventMessage):
msgid = message.target + str(message.time)
else:
msgid = message.id
# Look up the pending reply for this message in the database
response = super(ResponseManager, self).get_queryset().filter(
official_account=official_account
).filter(
msgid=msgid
).filter(
type=Response.TYPE_WAITING
)
if response:
return True
else:
return False
def add_waiting(self, official_account, wechat_instance):
"""
Add a new response message record (marking this request as being processed)
:param official_account: WeChat official account instance (OfficialAccount)
:param wechat_instance: WeChat request instance (WechatBasic)
"""
message = wechat_instance.get_message()
if isinstance(message, EventMessage):
msgid = message.target + str(message.time)
else:
msgid = message.id
response = super(ResponseManager, self).create(
official_account=official_account,
msgid=msgid,
target=message.source,
source=message.target,
time=int(time()),
type=Response.TYPE_WAITING,
pattern=Response.PATTERN_WAITING,
raw=''
)
logger_response.debug('New response created [Detail: %s]' % response.__dict__)
return response
def end_waiting(self, official_account, wechat_instance):
"""
Finish a pending response message record
:param official_account: WeChat official account instance (OfficialAccount)
:param wechat_instance: WeChat request instance (WechatBasic)
"""
message = wechat_instance.get_message()
if isinstance(message, EventMessage):
msgid = message.target + str(message.time)
else:
msgid = message.id
# Look up the pending reply for this message in the database
response = super(ResponseManager, self).get_queryset().filter(
official_account=official_account
).filter(
msgid=msgid
).filter(
type=Response.TYPE_WAITING
)
if response:
response[0].delete()
class Response(models.Model):
"""
WeChat server response message records table
"""
TYPE_TEXT = 'text'
TYPE_IMAGE = 'image'
TYPE_VIDEO = 'video'
TYPE_VOICE = 'voice'
TYPE_NEWS = 'news'
TYPE_MUSIC = 'music'
TYPE_WAITING = 'waiting'
TYPE = (
(TYPE_TEXT, u'文本消息'),
(TYPE_IMAGE, u'图片消息'),
(TYPE_VIDEO, u'视频消息'),
(TYPE_VOICE, u'语音消息'),
(TYPE_NEWS, u'图文消息'),
(TYPE_MUSIC, u'音乐消息'),
(TYPE_WAITING, u'执行中消息'),
)
PATTERN_NORMAL = 0
PATTERN_SERVICE = 1
PATTERN_SIMULATION = 2
PATTERN_WAITING = 3
PATTERN = (
(PATTERN_NORMAL, u'正常XML返回模式'),
(PATTERN_SERVICE, u'多客服返回模式'),
(PATTERN_SIMULATION, u'模拟登陆返回模式'),
(PATTERN_WAITING, u'执行中消息'),
)
official_account = models.ForeignKey(OfficialAccount, verbose_name=u'所属公众号')
msgid = models.CharField(u'MsgID或FromUserName+CreateTime', max_length=50)
target = models.CharField(u'目标用户OpenID', max_length=50)
source = models.CharField(u'来源用户OpenID', max_length=50)
time = models.IntegerField(u'信息发送时间')
type = models.CharField(u'信息类型', choices=TYPE, max_length=15)
pattern = models.IntegerField(u'响应方式', choices=PATTERN)
raw = models.TextField(u'响应信息原始内容')
plugin_iden = models.CharField(u'插件标识符', max_length=50, null=True, blank=True)
reply_id = models.IntegerField(u'插件回复ID', null=True, blank=True)
objects = models.Manager()
manager = ResponseManager()
class Meta:
verbose_name = u'微信服务器响应信息'
verbose_name_plural = u'微信服务器响应信息'
db_table = 'response'
def __unicode__(self):
return self.raw | bsd-2-clause | 8,729,925,608,646,641,000 | 30.864865 | 95 | 0.579952 | false | 3.201901 | false | false | false |
blab/antibody-response-pulse | bcell-array/code/Virus_Bcell_IgM_IgG_Landscape.py | 1 | 11385 |
# coding: utf-8
# # Antibody Response Pulse
# https://github.com/blab/antibody-response-pulse
#
# ### B-cells evolution --- cross-reactive antibody response after influenza virus infection or vaccination
# ### Adaptive immune response for repeated infection
# In[1]:
'''
author: Alvason Zhenhua Li
date: 04/09/2015
'''
get_ipython().magic(u'matplotlib inline')
import numpy as np
import matplotlib.pyplot as plt
import os
import alva_machinery_event_OAS_new as alva
AlvaFontSize = 23
AlvaFigSize = (15, 5)
numberingFig = 0
# equation plotting
dir_path = '/Users/al/Desktop/GitHub/antibody-response-pulse/bcell-array/figure'
file_name = 'Virus-Bcell-IgM-IgG'
figure_name = '-equation'
file_suffix = '.png'
save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix)
numberingFig = numberingFig + 1
plt.figure(numberingFig, figsize=(12, 5))
plt.axis('off')
plt.title(r'$ Virus-Bcell-IgM-IgG \ equations \ (antibody-response \ for \ repeated-infection) $'
, fontsize = AlvaFontSize)
plt.text(0, 7.0/9, r'$ \frac{\partial V_n(t)}{\partial t} = +\mu_{v} V_{n}(t)(1 - \frac{V_n(t)}{V_{max}}) - \phi_{m} M_{n}(t) V_{n}(t) - \phi_{g} G_{n}(t) V_{n}(t) $'
, fontsize = 1.2*AlvaFontSize)
plt.text(0, 5.0/9, r'$ \frac{\partial B_n(t)}{\partial t} = +\mu_{b}V_{n}(t)(1 - \frac{V_n(t)}{V_{max}}) + (\beta_{m} + \beta_{g}) V_{n}(t) B_{n}(t) - \mu_{b} B_{n}(t) + m_b V_{n}(t)\frac{B_{i-1}(t) - 2B_i(t) + B_{i+1}(t)}{(\Delta i)^2} $'
, fontsize = 1.2*AlvaFontSize)
plt.text(0, 3.0/9,r'$ \frac{\partial M_n(t)}{\partial t} = +\xi_{m} B_{n}(t) - \phi_{m} M_{n}(t) V_{n}(t) - \mu_{m} M_{n}(t) $'
, fontsize = 1.2*AlvaFontSize)
plt.text(0, 1.0/9,r'$ \frac{\partial G_n(t)}{\partial t} = +\xi_{g} B_{n}(t) - \phi_{g} G_{n}(t) V_{n}(t) - \mu_{g} G_{n}(t) + m_a V_{n}(t)\frac{G_{i-1}(t) - 2G_i(t) + G_{i+1}(t)}{(\Delta i)^2} $'
, fontsize = 1.2*AlvaFontSize)
plt.savefig(save_figure, dpi = 100)
plt.show()
# define the V-M-G partial differential equations
def dVdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dV_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
dV_dt_array[:] = +inRateV*V[:]*(1 - V[:]/maxV) - killRateVm*M[:]*V[:] - killRateVg*G[:]*V[:]
return(dV_dt_array)
def dBdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dB_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
Bcopy = np.copy(B)
centerX = Bcopy[:]
leftX = np.roll(Bcopy[:], 1)
rightX = np.roll(Bcopy[:], -1)
leftX[0] = centerX[0]
rightX[-1] = centerX[-1]
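# (Illustrative note) pinning the rolled endpoints to centerX makes the edge
# stencil one-sided: for B = [b0, b1, b2], leftX = [b0, b0, b1] and
# rightX = [b1, b2, b2], so the second difference at the left edge reduces to
# (b0 - 2*b0 + b1) = b1 - b0.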
dB_dt_array[:] = +inRateB*V[:]*(1 - V[:]/maxV) + (actRateBm + alva.event_active + alva.event_OAS_B)*V[:]*B[:] - outRateB*B[:] + mutatRateB*V[:]*(leftX[:] - 2*centerX[:] + rightX[:])/(dx**2)
return(dB_dt_array)
def dMdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dM_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
dM_dt_array[:] = +inRateM*B[:] - consumeRateM*M[:]*V[:] - outRateM*M[:]
return(dM_dt_array)
def dGdt_array(VBMGxt = [], *args):
# naming
V = VBMGxt[0]
B = VBMGxt[1]
M = VBMGxt[2]
G = VBMGxt[3]
x_totalPoint = VBMGxt.shape[1]
# there are n dSdt
dG_dt_array = np.zeros(x_totalPoint)
# each dSdt with the same equation form
Gcopy = np.copy(G)
centerX = Gcopy[:]
leftX = np.roll(Gcopy[:], 1)
rightX = np.roll(Gcopy[:], -1)
leftX[0] = centerX[0]
rightX[-1] = centerX[-1]
dG_dt_array[:] = +(inRateG + alva.event_OAS)*B[:] - consumeRateG*G[:]*V[:] - outRateG*G[:] + mutatRateA*(leftX[:] - 2*centerX[:] + rightX[:])/(dx**2)
return(dG_dt_array)
# In[2]:
# setting parameter
timeUnit = 'year'
if timeUnit == 'hour':
hour = float(1)
day = float(24)
elif timeUnit == 'day':
day = float(1)
hour = float(1)/24
elif timeUnit == 'year':
year = float(1)
day = float(1)/365
hour = float(1)/24/365
maxV = float(50) # max virus/micro-liter
inRateV = 0.2/hour # in-rate of virus
killRateVm = 0.0003/hour # kill-rate of virus by antibody-IgM
killRateVg = killRateVm # kill-rate of virus by antibody-IgG
inRateB = 0.06/hour # in-rate of B-cell
outRateB = inRateB/8 # out-rate of B-cell
actRateBm = killRateVm # activation rate of naive B-cell
inRateM = 0.16/hour # in-rate of antibody-IgM from naive B-cell
outRateM = inRateM/1 # out-rate of antibody-IgM from naive B-cell
consumeRateM = killRateVm # consume-rate of antibody-IgM by cleaning virus
inRateG = inRateM/10 # in-rate of antibody-IgG from memory B-cell
outRateG = outRateM/250 # out-rate of antibody-IgG from memory B-cell
consumeRateG = killRateVg # consume-rate of antibody-IgG by cleaning virus
mutatRateB = 0.00002/hour # B-cell mutation rate
mutatRateA = 0.0002/hour # mutation rate
# time boundary and griding condition
minT = float(0)
maxT = float(10*12*30*day)
totalPoint_T = int(6*10**3 + 1)
gT = np.linspace(minT, maxT, totalPoint_T)
spacingT = np.linspace(minT, maxT, num = totalPoint_T, retstep = True)
gT = spacingT[0]
dt = spacingT[1]
# space boundary and griding condition
minX = float(0)
maxX = float(9)
totalPoint_X = int(maxX - minX + 1)
gX = np.linspace(minX, maxX, totalPoint_X)
gridingX = np.linspace(minX, maxX, num = totalPoint_X, retstep = True)
gX = gridingX[0]
dx = gridingX[1]
gV_array = np.zeros([totalPoint_X, totalPoint_T])
gB_array = np.zeros([totalPoint_X, totalPoint_T])
gM_array = np.zeros([totalPoint_X, totalPoint_T])
gG_array = np.zeros([totalPoint_X, totalPoint_T])
# initial output condition
#gV_array[1, 0] = float(2)
# [pre-parameter, post-parameter, recovered-day, OAS+, OAS-, origin_virus, current_virus]
actRateBg_1st = 0.0002/hour # activation rate of memory B-cell at 1st time (pre-)
actRateBg_2nd = actRateBg_1st*10 # activation rate of memory B-cell at 2nd time (post-)
origin_virus = int(2)
current_virus = int(6)
event_parameter = np.array([[actRateBg_1st,
actRateBg_2nd,
14*day,
+5/hour,
-actRateBm - actRateBg_1st + (actRateBm + actRateBg_1st)/1.3,
origin_virus,
current_virus]])
# [viral population, starting time] ---first
infection_period = 12*30*day
viral_population = np.zeros(int(maxX + 1))
viral_population[origin_virus:current_virus + 1] = 3
infection_starting_time = np.arange(int(maxX + 1))*infection_period
event_1st = np.zeros([int(maxX + 1), 2])
event_1st[:, 0] = viral_population
event_1st[:, 1] = infection_starting_time
print ('event_1st = {:}'.format(event_1st))
# [viral population, starting time] ---2nd
viral_population = np.zeros(int(maxX + 1))
viral_population[origin_virus:current_virus + 1] = 0
infection_starting_time = np.arange(int(maxX + 1))*0
event_2nd = np.zeros([int(maxX + 1), 2])
event_2nd[:, 0] = viral_population
event_2nd[:, 1] = infection_starting_time
print ('event_2nd = {:}'.format(event_2nd))
event_table = np.array([event_parameter, event_1st, event_2nd])
# Runge Kutta numerical solution
pde_array = np.array([dVdt_array, dBdt_array, dMdt_array, dGdt_array])
initial_Out = np.array([gV_array, gB_array, gM_array, gG_array])
gOut_array = alva.AlvaRungeKutta4XT(pde_array, initial_Out, minX, maxX, totalPoint_X, minT, maxT, totalPoint_T, event_table)
# plotting
gV = gOut_array[0]
gB = gOut_array[1]
gM = gOut_array[2]
gG = gOut_array[3]
numberingFig = numberingFig + 1
for i in range(totalPoint_X):
figure_name = '-response-%i'%(i)
figure_suffix = '.png'
save_figure = os.path.join(dir_path, file_name + figure_name + figure_suffix)
plt.figure(numberingFig, figsize = AlvaFigSize)
plt.plot(gT, gV[i], color = 'red', label = r'$ V_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5)
plt.plot(gT, gM[i], color = 'blue', label = r'$ IgM_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5)
plt.plot(gT, gG[i], color = 'green', label = r'$ IgG_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5)
plt.plot(gT, gM[i] + gG[i], color = 'gray', linewidth = 5.0, alpha = 0.5, linestyle = 'dashed'
, label = r'$ IgM_{%i}(t) + IgG_{%i}(t) $'%(i, i))
plt.grid(True, which = 'both')
plt.title(r'$ Antibody \ from \ Virus-{%i} $'%(i), fontsize = AlvaFontSize)
plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize)
plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize)
plt.xlim([minT, maxT])
plt.xticks(fontsize = AlvaFontSize*0.6)
plt.yticks(fontsize = AlvaFontSize*0.6)
plt.ylim([2**0, 2**12])
plt.yscale('log', basey = 2)
plt.legend(loc = (1,0), fontsize = AlvaFontSize)
plt.savefig(save_figure, dpi = 100, bbox_inches='tight')
plt.show()
# In[3]:
# Normalization stacked graph
numberingFig = numberingFig + 1
plt.figure(numberingFig, figsize = AlvaFigSize)
plt.stackplot(gT, gM + gG, alpha = 0.3)
plt.title(r'$ Stacked-graph \ of \ Antibody $', fontsize = AlvaFontSize)
plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize)
plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize)
plt.xticks(fontsize = AlvaFontSize*0.6)
plt.yticks(fontsize = AlvaFontSize*0.6)
plt.ylim([2**0, 2**12])
plt.yscale('log', basey = 2)
plt.grid(True)
plt.show()
# In[4]:
# expected peak of the antibody response
totalColor = current_virus - origin_virus + 1
AlvaColor = [plt.get_cmap('rainbow')(float(i)/(totalColor)) for i in range(1, totalColor + 1)]
sample_time = 90*day
# plotting
figure_name = '-landscape'
figure_suffix = '.png'
save_figure = os.path.join(dir_path, file_name + figure_name + figure_suffix)
numberingFig = numberingFig + 1
plt.figure(numberingFig, figsize = (12, 9))
for i in range(origin_virus, current_virus + 1):
detect_xn = current_virus + 2 - i
if detect_xn == origin_virus:
virus_label = '$ origin-virus $'
elif detect_xn == current_virus:
virus_label = '$ current-virus $'
else: virus_label = '$ {:}th-virus $'.format(detect_xn - origin_virus + 1)
detect_time = int(totalPoint_T/(maxT - minT)*(detect_xn*infection_period + sample_time))
plt.plot(gX, gM[:, detect_time] + gG[:, detect_time], marker = 'o', markersize = 20
, color = AlvaColor[detect_xn - origin_virus], label = virus_label)
plt.fill_between(gX, gM[:, detect_time] + gG[:, detect_time], facecolor = AlvaColor[detect_xn - origin_virus]
, alpha = 0.5)
plt.grid(True, which = 'both')
plt.title(r'$ Antibody \ Landscape $', fontsize = AlvaFontSize)
plt.xlabel(r'$ Virus \ space \ (Antigenic-distance) $', fontsize = AlvaFontSize)
plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize)
plt.xlim([minX, maxX])
plt.xticks(fontsize = AlvaFontSize)
plt.yticks(fontsize = AlvaFontSize)
plt.ylim([2**0, 2**9])
plt.yscale('log', basey = 2)
plt.legend(loc = (1,0), fontsize = AlvaFontSize)
plt.savefig(save_figure, dpi = 100, bbox_inches='tight')
plt.show()
# In[ ]:
| gpl-2.0 | 5,572,586,783,299,461,000 | 36.084691 | 257 | 0.622047 | false | 2.538462 | false | false | false |
moyogo/ufo2ft | Lib/ufo2ft/outlineCompiler.py | 1 | 55408 | import logging
import math
from collections import Counter, namedtuple
from io import BytesIO
from types import SimpleNamespace
from fontTools.cffLib import (
CharStrings,
GlobalSubrsIndex,
IndexedStrings,
PrivateDict,
SubrsIndex,
TopDict,
TopDictIndex,
)
from fontTools.misc.arrayTools import unionRect
from fontTools.misc.fixedTools import otRound
from fontTools.pens.boundsPen import ControlBoundsPen
from fontTools.pens.reverseContourPen import ReverseContourPen
from fontTools.pens.t2CharStringPen import T2CharStringPen
from fontTools.pens.ttGlyphPen import TTGlyphPen
from fontTools.ttLib import TTFont, newTable
from fontTools.ttLib.tables._g_l_y_f import USE_MY_METRICS, Glyph
from fontTools.ttLib.tables._h_e_a_d import mac_epoch_diff
from fontTools.ttLib.tables.O_S_2f_2 import Panose
from ufo2ft.constants import COLOR_LAYERS_KEY, COLOR_PALETTES_KEY
from ufo2ft.errors import InvalidFontData
from ufo2ft.fontInfoData import (
dateStringForNow,
dateStringToTimeValue,
getAttrWithFallback,
intListToNum,
normalizeStringForPostscript,
)
from ufo2ft.util import (
_copyGlyph,
calcCodePageRanges,
makeOfficialGlyphOrder,
makeUnicodeToGlyphNameMapping,
)
logger = logging.getLogger(__name__)
BoundingBox = namedtuple("BoundingBox", ["xMin", "yMin", "xMax", "yMax"])
EMPTY_BOUNDING_BOX = BoundingBox(0, 0, 0, 0)
def _isNonBMP(s):
for c in s:
if ord(c) > 65535:
return True
return False
def _getVerticalOrigin(font, glyph):
if hasattr(glyph, "verticalOrigin") and glyph.verticalOrigin is not None:
verticalOrigin = glyph.verticalOrigin
else:
os2 = font.get("OS/2")
typo_ascender = os2.sTypoAscender if os2 is not None else 0
verticalOrigin = typo_ascender
return otRound(verticalOrigin)
class BaseOutlineCompiler:
"""Create a feature-less outline binary."""
sfntVersion = None
tables = frozenset(
[
"head",
"hmtx",
"hhea",
"name",
"maxp",
"cmap",
"OS/2",
"post",
"vmtx",
"vhea",
"COLR",
"CPAL",
]
)
def __init__(
self,
font,
glyphSet=None,
glyphOrder=None,
tables=None,
notdefGlyph=None,
):
self.ufo = font
# use the previously filtered glyphSet, if any
if glyphSet is None:
glyphSet = {g.name: g for g in font}
self.makeMissingRequiredGlyphs(font, glyphSet, self.sfntVersion, notdefGlyph)
self.allGlyphs = glyphSet
# store the glyph order
if glyphOrder is None:
glyphOrder = font.glyphOrder
self.glyphOrder = self.makeOfficialGlyphOrder(glyphOrder)
# make a reusable character mapping
self.unicodeToGlyphNameMapping = self.makeUnicodeToGlyphNameMapping()
if tables is not None:
self.tables = tables
# cached values defined later on
self._glyphBoundingBoxes = None
self._fontBoundingBox = None
self._compiledGlyphs = None
def compile(self):
"""
Compile the OpenType binary.
"""
self.otf = TTFont(sfntVersion=self.sfntVersion)
# only compile vertical metrics tables if vhea metrics are defined
vertical_metrics = [
"openTypeVheaVertTypoAscender",
"openTypeVheaVertTypoDescender",
"openTypeVheaVertTypoLineGap",
]
self.vertical = all(
getAttrWithFallback(self.ufo.info, metric) is not None
for metric in vertical_metrics
)
self.colorLayers = (
COLOR_LAYERS_KEY in self.ufo.lib and COLOR_PALETTES_KEY in self.ufo.lib
)
# write the glyph order
self.otf.setGlyphOrder(self.glyphOrder)
# populate basic tables
self.setupTable_head()
self.setupTable_hmtx()
self.setupTable_hhea()
self.setupTable_name()
self.setupTable_maxp()
self.setupTable_cmap()
self.setupTable_OS2()
self.setupTable_post()
if self.vertical:
self.setupTable_vmtx()
self.setupTable_vhea()
if self.colorLayers:
self.setupTable_COLR()
self.setupTable_CPAL()
self.setupOtherTables()
self.importTTX()
return self.otf
def compileGlyphs(self):
"""Compile glyphs and return dict keyed by glyph name.
**This should not be called externally.**
Subclasses must override this method to handle compilation of glyphs.
"""
raise NotImplementedError
def getCompiledGlyphs(self):
if self._compiledGlyphs is None:
self._compiledGlyphs = self.compileGlyphs()
return self._compiledGlyphs
def makeGlyphsBoundingBoxes(self):
"""
Make bounding boxes for all the glyphs, and return a dictionary of
BoundingBox(xMin, xMax, yMin, yMax) namedtuples keyed by glyph names.
The bounding box of empty glyphs (without contours or components) is
set to None.
The bbox values are integers.
**This should not be called externally.**
Subclasses must override this method to handle the bounds creation for
their specific glyph type.
"""
raise NotImplementedError
@property
def glyphBoundingBoxes(self):
if self._glyphBoundingBoxes is None:
self._glyphBoundingBoxes = self.makeGlyphsBoundingBoxes()
return self._glyphBoundingBoxes
def makeFontBoundingBox(self):
"""
Make a bounding box for the font.
**This should not be called externally.** Subclasses
may override this method to handle the bounds creation
in a different way if desired.
"""
fontBox = None
for glyphBox in self.glyphBoundingBoxes.values():
if glyphBox is None:
continue
if fontBox is None:
fontBox = glyphBox
else:
fontBox = unionRect(fontBox, glyphBox)
if fontBox is None: # unlikely
fontBox = EMPTY_BOUNDING_BOX
return fontBox
@property
def fontBoundingBox(self):
if self._fontBoundingBox is None:
self._fontBoundingBox = self.makeFontBoundingBox()
return self._fontBoundingBox
def makeUnicodeToGlyphNameMapping(self):
"""
Make a ``unicode : glyph name`` mapping for the font.
**This should not be called externally.** Subclasses
may override this method to handle the mapping creation
in a different way if desired.
"""
return makeUnicodeToGlyphNameMapping(self.allGlyphs, self.glyphOrder)
@staticmethod
def makeMissingRequiredGlyphs(font, glyphSet, sfntVersion, notdefGlyph=None):
"""
Add .notdef to the glyph set if it is not present.
**This should not be called externally.** Subclasses
may override this method to handle the glyph creation
in a different way if desired.
"""
if ".notdef" in glyphSet:
return
reverseContour = sfntVersion == "\000\001\000\000"
if notdefGlyph:
notdefGlyph = _copyGlyph(notdefGlyph, reverseContour=reverseContour)
else:
unitsPerEm = otRound(getAttrWithFallback(font.info, "unitsPerEm"))
ascender = otRound(getAttrWithFallback(font.info, "ascender"))
descender = otRound(getAttrWithFallback(font.info, "descender"))
defaultWidth = otRound(unitsPerEm * 0.5)
notdefGlyph = StubGlyph(
name=".notdef",
width=defaultWidth,
unitsPerEm=unitsPerEm,
ascender=ascender,
descender=descender,
reverseContour=reverseContour,
)
glyphSet[".notdef"] = notdefGlyph
def makeOfficialGlyphOrder(self, glyphOrder):
"""
Make the final glyph order.
**This should not be called externally.** Subclasses
may override this method to handle the order creation
in a different way if desired.
"""
return makeOfficialGlyphOrder(self.allGlyphs, glyphOrder)
# --------------
# Table Builders
# --------------
def setupTable_gasp(self):
if "gasp" not in self.tables:
return
self.otf["gasp"] = gasp = newTable("gasp")
gasp_ranges = dict()
for record in self.ufo.info.openTypeGaspRangeRecords:
rangeMaxPPEM = record["rangeMaxPPEM"]
behavior_bits = record["rangeGaspBehavior"]
rangeGaspBehavior = intListToNum(behavior_bits, 0, 4)
gasp_ranges[rangeMaxPPEM] = rangeGaspBehavior
gasp.gaspRange = gasp_ranges
def setupTable_head(self):
"""
Make the head table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "head" not in self.tables:
return
self.otf["head"] = head = newTable("head")
font = self.ufo
head.checkSumAdjustment = 0
head.tableVersion = 1.0
head.magicNumber = 0x5F0F3CF5
# version numbers
# limit minor version to 3 digits as recommended in OpenType spec:
# https://www.microsoft.com/typography/otspec/recom.htm
versionMajor = getAttrWithFallback(font.info, "versionMajor")
versionMinor = getAttrWithFallback(font.info, "versionMinor")
fullFontRevision = float("%d.%03d" % (versionMajor, versionMinor))
head.fontRevision = round(fullFontRevision, 3)
if head.fontRevision != fullFontRevision:
logger.warning(
"Minor version in %s has too many digits and won't fit into "
"the head table's fontRevision field; rounded to %s.",
fullFontRevision,
head.fontRevision,
)
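# For example (illustrative): versionMajor=1, versionMinor=2345 gives a full
# font revision of 1.2345, which is stored as 1.234 and triggers the warning
# above; versionMinor=250 is stored exactly as 1.250.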
# upm
head.unitsPerEm = otRound(getAttrWithFallback(font.info, "unitsPerEm"))
# times
head.created = (
dateStringToTimeValue(getAttrWithFallback(font.info, "openTypeHeadCreated"))
- mac_epoch_diff
)
head.modified = dateStringToTimeValue(dateStringForNow()) - mac_epoch_diff
# bounding box
xMin, yMin, xMax, yMax = self.fontBoundingBox
head.xMin = otRound(xMin)
head.yMin = otRound(yMin)
head.xMax = otRound(xMax)
head.yMax = otRound(yMax)
# style mapping
styleMapStyleName = getAttrWithFallback(font.info, "styleMapStyleName")
macStyle = []
if styleMapStyleName == "bold":
macStyle = [0]
elif styleMapStyleName == "bold italic":
macStyle = [0, 1]
elif styleMapStyleName == "italic":
macStyle = [1]
head.macStyle = intListToNum(macStyle, 0, 16)
# misc
head.flags = intListToNum(
getAttrWithFallback(font.info, "openTypeHeadFlags"), 0, 16
)
head.lowestRecPPEM = otRound(
getAttrWithFallback(font.info, "openTypeHeadLowestRecPPEM")
)
head.fontDirectionHint = 2
head.indexToLocFormat = 0
head.glyphDataFormat = 0
def setupTable_name(self):
"""
Make the name table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "name" not in self.tables:
return
font = self.ufo
self.otf["name"] = name = newTable("name")
name.names = []
# Set name records from font.info.openTypeNameRecords
for nameRecord in getAttrWithFallback(font.info, "openTypeNameRecords"):
nameId = nameRecord["nameID"]
platformId = nameRecord["platformID"]
platEncId = nameRecord["encodingID"]
langId = nameRecord["languageID"]
# on Python 2, plistLib (used by ufoLib) returns unicode strings
# only when plist data contain non-ascii characters, and returns
# ascii-encoded bytes when it can. On the other hand, fontTools's
# name table `setName` method wants unicode strings, so we must
# decode them first
nameVal = nameRecord["string"]
name.setName(nameVal, nameId, platformId, platEncId, langId)
# Build name records
familyName = getAttrWithFallback(font.info, "styleMapFamilyName")
styleName = getAttrWithFallback(font.info, "styleMapStyleName").title()
preferredFamilyName = getAttrWithFallback(
font.info, "openTypeNamePreferredFamilyName"
)
preferredSubfamilyName = getAttrWithFallback(
font.info, "openTypeNamePreferredSubfamilyName"
)
fullName = f"{preferredFamilyName} {preferredSubfamilyName}"
nameVals = {
0: getAttrWithFallback(font.info, "copyright"),
1: familyName,
2: styleName,
3: getAttrWithFallback(font.info, "openTypeNameUniqueID"),
4: fullName,
5: getAttrWithFallback(font.info, "openTypeNameVersion"),
6: getAttrWithFallback(font.info, "postscriptFontName"),
7: getAttrWithFallback(font.info, "trademark"),
8: getAttrWithFallback(font.info, "openTypeNameManufacturer"),
9: getAttrWithFallback(font.info, "openTypeNameDesigner"),
10: getAttrWithFallback(font.info, "openTypeNameDescription"),
11: getAttrWithFallback(font.info, "openTypeNameManufacturerURL"),
12: getAttrWithFallback(font.info, "openTypeNameDesignerURL"),
13: getAttrWithFallback(font.info, "openTypeNameLicense"),
14: getAttrWithFallback(font.info, "openTypeNameLicenseURL"),
16: preferredFamilyName,
17: preferredSubfamilyName,
18: getAttrWithFallback(font.info, "openTypeNameCompatibleFullName"),
19: getAttrWithFallback(font.info, "openTypeNameSampleText"),
21: getAttrWithFallback(font.info, "openTypeNameWWSFamilyName"),
22: getAttrWithFallback(font.info, "openTypeNameWWSSubfamilyName"),
}
# don't add typographic names if they are the same as the legacy ones
if nameVals[1] == nameVals[16]:
del nameVals[16]
if nameVals[2] == nameVals[17]:
del nameVals[17]
# postscript font name
if nameVals[6]:
nameVals[6] = normalizeStringForPostscript(nameVals[6])
for nameId in sorted(nameVals.keys()):
nameVal = nameVals[nameId]
if not nameVal:
continue
platformId = 3
platEncId = 10 if _isNonBMP(nameVal) else 1
langId = 0x409
# Set built name record if not set yet
if name.getName(nameId, platformId, platEncId, langId):
continue
name.setName(nameVal, nameId, platformId, platEncId, langId)
def setupTable_maxp(self):
"""
Make the maxp table.
**This should not be called externally.** Subclasses
must override or supplement this method to handle the
table creation for either CFF or TT data.
"""
raise NotImplementedError
def setupTable_cmap(self):
"""
Make the cmap table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "cmap" not in self.tables:
return
from fontTools.ttLib.tables._c_m_a_p import cmap_format_4
nonBMP = {k: v for k, v in self.unicodeToGlyphNameMapping.items() if k > 65535}
if nonBMP:
mapping = {
k: v for k, v in self.unicodeToGlyphNameMapping.items() if k <= 65535
}
else:
mapping = dict(self.unicodeToGlyphNameMapping)
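# (Illustrative) a glyph mapped at U+1F600 (0x1F600 > 0xFFFF) is excluded from
# the format 4 subtables built below and only appears in the format 12
# subtables added at the end of this method.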
# mac
cmap4_0_3 = cmap_format_4(4)
cmap4_0_3.platformID = 0
cmap4_0_3.platEncID = 3
cmap4_0_3.language = 0
cmap4_0_3.cmap = mapping
# windows
cmap4_3_1 = cmap_format_4(4)
cmap4_3_1.platformID = 3
cmap4_3_1.platEncID = 1
cmap4_3_1.language = 0
cmap4_3_1.cmap = mapping
# store
self.otf["cmap"] = cmap = newTable("cmap")
cmap.tableVersion = 0
cmap.tables = [cmap4_0_3, cmap4_3_1]
# If we have glyphs outside Unicode BMP, we must set another
# subtable that can hold longer codepoints for them.
if nonBMP:
from fontTools.ttLib.tables._c_m_a_p import cmap_format_12
nonBMP.update(mapping)
# mac
cmap12_0_4 = cmap_format_12(12)
cmap12_0_4.platformID = 0
cmap12_0_4.platEncID = 4
cmap12_0_4.language = 0
cmap12_0_4.cmap = nonBMP
# windows
cmap12_3_10 = cmap_format_12(12)
cmap12_3_10.platformID = 3
cmap12_3_10.platEncID = 10
cmap12_3_10.language = 0
cmap12_3_10.cmap = nonBMP
# update tables registry
cmap.tables = [cmap4_0_3, cmap4_3_1, cmap12_0_4, cmap12_3_10]
def setupTable_OS2(self):
"""
Make the OS/2 table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "OS/2" not in self.tables:
return
self.otf["OS/2"] = os2 = newTable("OS/2")
font = self.ufo
os2.version = 0x0004
# average glyph width
os2.xAvgCharWidth = 0
hmtx = self.otf.get("hmtx")
if hmtx is not None:
widths = [width for width, _ in hmtx.metrics.values() if width > 0]
if widths:
os2.xAvgCharWidth = otRound(sum(widths) / len(widths))
# weight and width classes
os2.usWeightClass = getAttrWithFallback(font.info, "openTypeOS2WeightClass")
os2.usWidthClass = getAttrWithFallback(font.info, "openTypeOS2WidthClass")
# embedding
os2.fsType = intListToNum(
getAttrWithFallback(font.info, "openTypeOS2Type"), 0, 16
)
# subscript, superscript, strikeout values, taken from AFDKO:
# FDK/Tools/Programs/makeotf/makeotf_lib/source/hotconv/hot.c
unitsPerEm = getAttrWithFallback(font.info, "unitsPerEm")
italicAngle = getAttrWithFallback(font.info, "italicAngle")
xHeight = getAttrWithFallback(font.info, "xHeight")
def adjustOffset(offset, angle):
"""Adjust Y offset based on italic angle, to get X offset."""
return offset * math.tan(math.radians(-angle)) if angle else 0
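# (Illustrative) with italicAngle = -12, an offset of 100 units maps to roughly
# 100 * tan(12 deg) ~= 21 units of horizontal shift.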
v = getAttrWithFallback(font.info, "openTypeOS2SubscriptXSize")
if v is None:
v = unitsPerEm * 0.65
os2.ySubscriptXSize = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SubscriptYSize")
if v is None:
v = unitsPerEm * 0.6
os2.ySubscriptYSize = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SubscriptYOffset")
if v is None:
v = unitsPerEm * 0.075
os2.ySubscriptYOffset = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SubscriptXOffset")
if v is None:
v = adjustOffset(-os2.ySubscriptYOffset, italicAngle)
os2.ySubscriptXOffset = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptXSize")
if v is None:
v = os2.ySubscriptXSize
os2.ySuperscriptXSize = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptYSize")
if v is None:
v = os2.ySubscriptYSize
os2.ySuperscriptYSize = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptYOffset")
if v is None:
v = unitsPerEm * 0.35
os2.ySuperscriptYOffset = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2SuperscriptXOffset")
if v is None:
v = adjustOffset(os2.ySuperscriptYOffset, italicAngle)
os2.ySuperscriptXOffset = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2StrikeoutSize")
if v is None:
v = getAttrWithFallback(font.info, "postscriptUnderlineThickness")
os2.yStrikeoutSize = otRound(v)
v = getAttrWithFallback(font.info, "openTypeOS2StrikeoutPosition")
if v is None:
v = xHeight * 0.6 if xHeight else unitsPerEm * 0.22
os2.yStrikeoutPosition = otRound(v)
# family class
ibmFontClass, ibmFontSubclass = getAttrWithFallback(
font.info, "openTypeOS2FamilyClass"
)
os2.sFamilyClass = (ibmFontClass << 8) + ibmFontSubclass
# panose
data = getAttrWithFallback(font.info, "openTypeOS2Panose")
panose = Panose()
panose.bFamilyType = data[0]
panose.bSerifStyle = data[1]
panose.bWeight = data[2]
panose.bProportion = data[3]
panose.bContrast = data[4]
panose.bStrokeVariation = data[5]
panose.bArmStyle = data[6]
panose.bLetterForm = data[7]
panose.bMidline = data[8]
panose.bXHeight = data[9]
os2.panose = panose
# Unicode ranges
uniRanges = getAttrWithFallback(font.info, "openTypeOS2UnicodeRanges")
if uniRanges is not None:
os2.ulUnicodeRange1 = intListToNum(uniRanges, 0, 32)
os2.ulUnicodeRange2 = intListToNum(uniRanges, 32, 32)
os2.ulUnicodeRange3 = intListToNum(uniRanges, 64, 32)
os2.ulUnicodeRange4 = intListToNum(uniRanges, 96, 32)
else:
os2.recalcUnicodeRanges(self.otf)
# codepage ranges
codepageRanges = getAttrWithFallback(font.info, "openTypeOS2CodePageRanges")
if codepageRanges is None:
unicodes = self.unicodeToGlyphNameMapping.keys()
codepageRanges = calcCodePageRanges(unicodes)
os2.ulCodePageRange1 = intListToNum(codepageRanges, 0, 32)
os2.ulCodePageRange2 = intListToNum(codepageRanges, 32, 32)
# vendor id
os2.achVendID = getAttrWithFallback(font.info, "openTypeOS2VendorID")
# vertical metrics
os2.sxHeight = otRound(getAttrWithFallback(font.info, "xHeight"))
os2.sCapHeight = otRound(getAttrWithFallback(font.info, "capHeight"))
os2.sTypoAscender = otRound(
getAttrWithFallback(font.info, "openTypeOS2TypoAscender")
)
os2.sTypoDescender = otRound(
getAttrWithFallback(font.info, "openTypeOS2TypoDescender")
)
os2.sTypoLineGap = otRound(
getAttrWithFallback(font.info, "openTypeOS2TypoLineGap")
)
os2.usWinAscent = otRound(
getAttrWithFallback(font.info, "openTypeOS2WinAscent")
)
os2.usWinDescent = otRound(
getAttrWithFallback(font.info, "openTypeOS2WinDescent")
)
# style mapping
selection = list(getAttrWithFallback(font.info, "openTypeOS2Selection"))
styleMapStyleName = getAttrWithFallback(font.info, "styleMapStyleName")
if styleMapStyleName == "regular":
selection.append(6)
elif styleMapStyleName == "bold":
selection.append(5)
elif styleMapStyleName == "italic":
selection.append(0)
elif styleMapStyleName == "bold italic":
selection += [0, 5]
os2.fsSelection = intListToNum(selection, 0, 16)
# character indexes
unicodes = [i for i in self.unicodeToGlyphNameMapping.keys() if i is not None]
if unicodes:
minIndex = min(unicodes)
maxIndex = max(unicodes)
else:
# the font may have *no* unicode values (it really happens!) so
# there needs to be a fallback. use 0xFFFF, as AFDKO does:
# FDK/Tools/Programs/makeotf/makeotf_lib/source/hotconv/map.c
minIndex = 0xFFFF
maxIndex = 0xFFFF
if maxIndex > 0xFFFF:
# the spec says that 0xFFFF should be used
# as the max if the max exceeds 0xFFFF
maxIndex = 0xFFFF
os2.fsFirstCharIndex = minIndex
os2.fsLastCharIndex = maxIndex
os2.usBreakChar = 32
os2.usDefaultChar = 0
# maximum contextual lookup length
os2.usMaxContex = 0
def setupTable_hmtx(self):
"""
Make the hmtx table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "hmtx" not in self.tables:
return
self.otf["hmtx"] = hmtx = newTable("hmtx")
hmtx.metrics = {}
for glyphName, glyph in self.allGlyphs.items():
width = otRound(glyph.width)
if width < 0:
raise ValueError("The width should not be negative: '%s'" % (glyphName))
bounds = self.glyphBoundingBoxes[glyphName]
left = bounds.xMin if bounds else 0
hmtx[glyphName] = (width, left)
def _setupTable_hhea_or_vhea(self, tag):
"""
Make the hhea table or the vhea table. This assume the hmtx or
the vmtx were respectively made first.
"""
if tag not in self.tables:
return
if tag == "hhea":
isHhea = True
else:
isHhea = False
self.otf[tag] = table = newTable(tag)
mtxTable = self.otf.get(tag[0] + "mtx")
font = self.ufo
if isHhea:
table.tableVersion = 0x00010000
else:
table.tableVersion = 0x00011000
# Vertical metrics in hhea, horizontal metrics in vhea
# and caret info.
# The hhea metrics names are formed as:
# "openType" + tag.title() + "Ascender", etc.
# While vhea metrics names are formed as:
# "openType" + tag.title() + "VertTypo" + "Ascender", etc.
# Caret info names only differ by tag.title().
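# For example: "hhea" ascent is read from "openTypeHheaAscender", while "vhea"
# ascent is read from "openTypeVheaVertTypoAscender"; the caret fields are
# "openTypeHheaCaretSlopeRise" vs "openTypeVheaCaretSlopeRise".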
commonPrefix = "openType%s" % tag.title()
if isHhea:
metricsPrefix = commonPrefix
else:
metricsPrefix = "openType%sVertTypo" % tag.title()
metricsDict = {
"ascent": "%sAscender" % metricsPrefix,
"descent": "%sDescender" % metricsPrefix,
"lineGap": "%sLineGap" % metricsPrefix,
"caretSlopeRise": "%sCaretSlopeRise" % commonPrefix,
"caretSlopeRun": "%sCaretSlopeRun" % commonPrefix,
"caretOffset": "%sCaretOffset" % commonPrefix,
}
for otfName, ufoName in metricsDict.items():
setattr(table, otfName, otRound(getAttrWithFallback(font.info, ufoName)))
# Horizontal metrics in hhea, vertical metrics in vhea
advances = [] # width in hhea, height in vhea
firstSideBearings = [] # left in hhea, top in vhea
secondSideBearings = [] # right in hhea, bottom in vhea
extents = []
if mtxTable is not None:
for glyphName in self.allGlyphs:
advance, firstSideBearing = mtxTable[glyphName]
advances.append(advance)
bounds = self.glyphBoundingBoxes[glyphName]
if bounds is None:
continue
if isHhea:
boundsAdvance = bounds.xMax - bounds.xMin
# equation from the hhea spec for calculating xMaxExtent:
# Max(lsb + (xMax - xMin))
extent = firstSideBearing + boundsAdvance
else:
boundsAdvance = bounds.yMax - bounds.yMin
# equation from the vhea spec for calculating yMaxExtent:
# Max(tsb + (yMax - yMin)).
extent = firstSideBearing + boundsAdvance
secondSideBearing = advance - firstSideBearing - boundsAdvance
firstSideBearings.append(firstSideBearing)
secondSideBearings.append(secondSideBearing)
extents.append(extent)
setattr(
table,
"advance%sMax" % ("Width" if isHhea else "Height"),
max(advances) if advances else 0,
)
setattr(
table,
"min%sSideBearing" % ("Left" if isHhea else "Top"),
min(firstSideBearings) if firstSideBearings else 0,
)
setattr(
table,
"min%sSideBearing" % ("Right" if isHhea else "Bottom"),
min(secondSideBearings) if secondSideBearings else 0,
)
setattr(
table,
"%sMaxExtent" % ("x" if isHhea else "y"),
max(extents) if extents else 0,
)
if isHhea:
reserved = range(4)
else:
# vhea.reserved0 is caretOffset for legacy reasons
reserved = range(1, 5)
for i in reserved:
setattr(table, "reserved%i" % i, 0)
table.metricDataFormat = 0
# glyph count
setattr(
table, "numberOf%sMetrics" % ("H" if isHhea else "V"), len(self.allGlyphs)
)
def setupTable_hhea(self):
"""
Make the hhea table. This assumes that the hmtx table was made first.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
self._setupTable_hhea_or_vhea("hhea")
def setupTable_vmtx(self):
"""
Make the vmtx table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "vmtx" not in self.tables:
return
self.otf["vmtx"] = vmtx = newTable("vmtx")
vmtx.metrics = {}
for glyphName, glyph in self.allGlyphs.items():
height = otRound(glyph.height)
if height < 0:
raise ValueError(
"The height should not be negative: '%s'" % (glyphName)
)
verticalOrigin = _getVerticalOrigin(self.otf, glyph)
bounds = self.glyphBoundingBoxes[glyphName]
top = bounds.yMax if bounds else 0
vmtx[glyphName] = (height, verticalOrigin - top)
def setupTable_VORG(self):
"""
Make the VORG table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "VORG" not in self.tables:
return
self.otf["VORG"] = vorg = newTable("VORG")
vorg.majorVersion = 1
vorg.minorVersion = 0
vorg.VOriginRecords = {}
# Find the most frequent verticalOrigin
vorg_count = Counter(
_getVerticalOrigin(self.otf, glyph) for glyph in self.allGlyphs.values()
)
vorg.defaultVertOriginY = vorg_count.most_common(1)[0][0]
if len(vorg_count) > 1:
for glyphName, glyph in self.allGlyphs.items():
vertOriginY = _getVerticalOrigin(self.otf, glyph)
if vertOriginY == vorg.defaultVertOriginY:
continue
vorg.VOriginRecords[glyphName] = vertOriginY
vorg.numVertOriginYMetrics = len(vorg.VOriginRecords)
def setupTable_vhea(self):
"""
Make the vhea table. This assumes that the head and vmtx tables were
made first.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
self._setupTable_hhea_or_vhea("vhea")
def setupTable_post(self):
"""
Make the post table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "post" not in self.tables:
return
self.otf["post"] = post = newTable("post")
font = self.ufo
post.formatType = 3.0
# italic angle
italicAngle = getAttrWithFallback(font.info, "italicAngle")
post.italicAngle = italicAngle
# underline
underlinePosition = getAttrWithFallback(
font.info, "postscriptUnderlinePosition"
)
post.underlinePosition = otRound(underlinePosition)
underlineThickness = getAttrWithFallback(
font.info, "postscriptUnderlineThickness"
)
post.underlineThickness = otRound(underlineThickness)
post.isFixedPitch = getAttrWithFallback(font.info, "postscriptIsFixedPitch")
# misc
post.minMemType42 = 0
post.maxMemType42 = 0
post.minMemType1 = 0
post.maxMemType1 = 0
def setupTable_COLR(self):
"""
Compile the COLR table.
**This should not be called externally.**
"""
if "COLR" not in self.tables:
return
from fontTools.colorLib.builder import buildCOLR
layerInfo = self.ufo.lib[COLOR_LAYERS_KEY]
glyphMap = self.otf.getReverseGlyphMap()
if layerInfo:
self.otf["COLR"] = buildCOLR(layerInfo, glyphMap=glyphMap)
def setupTable_CPAL(self):
"""
Compile the CPAL table.
**This should not be called externally.**
"""
if "CPAL" not in self.tables:
return
from fontTools.colorLib.builder import buildCPAL
from fontTools.colorLib.errors import ColorLibError
# colorLib wants colors as tuples, plistlib gives us lists
palettes = [
[tuple(color) for color in palette]
for palette in self.ufo.lib[COLOR_PALETTES_KEY]
]
try:
self.otf["CPAL"] = buildCPAL(palettes)
except ColorLibError as e:
raise InvalidFontData("Failed to build CPAL table") from e
def setupOtherTables(self):
"""
Make the other tables. The default implementation does nothing.
**This should not be called externally.** Subclasses
may override this method to add other tables to the
font if desired.
"""
pass
def importTTX(self):
"""
Merge TTX files from data directory "com.github.fonttools.ttx"
**This should not be called externally.** Subclasses
may override this method to handle the TTX merging
in a different way if desired.
"""
import os
prefix = "com.github.fonttools.ttx"
if not hasattr(self.ufo, "data"):
return
if not self.ufo.data.fileNames:
return
for path in self.ufo.data.fileNames:
foldername, filename = os.path.split(path)
if foldername == prefix and filename.endswith(".ttx"):
ttx = self.ufo.data[path].decode("utf-8")
fp = BytesIO(ttx.encode("utf-8"))
# Preserve the original SFNT version when loading a TTX dump.
sfntVersion = self.otf.sfntVersion
try:
self.otf.importXML(fp)
finally:
self.otf.sfntVersion = sfntVersion
class OutlineOTFCompiler(BaseOutlineCompiler):
"""Compile a .otf font with CFF outlines."""
sfntVersion = "OTTO"
tables = BaseOutlineCompiler.tables | {"CFF", "VORG"}
def __init__(
self,
font,
glyphSet=None,
glyphOrder=None,
tables=None,
notdefGlyph=None,
roundTolerance=None,
optimizeCFF=True,
):
if roundTolerance is not None:
self.roundTolerance = float(roundTolerance)
else:
# round all coordinates to integers by default
self.roundTolerance = 0.5
super().__init__(
font,
glyphSet=glyphSet,
glyphOrder=glyphOrder,
tables=tables,
notdefGlyph=notdefGlyph,
)
self.optimizeCFF = optimizeCFF
self._defaultAndNominalWidths = None
def getDefaultAndNominalWidths(self):
"""Return (defaultWidthX, nominalWidthX).
If fontinfo.plist doesn't define these explicitly, compute optimal values
from the glyphs' advance widths.
"""
if self._defaultAndNominalWidths is None:
info = self.ufo.info
# populate the width values
if all(
getattr(info, attr, None) is None
for attr in ("postscriptDefaultWidthX", "postscriptNominalWidthX")
):
# no custom values set in fontinfo.plist; compute optimal ones
from fontTools.cffLib.width import optimizeWidths
widths = [otRound(glyph.width) for glyph in self.allGlyphs.values()]
defaultWidthX, nominalWidthX = optimizeWidths(widths)
else:
defaultWidthX = otRound(
getAttrWithFallback(info, "postscriptDefaultWidthX")
)
nominalWidthX = otRound(
getAttrWithFallback(info, "postscriptNominalWidthX")
)
self._defaultAndNominalWidths = (defaultWidthX, nominalWidthX)
return self._defaultAndNominalWidths
def compileGlyphs(self):
"""Compile and return the CFF T2CharStrings for this font."""
defaultWidth, nominalWidth = self.getDefaultAndNominalWidths()
# The real PrivateDict will be created later on in setupTable_CFF.
# For convenience here we use a namespace object to pass the default/nominal
# widths that we need to draw the charstrings when computing their bounds.
private = SimpleNamespace(
defaultWidthX=defaultWidth, nominalWidthX=nominalWidth
)
compiledGlyphs = {}
for glyphName in self.glyphOrder:
glyph = self.allGlyphs[glyphName]
cs = self.getCharStringForGlyph(glyph, private)
compiledGlyphs[glyphName] = cs
return compiledGlyphs
def makeGlyphsBoundingBoxes(self):
"""
Make bounding boxes for all the glyphs, and return a dictionary of
BoundingBox(xMin, xMax, yMin, yMax) namedtuples keyed by glyph names.
The bounding box of empty glyphs (without contours or components) is
set to None.
Check that the float values are within the range of the specified
self.roundTolerance, and if so use the rounded value; else take the
floor or ceiling to ensure that the bounding box encloses the original
values.
"""
def toInt(value, else_callback):
rounded = otRound(value)
if tolerance >= 0.5 or abs(rounded - value) <= tolerance:
return rounded
else:
return int(else_callback(value))
tolerance = self.roundTolerance
glyphBoxes = {}
charStrings = self.getCompiledGlyphs()
for name, cs in charStrings.items():
bounds = cs.calcBounds(charStrings)
if bounds is not None:
rounded = []
for value in bounds[:2]:
rounded.append(toInt(value, math.floor))
for value in bounds[2:]:
rounded.append(toInt(value, math.ceil))
bounds = BoundingBox(*rounded)
if bounds == EMPTY_BOUNDING_BOX:
bounds = None
glyphBoxes[name] = bounds
return glyphBoxes
def getCharStringForGlyph(self, glyph, private, globalSubrs=None):
"""
Get a Type2CharString for the *glyph*
**This should not be called externally.** Subclasses
may override this method to handle the charstring creation
in a different way if desired.
"""
width = glyph.width
defaultWidth = private.defaultWidthX
nominalWidth = private.nominalWidthX
if width == defaultWidth:
# if width equals the default it can be omitted from charstring
width = None
else:
# subtract the nominal width
width -= nominalWidth
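# (Illustrative) with defaultWidthX=500 and nominalWidthX=600, a 500-unit
# glyph omits its width from the charstring entirely, while a 580-unit glyph
# stores 580 - 600 = -20.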
if width is not None:
width = otRound(width)
pen = T2CharStringPen(width, self.allGlyphs, roundTolerance=self.roundTolerance)
glyph.draw(pen)
charString = pen.getCharString(private, globalSubrs, optimize=self.optimizeCFF)
return charString
def setupTable_maxp(self):
"""Make the maxp table."""
if "maxp" not in self.tables:
return
self.otf["maxp"] = maxp = newTable("maxp")
maxp.tableVersion = 0x00005000
maxp.numGlyphs = len(self.glyphOrder)
def setupOtherTables(self):
self.setupTable_CFF()
if self.vertical:
self.setupTable_VORG()
def setupTable_CFF(self):
"""Make the CFF table."""
if not {"CFF", "CFF "}.intersection(self.tables):
return
self.otf["CFF "] = cff = newTable("CFF ")
cff = cff.cff
# set up the basics
cff.major = 1
cff.minor = 0
cff.hdrSize = 4
cff.offSize = 4
cff.fontNames = []
strings = IndexedStrings()
cff.strings = strings
private = PrivateDict(strings=strings)
private.rawDict.update(private.defaults)
globalSubrs = GlobalSubrsIndex(private=private)
topDict = TopDict(GlobalSubrs=globalSubrs, strings=strings)
topDict.Private = private
charStrings = topDict.CharStrings = CharStrings(
file=None,
charset=None,
globalSubrs=globalSubrs,
private=private,
fdSelect=None,
fdArray=None,
)
charStrings.charStringsAreIndexed = True
topDict.charset = []
charStringsIndex = charStrings.charStringsIndex = SubrsIndex(
private=private, globalSubrs=globalSubrs
)
cff.topDictIndex = topDictIndex = TopDictIndex()
topDictIndex.append(topDict)
topDictIndex.strings = strings
cff.GlobalSubrs = globalSubrs
# populate naming data
info = self.ufo.info
psName = getAttrWithFallback(info, "postscriptFontName")
cff.fontNames.append(psName)
topDict = cff.topDictIndex[0]
topDict.version = "%d.%d" % (
getAttrWithFallback(info, "versionMajor"),
getAttrWithFallback(info, "versionMinor"),
)
trademark = getAttrWithFallback(info, "trademark")
if trademark:
trademark = normalizeStringForPostscript(
trademark.replace("\u00A9", "Copyright")
)
if trademark != self.ufo.info.trademark:
logger.info(
"The trademark was normalized for storage in the "
"CFF table and consequently some characters were "
"dropped: '%s'",
trademark,
)
if trademark is None:
trademark = ""
topDict.Notice = trademark
copyright = getAttrWithFallback(info, "copyright")
if copyright:
copyright = normalizeStringForPostscript(
copyright.replace("\u00A9", "Copyright")
)
if copyright != self.ufo.info.copyright:
logger.info(
"The copyright was normalized for storage in the "
"CFF table and consequently some characters were "
"dropped: '%s'",
copyright,
)
if copyright is None:
copyright = ""
topDict.Copyright = copyright
topDict.FullName = getAttrWithFallback(info, "postscriptFullName")
topDict.FamilyName = getAttrWithFallback(
info, "openTypeNamePreferredFamilyName"
)
topDict.Weight = getAttrWithFallback(info, "postscriptWeightName")
# populate various numbers
topDict.isFixedPitch = getAttrWithFallback(info, "postscriptIsFixedPitch")
topDict.ItalicAngle = getAttrWithFallback(info, "italicAngle")
underlinePosition = getAttrWithFallback(info, "postscriptUnderlinePosition")
topDict.UnderlinePosition = otRound(underlinePosition)
underlineThickness = getAttrWithFallback(info, "postscriptUnderlineThickness")
topDict.UnderlineThickness = otRound(underlineThickness)
# populate font matrix
unitsPerEm = otRound(getAttrWithFallback(info, "unitsPerEm"))
topDict.FontMatrix = [1.0 / unitsPerEm, 0, 0, 1.0 / unitsPerEm, 0, 0]
# populate the width values
defaultWidthX, nominalWidthX = self.getDefaultAndNominalWidths()
if defaultWidthX:
private.rawDict["defaultWidthX"] = defaultWidthX
if nominalWidthX:
private.rawDict["nominalWidthX"] = nominalWidthX
# populate hint data
blueFuzz = otRound(getAttrWithFallback(info, "postscriptBlueFuzz"))
blueShift = otRound(getAttrWithFallback(info, "postscriptBlueShift"))
blueScale = getAttrWithFallback(info, "postscriptBlueScale")
forceBold = getAttrWithFallback(info, "postscriptForceBold")
blueValues = getAttrWithFallback(info, "postscriptBlueValues")
if isinstance(blueValues, list):
blueValues = [otRound(i) for i in blueValues]
otherBlues = getAttrWithFallback(info, "postscriptOtherBlues")
if isinstance(otherBlues, list):
otherBlues = [otRound(i) for i in otherBlues]
familyBlues = getAttrWithFallback(info, "postscriptFamilyBlues")
if isinstance(familyBlues, list):
familyBlues = [otRound(i) for i in familyBlues]
familyOtherBlues = getAttrWithFallback(info, "postscriptFamilyOtherBlues")
if isinstance(familyOtherBlues, list):
familyOtherBlues = [otRound(i) for i in familyOtherBlues]
stemSnapH = getAttrWithFallback(info, "postscriptStemSnapH")
if isinstance(stemSnapH, list):
stemSnapH = [otRound(i) for i in stemSnapH]
stemSnapV = getAttrWithFallback(info, "postscriptStemSnapV")
if isinstance(stemSnapV, list):
stemSnapV = [otRound(i) for i in stemSnapV]
# only write the blues data if some blues are defined.
if any((blueValues, otherBlues, familyBlues, familyOtherBlues)):
private.rawDict["BlueFuzz"] = blueFuzz
private.rawDict["BlueShift"] = blueShift
private.rawDict["BlueScale"] = blueScale
private.rawDict["ForceBold"] = forceBold
if blueValues:
private.rawDict["BlueValues"] = blueValues
if otherBlues:
private.rawDict["OtherBlues"] = otherBlues
if familyBlues:
private.rawDict["FamilyBlues"] = familyBlues
if familyOtherBlues:
private.rawDict["FamilyOtherBlues"] = familyOtherBlues
# only write the stems if both are defined.
if stemSnapH and stemSnapV:
private.rawDict["StemSnapH"] = stemSnapH
private.rawDict["StdHW"] = stemSnapH[0]
private.rawDict["StemSnapV"] = stemSnapV
private.rawDict["StdVW"] = stemSnapV[0]
# populate glyphs
cffGlyphs = self.getCompiledGlyphs()
for glyphName in self.glyphOrder:
charString = cffGlyphs[glyphName]
charString.private = private
charString.globalSubrs = globalSubrs
# add to the font
if glyphName in charStrings:
# XXX a glyph already has this name. should we choke?
glyphID = charStrings.charStrings[glyphName]
charStringsIndex.items[glyphID] = charString
else:
charStringsIndex.append(charString)
glyphID = len(topDict.charset)
charStrings.charStrings[glyphName] = glyphID
topDict.charset.append(glyphName)
topDict.FontBBox = self.fontBoundingBox
class OutlineTTFCompiler(BaseOutlineCompiler):
"""Compile a .ttf font with TrueType outlines."""
sfntVersion = "\000\001\000\000"
tables = BaseOutlineCompiler.tables | {"loca", "gasp", "glyf"}
def compileGlyphs(self):
"""Compile and return the TrueType glyphs for this font."""
allGlyphs = self.allGlyphs
ttGlyphs = {}
for name in self.glyphOrder:
glyph = allGlyphs[name]
pen = TTGlyphPen(allGlyphs)
try:
glyph.draw(pen)
except NotImplementedError:
logger.error("%r has invalid curve format; skipped", name)
ttGlyph = Glyph()
else:
ttGlyph = pen.glyph()
ttGlyphs[name] = ttGlyph
return ttGlyphs
def makeGlyphsBoundingBoxes(self):
"""Make bounding boxes for all the glyphs.
Return a dictionary of BoundingBox(xMin, xMax, yMin, yMax) namedtuples
keyed by glyph names.
The bounding box of empty glyphs (without contours or components) is
set to None.
"""
glyphBoxes = {}
ttGlyphs = self.getCompiledGlyphs()
for glyphName, glyph in ttGlyphs.items():
glyph.recalcBounds(ttGlyphs)
bounds = BoundingBox(glyph.xMin, glyph.yMin, glyph.xMax, glyph.yMax)
if bounds == EMPTY_BOUNDING_BOX:
bounds = None
glyphBoxes[glyphName] = bounds
return glyphBoxes
def setupTable_maxp(self):
"""Make the maxp table."""
if "maxp" not in self.tables:
return
self.otf["maxp"] = maxp = newTable("maxp")
maxp.tableVersion = 0x00010000
maxp.numGlyphs = len(self.glyphOrder)
maxp.maxZones = 1
maxp.maxTwilightPoints = 0
maxp.maxStorage = 0
maxp.maxFunctionDefs = 0
maxp.maxInstructionDefs = 0
maxp.maxStackElements = 0
maxp.maxSizeOfInstructions = 0
maxp.maxComponentElements = max(
len(g.components) for g in self.allGlyphs.values()
)
def setupTable_post(self):
"""Make a format 2 post table with the compiler's glyph order."""
super().setupTable_post()
if "post" not in self.otf:
return
post = self.otf["post"]
post.formatType = 2.0
post.extraNames = []
post.mapping = {}
post.glyphOrder = self.glyphOrder
def setupOtherTables(self):
self.setupTable_glyf()
if self.ufo.info.openTypeGaspRangeRecords:
self.setupTable_gasp()
def setupTable_glyf(self):
"""Make the glyf table."""
if not {"glyf", "loca"}.issubset(self.tables):
return
self.otf["loca"] = newTable("loca")
self.otf["glyf"] = glyf = newTable("glyf")
glyf.glyphs = {}
glyf.glyphOrder = self.glyphOrder
hmtx = self.otf.get("hmtx")
ttGlyphs = self.getCompiledGlyphs()
for name in self.glyphOrder:
ttGlyph = ttGlyphs[name]
if ttGlyph.isComposite() and hmtx is not None and self.autoUseMyMetrics:
self.autoUseMyMetrics(ttGlyph, name, hmtx)
glyf[name] = ttGlyph
@staticmethod
def autoUseMyMetrics(ttGlyph, glyphName, hmtx):
"""Set the "USE_MY_METRICS" flag on the first component having the
same advance width as the composite glyph, no transform and no
horizontal shift (but allow it to shift vertically).
This forces the composite glyph to use the possibly hinted horizontal
metrics of the sub-glyph, instead of those from the "hmtx" table.
"""
width = hmtx[glyphName][0]
for component in ttGlyph.components:
try:
baseName, transform = component.getComponentInfo()
except AttributeError:
# component uses '{first,second}Pt' instead of 'x' and 'y'
continue
try:
baseMetrics = hmtx[baseName]
except KeyError:
continue # ignore missing components
else:
if baseMetrics[0] == width and transform[:-1] == (1, 0, 0, 1, 0):
component.flags |= USE_MY_METRICS
break
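        # Illustrative note (hypothetical glyph names, not taken from any real
        # UFO): if a composite "aacute" has an hmtx advance of 550 and its "a"
        # component reports the same 550 advance with an identity transform
        # (1, 0, 0, 1, 0, dy) -- no scaling and no horizontal shift -- that
        # component receives USE_MY_METRICS and the search stops there.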
class StubGlyph:
"""
This object will be used to create missing glyphs
(specifically .notdef) in the provided UFO.
"""
def __init__(
self,
name,
width,
unitsPerEm,
ascender,
descender,
unicodes=None,
reverseContour=False,
):
self.name = name
self.width = width
self.unitsPerEm = unitsPerEm
self.ascender = ascender
self.descender = descender
self.unicodes = unicodes if unicodes is not None else []
self.components = []
self.anchors = []
if self.unicodes:
self.unicode = self.unicodes[0]
else:
self.unicode = None
if name == ".notdef":
self.draw = self._drawDefaultNotdef
self.reverseContour = reverseContour
def __len__(self):
if self.name == ".notdef":
return 1
return 0
@property
def height(self):
return self.ascender - self.descender
def draw(self, pen):
pass
def _drawDefaultNotdef(self, pen):
# Draw contour in PostScript direction (counter-clockwise) by default. Reverse
# for TrueType.
if self.reverseContour:
pen = ReverseContourPen(pen)
width = otRound(self.unitsPerEm * 0.5)
stroke = otRound(self.unitsPerEm * 0.05)
ascender = self.ascender
descender = self.descender
xMin = stroke
xMax = width - stroke
yMax = ascender
yMin = descender
pen.moveTo((xMin, yMin))
pen.lineTo((xMax, yMin))
pen.lineTo((xMax, yMax))
pen.lineTo((xMin, yMax))
pen.lineTo((xMin, yMin))
pen.closePath()
xMin += stroke
xMax -= stroke
yMax -= stroke
yMin += stroke
pen.moveTo((xMin, yMin))
pen.lineTo((xMin, yMax))
pen.lineTo((xMax, yMax))
pen.lineTo((xMax, yMin))
pen.lineTo((xMin, yMin))
pen.closePath()
def _get_controlPointBounds(self):
pen = ControlBoundsPen(None)
self.draw(pen)
return pen.bounds
controlPointBounds = property(_get_controlPointBounds)
| mit | 6,340,247,939,920,733,000 | 36.062207 | 88 | 0.600022 | false | 3.953196 | false | false | false |
only4hj/fast-rcnn | lib/roi_data_layer/minibatch.py | 1 | 22641 | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""Compute minibatch blobs for training a Fast R-CNN network."""
import numpy as np
import numpy.random as npr
import cv2
from fast_rcnn.config import cfg
from utils.blob import prep_im_for_blob, im_list_to_blob
from utils.model import last_conv_size
from roi_data_layer.roidb import prepare_one_roidb_rpn, prepare_one_roidb_frcnn
from roidb import clear_one_roidb
def get_minibatch(roidb, num_classes, bbox_means, bbox_stds, proposal_file):
"""Given a roidb, construct a minibatch sampled from it."""
num_images = len(roidb)
# Sample random scales to use for each image in this batch
random_scale_inds = npr.randint(0, high=len(cfg.TRAIN.SCALES),
size=num_images)
assert(cfg.TRAIN.BATCH_SIZE % num_images == 0), \
'num_images ({}) must divide BATCH_SIZE ({})'. \
format(num_images, cfg.TRAIN.BATCH_SIZE)
rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images
fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)
# Get the input image blob, formatted for caffe
im_blob, im_scales, processed_ims = _get_image_blob(roidb, random_scale_inds)
if 'model_to_use' in roidb[0] and roidb[0]['model_to_use'] == 'rpn':
conv_h, scale_h = last_conv_size(im_blob.shape[2], cfg.MODEL_NAME)
conv_w, scale_w = last_conv_size(im_blob.shape[3], cfg.MODEL_NAME)
# Now, build the region of interest and label blobs
rois_blob = np.zeros((0, 5), dtype=np.float32)
labels_blob = np.zeros((0, 9, conv_h, conv_w), dtype=np.float32)
bbox_targets_blob = np.zeros((0, 36, conv_h, conv_w), dtype=np.float32)
bbox_loss_blob = np.zeros(bbox_targets_blob.shape, dtype=np.float32)
all_overlaps = []
for im_i in xrange(num_images):
if cfg.TRAIN.LAZY_PREPARING_ROIDB:
prepare_one_roidb_rpn(roidb[im_i],
processed_ims[im_i].shape[0],
processed_ims[im_i].shape[1],
im_scales[im_i])
# Normalize bbox_targets
if cfg.TRAIN.NORMALIZE_BBOX:
bbox_targets = roidb[im_i]['bbox_targets']
cls_inds = np.where(bbox_targets[:, 0] > 0)[0]
if cls_inds.size > 0:
bbox_targets[cls_inds, 1:] -= bbox_means[0, :]
bbox_targets[cls_inds, 1:] /= bbox_stds[0, :]
labels, overlaps, im_rois, bbox_targets, bbox_loss \
= _sample_rois_rpn(roidb[im_i], fg_rois_per_image, rois_per_image,
num_classes, conv_h, conv_w)
# Add to RoIs blob
            if im_rois is not None:
batch_ind = im_i * np.ones((im_rois.shape[0], 1))
rois_blob_this_image = np.hstack((batch_ind, im_rois))
rois_blob = np.vstack((rois_blob, rois_blob_this_image))
# Add to labels, bbox targets, and bbox loss blobs
labels_blob = np.vstack((labels_blob, labels))
bbox_targets_blob = np.vstack((bbox_targets_blob, bbox_targets))
bbox_loss_blob = np.vstack((bbox_loss_blob, bbox_loss))
# For debug visualizations
#_vis_minibatch_rpn(im_blob, conv_h, conv_w, rois_blob, labels_blob, roidb, bbox_targets_blob, bbox_loss_blob)
blobs = {'data': im_blob,
'labels': labels_blob}
else:
# Now, build the region of interest and label blobs
rois_blob = np.zeros((0, 5), dtype=np.float32)
labels_blob = np.zeros((0), dtype=np.float32)
bbox_targets_blob = np.zeros((0, 4 * num_classes), dtype=np.float32)
bbox_loss_blob = np.zeros(bbox_targets_blob.shape, dtype=np.float32)
all_overlaps = []
for im_i in xrange(num_images):
if cfg.TRAIN.LAZY_PREPARING_ROIDB:
prepare_one_roidb_frcnn(roidb[im_i], proposal_file, num_classes)
# Normalize bbox_targets
if cfg.TRAIN.NORMALIZE_BBOX:
bbox_targets = roidb[im_i]['bbox_targets']
for cls in xrange(1, num_classes):
cls_inds = np.where(bbox_targets[:, 0] == cls)[0]
bbox_targets[cls_inds, 1:] -= bbox_means[cls, :]
bbox_targets[cls_inds, 1:] /= bbox_stds[cls, :]
labels, overlaps, im_rois, bbox_targets, bbox_loss \
= _sample_rois(roidb[im_i], fg_rois_per_image, rois_per_image,
num_classes)
# Add to RoIs blob
rois = _project_im_rois(im_rois, im_scales[im_i])
batch_ind = im_i * np.ones((rois.shape[0], 1))
rois_blob_this_image = np.hstack((batch_ind, rois))
rois_blob = np.vstack((rois_blob, rois_blob_this_image))
# Add to labels, bbox targets, and bbox loss blobs
labels_blob = np.hstack((labels_blob, labels))
bbox_targets_blob = np.vstack((bbox_targets_blob, bbox_targets))
bbox_loss_blob = np.vstack((bbox_loss_blob, bbox_loss))
#all_overlaps = np.hstack((all_overlaps, overlaps))
# For debug visualizations
#_vis_minibatch(im_blob, rois_blob, labels_blob, all_overlaps)
blobs = {'data': im_blob,
'rois': rois_blob,
'labels': labels_blob}
if cfg.TRAIN.BBOX_REG:
blobs['bbox_targets'] = bbox_targets_blob
blobs['bbox_loss_weights'] = bbox_loss_blob
return blobs
def clear_minibatch(roidb):
num_images = len(roidb)
for im_i in xrange(num_images):
clear_one_roidb(roidb[im_i])
def _sample_rois(roidb, fg_rois_per_image, rois_per_image, num_classes):
"""Generate a random sample of RoIs comprising foreground and background
examples.
"""
# label = class RoI has max overlap with
labels = roidb['max_classes']
overlaps = roidb['max_overlaps']
rois = roidb['boxes']
# Select foreground RoIs as those with >= FG_THRESH overlap
fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0]
# Guard against the case when an image has fewer than fg_rois_per_image
# foreground RoIs
fg_rois_per_this_image = np.minimum(fg_rois_per_image, fg_inds.size)
# Sample foreground regions without replacement
if fg_inds.size > 0:
fg_inds = npr.choice(fg_inds, size=fg_rois_per_this_image,
replace=False)
# Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) &
(overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
# Compute number of background RoIs to take from this image (guarding
# against there being fewer than desired)
bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image
bg_rois_per_this_image = np.minimum(bg_rois_per_this_image,
bg_inds.size)
# Sample foreground regions without replacement
if bg_inds.size > 0:
bg_inds = npr.choice(bg_inds, size=bg_rois_per_this_image,
replace=False)
# The indices that we're selecting (both fg and bg)
keep_inds = np.append(fg_inds, bg_inds)
# Select sampled values from various arrays:
labels = labels[keep_inds]
# Clamp labels for the background RoIs to 0
labels[fg_rois_per_this_image:] = 0
overlaps = overlaps[keep_inds]
rois = rois[keep_inds]
bbox_targets, bbox_loss_weights = \
_get_bbox_regression_labels(roidb['bbox_targets'][keep_inds, :],
num_classes)
return labels, overlaps, rois, bbox_targets, bbox_loss_weights
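# Rough numeric sketch (assuming the stock Fast R-CNN config values
# TRAIN.BATCH_SIZE=128, TRAIN.FG_FRACTION=0.25 and two images per minibatch):
# each image contributes rois_per_image=64 RoIs, of which at most 16 are
# foreground (overlap >= FG_THRESH) and the remainder are drawn from the
# background overlap range [BG_THRESH_LO, BG_THRESH_HI).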
def get_img_rect(img_height, img_width, conv_height, conv_width, axis1, axis2, axis3):
anchors = np.array([[128*2, 128*1], [128*1, 128*1], [128*1, 128*2],
[256*2, 256*1], [256*1, 256*1], [256*1, 256*2],
[512*2, 512*1], [512*1, 512*1], [512*1, 512*2]])
scale_width = img_width / conv_width
scale_height = img_height / conv_height
img_center_x = img_width * axis3 / conv_width + scale_width / 2
img_center_y = img_height * axis2 / conv_height + scale_height / 2
anchor_size = anchors[axis1]
img_x1 = img_center_x - anchor_size[0] / 2
img_x2 = img_center_x + anchor_size[0] / 2
img_y1 = img_center_y - anchor_size[1] / 2
img_y2 = img_center_y + anchor_size[1] / 2
return [img_x1, img_y1, img_x2, img_y2]
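# Worked example (hypothetical sizes): for a 608x800 image over a 38x50
# feature map the stride is 16 pixels in each direction, so
# get_img_rect(608, 800, 38, 50, 4, 10, 20) centers the 256x256 anchor
# (index 4) at image point (328, 168) and returns [200, 40, 456, 296].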
def _sample_rois_rpn(roidb, fg_rois_per_image, rois_per_image, num_classes,
union_conv_height, union_conv_width):
"""Generate a random sample of RoIs comprising foreground and background
examples.
"""
# label = class RoI has max overlap with
labels = roidb['max_classes']
new_labels = np.zeros(labels.shape, dtype=np.int16)
new_labels.fill(-1)
bbox_target = roidb['bbox_targets']
new_bbox_target = np.zeros(bbox_target.shape, dtype=np.float32)
conv_width = roidb['conv_width']
conv_height = roidb['conv_height']
# Select foreground RoIs as those with >= FG_THRESH overlap
fg_inds = np.where(labels > 0)[0]
# Guard against the case when an image has fewer than fg_rois_per_image
# foreground RoIs
fg_rois_per_this_image = np.minimum(fg_rois_per_image, fg_inds.size)
# Sample foreground regions without replacement
if fg_inds.size > 0:
fg_inds = npr.choice(fg_inds, size=fg_rois_per_this_image,
replace=False)
# Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
bg_inds = np.where(labels == 0)[0]
# Compute number of background RoIs to take from this image (guarding
# against there being fewer than desired)
bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image
bg_rois_per_this_image = np.minimum(bg_rois_per_this_image,
bg_inds.size)
# Sample foreground regions without replacement
if bg_inds.size > 0:
bg_inds = npr.choice(bg_inds, size=bg_rois_per_this_image,
replace=False)
new_labels[fg_inds] = 1
new_labels[bg_inds] = 0
if 'rois' in roidb:
rois = roidb['rois'][fg_inds]
else:
rois = None
"""
print 'labels.shape %s' % labels.shape
print 'bbox_target.shape %s' % (bbox_target.shape, )
for fg_ind in fg_inds:
print 'label : %s ' % labels[fg_ind]
print 'bbox_target : %s ' % bbox_target[fg_ind]
axis1 = fg_ind / conv_height / conv_width
axis2 = fg_ind / conv_width % conv_height
axis3 = fg_ind % conv_width
im = cv2.imread(roidb['image'])
target_size = cfg.TRAIN.SCALES[0]
im, im_scale = prep_im_for_blob(im, 0, target_size,
cfg.TRAIN.MAX_SIZE,
cfg.TRAIN.MIN_SIZE)
img_height = im.shape[2]
img_width = im.shape[3]
proposal_rects = get_img_rect(img_height, img_width, conv_height, conv_width, axis1, axis2, axis3)
for proposal_rect in proposal_rects:
plt.imshow(im)
for ground_rect in ground_rects:
plt.gca().add_patch(
plt.Rectangle((ground_rect[0], ground_rect[1]), ground_rect[2] - ground_rect[0],
ground_rect[3] - ground_rect[1], fill=False,
edgecolor='b', linewidth=3)
)
plt.gca().add_patch(
plt.Rectangle((proposal_rect[0], proposal_rect[1]), proposal_rect[2] - proposal_rect[0],
proposal_rect[3] - proposal_rect[1], fill=False,
edgecolor='g', linewidth=3)
)
plt.gca().add_patch(
plt.Rectangle((pred_rect[0], pred_rect[1]), pred_rect[2] - pred_rect[0],
pred_rect[3] - pred_rect[1], fill=False,
edgecolor='r', linewidth=3)
)
plt.show(block=False)
raw_input("")
plt.close()
"""
new_bbox_target[fg_inds] = bbox_target[fg_inds]
new_bbox_target, bbox_loss_weights = \
_get_bbox_regression_labels_rpn(new_bbox_target,
num_classes, labels)
"""
print 'label no 1 : %s' % len(np.where(new_labels == 1)[0])
print 'new_bbox_target no 1 : %s' % len(np.where(new_bbox_target != 0)[0])
print 'bbox_loss_weights no 1 : %s' % len(np.where(bbox_loss_weights > 0)[0])
"""
new_labels = new_labels.reshape((1, 9, conv_height, conv_width))
new_bbox_target = new_bbox_target.reshape((1, 9, conv_height, conv_width, 4))
new_bbox_target = new_bbox_target.transpose(0, 1, 4, 2, 3)
new_bbox_target = new_bbox_target.reshape((1, 36, conv_height, conv_width))
bbox_loss_weights = bbox_loss_weights.reshape((1, 9, conv_height, conv_width, 4))
bbox_loss_weights = bbox_loss_weights.transpose(0, 1, 4, 2, 3)
bbox_loss_weights = bbox_loss_weights.reshape((1, 36, conv_height, conv_width))
output_labels = np.zeros((1, 9, union_conv_height, union_conv_width))
output_bbox_targets = np.zeros((1, 36, union_conv_height, union_conv_width))
output_bbox_loss_weights = np.zeros((1, 36, union_conv_height, union_conv_width))
output_labels.fill(-1)
output_labels[:, :, 0:conv_height, 0:conv_width] = new_labels
output_bbox_targets[:, :, 0:conv_height, 0:conv_width] = new_bbox_target
output_bbox_loss_weights[:, :, 0:conv_height, 0:conv_width] = bbox_loss_weights
"""
for fg_ind in fg_inds:
if fg_ind == 6510:
axis1 = fg_ind / conv_height / conv_width
axis2 = fg_ind / conv_width % conv_height
axis3 = fg_ind % conv_width
print ''
print 'conv_size : %s, %s' % (conv_height, conv_width)
print 'axis : %s, %s, %s' % (axis1, axis2, axis3)
print 'output_labels[%s] : %s' % (fg_ind, output_labels[0, axis1, axis2, axis3])
print 'output_bbox_targets[%s] : %s' % (fg_ind, output_bbox_targets[0, axis1*4:axis1*4+4, axis2, axis3])
print 'output_bbox_loss_weights[%s] : %s' % (fg_ind, output_bbox_loss_weights[0, axis1*4:axis1*4+4, axis2, axis3])
"""
"""
# Generate positive rois based on index for debugging
anchors = [[128*2, 128*1], [128*1, 128*1], [128*1, 128*2],
[256*2, 256*1], [256*1, 256*1], [256*1, 256*2],
[512*2, 512*1], [512*1, 512*1], [512*1, 512*2]]
conv_scale_width = roidb['conv_scale_width']
conv_scale_height = roidb['conv_scale_height']
rois = np.zeros((len(fg_inds), 4), dtype=np.int16)
for i, fg_ind in enumerate(fg_inds):
center_x = fg_ind % conv_width
center_y = (fg_ind - center_x) / conv_width % conv_height
anchor = fg_ind / conv_height / conv_width
anchor_w = anchors[anchor][0]
anchor_h = anchors[anchor][1]
x1 = center_x * conv_scale_width - anchor_w / 2
y1 = center_y * conv_scale_height - anchor_h / 2
x2 = x1 + anchor_w
y2 = y1 + anchor_h
rois[i, :] = x1, y1, x2, y2
"""
"""
pos_labels = np.where(new_labels == 1)
i = 0
for d0, d1, d2, d3 in zip(pos_labels[0], pos_labels[1], pos_labels[2], pos_labels[3]):
print '[%s] label : %s, bbox_target : %s, bbox_loss_weights : %s' % (i, new_labels[d0, d1, d2, d3],
new_bbox_target[d0, d1*4 : d1*4+4, d2, d3],
bbox_loss_weights[d0, d1*4 : d1*4+4, d2, d3])
i += 1
"""
"""
print 'label no 2 : %s' % len(np.where(output_labels == 1)[0])
print 'new_bbox_target no 2 : %s' % len(np.where(output_bbox_targets != 0)[0])
print 'bbox_loss_weights no 2 : %s' % len(np.where(output_bbox_loss_weights > 0)[0])
"""
return output_labels, None, rois, output_bbox_targets, output_bbox_loss_weights
def _get_image_blob(roidb, scale_inds):
"""Builds an input blob from the images in the roidb at the specified
scales.
"""
num_images = len(roidb)
processed_ims = []
im_scales = []
for i in xrange(num_images):
im = cv2.imread(roidb[i]['image'])
if roidb[i]['flipped']:
im = im[:, ::-1, :]
target_size = cfg.TRAIN.SCALES[scale_inds[i]]
im, im_scale = prep_im_for_blob(im, cfg.PIXEL_MEANS, target_size,
cfg.TRAIN.MAX_SIZE,
cfg.TRAIN.MIN_SIZE)
im_scales.append(im_scale)
processed_ims.append(im)
# Create a blob to hold the input images
blob = im_list_to_blob(processed_ims)
return blob, im_scales, processed_ims
def _project_im_rois(im_rois, im_scale_factor):
"""Project image RoIs into the rescaled training image."""
rois = im_rois * im_scale_factor
return rois
def _get_bbox_regression_labels(bbox_target_data, num_classes):
"""Bounding-box regression targets are stored in a compact form in the
roidb.
This function expands those targets into the 4-of-4*K representation used
by the network (i.e. only one class has non-zero targets). The loss weights
are similarly expanded.
Returns:
bbox_target_data (ndarray): N x 4K blob of regression targets
bbox_loss_weights (ndarray): N x 4K blob of loss weights
"""
clss = bbox_target_data[:, 0]
bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32)
bbox_loss_weights = np.zeros(bbox_targets.shape, dtype=np.float32)
inds = np.where(clss > 0)[0]
for ind in inds:
cls = clss[ind]
start = 4 * cls
end = start + 4
bbox_targets[ind, start:end] = bbox_target_data[ind, 1:]
bbox_loss_weights[ind, start:end] = [1., 1., 1., 1.]
return bbox_targets, bbox_loss_weights
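# Small illustration (hypothetical values): with num_classes=3, a compact row
# [2., dx, dy, dw, dh] expands into a 1 x 12 bbox_targets row whose columns
# 8:12 hold (dx, dy, dw, dh); bbox_loss_weights is 1. in those four columns
# and 0. everywhere else.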
def _get_bbox_regression_labels_rpn(bbox_target_data, num_classes, labels):
"""Bounding-box regression targets are stored in a compact form in the
roidb.
This function expands those targets into the 4-of-4*K representation used
by the network (i.e. only one class has non-zero targets). The loss weights
are similarly expanded.
Returns:
bbox_target_data (ndarray): N x 4K blob of regression targets
bbox_loss_weights (ndarray): N x 4K blob of loss weights
"""
clss = bbox_target_data[:, 0]
bbox_targets = np.zeros((clss.size, 4), dtype=np.float32)
bbox_loss_weights = np.zeros(bbox_targets.shape, dtype=np.float32)
inds = np.where(clss > 0)[0]
#print ''
#print 'len(inds) : %s' % len(inds)
for ind in inds:
bbox_targets[ind, :] = bbox_target_data[ind, 1:]
bbox_loss_weights[ind, :] = [1., 1., 1., 1.]
#print 'bbox_targets[ind, :] : %s - %s ' % (bbox_target_data[ind, 0], bbox_targets[ind, :])
return bbox_targets, bbox_loss_weights
def _vis_minibatch(im_blob, rois_blob, labels_blob, overlaps):
"""Visualize a mini-batch for debugging."""
import matplotlib.pyplot as plt
for i in xrange(rois_blob.shape[0]):
rois = rois_blob[i, :]
im_ind = rois[0]
roi = rois[1:]
im = im_blob[im_ind, :, :, :].transpose((1, 2, 0)).copy()
im += cfg.PIXEL_MEANS
im = im[:, :, (2, 1, 0)]
im = im.astype(np.uint8)
cls = labels_blob[i]
plt.imshow(im)
print 'class: ', cls, ' overlap: ', overlaps[i]
plt.gca().add_patch(
plt.Rectangle((roi[0], roi[1]), roi[2] - roi[0],
roi[3] - roi[1], fill=False,
edgecolor='r', linewidth=3)
)
plt.show()
def _vis_minibatch_rpn(im_blob, conv_h, conv_w, rois_blob, labels_blob, roidb, bbox_targets_blob, bbox_loss_blob):
"""Visualize a mini-batch for debugging."""
import matplotlib.pyplot as plt
for i in xrange(len(roidb)):
# DJDJ
#if roidb[i]['image'].endswith('000009.jpg') == False:
# continue
print 'image : %s' % roidb[i]['image']
resized_gt_boxes = roidb[int(i)]['resized_gt_boxes']
im = im_blob[i, :, :, :].transpose((1, 2, 0)).copy()
im += cfg.PIXEL_MEANS
im = im[:, :, (2, 1, 0)]
im = im.astype(np.uint8)
for j in range(9):
for k in range(labels_blob.shape[2]):
for l in range(labels_blob.shape[3]):
label = labels_blob[i][j][k][l]
if label == -1:
continue
elif label == 1:
color = 'g'
elif label == 0:
#color = 'y'
continue
plt.imshow(im)
for resized_gt_box in resized_gt_boxes:
resized_gt_box = resized_gt_box.astype(np.int)
plt.gca().add_patch(
plt.Rectangle((resized_gt_box[0], resized_gt_box[1]), resized_gt_box[2] - resized_gt_box[0],
resized_gt_box[3] - resized_gt_box[1], fill=False,
edgecolor='b', linewidth=3)
)
proposal_rects = get_img_rect(im.shape[0], im.shape[1], conv_h, conv_w, j, k, l)
plt.gca().add_patch(
plt.Rectangle((proposal_rects[0], proposal_rects[1]), proposal_rects[2] - proposal_rects[0],
proposal_rects[3] - proposal_rects[1], fill=False,
edgecolor=color, linewidth=3)
)
plt.show(block=False)
raw_input("")
plt.close()
| mit | 2,063,657,557,521,492,200 | 40.619485 | 126 | 0.548209 | false | 3.342338 | false | false | false |
Princeton-CDH/derrida-django | derrida/outwork/migrations/0001_initial.py | 1 | 1186 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-17 18:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mezzanine.core.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('people', '0002_allow_neg_years_bc'),
('pages', '0004_auto_20170411_0504'),
]
operations = [
migrations.CreateModel(
name='Outwork',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='pages.Page')),
('content', mezzanine.core.fields.RichTextField(verbose_name='Content')),
('orig_pubdate', models.DateField(blank=True, null=True, verbose_name='Original Publication Date')),
('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='people.Person')),
],
options={
'ordering': ('_order',),
},
bases=('pages.page', models.Model),
),
]
| apache-2.0 | 759,727,622,976,371,800 | 34.939394 | 185 | 0.604553 | false | 3.953333 | false | false | false |
phalcon/readthedocs.org | readthedocs/vcs_support/backends/hg.py | 1 | 3235 | import csv
from StringIO import StringIO
from projects.exceptions import ProjectImportError
from vcs_support.base import BaseVCS, VCSVersion
class Backend(BaseVCS):
supports_tags = True
supports_branches = True
fallback_branch = 'default'
def update(self):
super(Backend, self).update()
retcode = self.run('hg', 'status')[0]
if retcode == 0:
return self.pull()
else:
return self.clone()
def pull(self):
pull_output = self.run('hg', 'pull')
if pull_output[0] != 0:
raise ProjectImportError(
("Failed to get code from '%s' (hg pull): %s"
% (self.repo_url, pull_output[0]))
)
        update_output = self.run('hg', 'update', '-C')
        if update_output[0] != 0:
            raise ProjectImportError(
                ("Failed to get code from '%s' (hg update): %s"
                 % (self.repo_url, update_output[0]))
            )
return update_output
def clone(self):
output = self.run('hg', 'clone', self.repo_url, '.')
if output[0] != 0:
raise ProjectImportError(
("Failed to get code from '%s' (hg clone): %s"
% (self.repo_url, output[0]))
)
return output
@property
def branches(self):
retcode, stdout = self.run('hg', 'branches', '-q')[:2]
# error (or no tags found)
if retcode != 0:
return []
return self.parse_branches(stdout)
def parse_branches(self, data):
"""
stable
default
"""
names = [name.lstrip() for name in data.splitlines()]
return [VCSVersion(self, name, name) for name in names if name]
@property
def tags(self):
retcode, stdout = self.run('hg', 'tags')[:2]
# error (or no tags found)
if retcode != 0:
return []
return self.parse_tags(stdout)
def parse_tags(self, data):
"""
        Parses output of `hg tags`, eg:
tip 278:c4b2d21db51a
0.2.2 152:6b0364d98837
0.2.1 117:a14b7b6ffa03
0.1 50:30c2c6b3a055
"""
# parse the lines into a list of tuples (commit-hash, tag ref name)
raw_tags = csv.reader(StringIO(data), delimiter=' ')
vcs_tags = []
for row in raw_tags:
row = filter(lambda f: f != '', row)
if row == []:
continue
name, commit = row
if name == 'tip':
continue
revision, commit_hash = commit.split(':')
vcs_tags.append(VCSVersion(self, commit_hash, name))
return vcs_tags
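    # For instance (sample data): feeding parse_tags a line like
    # "0.2.2    152:6b0364d98837" yields a VCSVersion whose identifier is the
    # hash "6b0364d98837" and whose verbose name is "0.2.2"; the "tip" entry
    # is skipped.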
def checkout(self, identifier=None):
super(Backend, self).checkout()
if not identifier:
identifier = 'tip'
retcode = self.run('hg', 'status')[0]
if retcode == 0:
self.run('hg', 'pull')
return self.run('hg', 'update', '-C', identifier)
else:
self.clone()
return self.run('hg', 'update', '-C', identifier)
| mit | -1,967,239,795,073,363,200 | 30.407767 | 75 | 0.4983 | false | 4.048811 | false | false | false |
Crystalnix/house-of-life-chromium | chrome/test/functional/chromeos_security.py | 1 | 1213 | #!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional
import pyauto
class ChromeosSecurity(pyauto.PyUITest):
"""Security tests for chrome on ChromeOS.
Requires ChromeOS to be logged in.
"""
def ExtraChromeFlagsOnChromeOS(self):
"""Override default list of extra flags typicall used with automation.
See the default flags used with automation in pyauto.py.
Chrome flags for this test should be as close to reality as possible.
"""
return [
'--homepage=about:blank',
]
def testCannotViewLocalFiles(self):
"""Verify that local files cannot be accessed from the browser."""
urls_and_titles = {
'file:///': 'Index of /',
'file:///etc/': 'Index of /etc/',
self.GetFileURLForDataPath('title2.html'): 'Title Of Awesomeness',
}
for url, title in urls_and_titles.iteritems():
self.NavigateToURL(url)
self.assertNotEqual(title, self.GetActiveTabTitle(),
msg='Could access local file %s.' % url)
if __name__ == '__main__':
pyauto_functional.Main()
| bsd-3-clause | -6,535,854,804,755,249,000 | 27.880952 | 74 | 0.677659 | false | 3.863057 | false | false | false |
sotlampr/personal-site | app/blog/views.py | 1 | 3882 | from datetime import datetime
from flask import abort, flash, request, render_template, redirect, url_for
from flask.ext.login import login_required, current_user
from sqlalchemy import desc
from . import blog
from .forms import PostForm, DeleteForm
from ..models import db, Post
@blog.route('/')
@blog.route('/index')
@blog.route('/page/<int:page>')
def index(page=1):
posts = (Post.query
.filter_by(is_published=True)
.order_by(desc(Post.timestamp))
.paginate(page, 3))
return render_template('blog/index.html', posts=posts)
@blog.route('/new', methods=['GET', 'POST'])
@login_required
def new():
form = PostForm(request.form)
if form.validate_on_submit():
post = Post(title=form.title.data, body=form.body.data,
timestamp=datetime.utcnow(),
user_id=int(current_user.get_id()),
is_published=form.is_published.data)
db.session.add(post)
db.session.commit()
if form.is_published.data:
flash("Post is now published.")
else:
flash("Post updated")
return redirect('blog/'+post.slug)
return render_template('blog/edit.html', form=form)
@blog.route('/<slug>/')
def show_post(slug):
post = Post.query.filter_by(slug=slug).first()
if post is not None:
if post.is_published:
return render_template('blog/post.html', post=post)
else:
if current_user.is_authenticated:
flash("This post is unpublished.")
return render_template('blog/post.html', post=post)
else:
abort(401)
else:
abort(404)
@blog.route('/<slug>/edit', methods=['GET', 'POST'])
@login_required
def edit_post(slug):
post = Post.query.filter_by(slug=slug).first()
if post is not None:
if request.method == 'GET':
form = PostForm(obj=post)
return render_template('blog/edit.html', form=form)
else:
form = PostForm(request.form)
post.title = form.title.data
post.body = form.body.data
post.is_published = form.is_published.data
post.user_id = current_user.get_id()
db.session.commit()
flash("Post updated.")
return redirect(url_for('blog.show_post', slug=post.slug))
else:
abort(404)
@blog.route('/<slug>/delete', methods=['GET', 'POST'])
@login_required
def delete_post(slug):
form = DeleteForm(request.form)
post = Post.query.filter_by(slug=slug).first()
if post is not None:
if form.validate_on_submit():
db.session.delete(post)
db.session.commit()
return redirect(url_for('blog.index'))
else:
return render_template("blog/delete.html", form=form)
else:
abort(404)
"""
@blog.route('/search/<search_terms>/')
def search():
return render_template('blog/search.html')
"""
@blog.route('/archive')
@blog.route('/archive/page/<int:page>')
def archive(page=1):
posts = (Post.query
.filter_by(is_published=True)
.order_by(desc(Post.timestamp))
.paginate(page, 10))
return render_template('blog/archive.html',
head_title="Blog Archive",
header_title="Archives",
posts=posts)
@blog.route('/unpublished')
@login_required
def show_unpublished():
posts = (Post.query
.filter_by(is_published=False)
.order_by(desc(Post.timestamp))
.paginate(1, 10))
return render_template('blog/archive.html',
head_title="Administration",
header_title="Unpublished posts",
posts=posts)
| mit | 6,123,719,996,085,297,000 | 29.328125 | 75 | 0.569809 | false | 3.843564 | false | false | false |
b-jazz/stravamous | src/converter.py | 1 | 1697 | import logging
import os
import subprocess
def out_for_in(in_path):
return '{0}.gpx'.format(os.path.splitext(os.path.basename(in_path))[0])
class Converter(object):
def __init__(self, config, input_path):
self.config = config
self.logger = logging.getLogger()
self.input_file = input_path
self.output_file = os.path.expanduser(os.path.join(self.config.storage_root,
'converted_files',
out_for_in(self.input_file)))
self.logger.debug('Created converter object with input_file of {0} and output_file of {1}'.format(self.input_file, self.output_file))
self.gpx_text = None
def convert(self):
command = [self.config.gpsbabel_cmd,
'-i', 'garmin_fit',
'-f', self.input_file,
'-o', 'gpx,garminextensions',
'-F', self.output_file]
self.logger.debug('starting subprocess with command: {0}'.format(command))
try:
subprocess.call(command)
self.logger.debug('Happily done with the conversion. No exceptions.')
except Exception as exception:
self.logger.error('CONVERTER EXCEPTION: {0}'.format(exception))
# raise
else:
self.logger.debug('Opening {0} for read'.format(self.output_file))
try:
self.gpx_text = open(self.output_file, 'r').read()
except Exception as exception:
self.logger.error('open().read() exception: {0}, of type: {1}'.format(exception, exception.args))
raise
| lgpl-3.0 | -3,339,667,609,686,575,000 | 40.390244 | 141 | 0.551562 | false | 4.059809 | false | false | false |
jesonyang001/qarepo | askbot/__init__.py | 1 | 3117 | """
:synopsis: the Django Q&A forum application
Functions in the askbot module perform various
basic actions on behalf of the forum application
"""
import os
import platform
VERSION = (0, 7, 51)
#keys are module names used by python imports,
#values - the package qualifier to use for pip
REQUIREMENTS = {
'akismet': 'akismet',
'avatar': 'django-avatar>=2.0',
'bs4': 'beautifulsoup4',
'coffin': 'Coffin>=0.3,<=0.3.8',
'compressor': 'django-compressor==1.2',
'django': 'django>=1.3.1,<1.6',
'django_countries': 'django-countries==1.0.5',
'djcelery': 'django-celery>=3.0.11',
'djkombu': 'django-kombu==0.9.4',
'followit': 'django-followit==0.0.7',
'html5lib': 'html5lib==0.90',
'jinja2': 'Jinja2',
'keyedcache': 'django-keyedcache',
'longerusername': 'longerusername',
'markdown2': 'markdown2',
'oauth2': 'oauth2',
'openid': 'python-openid',
'picklefield': 'django-picklefield==0.3.0',
'jwt': 'pyjwt',
'pystache': 'pystache==0.3.1',
'pytz': 'pytz==2013b',
'recaptcha_works': 'django-recaptcha-works',
'robots': 'django-robots',
'sanction': 'sanction==0.3.1',
'south': 'South>=0.7.1',
'threaded_multihost': 'django-threaded-multihost',
'tinymce': 'django-tinymce==1.5.1b2',
'unidecode': 'unidecode',
#'stopforumspam': 'stopforumspam'
}
if platform.system() != 'Windows':
REQUIREMENTS['lamson'] = 'Lamson'
#necessary for interoperability of django and coffin
try:
from askbot import patches
from askbot.deployment.assertions import assert_package_compatibility
assert_package_compatibility()
patches.patch_django()
patches.patch_coffin() # must go after django
except ImportError:
pass
def get_install_directory():
"""returns path to directory
where code of the askbot django application
is installed
"""
return os.path.dirname(__file__)
def get_path_to(relative_path):
"""returns absolute path to a file
relative to ``askbot`` directory
``relative_path`` must use only forward slashes
and must not start with a slash
"""
root_dir = get_install_directory()
    assert(relative_path[0] != '/')
path_bits = relative_path.split('/')
return os.path.join(root_dir, *path_bits)
def get_version():
"""returns version of the askbot app
this version is meaningful for pypi only
"""
return '.'.join([str(subversion) for subversion in VERSION])
def get_database_engine_name():
"""returns name of the database engine,
independently of the version of django
- for django >=1.2 looks into ``settings.DATABASES['default']``,
(i.e. assumes that askbot uses database named 'default')
, and for django 1.1 and below returns settings.DATABASE_ENGINE
"""
import django
from django.conf import settings as django_settings
major_version = django.VERSION[0]
minor_version = django.VERSION[1]
if major_version == 1:
if minor_version > 1:
return django_settings.DATABASES['default']['ENGINE']
else:
return django_settings.DATABASE_ENGINE
| gpl-3.0 | -2,211,768,841,389,933,000 | 29.558824 | 73 | 0.655759 | false | 3.399128 | false | false | false |
OmeGak/indico-plugins | vc_vidyo/indico_vc_vidyo/api/cache.py | 1 | 1207 | # This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from suds.cache import Cache
from MaKaC.common.cache import GenericCache
DEFAULT_CACHE_TTL = 24 * 3600
class SudsCache(Cache):
_instance = None
def __init__(self, duration=DEFAULT_CACHE_TTL):
self._cache = GenericCache("SudsCache")
self._duration = duration
def get(self, key):
        return self._cache.get(key)
def put(self, key, val):
self._cache.set(key, val, self._duration)
def purge(self, key):
self._cache.delete(key)
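    # Typical wiring (sketch, not taken from this plugin's code): pass an
    # instance to the suds client so fetched WSDL/SOAP documents are cached in
    # Indico's GenericCache, e.g. suds.client.Client(wsdl_url, cache=SudsCache()),
    # where wsdl_url is a placeholder for the service description URL.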
| gpl-3.0 | 5,846,497,963,158,314,000 | 31.621622 | 78 | 0.710853 | false | 3.795597 | false | false | false |
AndrewGoldstein/grasshopper | tests/test_models.py | 1 | 1954 | # -*- coding: utf-8 -*-
"""Model unit tests."""
import datetime as dt
import pytest
from grasshopper.user.models import Role, User
from .factories import UserFactory
@pytest.mark.usefixtures('db')
class TestUser:
"""User tests."""
def test_get_by_id(self):
"""Get user by ID."""
user = User('foo', '[email protected]')
user.save()
retrieved = User.get_by_id(user.id)
assert retrieved == user
def test_created_at_defaults_to_datetime(self):
"""Test creation date."""
user = User(username='foo', email='[email protected]')
user.save()
assert bool(user.created_at)
assert isinstance(user.created_at, dt.datetime)
def test_password_is_nullable(self):
"""Test null password."""
user = User(username='foo', email='[email protected]')
user.save()
assert user.password is None
def test_factory(self, db):
"""Test user factory."""
user = UserFactory(password='myprecious')
db.session.commit()
assert bool(user.username)
assert bool(user.email)
assert bool(user.created_at)
assert user.is_admin is False
assert user.active is True
assert user.check_password('myprecious')
def test_check_password(self):
"""Check password."""
user = User.create(username='foo', email='[email protected]',
password='foobarbaz123')
assert user.check_password('foobarbaz123') is True
assert user.check_password('barfoobaz') is False
def test_full_name(self):
"""User full name."""
user = UserFactory(first_name='Foo', last_name='Bar')
assert user.full_name == 'Foo Bar'
def test_roles(self):
"""Add a role to a user."""
role = Role(name='admin')
role.save()
user = UserFactory()
user.roles.append(role)
user.save()
assert role in user.roles
| bsd-3-clause | 1,862,980,038,182,105,900 | 28.164179 | 63 | 0.589048 | false | 3.908 | true | false | false |
rodluger/everest | docs/mcmc.py | 1 | 2721 | """MCMC example for transit fitting."""
import matplotlib.pyplot as pl
from everest import Everest, TransitModel
import numpy as np
import emcee
from tqdm import tqdm
from corner import corner
def lnprior(x):
"""Return the log prior given parameter vector `x`."""
per, t0, b = x
if b < -1 or b > 1:
return -np.inf
elif per < 7 or per > 10:
return -np.inf
elif t0 < 1978 or t0 > 1979:
return -np.inf
else:
return 0.
def lnlike(x, star):
"""Return the log likelihood given parameter vector `x`."""
ll = lnprior(x)
if np.isinf(ll):
        return ll, (np.nan,)
per, t0, b = x
model = TransitModel('b', per=per, t0=t0, b=b, rhos=10.)(star.time)
like, d, vard = star.lnlike(model, full_output=True)
ll += like
return ll, (d,)
# Initialize the everest model
star = Everest(201635569)
# Set up the MCMC sampler
params = ['Period (days)', r't$_0$ (BJD - 2456811)', 'Impact parameter']
blobs = ['Depth (%)']
nsteps = 1000
nburn = 300
nwalk = 10
ndim = len(params)
nblobs = len(blobs)
sampler = emcee.EnsembleSampler(nwalk, ndim, lnlike, args=[star])
x0 = [[8.368 + 0.01 * np.random.randn(),
1978.4513 + 0.01 * np.random.randn(),
0. + 0.1 * np.random.randn()] for k in range(nwalk)]
blobs0 = [[0.] for k in range(nwalk)]
# Run!
for i in tqdm(sampler.sample(x0, iterations=nsteps, blobs0=blobs0),
total=nsteps):
pass
# Add the blobs to the chain for plotting
chain = np.concatenate((sampler.chain,
np.array(sampler.blobs).swapaxes(0, 1)), axis=2)
# Re-scale the transit time for prettier axes labels
chain[:, :, 1] -= 1978.
# Take the absolute value of the impact parameter for plotting
chain[:, :, 2] = np.abs(chain[:, :, 2])
# Re-scale the transit depth as a percentage
chain[:, :, 3] *= 100.
# Plot the chains
fig1, ax = pl.subplots(ndim + nblobs, figsize=(6, 7))
fig1.suptitle("K2-14b", fontsize=16, fontweight='bold')
ax[-1].set_xlabel("Iteration", fontsize=14)
for n in range(ndim + nblobs):
for k in range(nwalk):
ax[n].plot(chain[k, :, n], alpha=0.3, lw=1)
ax[n].set_ylabel((params + blobs)[n], fontsize=9)
ax[n].margins(0, None)
ax[n].axvline(nburn, color='b', alpha=0.5, lw=1, ls='--')
fig1.savefig("k2-14b_chains.png", bbox_inches='tight')
# Plot the posterior distributions
samples = chain[:, nburn:, :].reshape(-1, ndim + nblobs)
fig2 = corner(samples, labels=params + blobs)
fig2.suptitle("K2-14b", fontsize=16, fontweight='bold')
fig2.set_size_inches(6, 6)
for ax in fig2.axes:
for tick in ax.get_xticklabels() + ax.get_yticklabels():
tick.set_fontsize(7)
fig2.savefig("k2-14b_corner.png", bbox_inches='tight')
| mit | 202,969,213,362,555,870 | 28.901099 | 72 | 0.632488 | false | 2.858193 | false | false | false |
cxcsds/ciao-contrib | crates_contrib/images.py | 1 | 4630 | #
# Copyright (C) 2012, 2015, 2016, 2019
# Smithsonian Astrophysical Observatory
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Image-specific Crates routines.
At present there is only one routine - imextent.
"""
from pytransform import LINEAR2DTransform
__all__ = ('imextent', )
def imextent(img, xmin, xmax, ymin, ymax, limits='center'):
"""Create a linear transform for the image axes.
Returns a 2D linear transform object that represents the
mapping from "pixel" units (e.g. logical values) to
a linearly scaled system (offset and scale change, no
rotation). One use of this is to mimic the extent
argument from matplotlib's imshow command, as discussed
in the examples below.
Parameters
----------
img : 2D NumPy array
xmin, xmax, ymin, ymax : float
The coordinates of the lower-left and upper-right
corners of the image in the transformed (non-logical)
system.
limits : {'center', 'edge'}
Do the coordinates (xmin, ..., ymax) refer to the
center of the pixels, or their edges. In FITS convention,
the bottom-left pixel is centered on 1,1 and the top-right
pixel is nx,ny (for a nx by ny grid). With limits='center'
        xmin,ymin refers to the center of the lower-left pixel
(i.e. 1,1 in FITS terminology) whereas with limits='edge'
it refers to the bottom-left corner (0.5,0.5 in FITS).
Returns
-------
tr : pytransform.LINEAR2DTransform
The transform object containing the coordinate mapping.
Notes
-----
The logical coordinate system follows the FITS standard, so the
first pixel is (1,1) and not (0,0), and the X axis values are
given first.
Examples
--------
The following example creates a 40 pixel wide by 20 pixel high
image, zi, where the X axis goes from 40 to 60 and the Y
axis 10 to 20. The imextent call creates a transform object.
>>> yi, xi = np.mgrid[10:20:20j, 40:60:40j]
>>> zi = 100.0 / np.sqrt((xi - 45.62) ** 2 + (yi - 14.7) ** 2)
>>> tr = imextent(zi, 40, 60, 10, 20)
The transform object can be used to convert between logical
coordinates (where 1,1 refers to the center of the lower-left
pixel) and the data coordinates:
>>> print(tr.apply([[1,1], [40,20]]))
[[40 10]
[60 20]]
and the invert method goes from data to logical coordinates:
>>> print(tr.invert([[45.0, 15.0]]))
[[ 10.75 10.5 ]]
The following examples use a 4 pixel by 3 pixel image:
>>> img = np.arange(0, 12).reshape(3, 4)
The default value for the limits argument is 'center', which
means that the given coordinates - in this case 10,-10 and
13,-6 - refer to the center of the bottom-left and top-right
pixels:
>>> tr_cen = imextent(img, 10, 13, -10, -6, limits='center')
The alternative is limits='edge', where 10,-10 refers to the
bottom-left corner of the image and 13,-6 refers to the
top-right corner:
>>> tr_edge = imextent(img, 10, 13, -10, -6, limits='edge')
>>> print(tr_cen.apply([[1.0, 1.0]]))
[[ 10. -10.]]
>>> print(tr_edge.apply([[1.0, 1.0]]))
[[ 10.375 -9.33333333]]
"""
try:
(ny, nx) = img.shape
except AttributeError:
raise ValueError("First argument has no shape attribute.")
dx = (xmax - xmin) * 1.0
dy = (ymax - ymin) * 1.0
if limits == 'center':
dx /= (nx - 1.0)
dy /= (ny - 1.0)
x0 = xmin - dx
y0 = ymin - dy
elif limits == 'edge':
dx /= nx
dy /= ny
x0 = xmin - dx / 2.0
y0 = ymin - dy / 2.0
else:
raise ValueError("limits must be 'center' or 'edge', not '{}'".format(limits))
tr = LINEAR2DTransform()
tr.get_parameter('ROTATION').set_value(0.0)
tr.get_parameter('SCALE').set_value([dx, dy])
tr.get_parameter('OFFSET').set_value([x0, y0])
return tr
| gpl-3.0 | 6,630,351,362,508,218,000 | 30.496599 | 86 | 0.630886 | false | 3.605919 | false | false | false |
lintrace/GravityFalls3 | gravity_falls.py | 1 | 1529 | #!/usr/bin/python3
"""
Main entry point
Solves the cryptograms from the book "Gravity Falls: Journal 3"
"""
import atbash_chiper, caesar_cipher, vigenere_cipher
print('='*80)
print('Encrypted with the Caesar cipher; see the comments above each line (location in the book)')
print('='*80)
for line in open('caesar.txt'):
if line[0] == '#':
print('-' * 80,'\n',line)
continue
if line[-1]=='\n':
line = line[:-1]
    print('From the journal:\t{0}\nDecrypted:\t{1}\n'.format(line, caesar_cipher.caesar_dec(line, 23)), end ='')
print('='*80)
print('Text from Gravity Falls Journal 3 (the longest one),\nkept in a box with the word "ПАЙНС" ("PINES") scratched on the lid')
print('The text is encrypted with the Vigenere cipher, and the word "ПАЙНС" is the password')
print('='*80)
print(vigenere_cipher.vigenere_file_dec('ПАЙНС.txt','ПАЙНС'))
print('='*80,'\n\n\n')
print('='*80)
print('### The book also contains a hint for decoding the alien squiggle cipher\nApparently each squiggle corresponds to a number,\nwhich is the position of a letter in the alphabet (an А1Я33 cipher)')
| gpl-3.0 | 8,400,138,516,059,571,000 | 34.766667 | 207 | 0.689655 | false | 1.53505 | false | false | false |
ucb-sejits/ctree | ctree/c/codegen.py | 1 | 6213 | """
Code generator for C constructs.
"""
from ctree.codegen import CodeGenVisitor
from ctree.c.nodes import Op
from ctree.types import codegen_type, get_suffix
from ctree.precedence import UnaryOp, BinaryOp, TernaryOp, Cast
from ctree.precedence import get_precedence, is_left_associative
from numbers import Number
from ctree.nodes import CommonCodeGen
class CCodeGen(CommonCodeGen):
"""
Manages generation of C code.
"""
def _requires_parentheses(self, parent, node):
"""
Returns node as a string, optionally with parentheses around it if
needed to enforce precendence rules.
"""
if isinstance(node, (UnaryOp, BinaryOp, TernaryOp)) and\
isinstance(parent, (UnaryOp, BinaryOp, TernaryOp, Cast)):
prec = get_precedence(node)
parent_prec = get_precedence(parent)
is_not_last_child = isinstance(parent, UnaryOp) or\
isinstance(parent, Cast) or\
(isinstance(parent, BinaryOp) and node is parent.left) or\
(isinstance(parent, TernaryOp) and node is not parent.elze)
assoc_left = is_left_associative(parent)
if (prec < parent_prec) or \
(prec == parent_prec and (assoc_left is not is_not_last_child)):
return True
return False
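    # Worked example (assuming the standard C operator precedences encoded in
    # ctree.precedence): rendering Mul(Add(a, b), c) parenthesizes the Add
    # child because its precedence is lower than Mul's, giving "(a + b) * c",
    # while Add(a, Mul(b, c)) renders as "a + b * c" with no extra parentheses.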
# -------------------------------------------------------------------------
# visitor methods
def visit_MultiNode(self, node):
return self._genblock(node.body, insert_curly_brackets=False, increase_indent=False)
def visit_FunctionDecl(self, node):
params = ", ".join(map(str, node.params))
s = []
for attrib in node.attributes:
s.append("__attribute__ (({}))".format(attrib))
if node.kernel:
s.append("__kernel")
if node.static:
s.append("static")
if node.inline:
s.append("inline")
s.append("%s %s(%s)" % (codegen_type(node.return_type), node.name, params))
if node.defn:
s.append("%s" % self._genblock(node.defn))
return " ".join(s)
def visit_UnaryOp(self, node):
op = self._parenthesize(node, node.op)
arg = self._parenthesize(node, node.arg)
if isinstance(node.op, (Op.PostInc, Op.PostDec)):
return "%s %s" % (arg, op)
else:
return "%s %s" % (op, arg)
def visit_BinaryOp(self, node):
left = self._parenthesize(node, node.left)
right = self._parenthesize(node, node.right)
if isinstance(node.op, Op.ArrayRef):
return "%s[%s]" % (left, right)
else:
return "%s %s %s" % (left, node.op, right)
def visit_AugAssign(self, node):
return "%s %s= %s" % (node.target, node.op, node.value)
def visit_TernaryOp(self, node):
cond = self._parenthesize(node, node.cond)
then = self._parenthesize(node, node.then)
elze = self._parenthesize(node, node.elze)
return "%s ? %s : %s" % (cond, then, elze)
def visit_Cast(self, node):
value = self._parenthesize(node, node.value)
return "(%s) %s" % (codegen_type(node.type), value)
def visit_Constant(self, node):
if isinstance(node.value, str):
return "'%s'" % node.value[0]
else:
return str(node.value)
def visit_SymbolRef(self, node):
s = ""
if node._global:
s += "__global "
if node._local:
s += "__local "
if node._static:
s += "static "
if node._const:
s += "const "
if node.type is not None:
s += "%s " % codegen_type(node.type)
if node._restrict:
s += "restrict "
return "%s%s" % (s, node.name)
def visit_Block(self, node):
return self._genblock(node.body)
def visit_Return(self, node):
if node.value:
return "return %s" % node.value
else:
return "return"
def visit_If(self, node):
then = self._genblock(node.then)
if node.elze:
elze = self._genblock(node.elze)
return "if (%s) %s else %s" % (node.cond, then, elze)
else:
return "if (%s) %s" % (node.cond, then)
def visit_While(self, node):
body = self._genblock(node.body)
return "while (%s) %s" % (node.cond, body)
def visit_DoWhile(self, node):
body = self._genblock(node.body)
return "do %s while (%s)" % (body, node.cond)
def visit_For(self, node):
body = self._genblock(node.body)
s = ""
if node.pragma:
s += "#pragma %s\n" % node.pragma + self._tab()
return s + "for (%s; %s; %s) %s" % (node.init, node.test, node.incr, body)
def visit_FunctionCall(self, node):
args = ", ".join(map(str, node.args))
return "%s(%s)" % (node.func, args)
def visit_String(self, node):
return '"%s"' % '" "'.join(node.values)
def visit_CFile(self, node):
stmts = self._genblock(node.body, insert_curly_brackets=False, increase_indent=False)
return '// <file: %s>%s' % (node.get_filename(), stmts)
def visit_ArrayDef(self, node):
return "%s[%s] = " % (node.target, node.size) + self.visit(node.body)
def visit_Break(self, node):
return 'break'
def visit_Continue(self, node):
return 'continue'
def visit_Array(self, node):
return "{%s}" % ', '.join([i.codegen() for i in node.body])
def visit_Hex(self, node):
return hex(node.value) + get_suffix(node.ctype)
def visit_Number(self, node):
return str(node.value) + get_suffix(node.ctype)
def visit_Attribute(self, node):
s = self.visit(node.target)
return "{target} __attribute__({items})".format(target=s, items=", ".join(node.attributes))
def visit_Pragma(self, node):
stuff = self._genblock(node.body, insert_curly_brackets=node.braces)
if node.braces:
stuff = '\n\t'.join(stuff.split("\n"))
return '#pragma ' + node.pragma + '\n' + stuff
| bsd-2-clause | 4,827,430,226,630,825,000 | 33.325967 | 99 | 0.55062 | false | 3.540171 | false | false | false |
nuagenetworks/vspk-python | vspk/v6/nuegressauditacltemplate.py | 1 | 21660 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUEgressAuditACLEntryTemplatesFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUEgressAuditACLTemplate(NURESTObject):
""" Represents a EgressAuditACLTemplate in the VSD
Notes:
An egress audit policy is a set of rules defining how network traffic is monitored and mirrored from a domain for Audit purposes
"""
__rest_name__ = "egressauditacltemplate"
__resource_name__ = "egressauditacltemplates"
## Constants
CONST_POLICY_STATE_DRAFT = "DRAFT"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_PRIORITY_TYPE_TOP_AUDIT = "TOP_AUDIT"
CONST_POLICY_STATE_LIVE = "LIVE"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a EgressAuditACLTemplate instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> egressauditacltemplate = NUEgressAuditACLTemplate(id=u'xxxx-xxx-xxx-xxx', name=u'EgressAuditACLTemplate')
>>> egressauditacltemplate = NUEgressAuditACLTemplate(data=my_dict)
"""
super(NUEgressAuditACLTemplate, self).__init__()
# Read/Write Attributes
self._name = None
self._last_updated_by = None
self._last_updated_date = None
self._active = None
self._default_allow_ip = None
self._default_allow_non_ip = None
self._default_install_acl_implicit_rules = None
self._description = None
self._embedded_metadata = None
self._entity_scope = None
self._policy_state = None
self._creation_date = None
self._priority = None
self._priority_type = None
self._associated_live_entity_id = None
self._associated_virtual_firewall_policy_id = None
self._auto_generate_priority = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="active", remote_name="active", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="default_allow_ip", remote_name="defaultAllowIP", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="default_allow_non_ip", remote_name="defaultAllowNonIP", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="default_install_acl_implicit_rules", remote_name="defaultInstallACLImplicitRules", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="policy_state", remote_name="policyState", attribute_type=str, is_required=False, is_unique=False, choices=[u'DRAFT', u'LIVE'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=True)
self.expose_attribute(local_name="priority_type", remote_name="priorityType", attribute_type=str, is_required=False, is_unique=True, choices=[u'TOP_AUDIT'])
self.expose_attribute(local_name="associated_live_entity_id", remote_name="associatedLiveEntityID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_virtual_firewall_policy_id", remote_name="associatedVirtualFirewallPolicyID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="auto_generate_priority", remote_name="autoGeneratePriority", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.egress_audit_acl_entry_templates = NUEgressAuditACLEntryTemplatesFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def name(self):
""" Get name value.
Notes:
The name of the entity
"""
return self._name
@name.setter
def name(self, value):
""" Set name value.
Notes:
The name of the entity
"""
self._name = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def active(self):
""" Get active value.
Notes:
If enabled, it means that this ACL or QOS entry is active
"""
return self._active
@active.setter
def active(self, value):
""" Set active value.
Notes:
If enabled, it means that this ACL or QOS entry is active
"""
self._active = value
@property
def default_allow_ip(self):
""" Get default_allow_ip value.
Notes:
If enabled a default ACL of Allow All is added as the last entry in the list of ACL entries
This attribute is named `defaultAllowIP` in VSD API.
"""
return self._default_allow_ip
@default_allow_ip.setter
def default_allow_ip(self, value):
""" Set default_allow_ip value.
Notes:
If enabled a default ACL of Allow All is added as the last entry in the list of ACL entries
This attribute is named `defaultAllowIP` in VSD API.
"""
self._default_allow_ip = value
@property
def default_allow_non_ip(self):
""" Get default_allow_non_ip value.
Notes:
If enabled, non ip traffic will be dropped
This attribute is named `defaultAllowNonIP` in VSD API.
"""
return self._default_allow_non_ip
@default_allow_non_ip.setter
def default_allow_non_ip(self, value):
""" Set default_allow_non_ip value.
Notes:
If enabled, non ip traffic will be dropped
This attribute is named `defaultAllowNonIP` in VSD API.
"""
self._default_allow_non_ip = value
@property
def default_install_acl_implicit_rules(self):
""" Get default_install_acl_implicit_rules value.
Notes:
If enabled, implicit rule will allow intra domain traffic by default
This attribute is named `defaultInstallACLImplicitRules` in VSD API.
"""
return self._default_install_acl_implicit_rules
@default_install_acl_implicit_rules.setter
def default_install_acl_implicit_rules(self, value):
""" Set default_install_acl_implicit_rules value.
Notes:
If enabled, implicit rule will allow intra domain traffic by default
This attribute is named `defaultInstallACLImplicitRules` in VSD API.
"""
self._default_install_acl_implicit_rules = value
@property
def description(self):
""" Get description value.
Notes:
A description of the entity
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
A description of the entity
"""
self._description = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def policy_state(self):
""" Get policy_state value.
Notes:
None
This attribute is named `policyState` in VSD API.
"""
return self._policy_state
@policy_state.setter
def policy_state(self, value):
""" Set policy_state value.
Notes:
None
This attribute is named `policyState` in VSD API.
"""
self._policy_state = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def priority(self):
""" Get priority value.
Notes:
The priority of the ACL entry that determines the order of entries
"""
return self._priority
@priority.setter
def priority(self, value):
""" Set priority value.
Notes:
The priority of the ACL entry that determines the order of entries
"""
self._priority = value
@property
def priority_type(self):
""" Get priority_type value.
Notes:
                Possible values: TOP_AUDIT. This will be the topmost entry of the egress ACL stack.
This attribute is named `priorityType` in VSD API.
"""
return self._priority_type
@priority_type.setter
def priority_type(self, value):
""" Set priority_type value.
Notes:
                Possible values: TOP_AUDIT. This will be the topmost entry of the egress ACL stack.
This attribute is named `priorityType` in VSD API.
"""
self._priority_type = value
@property
def associated_live_entity_id(self):
""" Get associated_live_entity_id value.
Notes:
In the draft mode, the ACL entry refers to this LiveEntity. In non-drafted mode, this is null.
This attribute is named `associatedLiveEntityID` in VSD API.
"""
return self._associated_live_entity_id
@associated_live_entity_id.setter
def associated_live_entity_id(self, value):
""" Set associated_live_entity_id value.
Notes:
In the draft mode, the ACL entry refers to this LiveEntity. In non-drafted mode, this is null.
This attribute is named `associatedLiveEntityID` in VSD API.
"""
self._associated_live_entity_id = value
@property
def associated_virtual_firewall_policy_id(self):
""" Get associated_virtual_firewall_policy_id value.
Notes:
The ID of the Virtual Firewall Policy, if this was created as part of the Virtual Firewall Policy creation
This attribute is named `associatedVirtualFirewallPolicyID` in VSD API.
"""
return self._associated_virtual_firewall_policy_id
@associated_virtual_firewall_policy_id.setter
def associated_virtual_firewall_policy_id(self, value):
""" Set associated_virtual_firewall_policy_id value.
Notes:
The ID of the Virtual Firewall Policy, if this was created as part of the Virtual Firewall Policy creation
This attribute is named `associatedVirtualFirewallPolicyID` in VSD API.
"""
self._associated_virtual_firewall_policy_id = value
@property
def auto_generate_priority(self):
""" Get auto_generate_priority value.
Notes:
                This option only affects how the children ACL entry priorities of this template/policy are generated when the priority is not specified. If 'false', the priority is generated by incrementing the current highest ACL entry priority by 100. If 'true', a random priority will be generated, which is advised when creating many entries concurrently without specifying the priority; the new child ACL entry then gets a random, non-predictable priority. Therefore it is advised to only enable this when allow rules are being created. If any type of ACL entry ordering is required, keep this value at 'false' and use your own defined priorities; this ensures a clear set of priorities and a clear view of how traffic is validated against the ACL entries.
This attribute is named `autoGeneratePriority` in VSD API.
"""
return self._auto_generate_priority
@auto_generate_priority.setter
def auto_generate_priority(self, value):
""" Set auto_generate_priority value.
Notes:
                This option only affects how the children ACL entry priorities of this template/policy are generated when the priority is not specified. If 'false', the priority is generated by incrementing the current highest ACL entry priority by 100. If 'true', a random priority will be generated, which is advised when creating many entries concurrently without specifying the priority; the new child ACL entry then gets a random, non-predictable priority. Therefore it is advised to only enable this when allow rules are being created. If any type of ACL entry ordering is required, keep this value at 'false' and use your own defined priorities; this ensures a clear set of priorities and a clear view of how traffic is validated against the ACL entries.
This attribute is named `autoGeneratePriority` in VSD API.
"""
self._auto_generate_priority = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
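    # --- Editor's sketch (hedged; not part of the generated SDK file) ---
    # Illustrates the writable attributes exposed above. Creating the object
    # against a live VSD session (parent objects, save/fetch calls) is assumed
    # to happen elsewhere and is not shown here.
    #
    #   tmpl = NUEgressAuditACLTemplate(name='egress-audit', priority_type='TOP_AUDIT')
    #   tmpl.auto_generate_priority = True   # child entries get random priorities
    #   tmpl.default_allow_ip = True         # append a trailing allow-all IP entry
    #   tmpl.description = 'audit template managed by automation'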
| bsd-3-clause | 4,749,001,960,045,595,000 | 32.792512 | 772 | 0.605263 | false | 4.683243 | false | false | false |
mbartling/TAMU_senior_design | Python/gps2local.py | 1 | 1458 | #! /usr/bin/env python
from numpy import *
import sys
#p1 = [30.625621, -96.336753]
#p2 = [30.624388, -96.335755]
#p3 = [30.626050, -96.333368]
#p4 = [30.627195, -96.334945]
xres = 300
yres = 500
lat = []
lon = []
for line in sys.stdin:
line.strip()
(latS, lonS, dummy) = line.split(',')
lat.append(latS)
lon.append(lonS)
lat = array(lat, dtype='int32')
lon = array(lon, dtype='int32')
latmin = min(lat)
latmax = max(lat)
lonmin = min(lon)
lonmax = max(lon)
xlin = linspace(latmin,latmax,xres)
ylin = linspace(lonmin,lonmax,yres)
print xlin, ylin
#s1 = [30.625383, -96.336161]
#s2 = [30.624978, -96.335295]
#s3 = [30.625749, -96.334460]
#rssi1 = -16.2342
#rssi2 = -20.2342
#rssi3 = -22.2342
thresh = 0.15 #15% of minimum distance
#gamemap = zeros([len(xlin),len(ylin)])
#
#dx = xlin - s1[0]
#dy = ylin - s1[1]
#xloc = abs(dx) <= (1+thresh)*min(abs(dx)) #consider doing average
#yloc = abs(dy) <= (1+thresh)*min(abs(dy))
#gamemap[xloc,yloc] = rssi1
#
#dx = xlin - s2[0]
#dy = ylin - s2[1]
#xloc = abs(dx) <= (1+thresh)*min(abs(dx)) #consider doing average
#yloc = abs(dy) <= (1+thresh)*min(abs(dy))
#gamemap[xloc,yloc] = rssi2
#
#dx = xlin - s3[0]
#dy = ylin - s3[1]
#xloc = abs(dx) <= (1+thresh)*min(abs(dx)) #consider doing average
#yloc = abs(dy) <= (1+thresh)*min(abs(dy))
#gamemap[xloc,yloc] = rssi3
#
#temp = zeros([len(xlin),len(ylin)])
#mask = gamemap != 0
#temp[mask] =
#
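# --- Editor's sketch (hedged addition; not in the original script) ---
# The commented block above maps one sensor's RSSI reading onto the lat/lon
# grid; the same idea as a reusable helper. The sensor position s = (lat, lon)
# and the rssi value are illustrative inputs, not data read by this script.
def stamp_rssi(gamemap, xlin, ylin, s, rssi, thresh=0.15):
	dx = abs(xlin - s[0])
	dy = abs(ylin - s[1])
	# keep grid lines whose distance to the sensor is within (1 + thresh)
	# of the minimum distance along each axis
	xloc = dx <= (1 + thresh) * dx.min()
	yloc = dy <= (1 + thresh) * dy.min()
	# ix_ (from numpy) builds the cross product of the two boolean masks
	gamemap[ix_(xloc, yloc)] = rssi
	return gamemap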
| mit | -899,562,376,890,822,100 | 20.78125 | 66 | 0.593964 | false | 2.036313 | false | false | false |
gengwg/leetcode | 179_largest_number.py | 1 | 1445 | # -*- coding: utf-8 -*-
# 179. Largest Number
# Given a list of non negative integers,
# arrange them such that they form the largest number.
#
# For example, given [3, 30, 34, 5, 9], the largest formed number is 9534330.
#
# Note: The result may be very large,
# so you need to return a string instead of an integer.
#
# Credits:
# Special thanks to @ts for adding this problem and creating all test cases.
#
# http://bookshadow.com/weblog/2015/01/13/leetcode-largest-number/
# Sorting idea:
# For two candidate numbers a and b,
# if str(a) + str(b) > str(b) + str(a),
# then a should come before b; otherwise b comes before a.
#
# Sorting the original array in descending order by this rule gives the answer.
#
# Time complexity: O(nlogn)
#
# Tricky test case:
# Input: [0,0]
# Output: "00"
# Expected: "0"
class Solution:
# @param {integer[]} nums
# @return {string}
def largestNumber(self, nums):
nums = sorted([str(x) for x in nums], cmp=self.compare)
ans = ''.join(nums).lstrip('0')
return ans or '0'
def compare(self, a, b):
        # The 2nd [] is not a list; it is indexing, where True == 1 and False == 0:
        # [1, -1][True] == -1; [1, -1][False] == 1
        # so this sorts a, b in reverse (descending) order
return [1, -1][a + b > b + a]
        # equivalent to:
        #     if a + b > b + a:
        #         return -1
        #     else:
        #         return 1
if __name__ == '__main__':
print Solution().largestNumber([3, 30, 34, 5, 9])
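# --- Editor's note (hedged addition): Python 3 removes the cmp= argument of
# sorted(). functools.cmp_to_key (available since Python 2.7) gives the same
# ordering with the comparator defined above:
#
#   from functools import cmp_to_key
#   nums = sorted([str(x) for x in nums], key=cmp_to_key(Solution().compare))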
| apache-2.0 | -4,658,683,190,685,438,000 | 25.098039 | 77 | 0.57701 | false | 2.497186 | false | false | false |
alex-bauer/kelvin-power-challenge | src/features/features_saaf.py | 1 | 1680 | """Concatenate and resample SAAF data"""
import sys
import os
sys.path.append("../")
from utils.utils import *
folder = config.features_folder
if not os.path.exists(folder):
os.makedirs(folder)
def parse_saaf(filename, dropna=True):
df = pd.read_csv(config.data_folder + '/' + filename)
df = convert_time(df)
df = resample(df)
if dropna:
df = df.dropna()
return df
saaf_train1 = parse_saaf('/train_set/context--2008-08-22_2010-07-10--saaf.csv')
saaf_train2 = parse_saaf('/train_set/context--2010-07-10_2012-05-27--saaf.csv')
saaf_train3 = parse_saaf('/train_set/context--2012-05-27_2014-04-14--saaf.csv')
saaf_train = pd.concat([saaf_train1, saaf_train2, saaf_train3])
saaf_test = parse_saaf('/test_set/context--2014-04-14_2016-03-01--saaf.csv')
saaf_all = pd.concat([saaf_train, saaf_test])
#Binary indicator for solar conjunction
saaf_all['conjunction']=0
saaf_all.loc['2008-11-16':'2008-12-20','conjunction']=1
saaf_all.loc['2011-01-17':'2011-02-20','conjunction']=1
saaf_all.loc['2013-03-28':'2013-05-05','conjunction']=1
saaf_all.loc['2015-05-27':'2015-07-01','conjunction']=1
cols=['sa','sx','sy','sz']
#Averages over previous hours
for col in cols:
saaf_all[col+'_last_1']=saaf_all[col].shift(-1)
saaf_all[col+'_last_3']=saaf_all[col].rolling(3).mean()
saaf_all[col+'_last_24']=saaf_all[col].rolling(24).mean()
target = pd.read_pickle(config.data_folder + '/target.pkl')
target = target.join(saaf_all.reindex(target.index, method='nearest'))
saaf_all = target.drop(config.target_cols, axis=1)
saaf_all.fillna(method='ffill').fillna(method='bfill').to_pickle(config.features_folder + '/saaf.pkl')
print "Done." | mit | 8,595,800,424,774,629,000 | 27.982759 | 102 | 0.683333 | false | 2.612753 | false | false | false |
ryanjoneil/docker-image-construction | dicp/solvers/most_common.py | 1 | 1247 | from collections import defaultdict
class MostCommonHeuristic(object):
'''Heuristic that shares the most common command at any point'''
_slug = 'most-common'
def slug(self):
return MostCommonHeuristic._slug
def solve(self, problem, saver):
# Keep track of what hasn't been assigned and how many of each thing there are.
remaining = {i: set(problem.images[i]) for i in problem.images}
order = defaultdict(list)
self._assign(remaining, order)
saver(order)
def _assign(self, remaining, order):
if not remaining:
return
# Figure the most common command.
by_cmd = defaultdict(set)
for i, cmds in remaining.items():
for c in cmds:
by_cmd[c].add(i)
most_common = max(by_cmd, key=lambda p: len(by_cmd[p]))
# Add this to the schedule for any it applies to.
new_remain = {}
for i in by_cmd[most_common]:
order[i].append(most_common)
remaining[i].remove(most_common)
if remaining[i]:
new_remain[i] = set(remaining[i])
del remaining[i]
self._assign(new_remain, order)
self._assign(remaining, order)
| mit | 5,420,271,509,696,158,000 | 30.974359 | 87 | 0.588613 | false | 4.048701 | false | false | false |
Balannen/LSMASOMM | atom3/Kernel/ErrorHandlers/exceptionStreamHook.py | 1 | 1233 | """
exceptionStreamHook.py
A custom stderr hook by Denis Dube, http://msdl.cs.mcgill.ca/people/denis/
"""
import tkMessageBox, sys
class exceptionStreamHook:
"""
This class pretends to be an open stderr file stream
Perhaps it should subclass File to be safer... but what would be the default
  behaviour then??? Must I override each method one at a time? Bah, like anyone
uses that on stderr...
"""
def write( self, errorString ):
""" Simulates the write method of a file stream object """
# Send the error back where it belongs
sys.__stderr__.write( errorString )
#print ">"+errorString+"<"
# Dealing with a new exception
if( errorString[:9] == 'Exception' ):
tkMessageBox.showerror( 'Uncaught Exception',
'See console for details\n\n' + errorString )
def close( self, *args ): pass
def open( self, *args ): pass
def applyHook2stderr():
# Redirect error output stream to customized handler
sys.stderr = exceptionStreamHook()
if __name__ == '__main__':
print "Testing error redirect"
applyHook2stderr()
5/0
x=bs
print "Done" | gpl-3.0 | 469,603,371,962,662,600 | 23.729167 | 78 | 0.607461 | false | 3.96463 | false | false | false |
yuanming-hu/taichi | python/taichi/lang/shell.py | 1 | 1472 | import atexit
import functools
import os
import sys
from taichi.core.util import ti_core as _ti_core
import taichi as ti
try:
import sourceinspect as oinspect
except ImportError:
ti.warn('`sourceinspect` not installed!')
ti.warn(
'Without this package Taichi may not function well in Python IDLE interactive shell, '
'Blender scripting module and Python native shell.')
ti.warn('Please run `python3 -m pip install sourceinspect` to install.')
import inspect as oinspect
pybuf_enabled = False
_env_enable_pybuf = os.environ.get('TI_ENABLE_PYBUF', '1')
if not _env_enable_pybuf or int(_env_enable_pybuf):
# When using in Jupyter / IDLE, the sys.stdout will be their wrapped ones.
# While sys.__stdout__ should always be the raw console stdout.
pybuf_enabled = sys.stdout is not sys.__stdout__
_ti_core.toggle_python_print_buffer(pybuf_enabled)
def _shell_pop_print(old_call):
if not pybuf_enabled:
# zero-overhead!
return old_call
ti.info('Graphical python shell detected, using wrapped sys.stdout')
@functools.wraps(old_call)
def new_call(*args, **kwargs):
_taichi_skip_traceback = 1
ret = old_call(*args, **kwargs)
# print's in kernel won't take effect until ti.sync(), discussion:
# https://github.com/taichi-dev/taichi/pull/1303#discussion_r444897102
print(_ti_core.pop_python_print_buffer(), end='')
return ret
return new_call
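# --- Editor's note (hedged; illustrative only): how the wrapper above is
# meant to be applied. `launch_kernel` is a made-up name, not a taichi symbol.
#
#   launch_kernel = _shell_pop_print(launch_kernel)
#
# After every call of the wrapped function the C++-side print buffer is
# flushed to the (possibly wrapped) Python stdout, which is what graphical
# shells such as IDLE or Jupyter actually display.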
| mit | 6,888,034,702,434,073,000 | 31 | 94 | 0.686821 | false | 3.521531 | false | false | false |
leeopop/2015-CS570-Project | lda_preprocess.py | 1 | 2532 | import csv
from loader import load_single_file
num_of_topics__criteria_for_cut_words = 20
num_of_appearance__criteria_for_cut_words = 10
output_num_of_words_per_topic = 50
def load_vocab():
vocab = {}
inverse_dict = load_single_file('keyword_table.csv')
num = max([int(x['unique']) for x in inverse_dict.values()]) + 1
for (key, val) in inverse_dict.items():
vocab[int(val['unique'])] = str(key)
return num, vocab
def check_line_is_useless(line, cut_topic, cut_word):
count = 0
for i in range(1,len(line)):
if int(line[i]) >= cut_word:
count+=1
if count >= cut_topic:
return True
return False
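# Editor's note (hedged): with the defaults above, a vocabulary word is judged
# useless (and removed) when its count is >= num_of_appearance__criteria_for_cut_words
# (10) in at least num_of_topics__criteria_for_cut_words (20) topics, i.e. it is
# frequent almost everywhere and carries little topical information.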
def load_lda(num_vocab, vocab, cut_topic, cut_word):
f = open("lda_output.txt","r")
line = f.readline()
line = line.split()
n_topics = len(line)-1
ret = []
removed_words = []
index = int(line[0])
for i in range(n_topics):
ret.append([0] * num_vocab)
if check_line_is_useless(line, cut_topic, cut_word):
print(vocab[index])
removed_words.append(vocab[index])
else:
for i in range(n_topics):
ret[i][index] = int(line[i+1])
for line in f:
line = line.split()
index = int(line[0])
if check_line_is_useless(line, cut_topic, cut_word):
print(vocab[index])
removed_words.append(vocab[index])
else:
for i in range(n_topics):
ret[i][index] = int(line[i+1])
for i in range(n_topics):
ret[i] = list(enumerate(ret[i]))
ret[i].sort(key=lambda item:item[1], reverse=True)
return removed_words, n_topics, ret
def write_csv(lda, vocab, removed_words, n_topic, n_word):
with open('lda.csv', 'w', encoding='utf-8') as writecsvfile:
writer = csv.writer(writecsvfile, delimiter=',', quotechar='|')
row = []
for i in range(n_topic):
row.append("topic" + str(i+1))
writer.writerow(row)
for i in range(n_word):
row = []
for j in range(n_topic):
row.append(vocab[lda[j][i][0]])
writer.writerow(row)
writer.writerow([])
removed_words.insert(0,'')
removed_words.insert(0,'removed_words')
writer.writerow(removed_words)
def main():
num_vocab,vocab = load_vocab()
print("reading vocabulary file finished!")
#remove_words = ['of', 'the', 'and', 'in', 'for', 'a', 'to', 'with', 'by', 'on','at', 'an']
removed_words, num_topic,lda = load_lda(num_vocab, vocab,
num_of_topics__criteria_for_cut_words,
num_of_appearance__criteria_for_cut_words)
print("processing lda file finished!")
write_csv(lda,vocab, removed_words, num_topic, output_num_of_words_per_topic)
print("writing lda file finished!")
if __name__ == '__main__':
main() | mit | 6,811,511,562,006,119,000 | 27.460674 | 92 | 0.651659 | false | 2.648536 | false | false | false |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/exploits/ZIBE/plugin_manager.py | 1 | 2051 | # uncompyle6 version 2.9.10
# Python bytecode 2.6 (62161)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: c:\Temp\build\ZIBE\plugin_manager.py
# Compiled at: 2013-02-27 18:02:46
from plugins import *
class PluginManager(object):
__plugins__ = {}
__handlers__ = {}
def __init__(self):
plugins = __import__('ZIBE').plugins
for member in plugins.__all__:
plugin_mod = getattr(plugins, member)
for type_name in dir(plugin_mod):
try:
t = getattr(plugin_mod, type_name)
if t.zibe_plugin is True:
self._add_plugin(t)
except AttributeError:
pass
def plugin_commands(self, plugin_name):
if self.__plugins__.has_key(plugin_name):
return self.__plugins__[plugin_name].get_command_handlers().keys()
return None
return None
def plugins(self):
ret = []
for p in self.__plugins__.keys():
ret.append((p, self.__plugins__[p].friendly_name))
return ret
def _add_plugin(self, t):
inst = t()
self.__plugins__[t.plugin_name] = inst
handlers = inst.get_command_handlers()
for k in handlers:
self.__handlers__[k] = handlers[k]
def handler_exists(self, cmd_name):
if self.__handlers__.has_key(cmd_name):
return True
return False
def get_handler_info(self, name):
if self.__handlers__.has_key(name) is False:
return None
return self.__handlers__[name]
def get_handler_func(self, cmd):
return self.__handlers__[cmd]['handler']
def invoke_cmd_handler(self, cmd, ctx, stdin, stdout, stderr, args):
if self.__handlers__.has_key(cmd):
record = self.__handlers__[cmd]
return record['handler'](stdin, stdout, stderr, ctx, args)
raise Exception('No command handler registered under that name') | unlicense | 1,598,297,665,892,283,400 | 32.096774 | 78 | 0.561677 | false | 3.88447 | false | false | false |
alekz112/xlwings | xlwings/tests/test_xlwings.py | 1 | 33895 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import sys
import shutil
import pytz
import nose
from nose.tools import assert_equal, raises, assert_true, assert_false, assert_not_equal
from datetime import datetime, date
from xlwings import Application, Workbook, Sheet, Range, Chart, ChartType, RgbColor, Calculation
# Mac imports
if sys.platform.startswith('darwin'):
from appscript import k as kw
# TODO: uncomment the desired Excel installation or set to None for default installation
APP_TARGET = None
# APP_TARGET = '/Applications/Microsoft Office 2011/Microsoft Excel'
else:
APP_TARGET = None
# Optional dependencies
try:
import numpy as np
from numpy.testing import assert_array_equal
except ImportError:
np = None
try:
import pandas as pd
from pandas import DataFrame, Series
from pandas.util.testing import assert_frame_equal, assert_series_equal
except ImportError:
pd = None
# Test data
data = [[1, 2.222, 3.333],
['Test1', None, 'éöà'],
[datetime(1962, 11, 3), datetime(2020, 12, 31, 12, 12, 20), 9.999]]
test_date_1 = datetime(1962, 11, 3)
test_date_2 = datetime(2020, 12, 31, 12, 12, 20)
list_row_1d = [1.1, None, 3.3]
list_row_2d = [[1.1, None, 3.3]]
list_col = [[1.1], [None], [3.3]]
chart_data = [['one', 'two'], [1.1, 2.2]]
if np is not None:
array_1d = np.array([1.1, 2.2, np.nan, -4.4])
array_2d = np.array([[1.1, 2.2, 3.3], [-4.4, 5.5, np.nan]])
if pd is not None:
series_1 = pd.Series([1.1, 3.3, 5., np.nan, 6., 8.])
rng = pd.date_range('1/1/2012', periods=10, freq='D')
timeseries_1 = pd.Series(np.arange(len(rng)) + 0.1, rng)
timeseries_1[1] = np.nan
df_1 = pd.DataFrame([[1, 'test1'],
[2, 'test2'],
[np.nan, None],
[3.3, 'test3']], columns=['a', 'b'])
df_2 = pd.DataFrame([1, 3, 5, np.nan, 6, 8], columns=['col1'])
df_dateindex = pd.DataFrame(np.arange(50).reshape(10,5) + 0.1, index=rng)
# MultiIndex (Index)
tuples = list(zip(*[['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two'],
['x', 'x', 'x', 'x', 'y', 'y', 'y', 'y']]))
index = pd.MultiIndex.from_tuples(tuples, names=['first', 'second', 'third'])
df_multiindex = pd.DataFrame([[1.1, 2.2], [3.3, 4.4], [5.5, 6.6], [7.7, 8.8], [9.9, 10.10],
[11.11, 12.12],[13.13, 14.14], [15.15, 16.16]], index=index)
# MultiIndex (Header)
header = [['Foo', 'Foo', 'Bar', 'Bar', 'Baz'], ['A', 'B', 'C', 'D', 'E']]
df_multiheader = pd.DataFrame([[0.0, 1.0, 2.0, 3.0, 4.0],
[0.0, 1.0, 2.0, 3.0, 4.0],
[0.0, 1.0, 2.0, 3.0, 4.0],
[0.0, 1.0, 2.0, 3.0, 4.0],
[0.0, 1.0, 2.0, 3.0, 4.0],
[0.0, 1.0, 2.0, 3.0, 4.0]], columns=pd.MultiIndex.from_arrays(header))
# Test skips and fixtures
def _skip_if_no_numpy():
if np is None:
raise nose.SkipTest('numpy missing')
def _skip_if_no_pandas():
if pd is None:
raise nose.SkipTest('pandas missing')
def _skip_if_not_default_xl():
if APP_TARGET is not None:
raise nose.SkipTest('not Excel default')
def class_teardown(wb):
wb.close()
if sys.platform.startswith('win'):
Application(wb).quit()
class TestApplication:
def setUp(self):
# Connect to test file and make Sheet1 the active sheet
xl_file1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_workbook_1.xlsx')
self.wb = Workbook(xl_file1, app_visible=False, app_target=APP_TARGET)
Sheet('Sheet1').activate()
def tearDown(self):
class_teardown(self.wb)
def test_screen_updating(self):
Application(wkb=self.wb).screen_updating = False
assert_equal(Application(wkb=self.wb).screen_updating, False)
Application(wkb=self.wb).screen_updating = True
assert_equal(Application(wkb=self.wb).screen_updating, True)
def test_calculation(self):
Range('A1').value = 2
Range('B1').formula = '=A1 * 2'
app = Application(wkb=self.wb)
app.calculation = Calculation.xlCalculationManual
Range('A1').value = 4
assert_equal(Range('B1').value, 4)
app.calculation = Calculation.xlCalculationAutomatic
app.calculate() # This is needed on Mac Excel 2016 but not on Mac Excel 2011 (changed behaviour)
assert_equal(Range('B1').value, 8)
Range('A1').value = 2
assert_equal(Range('B1').value, 4)
class TestWorkbook:
def setUp(self):
# Connect to test file and make Sheet1 the active sheet
xl_file1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_workbook_1.xlsx')
self.wb = Workbook(xl_file1, app_visible=False, app_target=APP_TARGET)
Sheet('Sheet1').activate()
def tearDown(self):
class_teardown(self.wb)
def test_name(self):
assert_equal(self.wb.name, 'test_workbook_1.xlsx')
def test_active_sheet(self):
assert_equal(self.wb.active_sheet.name, 'Sheet1')
def test_current(self):
assert_equal(self.wb.xl_workbook, Workbook.current().xl_workbook)
def test_set_current(self):
wb2 = Workbook(app_visible=False, app_target=APP_TARGET)
assert_equal(Workbook.current().xl_workbook, wb2.xl_workbook)
self.wb.set_current()
assert_equal(Workbook.current().xl_workbook, self.wb.xl_workbook)
wb2.close()
def test_get_selection(self):
Range('A1').value = 1000
assert_equal(self.wb.get_selection().value, 1000)
def test_reference_two_unsaved_wb(self):
"""Covers GH Issue #63"""
wb1 = Workbook(app_visible=False, app_target=APP_TARGET)
wb2 = Workbook(app_visible=False, app_target=APP_TARGET)
Range('A1').value = 2. # wb2
Range('A1', wkb=wb1).value = 1. # wb1
assert_equal(Range('A1').value, 2.)
assert_equal(Range('A1', wkb=wb1).value, 1.)
wb1.close()
wb2.close()
def test_save_naked(self):
cwd = os.getcwd()
wb1 = Workbook(app_visible=False, app_target=APP_TARGET)
target_file_path = os.path.join(cwd, wb1.name + '.xlsx')
if os.path.isfile(target_file_path):
os.remove(target_file_path)
wb1.save()
assert_equal(os.path.isfile(target_file_path), True)
wb2 = Workbook(target_file_path, app_visible=False, app_target=APP_TARGET)
wb2.close()
if os.path.isfile(target_file_path):
os.remove(target_file_path)
def test_save_path(self):
cwd = os.getcwd()
wb1 = Workbook(app_visible=False, app_target=APP_TARGET)
target_file_path = os.path.join(cwd, 'TestFile.xlsx')
if os.path.isfile(target_file_path):
os.remove(target_file_path)
wb1.save(target_file_path)
assert_equal(os.path.isfile(target_file_path), True)
wb2 = Workbook(target_file_path, app_visible=False, app_target=APP_TARGET)
wb2.close()
if os.path.isfile(target_file_path):
os.remove(target_file_path)
def test_mock_caller(self):
# Can't really run this one with app_visible=False
_skip_if_not_default_xl()
Workbook.set_mock_caller(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_workbook_1.xlsx'))
wb = Workbook.caller()
Range('A1', wkb=wb).value = 333
assert_equal(Range('A1', wkb=wb).value, 333)
def test_unicode_path(self):
# pip3 seems to struggle with unicode filenames
src = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'unicode_path.xlsx')
dst = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'ünicödé_päth.xlsx')
shutil.move(src, dst)
wb = Workbook(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'ünicödé_päth.xlsx'), app_visible=False, app_target=APP_TARGET)
Range('A1').value = 1
wb.close()
shutil.move(dst, src)
def test_unsaved_workbook_reference(self):
wb = Workbook(app_visible=False, app_target=APP_TARGET)
Range('B2').value = 123
wb2 = Workbook(wb.name, app_visible=False, app_target=APP_TARGET)
assert_equal(Range('B2', wkb=wb2).value, 123)
wb2.close()
def test_delete_named_item(self):
Range('B10:C11').name = 'to_be_deleted'
assert_equal(Range('to_be_deleted').name, 'to_be_deleted')
del self.wb.names['to_be_deleted']
assert_not_equal(Range('B10:C11').name, 'to_be_deleted')
def test_names_collection(self):
Range('A1').name = 'name1'
Range('A2').name = 'name2'
assert_true('name1' in self.wb.names and 'name2' in self.wb.names)
Range('A3').name = 'name3'
assert_true('name1' in self.wb.names and 'name2' in self.wb.names and
'name3' in self.wb.names)
def test_active_workbook(self):
# TODO: add test over multiple Excel instances on Windows
Range('A1').value = 'active_workbook'
wb_active = Workbook.active(app_target=APP_TARGET)
assert_equal(Range('A1', wkb=wb_active).value, 'active_workbook')
def test_workbook_name(self):
Range('A10').value = 'name-test'
wb2 = Workbook('test_workbook_1.xlsx', app_visible=False, app_target=APP_TARGET)
assert_equal(Range('A10', wkb=wb2).value, 'name-test')
class TestSheet:
def setUp(self):
# Connect to test file and make Sheet1 the active sheet
xl_file1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_workbook_1.xlsx')
self.wb = Workbook(xl_file1, app_visible=False, app_target=APP_TARGET)
Sheet('Sheet1').activate()
def tearDown(self):
class_teardown(self.wb)
def test_activate(self):
Sheet('Sheet2').activate()
assert_equal(Sheet.active().name, 'Sheet2')
Sheet(3).activate()
assert_equal(Sheet.active().index, 3)
def test_name(self):
Sheet(1).name = 'NewName'
assert_equal(Sheet(1).name, 'NewName')
def test_index(self):
assert_equal(Sheet('Sheet1').index, 1)
def test_clear_content_active_sheet(self):
Range('G10').value = 22
Sheet.active().clear_contents()
cell = Range('G10').value
assert_equal(cell, None)
def test_clear_active_sheet(self):
Range('G10').value = 22
Sheet.active().clear()
cell = Range('G10').value
assert_equal(cell, None)
def test_clear_content(self):
Range('Sheet2', 'G10').value = 22
Sheet('Sheet2').clear_contents()
cell = Range('Sheet2', 'G10').value
assert_equal(cell, None)
def test_clear(self):
Range('Sheet2', 'G10').value = 22
Sheet('Sheet2').clear()
cell = Range('Sheet2', 'G10').value
assert_equal(cell, None)
def test_autofit(self):
Range('Sheet1', 'A1:D4').value = 'test_string'
Sheet('Sheet1').autofit()
Sheet('Sheet1').autofit('r')
Sheet('Sheet1').autofit('c')
Sheet('Sheet1').autofit('rows')
Sheet('Sheet1').autofit('columns')
def test_add_before(self):
new_sheet = Sheet.add(before='Sheet1')
assert_equal(Sheet(1).name, new_sheet.name)
def test_add_after(self):
Sheet.add(after=Sheet.count())
assert_equal(Sheet(Sheet.count()).name, Sheet.active().name)
Sheet.add(after=1)
assert_equal(Sheet(2).name, Sheet.active().name)
def test_add_default(self):
# TODO: test call without args properly
Sheet.add()
def test_add_named(self):
Sheet.add('test', before=1)
assert_equal(Sheet(1).name, 'test')
@raises(Exception)
def test_add_name_already_taken(self):
Sheet.add('Sheet1')
def test_count(self):
count = Sheet.count()
assert_equal(count, 3)
def test_all(self):
all_names = [i.name for i in Sheet.all()]
assert_equal(all_names, ['Sheet1', 'Sheet2', 'Sheet3'])
class TestRange:
def setUp(self):
# Connect to test file and make Sheet1 the active sheet
xl_file1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_range_1.xlsx')
self.wb = Workbook(xl_file1, app_visible=False, app_target=APP_TARGET)
Sheet('Sheet1').activate()
def tearDown(self):
class_teardown(self.wb)
def test_cell(self):
params = [('A1', 22),
((1,1), 22),
('A1', 22.2222),
((1,1), 22.2222),
('A1', 'Test String'),
((1,1), 'Test String'),
('A1', 'éöà'),
((1,1), 'éöà'),
('A2', test_date_1),
((2,1), test_date_1),
('A3', test_date_2),
((3,1), test_date_2)]
for param in params:
yield self.check_cell, param[0], param[1]
def check_cell(self, address, value):
# Active Sheet
Range(address).value = value
cell = Range(address).value
assert_equal(cell, value)
# SheetName
Range('Sheet2', address).value = value
cell = Range('Sheet2', address).value
assert_equal(cell, value)
# SheetIndex
Range(3, address).value = value
cell = Range(3, address).value
assert_equal(cell, value)
def test_range_address(self):
""" Style: Range('A1:C3') """
address = 'C1:E3'
# Active Sheet
Range(address[:2]).value = data # assign to starting cell only
cells = Range(address).value
assert_equal(cells, data)
# Sheetname
Range('Sheet2', address).value = data
cells = Range('Sheet2', address).value
assert_equal(cells, data)
# Sheetindex
Range(3, address).value = data
cells = Range(3, address).value
assert_equal(cells, data)
def test_range_index(self):
""" Style: Range((1,1), (3,3)) """
index1 = (1,3)
index2 = (3,5)
# Active Sheet
Range(index1, index2).value = data
cells = Range(index1, index2).value
assert_equal(cells, data)
# Sheetname
Range('Sheet2', index1, index2).value = data
cells = Range('Sheet2', index1, index2).value
assert_equal(cells, data)
# Sheetindex
Range(3, index1, index2).value = data
cells = Range(3, index1, index2).value
assert_equal(cells, data)
def test_named_range_value(self):
value = 22.222
# Active Sheet
Range('cell_sheet1').value = value
cells = Range('cell_sheet1').value
assert_equal(cells, value)
Range('range_sheet1').value = data
cells = Range('range_sheet1').value
assert_equal(cells, data)
# Sheetname
Range('Sheet2', 'cell_sheet2').value = value
cells = Range('Sheet2', 'cell_sheet2').value
assert_equal(cells, value)
Range('Sheet2', 'range_sheet2').value = data
cells = Range('Sheet2', 'range_sheet2').value
assert_equal(cells, data)
# Sheetindex
Range(3, 'cell_sheet3').value = value
cells = Range(3, 'cell_sheet3').value
assert_equal(cells, value)
Range(3, 'range_sheet3').value = data
cells = Range(3, 'range_sheet3').value
assert_equal(cells, data)
def test_array(self):
_skip_if_no_numpy()
# 1d array
Range('Sheet6', 'A1').value = array_1d
cells = Range('Sheet6', 'A1:D1', asarray=True).value
assert_array_equal(cells, array_1d)
# 2d array
Range('Sheet6', 'A4').value = array_2d
cells = Range('Sheet6', 'A4', asarray=True).table.value
assert_array_equal(cells, array_2d)
# 1d array (atleast_2d)
Range('Sheet6', 'A10').value = array_1d
cells = Range('Sheet6', 'A10:D10', asarray=True, atleast_2d=True).value
assert_array_equal(cells, np.atleast_2d(array_1d))
# 2d array (atleast_2d)
Range('Sheet6', 'A12').value = array_2d
cells = Range('Sheet6', 'A12', asarray=True, atleast_2d=True).table.value
assert_array_equal(cells, array_2d)
def sheet_ref(self):
Range(Sheet(1), 'A20').value = 123
assert_equal(Range(1, 'A20').value, 123)
Range(Sheet(1), (2,2), (4,4)).value = 321
assert_equal(Range(1, (2,2)).value, 321)
def test_vertical(self):
Range('Sheet4', 'A10').value = data
if sys.platform.startswith('win') and self.wb.xl_app.Version == '14.0':
Range('Sheet4', 'A12:B12').xl_range.NumberFormat = 'dd/mm/yyyy' # Hack for Excel 2010 bug, see GH #43
cells = Range('Sheet4', 'A10').vertical.value
assert_equal(cells, [row[0] for row in data])
def test_horizontal(self):
Range('Sheet4', 'A20').value = data
cells = Range('Sheet4', 'A20').horizontal.value
assert_equal(cells, data[0])
def test_table(self):
Range('Sheet4', 'A1').value = data
if sys.platform.startswith('win') and self.wb.xl_app.Version == '14.0':
Range('Sheet4', 'A3:B3').xl_range.NumberFormat = 'dd/mm/yyyy' # Hack for Excel 2010 bug, see GH #43
cells = Range('Sheet4', 'A1').table.value
assert_equal(cells, data)
def test_list(self):
# 1d List Row
Range('Sheet4', 'A27').value = list_row_1d
cells = Range('Sheet4', 'A27:C27').value
assert_equal(list_row_1d, cells)
# 2d List Row
Range('Sheet4', 'A29').value = list_row_2d
cells = Range('Sheet4', 'A29:C29', atleast_2d=True).value
assert_equal(list_row_2d, cells)
# 1d List Col
Range('Sheet4', 'A31').value = list_col
cells = Range('Sheet4', 'A31:A33').value
assert_equal([i[0] for i in list_col], cells)
# 2d List Col
cells = Range('Sheet4', 'A31:A33', atleast_2d=True).value
assert_equal(list_col, cells)
def test_is_cell(self):
assert_equal(Range('A1').is_cell(), True)
assert_equal(Range('A1:B1').is_cell(), False)
assert_equal(Range('A1:A2').is_cell(), False)
assert_equal(Range('A1:B2').is_cell(), False)
def test_is_row(self):
assert_equal(Range('A1').is_row(), False)
assert_equal(Range('A1:B1').is_row(), True)
assert_equal(Range('A1:A2').is_row(), False)
assert_equal(Range('A1:B2').is_row(), False)
def test_is_column(self):
assert_equal(Range('A1').is_column(), False)
assert_equal(Range('A1:B1').is_column(), False)
assert_equal(Range('A1:A2').is_column(), True)
assert_equal(Range('A1:B2').is_column(), False)
def test_is_table(self):
assert_equal(Range('A1').is_table(), False)
assert_equal(Range('A1:B1').is_table(), False)
assert_equal(Range('A1:A2').is_table(), False)
assert_equal(Range('A1:B2').is_table(), True)
def test_formula(self):
Range('A1').formula = '=SUM(A2:A10)'
assert_equal(Range('A1').formula, '=SUM(A2:A10)')
def test_current_region(self):
values = [[1.,2.],[3.,4.]]
Range('A20').value = values
assert_equal(Range('B21').current_region.value, values)
def test_clear_content(self):
Range('Sheet4', 'G1').value = 22
Range('Sheet4', 'G1').clear_contents()
cell = Range('Sheet4', 'G1').value
assert_equal(cell, None)
def test_clear(self):
Range('Sheet4', 'G1').value = 22
Range('Sheet4', 'G1').clear()
cell = Range('Sheet4', 'G1').value
assert_equal(cell, None)
def test_dataframe_1(self):
_skip_if_no_pandas()
df_expected = df_1
Range('Sheet5', 'A1').value = df_expected
cells = Range('Sheet5', 'B1:C5').value
df_result = DataFrame(cells[1:], columns=cells[0])
assert_frame_equal(df_expected, df_result)
def test_dataframe_2(self):
""" Covers GH Issue #31"""
_skip_if_no_pandas()
df_expected = df_2
Range('Sheet5', 'A9').value = df_expected
cells = Range('Sheet5', 'B9:B15').value
df_result = DataFrame(cells[1:], columns=[cells[0]])
assert_frame_equal(df_expected, df_result)
def test_dataframe_multiindex(self):
_skip_if_no_pandas()
df_expected = df_multiindex
Range('Sheet5', 'A20').value = df_expected
cells = Range('Sheet5', 'D20').table.value
multiindex = Range('Sheet5', 'A20:C28').value
ix = pd.MultiIndex.from_tuples(multiindex[1:], names=multiindex[0])
df_result = DataFrame(cells[1:], columns=cells[0], index=ix)
assert_frame_equal(df_expected, df_result)
def test_dataframe_multiheader(self):
_skip_if_no_pandas()
df_expected = df_multiheader
Range('Sheet5', 'A52').value = df_expected
cells = Range('Sheet5', 'B52').table.value
df_result = DataFrame(cells[2:], columns=pd.MultiIndex.from_arrays(cells[:2]))
assert_frame_equal(df_expected, df_result)
def test_dataframe_dateindex(self):
_skip_if_no_pandas()
df_expected = df_dateindex
Range('Sheet5', 'A100').value = df_expected
if sys.platform.startswith('win') and self.wb.xl_app.Version == '14.0':
Range('Sheet5', 'A100').vertical.xl_range.NumberFormat = 'dd/mm/yyyy' # Hack for Excel 2010 bug, see GH #43
cells = Range('Sheet5', 'B100').table.value
index = Range('Sheet5', 'A101').vertical.value
df_result = DataFrame(cells[1:], index=index, columns=cells[0])
assert_frame_equal(df_expected, df_result)
def test_series_1(self):
_skip_if_no_pandas()
series_expected = series_1
Range('Sheet5', 'A32').value = series_expected
cells = Range('Sheet5', 'B32:B37').value
series_result = Series(cells)
assert_series_equal(series_expected, series_result)
def test_timeseries_1(self):
_skip_if_no_pandas()
series_expected = timeseries_1
Range('Sheet5', 'A40').value = series_expected
if sys.platform.startswith('win') and self.wb.xl_app.Version == '14.0':
Range('Sheet5', 'A40').vertical.xl_range.NumberFormat = 'dd/mm/yyyy' # Hack for Excel 2010 bug, see GH #43
cells = Range('Sheet5', 'B40:B49').value
date_index = Range('Sheet5', 'A40:A49').value
series_result = Series(cells, index=date_index)
assert_series_equal(series_expected, series_result)
def test_none(self):
""" Covers GH Issue #16"""
# None
Range('Sheet1', 'A7').value = None
assert_equal(None, Range('Sheet1', 'A7').value)
# List
Range('Sheet1', 'A7').value = [None, None]
assert_equal(None, Range('Sheet1', 'A7').horizontal.value)
def test_scalar_nan(self):
"""Covers GH Issue #15"""
_skip_if_no_numpy()
Range('Sheet1', 'A20').value = np.nan
assert_equal(None, Range('Sheet1', 'A20').value)
def test_atleast_2d_scalar(self):
"""Covers GH Issue #53a"""
Range('Sheet1', 'A50').value = 23
result = Range('Sheet1', 'A50', atleast_2d=True).value
assert_equal([[23]], result)
def test_atleast_2d_scalar_as_array(self):
"""Covers GH Issue #53b"""
_skip_if_no_numpy()
Range('Sheet1', 'A50').value = 23
result = Range('Sheet1', 'A50', atleast_2d=True, asarray=True).value
assert_equal(np.array([[23]]), result)
def test_column_width(self):
Range('Sheet1', 'A1:B2').column_width = 10.0
result = Range('Sheet1', 'A1').column_width
assert_equal(10.0, result)
Range('Sheet1', 'A1:B2').value = 'ensure cells are used'
Range('Sheet1', 'B2').column_width = 20.0
result = Range('Sheet1', 'A1:B2').column_width
if sys.platform.startswith('win'):
assert_equal(None, result)
else:
assert_equal(kw.missing_value, result)
def test_row_height(self):
Range('Sheet1', 'A1:B2').row_height = 15.0
result = Range('Sheet1', 'A1').row_height
assert_equal(15.0, result)
Range('Sheet1', 'A1:B2').value = 'ensure cells are used'
Range('Sheet1', 'B2').row_height = 20.0
result = Range('Sheet1', 'A1:B2').row_height
if sys.platform.startswith('win'):
assert_equal(None, result)
else:
assert_equal(kw.missing_value, result)
def test_width(self):
"""Width depends on default style text size, so do not test absolute widths"""
Range('Sheet1', 'A1:D4').column_width = 10.0
result_before = Range('Sheet1', 'A1').width
Range('Sheet1', 'A1:D4').column_width = 12.0
result_after = Range('Sheet1', 'A1').width
assert_true(result_after > result_before)
def test_height(self):
Range('Sheet1', 'A1:D4').row_height = 60.0
result = Range('Sheet1', 'A1:D4').height
assert_equal(240.0, result)
def test_autofit_range(self):
# TODO: compare col/row widths before/after - not implemented yet
Range('Sheet1', 'A1:D4').value = 'test_string'
Range('Sheet1', 'A1:D4').autofit()
Range('Sheet1', 'A1:D4').autofit('r')
Range('Sheet1', 'A1:D4').autofit('c')
Range('Sheet1', 'A1:D4').autofit('rows')
Range('Sheet1', 'A1:D4').autofit('columns')
def test_autofit_col(self):
# TODO: compare col/row widths before/after - not implemented yet
Range('Sheet1', 'A1:D4').value = 'test_string'
Range('Sheet1', 'A:D').autofit()
Range('Sheet1', 'A:D').autofit('r')
Range('Sheet1', 'A:D').autofit('c')
Range('Sheet1', 'A:D').autofit('rows')
Range('Sheet1', 'A:D').autofit('columns')
def test_autofit_row(self):
# TODO: compare col/row widths before/after - not implemented yet
Range('Sheet1', 'A1:D4').value = 'test_string'
Range('Sheet1', '1:1000000').autofit()
Range('Sheet1', '1:1000000').autofit('r')
Range('Sheet1', '1:1000000').autofit('c')
Range('Sheet1', '1:1000000').autofit('rows')
Range('Sheet1', '1:1000000').autofit('columns')
def test_number_format_cell(self):
format_string = "mm/dd/yy;@"
Range('Sheet1', 'A1').number_format = format_string
result = Range('Sheet1', 'A1').number_format
assert_equal(format_string, result)
def test_number_format_range(self):
format_string = "mm/dd/yy;@"
Range('Sheet1', 'A1:D4').number_format = format_string
result = Range('Sheet1', 'A1:D4').number_format
assert_equal(format_string, result)
def test_get_address(self):
res = Range((1,1),(3,3)).get_address()
assert_equal(res, '$A$1:$C$3')
res = Range((1,1),(3,3)).get_address(False)
assert_equal(res, '$A1:$C3')
res = Range((1,1),(3,3)).get_address(True, False)
assert_equal(res, 'A$1:C$3')
res = Range((1,1),(3,3)).get_address(False, False)
assert_equal(res, 'A1:C3')
res = Range((1,1),(3,3)).get_address(include_sheetname=True)
assert_equal(res, 'Sheet1!$A$1:$C$3')
res = Range('Sheet2', (1,1),(3,3)).get_address(include_sheetname=True)
assert_equal(res, 'Sheet2!$A$1:$C$3')
res = Range((1,1),(3,3)).get_address(external=True)
assert_equal(res, '[test_range_1.xlsx]Sheet1!$A$1:$C$3')
def test_hyperlink(self):
address = 'www.xlwings.org'
# Naked address
Range('A1').add_hyperlink(address)
assert_equal(Range('A1').value, address)
hyperlink = Range('A1').hyperlink
if not hyperlink.endswith('/'):
hyperlink += '/'
assert_equal(hyperlink, 'http://' + address + '/')
# Address + FriendlyName
Range('A2').add_hyperlink(address, 'test_link')
assert_equal(Range('A2').value, 'test_link')
hyperlink = Range('A2').hyperlink
if not hyperlink.endswith('/'):
hyperlink += '/'
assert_equal(hyperlink, 'http://' + address + '/')
def test_hyperlink_formula(self):
Range('B10').formula = '=HYPERLINK("http://xlwings.org", "xlwings")'
assert_equal(Range('B10').hyperlink, 'http://xlwings.org')
def test_color(self):
rgb = (30, 100, 200)
Range('A1').color = rgb
assert_equal(rgb, Range('A1').color)
Range('A2').color = RgbColor.rgbAqua
assert_equal((0, 255, 255), Range('A2').color)
Range('A2').color = None
assert_equal(Range('A2').color, None)
Range('A1:D4').color = rgb
assert_equal(rgb, Range('A1:D4').color)
def test_size(self):
assert_equal(Range('A1:C4').size, 12)
def test_shape(self):
assert_equal(Range('A1:C4').shape, (4, 3))
def test_len(self):
assert_equal(len(Range('A1:C4')), 4)
def test_iterator(self):
Range('A20').value = [[1., 2.], [3., 4.]]
l = []
for i in Range('A20:B21'):
l.append(i.value)
assert_equal(l, [1., 2., 3., 4.])
Range('Sheet2', 'A20').value = [[1., 2.], [3., 4.]]
l = []
for i in Range('Sheet2', 'A20:B21'):
l.append(i.value)
assert_equal(l, [1., 2., 3., 4.])
def test_resize(self):
r = Range('A1').resize(4, 5)
assert_equal(r.shape, (4, 5))
r = Range('A1').resize(row_size=4)
assert_equal(r.shape, (4, 1))
r = Range('A1:B4').resize(column_size=5)
assert_equal(r.shape, (1, 5))
def test_offset(self):
o = Range('A1:B3').offset(3, 4)
assert_equal(o.get_address(), '$E$4:$F$6')
o = Range('A1:B3').offset(row_offset=3)
assert_equal(o.get_address(), '$A$4:$B$6')
o = Range('A1:B3').offset(column_offset=4)
assert_equal(o.get_address(), '$E$1:$F$3')
def test_date(self):
date_1 = date(2000, 12, 3)
Range('X1').value = date_1
date_2 = Range('X1').value
assert_equal(date_1, date(date_2.year, date_2.month, date_2.day))
def test_row(self):
assert_equal(Range('B3:F5').row, 3)
def test_column(self):
assert_equal(Range('B3:F5').column, 2)
def test_last_cell(self):
assert_equal(Range('B3:F5').last_cell.row, 5)
assert_equal(Range('B3:F5').last_cell.column, 6)
def test_get_set_named_range(self):
Range('A100').name = 'test1'
assert_equal(Range('A100').name, 'test1')
Range('A200:B204').name = 'test2'
assert_equal(Range('A200:B204').name, 'test2')
def test_integers(self):
"""Covers GH 227"""
Range('A99').value = 2147483647 # max SInt32
assert_equal(Range('A99').value, 2147483647)
Range('A100').value = 2147483648 # SInt32 < x < SInt64
assert_equal(Range('A100').value, 2147483648)
Range('A101').value = 10000000000000000000 # long
assert_equal(Range('A101').value, 10000000000000000000)
def test_numpy_datetime(self):
_skip_if_no_numpy()
Range('A55').value = np.datetime64('2005-02-25T03:30Z')
assert_equal(Range('A55').value, datetime(2005, 2, 25, 3, 30))
def test_dataframe_timezone(self):
_skip_if_no_pandas()
dt = np.datetime64(1434149887000, 'ms')
ix = pd.DatetimeIndex(data=[dt], tz='GMT')
df = pd.DataFrame(data=[1], index=ix, columns=['A'])
Range('A1').value = df
assert_equal(Range('A2').value, datetime(2015, 6, 12, 22, 58, 7))
def test_datetime_timezone(self):
eastern = pytz.timezone('US/Eastern')
dt_naive = datetime(2002, 10, 27, 6, 0, 0)
dt_tz = eastern.localize(dt_naive)
Range('F34').value = dt_tz
assert_equal(Range('F34').value, dt_naive)
class TestChart:
def setUp(self):
# Connect to test file and make Sheet1 the active sheet
xl_file1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_chart_1.xlsx')
self.wb = Workbook(xl_file1, app_visible=False, app_target=APP_TARGET)
Sheet('Sheet1').activate()
def tearDown(self):
class_teardown(self.wb)
def test_add_keywords(self):
name = 'My Chart'
chart_type = ChartType.xlLine
Range('A1').value = chart_data
chart = Chart.add(chart_type=chart_type, name=name, source_data=Range('A1').table)
chart_actual = Chart(name)
name_actual = chart_actual.name
chart_type_actual = chart_actual.chart_type
assert_equal(name, name_actual)
if sys.platform.startswith('win'):
assert_equal(chart_type, chart_type_actual)
else:
assert_equal(kw.line_chart, chart_type_actual)
def test_add_properties(self):
name = 'My Chart'
chart_type = ChartType.xlLine
Range('Sheet2', 'A1').value = chart_data
chart = Chart.add('Sheet2')
chart.chart_type = chart_type
chart.name = name
chart.set_source_data(Range('Sheet2', 'A1').table)
chart_actual = Chart('Sheet2', name)
name_actual = chart_actual.name
chart_type_actual = chart_actual.chart_type
assert_equal(name, name_actual)
if sys.platform.startswith('win'):
assert_equal(chart_type, chart_type_actual)
else:
assert_equal(kw.line_chart, chart_type_actual)
if __name__ == '__main__':
nose.main()
| apache-2.0 | 8,551,476,815,334,439,000 | 33.782341 | 142 | 0.574001 | false | 3.208144 | true | false | false |
Zomboided/VPN-Manager | resetVPN.py | 1 | 4136 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Zomboided
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This module resets all VPN connections
import xbmc
import xbmcgui
import xbmcaddon
from libs.common import resetVPNConnections, stopService, startService, DIALOG_SPEED, getVPNRequestedProfile, setAPICommand
from libs.utility import debugTrace, errorTrace, infoTrace, newPrint, getID, getName
debugTrace("-- Entered resetVPN.py --")
if not getID() == "":
# Get info about the addon that this script is pretending to be attached to
addon = xbmcaddon.Addon(getID())
addon_name = getName()
success = True
# Reset the VPN connection values stored in the settings.xml
if xbmcgui.Dialog().yesno(addon_name, "Updating the VPN settings will reset all VPN connections. Connections must be re-validated before use.\nContinue?"):
# Display dialog to show what's going on
progress = xbmcgui.DialogProgress()
progress_title = "Resetting VPN connections"
progress.create(addon_name,progress_title)
if not getVPNRequestedProfile() == "":
progress.close()
xbmcgui.Dialog().ok(addon_name, "Connection to VPN being attempted and will be aborted. Try again in a few seconds.")
setAPICommand("Disconnect")
success = False
if success:
# Stop the VPN monitor
xbmc.sleep(100)
progress.update(0, progress_title, "Pausing VPN monitor...")
xbmc.sleep(100)
if not stopService():
progress.close()
# Display error result in an ok dialog
errorTrace("resetVPN.py", "VPN monitor service is not running, can't reset VPNs")
xbmcgui.Dialog().ok(progress_title, "Error, Service not running. Check log and re-enable.")
success = False
# Disconnect and reset all connections
if success:
progress.update(20, progress_title, "VPN monitor paused")
xbmc.sleep(DIALOG_SPEED)
progress.update(40, progress_title, "Stopping any active VPN connection...")
xbmc.sleep(100)
resetVPNConnections(addon)
# Reset any validated values
addon.setSetting("vpn_provider_validated", "")
addon.setSetting("vpn_username_validated", "")
addon.setSetting("vpn_password_validated", "")
# Restart the VPN monitor
if success:
progress.update(60, progress_title, "VPN connections have been reset")
xbmc.sleep(DIALOG_SPEED)
progress.update(80, progress_title, "Restarting VPN monitor...")
xbmc.sleep(100)
if not startService():
progress.close()
errorTrace("resetVPN.py", "VPN monitor service is not running, connections have been reset")
xbmcgui.Dialog().ok(progress_title, "Error, cannot restart service. Check log and re-enable.")
success = False
else:
# Close out the final progress dialog
progress.update(100, progress_title, "VPN monitor restarted")
xbmc.sleep(DIALOG_SPEED)
progress.close()
command = "Addon.OpenSettings(" + getID() + ")"
xbmc.executebuiltin(command)
else:
errorTrace("resetVPN.py", "VPN service is not ready")
debugTrace("-- Exit resetVPN.py --") | gpl-2.0 | 245,625,491,115,489,120 | 43.010638 | 160 | 0.629594 | false | 4.358272 | false | false | false |
jskDr/keraspp | old/gan_cnn_mse.py | 1 | 6490 | ################################
# Import common packages
################################
from keras.datasets import mnist
import numpy as np
from PIL import Image
import math
import os
import keras.backend as K
K.set_image_data_format('channels_first')
print(K.image_data_format())
################################
# GAN modeling
################################
from keras import models, layers, optimizers
def mean_squared_error(y_true, y_pred):
return K.mean(K.square(y_pred - y_true), axis=(1,2,3))
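# Editor's note: averaging over axes (1, 2, 3) (channel, height, width) yields
# one MSE value per sample in the batch rather than a single scalar loss.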
class GAN(models.Sequential):
def __init__(self, input_dim=64):
"""
self, self.generator, self.discriminator are all models
"""
super().__init__()
self.input_dim = input_dim
self.generator = self.GENERATOR()
self.discriminator = self.DISCRIMINATOR()
self.add(self.generator)
self.discriminator.trainable = False
self.add(self.discriminator)
self.compile_all()
def compile_all(self):
# Compiling stage
d_optim = optimizers.SGD(lr=0.0005, momentum=0.9, nesterov=True)
g_optim = optimizers.SGD(lr=0.0005, momentum=0.9, nesterov=True)
self.generator.compile(loss=mean_squared_error, optimizer="SGD")
self.compile(loss='binary_crossentropy', optimizer=g_optim)
self.discriminator.trainable = True
self.discriminator.compile(loss='binary_crossentropy', optimizer=d_optim)
def GENERATOR(self):
input_dim = self.input_dim
model = models.Sequential()
model.add(layers.Dense(1024, activation='tanh', input_dim=input_dim))
model.add(layers.Dense(128 * 7 * 7, activation='tanh'))
model.add(layers.BatchNormalization())
model.add(layers.Reshape((128, 7, 7), input_shape=(128 * 7 * 7,)))
model.add(layers.UpSampling2D(size=(2, 2)))
model.add(layers.Conv2D(64, (5, 5), padding='same', activation='tanh'))
model.add(layers.UpSampling2D(size=(2, 2)))
model.add(layers.Conv2D(1, (5, 5), padding='same', activation='tanh'))
return model
def DISCRIMINATOR(self):
model = models.Sequential()
model.add(layers.Conv2D(64, (5, 5), padding='same', activation='tanh',
input_shape=(1, 28, 28)))
model.add(layers.MaxPooling2D(pool_size=(2, 2)))
model.add(layers.Conv2D(128, (5, 5), activation='tanh'))
model.add(layers.MaxPooling2D(pool_size=(2, 2)))
model.add(layers.Flatten())
model.add(layers.Dense(1024, activation='tanh'))
model.add(layers.Dense(1, activation='sigmoid'))
return model
def get_z(self, ln):
input_dim = self.input_dim
return np.random.uniform(-1, 1, (ln, input_dim))
def train_both(self, x):
ln = x.shape[0]
# First trial for training discriminator
z = self.get_z(ln)
w = self.generator.predict(z, verbose=0)
xw = np.concatenate((x, w))
y2 = [1] * ln + [0] * ln
d_loss = self.discriminator.train_on_batch(xw, y2)
# Second trial for training generator
z = self.get_z(ln)
self.discriminator.trainable = False
g_loss = self.train_on_batch(z, [1] * ln)
self.discriminator.trainable = True
return d_loss, g_loss
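# Note (added commentary, not part of the original source): train_both() above
# performs one alternating GAN update -- the discriminator is first fit on a
# batch of real images plus an equal number of generated samples (labels 1/0),
# then the stacked model is trained on fresh noise with the discriminator
# frozen so only the generator weights move. A minimal usage sketch, where
# `x_batch` is a hypothetical (N, 1, 28, 28) array scaled to [-1, 1]:
#
#     gan = GAN(input_dim=64)
#     d_loss, g_loss = gan.train_both(x_batch)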
################################
# Train the GAN
################################
def combine_images(generated_images):
num = generated_images.shape[0]
width = int(math.sqrt(num))
height = int(math.ceil(float(num) / width))
shape = generated_images.shape[2:]
image = np.zeros((height * shape[0], width * shape[1]),
dtype=generated_images.dtype)
for index, img in enumerate(generated_images):
i = int(index / width)
j = index % width
image[i * shape[0]:(i + 1) * shape[0],
j * shape[1]:(j + 1) * shape[1]] = img[0, :, :]
return image
def get_x(X_train, index, BATCH_SIZE):
return X_train[index * BATCH_SIZE:(index + 1) * BATCH_SIZE]
def save_images(generated_images, output_fold, epoch, index):
image = combine_images(generated_images)
image = image * 127.5 + 127.5
Image.fromarray(image.astype(np.uint8)).save(
output_fold + '/' +
str(epoch) + "_" + str(index) + ".png")
def load_data():
(X_train, y_train), (_, _) = mnist.load_data()
return X_train[:10]
def train(args):
BATCH_SIZE = args.batch_size
epochs = args.epochs
output_fold = args.output_fold
input_dim = args.input_dim
os.makedirs(output_fold, exist_ok=True)
print('Output_fold is', output_fold)
X_train = load_data()
X_train = (X_train.astype(np.float32) - 127.5) / 127.5
X_train = X_train.reshape((X_train.shape[0], 1) + X_train.shape[1:])
gan = GAN(input_dim)
d_loss_ll = []
g_loss_ll = []
for epoch in range(epochs):
print("Epoch is", epoch)
print("Number of batches", int(X_train.shape[0] / BATCH_SIZE))
d_loss_l = []
g_loss_l = []
for index in range(int(X_train.shape[0] / BATCH_SIZE)):
x = get_x(X_train, index, BATCH_SIZE)
d_loss, g_loss = gan.train_both(x)
d_loss_l.append(d_loss)
g_loss_l.append(g_loss)
if epoch % 10 == 0 or epoch == epochs - 1:
z = gan.get_z(x.shape[0])
w = gan.generator.predict(z, verbose=0)
save_images(w, output_fold, epoch, 0)
d_loss_ll.append(d_loss_l)
g_loss_ll.append(g_loss_l)
gan.generator.save_weights(output_fold + '/' + 'generator', True)
gan.discriminator.save_weights(output_fold + '/' + 'discriminator', True)
np.savetxt(output_fold + '/' + 'd_loss', d_loss_ll)
np.savetxt(output_fold + '/' + 'g_loss', g_loss_ll)
################################
# Run the GAN example
################################
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', type=int, default=2,
help='Batch size for the networks')
parser.add_argument('--epochs', type=int, default=1000,
help='Epochs for the networks')
parser.add_argument('--output_fold', type=str, default='GAN_OUT',
help='Output fold to save the results')
parser.add_argument('--input_dim', type=int, default=2,
help='Input dimension for the generator.')
args = parser.parse_args()
train(args)
if __name__ == '__main__':
main() | mit | 1,528,142,904,994,182,000 | 30.758621 | 81 | 0.57431 | false | 3.25063 | false | false | false |
StrongBoy998/ihome-project | Tornado_Project/utils/image_storage.py | 1 | 1089 | # -*- coding: utf-8 -*-
import qiniu.config
import logging
from qiniu import Auth, put_data, etag, urlsafe_base64_encode
# Fill in your Access Key and Secret Key here
access_key = 'btZDjv_qjI4O1P5-KKeaZXBGLJcM-AZfigN8HjQf'
secret_key = 'j2Sgq1Pz-1O90OoFSKr24Xa80mVWqzpqbo-byiN5'
# Target bucket for uploads
bucket_name = 'ihome'
def storage(data):
"""
Upload-file interface for Qiniu cloud storage
"""
if not data:
return None
try:
# Build the authentication object
q = Auth(access_key, secret_key)
# Generate an upload token; an expiration time etc. can be specified
token = q.upload_token(bucket_name)
ret, info = put_data(token, None, data)
except Exception as e:
logging.error(e)
raise Exception("上传文件到七牛错误")
if info and info.status_code != 200:
raise Exception("上传文件到七牛错误")
return ret["key"]
if __name__ == '__main__':
file_name = raw_input("输入上传的文件")
file = open(file_name, 'rb')
data = file.read()
key = storage(data)
print key
file.close()
| apache-2.0 | -76,605,809,266,359,420 | 18.408163 | 61 | 0.615142 | false | 2.389447 | false | false | false |
fapable/pygameproject2 | Menu/Opties.py | 1 | 1096 | import pygame
import Game as dm
def opties():
pygame.mixer.music.load("music.mp3")
pygame.mixer.music.play(loops=0, start=0.0)
size = width, height = 340,240
screen = pygame.display.set_mode(size)
redSquare = pygame.image.load('achtergrondSpelers.png').convert()
white = (255,255,255)
w = 700
h = 420
screen = pygame.display.set_mode((w, h))
screen.fill((white))
x = 0
y = 0
screen.blit(redSquare, (x, y))
pygame.display.flip()
black = (0,0,0)
pygame.mixer.music.load("music.mp3")
pygame.mixer.music.play(loops=0, start=0.0)
choose = dm.dumbmenu(screen,[
'Geluid uit',
'Taal wijzigen -> Engels',
'Terug naar het menu']
, 180,150,None,35,1.4,black ,black)
if choose == 0:
print ("Je hebt gekozen voor Geluid uit'.")
elif choose == 1:
print ("Je hebt gekozen voor Taal wijzigen naar het engels''.")
elif choose == 2:
print( "Je hebt gekozen voor Terug naar het men'.")
| apache-2.0 | -6,016,865,642,873,361,000 | 27.102564 | 71 | 0.556569 | false | 3.140401 | false | false | false |
OnroerendErfgoed/pyramid_urireferencer | tests/test_renderers.py | 1 | 2787 | # -*- coding: utf-8 -*-
from pyramid_urireferencer.models import (
RegistryResponse,
ApplicationResponse,
Item
)
class TestRenderers:
def test_empty_registry_renderer(self):
rr = RegistryResponse('http://id.example.org/foo/1', True, False, 0, [])
from pyramid_urireferencer.renderers import registry_adapter
r = registry_adapter(rr, {})
assert r['query_uri'] == 'http://id.example.org/foo/1'
assert r['success']
assert not r['has_references']
assert r['count'] == 0
assert len(r['applications']) == 0
def test_registry_renderer_one_app_no_results(self):
ar = ApplicationResponse(
'My app',
'http://something.example.org',
'http://somethingelse.example.org',
True,
False,
0,
[]
)
rr = RegistryResponse('http://id.example.org/foo/1', True, False, 0, [ar])
from pyramid_urireferencer.renderers import registry_adapter
r = registry_adapter(rr, {})
assert r['query_uri'] == 'http://id.example.org/foo/1'
assert r['success']
assert not r['has_references']
assert r['count'] == 0
assert len(r['applications']) == 1
assert 'title' in r['applications'][0]
def test_empty_application_renderer(self):
ar = ApplicationResponse(
'My app',
'http://something.example.org',
'http://somethingelse.example.org/references',
True,
False,
0,
[]
)
from pyramid_urireferencer.renderers import application_adapter
r = application_adapter(ar, {})
assert r['uri'] == 'http://something.example.org'
assert r['service_url'] == 'http://somethingelse.example.org/references'
assert r['success']
assert not r['has_references']
assert r['count'] == 0
assert len(r['items']) == 0
def test_application_renderer_one_item(self):
ar = ApplicationResponse(
'My app',
'http://something.example.org',
'http://somethingelse.example.org/references',
True,
False,
0,
[Item('http://something.example.org/thingy/thing', 'My item')]
)
from pyramid_urireferencer.renderers import application_adapter
r = application_adapter(ar, {})
assert r['uri'] == 'http://something.example.org'
assert r['service_url'] == 'http://somethingelse.example.org/references'
assert r['success']
assert not r['has_references']
assert r['count'] == 0
assert len(r['items']) == 1
assert 'title' in r['items'][0]
assert 'uri' in r['items'][0]
| mit | 6,088,043,973,341,636,000 | 33.8375 | 82 | 0.558306 | false | 3.936441 | false | false | false |
torehc/sinucha | web/sinucha/control/models.py | 1 | 5334 | from django.db import models
from django.contrib.auth.models import User
import datetime
from django.db.models.signals import post_save
from django.dispatch import receiver
class User_Data(models.Model):
USER = 'user'
ADMIN = 'admin'
ROL_CHOICES = (
(USER, 'USER'),
(ADMIN, 'ADMIN'),
)
tagRfid = models.CharField(max_length=64, blank=False)
chatid = models.DecimalField(max_digits=12, decimal_places=0)
balance_actual = models.DecimalField(max_digits=5, decimal_places=2, default=0)
username = models.CharField(max_length=20, blank=True)
user = models.ForeignKey(User, null=True, blank=True)
rol = models.CharField(max_length=5,
choices=ROL_CHOICES,
default=USER,
blank=False,
)
def __str__(self):
return '{}'.format(self.username)
@staticmethod
def check_user_chatid(chatid):
if (User_Data.objects.filter(chatid=chatid)):
return True
else:
return False
@staticmethod
def register_user(chatid, rfid):
if(User_Data.check_user_chatid(chatid)):
return False
else:
create_user = User_Data.objects.create(
tagRfid = rfid,
chatid = chatid,
)
return True
@staticmethod
def check_user_balance(rfid,barcode):
user = User_Data.objects.get(tagRfid=rfid)
item = Item.objects.get(barcode=barcode)
if(user.balance_actual >= item.price_sale):
user.balance_actual -= item.price_sale
user.save()
item.stock = (item.stock)-1
item.save()
Sale_History.create_sale(item,user)
return True
else:
return False
class Balance(models.Model):
CASH = 'cash'
PAYPAL = 'PayPal'
TYPE_PAYMENT = (
(CASH, 'CASH'),
(PAYPAL, 'PAYPAL'),
)
user = models.ForeignKey(User_Data, blank=False)
amount_entered = models.DecimalField(max_digits=5, decimal_places=2, default=0, blank=False)
type_amount = models.CharField(max_length=6,
choices=TYPE_PAYMENT,
default=CASH,
blank=False,
)
date = models.DateTimeField(default=datetime.datetime.now)  # pass the callable so the default is evaluated per row
def __str__(self):
return '{}: +{}'.format(self.user, self.amount_entered)
class Item(models.Model):
name = models.CharField(max_length=30, blank=False)
barcode = models.CharField(max_length=30, blank=False)
price_sale = models.DecimalField(max_digits=5, decimal_places=2, default=0)
stock = models.IntegerField(blank=False, default=0)
def __str__(self):
return '{}'.format(self.name)
class Shopping_History(models.Model):
MERCADONA = 'mercadona'
LIDL = 'lidl'
OTRO = 'otro'
TYPE_SUPERMARKET = (
(MERCADONA, 'MERCADONA'),
(LIDL, 'LIDL'),
(OTRO, 'OTRO'),
)
item = models.ForeignKey(Item, blank=False)
date = models.DateTimeField(default=datetime.datetime.now)  # pass the callable so the default is evaluated per row
units = models.IntegerField(default=0)
unit_purchase_price = models.DecimalField(max_digits=5, decimal_places=2, default=0)
supermarket = models.CharField(max_length=9,
choices=TYPE_SUPERMARKET,
default=OTRO,
blank=False,
)
def __str__(self):
return '{} - {}'.format(self.item, self.date)
class Sale_History(models.Model):
item = models.ForeignKey(Item, blank=False)
user = models.ForeignKey(User_Data, blank=False)
date = models.DateTimeField(default=datetime.datetime.now)  # pass the callable so the default is evaluated per row
price_sale = models.DecimalField(max_digits=5, decimal_places=2, default=0)
price_cost = models.DecimalField(max_digits=5, decimal_places=2, default=0)
def __str__(self):
return '{}: {} - {}'.format(self.item, self.user, self.price_sale)
@staticmethod
def create_sale(item,user):
bought_item = Shopping_History.objects.filter(item=item).last()
create_sale = Sale_History.objects.create(
item=item,
user=user,
price_sale=item.price_sale,
price_cost=bought_item.unit_purchase_price,
)
@receiver(post_save, sender=Shopping_History, dispatch_uid="create_stock_item")
def create_stock(sender, instance, **kwargs):
object_product = Item.objects.get(id=instance.item.id)
object_product.stock += instance.units
object_product.save()
@receiver(post_save, sender=Balance, dispatch_uid="add_payment_user")
def update_user_balance(sender, instance, **kwargs):
user = User_Data.objects.get(id=instance.user.id)
user.balance_actual += instance.amount_entered
user.save()
#import pdb; pdb.set_trace() | gpl-3.0 | 2,278,878,880,838,755,800 | 30.382353 | 96 | 0.553056 | false | 3.939439 | false | false | false |
joequant/pyswagger | pyswagger/tests/v2_0/test_circular.py | 1 | 3163 | from pyswagger import SwaggerApp, utils, primitives, errs
from ..utils import get_test_data_folder
from ...scanner import CycleDetector
from ...scan import Scanner
import unittest
import os
import six
class CircularRefTestCase(unittest.TestCase):
""" test for circular reference guard """
def test_path_item_prepare_with_cycle(self):
app = SwaggerApp.load(get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'path_item')
))
# should raise nothing
app.prepare()
def test_path_item(self):
folder = get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'path_item')
)
def _pf(s):
return six.moves.urllib.parse.urlunparse((
'file',
'',
folder,
'',
'',
s))
app = SwaggerApp.create(folder)
s = Scanner(app)
c = CycleDetector()
s.scan(root=app.raw, route=[c])
self.assertEqual(sorted(c.cycles['path_item']), sorted([[
_pf('/paths/~1p1'),
_pf('/paths/~1p2'),
_pf('/paths/~1p3'),
_pf('/paths/~1p4'),
_pf('/paths/~1p1')
]]))
def test_schema(self):
folder = get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'schema')
)
def _pf(s):
return six.moves.urllib.parse.urlunparse((
'file',
'',
folder,
'',
'',
s))
app = SwaggerApp.load(folder)
app.prepare(strict=False)
s = Scanner(app)
c = CycleDetector()
s.scan(root=app.raw, route=[c])
self.maxDiff = None
self.assertEqual(sorted(c.cycles['schema']), sorted([
[_pf('/definitions/s10'), _pf('/definitions/s11'), _pf('/definitions/s9'), _pf('/definitions/s10')],
[_pf('/definitions/s5'), _pf('/definitions/s5')],
[_pf('/definitions/s1'), _pf('/definitions/s2'), _pf('/definitions/s3'), _pf('/definitions/s4'), _pf('/definitions/s1')],
[_pf('/definitions/s12'), _pf('/definitions/s13'), _pf('/definitions/s12')],
[_pf('/definitions/s6'), _pf('/definitions/s7'), _pf('/definitions/s6')],
[_pf('/definitions/s14'), _pf('/definitions/s15'), _pf('/definitions/s14')]
]))
def test_deref(self):
app = SwaggerApp.create(get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'schema'),
),
strict=False
)
s = app.resolve('#/definitions/s1')
self.assertRaises(errs.CycleDetectionError, utils.deref, s)
def test_primfactory(self):
app = SwaggerApp.create(get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'schema'),
),
strict=False
)
s = app.resolve('#/definitions/s1')
self.assertRaises(errs.CycleDetectionError, app.prim_factory.produce, s, {})
| mit | 7,400,816,715,443,850,000 | 30.009804 | 133 | 0.510907 | false | 3.824667 | true | false | false |
rocktavious/DevToolsLib | DTL/api/bases.py | 1 | 3894 | import inspect
#------------------------------------------------------------
#------------------------------------------------------------
class BaseStruct(object):
#------------------------------------------------------------
def __init__(self, *args, **kwds):
self.deserialize(*args, **kwds)
#------------------------------------------------------------
def _get_repr_format(self):
return r'{0}({1})'.format(type(self).__name__, self._get_init_params_format())
#------------------------------------------------------------
def _get_init_params_format(self):
param_format = ''
params = inspect.getargspec(self.deserialize)[0]
params_count = len(params[1:])
for i in range(params_count):
param_format += '{0}='.format(params[i+1]) +'{'+str(i)+'}'
if i != params_count-1:
param_format += ', '
return param_format
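# Illustrative note (added commentary, not part of the original source): for a
# subclass whose deserialize() is declared as `def deserialize(self, name, value)`,
# the method above builds the format string "name={0}, value={1}", which
# _get_repr_format() then wraps as "ClassName(name={0}, value={1})".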
#------------------------------------------------------------
def _get_repr(self):
try:
return self._get_repr_format().format(*self.serialize())
except:
return r'{0}({1})'.format(type(self).__name__, self.serialize())
#------------------------------------------------------------
def __str__(self):
return self._get_repr()
#------------------------------------------------------------
def __repr__(self):
return self._get_repr()
#------------------------------------------------------------
def __eq__(self, other):
if isinstance(other, type(self)) :
return self.serialize() == other.serialize()
else:
try:
coerced = self.__class__()
coerced.deserialize(other)
except:
return False
return self == coerced
#------------------------------------------------------------
def __ne__(self, other):
return not self.__eq__(other)
#------------------------------------------------------------
def add_quotes(self, data):
'''Convenience method to help in serialization of strings'''
return r"r'{0}'".format(data)
#------------------------------------------------------------
def serialize(self):
'''Returns the arg list in which deserialize can recreate this object'''
return (None,)
#------------------------------------------------------------
def deserialize(self, *args, **kwds):
'''If provided the info from serialize, this should be able to construct the object.
deserialize must provide all of the args for the spec because the format is pulled from this function'''
pass
#------------------------------------------------------------
#------------------------------------------------------------
class BaseDict(BaseStruct, dict):
#------------------------------------------------------------
def __init__(self, *args, **kwds):
super(BaseDict, self).__init__(*args, **kwds)
#------------------------------------------------------------
def _set_data(self, datadict):
for key, value in datadict.items():
self.__setitem__(key, value)
#------------------------------------------------------------
def set_default(self, default={}):
'''Allows the user to specify default values that should appear in the data'''
for key, value in default.items():
if not self.has_key(key):
self.__setitem__(key, eval(value))
#------------------------------------------------------------
def serialize(self):
return (dict(self),)
#------------------------------------------------------------
def deserialize(self, datadict={}):
self._set_data(datadict=datadict)
| mit | -2,309,170,053,516,535,000 | 38.734694 | 112 | 0.362866 | false | 5.760355 | false | false | false |
aristanetworks/arista-ovs-nova | nova/utils.py | 1 | 38397 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import datetime
import errno
import functools
import hashlib
import inspect
import os
import pyclbr
import random
import re
import shlex
import shutil
import signal
import socket
import struct
import sys
import tempfile
import time
import weakref
from xml.sax import saxutils
from eventlet import event
from eventlet.green import subprocess
from eventlet import greenthread
from eventlet import semaphore
import netaddr
from nova import exception
from nova.openstack.common import cfg
from nova.openstack.common import excutils
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opt(
cfg.BoolOpt('disable_process_locking', default=False,
help='Whether to disable inter-process locks'))
CONF.import_opt('glance_host', 'nova.config')
CONF.import_opt('glance_port', 'nova.config')
CONF.import_opt('glance_protocol', 'nova.config')
CONF.import_opt('instance_usage_audit_period', 'nova.config')
CONF.import_opt('monkey_patch', 'nova.config')
CONF.import_opt('rootwrap_config', 'nova.config')
CONF.import_opt('service_down_time', 'nova.config')
# Used for looking up extensions of text
# to their 'multiplied' byte amount
BYTE_MULTIPLIERS = {
'': 1,
't': 1024 ** 4,
'g': 1024 ** 3,
'm': 1024 ** 2,
'k': 1024,
}
def vpn_ping(address, port, timeout=0.05, session_id=None):
"""Sends a vpn negotiation packet and returns the server session.
Returns False on a failure. Basic packet structure is below.
Client packet (14 bytes)::
0 1 8 9 13
+-+--------+-----+
|x| cli_id |?????|
+-+--------+-----+
x = packet identifier 0x38
cli_id = 64 bit identifier
? = unknown, probably flags/padding
Server packet (26 bytes)::
0 1 8 9 13 14 21 2225
+-+--------+-----+--------+----+
|x| srv_id |?????| cli_id |????|
+-+--------+-----+--------+----+
x = packet identifier 0x40
cli_id = 64 bit identifier
? = unknown, probably flags/padding
bit 9 was 1 and the rest were 0 in testing
"""
if session_id is None:
session_id = random.randint(0, 0xffffffffffffffff)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = struct.pack('!BQxxxxx', 0x38, session_id)
sock.sendto(data, (address, port))
sock.settimeout(timeout)
try:
received = sock.recv(2048)
except socket.timeout:
return False
finally:
sock.close()
fmt = '!BQxxxxxQxxxx'
if len(received) != struct.calcsize(fmt):
print struct.calcsize(fmt)
return False
(identifier, server_sess, client_sess) = struct.unpack(fmt, received)
if identifier == 0x40 and client_sess == session_id:
return server_sess
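# Usage sketch (illustrative only; the address and port below are made up):
#
#     session = vpn_ping('192.0.2.10', 1194, timeout=0.05)
#     if session:
#         print 'VPN server replied with session id %x' % session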
def _subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
# non-Python subprocesses expect.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def execute(*cmd, **kwargs):
"""Helper method to execute command with optional retry.
If you add a run_as_root=True command, don't forget to add the
corresponding filter to etc/nova/rootwrap.d !
:param cmd: Passed to subprocess.Popen.
:param process_input: Send to opened process.
:param check_exit_code: Single bool, int, or list of allowed exit
codes. Defaults to [0]. Raise
exception.ProcessExecutionError unless
program exits with one of these code.
:param delay_on_retry: True | False. Defaults to True. If set to
True, wait a short amount of time
before retrying.
:param attempts: How many times to retry cmd.
:param run_as_root: True | False. Defaults to False. If set to True,
the command is run with rootwrap.
:raises exception.NovaException: on receiving unknown arguments
:raises exception.ProcessExecutionError:
:returns: a tuple, (stdout, stderr) from the spawned process, or None if
the command fails.
"""
process_input = kwargs.pop('process_input', None)
check_exit_code = kwargs.pop('check_exit_code', [0])
ignore_exit_code = False
if isinstance(check_exit_code, bool):
ignore_exit_code = not check_exit_code
check_exit_code = [0]
elif isinstance(check_exit_code, int):
check_exit_code = [check_exit_code]
delay_on_retry = kwargs.pop('delay_on_retry', True)
attempts = kwargs.pop('attempts', 1)
run_as_root = kwargs.pop('run_as_root', False)
shell = kwargs.pop('shell', False)
if len(kwargs):
raise exception.NovaException(_('Got unknown keyword args '
'to utils.execute: %r') % kwargs)
if run_as_root and os.geteuid() != 0:
cmd = ['sudo', 'nova-rootwrap', CONF.rootwrap_config] + list(cmd)
cmd = map(str, cmd)
while attempts > 0:
attempts -= 1
try:
LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
_PIPE = subprocess.PIPE # pylint: disable=E1101
if os.name == 'nt':
preexec_fn = None
close_fds = False
else:
preexec_fn = _subprocess_setup
close_fds = True
obj = subprocess.Popen(cmd,
stdin=_PIPE,
stdout=_PIPE,
stderr=_PIPE,
close_fds=close_fds,
preexec_fn=preexec_fn,
shell=shell)
result = None
if process_input is not None:
result = obj.communicate(process_input)
else:
result = obj.communicate()
obj.stdin.close() # pylint: disable=E1101
_returncode = obj.returncode # pylint: disable=E1101
LOG.debug(_('Result was %s') % _returncode)
if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result
raise exception.ProcessExecutionError(
exit_code=_returncode,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return result
except exception.ProcessExecutionError:
if not attempts:
raise
else:
LOG.debug(_('%r failed. Retrying.'), cmd)
if delay_on_retry:
greenthread.sleep(random.randint(20, 200) / 100.0)
finally:
# NOTE(termie): this appears to be necessary to let the subprocess
# call clean something up in between calls, without
# it two execute calls in a row hangs the second one
greenthread.sleep(0)
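# Usage sketch for execute() (illustrative only; the commands and exit codes
# below are arbitrary examples, not calls made elsewhere in this module):
#
#     out, err = execute('cat', '/etc/hostname')
#     out, err = execute('mount', '/dev/sdb1', '/mnt', run_as_root=True,
#                        check_exit_code=[0, 32], attempts=2)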
def trycmd(*args, **kwargs):
"""
A wrapper around execute() to more easily handle warnings and errors.
Returns an (out, err) tuple of strings containing the output of
the command's stdout and stderr. If 'err' is not empty then the
command can be considered to have failed.
:discard_warnings True | False. Defaults to False. If set to True,
then for succeeding commands, stderr is cleared
"""
discard_warnings = kwargs.pop('discard_warnings', False)
try:
out, err = execute(*args, **kwargs)
failed = False
except exception.ProcessExecutionError, exn:
out, err = '', str(exn)
failed = True
if not failed and discard_warnings and err:
# Handle commands that output to stderr but otherwise succeed
err = ''
return out, err
def ssh_execute(ssh, cmd, process_input=None,
addl_env=None, check_exit_code=True):
LOG.debug(_('Running cmd (SSH): %s'), cmd)
if addl_env:
raise exception.NovaException(_('Environment not supported over SSH'))
if process_input:
# This is (probably) fixable if we need it...
msg = _('process_input not supported over SSH')
raise exception.NovaException(msg)
stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd)
channel = stdout_stream.channel
#stdin.write('process_input would go here')
#stdin.flush()
# NOTE(justinsb): This seems suspicious...
# ...other SSH clients have buffering issues with this approach
stdout = stdout_stream.read()
stderr = stderr_stream.read()
stdin_stream.close()
exit_status = channel.recv_exit_status()
# exit_status == -1 if no exit code was returned
if exit_status != -1:
LOG.debug(_('Result was %s') % exit_status)
if check_exit_code and exit_status != 0:
raise exception.ProcessExecutionError(exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=cmd)
return (stdout, stderr)
def novadir():
import nova
return os.path.abspath(nova.__file__).split('nova/__init__.py')[0]
def debug(arg):
LOG.debug(_('debug in callback: %s'), arg)
return arg
def generate_uid(topic, size=8):
characters = '01234567890abcdefghijklmnopqrstuvwxyz'
choices = [random.choice(characters) for _x in xrange(size)]
return '%s-%s' % (topic, ''.join(choices))
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
DEFAULT_PASSWORD_SYMBOLS = ('23456789', # Removed: 0,1
'ABCDEFGHJKLMNPQRSTUVWXYZ', # Removed: I, O
'abcdefghijkmnopqrstuvwxyz') # Removed: l
# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789', # Removed: 0, 1
'ABCDEFGHJKLMNPQRSTUVWXYZ') # Removed: I, O
def last_completed_audit_period(unit=None, before=None):
"""This method gives you the most recently *completed* audit period.
arguments:
units: string, one of 'hour', 'day', 'month', 'year'
Periods normally begin at the beginning (UTC) of the
period unit (So a 'day' period begins at midnight UTC,
a 'month' unit on the 1st, a 'year' on Jan, 1)
unit string may be appended with an optional offset
like so: 'day@18' This will begin the period at 18:00
UTC. 'month@15' starts a monthly period on the 15th,
and year@3 begins a yearly one on March 1st.
before: Give the audit period most recently completed before
<timestamp>. Defaults to now.
returns: 2 tuple of datetimes (begin, end)
The begin timestamp of this audit period is the same as the
end of the previous."""
if not unit:
unit = CONF.instance_usage_audit_period
offset = 0
if '@' in unit:
unit, offset = unit.split("@", 1)
offset = int(offset)
if before is not None:
rightnow = before
else:
rightnow = timeutils.utcnow()
if unit not in ('month', 'day', 'year', 'hour'):
raise ValueError('Time period must be hour, day, month or year')
if unit == 'month':
if offset == 0:
offset = 1
end = datetime.datetime(day=offset,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
year = rightnow.year
if 1 >= rightnow.month:
year -= 1
month = 12 + (rightnow.month - 1)
else:
month = rightnow.month - 1
end = datetime.datetime(day=offset,
month=month,
year=year)
year = end.year
if 1 >= end.month:
year -= 1
month = 12 + (end.month - 1)
else:
month = end.month - 1
begin = datetime.datetime(day=offset, month=month, year=year)
elif unit == 'year':
if offset == 0:
offset = 1
end = datetime.datetime(day=1, month=offset, year=rightnow.year)
if end >= rightnow:
end = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 2)
else:
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
elif unit == 'day':
end = datetime.datetime(hour=offset,
day=rightnow.day,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
end = end - datetime.timedelta(days=1)
begin = end - datetime.timedelta(days=1)
elif unit == 'hour':
end = rightnow.replace(minute=offset, second=0, microsecond=0)
if end >= rightnow:
end = end - datetime.timedelta(hours=1)
begin = end - datetime.timedelta(hours=1)
return (begin, end)
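# Illustrative examples (added commentary; assumes "now" is 2012-07-20 14:20 UTC):
#
#     last_completed_audit_period('hour')   -> (13:00, 14:00) the same day
#     last_completed_audit_period('day@18') -> 18:00 two days ago .. 18:00 yesterday
#     last_completed_audit_period('month')  -> June 1st .. July 1st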
def generate_password(length=20, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
"""Generate a random password from the supplied symbol groups.
At least one symbol from each group will be included. Unpredictable
results if length is less than the number of symbol groups.
Believed to be reasonably secure (with a reasonable password length!)
"""
r = random.SystemRandom()
# NOTE(jerdfelt): Some password policies require at least one character
# from each group of symbols, so start off with one random character
# from each symbol group
password = [r.choice(s) for s in symbolgroups]
# If length < len(symbolgroups), the leading characters will only
# be from the first length groups. Try our best to not be predictable
# by shuffling and then truncating.
r.shuffle(password)
password = password[:length]
length -= len(password)
# then fill with random characters from all symbol groups
symbols = ''.join(symbolgroups)
password.extend([r.choice(symbols) for _i in xrange(length)])
# finally shuffle to ensure first x characters aren't from a
# predictable group
r.shuffle(password)
return ''.join(password)
def last_octet(address):
return int(address.split('.')[-1])
def get_my_linklocal(interface):
try:
if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
links = [re.search(condition, x) for x in if_str[0].split('\n')]
address = [w.group(1) for w in links if w is not None]
if address[0] is not None:
return address[0]
else:
msg = _('Link Local address is not found.:%s') % if_str
raise exception.NovaException(msg)
except Exception as ex:
msg = _("Couldn't get Link Local IP of %(interface)s"
" :%(ex)s") % locals()
raise exception.NovaException(msg)
def parse_mailmap(mailmap='.mailmap'):
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias.lower()] = canonical_email.lower()
return mapping
def str_dict_replace(s, mapping):
for s1, s2 in mapping.iteritems():
s = s.replace(s1, s2)
return s
class LazyPluggable(object):
"""A pluggable backend loaded lazily based on some value."""
def __init__(self, pivot, **backends):
self.__backends = backends
self.__pivot = pivot
self.__backend = None
def __get_backend(self):
if not self.__backend:
backend_name = CONF[self.__pivot]
if backend_name not in self.__backends:
msg = _('Invalid backend: %s') % backend_name
raise exception.NovaException(msg)
backend = self.__backends[backend_name]
if isinstance(backend, tuple):
name = backend[0]
fromlist = backend[1]
else:
name = backend
fromlist = backend
self.__backend = __import__(name, None, None, fromlist)
return self.__backend
def __getattr__(self, key):
backend = self.__get_backend()
return getattr(backend, key)
class LoopingCallDone(Exception):
"""Exception to break out and stop a LoopingCall.
The poll-function passed to LoopingCall can raise this exception to
break out of the loop normally. This is somewhat analogous to
StopIteration.
An optional return-value can be included as the argument to the exception;
this return-value will be returned by LoopingCall.wait()
"""
def __init__(self, retvalue=True):
""":param retvalue: Value that LoopingCall.wait() should return."""
self.retvalue = retvalue
class LoopingCall(object):
def __init__(self, f=None, *args, **kw):
self.args = args
self.kw = kw
self.f = f
self._running = False
def start(self, interval, initial_delay=None):
self._running = True
done = event.Event()
def _inner():
if initial_delay:
greenthread.sleep(initial_delay)
try:
while self._running:
self.f(*self.args, **self.kw)
if not self._running:
break
greenthread.sleep(interval)
except LoopingCallDone, e:
self.stop()
done.send(e.retvalue)
except Exception:
LOG.exception(_('in looping call'))
done.send_exception(*sys.exc_info())
return
else:
done.send(True)
self.done = done
greenthread.spawn(_inner)
return self.done
def stop(self):
self._running = False
def wait(self):
return self.done.wait()
def xhtml_escape(value):
"""Escapes a string so it is valid within XML or XHTML.
"""
return saxutils.escape(value, {'"': '"', "'": '''})
def utf8(value):
"""Try to turn a string into utf-8 if possible.
Code is directly from the utf8 function in
http://github.com/facebook/tornado/blob/master/tornado/escape.py
"""
if isinstance(value, unicode):
return value.encode('utf-8')
assert isinstance(value, str)
return value
def to_bytes(text, default=0):
"""Try to turn a string into a number of bytes. Looks at the last
characters of the text to determine what conversion is needed to
turn the input text into a byte number.
Supports: B/b, K/k, M/m, G/g, T/t (or the same with b/B on the end)
"""
# Take off everything not number 'like' (which should leave
# only the byte 'identifier' left)
mult_key_org = text.lstrip('-1234567890')
mult_key = mult_key_org.lower()
mult_key_len = len(mult_key)
if mult_key.endswith("b"):
mult_key = mult_key[0:-1]
try:
multiplier = BYTE_MULTIPLIERS[mult_key]
if mult_key_len:
# Empty cases shouldn't cause text[0:-0]
text = text[0:-mult_key_len]
return int(text) * multiplier
except KeyError:
msg = _('Unknown byte multiplier: %s') % mult_key_org
raise TypeError(msg)
except ValueError:
return default
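# Illustrative conversions (added commentary, not part of the original source):
#
#     to_bytes('1024')  -> 1024
#     to_bytes('2K')    -> 2048
#     to_bytes('10MB')  -> 10485760
#     to_bytes('junk')  -> 0   (falls back to `default`)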
def delete_if_exists(pathname):
"""delete a file, but ignore file not found error"""
try:
os.unlink(pathname)
except OSError as e:
if e.errno == errno.ENOENT:
return
else:
raise
def get_from_path(items, path):
"""Returns a list of items matching the specified path.
Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item
in items, looks up items[prop1][prop2][prop3]. Like XPath, if any of the
intermediate results are lists it will treat each list item individually.
A 'None' in items or any child expressions will be ignored, this function
will not throw because of None (anywhere) in items. The returned list
will contain no None values.
"""
if path is None:
raise exception.NovaException('Invalid mini_xpath')
(first_token, sep, remainder) = path.partition('/')
if first_token == '':
raise exception.NovaException('Invalid mini_xpath')
results = []
if items is None:
return results
if not isinstance(items, list):
# Wrap single objects in a list
items = [items]
for item in items:
if item is None:
continue
get_method = getattr(item, 'get', None)
if get_method is None:
continue
child = get_method(first_token)
if child is None:
continue
if isinstance(child, list):
# Flatten intermediate lists
for x in child:
results.append(x)
else:
results.append(child)
if not sep:
# No more tokens
return results
else:
return get_from_path(results, remainder)
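# Illustrative example (added commentary; the data below is made up):
#
#     items = {'a': [{'b': {'c': 1}}, {'b': {'c': 2}}, {'b': None}]}
#     get_from_path(items, 'a/b/c')  -> [1, 2]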
def flatten_dict(dict_, flattened=None):
"""Recursively flatten a nested dictionary."""
flattened = flattened or {}
for key, value in dict_.iteritems():
if hasattr(value, 'iteritems'):
flatten_dict(value, flattened)
else:
flattened[key] = value
return flattened
def partition_dict(dict_, keys):
"""Return two dicts, one with `keys` the other with everything else."""
intersection = {}
difference = {}
for key, value in dict_.iteritems():
if key in keys:
intersection[key] = value
else:
difference[key] = value
return intersection, difference
def map_dict_keys(dict_, key_map):
"""Return a dict in which the dictionaries keys are mapped to new keys."""
mapped = {}
for key, value in dict_.iteritems():
mapped_key = key_map[key] if key in key_map else key
mapped[mapped_key] = value
return mapped
def subset_dict(dict_, keys):
"""Return a dict that only contains a subset of keys."""
subset = partition_dict(dict_, keys)[0]
return subset
def diff_dict(orig, new):
"""
Return a dict describing how to change orig to new. The keys
correspond to values that have changed; the value will be a list
of one or two elements. The first element of the list will be
either '+' or '-', indicating whether the key was updated or
deleted; if the key was updated, the list will contain a second
element, giving the updated value.
"""
# Figure out what keys went away
result = dict((k, ['-']) for k in set(orig.keys()) - set(new.keys()))
# Compute the updates
for key, value in new.items():
if key not in orig or value != orig[key]:
result[key] = ['+', value]
return result
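# Illustrative examples (added commentary, not part of the original source):
#
#     diff_dict({'a': 1, 'b': 2}, {'a': 1, 'b': 3, 'c': 4})
#         -> {'b': ['+', 3], 'c': ['+', 4]}
#     diff_dict({'a': 1}, {})
#         -> {'a': ['-']}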
def check_isinstance(obj, cls):
"""Checks that obj is of type cls, and lets PyLint infer types."""
if isinstance(obj, cls):
return obj
raise Exception(_('Expected object of type: %s') % (str(cls)))
def parse_server_string(server_str):
"""
Parses the given server_string and returns a list of host and port.
If it's not a combination of host part and port, the port element
is a null string. If the input is invalid expression, return a null
list.
"""
try:
# First of all, exclude pure IPv6 address (w/o port).
if netaddr.valid_ipv6(server_str):
return (server_str, '')
# Next, check if this is IPv6 address with a port number combination.
if server_str.find("]:") != -1:
(address, port) = server_str.replace('[', '', 1).split(']:')
return (address, port)
# Third, check if this is a combination of an address and a port
if server_str.find(':') == -1:
return (server_str, '')
# This must be a combination of an address and a port
(address, port) = server_str.split(':')
return (address, port)
except Exception:
LOG.error(_('Invalid server_string: %s'), server_str)
return ('', '')
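# Illustrative parses (added commentary; the addresses below are examples only):
#
#     parse_server_string('192.0.2.5:8080')     -> ('192.0.2.5', '8080')
#     parse_server_string('192.0.2.5')          -> ('192.0.2.5', '')
#     parse_server_string('[2001:db8::1]:8080') -> ('2001:db8::1', '8080')
#     parse_server_string('2001:db8::1')        -> ('2001:db8::1', '')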
def bool_from_str(val):
"""Convert a string representation of a bool into a bool value"""
if not val:
return False
try:
return True if int(val) else False
except ValueError:
return val.lower() == 'true' or \
val.lower() == 'yes' or \
val.lower() == 'y'
def is_valid_boolstr(val):
"""Check if the provided string is a valid bool string or not. """
val = str(val).lower()
return val == 'true' or val == 'false' or \
val == 'yes' or val == 'no' or \
val == 'y' or val == 'n' or \
val == '1' or val == '0'
def is_valid_ipv4(address):
"""valid the address strictly as per format xxx.xxx.xxx.xxx.
where xxx is a value between 0 and 255.
"""
parts = address.split(".")
if len(parts) != 4:
return False
for item in parts:
try:
if not 0 <= int(item) <= 255:
return False
except ValueError:
return False
return True
def is_valid_cidr(address):
"""Check if the provided ipv4 or ipv6 address is a valid
CIDR address or not"""
try:
# Validate the correct CIDR Address
netaddr.IPNetwork(address)
except netaddr.core.AddrFormatError:
return False
except UnboundLocalError:
# NOTE(MotoKen): work around bug in netaddr 0.7.5 (see detail in
# https://github.com/drkjam/netaddr/issues/2)
return False
# Prior validation partially verify /xx part
# Verify it here
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
return False
return True
def monkey_patch():
""" If the Flags.monkey_patch set as True,
this function patches a decorator
for all functions in specified modules.
You can set decorators for each modules
using CONF.monkey_patch_modules.
The format is "Module path:Decorator function".
Example: 'nova.api.ec2.cloud:nova.notifier.api.notify_decorator'
Parameters of the decorator is as follows.
(See nova.notifier.api.notify_decorator)
name - name of the function
function - object of the function
"""
# If CONF.monkey_patch is not True, this function does nothing.
if not CONF.monkey_patch:
return
# Get list of modules and decorators
for module_and_decorator in CONF.monkey_patch_modules:
module, decorator_name = module_and_decorator.split(':')
# import decorator function
decorator = importutils.import_class(decorator_name)
__import__(module)
# Retrieve module information using pyclbr
module_data = pyclbr.readmodule_ex(module)
for key in module_data.keys():
# set the decorator for the class methods
if isinstance(module_data[key], pyclbr.Class):
clz = importutils.import_class("%s.%s" % (module, key))
for method, func in inspect.getmembers(clz, inspect.ismethod):
setattr(clz, method,
decorator("%s.%s.%s" % (module, key, method), func))
# set the decorator for the function
if isinstance(module_data[key], pyclbr.Function):
func = importutils.import_class("%s.%s" % (module, key))
setattr(sys.modules[module], key,
decorator("%s.%s" % (module, key), func))
def convert_to_list_dict(lst, label):
"""Convert a value or list into a list of dicts"""
if not lst:
return None
if not isinstance(lst, list):
lst = [lst]
return [{label: x} for x in lst]
def timefunc(func):
"""Decorator that logs how long a particular function took to execute"""
@functools.wraps(func)
def inner(*args, **kwargs):
start_time = time.time()
try:
return func(*args, **kwargs)
finally:
total_time = time.time() - start_time
LOG.debug(_("timefunc: '%(name)s' took %(total_time).2f secs") %
dict(name=func.__name__, total_time=total_time))
return inner
def generate_glance_url():
"""Generate the URL to glance."""
return "%s://%s:%d" % (CONF.glance_protocol, CONF.glance_host,
CONF.glance_port)
def generate_image_url(image_ref):
"""Generate an image URL from an image_ref."""
return "%s/images/%s" % (generate_glance_url(), image_ref)
@contextlib.contextmanager
def remove_path_on_error(path):
"""Protect code that wants to operate on PATH atomically.
Any exception will cause PATH to be removed.
"""
try:
yield
except Exception:
with excutils.save_and_reraise_exception():
delete_if_exists(path)
def make_dev_path(dev, partition=None, base='/dev'):
"""Return a path to a particular device.
>>> make_dev_path('xvdc')
/dev/xvdc
>>> make_dev_path('xvdc', 1)
/dev/xvdc1
"""
path = os.path.join(base, dev)
if partition:
path += str(partition)
return path
def total_seconds(td):
"""Local total_seconds implementation for compatibility with python 2.6"""
if hasattr(td, 'total_seconds'):
return td.total_seconds()
else:
return ((td.days * 86400 + td.seconds) * 10 ** 6 +
td.microseconds) / 10.0 ** 6
def sanitize_hostname(hostname):
"""Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
if isinstance(hostname, unicode):
hostname = hostname.encode('latin-1', 'ignore')
hostname = re.sub('[ _]', '-', hostname)
hostname = re.sub('[^\w.-]+', '', hostname)
hostname = hostname.lower()
hostname = hostname.strip('.-')
return hostname
def read_cached_file(filename, cache_info, reload_func=None):
"""Read from a file if it has been modified.
:param cache_info: dictionary to hold opaque cache.
:param reload_func: optional function to be called with data when
file is reloaded due to a modification.
:returns: data from file
"""
mtime = os.path.getmtime(filename)
if not cache_info or mtime != cache_info.get('mtime'):
LOG.debug(_("Reloading cached file %s") % filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
if reload_func:
reload_func(cache_info['data'])
return cache_info['data']
def file_open(*args, **kwargs):
"""Open file
see built-in file() documentation for more details
Note: The reason this is kept in a separate module is to easily
be able to provide a stub module that doesn't alter system
state at all (for unit tests)
"""
return file(*args, **kwargs)
def hash_file(file_like_object):
"""Generate a hash for the contents of a file."""
checksum = hashlib.sha1()
for chunk in iter(lambda: file_like_object.read(32768), b''):
checksum.update(chunk)
return checksum.hexdigest()
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
"""Temporarily set the attr on a particular object to a given value then
revert when finished.
One use of this is to temporarily set the read_deleted flag on a context
object:
with temporary_mutation(context, read_deleted="yes"):
do_something_that_needed_deleted_objects()
"""
NOT_PRESENT = object()
old_values = {}
for attr, new_value in kwargs.items():
old_values[attr] = getattr(obj, attr, NOT_PRESENT)
setattr(obj, attr, new_value)
try:
yield
finally:
for attr, old_value in old_values.items():
if old_value is NOT_PRESENT:
del obj[attr]
else:
setattr(obj, attr, old_value)
def generate_mac_address():
"""Generate an Ethernet MAC address."""
# NOTE(vish): We would prefer to use 0xfe here to ensure that linux
# bridge mac addresses don't change, but it appears to
# conflict with libvirt, so we use the next highest octet
# that has the unicast and locally administered bits set
# properly: 0xfa.
# Discussion: https://bugs.launchpad.net/nova/+bug/921838
mac = [0xfa, 0x16, 0x3e,
random.randint(0x00, 0xff),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff)]
return ':'.join(map(lambda x: "%02x" % x, mac))
def read_file_as_root(file_path):
"""Secure helper to read file as root."""
try:
out, _err = execute('cat', file_path, run_as_root=True)
return out
except exception.ProcessExecutionError:
raise exception.FileNotFound(file_path=file_path)
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
"""Temporarily chown a path.
:params owner_uid: UID of temporary owner (defaults to current user)
"""
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
execute('chown', owner_uid, path, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
execute('chown', orig_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
tmpdir = tempfile.mkdtemp(**kwargs)
try:
yield tmpdir
finally:
try:
shutil.rmtree(tmpdir)
except OSError, e:
LOG.error(_('Could not remove tmpdir: %s'), str(e))
def strcmp_const_time(s1, s2):
"""Constant-time string comparison.
:params s1: the first string
:params s2: the second string
:return: True if the strings are equal.
This function takes two strings and compares them. It is intended to be
used when doing a comparison for authentication purposes to help guard
against timing attacks.
"""
if len(s1) != len(s2):
return False
result = 0
for (a, b) in zip(s1, s2):
result |= ord(a) ^ ord(b)
return result == 0
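# Usage sketch (illustrative only; the names below are hypothetical): compare a
# caller-supplied token against a stored secret without leaking match position
# through timing differences:
#
#     if strcmp_const_time(request_signature, expected_signature):
#         grant_access()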
def walk_class_hierarchy(clazz, encountered=None):
"""Walk class hierarchy, yielding most derived classes first"""
if not encountered:
encountered = []
for subclass in clazz.__subclasses__():
if subclass not in encountered:
encountered.append(subclass)
# drill down to leaves first
for subsubclass in walk_class_hierarchy(subclass, encountered):
yield subsubclass
yield subclass
class UndoManager(object):
"""Provides a mechanism to facilitate rolling back a series of actions
when an exception is raised.
"""
def __init__(self):
self.undo_stack = []
def undo_with(self, undo_func):
self.undo_stack.append(undo_func)
def _rollback(self):
for undo_func in reversed(self.undo_stack):
undo_func()
def rollback_and_reraise(self, msg=None, **kwargs):
"""Rollback a series of actions then re-raise the exception.
.. note:: (sirp) This should only be called within an
exception handler.
"""
with excutils.save_and_reraise_exception():
if msg:
LOG.exception(msg, **kwargs)
self._rollback()
def mkfs(fs, path, label=None):
"""Format a file or block device
:param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4'
'btrfs', etc.)
:param path: Path to file or block device to format
:param label: Volume label to use
"""
if fs == 'swap':
args = ['mkswap']
else:
args = ['mkfs', '-t', fs]
#add -F to force no interactive execute on non-block device.
if fs in ('ext3', 'ext4'):
args.extend(['-F'])
if label:
if fs in ('msdos', 'vfat'):
label_opt = '-n'
else:
label_opt = '-L'
args.extend([label_opt, label])
args.append(path)
execute(*args)
def last_bytes(file_like_object, num):
"""Return num bytes from the end of the file, and remaining byte count.
:param file_like_object: The file to read
:param num: The number of bytes to return
:returns (data, remaining)
"""
try:
file_like_object.seek(-num, os.SEEK_END)
except IOError, e:
if e.errno == 22:
file_like_object.seek(0, os.SEEK_SET)
else:
raise
remaining = file_like_object.tell()
return (file_like_object.read(), remaining)
| apache-2.0 | -4,965,048,132,937,103,000 | 31.104515 | 79 | 0.587963 | false | 4.086961 | false | false | false |
larsbutler/swift | swift/obj/expirer.py | 1 | 12641 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six.moves import urllib
from random import random
from time import time
from os.path import join
from swift import gettext_ as _
import hashlib
from eventlet import sleep, Timeout
from eventlet.greenpool import GreenPool
from swift.common.daemon import Daemon
from swift.common.internal_client import InternalClient, UnexpectedResponse
from swift.common.utils import get_logger, dump_recon_cache, split_path
from swift.common.http import HTTP_NOT_FOUND, HTTP_CONFLICT, \
HTTP_PRECONDITION_FAILED
from swift.container.reconciler import direct_delete_container_entry
MAX_OBJECTS_TO_CACHE = 100000
class ObjectExpirer(Daemon):
"""
Daemon that queries the internal hidden expiring_objects_account to
discover objects that need to be deleted.
:param conf: The daemon configuration.
"""
def __init__(self, conf, logger=None, swift=None):
self.conf = conf
self.logger = logger or get_logger(conf, log_route='object-expirer')
self.interval = int(conf.get('interval') or 300)
self.expiring_objects_account = \
(conf.get('auto_create_account_prefix') or '.') + \
(conf.get('expiring_objects_account_name') or 'expiring_objects')
conf_path = conf.get('__file__') or '/etc/swift/object-expirer.conf'
request_tries = int(conf.get('request_tries') or 3)
self.swift = swift or InternalClient(
conf_path, 'Swift Object Expirer', request_tries)
self.report_interval = int(conf.get('report_interval') or 300)
self.report_first_time = self.report_last_time = time()
self.report_objects = 0
self.recon_cache_path = conf.get('recon_cache_path',
'/var/cache/swift')
self.rcache = join(self.recon_cache_path, 'object.recon')
self.concurrency = int(conf.get('concurrency', 1))
if self.concurrency < 1:
raise ValueError("concurrency must be set to at least 1")
self.processes = int(self.conf.get('processes', 0))
self.process = int(self.conf.get('process', 0))
self.reclaim_age = int(conf.get('reclaim_age', 86400 * 7))
def report(self, final=False):
"""
Emits a log line report of the progress so far, or the final progress
if final=True.
:param final: Set to True for the last report once the expiration pass
has completed.
"""
if final:
elapsed = time() - self.report_first_time
self.logger.info(_('Pass completed in %(time)ds; '
'%(objects)d objects expired') % {
'time': elapsed, 'objects': self.report_objects})
dump_recon_cache({'object_expiration_pass': elapsed,
'expired_last_pass': self.report_objects},
self.rcache, self.logger)
elif time() - self.report_last_time >= self.report_interval:
elapsed = time() - self.report_first_time
self.logger.info(_('Pass so far %(time)ds; '
'%(objects)d objects expired') % {
'time': elapsed, 'objects': self.report_objects})
self.report_last_time = time()
def iter_cont_objs_to_expire(self):
"""
Yields (container, obj) tuples to be deleted
"""
obj_cache = {}
cnt = 0
all_containers = set()
for c in self.swift.iter_containers(self.expiring_objects_account):
container = str(c['name'])
timestamp = int(container)
if timestamp > int(time()):
break
all_containers.add(container)
for o in self.swift.iter_objects(self.expiring_objects_account,
container):
obj = o['name'].encode('utf8')
timestamp, actual_obj = obj.split('-', 1)
timestamp = int(timestamp)
if timestamp > int(time()):
break
try:
cust_account, cust_cont, cust_obj = \
split_path('/' + actual_obj, 3, 3, True)
cache_key = '%s/%s' % (cust_account, cust_cont)
except ValueError:
cache_key = None
if self.processes > 0:
obj_process = int(
hashlib.md5('%s/%s' % (container, obj)).
hexdigest(), 16)
if obj_process % self.processes != self.process:
continue
if cache_key not in obj_cache:
obj_cache[cache_key] = []
obj_cache[cache_key].append((container, obj))
cnt += 1
if cnt > MAX_OBJECTS_TO_CACHE:
while obj_cache:
for key in obj_cache.keys():
if obj_cache[key]:
yield obj_cache[key].pop()
cnt -= 1
else:
del obj_cache[key]
while obj_cache:
for key in obj_cache.keys():
if obj_cache[key]:
yield obj_cache[key].pop()
else:
del obj_cache[key]
for container in all_containers:
yield (container, None)
def run_once(self, *args, **kwargs):
"""
Executes a single pass, looking for objects to expire.
:param args: Extra args to fulfill the Daemon interface; this daemon
has no additional args.
:param kwargs: Extra keyword args to fulfill the Daemon interface; this
daemon accepts processes and process keyword args.
These will override the values from the config file if
provided.
"""
self.get_process_values(kwargs)
pool = GreenPool(self.concurrency)
containers_to_delete = set([])
self.report_first_time = self.report_last_time = time()
self.report_objects = 0
try:
self.logger.debug('Run begin')
containers, objects = \
self.swift.get_account_info(self.expiring_objects_account)
self.logger.info(_('Pass beginning; '
'%(containers)s possible containers; '
'%(objects)s possible objects') % {
'containers': containers, 'objects': objects})
for container, obj in self.iter_cont_objs_to_expire():
containers_to_delete.add(container)
if not obj:
continue
timestamp, actual_obj = obj.split('-', 1)
timestamp = int(timestamp)
if timestamp > int(time()):
break
pool.spawn_n(
self.delete_object, actual_obj, timestamp,
container, obj)
pool.waitall()
for container in containers_to_delete:
try:
self.swift.delete_container(
self.expiring_objects_account,
container,
acceptable_statuses=(2, HTTP_NOT_FOUND, HTTP_CONFLICT))
except (Exception, Timeout) as err:
self.logger.exception(
_('Exception while deleting container %(container)s '
'%(err)s') % {'container': container,
'err': str(err)})
self.logger.debug('Run end')
self.report(final=True)
except (Exception, Timeout):
self.logger.exception(_('Unhandled exception'))
def run_forever(self, *args, **kwargs):
"""
Executes passes forever, looking for objects to expire.
:param args: Extra args to fulfill the Daemon interface; this daemon
has no additional args.
:param kwargs: Extra keyword args to fulfill the Daemon interface; this
daemon has no additional keyword args.
"""
sleep(random() * self.interval)
while True:
begin = time()
try:
self.run_once(*args, **kwargs)
except (Exception, Timeout):
self.logger.exception(_('Unhandled exception'))
elapsed = time() - begin
if elapsed < self.interval:
sleep(random() * (self.interval - elapsed))
def get_process_values(self, kwargs):
"""
Sets self.processes and self.process from the kwargs if those
values exist, otherwise, leaves those values as they were set in
the config file.
:param kwargs: Keyword args passed into the run_forever(), run_once()
methods. They have values specified on the command
line when the daemon is run.
"""
if kwargs.get('processes') is not None:
self.processes = int(kwargs['processes'])
if kwargs.get('process') is not None:
self.process = int(kwargs['process'])
if self.process < 0:
raise ValueError(
'process must be an integer greater than or equal to 0')
if self.processes < 0:
raise ValueError(
'processes must be an integer greater than or equal to 0')
if self.processes and self.process >= self.processes:
raise ValueError(
'process must be less than or equal to processes')
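    # Added illustrative note (not part of the original module): with
    # ``processes = 3`` an operator runs three expirer instances configured
    # with ``process`` values 0, 1 and 2.  Each instance only acts on queue
    # entries whose md5-derived integer satisfies
    # ``obj_process % self.processes == self.process`` (see
    # iter_cont_objs_to_expire above), so the expiry queue is partitioned
    # between instances without any coordination.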
def delete_object(self, actual_obj, timestamp, container, obj):
start_time = time()
try:
try:
self.delete_actual_object(actual_obj, timestamp)
except UnexpectedResponse as err:
if err.resp.status_int not in {HTTP_NOT_FOUND,
HTTP_PRECONDITION_FAILED}:
raise
if float(timestamp) > time() - self.reclaim_age:
# we'll have to retry the DELETE later
raise
self.pop_queue(container, obj)
self.report_objects += 1
self.logger.increment('objects')
except (Exception, Timeout) as err:
self.logger.increment('errors')
self.logger.exception(
_('Exception while deleting object %(container)s %(obj)s'
' %(err)s') % {'container': container,
'obj': obj, 'err': str(err)})
self.logger.timing_since('timing', start_time)
self.report()
def pop_queue(self, container, obj):
"""
Issue a delete object request to the container for the expiring object
queue entry.
"""
direct_delete_container_entry(self.swift.container_ring,
self.expiring_objects_account,
container, obj)
def delete_actual_object(self, actual_obj, timestamp):
"""
Deletes the end-user object indicated by the actual object name given
'<account>/<container>/<object>' if and only if the X-Delete-At value
of the object is exactly the timestamp given.
:param actual_obj: The name of the end-user object to delete:
'<account>/<container>/<object>'
:param timestamp: The timestamp the X-Delete-At value must match to
perform the actual delete.
"""
path = '/v1/' + urllib.parse.quote(actual_obj.lstrip('/'))
self.swift.make_request('DELETE', path,
{'X-If-Delete-At': str(timestamp),
'X-Timestamp': str(timestamp)},
(2,))
| apache-2.0 | -1,666,015,285,613,331,700 | 40.445902 | 79 | 0.537616 | false | 4.652558 | false | false | false |
krivenko/som | python/som.py | 1 | 1706 | ##############################################################################
#
# SOM: Stochastic Optimization Method for Analytic Continuation
#
# Copyright (C) 2016-2020 Igor Krivenko <[email protected]>
#
# SOM is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# SOM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# SOM. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Main module of SOM
"""
from core import SomCore
import numpy as np
class Som(SomCore):
"""Stochastic Optimization Method"""
def __init__(self, g, s = None, kind = "FermionGf", norms = np.array([])):
if s is None:
s = g.copy()
s.data[:,Ellipsis] = np.eye(s.target_shape[0])
        if isinstance(norms, (float, int)):
norms = norms * np.ones((g.target_shape[0],))
SomCore.__init__(self, g, s, kind, norms)
def count_good_solutions(hist, upper_lim = 1):
"""
Given a histogram of objective function values,
count the number of solutions with D/D_{min} <= 1 + upper_lim
"""
d_max = hist.limits[0] * (1 + upper_lim)
return int(sum(c for n, c in enumerate(hist.data) if hist.mesh_point(n) <= d_max))
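# A minimal usage sketch (added for illustration; the constructor arguments
# come from __init__ above, while the keyword arguments accepted by run()
# are defined in SomCore and are assumptions here):
#
#   cont = Som(g_iw, kind="FermionGf", norms=1.0)
#   cont.run(energy_window=(-5.0, 5.0))
#
# count_good_solutions() above expects one of the objective-function
# histograms produced by such a run.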
| gpl-3.0 | -1,575,309,946,060,850,000 | 36.911111 | 86 | 0.611958 | false | 3.676724 | false | false | false |
atumanov/ray | python/ray/exceptions.py | 1 | 3236 | import os
import colorama
try:
import setproctitle
except ImportError:
setproctitle = None
class RayError(Exception):
"""Super class of all ray exception types."""
pass
class RayTaskError(RayError):
"""Indicates that a task threw an exception during execution.
If a task throws an exception during execution, a RayTaskError is stored in
the object store for each of the task's outputs. When an object is
retrieved from the object store, the Python method that retrieved it checks
to see if the object is a RayTaskError and if it is then an exception is
thrown propagating the error message.
Attributes:
function_name (str): The name of the function that failed and produced
the RayTaskError.
traceback_str (str): The traceback from the exception.
"""
def __init__(self, function_name, traceback_str):
"""Initialize a RayTaskError."""
if setproctitle:
self.proctitle = setproctitle.getproctitle()
else:
self.proctitle = "ray_worker"
self.pid = os.getpid()
self.host = os.uname()[1]
self.function_name = function_name
self.traceback_str = traceback_str
assert traceback_str is not None
def __str__(self):
"""Format a RayTaskError as a string."""
lines = self.traceback_str.split("\n")
out = []
in_worker = False
for line in lines:
if line.startswith("Traceback "):
out.append("{}{}{} (pid={}, host={})".format(
colorama.Fore.CYAN, self.proctitle, colorama.Fore.RESET,
self.pid, self.host))
elif in_worker:
in_worker = False
elif "ray/worker.py" in line or "ray/function_manager.py" in line:
in_worker = True
else:
out.append(line)
return "\n".join(out)
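# Usage sketch (added for illustration, not part of the original module):
# a RayTaskError stored in the object store surfaces when the result is
# fetched, so callers typically guard ray.get():
#
#   try:
#       result = ray.get(object_id)
#   except RayTaskError as err:
#       print("remote task failed:", err)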
class RayWorkerError(RayError):
"""Indicates that the worker died unexpectedly while executing a task."""
def __str__(self):
return "The worker died unexpectedly while executing this task."
class RayActorError(RayError):
"""Indicates that the actor died unexpectedly before finishing a task.
This exception could happen either because the actor process dies while
executing a task, or because a task is submitted to a dead actor.
"""
def __str__(self):
return "The actor died unexpectedly before finishing this task."
class UnreconstructableError(RayError):
"""Indicates that an object is lost and cannot be reconstructed.
Note, this exception only happens for actor objects. If actor's current
state is after object's creating task, the actor cannot re-run the task to
reconstruct the object.
Attributes:
object_id: ID of the object.
"""
def __init__(self, object_id):
self.object_id = object_id
def __str__(self):
return ("Object {} is lost (either evicted or explicitly deleted) and "
+ "cannot be reconstructed.").format(self.object_id.hex())
RAY_EXCEPTION_TYPES = [
RayError,
RayTaskError,
RayWorkerError,
RayActorError,
UnreconstructableError,
]
| apache-2.0 | -4,839,456,202,281,432,000 | 29.819048 | 79 | 0.635352 | false | 4.263505 | false | false | false |
mysociety/barnetplanning | alerts/forms.py | 1 | 2895 | import urllib2
import simplejson
from django import forms
from django.contrib.localflavor.uk.forms import UKPostcodeField
from models import Alert
# Due to a bug in UKPostcodeField, can't override error message. This is
# fixed in: http://code.djangoproject.com/ticket/12017
# So remove this extra class when we have a recent enough Django.
class MyUKPostcodeField(UKPostcodeField):
default_error_messages = {
'invalid': 'We need your complete UK postcode.'
}
widget = forms.TextInput(attrs={'size':'8'})
class AlertForm(forms.ModelForm):
email = forms.EmailField(label='Your email address', error_messages={'required': 'Please enter your email address.'})
postcode = MyUKPostcodeField(required=False)
ward_mapit_id = forms.TypedChoiceField(required=False, coerce=int, initial=None)
def __init__(self, *args, **kwargs):
super(AlertForm, self).__init__(*args, **kwargs)
self.fields['radius'].label = 'If you chose a postcode, how far around your postcode would you like to receive alerts for?'
        # Because radius is not compulsory on the model, the choices list includes a blank row for leaving
# it out. We don't want that, hence the [1:]
self.fields['radius'].widget = forms.RadioSelect(choices=self.fields['radius'].choices[1:])
# Make a dictionary of ward name to id
mapit_response = urllib2.urlopen("http://mapit.mysociety.org/area/2489/children.json")
mapit_data = simplejson.load(mapit_response)
ward_choices = [(int(value), mapit_data[value]['name']) for value in mapit_data]
ward_choices.sort(key=lambda x: x[1])
# FIXME - at some point in the future, should work out why None doesn't work here,
# and get rid of the clean_ward_mapit_id method.
ward_choices.insert(0, (-1, 'Select'))
self.fields['ward_mapit_id'].choices = ward_choices
self.fields['ward_mapit_id'].label = 'Ward'
def clean_ward_mapit_id(self):
"""We can't use None directly in the form, as it gets stringified into 'None'.
Instead, we use -1 as the signifier of nothing chosen, and turn it into None here."""
ward_id = self.cleaned_data['ward_mapit_id']
if ward_id == -1:
return None
else:
return ward_id
def clean(self):
cleaned_data = super(AlertForm, self).clean()
postcode = cleaned_data.get('postcode')
ward_mapit_id = cleaned_data.get('ward_mapit_id')
if postcode and ward_mapit_id:
raise forms.ValidationError('You cannot enter both a postcode and a ward.')
if not postcode and not ward_mapit_id:
raise forms.ValidationError('Please enter a postcode or a ward.')
return cleaned_data
class Meta:
model = Alert
fields = ('postcode', 'ward_mapit_id', 'email', 'radius')
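# Usage sketch (added for illustration; the field names come from the form
# above, the surrounding view code is an assumption):
#
#   form = AlertForm(data={'email': '[email protected]',
#                          'postcode': 'SW1A 1AA', 'radius': 2})
#   if form.is_valid():
#       form.save()           # creates the Alert
#   else:
#       errors = form.errors  # e.g. both postcode and ward supplied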
| agpl-3.0 | -3,994,539,111,728,237,600 | 40.357143 | 131 | 0.656995 | false | 3.683206 | false | false | false |
jiaphuan/models | research/slim/nets/mobilenet/mobilenet.py | 1 | 16178 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mobilenet Base Class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import copy
import os
import contextlib2
import tensorflow as tf
slim = tf.contrib.slim
@slim.add_arg_scope
def apply_activation(x, name=None, activation_fn=None):
return activation_fn(x, name=name) if activation_fn else x
def _fixed_padding(inputs, kernel_size, rate=1):
"""Pads the input along the spatial dimensions independently of input size.
Pads the input such that if it was used in a convolution with 'VALID' padding,
the output would have the same dimensions as if the unpadded input was used
in a convolution with 'SAME' padding.
Args:
inputs: A tensor of size [batch, height_in, width_in, channels].
kernel_size: The kernel to be used in the conv2d or max_pool2d operation.
rate: An integer, rate for atrous convolution.
Returns:
output: A tensor of size [batch, height_out, width_out, channels] with the
input, either intact (if kernel_size == 1) or padded (if kernel_size > 1).
"""
kernel_size_effective = [kernel_size[0] + (kernel_size[0] - 1) * (rate - 1),
kernel_size[0] + (kernel_size[0] - 1) * (rate - 1)]
pad_total = [kernel_size_effective[0] - 1, kernel_size_effective[1] - 1]
pad_beg = [pad_total[0] // 2, pad_total[1] // 2]
pad_end = [pad_total[0] - pad_beg[0], pad_total[1] - pad_beg[1]]
padded_inputs = tf.pad(inputs, [[0, 0], [pad_beg[0], pad_end[0]],
[pad_beg[1], pad_end[1]], [0, 0]])
return padded_inputs
def _make_divisible(v, divisor, min_value=None):
if min_value is None:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_v < 0.9 * v:
new_v += divisor
return new_v
@contextlib.contextmanager
def _set_arg_scope_defaults(defaults):
"""Sets arg scope defaults for all items present in defaults.
Args:
defaults: dictionary mapping function to default_dict
Yields:
context manager
"""
with contextlib2.ExitStack() as stack:
_ = [
stack.enter_context(slim.arg_scope(func, **default_arg))
for func, default_arg in defaults.items()
]
yield
@slim.add_arg_scope
def depth_multiplier(output_params,
multiplier,
divisible_by=8,
min_depth=8,
**unused_kwargs):
if 'num_outputs' not in output_params:
return
d = output_params['num_outputs']
output_params['num_outputs'] = _make_divisible(d * multiplier, divisible_by,
min_depth)
_Op = collections.namedtuple('Op', ['op', 'params', 'multiplier_func'])
def op(opfunc, **params):
multiplier = params.pop('multiplier_transorm', depth_multiplier)
return _Op(opfunc, params=params, multiplier_func=multiplier)
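# Illustrative example (added; not part of the original file): the conv_defs
# consumed by mobilenet_base() below are plain dicts whose 'spec' entry is a
# list of op(...) items, e.g.
#
#   EXAMPLE_DEF = dict(
#       defaults={},
#       spec=[
#           op(slim.conv2d, stride=2, num_outputs=32, kernel_size=[3, 3]),
#           op(slim.separable_conv2d, stride=1, num_outputs=64,
#              kernel_size=[3, 3]),
#       ])
#
# Concrete definitions such as V2_DEF are provided in mobilenet_v2.py.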
@slim.add_arg_scope
def mobilenet_base( # pylint: disable=invalid-name
inputs,
conv_defs,
multiplier=1.0,
final_endpoint=None,
output_stride=None,
use_explicit_padding=False,
scope=None,
is_training=False):
"""Mobilenet base network.
Constructs a network from inputs to the given final endpoint. By default
the network is constructed in inference mode. To create network
in training mode use:
with slim.arg_scope(mobilenet.training_scope()):
logits, endpoints = mobilenet_base(...)
Args:
inputs: a tensor of shape [batch_size, height, width, channels].
conv_defs: A list of op(...) layers specifying the net architecture.
multiplier: Float multiplier for the depth (number of channels)
for all convolution ops. The value must be greater than zero. Typical
usage will be to set this value in (0, 1) to reduce the number of
parameters or computation cost of the model.
final_endpoint: The name of last layer, for early termination for
for V1-based networks: last layer is "layer_14", for V2: "layer_20"
output_stride: An integer that specifies the requested ratio of input to
output spatial resolution. If not None, then we invoke atrous convolution
if necessary to prevent the network from reducing the spatial resolution
of the activation maps. Allowed values are 1 or any even number, excluding
zero. Typical values are 8 (accurate fully convolutional mode), 16
(fast fully convolutional mode), and 32 (classification mode).
      NOTE- output_stride relies on all subsequent operators to support dilated
operators via "rate" parameter. This might require wrapping non-conv
operators to operate properly.
use_explicit_padding: Use 'VALID' padding for convolutions, but prepad
inputs so that the output dimensions are the same as if 'SAME' padding
were used.
scope: optional variable scope.
is_training: How to setup batch_norm and other ops. Note: most of the time
this does not need be set directly. Use mobilenet.training_scope() to set
up training instead. This parameter is here for backward compatibility
only. It is safe to set it to the value matching
training_scope(is_training=...). It is also safe to explicitly set
it to False, even if there is outer training_scope set to to training.
(The network will be built in inference mode).
Returns:
tensor_out: output tensor.
end_points: a set of activations for external use, for example summaries or
losses.
Raises:
ValueError: depth_multiplier <= 0, or the target output_stride is not
allowed.
"""
if multiplier <= 0:
raise ValueError('multiplier is not greater than zero.')
# Set conv defs defaults and overrides.
conv_defs_defaults = conv_defs.get('defaults', {})
conv_defs_overrides = conv_defs.get('overrides', {})
if use_explicit_padding:
conv_defs_overrides = copy.deepcopy(conv_defs_overrides)
conv_defs_overrides[
(slim.conv2d, slim.separable_conv2d)] = {'padding': 'VALID'}
if output_stride is not None:
if output_stride == 0 or (output_stride > 1 and output_stride % 2):
raise ValueError('Output stride must be None, 1 or a multiple of 2.')
# a) Set the tensorflow scope
# b) set padding to default: note we might consider removing this
# since it is also set by mobilenet_scope
# c) set all defaults
# d) set all extra overrides.
with _scope_all(scope, default_scope='Mobilenet'), \
slim.arg_scope([slim.batch_norm], is_training=is_training), \
_set_arg_scope_defaults(conv_defs_defaults), \
_set_arg_scope_defaults(conv_defs_overrides):
# The current_stride variable keeps track of the output stride of the
# activations, i.e., the running product of convolution strides up to the
# current network layer. This allows us to invoke atrous convolution
# whenever applying the next convolution would result in the activations
# having output stride larger than the target output_stride.
current_stride = 1
# The atrous convolution rate parameter.
rate = 1
net = inputs
# Insert default parameters before the base scope which includes
# any custom overrides set in mobilenet.
end_points = {}
scopes = {}
for i, opdef in enumerate(conv_defs['spec']):
params = dict(opdef.params)
opdef.multiplier_func(params, multiplier)
stride = params.get('stride', 1)
if output_stride is not None and current_stride == output_stride:
# If we have reached the target output_stride, then we need to employ
# atrous convolution with stride=1 and multiply the atrous rate by the
# current unit's stride for use in subsequent layers.
layer_stride = 1
layer_rate = rate
rate *= stride
else:
layer_stride = stride
layer_rate = 1
current_stride *= stride
# Update params.
params['stride'] = layer_stride
# Only insert rate to params if rate > 1.
if layer_rate > 1:
params['rate'] = layer_rate
# Set padding
if use_explicit_padding:
if 'kernel_size' in params:
net = _fixed_padding(net, params['kernel_size'], layer_rate)
else:
params['use_explicit_padding'] = True
end_point = 'layer_%d' % (i + 1)
try:
net = opdef.op(net, **params)
except Exception:
print('Failed to create op %i: %r params: %r' % (i, opdef, params))
raise
end_points[end_point] = net
scope = os.path.dirname(net.name)
scopes[scope] = end_point
if final_endpoint is not None and end_point == final_endpoint:
break
# Add all tensors that end with 'output' to
# endpoints
for t in net.graph.get_operations():
scope = os.path.dirname(t.name)
bn = os.path.basename(t.name)
if scope in scopes and t.name.endswith('output'):
end_points[scopes[scope] + '/' + bn] = t.outputs[0]
return net, end_points
@contextlib.contextmanager
def _scope_all(scope, default_scope=None):
with tf.variable_scope(scope, default_name=default_scope) as s,\
tf.name_scope(s.original_name_scope):
yield s
@slim.add_arg_scope
def mobilenet(inputs,
num_classes=1001,
prediction_fn=slim.softmax,
reuse=None,
scope='Mobilenet',
base_only=False,
**mobilenet_args):
"""Mobilenet model for classification, supports both V1 and V2.
Note: default mode is inference, use mobilenet.training_scope to create
training network.
Args:
inputs: a tensor of shape [batch_size, height, width, channels].
num_classes: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
prediction_fn: a function to get predictions out of logits
(default softmax).
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
base_only: if True will only create the base of the network (no pooling
and no logits).
**mobilenet_args: passed to mobilenet_base verbatim.
- conv_defs: list of conv defs
- multiplier: Float multiplier for the depth (number of channels)
for all convolution ops. The value must be greater than zero. Typical
usage will be to set this value in (0, 1) to reduce the number of
parameters or computation cost of the model.
- output_stride: will ensure that the last layer has at most total stride.
If the architecture calls for more stride than that provided
(e.g. output_stride=16, but the architecture has 5 stride=2 operators),
it will replace output_stride with fractional convolutions using Atrous
Convolutions.
Returns:
logits: the pre-softmax activations, a tensor of size
[batch_size, num_classes]
end_points: a dictionary from components of the network to the corresponding
activation tensor.
Raises:
ValueError: Input rank is invalid.
"""
is_training = mobilenet_args.get('is_training', False)
input_shape = inputs.get_shape().as_list()
if len(input_shape) != 4:
raise ValueError('Expected rank 4 input, was: %d' % len(input_shape))
with tf.variable_scope(scope, 'Mobilenet', reuse=reuse) as scope:
inputs = tf.identity(inputs, 'input')
net, end_points = mobilenet_base(inputs, scope=scope, **mobilenet_args)
if base_only:
return net, end_points
net = tf.identity(net, name='embedding')
with tf.variable_scope('Logits'):
net = global_pool(net)
end_points['global_pool'] = net
if not num_classes:
return net, end_points
net = slim.dropout(net, scope='Dropout', is_training=is_training)
# 1 x 1 x num_classes
# Note: legacy scope name.
logits = slim.conv2d(
net,
num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
biases_initializer=tf.zeros_initializer(),
scope='Conv2d_1c_1x1')
logits = tf.squeeze(logits, [1, 2])
logits = tf.identity(logits, name='output')
end_points['Logits'] = logits
if prediction_fn:
end_points['Predictions'] = prediction_fn(logits, 'Predictions')
return logits, end_points
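# Usage sketch (added for illustration; assumes an architecture definition
# such as V2_DEF from mobilenet_v2.py has been imported):
#
#   images = tf.placeholder(tf.float32, [None, 224, 224, 3])
#   logits, end_points = mobilenet(images, num_classes=1001,
#                                  conv_defs=V2_DEF)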
def global_pool(input_tensor, pool_op=tf.nn.avg_pool):
"""Applies avg pool to produce 1x1 output.
  NOTE: This function is functionally equivalent to reduce_mean, but it has
  a baked-in average pool, which has better support across hardware.
Args:
input_tensor: input tensor
pool_op: pooling op (avg pool is default)
Returns:
a tensor batch_size x 1 x 1 x depth.
"""
shape = input_tensor.get_shape().as_list()
if shape[1] is None or shape[2] is None:
kernel_size = tf.convert_to_tensor(
[1, tf.shape(input_tensor)[1],
tf.shape(input_tensor)[2], 1])
else:
kernel_size = [1, shape[1], shape[2], 1]
output = pool_op(
input_tensor, ksize=kernel_size, strides=[1, 1, 1, 1], padding='VALID')
# Recover output shape, for unknown shape.
output.set_shape([None, 1, 1, None])
return output
def training_scope(is_training=True,
weight_decay=0.00004,
stddev=0.09,
dropout_keep_prob=0.8,
bn_decay=0.997):
"""Defines Mobilenet training scope.
Usage:
with tf.contrib.slim.arg_scope(mobilenet.training_scope()):
logits, endpoints = mobilenet_v2.mobilenet(input_tensor)
   # the network created will be trainable with dropout/batch norm
# initialized appropriately.
Args:
is_training: if set to False this will ensure that all customizations are
set to non-training mode. This might be helpful for code that is reused
across both training/evaluation, but most of the time training_scope with
value False is not needed.
weight_decay: The weight decay to use for regularizing the model.
stddev: Standard deviation for initialization, if negative uses xavier.
dropout_keep_prob: dropout keep probability
bn_decay: decay for the batch norm moving averages.
Returns:
An argument scope to use via arg_scope.
"""
# Note: do not introduce parameters that would change the inference
# model here (for example whether to use bias), modify conv_def instead.
batch_norm_params = {
'is_training': is_training,
'decay': bn_decay,
}
if stddev < 0:
weight_intitializer = slim.initializers.xavier_initializer()
else:
weight_intitializer = tf.truncated_normal_initializer(stddev=stddev)
# Set weight_decay for weights in Conv and FC layers.
with slim.arg_scope(
[slim.conv2d, slim.fully_connected, slim.separable_conv2d],
weights_initializer=weight_intitializer,
normalizer_fn=slim.batch_norm), \
slim.arg_scope([mobilenet_base, mobilenet], is_training=is_training),\
slim.arg_scope([slim.batch_norm], **batch_norm_params), \
slim.arg_scope([slim.dropout], is_training=is_training,
keep_prob=dropout_keep_prob), \
slim.arg_scope([slim.conv2d], \
weights_regularizer=slim.l2_regularizer(weight_decay)), \
slim.arg_scope([slim.separable_conv2d], weights_regularizer=None) as s:
return s
| apache-2.0 | 7,967,390,960,214,579,000 | 36.710956 | 80 | 0.665657 | false | 3.862942 | false | false | false |
Autodesk/molecular-design-toolkit | moldesign/interfaces/nbo_interface.py | 1 | 9611 | from __future__ import print_function, absolute_import, division
from future.builtins import *
from future import standard_library
standard_library.install_aliases()
# Copyright 2017 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import moldesign as mdt
from .. import units as u
from .. import utils
SIGMA_UTF = u"\u03C3"
PI_UTF = u"\u03C0"
def run_nbo(mol, requests=('nlmo', 'nbo'),
image='nbo',
engine=None):
wfn = mol.wfn
inputs = {'in.47': make_nbo_input_file(mol, requests)}
command = 'gennbo.i4.exe in.47'
engine = utils.if_not_none(engine, mdt.compute.config.get_engine())
imagename = mdt.compute.get_image_path(image)
job = engine.launch(imagename,
command,
inputs=inputs,
name="nbo, %s" % mol.name)
mdt.helpers.display_log(job.get_display_object(), "nbo, %s"%mol.name)
job.wait()
parsed_data = parse_nbo(job.get_output('FILE.10'),
len(mol.wfn.aobasis))
for orbtype, data in parsed_data.items():
if orbtype[0] == 'P': # copy data from the orthogonal orbitals
orthdata = parsed_data[orbtype[1:]]
for key in 'bond_names iatom jatom stars bondnums num_bonded_atoms'.split():
data[key] = orthdata[key]
data.occupations = [None for orb in data.coeffs]
add_orbitals(mol, wfn, data, orbtype)
wfn._nbo_job = job
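# Usage sketch (added for illustration; the MDT calls outside this module
# are assumptions):
#
#   mol = mdt.from_smiles('O')
#   mol.set_energy_model(mdt.models.RHF, basis='sto-3g')
#   mol.calculate()
#   run_nbo(mol, requests=('nbo',))   # adds NBO orbitals to mol.wfn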
def add_orbitals(mol, wfn, orbdata, orbtype):
orbs = []
for i in range(len(orbdata.coeffs)):
bond = None
atoms = [mol.atoms[orbdata.iatom[i] - 1]]
if orbdata.bond_names[i] == 'RY':
bname = '%s Ryd*' % atoms[0].name
nbotype = 'rydberg'
utf_name = bname
elif orbdata.bond_names[i] == 'LP':
bname = '%s lone pair' % atoms[0].name
nbotype = 'lone pair'
utf_name = bname
elif orbdata.bond_names[i] == 'LV':
bname = '%s lone vacancy' % atoms[0].name
nbotype = 'lone vacancy'
utf_name = bname
elif orbdata.num_bonded_atoms[i] == 1:
bname = '%s Core' % atoms[0].name
nbotype = 'core'
utf_name = bname
else:
atoms.append(mol.atoms[orbdata.jatom[i] - 1])
bond = mdt.Bond(*atoms)
if orbdata.bondnums[i] == 1: # THIS IS NOT CORRECT
nbotype = 'sigma'
utf_type = SIGMA_UTF
else:
nbotype = 'pi'
utf_type = PI_UTF
bname = '%s%s (%s - %s)' % (nbotype, orbdata.stars[i],
atoms[0].name, atoms[1].name)
utf_name = '%s%s (%s - %s)' % (utf_type, orbdata.stars[i],
atoms[0].name, atoms[1].name)
name = '%s %s' % (bname, orbtype)
orbs.append(mdt.Orbital(orbdata.coeffs[i],
wfn=wfn, occupation=orbdata.occupations[i],
atoms=atoms, name=name,
nbotype=nbotype,
bond=bond,
unicode_name=utf_name,
_data=orbdata))
return wfn.add_orbitals(orbs, orbtype=orbtype)
def make_nbo_input_file(mol, requests):
"""
:param mol:
:type mol: moldesign.molecules.Molecule
:return:
"""
# Units: angstroms, hartrees
wfn = mol.wfn
orbs = wfn.molecular_orbitals
nbofile = []
# TODO: check for open shell wfn (OPEN keyword)
# TODO: check normalization, orthogonalization
nbofile.append(" $GENNBO BODM NATOMS=%d NBAS=%d $END" %
(mol.num_atoms, len(wfn.aobasis)))
commands = ['NBOSUM']
for r in requests:
commands.append('AO%s=W10' % r.upper())
if r[0] != 'P': commands.append('%s' % r.upper())
nbofile.append('$NBO %s $END' % (' '.join(commands)))
nbofile.append("$COORD\n %s" % mol.name)
for iatom, atom in enumerate(mol.atoms):
#TODO: deal with pseudopotential electrons
x, y, z = list(map(repr, atom.position.value_in(u.angstrom)))
nbofile.append("%d %d %s %s %s" % (atom.atnum, atom.atnum,
x, y, z))
nbofile.append("$END")
nbofile.append("$BASIS")
nbofile.append(' CENTER = ' +
' '.join(str(1+bfn.atom.index) for bfn in wfn.aobasis))
nbofile.append(" LABEL = " +
' '.join(str(AOLABELS[bfn.orbtype]) for bfn in wfn.aobasis))
nbofile.append('$END')
#TODO: deal with CI wavefunctions ($WF keyword)
nbofile.append('$OVERLAP')
append_matrix(nbofile, wfn.aobasis.overlaps)
nbofile.append('$END')
nbofile.append('$DENSITY')
append_matrix(nbofile, wfn.density_matrix)
nbofile.append('$END')
return '\n '.join(nbofile)
def parse_nbo(f, nbasis):
lines = f.__iter__()
parsed = {}
while True:
try:
l = next(lines)
except StopIteration:
break
fields = l.split()
if fields[1:5] == 'in the AO basis:'.split():
orbname = fields[0]
assert orbname[-1] == 's'
orbname = orbname[:-1]
next(lines)
if orbname[0] == 'P': # these are pre-orthogonal orbitals, it only prints the coefficients
coeffs = _parse_wrapped_matrix(lines, nbasis)
parsed[orbname] = utils.DotDict(coeffs=np.array(coeffs))
else: # there's more complete information available
parsed[orbname] = read_orbital_set(lines, nbasis)
return parsed
def read_orbital_set(lineiter, nbasis):
# First, get the actual matrix
mat = _parse_wrapped_matrix(lineiter, nbasis)
    # Next, occupation numbers
    occupations = list(map(float, _get_wrapped_separated_vals(lineiter, nbasis)))
# Next, a line of things that always appear to be ones (for spin orbitals maybe?)
oneline = _get_wrapped_separated_vals(lineiter, nbasis)
for x in oneline: assert x == '1'
# next is number of atoms involved in the bond
num_bonded_atoms = list(map(int, _get_wrapped_separated_vals(lineiter, nbasis)))
bond_names = _get_wrapped_separated_vals(lineiter, nbasis)
# Next indicates whether real or virtual
stars = _get_wrapped_column_vals(lineiter, nbasis)
for s in stars: assert (s == '' or s == '*')
# number of bonds between this pair of atoms
bondnums = list(map(int, _get_wrapped_separated_vals(lineiter, nbasis)))
# first atom index (1-based)
iatom = list(map(int, _get_wrapped_separated_vals(lineiter, nbasis)))
jatom = list(map(int, _get_wrapped_separated_vals(lineiter, nbasis)))
# The rest appears to be 0 most of the time ...
return utils.DotDict(coeffs=np.array(mat),
iatom=iatom, jatom=jatom, bondnums=bondnums,
bond_names=bond_names,
num_bonded_atoms=num_bonded_atoms,
stars=stars, occupations=occupations)
def _parse_wrapped_matrix(lineiter, nbasis):
mat = []
for i in range(nbasis):
currline = list(map(float, _get_wrapped_separated_vals(lineiter, nbasis)))
assert len(currline) == nbasis
mat.append(currline)
return mat
def _get_wrapped_separated_vals(lineiter, nbasis):
vals = []
while True:
l = next(lineiter)
vals.extend(l.split())
if len(vals) == nbasis:
break
assert len(vals) < nbasis
return vals
def _get_wrapped_column_vals(lineiter, nbasis):
vals = []
while True:
        l = next(lineiter)[1:]
lenl = len(l)
for i in range(20):
if lenl <= 3*i + 1: break
vals.append(l[3*i: 3*i + 3].strip())
if len(vals) == nbasis:
break
assert len(vals) < nbasis
return vals
def append_matrix(l, mat):
for row in mat:
icol = 0
while icol < len(row):
l.append(' ' + ' '.join(map(repr, row[icol:icol + 6])))
icol += 6
AOLABELS = {'s': 1, 'px': 101, 'py': 102, 'pz': 103,
"dxx": 201, "dxy": 202, "dxz": 203, "dyy": 204, "dyz": 205, "dzz": 206,
"fxxx": 301, "fxxy": 302, "fxxz": 303, "fxyy": 304, "fxyz": 305,
"fxzz": 306, "fyyy": 307, "fyyz": 308, "fyzz": 309, "fzzz": 310,
"gxxxx": 401, "gxxxy": 402, "gxxxz": 403, "gxxyy": 404, "gxxyz": 405,
"gxxzz": 406, "gxyyy": 407, "gxyyz": 408, "gxyzz": 409, "gxzzz": 410,
"gyyyy": 411, "gyyyz": 412, "gyyzz": 413, "gyzzz": 414, "gzzzz": 415, # end of cartesian
# start of spherical:
'p(x)': 151, 'p(y)': 152, 'p(z)': 153,
"d(xy)": 251, "d(xz)": 252, "d(yz)": 253, "d(x2-y2)": 254, "d(z2)": 255,
"f(z(5z2-3r2))": 351, "f(x(5z2-r2))": 352, "f(y(5z2-r2))": 353, "f(z(x2-y2))": 354, "f(xyz)": 355,
"f(x(x2-3y2))": 356, "f(y(3x2-y2))": 357}
| apache-2.0 | 3,648,654,696,899,533,000 | 35.405303 | 110 | 0.553949 | false | 3.169855 | false | false | false |
wufangjie/leetcode | 187. Repeated DNA Sequences.py | 1 | 1241 | class Solution(object):
def findRepeatedDnaSequences(self, s):
"""
:type s: str
:rtype: List[str]
"""
more_than_once, once = set(), set()
for i in range(len(s) - 9):
t = s[i:i+10]
if t not in more_than_once:
if t in once:
more_than_once.add(t)
else:
once.add(t)
return list(more_than_once)
# if overlap is not permit
# more_than_once, once = {}, {}
# for i in range(len(s) - 9):
# t = s[i:i+10]
# if t not in more_than_once:
# if t in once:
# more_than_once[t] = [once[t], i]
# else:
# once[t] = i
# else:
# more_than_once[t][1] = i
# return list(k for k, v in more_than_once.items() if v[1] - v[0] >= 10)
if __name__ == '__main__':
assert sorted(Solution().findRepeatedDnaSequences("AAAAACCCCCAAAAACCCCCCAAAAAGGGTTT")) == ['AAAAACCCCC', 'CCCCCAAAAA']
assert Solution().findRepeatedDnaSequences('AAAAAAAAAAA') == ['AAAAAAAAAA']
# assert Solution().findRepeatedDnaSequences('AAAAAAAAAAAAAAAAAAAA') == ['AAAAAAAAAA']
| gpl-3.0 | -132,350,777,096,766,640 | 36.606061 | 122 | 0.48751 | false | 3.447222 | false | false | false |
ging/keystone | keystone/contrib/keystone_scim/routers.py | 1 | 11710 | #
# Copyright 2014 Telefonica Investigacion y Desarrollo, S.A.U
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""WSGI Routers for the SCIM API."""
from keystone.common import wsgi
import controllers
class ScimRouter(wsgi.ExtensionRouter):
PATH_PREFIX = '/OS-SCIM'
def add_routes(self, mapper):
user_controller = controllers.ScimUserV3Controller()
role_controller = controllers.ScimRoleV3Controller()
group_controller = controllers.ScimGroupV3Controller()
scim_info_controller = controllers.ScimInfoController()
org_controller = controllers.ScimOrganizationV3Controller()
# Users v1.1
mapper.connect(self.PATH_PREFIX + '/v1/Users',
controller=user_controller,
action='list_users',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Users',
controller=user_controller,
action='create_user',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v1/Users/{user_id}',
controller=user_controller,
action='get_user',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Users/{user_id}',
controller=user_controller,
action='patch_user',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v1/Users/{user_id}',
controller=user_controller,
action='put_user',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v1/Users/{user_id}',
controller=user_controller,
action='delete_user',
conditions=dict(method=['DELETE']))
# Users /v2
mapper.connect(self.PATH_PREFIX + '/v2/Users',
controller=user_controller,
action='list_users',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Users',
controller=user_controller,
action='create_user',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v2/Users/{user_id}',
controller=user_controller,
action='get_user',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Users/{user_id}',
controller=user_controller,
action='patch_user',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v2/Users/{user_id}',
controller=user_controller,
action='put_user',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v2/Users/{user_id}',
controller=user_controller,
action='delete_user',
conditions=dict(method=['DELETE']))
# Roles v1.1
mapper.connect(self.PATH_PREFIX + '/v1/Roles',
controller=role_controller,
action='scim_list_roles',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Roles',
controller=role_controller,
action='scim_create_role',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v1/Roles/{role_id}',
controller=role_controller,
action='scim_get_role',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Roles/{role_id}',
controller=role_controller,
action='scim_patch_role',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v1/Roles/{role_id}',
controller=role_controller,
action='scim_put_role',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v1/Roles/{role_id}',
controller=role_controller,
action='scim_delete_role',
conditions=dict(method=['DELETE']))
# Roles /v2
mapper.connect(self.PATH_PREFIX + '/v2/Roles',
controller=role_controller,
action='scim_list_roles',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Roles',
controller=role_controller,
action='scim_create_role',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v2/Roles/{role_id}',
controller=role_controller,
action='scim_get_role',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Roles/{role_id}',
controller=role_controller,
action='scim_patch_role',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v2/Roles/{role_id}',
controller=role_controller,
action='scim_put_role',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v2/Roles/{role_id}',
controller=role_controller,
action='scim_delete_role',
conditions=dict(method=['DELETE']))
# Groups v1.1
mapper.connect(self.PATH_PREFIX + '/v1/Groups',
controller=group_controller,
action='list_groups',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Groups',
controller=group_controller,
action='create_group',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v1/Groups/{group_id}',
controller=group_controller,
action='get_group',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Groups/{group_id}',
controller=group_controller,
action='patch_group',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v1/Groups/{group_id}',
controller=group_controller,
action='put_group',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v1/Groups/{group_id}',
controller=group_controller,
action='delete_group',
conditions=dict(method=['DELETE']))
        # Groups /v2
mapper.connect(self.PATH_PREFIX + '/v2/Groups',
controller=group_controller,
action='list_groups',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Groups',
controller=group_controller,
action='create_group',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v2/Groups/{group_id}',
controller=group_controller,
action='get_group',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Groups/{group_id}',
controller=group_controller,
action='patch_group',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v2/Groups/{group_id}',
controller=group_controller,
action='put_group',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v2/Groups/{group_id}',
controller=group_controller,
action='delete_group',
conditions=dict(method=['DELETE']))
# SCIM Info
mapper.connect(self.PATH_PREFIX + '/v1/ServiceProviderConfigs',
controller=scim_info_controller,
action='scim_get_service_provider_configs',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v1/Schemas',
controller=scim_info_controller,
action='scim_get_schemas',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/ServiceProviderConfigs',
controller=scim_info_controller,
action='scim_get_service_provider_configs',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Schemas',
controller=scim_info_controller,
action='scim_get_schemas',
conditions=dict(method=['GET']))
# Organizations
mapper.connect(self.PATH_PREFIX + '/v2/Organizations',
controller=org_controller,
action='list_organizations',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Organizations',
controller=org_controller,
action='create_organization',
conditions=dict(method=['POST']))
mapper.connect(self.PATH_PREFIX + '/v2/Organizations/{organization_id}',
controller=org_controller,
action='get_organization',
conditions=dict(method=['GET']))
mapper.connect(self.PATH_PREFIX + '/v2/Organizations/{organization_id}',
controller=org_controller,
action='patch_organization',
conditions=dict(method=['PATCH']))
mapper.connect(self.PATH_PREFIX + '/v2/Organizations/{organization_id}',
controller=org_controller,
action='put_organization',
conditions=dict(method=['PUT']))
mapper.connect(self.PATH_PREFIX + '/v2/Organizations/{organization_id}',
controller=org_controller,
action='delete_organization',
conditions=dict(method=['DELETE']))
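        # Added note (illustrative): with PATH_PREFIX = '/OS-SCIM' the
        # routes registered above expose endpoints such as
        #   GET    /OS-SCIM/v2/Users
        #   POST   /OS-SCIM/v2/Users
        #   PATCH  /OS-SCIM/v2/Users/{user_id}
        # plus the corresponding /v1 and Roles/Groups/Organizations variants.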
| apache-2.0 | 2,083,376,882,307,337,700 | 40.378092 | 80 | 0.514944 | false | 4.812988 | false | false | false |
openid/python-openid | openid/yadis/manager.py | 1 | 6142 | from __future__ import unicode_literals
class YadisServiceManager(object):
"""Holds the state of a list of selected Yadis services, managing
storing it in a session and iterating over the services in order."""
def __init__(self, starting_url, yadis_url, services, session_key):
# The URL that was used to initiate the Yadis protocol
self.starting_url = starting_url
# The URL after following redirects (the identifier)
self.yadis_url = yadis_url
# List of service elements
self.services = list(services)
self.session_key = session_key
# Reference to the current service object
self._current = None
def __len__(self):
"""How many untried services remain?"""
return len(self.services)
def __iter__(self):
return self
def next(self):
"""Return the next service
self.current() will continue to return that service until the
next call to this method."""
try:
self._current = self.services.pop(0)
except IndexError:
raise StopIteration
else:
return self._current
def current(self):
"""Return the current service.
Returns None if there are no services left.
"""
return self._current
def forURL(self, url):
return url in [self.starting_url, self.yadis_url]
def started(self):
"""Has the first service been returned?"""
return self._current is not None
def store(self, session):
"""Store this object in the session, by its session key."""
session[self.session_key] = self
class Discovery(object):
"""State management for discovery.
High-level usage pattern is to call .getNextService(discover) in
order to find the next available service for this user for this
    session. Once a request completes, call .cleanup() to clean up the
session state.
@ivar session: a dict-like object that stores state unique to the
requesting user-agent. This object must be able to store
serializable objects.
@ivar url: the URL that is used to make the discovery request
@ivar session_key_suffix: The suffix that will be used to identify
this object in the session object.
"""
DEFAULT_SUFFIX = 'auth'
PREFIX = '_yadis_services_'
def __init__(self, session, url, session_key_suffix=None):
"""Initialize a discovery object"""
self.session = session
self.url = url
if session_key_suffix is None:
session_key_suffix = self.DEFAULT_SUFFIX
self.session_key_suffix = session_key_suffix
def getNextService(self, discover):
"""Return the next authentication service for the pair of
user_input and session. This function handles fallback.
@param discover: a callable that takes a URL and returns a
list of services
@type discover: six.text_type -> [service]
@return: the next available service
"""
manager = self.getManager()
if manager is not None and not manager:
self.destroyManager()
if not manager:
yadis_url, services = discover(self.url)
manager = self.createManager(services, yadis_url)
if manager:
service = manager.next()
manager.store(self.session)
else:
service = None
return service
def cleanup(self, force=False):
"""Clean up Yadis-related services in the session and return
the most-recently-attempted service from the manager, if one
exists.
@param force: True if the manager should be deleted regardless
of whether it's a manager for self.url.
@return: current service endpoint object or None if there is
no current service
"""
manager = self.getManager(force=force)
if manager is not None:
service = manager.current()
self.destroyManager(force=force)
else:
service = None
return service
# Lower-level methods
def getSessionKey(self):
"""Get the session key for this starting URL and suffix
@return: The session key
@rtype: six.text_type
"""
return self.PREFIX + self.session_key_suffix
def getManager(self, force=False):
"""Extract the YadisServiceManager for this object's URL and
suffix from the session.
@param force: True if the manager should be returned
regardless of whether it's a manager for self.url.
@return: The current YadisServiceManager, if it's for this
URL, or else None
"""
manager = self.session.get(self.getSessionKey())
if (manager is not None and (manager.forURL(self.url) or force)):
return manager
else:
return None
def createManager(self, services, yadis_url=None):
"""Create a new YadisService Manager for this starting URL and
suffix, and store it in the session.
@raises KeyError: When I already have a manager.
@return: A new YadisServiceManager or None
"""
key = self.getSessionKey()
if self.getManager():
raise KeyError('There is already a %r manager for %r' %
(key, self.url))
if not services:
return None
manager = YadisServiceManager(self.url, yadis_url, services, key)
manager.store(self.session)
return manager
def destroyManager(self, force=False):
"""Delete any YadisServiceManager with this starting URL and
suffix from the session.
If there is no service manager or the service manager is for a
different URL, it silently does nothing.
@param force: True if the manager should be deleted regardless
of whether it's a manager for self.url.
"""
if self.getManager(force=force) is not None:
key = self.getSessionKey()
del self.session[key]
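# Usage sketch (added for illustration; ``session`` is any dict-like store
# and ``discover`` is a callable returning (yadis_url, services), e.g.
# openid.yadis.services.getServiceEndpoints):
#
#   disco = Discovery(session, claimed_url)
#   service = disco.getNextService(discover)   # next endpoint to try
#   ...
#   attempted = disco.cleanup()                # after the request completes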
| apache-2.0 | 8,008,034,754,780,183,000 | 30.020202 | 73 | 0.620156 | false | 4.542899 | false | false | false |
mojodna/debian-mapnik | plugins/input/osm/build.py | 1 | 1631 | #
# This file is part of Mapnik (c++ mapping toolkit)
#
# Copyright (C) 2007 Artem Pavlenko, Jean-Francois Doyon
#
# Mapnik is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# $Id$
Import ('env')
prefix = env['PREFIX']
plugin_env = env.Clone()
osm_src = Split(
"""
osmparser.cpp
osm.cpp
osm_datasource.cpp
osm_featureset.cpp
dataset_deliverer.cpp
basiccurl.cpp
"""
)
libraries = [ 'xml2' ]
libraries.append('curl')
libraries.append('mapnik')
libraries.append(env['ICU_LIB_NAME'])
input_plugin = plugin_env.SharedLibrary('../osm', source=osm_src, SHLIBPREFIX='', SHLIBSUFFIX='.input', LIBS=libraries, LINKFLAGS=env['CUSTOM_LDFLAGS'])
# if the plugin links to libmapnik ensure it is built first
Depends(input_plugin, env.subst('../../../src/%s' % env['MAPNIK_LIB_NAME']))
if 'uninstall' not in COMMAND_LINE_TARGETS:
env.Install(env['MAPNIK_INPUT_PLUGINS_DEST'], input_plugin)
env.Alias('install', env['MAPNIK_INPUT_PLUGINS_DEST'])
| lgpl-2.1 | 7,205,425,582,168,794,000 | 30.980392 | 152 | 0.72103 | false | 3.369835 | false | false | false |
dudulianangang/vps | EneConsTest.py | 1 | 5969 | import sdf
import matplotlib.pyplot as plt
import numpy as np
import matplotlib as mpl
plt.style.use('seaborn-white')
# plt.rcParams['font.family'] = 'sans-serif'
# plt.rcParams['font.sans-serif'] = 'Tahoma'
# # plt.rcParams['font.monospace'] = 'Ubuntu Mono'
plt.rcParams['font.size'] = 16
# plt.rcParams['axes.labelsize'] = 10
# plt.rcParams['axes.labelweight'] = 'bold'
# plt.rcParams['xtick.labelsize'] = 8
# plt.rcParams['ytick.labelsize'] = 8
# plt.rcParams['legend.fontsize'] = 10
# plt.rcParams['figure.titlesize'] = 12
# constants for normalization
n0 = 1.8e20
me = 9.1e-31
qe = 1.6e-19
ep = 8.9e-12
c = 3e8
wp = np.sqrt(n0*qe*qe/me/ep)
ld = c/wp
e0 = me*c*wp/qe
b0 = e0/c
tt = 1/wp
ts = 50*5
te = 1500
pct = 100
en0 = me*c**2
en1 = 0.5*ep*ld**2
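# Added explanatory note (interpretation of the constants above): wp is the
# electron plasma frequency sqrt(n0*qe^2/(me*ep)), ld = c/wp is the skin
# depth used as the unit of length, e0 = me*c*wp/qe and b0 = e0/c are the
# normalizing electric and magnetic fields, en0 = me*c^2 is the electron
# rest energy, and en1 = 0.5*ep*ld^2 converts a sum of squared SI fields
# over grid cells into joules, assuming a 2D grid with cell size ld.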
# simulation domain
nx = 3500
ny = 3500
lx = 3500
ly = 3500
# figure domain (set by grid)
grid_min_x = 0
grid_max_x = nx
grid_min_y = 0
grid_max_y = ny
Gx = np.linspace(0,lx,nx)
Gy = np.linspace(0,ly,ny)
gx = Gx[grid_min_x:grid_max_x+1]
gy = Gy[grid_min_y:grid_max_y+1]
# figure parameters
# fs = 24
jetcmap = plt.cm.get_cmap("rainbow", 9) #generate a jet map with 9 values
jet_vals = jetcmap(np.arange(9)) #extract those values as an array
jet_vals[0] = [1.0, 1, 1.0, 1] #change the first value
newcmap = mpl.colors.LinearSegmentedColormap.from_list("newjet", jet_vals)
# define array
EneBmE = np.ones(7)
EneBmI = np.ones(7)
EneBgE = np.ones(7)
EneBgI = np.ones(7)
sex = np.ones(7)
sey = np.ones(7)
sez = np.ones(7)
sbx = np.ones(7)
sby = np.ones(7)
sbz = np.ones(7)
TpeC1 = np.ones(7)
TpeS1 = np.ones(7)
TfeC1 = np.ones(7)
TfeS1 = np.ones(7)
TpeC2 = np.ones(7)
TpeS2 = np.ones(7)
TfeC2 = np.ones(7)
TfeS2 = np.ones(7)
TeC1 = np.ones(7)
TeS1 = np.ones(7)
TeC2 = np.ones(7)
TeS2 = np.ones(7)
time = np.ones(7)
# plot function
file = '/Volumes/yaowp2016/'
folder = 'nj'
for i in range(7):
ii = i*5
time[i] = i*ts
fname = file+folder+'/6'+str(ii).zfill(4)+'.sdf'
datafile = sdf.read(fname)
GamBmE = datafile.Particles_Gamma_subset_ele1_ele_bm.data
GamBmI = datafile.Particles_Gamma_subset_ion1_ion_bm.data
GamBgE = datafile.Particles_Gamma_subset_ele1_ele_e.data
GamBgI = datafile.Particles_Gamma_subset_ion1_ion_e.data
WgtBmE = datafile.Particles_Weight_subset_ele1_ele_bm.data
WgtBmI = datafile.Particles_Weight_subset_ion1_ion_bm.data
WgtBgE = datafile.Particles_Weight_subset_ele1_ele_e.data
WgtBgI = datafile.Particles_Weight_subset_ion1_ion_e.data
EneBmE[i] = np.sum((GamBmE-1)*en0*np.mean(WgtBmE))*pct
EneBmI[i] = np.sum((GamBmI-1)*en0*np.mean(WgtBmI))*pct
EneBgE[i] = np.sum((GamBgE-1)*en0*np.mean(WgtBgE))*pct
EneBgI[i] = np.sum((GamBgI-1)*en0*np.mean(WgtBgI))*pct
fname = file+folder+'/'+str(ii).zfill(4)+'.sdf'
datafile = sdf.read(fname)
Ex = datafile.Electric_Field_Ex.data
Ey = datafile.Electric_Field_Ey.data
Ez = datafile.Electric_Field_Ez.data
Bx = datafile.Magnetic_Field_Bx.data*c
By = datafile.Magnetic_Field_By.data*c
Bz = datafile.Magnetic_Field_Bz.data*c
sex[i] = np.sum(Ex**2)*en1
sey[i] = np.sum(Ey**2)*en1
sez[i] = np.sum(Ez**2)*en1
sbx[i] = np.sum(Bx**2)*en1
sby[i] = np.sum(By**2)*en1
sbz[i] = np.sum(Bz**2)*en1
TpeC1[i] = EneBmE[i]+EneBmI[i]+EneBgE[i]+EneBgI[i]
TfeC1[i] = sex[i]+sey[i]+sez[i]+sbx[i]+sby[i]+sbz[i]
TfeS1[i] = datafile.Total_Field_Energy_in_Simulation__J_.data
TpeS1[i] = datafile.Total_Particle_Energy_in_Simulation__J_.data
folder = 'nj_non'
for i in range(7):
ii = i*5
time[i] = i*ts
fname = file+folder+'/6'+str(ii).zfill(4)+'.sdf'
datafile = sdf.read(fname)
GamBmE = datafile.Particles_Gamma_subset_ele1_ele_bm.data
GamBmI = datafile.Particles_Gamma_subset_ion1_ion_bm.data
GamBgE = datafile.Particles_Gamma_subset_ele1_ele_e.data
GamBgI = datafile.Particles_Gamma_subset_ion1_ion_e.data
WgtBmE = datafile.Particles_Weight_subset_ele1_ele_bm.data
WgtBmI = datafile.Particles_Weight_subset_ion1_ion_bm.data
WgtBgE = datafile.Particles_Weight_subset_ele1_ele_e.data
WgtBgI = datafile.Particles_Weight_subset_ion1_ion_e.data
EneBmE[i] = np.sum((GamBmE-1)*en0*np.mean(WgtBmE))*pct
EneBmI[i] = np.sum((GamBmI-1)*en0*np.mean(WgtBmI))*pct
EneBgE[i] = np.sum((GamBgE-1)*en0*np.mean(WgtBgE))*pct
EneBgI[i] = np.sum((GamBgI-1)*en0*np.mean(WgtBgI))*pct
fname = file+folder+'/'+str(ii).zfill(4)+'.sdf'
datafile = sdf.read(fname)
Ex = datafile.Electric_Field_Ex.data
Ey = datafile.Electric_Field_Ey.data
Ez = datafile.Electric_Field_Ez.data
Bx = datafile.Magnetic_Field_Bx.data*c
By = datafile.Magnetic_Field_By.data*c
Bz = datafile.Magnetic_Field_Bz.data*c
sex[i] = np.sum(Ex**2)*en1
sey[i] = np.sum(Ey**2)*en1
sez[i] = np.sum(Ez**2)*en1
sbx[i] = np.sum(Bx**2)*en1
sby[i] = np.sum(By**2)*en1
sbz[i] = np.sum(Bz**2)*en1
TpeC2[i] = EneBmE[i]+EneBmI[i]+EneBgE[i]+EneBgI[i]
TfeC2[i] = sex[i]+sey[i]+sez[i]+sbx[i]+sby[i]+sbz[i]
TfeS2[i] = datafile.Total_Field_Energy_in_Simulation__J_.data
TpeS2[i] = datafile.Total_Particle_Energy_in_Simulation__J_.data
TeC1 = TpeC1+TfeC1
TeS1 = TpeS1+TfeS1
TeC2 = TpeC2+TfeC2
TeS2 = TpeS2+TfeS2
np.save('tpec1.npy', TpeC1)
np.save('tpes1.npy', TpeS1)
np.save('tfec1.npy', TfeC1)
np.save('tfes1.npy', TfeS1)
np.save('tpec2.npy', TpeC2)
np.save('tpes2.npy', TpeS2)
np.save('tfec2.npy', TfeC2)
np.save('tfes2.npy', TfeS2)
np.save('tec1.npy', TeC1)
np.save('tes1.npy', TeS1)
np.save('tec2.npy', TeC2)
np.save('tes2.npy', TeS2)
# plt.figure(figsize=(8,5))
# ax = plt.subplot()
# ax.plot(time, TpeC1,'r-', lw=2, label='tbc-cal')
# ax.plot(time, TpeS1,'r--', lw=2, label='tbc-sys')
# ax.plot(time, TpeC2,'b-', lw=2, label='pbc-cal')
# ax.plot(time, TpeS2,'b--', lw=2, label='pbc-sys')
# plt.xlabel('time($\omega_{pe}^{-1}$)',fontsize=24)
# plt.ylabel('energy($J$)',fontsize=24)
# plt.legend(loc='best', numpoints=1, fancybox=True)
# plt.title('total system energy',fontsize=32,fontstyle='normal')
# plt.show()
# plt.savefig(file+folder+'/plots/'+'TotalEnergyComp.png',bbox_inches='tight') # n means normalized
# plt.close()
| apache-2.0 | -2,299,900,135,137,136,400 | 26.892523 | 100 | 0.675658 | false | 2.017236 | false | false | false |
Adamssss/projectEuler | pb347.py | 1 | 1198 | import math
import time
t1 = time.time()
N = 10000000
prime = []
def primeSieve(n):
global prime
n = (n+1)//2
p = [True]*(n)
i = 1
prime.append(2)
while i < n:
if p[i]:
t = 2*i+1
prime.append(t)
p[i] = False
j = 2*i*i+2*i
while j < n:
p[j] = False
j += t
i += 1
return prime
primeSieve(N//2 + 100)
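# Added explanatory note: Project Euler 347 defines M(p, q, N) as the
# largest number <= N whose only prime factors are p and q (both present),
# and S(N) as the sum of all distinct M(p, q, N). Below, S(n) loops over
# prime pairs, M() fixes the mandatory factor p*q, and Mh() greedily looks
# for the largest p**a * q**b that still fits under n // (p*q).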
def S(n):
result = 0
for i in range(0,len(prime)-1):
if prime[i]*prime[i] > n:
break
for j in range(i+1,len(prime)):
if prime[i]*prime[j] > n:
break
result += M(prime[i],prime[j],n)
return result
def M(p,q,n):
if p*q > n:
return 0
m = p*q
r = m*Mh(p,q,n//m)
#print(p,q,n,r)
return r
def Mh(p,q,n):
if p > n and q > n:
return 1
t = 1
c = 0
while t <= n:
t= p*t
c += 1
t = t//p
c -= 1
m = t
while c > 0:
t = t//p
c -= 1
if t*q <= n:
t = t*q
if t > m:
m = t
return m
print(S(N))
print("time:",time.time()-t1)
| mit | 9,094,591,799,074,366,000 | 14.558442 | 44 | 0.3798 | false | 2.812207 | false | false | false |
tisnik/fabric8-analytics-common | integration-tests/features/steps/jobs_api.py | 1 | 14506 | """Tests for jobs API endpoints."""
import os
import requests
import uuid
from behave import given, then, when
from src.attribute_checks import check_timestamp, check_job_token_attributes
from src.parsing import parse_token_clause
from src.authorization_tokens import jobs_api_authorization
@given('Jobs debug API is running')
def running_jobs_debug_api(context):
"""Wait for the job debug REST API to be available."""
if not context.is_jobs_debug_api_running(context):
context.wait_for_jobs_debug_api_service(context, 60)
@when('I access jobs API {url:S}')
def jobs_api_url(context, url):
"""Access the jobs service API using the HTTP GET method."""
context.response = requests.get(context.jobs_api_url + url)
@when('I access jobs API {url:S} with authorization token')
def jobs_api_url_with_authorization_token(context, url):
"""Access the jobs service API using the HTTP GET method."""
context.response = requests.get(context.jobs_api_url + url,
headers=jobs_api_authorization(context))
@when('I read list of jobs')
@when('I read list of jobs with type {type}')
@when('I read list of jobs {token} authorization token')
@when('I read list of jobs with type {type} {token} authorization token')
def list_of_jobs(context, type=None, token=None):
"""Read list of jobs via job API."""
endpoint = job_endpoint(context)
if type is not None:
endpoint += "?job_type=" + type
use_token = parse_token_clause(token)
if use_token:
context.response = requests.get(endpoint, headers=jobs_api_authorization(context))
else:
context.response = requests.get(endpoint)
def check_all_report_attributes(report):
"""Check all report attributes."""
attributes = ["analyses", "analyses_finished", "analyses_finished_unique",
"analyses_unfinished", "analyses_unique", "packages",
"packages_finished", "versions"]
for attribute in attributes:
assert attribute in report
assert int(report[attribute]) >= 0
@then('I should see proper analyses report')
def check_job_debug_analyses_report(context):
"""Check the analyses report returned by job API."""
json_data = context.response.json()
assert json_data is not None
assert "now" in json_data
check_timestamp(json_data["now"])
assert "report" in json_data
report = json_data["report"]
check_all_report_attributes(report)
def flow_sheduling_endpoint(context, state, job_id=None):
"""Return URL to flow-scheduling with the given state and job ID."""
if job_id:
return "{jobs_api_url}api/v1/jobs/flow-scheduling?state={state}&job_id={job_id}".\
format(jobs_api_url=context.jobs_api_url, state=state, job_id=job_id)
else:
return "{jobs_api_url}api/v1/jobs/flow-scheduling?state={state}".\
format(jobs_api_url=context.jobs_api_url, state=state)
def job_metadata_filename(metadata):
"""Construct relative filename to job metadata."""
return "data/{metadata}".format(metadata=metadata)
def job_endpoint(context, job_id=None):
"""Return URL for given job id that can be used to job state manipulation."""
url = "{jobs_api_url}api/v1/jobs".format(
jobs_api_url=context.jobs_api_url)
if job_id is not None:
url = "{url}/{job_id}".format(url=url, job_id=job_id)
return url
def send_json_file_to_job_api(context, endpoint, filename, use_token):
"""Send the given file to the selected job API endpoints.
If the use_token is set, send the 'auth-token' header with the token taken
from the context environment.
"""
if use_token:
headers = jobs_api_authorization(context)
context.response = context.send_json_file(endpoint, filename, headers)
else:
context.response = context.send_json_file(endpoint, filename)
@when("I post a job metadata {metadata} with state {state}")
@when("I post a job metadata {metadata} with state {state} {token} authorization token")
def perform_post_job(context, metadata, state, token="without"):
"""Perform API call to create a new job using the provided metadata.
The token parameter can be set to 'with', 'without', or 'using'.
"""
filename = job_metadata_filename(metadata)
endpoint = flow_sheduling_endpoint(context, state)
use_token = parse_token_clause(token)
send_json_file_to_job_api(context, endpoint, filename, use_token)
def get_unique_job_id(context, job_id):
"""Return unique job ID consisting of generated UUID and actual ID."""
if 'job_id_prefix' in context:
return "{uuid}_{job_id}".format(uuid=context.job_id_prefix, job_id=job_id)
else:
return job_id
@when("I post a job metadata {metadata} with job id {job_id} and state {state}")
@when("I post a job metadata {metadata} with job id {job_id} and state {state} {token} "
"authorization token")
def perform_post_job_with_state(context, metadata, job_id, state, token="without"):
"""Perform API call to create a new job.
    The new job is created using the provided metadata and set to the given
    state. The token parameter can be set to 'with', 'without', or
'using'.
"""
filename = job_metadata_filename(metadata)
job_id = get_unique_job_id(context, job_id)
endpoint = flow_sheduling_endpoint(context, state, job_id)
use_token = parse_token_clause(token)
send_json_file_to_job_api(context, endpoint, filename, use_token)
@when("I delete job without id")
@when("I delete job without id {token} authorization token")
@when("I delete job with id {job_id}")
@when("I delete job with id {job_id} {token} authorization token")
def delete_job(context, job_id=None, token="without"):
"""Perform API call to delete a job with given ID."""
job_id = get_unique_job_id(context, job_id)
endpoint = job_endpoint(context, job_id)
use_token = parse_token_clause(token)
if use_token:
context.response = requests.delete(endpoint, headers=jobs_api_authorization(context))
else:
context.response = requests.delete(endpoint)
@when("I set status for job with id {job_id} to {status}")
@when("I set status for job with id {job_id} to {status} {token} authorization token")
def set_job_status(context, job_id, status, token="without"):
"""Perform API call to set job status."""
endpoint = job_endpoint(context, job_id)
url = "{endpoint}?state={status}".format(endpoint=endpoint, status=status)
use_token = parse_token_clause(token)
if use_token:
context.response = requests.put(url, headers=jobs_api_authorization(context))
else:
context.response = requests.put(url)
@when("I reset status for the job service")
@when("I set status for job service to {status}")
@when("I set status for job service to {status} {token} authorization token")
def set_job_service_status(context, status=None, token="without"):
"""Perform API call to set or reset job service status."""
url = "{jobs_api_url}api/v1/service/state".format(
jobs_api_url=context.jobs_api_url)
use_token = parse_token_clause(token)
if status is not None:
url = "{url}?state={status}".format(url=url, status=status)
if use_token:
context.response = requests.put(url, headers=jobs_api_authorization(context))
else:
context.response = requests.put(url)
@when("I clean all failed jobs")
@when("I clean all failed jobs {token} authorization token")
def clean_all_failed_jobs(context, token="without"):
"""Perform API call to clean up all failed jobs."""
url = "{url}api/v1/jobs/clean-failed".format(url=context.jobs_api_url)
use_token = parse_token_clause(token)
if use_token:
context.response = requests.delete(url, headers=jobs_api_authorization(context))
else:
context.response = requests.delete(url)
@when('I logout from the job service')
@when('I logout from the job service {token} authorization token')
def logout_from_the_jobs_service(context, token='without'):
"""Call API to logout from the job service."""
url = "{jobs_api_url}api/v1/logout".format(
jobs_api_url=context.jobs_api_url)
use_token = parse_token_clause(token)
if use_token:
headers = jobs_api_authorization(context)
        context.response = requests.put(url, headers=headers)
else:
context.response = requests.put(url)
@when('I access the job service endpoint to generate token')
def job_service_generate_token(context):
"""Generate token for the job service."""
url = "{jobs_api_url}api/v1/generate-token".format(
jobs_api_url=context.jobs_api_url)
context.response = requests.get(url)
@then('I should be redirected to {url}')
def check_redirection(context, url):
"""Check the response with redirection."""
assert context.response is not None
assert context.response.history is not None
assert context.response.url is not None
assert context.response.url.startswith(url)
@when("I ask for analyses report for ecosystem {ecosystem}")
@when("I ask for analyses report for ecosystem {ecosystem} {token} authorization token")
@when("I ask for analyses report for ecosystem {ecosystem} from date {from_date} {token} "
"authorization token")
@when("I ask for analyses report for ecosystem {ecosystem} to date {to_date} {token} "
"authorization token")
@when("I ask for analyses report for ecosystem {ecosystem} between dates {from_date} {to_date} "
"{token} authorization token")
def access_analyses_report(context, ecosystem, from_date=None, to_date=None, token="without"):
"""Perform API call to get analyses report for selected ecosystem."""
use_token = parse_token_clause(token)
url = "{url}api/v1/debug/analyses-report?ecosystem={ecosystem}".format(
url=context.jobs_api_url, ecosystem=ecosystem)
if from_date is not None:
url += "&from_date=" + from_date
if to_date is not None:
url += "&to_date=" + to_date
if use_token:
headers = jobs_api_authorization(context)
context.response = requests.get(url, headers=headers)
else:
context.response = requests.get(url)
def get_jobs_count(context):
"""Return job count read from the JSON response."""
jsondata = context.response.json()
jobs = jsondata['jobs']
assert jobs is not None
return jsondata['jobs_count']
@then('I should see {num:d} jobs')
def check_jobs_count(context, num):
"""Check the number of jobs."""
jobs_count = get_jobs_count(context)
assert jobs_count == num
@then('I should see N jobs')
def check_jobs(context):
"""Check and remember the number of jobs."""
jobs_count = get_jobs_count(context)
context.jobs_count = jobs_count
@then('I should see N+{num:d} jobs')
def check_jobs_count_plus_one(context, num):
"""Check the relative jobs count and remember the number of jobs."""
assert context.jobs_count is not None, \
"Please use 'I should see N jobs' test step first"
old_jobs_count = context.jobs_count
jobs_count = get_jobs_count(context)
expected = old_jobs_count + num
assert jobs_count == expected, "Expected %d jobs, but %d found instead" % \
(expected, jobs_count)
# remember the new number
context.jobs_count = jobs_count
def get_job_by_id(jobs, job_id):
"""Find the job by its ID."""
return next((job for job in jobs if job["job_id"] == job_id), None)
def check_job_state(job, state):
"""Check the state of given job."""
assert job is not None
assert job["state"] is not None
assert job["state"] == state
@then('I should find job with ID {job_id}')
@then('I should find job with ID {job_id} and state {state}')
def find_job(context, job_id, state=None):
"""Check the job ID existence.
Check if job with given ID is returned from the service and optionally if
the job status has expected value.
"""
jsondata = context.response.json()
jobs = jsondata['jobs']
job_id = get_unique_job_id(context, job_id)
job_ids = [job["job_id"] for job in jobs]
assert job_id in job_ids
if state is not None:
job = get_job_by_id(jobs, job_id)
check_job_state(job, state)
@then('I should not find job with ID {job_id}')
def should_not_find_job_by_id(context, job_id):
"""Check if job with given ID does not exist."""
jsondata = context.response.json()
jobs = jsondata['jobs']
job_id = get_unique_job_id(context, job_id)
job_ids = [job["job_id"] for job in jobs]
assert job_id not in job_ids
@when('I acquire job API authorization token')
def acquire_jobs_api_authorization_token(context):
"""Acquite the job API authorization token from the environment variable."""
context.jobs_api_token = os.environ.get("JOB_API_TOKEN")
# TODO: authorization via GitHub?
def check_token_attributes(token):
"""Check if given token has all required attributes."""
assert "token" in token
assert "rate" in token
assert "resources" in token
def check_token_name(token):
"""Check token name."""
resources = token["resources"]
token_names = ["core", "graphql", "search"]
for token_name in token_names:
assert token_name in resources
check_job_token_attributes(resources[token_name])
@then('I should see proper information about job API tokens')
def check_job_api_tokens_information(context):
"""Check the tokens information returned by job API."""
json_data = context.response.json()
assert json_data is not None
assert "tokens" in json_data
tokens = json_data["tokens"]
assert len(tokens) > 0
for token in tokens:
check_token_attributes(token)
rate_token = token["rate"]
check_job_token_attributes(rate_token)
check_token_name(token)
@when('I generate unique job ID prefix')
def generate_job_id_prefix(context):
"""Generate unique job ID prefix."""
context.job_id_prefix = uuid.uuid1()
@when("I perform kerberized {method} request to {url}")
def perform_kerberized_request(context, method, url):
"""Call REST API on coreapi-server."""
command = "curl -s -X {method} --negotiate -u : " + \
"http://coreapi-server:5000{url}".format(method=method, url=url)
context.kerb_request = \
context.exec_command_in_container(context.client, context.container,
command)
| apache-2.0 | -6,514,030,452,853,019,000 | 35.539043 | 96 | 0.674411 | false | 3.595935 | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.