Dataset schema (one row per source file):

| Column | Type | Range / classes |
|---|---|---|
| repo_name | string | lengths 5 to 92 |
| path | string | lengths 4 to 221 |
| copies | string | 19 classes |
| size | string | lengths 4 to 6 |
| content | string | lengths 766 to 896k |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51 to 99.9 |
| line_max | int64 | 32 to 997 |
| alpha_frac | float64 | 0.25 to 0.96 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.5 to 13.6 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |
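
For programmatic work with records shaped like this, a minimal filtering sketch (the dataset id is a placeholder and the `datasets` usage is an assumption about how this dump would be hosted, not a documented fact):

```python
from datasets import load_dataset

# Hypothetical dataset id; substitute the real one.
ds = load_dataset("example/python-code-dump", split="train")

# Keep human-written, non-test files only, using the boolean columns above.
kept = ds.filter(lambda r: not r["autogenerated"] and not r["config_test"])
print(kept[0]["repo_name"], kept[0]["path"], kept[0]["license"])
```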
repo: Vayel/WAMPLab | path: transaction/v1/locator.py | copies: 1 | size: 1589

from twisted.internet.defer import inlineCallbacks
from autobahn import wamp
from autobahn.twisted.wamp import ApplicationSession
from autobahn.twisted.wamp import ApplicationRunner
class Locator(ApplicationSession):
@inlineCallbacks
def onJoin(self, details):
yield self.register(self)
@wamp.register(u'locator.move')
@inlineCallbacks
def move(self):
"""Try to mark the current position and to go to the next one. Fail if
the current position is not markable or if the next position does not
exist.
"""
print('Move!')
pos = yield self.call('data.get_position')
direction = yield self.call('data.get_direction')
next_pos = pos + direction
markable = yield self.call('data.is_markable', pos)
next_existing = yield self.call('data.is_pos', next_pos)
if markable and next_existing:
self.call('data.mark_pos', pos)
self.call('data.set_position', next_pos)
elif not markable:
self.publish(
'error',
'The pos {} is not markable.'.format(pos)
)
else:
self.publish(
'error',
'The pos {} does not exist.'.format(next_pos)
)
# Always done
self.call('data.sth')
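
# --- Editor's sketch (not part of the original module) ---
# A second session that invokes the 'locator.move' procedure registered
# above and listens on the 'error' topic that move() publishes to. The
# router URL and realm are assumed to match the runner below.
class LocatorClient(ApplicationSession):

    @inlineCallbacks
    def onJoin(self, details):
        yield self.subscribe(lambda msg: print('error:', msg), u'error')
        yield self.call(u'locator.move')
        self.leave()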
if __name__ == "__main__":
print('Starting Locator component...')
ApplicationRunner(url='ws://localhost:8080/ws', realm='realm1').run(Locator)
license: gpl-2.0 | hash: -6,945,406,505,201,642,000 | line_mean: 28.981132 | line_max: 80 | alpha_frac: 0.563247 | autogenerated: false | ratio: 4.34153 | config_test: false | has_no_keywords: false | few_assignments: false
repo: iiitv/algos | path: breadth_first_traversal/breadth_first_traversal.py | copies: 1 | size: 2410

"""
Breadth-first traversal is an algorithm for traversing a tree or
graph data structure. It starts at the tree root (or some arbitrary node of a
graph, sometimes referred to as a 'search key' [1]) and explores the neighbor
nodes at that level first, before moving on to the next level.
"""
from collections import deque
def breadth_first_traversal(graph, source):
""" Performs a breadth-first traversal on a graph
Args:
        graph (list of list of int): Adjacency list representation of graph
source (int): Index of source vertex to begin search from
Returns:
list of dicts describing each vertex in the searched graph
-> [{distance: _, predecessor: _ }]
"""
vertex_info = []
for i in range(len(graph)):
vertex_info.append({"distance": None, "predecessor": None})
vertex_info[source]["distance"] = 0
search_queue = deque()
search_queue.append(source)
while search_queue:
u = search_queue.popleft()
for v in graph[u]:
if vertex_info[v]["distance"] is None:
vertex_info[v]["distance"] = vertex_info[u]["distance"] + 1
vertex_info[v]["predecessor"] = u
search_queue.append(v)
return vertex_info
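
# --- Editor's sketch (not part of the original module) ---
# The predecessor links returned above encode a shortest-path tree rooted
# at the source; this helper rebuilds the actual path to a target vertex.
def reconstruct_path(vertex_info, source, target):
    """Return the shortest path from source to target, or None."""
    if vertex_info[target]["distance"] is None:
        return None  # target is unreachable from source
    path = [target]
    while path[-1] != source:
        path.append(vertex_info[path[-1]]["predecessor"])
    path.reverse()
    return path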
def main():
graph_adj_list = [
[1],
[0, 4, 5],
[3, 4, 5],
[2, 6],
[1, 2],
[1, 2, 6],
[3, 5],
[]
]
vertex_info = breadth_first_traversal(graph_adj_list, 3)
for i in range(len(graph_adj_list)):
print("vertex %s : distance = %s, predecessor = %s" %
(i, vertex_info[i]["distance"], vertex_info[i]["predecessor"]))
assert(vertex_info[0] == {
"distance": 4,
"predecessor": 1
})
assert(vertex_info[1] == {
"distance": 3,
"predecessor": 4
})
assert(vertex_info[2] == {
"distance": 1,
"predecessor": 3
})
assert(vertex_info[3] == {
"distance": 0,
"predecessor": None
})
assert(vertex_info[4] == {
"distance": 2,
"predecessor": 2
})
assert(vertex_info[5] == {
"distance": 2,
"predecessor": 2
})
assert(vertex_info[6] == {
"distance": 1,
"predecessor": 3
})
assert(vertex_info[7] == {
"distance": None,
"predecessor": None
})
if __name__ == '__main__':
main()
license: mit | hash: -3,733,688,494,333,147,000 | line_mean: 25.195652 | line_max: 77 | alpha_frac: 0.544813 | autogenerated: false | ratio: 3.580981 | config_test: false | has_no_keywords: false | few_assignments: false
repo: Kortemme-Lab/protein_feature_analysis | path: ProteinFeatureAnalyzer/features/data_loading.py | copies: 1 | size: 3308

import os
import io
import Bio.PDB as PDB
from . import topology
from . import secondary_structures
def structure_from_pdb_file(file_path, name=''):
'''Read the structure stored in a PDB file.'''
parser = PDB.PDBParser()
return parser.get_structure(name, file_path)
def structure_from_pdb_string(pdb_string, name=''):
'''Read the structure stored in a PDB string.'''
parser = PDB.PDBParser()
pdb_sf = io.StringIO(pdb_string)
return parser.get_structure(name, pdb_sf)
def load_data_from_cath_pmls(input_path, output_path, job_list, dssp_path):
'''Load data from structures in the input path.
The input data should be stored in .pml files of superposed homologous
superfamilies from the CATH database.
'''
superfamilies = []
for f in job_list:
if f.endswith('.pml'):
# Make a scratch directory
scratch_path = os.path.join(output_path, f[0:-4])
if not os.path.exists(scratch_path):
os.mkdir(scratch_path)
# Load data from one file
load_from_one_cath_pml_file(os.path.join(input_path, f), scratch_path, superfamilies, dssp_path)
return superfamilies
def load_from_one_cath_pml_file(pml_file, scratch_path, superfamilies, dssp_path):
'''Load data from a .pml file of superposed
homologous superfamilies from the CATH database.
'''
superfamilies.append([])
candidate_proteins = []
with open(pml_file, 'r') as f:
while True:
line = f.readline()
if not line: break
# Read one structure
if line.strip().startswith('cmd.read_pdbstr'):
pdb_lines = [line.strip()[19:].strip('\\')]
pdb_id = ''
while True:
line = f.readline()
if line.strip().startswith('"""'):
pdb_id = line.strip()[5:12]
break
pdb_line = line.strip().strip('\\')
if len(pdb_line) > 17:
                        pdb_line = pdb_line[0:16] + ' ' + pdb_line[17:] # Remove all altLoc flags
                    pdb_lines.append(pdb_line)
# Make a pdb file of the structure for DSSP analysis
structure = structure_from_pdb_string('\n'.join(pdb_lines), pdb_id)
# Store structures without chain breaks
if len(topology.find_structure_chain_breaks(structure)) == 0:
structure_path = os.path.join(scratch_path, pdb_id + '.pdb')
                    pdb_io = PDB.PDBIO()  # renamed to avoid shadowing the io module
                    pdb_io.set_structure(structure)
                    pdb_io.save(structure_path)
candidate_proteins.append({'structure' : structure, 'path' : structure_path})
for p in candidate_proteins:
try:
find_secondary_structures(p, dssp_path)
        except Exception:
            continue
superfamilies[-1].append(p) # Add a protein to a superfamily if there's no exception
def find_secondary_structures(protein_dict, dssp_path):
'''Find secondary structures of a protein.
    Arguments:
    - protein_dict - a dictionary storing information about a protein
'''
protein_dict['dssp_dict'], protein_dict['dssp_key_map'] = \
secondary_structures.make_dssp_dict(protein_dict['path'], dssp_path)
protein_dict['ss_list'], protein_dict['sheet_list'] = \
secondary_structures.pack_dssp_dict_into_ss_list(protein_dict['structure'][0],
protein_dict['dssp_dict'], protein_dict['dssp_key_map'])
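
# --- Editor's sketch (not part of the original module) ---
# A hedged example of driving the loader above; the directories, .pml file
# name and DSSP binary location are all illustrative assumptions.
if __name__ == '__main__':
    jobs = ['1.10.8.10.pml']  # hypothetical CATH superfamily dump
    superfamilies = load_data_from_cath_pmls(
        'input_pmls', 'scratch', jobs, '/usr/local/bin/mkdssp')
    print('Loaded {0} superfamilies'.format(len(superfamilies)))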
license: mit | hash: -2,651,815,429,011,390,500 | line_mean: 29.62963 | line_max: 102 | alpha_frac: 0.637848 | autogenerated: false | ratio: 3.406797 | config_test: false | has_no_keywords: false | few_assignments: false
repo: jralls/gramps | path: gramps/plugins/webreport/person.py | copies: 1 | size: 75620

# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2007 Johan Gonqvist <[email protected]>
# Copyright (C) 2007-2009 Gary Burton <[email protected]>
# Copyright (C) 2007-2009 Stephane Charette <[email protected]>
# Copyright (C) 2008-2009 Brian G. Matherly
# Copyright (C) 2008 Jason M. Simanek <[email protected]>
# Copyright (C) 2008-2011 Rob G. Healey <[email protected]>
# Copyright (C) 2010 Doug Blank <[email protected]>
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2010-2017 Serge Noiraud
# Copyright (C) 2011 Tim G L Lyons
# Copyright (C) 2013 Benny Malengier
# Copyright (C) 2016 Allen Crider
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Narrative Web Page generator.
Class:
    PersonPages - Person index page and individual Person pages
"""
#------------------------------------------------
# python modules
#------------------------------------------------
from collections import defaultdict
from operator import itemgetter
from decimal import Decimal, getcontext
import logging
#------------------------------------------------
# Gramps module
#------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
from gramps.gen.lib import (ChildRefType, Date, Name, Person, EventRoleType)
from gramps.gen.lib.date import Today
from gramps.gen.plug.report import Bibliography
from gramps.gen.plug.report import utils
from gramps.gen.utils.alive import probably_alive
from gramps.gen.constfunc import win
from gramps.gen.display.name import displayer as _nd
from gramps.gen.utils.db import get_birth_or_fallback, get_death_or_fallback
from gramps.plugins.lib.libhtml import Html
from gramps.gen.utils.place import conv_lat_lon
#------------------------------------------------
# specific narrative web import
#------------------------------------------------
from gramps.plugins.webreport.basepage import BasePage
from gramps.plugins.webreport.common import (get_first_letters, _KEYPERSON,
alphabet_navigation, sort_people,
_NAME_STYLE_FIRST, first_letter,
get_index_letter, add_birthdate,
primary_difference, FULLCLEAR,
_find_birth_date, _find_death_date,
MARKER_PATH, OSM_MARKERS,
GOOGLE_MAPS, MARKERS, html_escape,
DROPMASTERS, FAMILYLINKS)
_ = glocale.translation.sgettext
LOG = logging.getLogger(".NarrativeWeb")
getcontext().prec = 8
_WIDTH = 160
_HEIGHT = 64
_VGAP = 10
_HGAP = 30
_SHADOW = 5
_XOFFSET = 5
#################################################
#
# creates the Individual List Page and IndividualPages
#
#################################################
class PersonPages(BasePage):
"""
This class is responsible for displaying information about the 'Person'
database objects. It displays this information under the 'Individuals'
tab. It is told by the 'add_instances' call which 'Person's to display,
and remembers the list of persons. A single call to 'display_pages'
displays both the Individual List (Index) page and all the Individual
pages.
The base class 'BasePage' is initialised once for each page that is
displayed.
"""
def __init__(self, report):
"""
@param: report -- The instance of the main report class for this report
"""
BasePage.__init__(self, report, title="")
self.ind_dict = defaultdict(set)
self.mapservice = None
self.sort_name = None
self.googleopts = None
self.googlemapkey = None
self.birthorder = None
self.person = None
self.familymappages = None
self.rel_class = None
self.placemappages = None
self.name = None
def display_pages(self, title):
"""
Generate and output the pages under the Individuals tab, namely the
individual index and the individual pages.
@param: title -- Is the title of the web page
"""
LOG.debug("obj_dict[Person]")
for item in self.report.obj_dict[Person].items():
LOG.debug(" %s", str(item))
with self.r_user.progress(_("Narrated Web Site Report"),
_('Creating individual pages'),
len(self.report.obj_dict[Person]) + 1
) as step:
self.individuallistpage(self.report, title,
self.report.obj_dict[Person].keys())
for person_handle in sorted(self.report.obj_dict[Person]):
step()
person = self.r_db.get_person_from_handle(person_handle)
self.individualpage(self.report, title, person)
#################################################
#
# creates the Individual List Page
#
#################################################
def individuallistpage(self, report, title, ppl_handle_list):
"""
Creates an individual page
@param: report -- The instance of the main report class
for this report
@param: title -- Is the title of the web page
@param: ppl_handle_list -- The list of people for whom we need
to create a page.
"""
BasePage.__init__(self, report, title)
prev_letter = " "
# plugin variables for this module
showbirth = report.options['showbirth']
showdeath = report.options['showdeath']
showpartner = report.options['showpartner']
showparents = report.options['showparents']
output_file, sio = self.report.create_file("individuals")
indlistpage, head, body = self.write_header(self._("Individuals"))
date = 0
# begin Individuals division
with Html("div", class_="content", id="Individuals") as individuallist:
body += individuallist
# Individual List page message
msg = self._("This page contains an index of all the individuals "
"in the database, sorted by their last names. "
"Selecting the person’s "
"name will take you to that "
"person’s individual page.")
individuallist += Html("p", msg, id="description")
# add alphabet navigation
index_list = get_first_letters(self.r_db, ppl_handle_list,
_KEYPERSON, rlocale=self.rlocale)
alpha_nav = alphabet_navigation(index_list, self.rlocale)
if alpha_nav is not None:
individuallist += alpha_nav
# begin table and table head
with Html("table",
class_="infolist primobjlist IndividualList") as table:
individuallist += table
thead = Html("thead")
table += thead
trow = Html("tr")
thead += trow
# show surname and first name
trow += Html("th", self._("Surname"), class_="ColumnSurname",
inline=True)
trow += Html("th", self._("Given Name"), class_="ColumnName",
inline=True)
if showbirth:
trow += Html("th", self._("Birth"), class_="ColumnDate",
inline=True)
if showdeath:
trow += Html("th", self._("Death"), class_="ColumnDate",
inline=True)
if showpartner:
trow += Html("th", self._("Partner"),
class_="ColumnPartner",
inline=True)
if showparents:
trow += Html("th", self._("Parents"),
class_="ColumnParents",
inline=True)
tbody = Html("tbody")
table += tbody
ppl_handle_list = sort_people(self.r_db, ppl_handle_list,
self.rlocale)
first = True
for (surname, handle_list) in ppl_handle_list:
if surname and not surname.isspace():
letter = get_index_letter(first_letter(surname), index_list,
self.rlocale)
else:
letter = ' '
surname = self._("<absent>")
first_surname = True
for person_handle in sorted(handle_list,
key=self.sort_on_name_and_grampsid):
person = self.r_db.get_person_from_handle(person_handle)
if person.get_change_time() > date:
date = person.get_change_time()
# surname column
trow = Html("tr")
tbody += trow
tcell = Html("td", class_="ColumnSurname", inline=True)
trow += tcell
if first or primary_difference(letter, prev_letter,
self.rlocale):
first = False
first_surname = False
prev_letter = letter
trow.attr = 'class = "BeginSurname"'
                            ttle = self._("Surnames %(surname)s beginning "
                                          "with letter %(letter)s") % {
                                              'surname' : surname,
                                              'letter' : letter}
tcell += Html(
"a", html_escape(surname), name=letter,
id_=letter,
title=ttle)
elif first_surname:
first_surname = False
tcell += Html("a", html_escape(surname),
title=self._("Surnames") + " " + surname)
else:
tcell += " "
# firstname column
link = self.new_person_link(person_handle, person=person,
name_style=_NAME_STYLE_FIRST)
trow += Html("td", link, class_="ColumnName")
# birth column
if showbirth:
tcell = Html("td", class_="ColumnBirth", inline=True)
trow += tcell
birth_date = _find_birth_date(self.r_db, person)
if birth_date is not None:
if birth_date.fallback:
tcell += Html('em',
self.rlocale.get_date(birth_date),
inline=True)
else:
tcell += self.rlocale.get_date(birth_date)
else:
tcell += " "
# death column
if showdeath:
tcell = Html("td", class_="ColumnDeath", inline=True)
trow += tcell
death_date = _find_death_date(self.r_db, person)
if death_date is not None:
if death_date.fallback:
tcell += Html('em',
self.rlocale.get_date(death_date),
inline=True)
else:
tcell += self.rlocale.get_date(death_date)
else:
tcell += " "
# partner column
if showpartner:
family_list = person.get_family_handle_list()
first_family = True
#partner_name = None
tcell = () # pylint: disable=R0204
if family_list:
for family_handle in family_list:
family = self.r_db.get_family_from_handle(
family_handle)
partner_handle = utils.find_spouse(
person, family)
if partner_handle:
if not first_family:
# have to do this to get the comma on
# the same line as the link
if isinstance(tcell[-1], Html):
# tcell is an instance of Html (or
# of a subclass thereof)
tcell[-1].inside += ","
else:
tcell = tcell[:-1] + (
# TODO for Arabic, translate?
(tcell[-1] + ", "),)
# Have to manipulate as tuples so that
# subsequent people are not nested
# within the first link
tcell += (
self.new_person_link(partner_handle),)
first_family = False
else:
tcell = " "
trow += Html("td", class_="ColumnPartner") + tcell
# parents column
if showparents:
parent_hdl_list = person.get_parent_family_handle_list()
if parent_hdl_list:
parent_handle = parent_hdl_list[0]
family = self.r_db.get_family_from_handle(
parent_handle)
father_handle = family.get_father_handle()
mother_handle = family.get_mother_handle()
if father_handle:
father = self.r_db.get_person_from_handle(
father_handle)
else:
father = None
if mother_handle:
mother = self.r_db.get_person_from_handle(
mother_handle)
else:
mother = None
if father:
father_name = self.get_name(father)
if mother:
mother_name = self.get_name(mother)
samerow = False
if mother and father:
tcell = (Html("span", father_name,
class_="father fatherNmother",
inline=True),
Html("span", mother_name,
class_="mother", inline=True))
elif mother:
tcell = Html("span", mother_name,
class_="mother", inline=True)
elif father:
tcell = Html("span", father_name,
class_="father", inline=True)
else:
tcell = " "
samerow = True
else:
tcell = " "
samerow = True
trow += Html("td", class_="ColumnParents",
inline=samerow) + tcell
# create clear line for proper styling
# create footer section
footer = self.write_footer(date)
body += (FULLCLEAR, footer)
# send page out for processing
# and close the file
self.xhtml_writer(indlistpage, output_file, sio, date)
#################################################
#
# creates an Individual Page
#
#################################################
gender_map = {
Person.MALE : _('male'),
Person.FEMALE : _('female'),
Person.UNKNOWN : _('unknown'),
}
def individualpage(self, report, title, person):
"""
Creates an individual page
@param: report -- The instance of the main report class for this report
@param: title -- Is the title of the web page
@param: person -- The person to use for this page.
"""
BasePage.__init__(self, report, title, person.get_gramps_id())
place_lat_long = []
self.person = person
self.bibli = Bibliography()
self.sort_name = self.get_name(person)
self.name = self.get_name(person)
date = self.person.get_change_time()
# to be used in the Family Map Pages...
self.familymappages = self.report.options['familymappages']
self.placemappages = self.report.options['placemappages']
self.mapservice = self.report.options['mapservice']
self.googleopts = self.report.options['googleopts']
self.googlemapkey = self.report.options['googlemapkey']
# decide if we will sort the birth order of siblings...
self.birthorder = self.report.options['birthorder']
# get the Relationship Calculator so that we can determine
# bio, half, step- siblings for use in display_ind_parents() ...
self.rel_class = self.report.rel_class
output_file, sio = self.report.create_file(person.get_handle(), "ppl")
self.uplink = True
indivdetpage, head, body = self.write_header(self.sort_name)
# attach the ancestortree style sheet if ancestor
# graph is being created?
if self.report.options["ancestortree"]:
if self.usecms:
fname = "/".join([self.target_uri, "css", "ancestortree.css"])
else:
fname = "/".join(["css", "ancestortree.css"])
url = self.report.build_url_fname(fname, None, self.uplink)
head += Html("link", href=url, type="text/css", media="screen",
rel="stylesheet")
# begin individualdetail division
with Html("div", class_="content",
id='IndividualDetail') as individualdetail:
body += individualdetail
# display a person's general data
thumbnail, name, summary = self.display_ind_general()
if thumbnail is not None:
individualdetail += thumbnail
individualdetail += (name, summary)
# display a person's events
sect2 = self.display_ind_events(place_lat_long)
if sect2 is not None:
individualdetail += sect2
# display relationship to the center person
sect3 = self.display_ind_center_person()
if sect3 is not None:
individualdetail += sect3
# display parents
sect4 = self.display_ind_parents()
if sect4 is not None:
individualdetail += sect4
# display relationships
relationships = self.display_relationships(self.person,
place_lat_long)
if relationships is not None:
individualdetail += relationships
# display LDS ordinance
sect5 = self.display_lds_ordinance(self.person)
if sect5 is not None:
individualdetail += sect5
# display address(es) and show sources
sect6 = self.display_addr_list(self.person.get_address_list(), True)
if sect6 is not None:
individualdetail += sect6
photo_list = self.person.get_media_list()
media_list = photo_list[:]
# if Family Pages are not being created, then include the Family
# Media objects? There is no reason to add these objects to the
# Individual Pages...
if not self.inc_families:
for handle in self.person.get_family_handle_list():
family = self.r_db.get_family_from_handle(handle)
if family:
media_list += family.get_media_list()
for evt_ref in family.get_event_ref_list():
event = self.r_db.get_event_from_handle(evt_ref.ref)
media_list += event.get_media_list()
# if the Event Pages are not being created, then include the Event
# Media objects? There is no reason to add these objects to the
# Individual Pages...
if not self.inc_events:
for evt_ref in self.person.get_primary_event_ref_list():
event = self.r_db.get_event_from_handle(evt_ref.ref)
if event:
media_list += event.get_media_list()
# display additional images as gallery
sect7 = self.disp_add_img_as_gallery(media_list, person)
if sect7 is not None:
individualdetail += sect7
# display Narrative Notes
notelist = person.get_note_list()
sect8 = self.display_note_list(notelist)
if sect8 is not None:
individualdetail += sect8
# display attributes
attrlist = person.get_attribute_list()
if attrlist:
attrsection, attrtable = self.display_attribute_header()
self.display_attr_list(attrlist, attrtable)
individualdetail += attrsection
# display web links
sect10 = self.display_url_list(self.person.get_url_list())
if sect10 is not None:
individualdetail += sect10
# display associations
assocs = person.get_person_ref_list()
if assocs:
individualdetail += self.display_ind_associations(assocs)
# for use in family map pages...
if len(place_lat_long) > 0:
if self.report.options["familymappages"]:
# save output_file, string_io and cur_fname
# before creating a new page
sof = output_file
sstring_io = sio
sfname = self.report.cur_fname
individualdetail += self.__display_family_map(
person, place_lat_long)
# restore output_file, string_io and cur_fname
# after creating a new page
output_file = sof
sio = sstring_io
self.report.cur_fname = sfname
# display pedigree
sect13 = self.display_ind_pedigree()
if sect13 is not None:
individualdetail += sect13
# display ancestor tree
if report.options['ancestortree']:
sect14 = self.display_tree()
if sect14 is not None:
individualdetail += sect14
# display source references
sect14 = self.display_ind_sources(person)
if sect14 is not None:
individualdetail += sect14
# add clearline for proper styling
# create footer section
footer = self.write_footer(date)
body += (FULLCLEAR, footer)
# send page out for processing
# and close the file
self.xhtml_writer(indivdetpage, output_file, sio, date)
def __create_family_map(self, person, place_lat_long):
"""
creates individual family map page
@param: person -- person from database
@param: place_lat_long -- for use in Family Map Pages
"""
if not place_lat_long:
return
output_file, sio = self.report.create_file(person.get_handle(), "maps")
self.uplink = True
familymappage, head, body = self.write_header(self._("Family Map"))
minx, maxx = Decimal("0.00000001"), Decimal("0.00000001")
miny, maxy = Decimal("0.00000001"), Decimal("0.00000001")
xwidth, yheight = [], []
midx_, midy_, spanx, spany = [None]*4
number_markers = len(place_lat_long)
if number_markers > 1:
for (latitude, longitude, placetitle, handle,
date, etype) in place_lat_long:
xwidth.append(latitude)
yheight.append(longitude)
xwidth.sort()
yheight.sort()
minx = xwidth[0] if xwidth[0] else minx
maxx = xwidth[-1] if xwidth[-1] else maxx
minx, maxx = Decimal(minx), Decimal(maxx)
midx_ = str(Decimal((minx + maxx) /2))
miny = yheight[0] if yheight[0] else miny
maxy = yheight[-1] if yheight[-1] else maxy
miny, maxy = Decimal(miny), Decimal(maxy)
midy_ = str(Decimal((miny + maxy) /2))
midx_, midy_ = conv_lat_lon(midx_, midy_, "D.D8")
# get the integer span of latitude and longitude
spanx = int(maxx - minx)
spany = int(maxy - miny)
# set zoom level based on span of Longitude?
            tinyset = [-3, -2, -1, 0, 1, 2, 3]
            smallset = [-4, -5, -6, -7, 4, 5, 6, 7]
            middleset = [-8, -9, -10, -11, 8, 9, 10, 11]
            largeset = [-11, -12, -13, -14, -15, -16,
                        -17, 11, 12, 13, 14, 15, 16, 17]
if spany in tinyset or spany in smallset:
zoomlevel = 6
elif spany in middleset:
zoomlevel = 5
elif spany in largeset:
zoomlevel = 4
else:
zoomlevel = 3
# 0 = latitude, 1 = longitude, 2 = place title,
# 3 = handle, and 4 = date, 5 = event type...
# being sorted by date, latitude, and longitude...
place_lat_long = sorted(place_lat_long, key=itemgetter(4, 0, 1))
# for all plugins
# if family_detail_page
# if active
# call_(report, up, head)
# add narrative-maps style sheet
if self.usecms:
fname = "/".join([self.target_uri, "css", "narrative-maps.css"])
else:
fname = "/".join(["css", "narrative-maps.css"])
url = self.report.build_url_fname(fname, None, self.uplink)
head += Html("link", href=url, type="text/css", media="screen",
rel="stylesheet")
# add MapService specific javascript code
if self.mapservice == "Google":
src_js = GOOGLE_MAPS + "api/js?sensor=false"
if self.googlemapkey:
src_js += "&key=" + self.googlemapkey
head += Html("script", type="text/javascript",
src=src_js, inline=True)
else:
url = self.secure_mode
url += ("maxcdn.bootstrapcdn.com/bootstrap/3.3.7/"
"css/bootstrap.min.css")
head += Html("link", href=url, type="text/javascript",
rel="stylesheet")
src_js = self.secure_mode
src_js += "ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"
head += Html("script", type="text/javascript",
src=src_js, inline=True)
src_js = self.secure_mode
src_js += "openlayers.org/en/v3.17.1/build/ol.js"
head += Html("script", type="text/javascript",
src=src_js, inline=True)
url = self.secure_mode
url += "openlayers.org/en/v3.17.1/css/ol.css"
head += Html("link", href=url, type="text/javascript",
rel="stylesheet")
src_js = self.secure_mode
src_js += ("maxcdn.bootstrapcdn.com/bootstrap/3.3.7/"
"js/bootstrap.min.js")
head += Html("script", type="text/javascript",
src=src_js, inline=True)
if number_markers > 0:
tracelife = "["
seq_ = 1
for index in range(0, (number_markers - 1)):
(latitude, longitude, placetitle, handle, date,
etype) = place_lat_long[index]
# are we using Google?
if self.mapservice == "Google":
# are we creating Family Links?
if self.googleopts == "FamilyLinks":
tracelife += """
new google.maps.LatLng(%s, %s),""" % (latitude, longitude)
# are we creating Drop Markers or Markers?
elif self.googleopts in ["Drop", "Markers"]:
tracelife += """
['%s', %s, %s, %d],""" % (placetitle.replace("'", "\\'"), latitude,
longitude, seq_)
# are we using OpenStreetMap?
else:
tracelife += """
[%f, %f, \'%s\'],""" % (float(longitude), float(latitude),
placetitle.replace("'", "\\'"))
seq_ += 1
# FIXME: The last element in the place_lat_long list is treated
# specially, and the code above is apparently repeated so as to
# avoid a comma at the end, and get the right closing. This is very
# ugly.
(latitude, longitude, placetitle, handle, date,
etype) = place_lat_long[-1]
# are we using Google?
if self.mapservice == "Google":
# are we creating Family Links?
if self.googleopts == "FamilyLinks":
tracelife += """
new google.maps.LatLng(%s, %s)
];""" % (latitude, longitude)
# are we creating Drop Markers or Markers?
elif self.googleopts in ["Drop", "Markers"]:
tracelife += """
['%s', %s, %s, %d]
];""" % (placetitle.replace("'", "\\'"), latitude, longitude, seq_)
# are we using OpenStreetMap?
elif self.mapservice == "OpenStreetMap":
tracelife += """
[%f, %f, \'%s\']
];""" % (float(longitude), float(latitude), placetitle.replace("'", "\\'"))
# begin MapDetail division...
with Html("div", class_="content", id="FamilyMapDetail") as mapdetail:
body += mapdetail
# add page title
mapdetail += Html("h3",
html_escape(self._("Tracking %s")
% self.get_name(person)),
inline=True)
# page description
msg = self._("This map page represents that person "
"and any descendants with all of their event/ places. "
"If you place your mouse over "
"the marker it will display the place name. "
"The markers and the Reference "
"list are sorted in date order (if any?). "
"Clicking on a place’s "
"name in the Reference section will take you "
"to that place’s page.")
mapdetail += Html("p", msg, id="description")
# this is the style element where the Map is held in the CSS...
with Html("div", id="map_canvas") as canvas:
mapdetail += canvas
# begin javascript inline code...
with Html("script", deter="deter",
style='width =100%; height =100%;',
type="text/javascript", indent=False) as jsc:
head += jsc
# Link to Gramps marker
fname = "/".join(['images', 'marker.png'])
marker_path = self.report.build_url_image("marker.png",
"images",
self.uplink)
jsc += MARKER_PATH % marker_path
# are we using Google?
if self.mapservice == "Google":
# are we creating Family Links?
if self.googleopts == "FamilyLinks":
if midy_ is None:
jsc += FAMILYLINKS % (tracelife, latitude,
longitude, int(10))
else:
jsc += FAMILYLINKS % (tracelife, midx_, midy_,
zoomlevel)
# are we creating Drop Markers?
elif self.googleopts == "Drop":
if midy_ is None:
jsc += DROPMASTERS % (tracelife, latitude,
longitude, int(10))
else:
jsc += DROPMASTERS % (tracelife, midx_, midy_,
zoomlevel)
# we are creating Markers only...
else:
if midy_ is None:
jsc += MARKERS % (tracelife, latitude,
longitude, int(10))
else:
jsc += MARKERS % (tracelife, midx_, midy_,
zoomlevel)
# we are using OpenStreetMap...
else:
if midy_ is None:
jsc += OSM_MARKERS % (tracelife,
longitude,
latitude, 10)
else:
jsc += OSM_MARKERS % (tracelife, midy_, midx_,
zoomlevel)
# if Google and Drop Markers are selected,
# then add "Drop Markers" button?
if self.mapservice == "Google" and self.googleopts == "Drop":
mapdetail += Html("button", _("Drop Markers"),
id="drop", onclick="drop()", inline=True)
# add div for popups.
with Html("div", id="popup", inline=True) as popup:
mapdetail += popup
# begin place reference section and its table...
with Html("div", class_="subsection", id="references") as section:
mapdetail += section
section += Html("h4", self._("References"), inline=True)
with Html("table", class_="infolist") as table:
section += table
thead = Html("thead")
table += thead
trow = Html("tr")
thead += trow
trow.extend(
Html("th", label, class_=colclass, inline=True)
for (label, colclass) in [
(_("Date"), "ColumnDate"),
(_("Place Title"), "ColumnPlace"),
(_("Event Type"), "ColumnType")
]
)
tbody = Html("tbody")
table += tbody
for (latitude, longitude, placetitle, handle, date,
etype) in place_lat_long:
trow = Html("tr")
tbody += trow
trow.extend(
Html("td", data, class_=colclass, inline=True)
for data, colclass in [
(date, "ColumnDate"),
(self.place_link(handle, placetitle,
uplink=True),
"ColumnPlace"),
(str(etype), "ColumnType")
]
)
# add body id for this page...
body.attr = 'id ="FamilyMap" onload ="initialize()"'
# add clearline for proper styling
# add footer section
footer = self.write_footer(None)
body += (FULLCLEAR, footer)
# send page out for processing
# and close the file
self.xhtml_writer(familymappage, output_file, sio, 0)
def __display_family_map(self, person, place_lat_long):
"""
Create the family map link
@param: person -- The person to set in the box
@param: place_lat_long -- The center of the box
"""
# create family map page
self.__create_family_map(person, place_lat_long)
# begin family map division plus section title
with Html("div", class_="subsection", id="familymap") as familymap:
familymap += Html("h4", self._("Family Map"), inline=True)
# add family map link
person_handle = person.get_handle()
url = self.report.build_url_fname_html(person_handle, "maps", True)
familymap += self.family_map_link(person_handle, url)
# return family map link to its caller
return familymap
def draw_box(self, center, col, person):
"""
Draw the box around the AncestorTree Individual name box...
@param: center -- The center of the box
@param: col -- The generation number
@param: person -- The person to set in the box
"""
top = center - _HEIGHT/2
xoff = _XOFFSET+col*(_WIDTH+_HGAP)
sex = person.gender
if sex == Person.MALE:
divclass = "male"
elif sex == Person.FEMALE:
divclass = "female"
else:
divclass = "unknown"
boxbg = Html("div", class_="boxbg %s AncCol%s" % (divclass, col),
style="top: %dpx; left: %dpx;" % (top, xoff+1)
)
person_name = self.get_name(person)
# This does not use [new_]person_link because the requirements are
# unique
result = self.report.obj_dict.get(Person).get(person.handle)
if result is None or result[0] == "":
# The person is not included in the webreport or there is no link
# to them
boxbg += Html("span", person_name, class_="unlinked", inline=True)
else:
thumbnail_url = None
if self.create_media and col < 5:
photolist = person.get_media_list()
if photolist:
photo_handle = photolist[0].get_reference_handle()
photo = self.r_db.get_media_from_handle(photo_handle)
mime_type = photo.get_mime_type()
if mime_type:
region = self.media_ref_region_to_object(photo_handle,
person)
if region:
# make a thumbnail of this region
newpath = self.copy_thumbnail(
photo_handle, photo, region)
# TODO. Check if build_url_fname can be used.
newpath = "/".join(['..']*3 + [newpath])
if win():
newpath = newpath.replace('\\', "/")
thumbnail_url = newpath
else:
(photo_url,
thumbnail_url) = self.report.prepare_copy_media(
photo)
thumbnail_url = "/".join(['..']*3 + [thumbnail_url])
if win():
thumbnail_url = thumbnail_url.replace('\\', "/")
url = self.report.build_url_fname_html(person.handle, "ppl", True)
birth = death = ""
bd_event = get_birth_or_fallback(self.r_db, person)
if bd_event:
birth = self.rlocale.get_date(bd_event.get_date_object())
dd_event = get_death_or_fallback(self.r_db, person)
if dd_event:
death = self.rlocale.get_date(dd_event.get_date_object())
if death == "":
death = "..."
value = person_name + "<br/>*", birth, "<br/>+", death
if thumbnail_url is None:
boxbg += Html("a", href=url, class_="noThumb") + value
else:
thumb = Html("span", class_="thumbnail") + (
Html("img", src=thumbnail_url, alt="Image: " + person_name))
boxbg += Html("a", href=url) + thumb + value
shadow = Html(
"div", class_="shadow", inline=True,
style="top: %dpx; left: %dpx;" % (top + _SHADOW, xoff + _SHADOW))
return [boxbg, shadow]
def extend_line(self, coord_y0, coord_x0):
"""
Draw and extended line
@param: coord_y0 -- The starting point
@param: coord_x0 -- The end of the line
"""
style = "top: %dpx; left: %dpx; width: %dpx"
ext_bv = Html("div", class_="bvline", inline=True,
style=style % (coord_y0, coord_x0, _HGAP/2)
)
ext_gv = Html("div", class_="gvline", inline=True,
style=style % (coord_y0+_SHADOW,
coord_x0, _HGAP/2+_SHADOW)
)
return [ext_bv, ext_gv]
def connect_line(self, coord_y0, coord_y1, col):
"""
        We need to draw a line between two points
@param: coord_y0 -- The starting point
@param: coord_y1 -- The end of the line
@param: col -- The generation number
"""
coord_y = min(coord_y0, coord_y1)
stylew = "top: %dpx; left: %dpx; width: %dpx;"
styleh = "top: %dpx; left: %dpx; height: %dpx;"
coord_x0 = _XOFFSET + col * _WIDTH + (col-1)*_HGAP + _HGAP/2
cnct_bv = Html("div", class_="bvline", inline=True,
style=stylew % (coord_y1, coord_x0, _HGAP/2))
cnct_gv = Html("div", class_="gvline", inline=True,
style=stylew % (coord_y1+_SHADOW,
coord_x0+_SHADOW,
_HGAP/2+_SHADOW))
cnct_bh = Html("div", class_="bhline", inline=True,
style=styleh % (coord_y, coord_x0,
abs(coord_y0-coord_y1)))
cnct_gh = Html("div", class_="gvline", inline=True,
style=styleh % (coord_y+_SHADOW,
coord_x0+_SHADOW,
abs(coord_y0-coord_y1)))
return [cnct_bv, cnct_gv, cnct_bh, cnct_gh]
def draw_connected_box(self, center1, center2, col, handle):
"""
Draws the connected box for Ancestor Tree on the Individual Page
@param: center1 -- The first box to connect
@param: center2 -- The destination box to draw
@param: col -- The generation number
@param: handle -- The handle of the person to set in the new box
"""
box = []
if not handle:
return box
person = self.r_db.get_person_from_handle(handle)
box = self.draw_box(center2, col, person)
box += self.connect_line(center1, center2, col)
return box
def display_tree(self):
"""
Display the Ancestor Tree
"""
tree = []
if not self.person.get_main_parents_family_handle():
return None
generations = self.report.options['graphgens']
max_in_col = 1 << (generations-1)
max_size = _HEIGHT*max_in_col + _VGAP*(max_in_col+1)
center = int(max_size/2)
with Html("div", id="tree", class_="subsection") as tree:
tree += Html("h4", self._('Ancestors'), inline=True)
with Html("div", id="treeContainer",
style="width:%dpx; height:%dpx;" % (
_XOFFSET+(generations)*_WIDTH+(generations-1)*_HGAP,
max_size)
) as container:
tree += container
container += self.draw_tree(1, generations, max_size,
0, center, self.person.handle)
return tree
def draw_tree(self, gen_nr, maxgen, max_size, old_center,
new_center, person_handle):
"""
Draws the Ancestor Tree
@param: gen_nr -- The generation number to draw
@param: maxgen -- The maximum number of generations to draw
@param: max_size -- The maximum size of the drawing area
@param: old_center -- The position of the old box
@param: new_center -- The position of the new box
@param: person_handle -- The handle of the person to draw
"""
tree = []
if gen_nr > maxgen:
return tree
gen_offset = int(max_size / pow(2, gen_nr+1))
if person_handle:
person = self.r_db.get_person_from_handle(person_handle)
else:
person = None
if not person:
return tree
if gen_nr == 1:
tree = self.draw_box(new_center, 0, person)
else:
tree = self.draw_connected_box(old_center, new_center,
gen_nr-1, person_handle)
if gen_nr == maxgen:
return tree
family_handle = person.get_main_parents_family_handle()
if family_handle:
line_offset = _XOFFSET + gen_nr*_WIDTH + (gen_nr-1)*_HGAP
tree += self.extend_line(new_center, line_offset)
family = self.r_db.get_family_from_handle(family_handle)
f_center = new_center-gen_offset
f_handle = family.get_father_handle()
tree += self.draw_tree(gen_nr+1, maxgen, max_size,
new_center, f_center, f_handle)
m_center = new_center+gen_offset
m_handle = family.get_mother_handle()
tree += self.draw_tree(gen_nr+1, maxgen, max_size,
new_center, m_center, m_handle)
return tree
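
    # --- Editor's note (illustrative, not part of Gramps) ---
    # draw_tree() halves the vertical spacing every generation:
    # gen_offset = max_size / 2**(gen_nr + 1). A standalone sketch of that
    # spacing rule, for reference only:
    @staticmethod
    def _generation_offset(max_size, gen_nr):
        """Vertical half-spacing between a child box and its parent boxes."""
        return int(max_size / pow(2, gen_nr + 1))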
def display_ind_associations(self, assoclist):
"""
Display an individual's associations
@param: assoclist -- The list of persons for association
"""
# begin Associations division
with Html("div", class_="subsection", id="Associations") as section:
section += Html("h4", self._('Associations'), inline=True)
with Html("table", class_="infolist assoclist") as table:
section += table
thead = Html("thead")
table += thead
trow = Html("tr")
thead += trow
assoc_row = [
(self._("Person"), 'Person'),
(self._('Relationship'), 'Relationship'),
(self._("Notes"), 'Notes'),
(self._("Sources"), 'Sources'),
]
trow.extend(
Html("th", label, class_="Column" + colclass, inline=True)
for (label, colclass) in assoc_row)
tbody = Html("tbody")
table += tbody
for person_ref in assoclist:
trow = Html("tr")
tbody += trow
person_lnk = self.new_person_link(person_ref.ref,
uplink=True)
index = 0
for data in [
person_lnk,
person_ref.get_relation(),
self.dump_notes(person_ref.get_note_list()),
self.get_citation_links(
person_ref.get_citation_list()),
]:
# get colclass from assoc_row
colclass = assoc_row[index][1]
trow += Html("td", data, class_="Column" + colclass,
inline=True)
index += 1
# return section to its callers
return section
def display_ind_pedigree(self):
"""
Display an individual's pedigree
"""
birthorder = self.report.options["birthorder"]
# Define helper functions
def children_ped(ol_html):
"""
Create a children list
@param: ol_html -- The html element to complete
"""
if family:
childlist = family.get_child_ref_list()
childlist = [child_ref.ref for child_ref in childlist]
children = add_birthdate(self.r_db, childlist, self.rlocale)
if birthorder:
children = sorted(children)
for birthdate, birth, death, handle in children:
if handle == self.person.get_handle():
child_ped(ol_html)
elif handle:
child = self.r_db.get_person_from_handle(handle)
if child:
ol_html += Html("li") + self.pedigree_person(child)
else:
child_ped(ol_html)
return ol_html
def child_ped(ol_html):
"""
Create a child element list
@param: ol_html -- The html element to complete
"""
with Html("li", self.name, class_="thisperson") as pedfam:
family = self.pedigree_family()
if family:
pedfam += Html("ol", class_="spouselist") + family
return ol_html + pedfam
# End of helper functions
parent_handle_list = self.person.get_parent_family_handle_list()
if parent_handle_list:
parent_handle = parent_handle_list[0]
family = self.r_db.get_family_from_handle(parent_handle)
father_handle = family.get_father_handle()
mother_handle = family.get_mother_handle()
if mother_handle:
mother = self.r_db.get_person_from_handle(mother_handle)
else:
mother = None
if father_handle:
father = self.r_db.get_person_from_handle(father_handle)
else:
father = None
else:
family = None
father = None
mother = None
with Html("div", id="pedigree", class_="subsection") as ped:
ped += Html("h4", self._('Pedigree'), inline=True)
with Html("ol", class_="pedigreegen") as pedol:
ped += pedol
if father and mother:
pedfa = Html("li") + self.pedigree_person(father)
pedol += pedfa
with Html("ol") as pedma:
pedfa += pedma
pedma += (Html("li", class_="spouse") +
self.pedigree_person(mother) +
children_ped(Html("ol"))
)
elif father:
pedol += (Html("li") + self.pedigree_person(father) +
children_ped(Html("ol"))
)
elif mother:
pedol += (Html("li") + self.pedigree_person(mother) +
children_ped(Html("ol"))
)
else:
pedol += (Html("li") + children_ped(Html("ol")))
return ped
def display_ind_general(self):
"""
display an individual's general information...
"""
self.page_title = self.sort_name
thumbnail = self.disp_first_img_as_thumbnail(
self.person.get_media_list(), self.person)
section_title = Html("h3", html_escape(self.page_title),
inline=True) + (
Html('sup') + (
Html('small') +
self.get_citation_links(
self.person.get_citation_list())))
# begin summaryarea division
with Html("div", id='summaryarea') as summaryarea:
# begin general details table
with Html("table", class_="infolist") as table:
summaryarea += table
primary_name = self.person.get_primary_name()
all_names = [primary_name] + self.person.get_alternate_names()
# if the callname or the nickname is the same as the 'first
# name' (given name), then they are not displayed.
first_name = primary_name.get_first_name()
# Names [and their sources]
for name in all_names:
pname = html_escape(_nd.display_name(name))
pname += self.get_citation_links(name.get_citation_list())
                    # if we have just a firstname, then the name is preceded
# by ", " which doesn't exactly look very nice printed on
# the web page
if pname[:2] == ', ': # TODO for Arabic, translate this?
pname = pname[2:]
if name != primary_name:
datetext = self.rlocale.get_date(name.date)
if datetext:
pname = datetext + ': ' + pname
type_ = self._(name.get_type().xml_str())
trow = Html("tr") + (
Html("td", type_, class_="ColumnAttribute",
inline=True)
)
tcell = Html("td", pname, class_="ColumnValue")
# display any notes associated with this name
notelist = name.get_note_list()
if len(notelist):
unordered = Html("ul")
for notehandle in notelist:
note = self.r_db.get_note_from_handle(notehandle)
if note:
note_text = self.get_note_format(note, True)
# attach note
unordered += note_text
tcell += unordered
trow += tcell
table += trow
# display the callname associated with this name.
call_name = name.get_call_name()
if call_name and call_name != first_name:
trow = Html("tr") + (
Html("td", _("Call Name"), class_="ColumnAttribute",
inline=True),
Html("td", call_name, class_="ColumnValue",
inline=True)
)
table += trow
# display the nickname associated with this name. Note that
# this no longer displays the Nickname attribute (if
# present), because the nickname attribute is deprecated in
# favour of the nick_name property of the name structure
# (see http://gramps.1791082.n4.nabble.com/Where-is-
# nickname-stored-tp4469779p4484272.html), and also because
# the attribute is (normally) displayed lower down the
                    # Narrative Web report.
nick_name = name.get_nick_name()
if nick_name and nick_name != first_name:
trow = Html("tr") + (
Html("td", self._("Nick Name"),
class_="ColumnAttribute",
inline=True),
Html("td", nick_name, class_="ColumnValue",
inline=True)
)
table += trow
# Gramps ID
person_gid = self.person.get_gramps_id()
if not self.noid and person_gid:
trow = Html("tr") + (
Html("td", self._("Gramps ID"),
class_="ColumnAttribute",
inline=True),
Html("td", person_gid, class_="ColumnValue",
inline=True)
)
table += trow
# Gender
gender = self._(self.gender_map[self.person.gender])
trow = Html("tr") + (
Html("td", self._("Gender"), class_="ColumnAttribute",
inline=True),
Html("td", gender, class_="ColumnValue", inline=True)
)
table += trow
# Age At Death???
birth_date = Date.EMPTY
birth_ref = self.person.get_birth_ref()
if birth_ref:
birth = self.r_db.get_event_from_handle(birth_ref.ref)
if birth:
birth_date = birth.get_date_object()
if birth_date and birth_date is not Date.EMPTY:
alive = probably_alive(self.person, self.r_db, Today())
death_date = _find_death_date(self.r_db, self.person)
if not alive and death_date is not None:
nyears = death_date - birth_date
nyears = nyears.format(precision=3,
dlocale=self.rlocale)
trow = Html("tr") + (
Html("td", self._("Age at Death"),
class_="ColumnAttribute", inline=True),
Html("td", nyears,
class_="ColumnValue", inline=True)
)
table += trow
# return all three pieces to its caller
# do NOT combine before returning
return thumbnail, section_title, summaryarea
def display_ind_events(self, place_lat_long):
"""
will create the events table
@param: place_lat_long -- For use in Family Map Pages. This will be None
if called from Family pages, which do not
create a Family Map
"""
event_ref_list = self.person.get_event_ref_list()
if not event_ref_list:
return None
# begin events division and section title
with Html("div", id="events", class_="subsection") as section:
section += Html("h4", self._("Events"), inline=True)
# begin events table
with Html("table", class_="infolist eventlist") as table:
section += table
thead = Html("thead")
table += thead
# attach event header row
thead += self.event_header_row()
tbody = Html("tbody")
table += tbody
for evt_ref in event_ref_list:
event = self.r_db.get_event_from_handle(evt_ref.ref)
if event:
# display event row
tbody += self.display_event_row(event, evt_ref,
place_lat_long,
True, True,
EventRoleType.PRIMARY)
return section
def display_parent(self, handle, title, rel):
"""
This will display a parent ...
@param: handle -- The person handle
@param: title -- Is the title of the web page
@param: rel -- The relation
"""
tcell1 = Html("td", title, class_="ColumnAttribute", inline=True)
tcell2 = Html("td", class_="ColumnValue", close=False, inline=True)
tcell2 += self.new_person_link(handle, uplink=True)
if rel and rel != ChildRefType(ChildRefType.BIRTH):
tcell2 += ''.join([' '] *3 + ['(%s)']) % str(rel)
person = self.r_db.get_person_from_handle(handle)
birth = death = ""
if person:
bd_event = get_birth_or_fallback(self.r_db, person)
if bd_event:
birth = self.rlocale.get_date(bd_event.get_date_object())
dd_event = get_death_or_fallback(self.r_db, person)
if dd_event:
death = self.rlocale.get_date(dd_event.get_date_object())
tcell3 = Html("td", birth, class_="ColumnDate",
inline=False, close=False, indent=False)
tcell4 = Html("td", death, class_="ColumnDate",
inline=True, close=False, indent=False)
tcell2 += tcell3
tcell2 += tcell4
# return table columns to its caller
return tcell1, tcell2
def get_reln_in_family(self, ind, family):
"""
        Display the relation of the individual in the family
@param: ind -- The person to use
@param: family -- The family
"""
child_handle = ind.get_handle()
child_ref_list = family.get_child_ref_list()
for child_ref in child_ref_list:
if child_ref.ref == child_handle:
return (child_ref.get_father_relation(),
child_ref.get_mother_relation())
return (None, None)
def display_ind_parent_family(self, birthmother, birthfather, family,
table,
first=False):
"""
Display the individual parent family
@param: birthmother -- The birth mother
@param: birthfather -- The birth father
@param: family -- The family
@param: table -- The html document to complete
        @param: first       -- Is this the first family displayed?
"""
if not first:
trow = Html("tr") + (Html("td", " ", colspan=3,
inline=True))
table += trow
# get the father
father_handle = family.get_father_handle()
if father_handle:
if father_handle == birthfather:
                # The parent may not be the birth father in this family,
                # because it may be a step family. However, it would be odd
                # to display the parent as anything other than "Father"
reln = self._("Father")
else:
# Stepfather may not always be quite right (for example, it may
# actually be StepFather-in-law), but it is too expensive to
# calculate out the correct relationship using the Relationship
# Calculator
reln = self._("Stepfather")
trow = Html("tr") + (self.display_parent(father_handle, reln, None))
table += trow
# get the mother
mother_handle = family.get_mother_handle()
if mother_handle:
if mother_handle == birthmother:
reln = self._("Mother")
else:
reln = self._("Stepmother")
trow = Html("tr") + (self.display_parent(mother_handle, reln, None))
table += trow
for child_ref in family.get_child_ref_list():
child_handle = child_ref.ref
child = self.r_db.get_person_from_handle(child_handle)
if child:
if child == self.person:
reln = ""
else:
try:
# We have a try except block here, because the two
# people MUST be siblings for the called Relationship
# routines to work. Depending on your definition of
# sibling, we cannot necessarily guarantee that.
sibling_type = self.rel_class.get_sibling_type(
self.r_db, self.person, child)
reln = self.rel_class.get_sibling_relationship_string(
sibling_type, self.person.gender, child.gender)
                        # We have a problem here: reln is never in the chosen
                        # language but in the default language.
                        # Does get_sibling_relationship_string work?
reln = reln[0].upper() + reln[1:]
except:
reln = self._("Not siblings")
val1 = " "
reln = val1 + reln
# Now output reln, child_link, (frel, mrel)
frel = child_ref.get_father_relation()
mrel = child_ref.get_mother_relation()
if frel != ChildRefType.BIRTH or mrel != ChildRefType.BIRTH:
frelmrel = "(%s, %s)" % (str(frel), str(mrel))
else:
frelmrel = ""
trow = Html("tr") + (
Html("td", reln, class_="ColumnAttribute", inline=True))
tcell = Html("td", val1, class_="ColumnValue", inline=True)
tcell += self.display_child_link(child_handle)
birth = death = ""
bd_event = get_birth_or_fallback(self.r_db, child)
if bd_event:
birth = self.rlocale.get_date(bd_event.get_date_object())
dd_event = get_death_or_fallback(self.r_db, child)
if dd_event:
death = self.rlocale.get_date(dd_event.get_date_object())
tcell2 = Html("td", birth, class_="ColumnDate",
inline=True)
tcell3 = Html("td", death, class_="ColumnDate",
inline=True)
trow += tcell
trow += tcell2
trow += tcell3
tcell = Html("td", frelmrel, class_="ColumnValue",
inline=True)
trow += tcell
table += trow
def display_step_families(self, parent_handle,
family,
all_family_handles,
birthmother, birthfather,
table):
"""
Display step families
@param: parent_handle -- The family parent handle to display
@param: family -- The family
@param: all_family_handles -- All known family handles
@param: birthmother -- The birth mother
@param: birthfather -- The birth father
@param: table -- The html document to complete
"""
if parent_handle:
parent = self.r_db.get_person_from_handle(parent_handle)
for parent_family_handle in parent.get_family_handle_list():
if parent_family_handle not in all_family_handles:
parent_family = self.r_db.get_family_from_handle(
parent_family_handle)
self.display_ind_parent_family(birthmother, birthfather,
parent_family, table)
all_family_handles.append(parent_family_handle)
def display_ind_center_person(self):
"""
Display the person's relationship to the center person
"""
center_person = self.r_db.get_person_from_gramps_id(
self.report.options['pid'])
if center_person is None:
return
relationship = self.rel_class.get_one_relationship(self.r_db,
self.person,
center_person)
if relationship == "": # No relation to display
return
# begin center_person division
section = ""
with Html("div", class_="subsection", id="parents") as section:
message = self._("Relation to the center person")
message += " ("
name_format = self.report.options['name_format']
primary_name = center_person.get_primary_name()
name = Name(primary_name)
name.set_display_as(name_format)
message += _nd.display_name(name)
message += ") : "
message += relationship
section += Html("h4", message, inline=True)
return section
def display_ind_parents(self):
"""
Display a person's parents
"""
parent_list = self.person.get_parent_family_handle_list()
if not parent_list:
return None
# begin parents division
with Html("div", class_="subsection", id="parents") as section:
section += Html("h4", self._("Parents"), inline=True)
# begin parents table
with Html("table", class_="infolist") as table:
section += table
thead = Html("thead")
table += thead
trow = Html("tr")
thead += trow
trow.extend(
Html("th", label, class_=colclass, inline=True)
for (label, colclass) in [
(self._("Relation to main person"), "ColumnAttribute"),
(self._("Name"), "ColumnValue"),
(self._("Birth date"), "ColumnValue"),
(self._("Death date"), "ColumnValue"),
(self._("Relation within this family "
"(if not by birth)"),
"ColumnValue")
]
)
tbody = Html("tbody")
all_family_handles = list(parent_list)
(birthmother, birthfather) = self.rel_class.get_birth_parents(
self.r_db, self.person)
first = True
for family_handle in parent_list:
family = self.r_db.get_family_from_handle(family_handle)
if family:
# Display this family
self.display_ind_parent_family(birthmother,
birthfather,
family, tbody, first)
first = False
if self.report.options['showhalfsiblings']:
# Display all families in which the parents are
# involved. This displays half siblings and step
# siblings
self.display_step_families(
family.get_father_handle(), family,
all_family_handles,
birthmother, birthfather, tbody)
self.display_step_families(
family.get_mother_handle(), family,
all_family_handles,
birthmother, birthfather, tbody)
table += tbody
return section
def pedigree_person(self, person):
"""
will produce a hyperlink for a pedigree person ...
@param: person -- The person
"""
hyper = self.new_person_link(person.handle, person=person, uplink=True)
return hyper
def pedigree_family(self):
"""
Returns a family pedigree
"""
ped = []
for family_handle in self.person.get_family_handle_list():
rel_family = self.r_db.get_family_from_handle(family_handle)
spouse_handle = utils.find_spouse(self.person, rel_family)
if spouse_handle:
spouse = self.r_db.get_person_from_handle(spouse_handle)
pedsp = (Html("li", class_="spouse") +
self.pedigree_person(spouse)
)
else:
pedsp = (Html("li", class_="spouse"))
ped += [pedsp]
childlist = rel_family.get_child_ref_list()
if childlist:
with Html("ol") as childol:
pedsp += [childol]
for child_ref in childlist:
child = self.r_db.get_person_from_handle(child_ref.ref)
if child:
childol += (Html("li") +
self.pedigree_person(child)
)
return ped
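
# --- Editor's sketch (illustrative, not the actual Gramps entry point) ---
# How a report driver might exercise this class: build it from a configured
# NavWebReport-like object, then emit the index page plus one page per
# person known to the report.
def _build_person_pages(report, title="Individuals"):
    pages = PersonPages(report)
    pages.display_pages(title)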
license: gpl-2.0 | hash: -1,617,659,728,472,627,700 | line_mean: 41.24581 | line_max: 80 | alpha_frac: 0.463608 | autogenerated: false | ratio: 4.611538 | config_test: false | has_no_keywords: false | few_assignments: false
repo: faddai/newfies-dialer | path: newfies/user_profile/views.py | copies: 1 | size: 11084

#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2012 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import PasswordChangeForm
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.template.context import RequestContext
from django.utils.translation import ugettext_lazy as _
from django.utils import simplejson
from django.db.models import Q
from django.conf import settings
from notification import models as notification
from dialer_campaign.models import common_contact_authorization
from dialer_campaign.views import current_view, notice_count, grid_common_function
from dialer_campaign.function_def import user_dialer_setting_msg, variable_value
from dialer_settings.models import DialerSetting
from user_profile.models import UserProfile
from user_profile.forms import UserChangeDetailForm, \
UserChangeDetailExtendForm, \
CheckPhoneNumberForm,\
UserProfileForm
@login_required
def customer_detail_change(request):
"""User Detail change on Customer UI
**Attributes**:
* ``form`` - UserChangeDetailForm, UserChangeDetailExtendForm, PasswordChangeForm, CheckPhoneNumberForm
* ``template`` - 'frontend/registration/user_detail_change.html'
**Logic Description**:
* User is able to change his/her detail.
"""
user_detail = User.objects.get(username=request.user)
try:
user_detail_extened = UserProfile.objects.get(user=user_detail)
except UserProfile.DoesNotExist:
#create UserProfile
user_detail_extened = UserProfile(user=user_detail)
user_detail_extened.save()
user_detail_form = UserChangeDetailForm(request.user,
instance=user_detail)
user_detail_extened_form = UserChangeDetailExtendForm(request.user,
instance=user_detail_extened)
user_password_form = PasswordChangeForm(user=request.user)
check_phone_no_form = CheckPhoneNumberForm()
try:
user_ds = UserProfile.objects.get(user=request.user)
dialer_set = DialerSetting.objects.get(id=user_ds.dialersetting.id)
except:
dialer_set = ''
user_notification = \
notification.Notice.objects.filter(recipient=request.user)
# Search on sender name
q = (Q(sender=request.user))
if q:
user_notification = user_notification.filter(q)
msg_detail = ''
msg_pass = ''
msg_number = ''
msg_note = ''
error_detail = ''
error_pass = ''
error_number = ''
action = ''
if 'action' in request.GET:
action = request.GET['action']
if request.GET.get('msg_note') == 'true':
msg_note = request.session['msg_note']
# Mark all notification as read
if request.GET.get('notification') == 'mark_read_all':
notification_list = notification.Notice.objects.filter(unseen=1, recipient=request.user)
notification_list.update(unseen=0)
msg_note = _('All notifications are marked as read.')
if request.method == 'POST':
if request.POST['form-type'] == "change-detail":
user_detail_form = UserChangeDetailForm(request.user, request.POST,
instance=user_detail)
user_detail_extened_form = UserChangeDetailExtendForm(request.user,
request.POST,
instance=user_detail_extened)
action = 'tabs-1'
if user_detail_form.is_valid() and user_detail_extened_form.is_valid():
user_detail_form.save()
user_detail_extened_form.save()
msg_detail = _('Detail has been changed.')
else:
error_detail = _('Please correct the errors below.')
elif request.POST['form-type'] == "check-number": # check phone no
action = 'tabs-5'
check_phone_no_form = CheckPhoneNumberForm(data=request.POST)
if check_phone_no_form.is_valid():
if not common_contact_authorization(request.user,
request.POST['phone_number']):
error_number = _('This phone number is not authorized.')
else:
msg_number = _('This phone number is authorized.')
else:
error_number = _('Please correct the errors below.')
else: # "change-password"
user_password_form = PasswordChangeForm(user=request.user,
data=request.POST)
action = 'tabs-2'
if user_password_form.is_valid():
user_password_form.save()
msg_pass = _('Your password has been changed.')
else:
error_pass = _('Please correct the errors below.')
template = 'frontend/registration/user_detail_change.html'
data = {
'module': current_view(request),
'user_detail_form': user_detail_form,
'user_detail_extened_form': user_detail_extened_form,
'user_password_form': user_password_form,
'check_phone_no_form': check_phone_no_form,
'user_notification': user_notification,
'msg_detail': msg_detail,
'msg_pass': msg_pass,
'msg_number': msg_number,
'msg_note': msg_note,
'error_detail': error_detail,
'error_pass': error_pass,
'error_number': error_number,
'notice_count': notice_count(request),
'dialer_set': dialer_set,
'dialer_setting_msg': user_dialer_setting_msg(request.user),
'action': action,
}
return render_to_response(template, data,
context_instance=RequestContext(request))
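# Illustrative sketch (comment only; payload values are invented): the view
# above dispatches on a hidden "form-type" input, e.g. a password change posts
#
#     {"form-type": "change-password", "old_password": "...",
#      "new_password1": "...", "new_password2": "..."}
#
# and the template is re-rendered with action set to "tabs-2" so the correct
# tab is reopened with any messages or errors.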
def call_style(val):
"""Notification icon style"""
unseen_style = \
'style="text-decoration:none;background-image:url(%snewfies/icons/new.png);"' \
% settings.STATIC_URL
seen_style = \
'style="text-decoration:none;background-image:url(%snewfies/icons/tick.png);"' \
% settings.STATIC_URL
if val:
return unseen_style
else:
return seen_style
# Notification
@login_required
def notification_grid(request):
"""notification list in json format for flexigrid
**Model**: notification.Notice
"""
grid_data = grid_common_function(request)
page = int(grid_data['page'])
start_page = int(grid_data['start_page'])
end_page = int(grid_data['end_page'])
sortorder_sign = grid_data['sortorder_sign']
sortname = grid_data['sortname']
user_notification = \
notification.Notice.objects.filter(recipient=request.user)
# Search on sender name
q = (Q(sender=request.user))
if q:
user_notification = user_notification.filter(q)
count = user_notification.count()
user_notification_list = \
user_notification.order_by(sortorder_sign + sortname)[start_page:end_page]
rows = [{'id': row.id,
'cell': ['<input type="checkbox" name="select" class="checkbox"\
value="' + str(row.id) + '" />',
str(row.message),
str(row.notice_type),
str(row.sender),
str(row.added),
str('<a href="../update_notice_status_cust/' + str(row.id) + '/" class="icon" ' \
+ call_style(row.unseen) + '> </a>' ),
]}for row in user_notification_list ]
data = {'rows': rows,
'page': page,
'total': count}
return HttpResponse(simplejson.dumps(data), mimetype='application/json',
content_type="application/json")
@login_required
def notification_del_read(request, object_id):
"""Delete notification for the logged in user
**Attributes**:
* ``object_id`` - Selected notification object
* ``object_list`` - Selected notification objects
**Logic Description**:
* Delete/Mark as Read the selected notification from the notification list
"""
try:
# When object_id is not 0
notification_obj = notification.Notice.objects.get(pk=object_id)
# Delete/Read notification
if object_id:
if request.POST.get('mark_read') == 'false':
request.session["msg_note"] = _('"%(name)s" is deleted.') \
% {'name': notification_obj.notice_type}
notification_obj.delete()
else:
request.session["msg_note"] = _('"%(name)s" is marked as read.') \
% {'name': notification_obj.notice_type}
                # Notice is a single model instance, not a queryset, so set
                # the flag and save rather than calling .update()
                notification_obj.unseen = 0
                notification_obj.save()
return HttpResponseRedirect('/user_detail_change/?action=tabs-3&msg_note=true')
except:
# When object_id is 0 (Multiple records delete/mark as read)
values = request.POST.getlist('select')
values = ", ".join(["%s" % el for el in values])
notification_list = notification.Notice.objects.extra(where=['id IN (%s)' % values])
if request.POST.get('mark_read') == 'false':
request.session["msg_note"] = _('%(count)s notification(s) are deleted.')\
% {'count': notification_list.count()}
notification_list.delete()
else:
request.session["msg_note"] = _('%(count)s notification(s) are marked as read.')\
% {'count': notification_list.count()}
notification_list.update(unseen=0)
return HttpResponseRedirect('/user_detail_change/?action=tabs-3&msg_note=true')
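# Illustrative sketch (comment only; the URL and ids are invented): posting a
# multi-select to this view with object_id 0, e.g.
#
#     client.post('/notification_del_read/0/',
#                 {'select': ['12', '15'], 'mark_read': 'true'})
#
# falls into the except branch above, marks notices 12 and 15 as read, and
# redirects back to the notifications tab.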
def common_notification_status(request, id):
"""Notification Status (e.g. seen/unseen) need to be change.
It is a common function for admin and customer UI
**Attributes**:
* ``pk`` - primary key of notice record
**Logic Description**:
* Selected Notification's status need to be changed.
Changed status can be seen or unseen.
"""
notice = notification.Notice.objects.get(pk=id)
if notice.unseen == 1:
notice.unseen = 0
else:
notice.unseen = 1
notice.save()
return True
@login_required
def update_notice_status_cust(request, id):
"""Notification Status (e.g. seen/unseen) can be changed from
customer interface"""
common_notification_status(request, id)
return HttpResponseRedirect('/user_detail_change/?action=tabs-3')
| mpl-2.0 | 5,991,948,951,880,823,000 | 37.352941 | 111 | 0.603934 | false | 4.117385 | false | false | false |
jiasir/pycs | vulpo/pyami/scriptbase.py | 1 | 1430 | import os
import sys
from vulpo.utils import ShellCommand, get_ts
import vulpo
import vulpo.utils
class ScriptBase(object):
def __init__(self, config_file=None):
self.instance_id = vulpo.config.get('Instance', 'instance-id', 'default')
self.name = self.__class__.__name__
self.ts = get_ts()
if config_file:
vulpo.config.read(config_file)
def notify(self, subject, body=''):
vulpo.utils.notify(subject, body)
def mkdir(self, path):
if not os.path.isdir(path):
try:
os.mkdir(path)
            except OSError:
                vulpo.log.error('Error creating directory: %s' % path)
def umount(self, path):
if os.path.ismount(path):
self.run('umount %s' % path)
def run(self, command, notify=True, exit_on_error=False, cwd=None):
self.last_command = ShellCommand(command, cwd=cwd)
if self.last_command.status != 0:
vulpo.log.error('Error running command: "%s". Output: "%s"' % (command, self.last_command.output))
if notify:
self.notify('Error encountered', \
'Error running the following command:\n\t%s\n\nCommand output:\n\t%s' % \
(command, self.last_command.output))
if exit_on_error:
sys.exit(-1)
return self.last_command.status
def main(self):
pass
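# Illustrative sketch (not part of the original module): a minimal subclass
# showing the intended use of mkdir()/run()/notify(). The command and paths
# are assumptions for the example only.
class ExampleBackupScript(ScriptBase):
    def main(self):
        # ensure the target directory exists before running the command
        self.mkdir('/mnt/backup')
        status = self.run('tar czf /mnt/backup/etc.tgz /etc',
                          notify=True, exit_on_error=False)
        if status == 0:
            self.notify('Backup complete', self.last_command.output)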
| mit | -3,219,314,286,743,809,500 | 31.5 | 110 | 0.565734 | false | 3.620253 | false | false | false |
platipy/spyral | examples/collisions.py | 1 | 1514 | try:
import _path
except NameError:
pass
import spyral
SIZE = (640, 480)
BG_COLOR = (0, 0, 0)
class Square(spyral.Sprite):
def __init__(self, scene, direction, color=(255, 0,0)):
spyral.Sprite.__init__(self, scene)
self.image = spyral.Image(size=(16, 16)).fill(color)
self.direction = direction
self.anchor = 'center'
spyral.event.register("director.update", self.update)
def update(self):
self.x += self.direction * 4
if not self.collide_rect(self.scene.rect):
self.x -= self.direction * 4
self.flip()
def flip(self):
self.direction *= -1
class Game(spyral.Scene):
def __init__(self):
spyral.Scene.__init__(self, SIZE)
self.background = spyral.Image(size=SIZE).fill(BG_COLOR)
self.left_square = Square(self, 1, (0,255,0))
self.left_square.pos = self.rect.midleft
self.right_square = Square(self, -1)
self.right_square.pos = self.rect.midright
spyral.event.register("system.quit", spyral.director.quit)
spyral.event.register("director.update", self.update)
def update(self):
# Collision test
if self.left_square.collide_sprite(self.right_square):
self.right_square.flip()
self.left_square.flip()
if __name__ == "__main__":
spyral.director.init(SIZE) # the director is the manager for your scenes
spyral.director.run(scene=Game()) # This will run your game. It will not return.
| lgpl-2.1 | -6,099,856,198,345,127,000 | 30.541667 | 84 | 0.612285 | false | 3.334802 | false | false | false |
DOAJ/doaj | portality/formcontext/formcontext.py | 1 | 82183 | import json
import uuid
from datetime import datetime
from flask import render_template, url_for, request
from flask_login import current_user
import portality.formcontext.forms
from portality.crosswalks.journal_form import JournalFormXWalk
from portality.crosswalks.article_form import ArticleFormXWalk
from portality.crosswalks.application_form import ApplicationFormXWalk
from portality import constants
from portality import models, app_email, util
from portality.bll import DOAJ
from portality.core import app
from portality.formcontext import forms, render, choices, FormContextException
from portality.lcc import lcc_jstree
from portality.ui.messages import Messages
import portality.notifications.application_emails as emails
from portality.forms.application_forms import JAVASCRIPT_FUNCTIONS
ACC_MSG = 'Please note you <span class="red">cannot edit</span> this application as it has been accepted into the DOAJ.'
SCOPE_MSG = 'Please note you <span class="red">cannot edit</span> this application as you don\'t have the necessary ' \
'account permissions to edit applications which are {0}.'
FIELDS_WITH_DESCRIPTION = ["publisher", "society_institution", "platform", "title", "alternative_title"]
URL_FIELDS = ["url", "processing_charges_url", "submission_charges_url", "articles_last_year_url",
              "digital_archiving_policy_url", "editorial_board_url", "review_process_url",
              "instructions_authors_url", "oa_statement_url", "license_url", "waiver_policy_url",
              "download_statistics_url", "copyright_url", "publishing_rights_url",
              "plagiarism_screening_url", "license_embedded_url", "aims_scope_url"]
class FormContext(object):
def __init__(self, form_data=None, source=None, formulaic_context=None):
# initialise our core properties
self._source = source
self._target = None
self._form_data = form_data
self._form = None
self._renderer = None
self._template = None
self._alert = []
self._info = ''
self._formulaic = formulaic_context
# initialise the renderer (falling back to a default if necessary)
self.make_renderer()
if self.renderer is None:
self.renderer = render.Renderer()
# specify the jinja template that will wrap the renderer
self.set_template()
# now create our form instance, with the form_data (if there is any)
if form_data is not None:
self.data2form()
# if there isn't any form data, then we should create the form properties from source instead
elif source is not None:
self.source2form()
# if there is no source, then a blank form object
else:
self.blank_form()
############################################################
# getters and setters on the main FormContext properties
############################################################
@property
def form(self):
return self._form
@form.setter
def form(self, val):
self._form = val
@property
def source(self):
return self._source
@property
def form_data(self):
return self._form_data
@property
def target(self):
return self._target
@target.setter
def target(self, val):
self._target = val
@property
def renderer(self):
return self._renderer
@renderer.setter
def renderer(self, val):
self._renderer = val
@property
def template(self):
return self._template
@template.setter
def template(self, val):
self._template = val
@property
def alert(self):
return self._alert
def add_alert(self, val):
self._alert.append(val)
@property
def info(self):
return self._info
@info.setter
def info(self, val):
self._info = val
############################################################
# Lifecycle functions that subclasses should implement
############################################################
def make_renderer(self):
"""
        This will be called during init, and must populate the self.renderer property
"""
pass
def set_template(self):
"""
This will be called during init, and must populate the self.template property with the path to the jinja template
"""
pass
def pre_validate(self):
"""
This will be run before validation against the form is run.
Use it to patch the form with any relevant data, such as fields which were disabled
"""
pass
def blank_form(self):
"""
        This will be called during init, and must populate the self.form property with a blank instance of the form
        in this context, based on no originating source or form data
"""
pass
def data2form(self):
"""
This will be called during init, and must convert the form_data into an instance of the form in this context,
and write to self.form
"""
pass
def source2form(self):
"""
This will be called during init, and must convert the source object into an instance of the form in this
context, and write to self.form
"""
pass
def form2target(self):
"""
Convert the form object into a the target system object, and write to self.target
"""
pass
def patch_target(self):
"""
Patch the target with data from the source. This will be run by the finalise method (unless you override it)
"""
pass
def finalise(self, *args, **kwargs):
"""
Finish up with the FormContext. Carry out any final workflow tasks, etc.
"""
self.form2target()
self.patch_target()
############################################################
# Functions which can be called directly, but may be overridden if desired
############################################################
def validate(self):
self.pre_validate()
f = self.form
valid = False
if f is not None:
valid = f.validate()
# if this isn't a valid form, record the fields that have errors
# with the renderer for use later
if not valid:
error_fields = []
for field in self.form:
if field.errors:
error_fields.append(field.short_name)
return valid
@property
def errors(self):
f = self.form
if f is not None:
return f.errors
return False
def render_template(self, **kwargs):
return render_template(self.template, form_context=self, **kwargs)
#def render_field_group(self, field_group_name=None, **kwargs):
# return self.renderer.render_field_group(self, field_group_name, **kwargs)
def fieldset(self, fieldset_name=None):
return self._formulaic.fieldset(fieldset_name)
def fieldsets(self):
return self._formulaic.fieldsets()
def check_field_group_exists(self, field_group_name):
return self.renderer.check_field_group_exists(field_group_name)
@property
def ui_settings(self):
return self._formulaic.ui_settings
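# Illustrative sketch (comment only; the concrete subclass and template kwargs
# are assumptions): the intended lifecycle of any FormContext subclass, per
# the docstrings above.
#
#     fc = ManEdApplicationReview(source=application, form_data=request.form)
#     if fc.validate():      # pre_validate(), then WTForms validation
#         fc.finalise()      # form2target(), patch_target(), then workflow
#     else:
#         page = fc.render_template(lcc_jstree="{}")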
class PrivateContext(FormContext):
def _expand_descriptions(self, fields):
# add the contents of a few fields to their descriptions since select2 autocomplete
# would otherwise obscure the full values
for field in fields:
if field in self.form.data:
if self.form[field].data:
if not self.form[field].description:
self.form[field].description = '<small>Full contents: ' + self.form[field].data + '</small>'
else:
self.form[field].description += '<br><br><small>Full contents: ' + self.form[field].data + '</small>'
def _expand_url_descriptions(self, fields):
# add the contents of a few fields to their descriptions since select2 autocomplete
# would otherwise obscure the full values
for field in fields:
if field in self.form.data:
if self.form[field].data:
if not self.form[field].description:
                        self.form[field].description = '<small>Full contents: <a href="' + self.form[field].data + '" target="_blank">' + self.form[field].data + '</a></small>'
                    else:
                        self.form[field].description += '<br><br><small>Full contents: <a href="' + self.form[field].data + '" target="_blank">' + self.form[field].data + '</a></small>'
def _carry_fixed_aspects(self):
if self.source is None:
raise FormContextException("Cannot carry data from a non-existent source")
now = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
# copy over any important fields from the previous version of the object
created_date = self.source.created_date if self.source.created_date else now
self.target.set_created(created_date)
if "id" in self.source.data:
self.target.data['id'] = self.source.data['id']
try:
if self.source.current_application:
self.target.set_current_application(self.source.current_application)
except AttributeError:
# this means that the source doesn't know about current_applications, which is fine
pass
try:
if self.source.current_journal:
self.target.set_current_journal(self.source.current_journal)
except AttributeError:
# this means that the source doesn't know about current_journals, which is fine
pass
try:
if self.source.related_journal:
self.target.set_related_journal(self.source.related_journal)
except AttributeError:
# this means that the source doesn't know about related_journals, which is fine
pass
try:
if self.source.related_applications:
related = self.source.related_applications
for rel in related:
self.target.add_related_application(rel.get("application_id"), rel.get("date_accepted"))
except AttributeError:
# this means that the source doesn't know about related_applications, which is fine
pass
# if the source is a journal, we need to carry the in_doaj flag
if isinstance(self.source, models.Journal):
self.target.set_in_doaj(self.source.is_in_doaj())
@staticmethod
def _subjects2str(subjects):
subject_strings = []
for sub in subjects:
subject_strings.append('{term}'.format(term=sub.get('term')))
return ', '.join(subject_strings)
def _merge_notes_forward(self, allow_delete=False):
if self.source is None:
raise FormContextException("Cannot carry data from a non-existent source")
if self.target is None:
raise FormContextException("Cannot carry data on to a non-existent target - run the xwalk first")
# first off, get the notes (by reference) in the target and the notes from the source
tnotes = self.target.notes
snotes = self.source.notes
# if there are no notes, we might not have the notes by reference, so later will
# need to set them by value
apply_notes_by_value = len(tnotes) == 0
# for each of the target notes we need to get the original dates from the source notes
for n in tnotes:
for sn in snotes:
if n.get("note") == sn.get("note"):
n["date"] = sn.get("date")
# record the positions of any blank notes
i = 0
removes = []
for n in tnotes:
if n.get("note").strip() == "":
removes.append(i)
i += 1
# actually remove all the notes marked for deletion
removes.sort(reverse=True)
for r in removes:
tnotes.pop(r)
# finally, carry forward any notes that aren't already in the target
if not allow_delete:
for sn in snotes:
found = False
for tn in tnotes:
if sn.get("note") == tn.get("note"):
found = True
if not found:
tnotes.append(sn)
if apply_notes_by_value:
self.target.set_notes(tnotes)
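    # Illustrative walk-through (comment only; note texts are invented): given
    # source notes [{"note": "keep me", "date": "2015-01-01T00:00:00Z"}] and
    # target notes [{"note": "keep me"}, {"note": "  "}], the merge above
    # restores the original date on "keep me", drops the blank note, and, when
    # allow_delete is False, copies forward any source note missing from the
    # target.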
def _populate_editor_field(self, editor_group_name):
"""Set the editor field choices from a given editor group name"""
if editor_group_name is None:
self.form.editor.choices = [("", "")]
else:
eg = models.EditorGroup.pull_by_key("name", editor_group_name)
if eg is not None:
editors = [eg.editor]
editors += eg.associates
editors = list(set(editors))
self.form.editor.choices = [("", "Choose an editor")] + [(editor, editor) for editor in editors]
else:
self.form.editor.choices = [("", "")]
def _validate_editor_field(self):
""" Validate the choice of editor, which could be out of sync with the group in exceptional circumstances """
editor = self.form.editor.data
if editor is not None and editor != "":
editor_group_name = self.form.editor_group.data
if editor_group_name is not None and editor_group_name != "":
eg = models.EditorGroup.pull_by_key("name", editor_group_name)
if eg is not None:
all_eds = eg.associates + [eg.editor]
if editor in all_eds:
return # success - an editor group was found and our editor was in it
raise FormContextException("Editor '{0}' not found in editor group '{1}'".format(editor, editor_group_name))
else:
raise FormContextException("An editor has been assigned without an editor group")
def _carry_continuations(self):
if self.source is None:
raise FormContextException("Cannot carry data from a non-existent source")
try:
sbj = self.source.bibjson()
tbj = self.target.bibjson()
if sbj.replaces:
tbj.replaces = sbj.replaces
if sbj.is_replaced_by:
tbj.is_replaced_by = sbj.is_replaced_by
if sbj.discontinued_date:
tbj.discontinued_date = sbj.discontinued_date
except AttributeError:
            # this means that the source doesn't know about continuations, which is fine
pass
class ApplicationContext(PrivateContext):
ERROR_MSG_TEMPLATE = \
"""Problem while creating account while turning suggestion into journal.
There should be a {missing_thing} on user {username} but there isn't.
Created the user but not sending the email.
""".replace("\n", ' ')
def _carry_fixed_aspects(self):
super(ApplicationContext, self)._carry_fixed_aspects()
if self.source.suggested_on is not None:
self.target.suggested_on = self.source.suggested_on
def _create_account_on_suggestion_approval(self, suggestion, journal):
o = models.Account.pull(suggestion.owner)
if o:
self.add_alert('Account {username} already exists, so simply associating the journal with it.'.format(username=o.id))
o.add_journal(journal.id)
if not o.has_role('publisher'):
o.add_role('publisher')
o.save()
return o
suggestion_contact = util.listpop(suggestion.contacts())
if not suggestion_contact.get('email'):
            # no account exists at this point (o is None), so report the owner id from the suggestion
            msg = self.ERROR_MSG_TEMPLATE.format(username=suggestion.owner, missing_thing='journal contact email in the application')
            app.logger.error(msg)
            self.add_alert(msg)
            return None
send_info_to = suggestion_contact.get('email')
o = models.Account.make_account(
suggestion.owner,
name=suggestion_contact.get('name'),
email=send_info_to,
roles=['publisher'],
associated_journal_ids=[journal.id]
)
o.save()
if not o.reset_token:
msg = self.ERROR_MSG_TEMPLATE.format(username=o.id, missing_thing='reset token')
app.logger.error(msg)
self.add_alert(msg)
return o
url_root = request.url_root
if url_root.endswith("/"):
url_root = url_root[:-1]
reset_url = url_root + url_for('account.reset', reset_token=o.reset_token)
forgot_pw_url = url_root + url_for('account.forgot')
password_create_timeout_seconds = int(app.config.get("PASSWORD_CREATE_TIMEOUT", app.config.get('PASSWORD_RESET_TIMEOUT', 86400) * 14))
password_create_timeout_days = password_create_timeout_seconds / (60*60*24)
to = [send_info_to]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
subject = app.config.get("SERVICE_NAME","") + " - account created"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name="email/account_created.txt",
reset_url=reset_url,
username=o.id,
timeout_days=password_create_timeout_days,
forgot_pw_url=forgot_pw_url
)
self.add_alert('Sent email to ' + send_info_to + ' to tell them about the new account.')
else:
self.add_alert('Did not email to ' + send_info_to + ' to tell them about the new account, as publisher emailing is disabled.')
if app.config.get('DEBUG', False):
self.add_alert('Debug mode - url for create is <a href="{url}">{url}</a>'.format(url=reset_url))
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the account creation email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
if app.config.get('DEBUG', False):
self.add_alert('Debug mode - url for create is <a href="{url}">{url}</a>'.format(url=reset_url))
app.logger.exception('Error sending account creation email - ' + magic)
self.add_alert('Account {username} created'.format(username=o.id))
return o
def _send_application_approved_email(self, journal_title, publisher_name, email, journal_contact, update_request=False):
"""Email the publisher when an application is accepted (it's here because it's too troublesome to factor out)"""
url_root = request.url_root
if url_root.endswith("/"):
url_root = url_root[:-1]
to = [email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
subject = app.config.get("SERVICE_NAME", "") + " - journal accepted"
publisher_name = publisher_name if publisher_name is not None else "Journal Owner"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
msg = Messages.SENT_ACCEPTED_APPLICATION_EMAIL.format(email=email)
template = "email/publisher_application_accepted.txt"
if update_request:
msg = Messages.SENT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=email)
template = "email/publisher_update_request_accepted.txt"
jn = journal_title #.encode('utf-8', 'replace')
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name=template,
journal_title=jn,
publisher_name=publisher_name,
journal_contact=journal_contact,
url_root=url_root
)
self.add_alert(msg)
else:
msg = Messages.NOT_SENT_ACCEPTED_APPLICATION_EMAIL.format(email=email)
if update_request:
msg = Messages.NOT_SENT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=email)
self.add_alert(msg)
except Exception as e:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the journal acceptance information email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending application approved email failed - ' + magic)
def _send_contact_approved_email(self, journal_title, journal_contact, email, publisher_name, update_request=False):
"""Email the journal contact when an application is accepted """
url_root = request.url_root
if url_root.endswith("/"):
url_root = url_root[:-1]
to = [email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
subject = app.config.get("SERVICE_NAME", "") + " - journal accepted"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
template = "email/contact_application_accepted.txt"
alert = Messages.SENT_JOURNAL_CONTACT_ACCEPTED_APPLICATION_EMAIL.format(email=to[0])
if update_request: # NOTE: right now, the way this is called, update request is always False. Should deprecate and remove this code.
template = "email/contact_update_request_accepted.txt"
alert = Messages.SENT_JOURNAL_CONTACT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=to[0])
jn = journal_title #.encode('utf-8', 'replace')
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name=template,
journal_title=jn,
journal_contact=journal_contact,
publisher=publisher_name,
url_root=url_root
)
self.add_alert(alert)
else:
alert = Messages.NOT_SENT_JOURNAL_CONTACT_ACCEPTED_APPLICATION_EMAIL.format(email=to[0])
self.add_alert(alert)
except Exception as e:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the journal contact acceptance information email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending accepted email to journal contact - ' + magic)
def render_template(self, **kwargs):
diff = None
cj = None
if self.source is not None:
current_journal = self.source.current_journal
if current_journal is not None:
cj = models.Journal.pull(current_journal)
if cj is not None:
jform = JournalFormXWalk.obj2form(cj)
if "notes" in jform:
del jform["notes"]
aform = ApplicationFormXWalk.obj2form(self.source)
if "notes" in aform:
del aform["notes"]
diff = self._form_diff(jform, aform)
return super(ApplicationContext, self).render_template(
form_diff=diff,
current_journal=cj,
js_functions=JAVASCRIPT_FUNCTIONS,
**kwargs)
def _form_diff(self, journal_form, application_form):
diff = []
for k, v in application_form.items():
try:
q = self.form[k].label
except KeyError:
continue
q_num = self.renderer.question_number(k)
if q_num is None or q_num == "":
q_num = 0
else:
q_num = int(q_num)
if k in journal_form and journal_form[k] != v:
diff.append((k, q_num, q.text, journal_form[k], v))
elif k not in journal_form and q_num != 0:
diff.append((k, q_num, q.text, Messages.DIFF_TABLE_NOT_PRESENT, v))
diff = sorted(diff, key=lambda x: x[1])
return diff
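    # Illustrative sketch (comment only; the field and values are invented):
    # each diff entry produced above is a 5-tuple of
    #     (field name, question number, question text, journal value,
    #      application value)
    # e.g. ("publisher", 4, "Publisher", "Old Press", "New Press"), and the
    # list is sorted by question number.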
class ApplicationFormFactory(object):
@classmethod
def get_form_context(cls, role=None, source=None, form_data=None):
if role is None:
# return PublicApplication(source=source, form_data=form_data)
return None
elif role == "admin":
return ManEdApplicationReview(source=source, form_data=form_data)
elif role == "editor":
return EditorApplicationReview(source=source, form_data=form_data)
elif role == "associate_editor":
return AssEdApplicationReview(source=source, form_data=form_data)
elif role == "publisher":
return PublisherUpdateRequest(source=source, form_data=form_data)
elif role == "update_request_readonly":
return PublisherUpdateRequestReadOnly(source=source, form_data=form_data)
class JournalFormFactory(object):
@classmethod
def get_form_context(cls, role, source=None, form_data=None):
if role == "admin":
return ManEdJournalReview(source=source, form_data=form_data)
elif role == "editor":
return EditorJournalReview(source=source, form_data=form_data)
elif role == "associate_editor":
return AssEdJournalReview(source=source, form_data=form_data)
elif role == "readonly":
return ReadOnlyJournal(source=source, form_data=form_data)
elif role == "bulk_edit":
return ManEdBulkEdit(source=source, form_data=form_data)
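# Illustrative sketch (comment only; the role string and variable names are
# assumptions): both factories above are used the same way, e.g.
#
#     fc = ApplicationFormFactory.get_form_context(role="associate_editor",
#                                                  source=application,
#                                                  form_data=request.form)
#     fc = JournalFormFactory.get_form_context("admin", source=journal)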
class ManEdApplicationReview(ApplicationContext):
"""
Managing Editor's Application Review form. Should be used in a context where the form warrants full
    admin privileges. It will permit conversion of applications to journals, and assignment of owner account
as well as assignment to editorial group.
"""
def make_renderer(self):
self.renderer = render.ManEdApplicationReviewRenderer()
def set_template(self):
self.template = "formcontext/maned_application_review.html"
def blank_form(self):
self.form = forms.ManEdApplicationReviewForm()
self._set_choices()
def data2form(self):
self.form = forms.ManEdApplicationReviewForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.ManEdApplicationReviewForm(data=ApplicationFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
self.info = ACC_MSG
def pre_validate(self):
# Editor field is populated in JS after page load - check the selected editor is actually in that editor group
self._validate_editor_field()
def form2target(self):
self.target = ApplicationFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward(allow_delete=True)
# NOTE: this means you can't unset an owner once it has been set. But you can change it.
if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None):
self.target.set_owner(self.source.owner)
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise FormContextException("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(ManEdApplicationReview, self).finalise()
# FIXME: may want to factor this out of the suggestionformxwalk
        # If we have changed the editors assigned to this application, let them know.
is_editor_group_changed = ApplicationFormXWalk.is_new_editor_group(self.form, self.source)
is_associate_editor_changed = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# delayed import of the DOAJ BLL
from portality.bll.doaj import DOAJ
applicationService = DOAJ.applicationService()
# if this application is being accepted, then do the conversion to a journal
if self.target.application_status == constants.APPLICATION_STATUS_ACCEPTED:
# remember whether this was an update request or not
is_update_request = self.target.current_journal is not None
j = applicationService.accept_application(self.target, current_user._get_current_object())
# record the url the journal is available at in the admin are and alert the user
jurl = url_for("doaj.toc", identifier=j.toc_id)
if self.source.current_journal is not None:
self.add_alert('<a href="{url}" target="_blank">Existing journal updated</a>.'.format(url=jurl))
else:
self.add_alert('<a href="{url}" target="_blank">New journal created</a>.'.format(url=jurl))
# create the user account for the owner and send the notification email
try:
owner = self._create_account_on_suggestion_approval(self.target, j)
names = []
for contact in j.contacts():
names.append(contact.get("name"))
journal_contacts = ", ".join(names)
# for all acceptances, send an email to the owner of the journal
self._send_application_approved_email(j.bibjson().title, owner.name, owner.email, journal_contacts, self.source.current_journal is not None)
# in the case of a new application, also send emails to the journal contacts
if not is_update_request:
for contact in j.contacts():
self._send_contact_approved_email(j.bibjson().title, contact.get("name"), contact.get("email"), owner.name, self.source.current_journal is not None)
except app_email.EmailException:
self.add_alert("Problem sending email to suggester - probably address is invalid")
app.logger.exception("Acceptance email to owner failed.")
# if the application was instead rejected, carry out the rejection actions
elif self.source.application_status != constants.APPLICATION_STATUS_REJECTED and self.target.application_status == constants.APPLICATION_STATUS_REJECTED:
# remember whether this was an update request or not
is_update_request = self.target.current_journal is not None
# reject the application
applicationService.reject_application(self.target, current_user._get_current_object())
# if this was an update request, send an email to the owner
if is_update_request:
sent = False
send_report = []
try:
send_report = emails.send_publisher_reject_email(self.target, update_request=is_update_request, send_to_owner=True, send_to_suggester=False)
sent = True
except app_email.EmailException as e:
pass
if sent:
self.add_alert(Messages.SENT_REJECTED_UPDATE_REQUEST_EMAIL.format(user=self.target.owner, email=send_report[0].get("email"), name=send_report[0].get("name")))
else:
self.add_alert(Messages.NOT_SENT_REJECTED_UPDATE_REQUEST_EMAIL.format(user=self.target.owner))
# the application was neither accepted or rejected, so just save it
else:
self.target.set_last_manual_update()
self.target.save()
# if revisions were requested, email the publisher
if self.source.application_status != constants.APPLICATION_STATUS_REVISIONS_REQUIRED and self.target.application_status == constants.APPLICATION_STATUS_REVISIONS_REQUIRED:
try:
emails.send_publisher_update_request_revisions_required(self.target)
self.add_alert(Messages.SENT_REJECTED_UPDATE_REQUEST_REVISIONS_REQUIRED_EMAIL.format(user=self.target.owner))
except app_email.EmailException as e:
self.add_alert(Messages.NOT_SENT_REJECTED_UPDATE_REQUEST_REVISIONS_REQUIRED_EMAIL.format(user=self.target.owner))
# if we need to email the editor and/or the associate, handle those here
if is_editor_group_changed:
try:
emails.send_editor_group_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to editor - probably address is invalid")
app.logger.exception("Email to associate failed.")
if is_associate_editor_changed:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception("Email to associate failed.")
# If this is the first time this application has been assigned to an editor, notify the publisher.
old_ed = self.source.editor
if (old_ed is None or old_ed == '') and self.target.editor is not None:
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_editor_assigned_email(self.target)
else:
alerts = emails.send_publisher_application_editor_assigned_email(self.target)
for alert in alerts:
self.add_alert(alert)
# Inform editor and associate editor if this application was 'ready' or 'completed', but has been changed to 'in progress'
if (self.source.application_status == constants.APPLICATION_STATUS_READY or self.source.application_status == constants.APPLICATION_STATUS_COMPLETED) and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
# First, the editor
try:
emails.send_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the editor of the change in status.')
except AttributeError:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check editor groups are correct. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending review failed email to editor - ' + magic)
# Then the associate
try:
emails.send_assoc_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the assigned associate editor of the change in status.')
except AttributeError:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check an associate editor is assigned. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No associate editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to associate editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending review failed email to associate editor - ' + magic)
# email other managing editors if this was newly set to 'ready'
if self.source.application_status != constants.APPLICATION_STATUS_READY and self.target.application_status == constants.APPLICATION_STATUS_READY:
# this template requires who made the change, say it was an Admin
ed_id = 'an administrator'
try:
emails.send_admin_ready_email(self.target, editor_id=ed_id)
self.add_alert('A confirmation email has been sent to the Managing Editors.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the ready status to managing editors didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending ready status email to managing editors - ' + magic)
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
return super(ManEdApplicationReview, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs)
def _set_choices(self):
self.form.application_status.choices = choices.Choices.choices_for_status('admin', self.source.application_status)
# The first time the form is rendered, it needs to populate the editor drop-down from saved group
egn = self.form.editor_group.data
self._populate_editor_field(egn)
class EditorApplicationReview(ApplicationContext):
"""
    Editor's Application Review form. This should be used in a context where an editor who owns an editorial group
is accessing an application. This prevents re-assignment of Editorial group, but permits assignment of associate
editor. It also permits change in application state, except to "accepted"; therefore this form context cannot
be used to create journals from applications. Deleting notes is not allowed, but adding is.
"""
def make_renderer(self):
self.renderer = render.EditorApplicationReviewRenderer()
self.renderer.set_disabled_fields(["editor_group"])
def set_template(self):
self.template = "formcontext/editor_application_review.html"
def blank_form(self):
self.form = forms.EditorApplicationReviewForm()
self._set_choices()
def data2form(self):
self.form = forms.EditorApplicationReviewForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.EditorApplicationReviewForm(data=ApplicationFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
editor_choices = list(sum(choices.Choices.application_status('editor'), ())) # flattens the list of tuples
if self.source.application_status not in editor_choices:
self.info = SCOPE_MSG.format(self.source.application_status)
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
self.info = ACC_MSG # This is after so we can supersede the last message
def pre_validate(self):
self.form.editor_group.data = self.source.editor_group
if "application_status" in self.renderer.disabled_fields:
self.form.application_status.data = constants.APPLICATION_STATUS_ACCEPTED
def form2target(self):
self.target = ApplicationFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self._carry_continuations()
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise FormContextException("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(EditorApplicationReview, self).finalise()
# Check the status change is valid
choices.Choices.validate_status_change('editor', self.source.application_status, self.target.application_status)
# FIXME: may want to factor this out of the suggestionformxwalk
new_associate_assigned = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# if we need to email the associate because they have just been assigned, handle that here.
if new_associate_assigned:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending associate assigned email')
# If this is the first time this application has been assigned to an editor, notify the publisher.
old_ed = self.source.editor
if (old_ed is None or old_ed == '') and self.target.editor is not None:
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_editor_assigned_email(self.target)
else:
alerts = emails.send_publisher_application_editor_assigned_email(self.target)
for alert in alerts:
self.add_alert(alert)
# Email the assigned associate if the application was reverted from 'completed' to 'in progress' (failed review)
if self.source.application_status == constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
try:
emails.send_assoc_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the assigned associate editor of the change in status.')
except AttributeError as e:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check an associate editor is assigned. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No associate editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to associate editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending failed review email to associate editor - ' + magic)
# email managing editors if the application was newly set to 'ready'
if self.source.application_status != constants.APPLICATION_STATUS_READY and self.target.application_status == constants.APPLICATION_STATUS_READY:
# Tell the ManEds who has made the status change - the editor in charge of the group
editor_group_name = self.target.editor_group
editor_group_id = models.EditorGroup.group_exists_by_name(name=editor_group_name)
editor_group = models.EditorGroup.pull(editor_group_id)
editor_acc = editor_group.get_editor_account()
# record the event in the provenance tracker
models.Provenance.make(current_user, "status:ready", self.target)
editor_id = editor_acc.id
try:
emails.send_admin_ready_email(self.target, editor_id=editor_id)
self.add_alert('A confirmation email has been sent to the Managing Editors.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the ready status to managing editors didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending ready status email to managing editors - ' + magic)
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
return super(EditorApplicationReview, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs)
def _set_choices(self):
if self.source is None:
raise FormContextException("You cannot set choices for a non-existent source")
if self.form.application_status.data == constants.APPLICATION_STATUS_ACCEPTED:
self.form.application_status.choices = choices.Choices.application_status("accepted")
self.renderer.set_disabled_fields(self.renderer.disabled_fields + ["application_status"])
else:
try:
# Assign the choices to the form
self.form.application_status.choices = choices.Choices.choices_for_status('editor', self.source.application_status)
except ValueError:
# If the current status isn't in the editor's status list, it must be out of bounds. Show it greyed out.
self.form.application_status.choices = choices.Choices.application_status("admin")
self.renderer.set_disabled_fields(self.renderer.disabled_fields + ["application_status"])
# get the editor group from the source because it isn't in the form
egn = self.source.editor_group
self._populate_editor_field(egn)
class AssEdApplicationReview(ApplicationContext):
"""
    Associate Editor's Application Review form. This is to be used in a context where an associate editor (fewest rights)
needs to access an application for review. This editor cannot change the editorial group or the assigned editor.
They also cannot change the owner of the application. They cannot set an application to "Accepted" so this form can't
be used to create a journal from an application. They cannot delete, only add notes.
"""
def make_renderer(self):
self.renderer = render.AssEdApplicationReviewRenderer()
def set_template(self):
self.template = "formcontext/assed_application_review.html"
def blank_form(self):
self.form = forms.AssEdApplicationReviewForm()
self._set_choices()
def data2form(self):
self.form = forms.AssEdApplicationReviewForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.AssEdApplicationReviewForm(data=ApplicationFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
associate_editor_choices = list(sum(choices.Choices.application_status(), ())) # flattens the list of tuples
if self.source.application_status not in associate_editor_choices:
self.info = SCOPE_MSG.format(self.source.application_status)
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
self.info = ACC_MSG # This is after so we can supersede the last message
def pre_validate(self):
if "application_status" in self.renderer.disabled_fields:
self.form.application_status.data = constants.APPLICATION_STATUS_ACCEPTED
def form2target(self):
self.target = ApplicationFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self.target.set_seal(self.source.has_seal())
self._carry_continuations()
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise FormContextException("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(AssEdApplicationReview, self).finalise()
# Check the status change is valid
choices.Choices.validate_status_change('associate', self.source.application_status, self.target.application_status)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# inform publisher if this was set to 'in progress' from 'pending'
if self.source.application_status == constants.APPLICATION_STATUS_PENDING and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_inprogress_email(self.target)
else:
alerts = emails.send_publisher_application_inprogress_email(self.target)
for alert in alerts:
self.add_alert(alert)
else:
self.add_alert(Messages.IN_PROGRESS_NOT_SENT_EMAIL_DISABLED)
# inform editor if this was newly set to 'completed'
if self.source.application_status != constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_COMPLETED:
# record the event in the provenance tracker
models.Provenance.make(current_user, "status:completed", self.target)
try:
emails.send_editor_completed_email(self.target)
self.add_alert('A confirmation email has been sent to notify the editor of the change in status.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the ready status to editor email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending completed status email to editor - ' + magic)
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
return super(AssEdApplicationReview, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs)
def _set_choices(self):
if self.form.application_status.data == constants.APPLICATION_STATUS_ACCEPTED:
self.form.application_status.choices = choices.Choices.application_status("accepted")
self.renderer.set_disabled_fields(self.renderer.disabled_fields + ["application_status"])
else:
try:
# Assign the choices to the form
self.form.application_status.choices = choices.Choices.choices_for_status('associate_editor', self.source.application_status)
except ValueError:
# If the current status isn't in the associate editor's status list, it must be out of bounds. Show it greyed out.
self.form.application_status.choices = choices.Choices.application_status("admin")
self.renderer.set_disabled_fields(self.renderer.disabled_fields + ["application_status"])
class PublisherUpdateRequest(ApplicationContext):
def make_renderer(self):
self.renderer = render.PublisherUpdateRequestRenderer()
def set_template(self):
self.template = "formcontext/publisher_update_request.html"
def blank_form(self):
self.form = forms.PublisherUpdateRequestForm()
def data2form(self):
self.form = forms.PublisherUpdateRequestForm(formdata=self.form_data)
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
self._disable_fields()
def source2form(self):
self.form = forms.PublisherUpdateRequestForm(data=ApplicationFormXWalk.obj2form(self.source))
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
self._disable_fields()
def pre_validate(self):
if self.source is None:
raise FormContextException("You cannot validate a form from a non-existent source")
# carry forward the disabled fields
bj = self.source.bibjson()
contact = self.source.contact
self.form.title.data = bj.title
self.form.alternative_title.data = bj.alternative_title
pissn = bj.get_one_identifier(bj.P_ISSN)
if pissn == "": pissn = None
self.form.pissn.data = pissn
eissn = bj.get_one_identifier(bj.E_ISSN)
if eissn == "": eissn = None
self.form.eissn.data = eissn
if len(contact) == 0:
# this will cause a validation failure if the form does not provide them
return
# we copy across the contacts if they are necessary. The contact details are conditionally
# disabled, so they /may/ be set
if "contact_name" in self.renderer.disabled_fields:
self.form.contact_name.data = contact.get("name")
if "contact_email" in self.renderer.disabled_fields:
self.form.contact_email.data = contact.get("email")
if "confirm_contact_email" in self.renderer.disabled_fields:
self.form.confirm_contact_email.data = contact.get("email")
def form2target(self):
self.target = ApplicationFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_subjects_and_seal()
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self._carry_continuations()
# set the suggester to the account owner
acc = models.Account.pull(self.target.owner)
if acc is not None:
self.target.set_suggester(acc.name, acc.email)
# we carry this over for completeness, although it will be overwritten in the finalise() method
self.target.set_application_status(self.source.application_status)
def finalise(self, save_target=True, email_alert=True):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
# if we are allowed to finalise, kick this up to the superclass
super(PublisherUpdateRequest, self).finalise()
# set the status to update_request (if not already)
self.target.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST)
# Save the target
self.target.set_last_manual_update()
if save_target:
saved = self.target.save()
if saved is None:
raise FormContextException("Save on application failed")
# obtain the related journal, and attach the current application id to it
journal_id = self.target.current_journal
from portality.bll.doaj import DOAJ
journalService = DOAJ.journalService()
if journal_id is not None:
journal, _ = journalService.journal(journal_id)
if journal is not None:
journal.set_current_application(self.target.id)
if save_target:
saved = journal.save()
if saved is None:
raise FormContextException("Save on journal failed")
else:
self.target.remove_current_journal()
# email the publisher to tell them we received their update request
if email_alert:
try:
self._send_received_email()
except app_email.EmailException as e:
self.add_alert("We were unable to send you an email confirmation - possible problem with your email address")
app.logger.exception('Error sending reapplication received email to publisher')
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent application")
return super(PublisherUpdateRequest, self).render_template(**kwargs)
def _carry_subjects_and_seal(self):
# carry over the subjects
source_subjects = self.source.bibjson().subjects()
self.target.bibjson().set_subjects(source_subjects)
# carry over the seal
self.target.set_seal(self.source.has_seal())
def _disable_fields(self):
if self.source is None:
raise FormContextException("You cannot disable fields on a not-existent application")
disable = ["title", "alternative_title", "pissn", "eissn"] # these are always disabled
# contact fields are only disabled if they already have content in source
contact = self.source.contact
if contact.get("name"):
disable.append("contact_name")
if contact.get("email"):
disable += ["contact_email", "confirm_contact_email"]
self.renderer.set_disabled_fields(disable)
def _send_received_email(self):
acc = models.Account.pull(self.target.owner)
if acc is None:
self.add_alert("Unable to locate account for specified owner")
return
journal_name = self.target.bibjson().title #.encode('utf-8', 'replace')
to = [acc.email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
subject = app.config.get("SERVICE_NAME","") + " - update request received"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name="email/publisher_update_request_received.txt",
journal_name=journal_name,
username=self.target.owner
)
self.add_alert('A confirmation email has been sent to ' + acc.email + '.')
except app_email.EmailException as e:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the "update request received" email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.error(magic + "\n" + repr(e))
raise e
class PublisherUpdateRequestReadOnly(PrivateContext):
"""
Read Only Application form for publishers. Nothing can be changed. Useful to show publishers what they
currently have submitted for review
"""
def make_renderer(self):
self.renderer = render.PublisherUpdateRequestReadOnlyRenderer()
def set_template(self):
self.template = "formcontext/readonly_application.html"
def blank_form(self):
self.form = forms.PublisherUpdateRequestForm()
self.renderer.disable_all_fields(False)
# self._set_choices()
def data2form(self):
self.form = forms.PublisherUpdateRequestForm(formdata=self.form_data)
# self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
self.renderer.disable_all_fields(False)
def source2form(self):
self.form = forms.PublisherUpdateRequestForm(data=JournalFormXWalk.obj2form(self.source))
# self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
self.renderer.set_disabled_fields(["digital_archiving_policy"])
# self.renderer.disable_all_fields(True)
def form2target(self):
pass # you can't edit objects using this form
def patch_target(self):
pass # you can't edit objects using this form
def finalise(self):
raise FormContextException("You cannot edit applications using the read-only form")
"""
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot view a not-existent journal")
return super(ReadOnlyJournal, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs
)
"""
"""
def _set_choices(self):
# no application status (this is a journal) or editorial info (it's not even in the form) to set
pass
"""
### Journal form contexts ###
class ManEdJournalReview(PrivateContext):
"""
Managing Editor's Journal Review form. Should be used in a context where the form warrants full
admin privileges. It will permit doing every action.
"""
def make_renderer(self):
self.renderer = render.ManEdJournalReviewRenderer()
def set_template(self):
self.template = "formcontext/maned_journal_review.html"
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent journal")
return super(ManEdJournalReview, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs)
def blank_form(self):
self.form = forms.ManEdApplicationReviewForm()
self._set_choices()
def data2form(self):
self.form = forms.ManEdJournalReviewForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.ManEdJournalReviewForm(data=JournalFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def pre_validate(self):
# Editor field is populated in JS after page load - check the selected editor is actually in that editor group
self._validate_editor_field()
def form2target(self):
self.target = JournalFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
# NOTE: this means you can't unset an owner once it has been set. But you can change it.
if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None):
self.target.set_owner(self.source.owner)
self._merge_notes_forward(allow_delete=True)
def _set_choices(self):
# The first time this is rendered, it needs to populate the editor drop-down from saved group
egn = self.form.editor_group.data
self._populate_editor_field(egn)
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(ManEdJournalReview, self).finalise()
# FIXME: may want to factor this out of the suggestionformxwalk
# If we have changed the editors assinged to this application, let them know.
is_editor_group_changed = JournalFormXWalk.is_new_editor_group(self.form, self.source)
is_associate_editor_changed = JournalFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# if we need to email the editor and/or the associate, handle those here
if is_editor_group_changed:
try:
emails.send_editor_group_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to editor - probably address is invalid")
app.logger.exception('Error sending assignment email to editor.')
if is_associate_editor_changed:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending assignment email to associate.')
def validate(self):
# make use of the ability to disable validation, otherwise, let it run
if self.form is not None:
if self.form.make_all_fields_optional.data:
self.pre_validate()
return True
return super(ManEdJournalReview, self).validate()
class ManEdBulkEdit(PrivateContext):
"""
Managing Editor's Journal Review form. Should be used in a context where the form warrants full
admin privileges. It will permit doing every action.
"""
def make_renderer(self):
self.renderer = render.ManEdJournalBulkEditRenderer()
def set_template(self):
self.template = "formcontext/maned_journal_bulk_edit.html"
def blank_form(self):
self.form = forms.ManEdBulkEditJournalForm()
def data2form(self):
self.form = forms.ManEdBulkEditJournalForm(formdata=self.form_data)
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
class EditorJournalReview(PrivateContext):
"""
Editors Journal Review form. This should be used in a context where an editor who owns an editorial group
is accessing a journal. This prevents re-assignment of Editorial group, but permits assignment of associate
editor.
"""
def make_renderer(self):
self.renderer = render.EditorJournalReviewRenderer()
self.renderer.set_disabled_fields(["editor_group"])
def set_template(self):
self.template = "formcontext/editor_journal_review.html"
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent journal")
return super(EditorJournalReview, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs)
def blank_form(self):
self.form = forms.EditorJournalReviewForm()
self._set_choices()
def data2form(self):
self.form = forms.EditorJournalReviewForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.EditorJournalReviewForm(data=JournalFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def form2target(self):
self.target = JournalFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self._merge_notes_forward()
self._carry_continuations()
def pre_validate(self):
self.form.editor_group.data = self.source.editor_group
def _set_choices(self):
if self.source is None:
raise FormContextException("You cannot set choices for a non-existent source")
# get the editor group from the source because it isn't in the form
egn = self.source.editor_group
self._populate_editor_field(egn)
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(EditorJournalReview, self).finalise()
# FIXME: may want to factor this out of the suggestionformxwalk
email_associate = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# if we need to email the associate, handle that here.
if email_associate:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending assignment email to associate.')
class AssEdJournalReview(PrivateContext):
"""
Associate Editors Journal Review form. This is to be used in a context where an associate editor (fewest rights)
needs to access a journal for review. This editor cannot change the editorial group or the assigned editor.
They also cannot change the owner of the journal. They cannot delete, only add notes.
"""
def make_renderer(self):
self.renderer = render.AssEdJournalReviewRenderer()
def set_template(self):
self.template = "formcontext/assed_journal_review.html"
def blank_form(self):
self.form = forms.AssEdJournalReviewForm()
self._set_choices()
def data2form(self):
self.form = forms.AssEdJournalReviewForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.AssEdJournalReviewForm(data=JournalFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def form2target(self):
self.target = JournalFormXWalk.form2obj(self.form)
def patch_target(self):
if self.source is None:
raise FormContextException("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self._carry_continuations()
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise FormContextException("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(AssEdJournalReview, self).finalise()
# Save the target
self.target.set_last_manual_update()
self.target.save()
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot edit a not-existent journal")
return super(AssEdJournalReview, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs
)
def _set_choices(self):
# no application status (this is a journal) or editorial info (it's not even in the form) to set
pass
class ReadOnlyJournal(PrivateContext):
"""
Read Only Journal form. Nothing can be changed. Useful for reviewing a journal and an application
(or update request) side by side in 2 browser windows or tabs.
"""
def make_renderer(self):
self.renderer = render.ReadOnlyJournalRenderer()
def set_template(self):
self.template = "formcontext/readonly_journal.html"
def blank_form(self):
self.form = forms.ReadOnlyJournalForm()
self._set_choices()
def data2form(self):
self.form = forms.ReadOnlyJournalForm(formdata=self.form_data)
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def source2form(self):
self.form = forms.ReadOnlyJournalForm(data=JournalFormXWalk.obj2form(self.source))
self._set_choices()
self._expand_descriptions(FIELDS_WITH_DESCRIPTION)
self._expand_url_descriptions(URL_FIELDS)
def form2target(self):
pass # you can't edit objects using this form
def patch_target(self):
pass # you can't edit objects using this form
def finalise(self):
raise FormContextException("You cannot edit journals using the read-only form")
def render_template(self, **kwargs):
if self.source is None:
raise FormContextException("You cannot view a not-existent journal")
return super(ReadOnlyJournal, self).render_template(
lcc_jstree=json.dumps(lcc_jstree),
subjectstr=self._subjects2str(self.source.bibjson().subjects()),
**kwargs
)
def _set_choices(self):
# no application status (this is a journal) or editorial info (it's not even in the form) to set
pass
class ArticleFormFactory(object):
@classmethod
def get_from_context(cls, role, source=None, form_data=None, user=None):
if role == "admin":
return AdminMetadataArticleForm(source=source, form_data=form_data, user=user)
if role == "publisher":
return PublisherMetadataForm(source=source, form_data=form_data, user=user)
class MetadataForm(FormContext):
def __init__(self, source, form_data, user):
self.user = user
self.author_error = False
super(MetadataForm, self).__init__(source=source, form_data=form_data)
def _set_choices(self):
try:
ic = choices.Choices.choices_for_article_issns(user=self.user, article_id=self.source.id)
self.form.pissn.choices = ic
self.form.eissn.choices = ic
except Exception as e:
print (str(e))
# not logged in, and current_user is broken
# probably you are loading the class from the command line
pass
def modify_authors_if_required(self, request_data):
more_authors = request_data.get("more_authors")
remove_author = None
for v in list(request.values.keys()):
if v.startswith("remove_authors"):
remove_author = v.split("-")[1]
# if the user wants more authors, add an extra entry
if more_authors:
return self.render_template(more_authors=True)
# if the user wants to remove an author, do the various back-flips required
if remove_author is not None:
return self.render_template(remove_authors=remove_author)
def _check_for_author_errors(self, **kwargs):
if "more_authors" in kwargs and kwargs["more_authors"] == True:
self.form.authors.append_entry()
if "remove_authors" in kwargs:
keep = []
while len(self.form.authors.entries) > 0:
entry = self.form.authors.pop_entry()
if entry.short_name == "authors-" + kwargs["remove_author"]:
break
else:
keep.append(entry)
while len(keep) > 0:
self.form.authors.append_entry(keep.pop().data)
def _validate_authors(self):
counted = 0
for entry in self.form.authors.entries:
name = entry.data.get("name")
if name is not None and name != "":
counted += 1
return counted >= 1
def blank_form(self):
self.form = portality.formcontext.forms.ArticleForm()
self._set_choices()
def source2form(self):
self.form = portality.formcontext.forms.ArticleForm()
ArticleFormXWalk.obj2form(self.form, article=self.source)
self._set_choices()
def data2form(self):
self.form = portality.formcontext.forms.ArticleForm(formdata=self.form_data)
self._set_choices()
def form2target(self):
self.target = ArticleFormXWalk.form2obj(form=self.form)
def validate(self):
if not self._validate_authors():
self.author_error = True
if not self.form.validate():
return False
return True
def finalise(self, duplicate_check = True):
self.form2target()
if not self.author_error:
article_service = DOAJ.articleService()
article_service.create_article(self.target, self.user, add_journal_info=True,
update_article_id=self.source.id if self.source is not None else None,
duplicate_check = duplicate_check)
article_url = url_for('doaj.article_page', identifier=self.target.id)
msg, how = Messages.ARTICLE_METADATA_SUBMITTED_FLASH
Messages.flash_with_url(msg.format(url=article_url), how)
else:
return
class PublisherMetadataForm(MetadataForm):
def __init__(self, source, form_data, user):
super(PublisherMetadataForm, self).__init__(source=source, form_data=form_data, user=user)
def set_template(self):
self.template = "publisher/metadata.html"
def render_template(self, **kwargs):
self._check_for_author_errors(**kwargs)
if "validated" in kwargs and kwargs["validated"] == True:
self.blank_form()
return render_template(self.template, form=self.form, form_context=self, author_error=self.author_error)
class AdminMetadataArticleForm(MetadataForm):
def __init__(self, source, form_data, user):
super(AdminMetadataArticleForm, self).__init__(source=source, form_data=form_data, user=user)
def set_template(self):
self.template = "admin/article_metadata.html"
def render_template(self, **kwargs):
self._check_for_author_errors(**kwargs)
return render_template(self.template, form=self.form, form_context=self, author_error=self.author_error)
| apache-2.0 | -3,858,372,472,115,063,000 | 43.59197 | 398 | 0.630581 | false | 4.252018 | false | false | false |
mushtaqak/edx-platform | openedx/core/djangoapps/credit/api.py | 1 | 28489 | """
Contains the APIs for course credit requirements.
"""
import logging
import uuid
import datetime
import pytz
from django.db import transaction
from util.date_utils import to_timestamp
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from student.models import User
from .exceptions import (
InvalidCreditRequirements,
InvalidCreditCourse,
UserIsNotEligible,
CreditProviderNotConfigured,
RequestAlreadyCompleted,
CreditRequestNotFound,
InvalidCreditStatus,
)
from .models import (
CreditCourse,
CreditProvider,
CreditRequirement,
CreditRequirementStatus,
CreditRequest,
CreditEligibility,
)
from .signature import signature, get_shared_secret_key
log = logging.getLogger(__name__)
def set_credit_requirements(course_key, requirements):
"""
Add requirements to given course.
Args:
course_key(CourseKey): The identifier for course
requirements(list): List of requirements to be added
Example:
>>> set_credit_requirements(
"course-v1-edX-DemoX-1T2015",
[
{
"namespace": "reverification",
"name": "i4x://edX/DemoX/edx-reverification-block/assessment_uuid",
"display_name": "Assessment 1",
"criteria": {},
},
{
"namespace": "proctored_exam",
"name": "i4x://edX/DemoX/proctoring-block/final_uuid",
"display_name": "Final Exam",
"criteria": {},
},
{
"namespace": "grade",
"name": "grade",
"display_name": "Grade",
"criteria": {"min_grade": 0.8},
},
])
Raises:
InvalidCreditRequirements
Returns:
None
"""
invalid_requirements = _validate_requirements(requirements)
if invalid_requirements:
invalid_requirements = ", ".join(invalid_requirements)
raise InvalidCreditRequirements(invalid_requirements)
try:
credit_course = CreditCourse.get_credit_course(course_key=course_key)
except CreditCourse.DoesNotExist:
raise InvalidCreditCourse()
old_requirements = CreditRequirement.get_course_requirements(course_key=course_key)
requirements_to_disable = _get_requirements_to_disable(old_requirements, requirements)
if requirements_to_disable:
CreditRequirement.disable_credit_requirements(requirements_to_disable)
for requirement in requirements:
CreditRequirement.add_or_update_course_requirement(credit_course, requirement)
def get_credit_requirements(course_key, namespace=None):
"""
Get credit eligibility requirements of a given course and namespace.
Args:
course_key(CourseKey): The identifier for course
namespace(str): Namespace of requirements
Example:
>>> get_credit_requirements("course-v1-edX-DemoX-1T2015")
{
requirements =
[
{
"namespace": "reverification",
"name": "i4x://edX/DemoX/edx-reverification-block/assessment_uuid",
"display_name": "Assessment 1",
"criteria": {},
},
{
"namespace": "proctored_exam",
"name": "i4x://edX/DemoX/proctoring-block/final_uuid",
"display_name": "Final Exam",
"criteria": {},
},
{
"namespace": "grade",
"name": "grade",
"display_name": "Grade",
"criteria": {"min_grade": 0.8},
},
]
}
Returns:
Dict of requirements in the given namespace
"""
requirements = CreditRequirement.get_course_requirements(course_key, namespace)
return [
{
"namespace": requirement.namespace,
"name": requirement.name,
"display_name": requirement.display_name,
"criteria": requirement.criteria
}
for requirement in requirements
]
@transaction.commit_on_success
def create_credit_request(course_key, provider_id, username):
"""
Initiate a request for credit from a credit provider.
This will return the parameters that the user's browser will need to POST
to the credit provider. It does NOT calculate the signature.
Only users who are eligible for credit (have satisfied all credit requirements) are allowed to make requests.
A provider can be configured either with *integration enabled* or not.
If automatic integration is disabled, this method will simply return
a URL to the credit provider and method set to "GET", so the student can
visit the URL and request credit directly. No database record will be created
to track these requests.
If automatic integration *is* enabled, then this will also return the parameters
that the user's browser will need to POST to the credit provider.
These parameters will be digitally signed using a secret key shared with the credit provider.
A database record will be created to track the request with a 32-character UUID.
The returned dictionary can be used by the user's browser to send a POST request to the credit provider.
If a pending request already exists, this function should return a request description with the same UUID.
(Other parameters, such as the user's full name may be different than the original request).
If a completed request (either accepted or rejected) already exists, this function will
raise an exception. Users are not allowed to make additional requests once a request
has been completed.
Arguments:
course_key (CourseKey): The identifier for the course.
provider_id (str): The identifier of the credit provider.
user (User): The user initiating the request.
Returns: dict
Raises:
UserIsNotEligible: The user has not satisfied eligibility requirements for credit.
CreditProviderNotConfigured: The credit provider has not been configured for this course.
RequestAlreadyCompleted: The user has already submitted a request and received a response
from the credit provider.
Example Usage:
>>> create_credit_request(course.id, "hogwarts", "ron")
{
"url": "https://credit.example.com/request",
"method": "POST",
"parameters": {
"request_uuid": "557168d0f7664fe59097106c67c3f847",
"timestamp": 1434631630,
"course_org": "HogwartsX",
"course_num": "Potions101",
"course_run": "1T2015",
"final_grade": 0.95,
"user_username": "ron",
"user_email": "[email protected]",
"user_full_name": "Ron Weasley",
"user_mailing_address": "",
"user_country": "US",
"signature": "cRCNjkE4IzY+erIjRwOQCpRILgOvXx4q2qvx141BCqI="
}
}
"""
try:
user_eligibility = CreditEligibility.objects.select_related('course').get(
username=username,
course__course_key=course_key
)
credit_course = user_eligibility.course
credit_provider = credit_course.providers.get(provider_id=provider_id)
except (CreditEligibility.DoesNotExist, CreditProvider.DoesNotExist):
log.warning(u'User tried to initiate a request for credit, but the user is not eligible for credit')
raise UserIsNotEligible
# Check if we've enabled automatic integration with the credit
# provider. If not, we'll show the user a link to a URL
# where the user can request credit directly from the provider.
# Note that we do NOT track these requests in our database,
# since the state would always be "pending" (we never hear back).
if not credit_provider.enable_integration:
return {
"url": credit_provider.provider_url,
"method": "GET",
"parameters": {}
}
else:
# If automatic credit integration is enabled, then try
# to retrieve the shared signature *before* creating the request.
# That way, if there's a misconfiguration, we won't have requests
# in our system that we know weren't sent to the provider.
shared_secret_key = get_shared_secret_key(credit_provider.provider_id)
if shared_secret_key is None:
msg = u'Credit provider with ID "{provider_id}" does not have a secret key configured.'.format(
provider_id=credit_provider.provider_id
)
log.error(msg)
raise CreditProviderNotConfigured(msg)
# Initiate a new request if one has not already been created
credit_request, created = CreditRequest.objects.get_or_create(
course=credit_course,
provider=credit_provider,
username=username,
)
# Check whether we've already gotten a response for a request,
# If so, we're not allowed to issue any further requests.
# Skip checking the status if we know that we just created this record.
if not created and credit_request.status != "pending":
log.warning(
(
u'Cannot initiate credit request because the request with UUID "%s" '
u'exists with status "%s"'
), credit_request.uuid, credit_request.status
)
raise RequestAlreadyCompleted
if created:
credit_request.uuid = uuid.uuid4().hex
# Retrieve user account and profile info
user = User.objects.select_related('profile').get(username=username)
# Retrieve the final grade from the eligibility table
try:
final_grade = CreditRequirementStatus.objects.get(
username=username,
requirement__namespace="grade",
requirement__name="grade",
status="satisfied"
).reason["final_grade"]
except (CreditRequirementStatus.DoesNotExist, TypeError, KeyError):
log.exception(
"Could not retrieve final grade from the credit eligibility table "
"for user %s in course %s.",
user.id, course_key
)
raise UserIsNotEligible
parameters = {
"request_uuid": credit_request.uuid,
"timestamp": to_timestamp(datetime.datetime.now(pytz.UTC)),
"course_org": course_key.org,
"course_num": course_key.course,
"course_run": course_key.run,
"final_grade": final_grade,
"user_username": user.username,
"user_email": user.email,
"user_full_name": user.profile.name,
"user_mailing_address": (
user.profile.mailing_address
if user.profile.mailing_address is not None
else ""
),
"user_country": (
user.profile.country.code
if user.profile.country.code is not None
else ""
),
}
credit_request.parameters = parameters
credit_request.save()
if created:
log.info(u'Created new request for credit with UUID "%s"', credit_request.uuid)
else:
log.info(
u'Updated request for credit with UUID "%s" so the user can re-issue the request',
credit_request.uuid
)
# Sign the parameters using a secret key we share with the credit provider.
parameters["signature"] = signature(parameters, shared_secret_key)
return {
"url": credit_provider.provider_url,
"method": "POST",
"parameters": parameters
}
def update_credit_request_status(request_uuid, provider_id, status):
"""
Update the status of a credit request.
Approve or reject a request for a student to receive credit in a course
from a particular credit provider.
This function does NOT check that the status update is authorized.
The caller needs to handle authentication and authorization (checking the signature
of the message received from the credit provider)
The function is idempotent; if the request has already been updated to the status,
the function does nothing.
Arguments:
request_uuid (str): The unique identifier for the credit request.
provider_id (str): Identifier for the credit provider.
status (str): Either "approved" or "rejected"
Returns: None
Raises:
CreditRequestNotFound: No request exists that is associated with the given provider.
InvalidCreditStatus: The status is not either "approved" or "rejected".
"""
if status not in ["approved", "rejected"]:
raise InvalidCreditStatus
try:
request = CreditRequest.objects.get(uuid=request_uuid, provider__provider_id=provider_id)
old_status = request.status
request.status = status
request.save()
log.info(
u'Updated request with UUID "%s" from status "%s" to "%s" for provider with ID "%s".',
request_uuid, old_status, status, provider_id
)
except CreditRequest.DoesNotExist:
msg = (
u'Credit provider with ID "{provider_id}" attempted to '
u'update request with UUID "{request_uuid}", but no request '
u'with this UUID is associated with the provider.'
).format(provider_id=provider_id, request_uuid=request_uuid)
log.warning(msg)
raise CreditRequestNotFound(msg)
def get_credit_requests_for_user(username):
"""
Retrieve the status of a credit request.
Returns either "pending", "accepted", or "rejected"
Arguments:
username (unicode): The username of the user who initiated the requests.
Returns: list
Example Usage:
>>> get_credit_request_status_for_user("bob")
[
{
"uuid": "557168d0f7664fe59097106c67c3f847",
"timestamp": 1434631630,
"course_key": "course-v1:HogwartsX+Potions101+1T2015",
"provider": {
"id": "HogwartsX",
"display_name": "Hogwarts School of Witchcraft and Wizardry",
},
"status": "pending" # or "approved" or "rejected"
}
]
"""
return CreditRequest.credit_requests_for_user(username)
def get_credit_requirement_status(course_key, username, namespace=None, name=None):
""" Retrieve the user's status for each credit requirement in the course.
Args:
course_key (CourseKey): The identifier for course
username (str): The identifier of the user
Example:
>>> get_credit_requirement_status("course-v1-edX-DemoX-1T2015", "john")
[
{
"namespace": "reverification",
"name": "i4x://edX/DemoX/edx-reverification-block/assessment_uuid",
"display_name": "In Course Reverification",
"criteria": {},
"status": "failed",
},
{
"namespace": "proctored_exam",
"name": "i4x://edX/DemoX/proctoring-block/final_uuid",
"display_name": "Proctored Mid Term Exam",
"criteria": {},
"status": "satisfied",
},
{
"namespace": "grade",
"name": "i4x://edX/DemoX/proctoring-block/final_uuid",
"display_name": "Minimum Passing Grade",
"criteria": {"min_grade": 0.8},
"status": "failed",
},
]
Returns:
list of requirement statuses
"""
requirements = CreditRequirement.get_course_requirements(course_key, namespace=namespace, name=name)
requirement_statuses = CreditRequirementStatus.get_statuses(requirements, username)
requirement_statuses = dict((o.requirement, o) for o in requirement_statuses)
statuses = []
for requirement in requirements:
requirement_status = requirement_statuses.get(requirement)
statuses.append({
"namespace": requirement.namespace,
"name": requirement.name,
"display_name": requirement.display_name,
"criteria": requirement.criteria,
"status": requirement_status.status if requirement_status else None,
"status_date": requirement_status.modified if requirement_status else None,
})
return statuses
def is_user_eligible_for_credit(username, course_key):
"""Returns a boolean indicating if the user is eligible for credit for
the given course
Args:
username(str): The identifier for user
course_key (CourseKey): The identifier for course
Returns:
True if user is eligible for the course else False
"""
return CreditEligibility.is_user_eligible_for_credit(course_key, username)
def get_credit_requirement(course_key, namespace, name):
"""Returns the requirement of a given course, namespace and name.
Args:
course_key(CourseKey): The identifier for course
namespace(str): Namespace of requirement
name(str): Name of the requirement
Returns: dict
Example:
>>> get_credit_requirement_status(
"course-v1-edX-DemoX-1T2015", "proctored_exam", "i4x://edX/DemoX/proctoring-block/final_uuid"
)
{
"course_key": "course-v1-edX-DemoX-1T2015"
"namespace": "reverification",
"name": "i4x://edX/DemoX/edx-reverification-block/assessment_uuid",
"display_name": "reverification"
"criteria": {},
}
"""
requirement = CreditRequirement.get_course_requirement(course_key, namespace, name)
return {
"course_key": requirement.course.course_key,
"namespace": requirement.namespace,
"name": requirement.name,
"display_name": requirement.display_name,
"criteria": requirement.criteria
} if requirement else None
def set_credit_requirement_status(username, course_key, req_namespace, req_name, status="satisfied", reason=None):
"""
Update the user's requirement status.
This will record whether the user satisfied or failed a particular requirement
in a course. If the user has satisfied all requirements, the user will be marked
as eligible for credit in the course.
Args:
username (str): Username of the user
course_key (CourseKey): Identifier for the course associated with the requirement.
req_namespace (str): Namespace of the requirement (e.g. "grade" or "reverification")
req_name (str): Name of the requirement (e.g. "grade" or the location of the ICRV XBlock)
Keyword Arguments:
status (str): Status of the requirement (either "satisfied" or "failed")
reason (dict): Reason of the status
Example:
>>> set_credit_requirement_status(
"staff",
CourseKey.from_string("course-v1-edX-DemoX-1T2015"),
"reverification",
"i4x://edX/DemoX/edx-reverification-block/assessment_uuid",
status="satisfied",
reason={}
)
"""
# Check if we're already eligible for credit.
# If so, short-circuit this process.
if CreditEligibility.is_user_eligible_for_credit(course_key, username):
return
# Retrieve all credit requirements for the course
# We retrieve all of them to avoid making a second query later when
# we need to check whether all requirements have been satisfied.
reqs = CreditRequirement.get_course_requirements(course_key)
# Find the requirement we're trying to set
req_to_update = next((
req for req in reqs
if req.namespace == req_namespace
and req.name == req_name
), None)
# If we can't find the requirement, then the most likely explanation
# is that there was a lag updating the credit requirements after the course
# was published. We *could* attempt to create the requirement here,
# but that could cause serious performance issues if many users attempt to
# lock the row at the same time.
# Instead, we skip updating the requirement and log an error.
if req_to_update is None:
log.error(
(
u'Could not update credit requirement in course "%s" '
u'with namespace "%s" and name "%s" '
u'because the requirement does not exist. '
u'The user "%s" should have had his/her status updated to "%s".'
),
unicode(course_key), req_namespace, req_name, username, status
)
return
# Update the requirement status
CreditRequirementStatus.add_or_update_requirement_status(
username, req_to_update, status=status, reason=reason
)
# If we're marking this requirement as "satisfied", there's a chance
# that the user has met all eligibility requirements.
if status == "satisfied":
CreditEligibility.update_eligibility(reqs, username, course_key)
def _get_requirements_to_disable(old_requirements, new_requirements):
"""
Get the ids of 'CreditRequirement' entries to be disabled that are
deleted from the courseware.
Args:
old_requirements(QuerySet): QuerySet of CreditRequirement
new_requirements(list): List of requirements being added
Returns:
List of ids of CreditRequirement that are not in new_requirements
"""
requirements_to_disable = []
for old_req in old_requirements:
found_flag = False
for req in new_requirements:
# check if an already added requirement is modified
if req["namespace"] == old_req.namespace and req["name"] == old_req.name:
found_flag = True
break
if not found_flag:
requirements_to_disable.append(old_req.id)
return requirements_to_disable
def _validate_requirements(requirements):
"""
Validate the requirements.
Args:
requirements(list): List of requirements
Returns:
List of strings of invalid requirements
"""
invalid_requirements = []
for requirement in requirements:
invalid_params = []
if not requirement.get("namespace"):
invalid_params.append("namespace")
if not requirement.get("name"):
invalid_params.append("name")
if not requirement.get("display_name"):
invalid_params.append("display_name")
if "criteria" not in requirement:
invalid_params.append("criteria")
if invalid_params:
invalid_requirements.append(
u"{requirement} has missing/invalid parameters: {params}".format(
requirement=requirement,
params=invalid_params,
)
)
return invalid_requirements
def is_credit_course(course_key):
"""API method to check if course is credit or not.
Args:
course_key(CourseKey): The course identifier string or CourseKey object
Returns:
Bool True if the course is marked credit else False
"""
try:
course_key = CourseKey.from_string(unicode(course_key))
except InvalidKeyError:
return False
return CreditCourse.is_credit_course(course_key=course_key)
def get_credit_request_status(username, course_key):
"""Get the credit request status.
This function returns the status of credit request of user for given course.
It returns the latest request status for the any credit provider.
The valid status are 'pending', 'approved' or 'rejected'.
Args:
username(str): The username of user
course_key(CourseKey): The course locator key
Returns:
A dictionary of credit request user has made if any
"""
credit_request = CreditRequest.get_user_request_status(username, course_key)
if credit_request:
credit_status = {
"uuid": credit_request.uuid,
"timestamp": credit_request.modified,
"course_key": credit_request.course.course_key,
"provider": {
"id": credit_request.provider.provider_id,
"display_name": credit_request.provider.display_name
},
"status": credit_request.status
}
else:
credit_status = {}
return credit_status
def _get_duration_and_providers(credit_course):
"""Returns the credit providers and eligibility durations.
The eligibility_duration is the max of the credit duration of
all the credit providers of given course.
Args:
credit_course(CreditCourse): The CreditCourse object
Returns:
Tuple of eligibility_duration and credit providers of given course
"""
providers = credit_course.providers.all()
seconds_good_for_display = 0
providers_list = []
for provider in providers:
providers_list.append(
{
"id": provider.provider_id,
"display_name": provider.display_name,
"eligibility_duration": provider.eligibility_duration,
"provider_url": provider.provider_url
}
)
eligibility_duration = int(provider.eligibility_duration) if provider.eligibility_duration else 0
seconds_good_for_display = max(eligibility_duration, seconds_good_for_display)
return seconds_good_for_display, providers_list
def get_credit_eligibility(username):
"""
Returns the all the eligibility the user has meet.
Args:
username(str): The username of user
Example:
>> get_credit_eligibility('Aamir'):
{
"edX/DemoX/Demo_Course": {
"created_at": "2015-12-21",
"providers": [
"id": 12,
"display_name": "Arizona State University",
"eligibility_duration": 60,
"provider_url": "http://arizona/provideere/link"
],
"seconds_good_for_display": 90
}
}
Returns:
A dict of eligibilities
"""
eligibilities = CreditEligibility.get_user_eligibility(username)
user_credit_requests = get_credit_requests_for_user(username)
request_dict = {}
# Change the list to dict for iteration
for request in user_credit_requests:
request_dict[unicode(request["course_key"])] = request
user_eligibilities = {}
for eligibility in eligibilities:
course_key = eligibility.course.course_key
duration, providers_list = _get_duration_and_providers(eligibility.course)
user_eligibilities[unicode(course_key)] = {
"created_at": eligibility.created,
"seconds_good_for_display": duration,
"providers": providers_list,
}
# Default status is requirements_meet
user_eligibilities[unicode(course_key)]["status"] = "requirements_meet"
# If there is some request user has made for this eligibility then update the status
if unicode(course_key) in request_dict:
user_eligibilities[unicode(course_key)]["status"] = request_dict[unicode(course_key)]["status"]
user_eligibilities[unicode(course_key)]["provider"] = request_dict[unicode(course_key)]["provider"]
return user_eligibilities
def get_purchased_credit_courses(username): # pylint: disable=unused-argument
"""
Returns the purchased credit courses.
Args:
username(str): Username of the student
Returns:
A dict of courses user has purchased from the credit provider after completion
"""
# TODO: How to track the purchased courses. It requires Will's work for credit provider integration
return {}
| agpl-3.0 | 8,880,496,703,872,569,000 | 35.062025 | 114 | 0.614097 | false | 4.482222 | false | false | false |
jonashaag/django-autocomplete-light | autocomplete_light/widgets.py | 1 | 6373 | """
ChoiceWidget is intended to work as a replacement for django's Select widget,
and MultipleChoiceWidget for django's SelectMultiple.
Constructing a widget needs an Autocomplete class or registered autocomplete
name.
The choice autocomplete widget renders from autocomplete_light/widget.html
template.
"""
from django import forms
from django.forms.util import flatatt
from django.utils import safestring
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
__all__ = ['WidgetBase', 'ChoiceWidget', 'MultipleChoiceWidget', 'TextWidget']
class WidgetBase(object):
"""
Base widget for autocompletes.
Mainly handles passing arguments from Python to HTML data-* attributes,
via widget_js_attributes and autocomplete_js_attributes. Javascript will
parse these data-* attributes.
This widget also renders the widget template.
"""
def __init__(self, autocomplete,
widget_js_attributes=None, autocomplete_js_attributes=None,
extra_context=None):
if isinstance(autocomplete, basestring):
self.autocomplete_name = autocomplete
from autocomplete_light import registry
self.autocomplete = registry[self.autocomplete_name]
else:
self.autocomplete = autocomplete
self.autocomplete_name = autocomplete.__class__.__name__
if extra_context is None:
self.extra_context = {}
else:
self.extra_context = extra_context
if widget_js_attributes is None:
self.widget_js_attributes = {}
else:
self.widget_js_attributes = widget_js_attributes
if autocomplete_js_attributes is None:
self.autocomplete_js_attributes = {}
else:
self.autocomplete_js_attributes = autocomplete_js_attributes
def process_js_attributes(self):
extra_autocomplete_js_attributes = getattr(self.autocomplete,
'autocomplete_js_attributes', {})
self.autocomplete_js_attributes.update(
extra_autocomplete_js_attributes)
extra_widget_js_attributes = getattr(self.autocomplete,
'widget_js_attributes', {})
self.widget_js_attributes.update(
extra_widget_js_attributes)
if 'bootstrap' not in self.widget_js_attributes.keys():
self.widget_js_attributes['bootstrap'] = 'normal'
if 'choice_selector' not in self.autocomplete_js_attributes.keys():
self.autocomplete_js_attributes['choice_selector'] = '[data-value]'
if 'url' not in self.autocomplete_js_attributes.keys():
url = self.autocomplete().get_absolute_url()
self.autocomplete_js_attributes['url'] = url
if 'placeholder' not in self.autocomplete_js_attributes.keys():
self.autocomplete_js_attributes['placeholder'] = _(
'type some text to search in this autocomplete').capitalize()
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs)
self.html_id = final_attrs.pop('id', name)
if value is not None and not isinstance(value, (list, tuple)):
values = [value]
else:
values = value
autocomplete = self.autocomplete(values=values)
if values and not autocomplete.validate_values():
raise forms.ValidationError('%s cannot validate %s' % (
self.autocomplete_name, values))
self.process_js_attributes()
autocomplete_name = self.autocomplete_name.lower()
context = {
'name': name,
'values': values,
'widget': self,
'extra_attrs': safestring.mark_safe(flatatt(final_attrs)),
'autocomplete': autocomplete,
}
context.update(self.extra_context)
return safestring.mark_safe(render_to_string([
getattr(autocomplete, 'widget_template', ''),
'autocomplete_light/%s/widget.html' % autocomplete_name,
'autocomplete_light/%s/widget.html' % getattr(autocomplete,
'widget_template_name', ''),
'autocomplete_light/widget.html',
], context))
class ChoiceWidget(WidgetBase, forms.Select):
"""
Widget that provides an autocomplete for zero to one choice.
"""
def __init__(self, autocomplete,
widget_js_attributes=None, autocomplete_js_attributes=None,
extra_context=None, *args, **kwargs):
forms.Select.__init__(self, *args, **kwargs)
WidgetBase.__init__(self, autocomplete,
widget_js_attributes, autocomplete_js_attributes, extra_context)
self.widget_js_attributes['max_values'] = 1
class MultipleChoiceWidget(WidgetBase, forms.SelectMultiple):
"""
Widget that provides an autocomplete for zero to n choices.
"""
def __init__(self, autocomplete=None,
widget_js_attributes=None, autocomplete_js_attributes=None,
extra_context=None, *args, **kwargs):
forms.SelectMultiple.__init__(self, *args, **kwargs)
WidgetBase.__init__(self, autocomplete,
widget_js_attributes, autocomplete_js_attributes, extra_context)
class TextWidget(forms.TextInput, WidgetBase):
""" Widget that just adds an autocomplete to fill a text input """
def __init__(self, autocomplete,
widget_js_attributes=None, autocomplete_js_attributes=None,
*args, **kwargs):
forms.TextInput.__init__(self, *args, **kwargs)
WidgetBase.__init__(self, autocomplete,
widget_js_attributes, autocomplete_js_attributes)
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = forms.TextInput.build_attrs(self, extra_attrs, **kwargs)
def update_attrs(source, prefix=''):
for key, value in source.items():
key = u'data-%s%s' % (prefix, key.replace('_', '-'))
attrs[key] = value
self.process_js_attributes()
update_attrs(self.widget_js_attributes)
update_attrs(self.autocomplete_js_attributes, 'autocomplete-')
if 'class' not in attrs.keys():
attrs['class'] = ''
attrs['class'] += ' autocomplete-light-text-widget'
return attrs
| mit | 1,981,258,003,177,358,000 | 34.405556 | 79 | 0.630943 | false | 4.332427 | false | false | false |
hearsaycorp/normalize | normalize/property/types.py | 1 | 5422 | #
# This file is a part of the normalize python library
#
# normalize is free software: you can redistribute it and/or modify
# it under the terms of the MIT License.
#
# normalize is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
#
# You should have received a copy of the MIT license along with
# normalize. If not, refer to the upstream repository at
# http://github.com/hearsaycorp/normalize
#
"""``normalize.property.types`` provides an assortment of pre-generated
types"""
import six
from past.builtins import basestring
import datetime
import numbers
from sys import maxsize
from . import make_property_type
from ..subtype import subtype
try:
from dateutil.parser import parse as parse_datetime
except ImportError:
formats = {
6: "%y%m%d",
8: "%Y%m%d",
13: "%Y%m%d%H:%M",
14: "%Y%m%d%H:%MZ",
16: "%Y%m%d%H:%M:%S",
17: "%Y%m%d%H:%M:%SZ",
}
def parse_datetime(not_a_datetime):
datetime_stripped = not_a_datetime.replace(
"-", "").replace("T", "").replace(" ", "")
if len(datetime_stripped) in formats:
return datetime.datetime.strptime(
datetime_stripped, formats[len(datetime_stripped)],
)
else:
raise Exception(
"``dateutil`` not installed, so can't parse %r" %
not_a_datetime
)
IntProperty = make_property_type(
"IntProperty", isa=int, trait_name="int",
attrs={
"__doc__": "A property which must be an ``int``",
},
)
LongProperty = make_property_type(
"LongProperty", isa=six.integer_types[-1], trait_name="long",
attrs={
"__doc__": "A property which must be a ``long``",
},
)
IntegerProperty = make_property_type(
"IntegerProperty", isa=numbers.Integral, trait_name="integer",
coerce=lambda x: (
int(x) if abs(float(x)) < maxsize else six.integer_types[-1](x)
),
attrs={
"__doc__": "A property which holds an integer, int or long",
},
)
NumberProperty = make_property_type(
"NumberProperty", isa=numbers.Number, trait_name="number",
coerce=lambda x: coerce_number(x),
attrs={
"__doc__": "A property which holds a number type (eg float, int) "
"with automatic cast from string",
},
)
StringProperty = make_property_type(
"StringProperty", isa=basestring, trait_name="str",
attrs={
"__doc__": "A property which must be a ``basestring`` or "
"``unicode``, and if not, throws a coerce error",
},
)
FloatProperty = make_property_type(
"FloatProperty", isa=float, trait_name="float",
attrs={
"__doc__": "A property which must be a floating point number.",
},
)
UnicodeProperty = make_property_type(
"UnicodeProperty", base_type=StringProperty,
isa=six.text_type, coerce=(lambda s: six.text_type(s)
if isinstance(s, str) else s),
trait_name="unicode",
attrs={
"__doc__": "A property which must be a ``unicode`` or ``str`` "
"(it is upgraded to ``unicode`` if it is passed in as "
"a ``str``)",
},
)
def coerce_datetime(not_a_datetime):
if isinstance(not_a_datetime, date):
tt = not_a_datetime.timetuple()
return datetime.datetime(*(tt[0:6]))
elif isinstance(not_a_datetime, basestring):
return parse_datetime(not_a_datetime)
else:
raise ValueError(
"Cannot coerce %r to a date/datetime" % not_a_datetime
)
def coerce_date(not_a_date):
if isinstance(not_a_date, datetime.datetime) or (
hasattr(not_a_date, "date") and callable(not_a_date.date)
):
return not_a_date.date()
else:
return coerce_datetime(not_a_date).date()
def coerce_number(not_a_number):
if isinstance(not_a_number, basestring):
try:
return six.integer_types[-1](not_a_number)
except ValueError:
return float(not_a_number)
else:
return float(not_a_number)
date = subtype(
"date",
of=datetime.date,
where=lambda x: not isinstance(x, datetime.datetime),
)
DateProperty = make_property_type(
"DateProperty",
trait_name="date", isa=date, coerce=coerce_date,
json_out=lambda dt: dt.isoformat(),
attrs={
"__doc__": "A property which must hold a python date; coercion "
"from string is provided via ``dateutil.parse``. "
"As of normalize v1, if a ``datetime.datetime`` "
"instance is assigned to a ``DateProperty``, it will "
"be truncated to a ``datetime.date``.",
},
)
DatetimeProperty = make_property_type(
"DatetimeProperty",
trait_name="datetime", isa=datetime.datetime,
coerce=coerce_datetime,
json_out=lambda dt: dt.isoformat(),
attrs={
"__doc__": "A property which must holds a python datetime. "
"Correct timezone handling is currently TODO and "
"users should not depend on timezone behavior until "
"this message is removed (submit tests and a patch!)",
},
)
__all__ = tuple(k for k in list(globals().keys()) if k.endswith("Property"))
| mit | -1,562,669,226,994,970,400 | 30.16092 | 76 | 0.601254 | false | 3.781032 | false | false | false |
kewljedi/octoprint-pushbullet | setup.py | 1 | 1047 | # coding=utf-8
import setuptools
def package_data_dirs(source, sub_folders):
import os
dirs = []
for d in sub_folders:
for dirname, _, files in os.walk(os.path.join(source, d)):
dirname = os.path.relpath(dirname, source)
for f in files:
dirs.append(os.path.join(dirname, f))
return dirs
def params():
name = "OctoPrint-Pushbullet"
version = "0.0.1"
description = "Adds support to push OctoPrint events to a Pushbullet channel"
long_description = "TODO"
author = "kewljedi"
author_email = "[email protected]"
url = "https://github.com/kewljedi/octoprint-pushbullet"
license = "GPLv3"
packages = ["octoprint_pushbullet"]
package_data = {"octoprint_pushbullet": package_data_dirs('octoprint_pushbullet', ['static', 'templates'])}
include_package_data = True
zip_safe = False
install_requires = open("requirements.txt").read().split("\n")
entry_points = {
"octoprint.plugin": [
"pushbullet = octoprint_pushbullet"
]
}
return locals()
setuptools.setup(**params()) | gpl-3.0 | 8,190,900,412,091,826,000 | 23.952381 | 109 | 0.671442 | false | 3.271875 | false | false | false |
mateoqac/unqTip | language/vxgbs/lang/gbs_compiler.py | 1 | 29369 | #
# Copyright (C) 2011, 2012 Pablo Barenbaum <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"Gobstones compiler from source ASTs to virtual machine code."
import lang.gbs_vm
import lang.gbs_builtins
import lang.gbs_type
import lang.gbs_def_helper as def_helper
import lang.gbs_constructs  # needed for the isinstance checks in compile_imports
import common.i18n as i18n
import common.position
import common.utils
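# Typical usage (a sketch; tree construction is handled by the parser
# elsewhere in the package, and the names below are illustrative):
#
#     tree = ...  # parsed program AST, carrying a module_handler
#     program = GbsCompiler().compile_program(tree)
#     # program is a lang.gbs_vm.GbsCompiledProgram ready for the VM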
class GbsCompileException(common.utils.StaticException):
"Base exception for Gobstones compiler errors."
pass
def parse_literal(tok):
"""Given a token, parse its string value and return the denotated
Gobstones value.
"""
if tok.type == 'symbol':
val = tok.value
else:
val = lang.gbs_builtins.parse_constant(tok.value)
#assert val is not None
if val is None:
val = tok.value
return val
class GbsLabel(object):
"Represents a unique label in the program."
def __repr__(self):
return 'L_%s' % (id(self),)
class GbsCompiler(object):
"Compiler of Gobstones programs."
def __init__(self):
self.code = None
self.temp_counter = None
self.module_handler = None
self._current_def_name = None
self.constructor_of_type = {"Arreglo":"Arreglo"}
def compile_program(self, tree, module_prefix='', explicit_board=None):
"""Given an AST for a full program, compile it to virtual machine
code, returning an instance of lang.gbs_vm.GbsCompiledProgram.
The Main module should be given the empty module prefix ''.
Every other module should be given the module name as a prefix.
"""
if explicit_board is None:
entrypoint_tree = def_helper.find_def(tree.children[2], def_helper.is_entrypoint_def)
self.explicit_board = len(entrypoint_tree.children[2].children) != 0
else:
self.explicit_board = explicit_board
self.module_handler = tree.module_handler
self.compile_imported_modules(tree)
imports = tree.children[1].children
defs = tree.children[2]
self.code = lang.gbs_vm.GbsCompiledProgram(
tree, module_prefix=module_prefix)
self.compile_imports(imports)
self.user_defined_routine_names = list(self.code.external_routines.keys())
self.user_defined_routine_names += def_helper.get_routine_names(defs)
self.compile_defs(defs)
return self.code
def compile_imported_modules(self, tree):
"Recursively compile the imported modules."
for mdl_name, mdl_tree in self.module_handler.parse_trees():
compiler = GbsCompiler()
try:
code = compiler.compile_program(
mdl_tree, module_prefix=mdl_name, explicit_board=self.explicit_board
)
self.constructor_of_type.update(compiler.constructor_of_type)
except common.utils.SourceException as exception:
self.module_handler.reraise(
GbsCompileException,
exception,
i18n.i18n(
'Error compiling module %s'
) % (
mdl_name,
),
common.position.ProgramAreaNear(tree.children[1]))
self.module_handler.set_compiled_code(mdl_name, code)
def compile_imports(self, imports):
"""Add the imported procedures and functions to the local
namespace of routines.
"""
for imp in imports:
mdl_name = imp.children[1].value
rtns = imp.children[2].children
for rtn in rtns:
if (not isinstance(rtn, lang.gbs_constructs.UserType) and
not isinstance(rtn, lang.gbs_constructs.BuiltinFieldGetter)):
mdl_code = self.module_handler.compiled_code_for(mdl_name)
if rtn.name() in mdl_code.routines:
self.code.external_routines[rtn.name()] = (
mdl_code,
mdl_code.routines[rtn.name()]
)
else:
assert rtn.name() in mdl_code.external_routines
val = mdl_code.external_routines[rtn.name()]
self.code.external_routines[rtn.name()] = val
def compile_defs(self, tree):
"Compile a list of definitions."
self.temp_counter = 0
for def_ in tree.children:
if def_helper.is_type_def(def_):
self.gather_type_data(def_)
else:
self.compile_routine_def(def_)
def gather_type_data(self, def_):
_, type_name, type_or_def = def_.children
if type_or_def.children[0] == 'record':
self.constructor_of_type[type_name.value] = type_name.value
else:
body = type_or_def.children[1]
for case in body.children:
_, cname, _ = case.children
self.constructor_of_type[cname.value] = type_name.value
def temp_varname(self):
"Make a temporary variable name."
self.temp_counter += 1
return '_tempvar%i' % (self.temp_counter)
def compile_routine_def(self, tree):
"Compile a single definition."
prfn = def_helper.get_def_keyword(tree)
name = def_helper.get_def_name(tree).value
self._current_def_name = name
params = [param.value for param in def_helper.get_def_params(tree)]
immutable_params = []
if prfn == 'function':
immutable_params = params
elif prfn == 'procedure' and len(params) > 1:
immutable_params = params[1:]
code = lang.gbs_vm.GbsCompiledCode(tree, prfn, name, params, self.explicit_board)
code.add_enter()
for p in immutable_params:
code.push(('setImmutable', p), near=tree)
self.compile_commands(def_helper.get_def_body(tree), code)
if prfn == 'procedure' and self.explicit_board:
code.push(('pushFrom', params[0]), near=tree)
code.add_leave_return()
code.build_label_table()
self.code.routines[name] = code
#### The following methods take a program fragment in form of an AST
#### and a "code" argument, which should be an instance of
#### lang.gbs_vm.GbsCompiledCode.
####
#### The compilation process appends to the compiled code the virtual
#### machine code corresponding to the given program fragment.
#### Commands
def compile_commands(self, tree, code):
"Compile a sequence of commands."
for cmd in tree.children:
self.compile_cmd(cmd, code)
def compile_cmd(self, tree, code):
"Compile a single command."
command = tree.children[0]
dispatch = {
'Skip': self.compile_skip,
'THROW_ERROR': self.compile_boom,
'procCall': self.compile_proc_call,
'assignVarName': self.compile_assign_var_name,
'assignVarTuple1': self.compile_assign_var_tuple1,
'if': self.compile_if,
'case': self.compile_case,
'while': self.compile_while,
'repeat': self.compile_repeat,
'repeatWith': self.compile_repeat_with,
'foreach': self.compile_foreach,
'block': self.compile_block,
'return': self.compile_return,
}
assert command in dispatch
dispatch[command](tree, code)
def compile_type(self, tree, code):
"""Compile a type expression. Just fill a hole in construct() function.
In a future, it could be usefull for runtime type checks. [CHECK]"""
tok = tree.children[1]
type = self.constructor_of_type[tok.value] + "::" + tok.value
code.push(('pushConst', type), near=tree)
def compile_skip(self, tree, code):
"Compile a Skip command."
pass
def compile_boom(self, tree, code):
"Compile a THROW_ERROR command."
code.push(('THROW_ERROR', tree.children[1].value), near=tree)
def compile_proc_call(self, tree, code):
"Compile a procedure call."
procname = tree.children[1].value
args = tree.children[2].children
if self.explicit_board:
inout_var = args[0]
type_annotation = None
if hasattr(tree, 'type_annotation'):
type_annotation = tree.type_annotation
        for i, arg in enumerate(args):
self.compile_expression(arg, code)
code.push(('call', procname, len(args)), near=tree)
if self.explicit_board:
code.push(('popTo', inout_var.children[1].value), near=tree)
def compile_projectable_var_check(self, tree, code, var):
"Compile a projectable variable check. Varname is pushed to stack."
code.push(('pushConst', var), near=tree)
code.push(('call', '_checkProjectableVar', 1), near=tree)
def compile_assign_var_name(self, tree, code):
"Compile an assignment: <lvalue> := <expr>"
offsets = tree.children[2].children
if len(offsets) > 0:
#calculate assignment reference
var = tree.children[1].children[1].value
self.compile_projectable_var_check(tree, code, var)
code.push(('pushFrom', var), near=tree)
for offset in offsets:
if offset.children[0] == 'index':
self.compile_expression(offset.children[1], code)
else:
code.push(('pushConst', parse_literal(offset.children[1].children[1])), near=tree)
code.push(('call', '_getRef', 2), near=tree)
#compile expression
self.compile_expression(tree.children[3], code)
#Set ref
code.push(('call', '_SetRefValue', 2), near=tree)
else:
#compile expression
self.compile_expression(tree.children[3], code)
#assign varname
full_varname = '.'.join([tok.value for tok in tree.children[1].children[1:]])
code.push(('popTo', full_varname), near=tree)
def compile_assign_var_tuple1(self, tree, code):
"Compile a tuple assignment: (v1, ..., vN) := f(...)"
self.compile_expression(tree.children[2], code)
varnames = [var.value for var in tree.children[1].children]
for var in common.utils.seq_reversed(varnames):
code.push(('popTo', var), near=tree)
def compile_if(self, tree, code):
"Compile a conditional statement."
lelse = GbsLabel()
self.compile_expression(tree.children[1], code) # cond
        code.push(('jumpIfFalse', lelse), near=tree)
self.compile_block(tree.children[2], code) # then
if tree.children[3] is None:
code.push(('label', lelse), near=tree)
else:
lend = GbsLabel()
code.push(('jump', lend), near=tree)
code.push(('label', lelse), near=tree)
self.compile_block(tree.children[3], code) # else
code.push(('label', lend), near=tree)
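    # For reference, ``if (c) {A} else {B}`` lowers to the sequence:
    #
    #     <c>; jumpIfFalse Lelse; <A>; jump Lend; label Lelse; <B>; label Lend
    #
    # and with no else-branch the sequence simply ends at ``label Lelse``.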
def compile_case(self, tree, code):
"Compile a case statement."
# case (Value) of
        #   Lits1 -> {Body1}
        #   ...
        #   LitsN -> {BodyN}
# _ -> {BodyElse}
#
# Compiles to code corresponding to:
#
# value0 := Value
# if (value0 in Lits1) {Body1}
# elif (value0 in Lits2) {Body2}
# ...
# elif (value0 in LitsN) {BodyN}
# else {BodyElse}
value = tree.children[1]
value0 = self.temp_varname()
self.compile_expression(value, code)
# value0 := value
code.push(('popTo', value0), near=tree)
lend = GbsLabel()
next_label = None
for branch in tree.children[2].children:
if next_label is not None:
code.push(('label', next_label), near=tree)
if branch.children[0] == 'branch':
lits = [parse_literal(lit) for lit in branch.children[1].children]
next_label = GbsLabel()
# if value0 in LitsI
code.push(('pushFrom', value0), near=tree)
code.push(('jumpIfNotIn', lits, next_label), near=tree)
# BodyI
self.compile_block(branch.children[2], code)
code.push(('jump', lend), near=tree)
else: # defaultBranch
# BodyElse
self.compile_block(branch.children[1], code)
code.push(('label', lend), near=tree)
def compile_match(self, tree, code):
"Compile a match statement."
# match (<Expr-V>) of
# <Case-1> -> <Expr-1>
# <Case-2> -> <Expr-2>
# ...
# <Case-N> -> <Expr-N>
# _ -> <Expr-Else>
#
# Compiles to code corresponding to:
#
# case := _extract_case(<Expr-V>)
# if (case == <Case-1>) <Expr-1>
# elif (case == <Case-2>) <Expr-2>
# ...
# elif (case == <Case-N>) <Expr-N>
# else <Expr-Else>
value = tree.children[1]
value0 = self.temp_varname()
self.compile_expression(value, code)
        # This is a runtime function to extract the type name
code.push(('call', '_extract_case', 1), near=tree)
# value0 := value
code.push(('popTo', value0), near=tree)
lend = GbsLabel()
next_label = None
default_branch = False
for branch in tree.children[2].children:
            if next_label is not None:
code.push(('label', next_label), near=tree)
if branch.children[0] == 'branch':
case_i = parse_literal(branch.children[1])
next_label = GbsLabel()
# if value0 in LitsI
code.push(('pushFrom', value0), near=tree)
code.push(('pushConst', case_i), near=tree)
code.push(('call', '==', 2), near=tree)
code.push(('jumpIfFalse', next_label), near=tree)
# BodyI
self.compile_expression(branch.children[2], code)
code.push(('jump', lend), near=tree)
else: # defaultBranch
# BodyElse
default_branch = True
self.compile_expression(branch.children[1], code)
if not default_branch:
code.push(('label', next_label), near=tree)
code.push(('THROW_ERROR', '"' + i18n.i18n('Expression has no matching branch.') + '"'), near=tree)
code.push(('label', lend), near=tree)
def compile_while(self, tree, code):
"Compile a while statement."
lbegin = GbsLabel()
lend = GbsLabel()
code.push(('label', lbegin), near=tree)
self.compile_expression(tree.children[1], code) # cond
code.push(('jumpIfFalse', lend), near=tree)
self.compile_block(tree.children[2], code) # body
code.push(('jump', lbegin), near=tree)
code.push(('label', lend), near=tree)
def compile_repeat(self, tree, code):
"Compile a repeat statement."
#
# repeat (<Expr>) <Block>
#
# Compiles to code corresponding to
# the following fragment:
#
# counter := <Expr>
# while (true) {
# if (not (counter > 0)) { break }
# <Block>
# counter := counter - 1
# }
#
times = tree.children[1]
body = tree.children[2]
counter = self.temp_varname()
lbegin = GbsLabel()
lend = GbsLabel()
# counter := <Expr>
self.compile_expression(times, code)
code.push(('popTo', counter), near=tree)
# while (true) {
code.push(('label', lbegin), near=tree)
# if (not (counter > 0) { break }
code.push(('pushFrom', counter), near=tree)
code.push(('pushConst', 0), near=tree)
code.push(('call', '>', 2), near=tree)
code.push(('jumpIfFalse', lend), near=tree)
# <Block>
self.compile_block(body, code)
# counter := counter - 1
code.push(('pushFrom', counter), near=tree)
code.push(('pushConst', 1), near=tree)
code.push(('call', '-', 2), near=tree)
code.push(('popTo', counter), near=tree)
# end while
code.push(('jump', lbegin), near=tree)
code.push(('label', lend), near=tree)
code.push(('delVar', counter), near=tree)
def compile_foreach(self, tree, code):
"Compile a foreach statement."
#
# foreach <Index> in <List> <Block>
#
# Compiles to code corresponding to
# the following fragment:
#
# xs0 := <List>
# while (true) {
# if (isEmpty(xs0)) break;
# <Index> := head(xs0)
# setImmutable(<Index>)
# <Block>
# unsetImmutable(<Index>)
        #     xs0 := tail(xs0)
# }
#
def jumpIfIsEmpty(var, label):
code.push(('pushFrom', var), near=tree)
code.push(('call', i18n.i18n('isEmpty'), 1), near=tree)
code.push(('call', 'not', 1), near=tree)
code.push(('jumpIfFalse', label), near=tree)
def head(listVar, var):
code.push(('pushFrom', listVar), near=tree)
code.push(('call', i18n.i18n('head'), 1), near=tree)
code.push(('popTo', var), near=tree)
def tail(listVar, var):
code.push(('pushFrom', listVar), near=tree)
code.push(('call', i18n.i18n('tail'), 1), near=tree)
code.push(('popTo', var), near=tree)
index = tree.children[1].value
list_ = tree.children[2]
body = tree.children[3]
xs0 = self.temp_varname()
lbegin = GbsLabel()
lend = GbsLabel()
lend2 = GbsLabel()
# xs0 := <List>
self.compile_expression(list_, code)
code.push(('popTo', xs0), near=tree)
# while (true) {
code.push(('label', lbegin), near=tree)
# if (isEmpty(xs0)) break;
jumpIfIsEmpty(xs0, lend)
# <Index> := head(xs0)
head(xs0, index)
# setImmutable(<Index>)
code.push(('setImmutable', index), near=tree)
# <Block>
self.compile_block(body, code)
        # unsetImmutable(<Index>)
code.push(('unsetImmutable', index), near=tree)
# xs0 := tail(xs0)
tail(xs0, xs0)
# }
code.push(('jump', lbegin), near=tree)
code.push(('label', lend2), near=tree)
code.push(('delVar', index), near=tree)
code.push(('label', lend), near=tree)
def compile_repeat_with(self, tree, code):
"Compile a repeatWith statement."
#
# repeatWith i in Lower..Upper {BODY}
#
# Compiles to code corresponding to
# the following fragment:
#
# i := Lower
# upper0 := Upper
# if (i <= upper0) {
# while (true) {
# {BODY}
# if (i == upper0) break;
# i := next(i)
# }
# }
#
def call_next():
"""Add a VM instruction for calling the builtin 'next' function,
which operates on any iterable value.
"""
name = i18n.i18n('next')
if hasattr(tree, 'index_type_annotation'):
name = lang.gbs_builtins.polyname(
name,
[repr(tree.index_type_annotation)])
code.push(('call', name, 1), near=tree)
# upper0 is preserved in the stack
i = tree.children[1].value
limit_lower = tree.children[2].children[1]
limit_upper = tree.children[2].children[2]
body = tree.children[3]
upper0 = self.temp_varname()
lbegin = GbsLabel()
lend = GbsLabel()
# i := Lower
self.compile_expression(limit_lower, code)
code.push(('popTo', i), near=tree)
code.push(('setImmutable', i), near=tree)
# upper0 := Upper
self.compile_expression(limit_upper, code)
code.push(('popTo', upper0), near=tree)
# if i <= upper0
code.push(('pushFrom', i), near=tree)
code.push(('pushFrom', upper0), near=tree)
code.push(('call', '<=', 2), near=tree)
code.push(('jumpIfFalse', lend), near=tree)
# while true
code.push(('label', lbegin), near=tree)
# body
self.compile_block(body, code)
# if (i == upper0) break
code.push(('pushFrom', i), near=tree)
code.push(('pushFrom', upper0), near=tree)
code.push(('call', '/=', 2), near=tree)
code.push(('jumpIfFalse', lend), near=tree)
# i := next(i)
code.push(('pushFrom', i), near=tree)
call_next()
code.push(('unsetImmutable', i), near=tree)
code.push(('popTo', i), near=tree)
code.push(('setImmutable', i), near=tree)
# end while
code.push(('jump', lbegin), near=tree)
code.push(('label', lend), near=tree)
code.push(('delVar', i), near=tree)
def compile_block(self, tree, code):
"Compile a block statement."
self.compile_commands(tree.children[1], code)
def compile_return(self, tree, code):
"Compile a return statement."
vals = tree.children[1].children
for val in vals:
self.compile_expression(val, code)
if self._current_def_name == 'program':
vrs = []
expr_count = 1
for v in tree.children[1].children:
if v.children[0] == 'varName':
vrs.append(v.children[1].value)
else:
vrs.append("#%s" % (expr_count,))
expr_count += 1
if hasattr(tree, 'type_annot'):
# Decorate the return variables with their types.
types = [
repr(subtype)
for subtype in tree.type_annot.subtypes()
]
vrs = [
lang.gbs_builtins.polyname(vname, [vtype])
for vname, vtype in zip(vrs, types)
]
code.push(('returnVars', len(vals), vrs), near=tree)
else:
code.push(('return', len(vals)), near=tree)
#### Expressions
def compile_expression(self, tree, code):
"Compile an expression."
exptype = tree.children[0]
dispatch = {
'or': self.compile_or,
'and': self.compile_and,
'not': self.compile_not,
'relop': self.compile_binary_op,
'addsub': self.compile_binary_op,
'mul': self.compile_binary_op,
'divmod': self.compile_binary_op,
'pow': self.compile_binary_op,
'listop': self.compile_binary_op,
'projection': self.compile_binary_op,
'constructor': self.compile_func_call,
'varName': self.compile_var_name,
'funcCall': self.compile_func_call,
'match': self.compile_match,
'unaryMinus': self.compile_unary_minus,
'literal': self.compile_literal,
'type': self.compile_type,
}
if exptype in dispatch:
dispatch[exptype](tree, code)
else:
msg = i18n.i18n('Unknown expression: %s') % (exptype,)
area = common.position.ProgramAreaNear(tree)
raise GbsCompileException(msg, area)
def get_type_annotation(self, tree):
if hasattr(tree, 'type_annotation'):
return tree.type_annotation
else:
return None
def compile_binary_op(self, tree, code):
"Compile a binary operator expression."
type_annotation = self.get_type_annotation(tree)
self.compile_expression(tree.children[2], code)
self.compile_expression(tree.children[3], code)
code.push(('call', tree.children[1].value, 2), near=tree)
def compile_not(self, tree, code):
"Compile a boolean not expression."
self.compile_expression(tree.children[1], code)
code.push(('call', 'not', 1), near=tree)
def compile_or(self, tree, code):
"Compile a short-circuiting disjunction."
lcontinue = GbsLabel()
lend = GbsLabel()
type_annotation = self.get_type_annotation(tree)
self.compile_expression(tree.children[2], code)
code.push(('jumpIfFalse', lcontinue), near=tree)
code.push(('pushConst', lang.gbs_builtins.parse_constant('True')),
near=tree)
code.push(('jump', lend), near=tree)
code.push(('label', lcontinue), near=tree)
self.compile_expression(tree.children[3], code)
code.push(('label', lend), near=tree)
def compile_and(self, tree, code):
"Compile a short-circuiting conjunction."
lcontinue = GbsLabel()
lend = GbsLabel()
type_annotation = self.get_type_annotation(tree)
self.compile_expression(tree.children[2], code)
code.push(('jumpIfFalse', lcontinue), near=tree)
self.compile_expression(tree.children[3], code)
code.push(('jump', lend), near=tree)
code.push(('label', lcontinue), near=tree)
code.push(('pushConst', lang.gbs_builtins.parse_constant('False')),
near=tree)
code.push(('label', lend), near=tree)
def compile_unary_minus(self, tree, code):
"Compile a unary minus expression."
funcname = 'unary-'
args = tree.children[1:]
self._compile_func_call_poly(tree, funcname, args, code)
def compile_var_name(self, tree, code):
"Compile a variable name expression."
offsets = tree.children[2].children
var = tree.children[1].value
code.push(('pushFrom', var), near=tree)
if len(offsets) > 0:
self.compile_projectable_var_check(tree, code, var)
#calculate assignment reference
for offset in offsets:
self.compile_expression(offset.children[1], code)
code.push(('call', '_getRef', 2), near=tree)
code.push(('call', '_getRefValue', 1), near=tree)
def compile_func_call(self, tree, code):
"Compile a function call."
funcname = tree.children[1].value
args = tree.children[2].children
if lang.gbs_builtins.is_defined(funcname) or funcname in self.user_defined_routine_names:
self._compile_func_call_poly(tree, funcname, args, code)
else:
self._compile_field_getter(tree, funcname, args, code)
def _compile_field_getter(self, tree, field_name, args, code):
self.compile_expression(args[0], code)
field = tree.children[1]
field.type = 'symbol'
code.push(('pushConst', parse_literal(field)), near=tree)
code.push(('call', '_get_field', 2), near=tree)
def _compile_func_call_poly(self, tree, funcname, args, code):
"Compile a potentially polymorphic function call."
polys = lang.gbs_builtins.BUILTINS_POLYMORPHIC
annotate = True
annotate = annotate and funcname in polys
annotate = annotate and hasattr(tree, 'type_annotation')
annotate = annotate and isinstance(tree.type_annotation, list)
type_annotation = None
if hasattr(tree, 'type_annotation'):
type_annotation = tree.type_annotation
        for i, arg in enumerate(args):
self.compile_expression(arg, code)
if annotate:
funcname = lang.gbs_builtins.polyname(
funcname,
[repr(ann) for ann in tree.type_annotation])
code.push(('call', funcname, len(args)), near=tree)
def compile_literal(self, tree, code):
"Compile a constant expression."
tok = tree.children[1]
code.push(('pushConst', parse_literal(tok)), near=tree)
def compile_program(tree):
"Compile a full Gobstones program."
compiler = GbsCompiler()
return compiler.compile_program(tree)
| gpl-3.0 | -6,050,560,207,559,410,000 | 37.899338 | 110 | 0.548367 | false | 3.871984 | false | false | false |
altair-viz/altair | altair/utils/core.py | 1 | 20886 | """
Utility routines
"""
from collections.abc import Mapping
from copy import deepcopy
import json
import itertools
import re
import sys
import traceback
import warnings
import jsonschema
import pandas as pd
import numpy as np
from .schemapi import SchemaBase, Undefined
try:
from pandas.api.types import infer_dtype as _infer_dtype
except ImportError:
# Import for pandas < 0.20.0
from pandas.lib import infer_dtype as _infer_dtype
def infer_dtype(value):
"""Infer the dtype of the value.
This is a compatibility function for pandas infer_dtype,
with skipna=False regardless of the pandas version.
"""
if not hasattr(infer_dtype, "_supports_skipna"):
try:
_infer_dtype([1], skipna=False)
except TypeError:
# pandas < 0.21.0 don't support skipna keyword
infer_dtype._supports_skipna = False
else:
infer_dtype._supports_skipna = True
if infer_dtype._supports_skipna:
return _infer_dtype(value, skipna=False)
else:
return _infer_dtype(value)
TYPECODE_MAP = {
"ordinal": "O",
"nominal": "N",
"quantitative": "Q",
"temporal": "T",
"geojson": "G",
}
INV_TYPECODE_MAP = {v: k for k, v in TYPECODE_MAP.items()}
# aggregates from vega-lite version 4.6.0
AGGREGATES = [
"argmax",
"argmin",
"average",
"count",
"distinct",
"max",
"mean",
"median",
"min",
"missing",
"product",
"q1",
"q3",
"ci0",
"ci1",
"stderr",
"stdev",
"stdevp",
"sum",
"valid",
"values",
"variance",
"variancep",
]
# window aggregates from vega-lite version 4.6.0
WINDOW_AGGREGATES = [
"row_number",
"rank",
"dense_rank",
"percent_rank",
"cume_dist",
"ntile",
"lag",
"lead",
"first_value",
"last_value",
"nth_value",
]
# timeUnits from vega-lite version 4.6.0
TIMEUNITS = [
"utcyear",
"utcquarter",
"utcmonth",
"utcday",
"utcdate",
"utchours",
"utcminutes",
"utcseconds",
"utcmilliseconds",
"utcyearquarter",
"utcyearquartermonth",
"utcyearmonth",
"utcyearmonthdate",
"utcyearmonthdatehours",
"utcyearmonthdatehoursminutes",
"utcyearmonthdatehoursminutesseconds",
"utcquartermonth",
"utcmonthdate",
"utcmonthdatehours",
"utchoursminutes",
"utchoursminutesseconds",
"utcminutesseconds",
"utcsecondsmilliseconds",
"year",
"quarter",
"month",
"day",
"date",
"hours",
"minutes",
"seconds",
"milliseconds",
"yearquarter",
"yearquartermonth",
"yearmonth",
"yearmonthdate",
"yearmonthdatehours",
"yearmonthdatehoursminutes",
"yearmonthdatehoursminutesseconds",
"quartermonth",
"monthdate",
"monthdatehours",
"hoursminutes",
"hoursminutesseconds",
"minutesseconds",
"secondsmilliseconds",
]
def infer_vegalite_type(data):
"""
From an array-like input, infer the correct vega typecode
('ordinal', 'nominal', 'quantitative', or 'temporal')
Parameters
----------
data: Numpy array or Pandas Series
"""
# Otherwise, infer based on the dtype of the input
typ = infer_dtype(data)
# TODO: Once this returns 'O', please update test_select_x and test_select_y in test_api.py
if typ in [
"floating",
"mixed-integer-float",
"integer",
"mixed-integer",
"complex",
]:
return "quantitative"
elif typ in ["string", "bytes", "categorical", "boolean", "mixed", "unicode"]:
return "nominal"
elif typ in [
"datetime",
"datetime64",
"timedelta",
"timedelta64",
"date",
"time",
"period",
]:
return "temporal"
else:
warnings.warn(
"I don't know how to infer vegalite type from '{}'. "
"Defaulting to nominal.".format(typ)
)
return "nominal"
def merge_props_geom(feat):
"""
Merge properties with geometry
* Overwrites 'type' and 'geometry' entries if existing
"""
geom = {k: feat[k] for k in ("type", "geometry")}
try:
feat["properties"].update(geom)
props_geom = feat["properties"]
except (AttributeError, KeyError):
# AttributeError when 'properties' equals None
# KeyError when 'properties' is non-existing
props_geom = geom
return props_geom
def sanitize_geo_interface(geo):
"""Santize a geo_interface to prepare it for serialization.
* Make a copy
* Convert type array or _Array to list
* Convert tuples to lists (using json.loads/dumps)
* Merge properties with geometry
"""
geo = deepcopy(geo)
# convert type _Array or array to list
for key in geo.keys():
if str(type(geo[key]).__name__).startswith(("_Array", "array")):
geo[key] = geo[key].tolist()
# convert (nested) tuples to lists
geo = json.loads(json.dumps(geo))
# sanitize features
if geo["type"] == "FeatureCollection":
geo = geo["features"]
if len(geo) > 0:
for idx, feat in enumerate(geo):
geo[idx] = merge_props_geom(feat)
elif geo["type"] == "Feature":
geo = merge_props_geom(geo)
else:
geo = {"type": "Feature", "geometry": geo}
return geo
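# Example (traced through the function above): a bare geometry is wrapped
# into a Feature, and tuples become lists via the JSON round-trip:
#
#     sanitize_geo_interface({'type': 'Point', 'coordinates': (0, 1)})
#     -> {'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 1]}}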
def sanitize_dataframe(df): # noqa: C901
"""Sanitize a DataFrame to prepare it for serialization.
* Make a copy
* Convert RangeIndex columns to strings
* Raise ValueError if column names are not strings
* Raise ValueError if it has a hierarchical index.
* Convert categoricals to strings.
* Convert np.bool_ dtypes to Python bool objects
* Convert np.int dtypes to Python int objects
* Convert floats to objects and replace NaNs/infs with None.
* Convert DateTime dtypes into appropriate string representations
* Convert Nullable integers to objects and replace NaN with None
* Convert Nullable boolean to objects and replace NaN with None
* convert dedicated string column to objects and replace NaN with None
* Raise a ValueError for TimeDelta dtypes
"""
df = df.copy()
if isinstance(df.columns, pd.RangeIndex):
df.columns = df.columns.astype(str)
for col in df.columns:
if not isinstance(col, str):
raise ValueError(
"Dataframe contains invalid column name: {0!r}. "
"Column names must be strings".format(col)
)
if isinstance(df.index, pd.MultiIndex):
raise ValueError("Hierarchical indices not supported")
if isinstance(df.columns, pd.MultiIndex):
raise ValueError("Hierarchical indices not supported")
def to_list_if_array(val):
if isinstance(val, np.ndarray):
return val.tolist()
else:
return val
for col_name, dtype in df.dtypes.iteritems():
if str(dtype) == "category":
# XXXX: work around bug in to_json for categorical types
# https://github.com/pydata/pandas/issues/10778
col = df[col_name].astype(object)
df[col_name] = col.where(col.notnull(), None)
elif str(dtype) == "string":
# dedicated string datatype (since 1.0)
# https://pandas.pydata.org/pandas-docs/version/1.0.0/whatsnew/v1.0.0.html#dedicated-string-data-type
col = df[col_name].astype(object)
df[col_name] = col.where(col.notnull(), None)
elif str(dtype) == "bool":
# convert numpy bools to objects; np.bool is not JSON serializable
df[col_name] = df[col_name].astype(object)
elif str(dtype) == "boolean":
# dedicated boolean datatype (since 1.0)
# https://pandas.io/docs/user_guide/boolean.html
col = df[col_name].astype(object)
df[col_name] = col.where(col.notnull(), None)
elif str(dtype).startswith("datetime"):
# Convert datetimes to strings. This needs to be a full ISO string
# with time, which is why we cannot use ``col.astype(str)``.
# This is because Javascript parses date-only times in UTC, but
# parses full ISO-8601 dates as local time, and dates in Vega and
# Vega-Lite are displayed in local time by default.
# (see https://github.com/altair-viz/altair/issues/1027)
df[col_name] = (
df[col_name].apply(lambda x: x.isoformat()).replace("NaT", "")
)
elif str(dtype).startswith("timedelta"):
raise ValueError(
'Field "{col_name}" has type "{dtype}" which is '
"not supported by Altair. Please convert to "
"either a timestamp or a numerical value."
"".format(col_name=col_name, dtype=dtype)
)
elif str(dtype).startswith("geometry"):
# geopandas >=0.6.1 uses the dtype geometry. Continue here
# otherwise it will give an error on np.issubdtype(dtype, np.integer)
continue
elif str(dtype) in {
"Int8",
"Int16",
"Int32",
"Int64",
"UInt8",
"UInt16",
"UInt32",
"UInt64",
    }:  # nullable integer datatypes (since 0.24.0)
# https://pandas.pydata.org/pandas-docs/version/0.25/whatsnew/v0.24.0.html#optional-integer-na-support
col = df[col_name].astype(object)
df[col_name] = col.where(col.notnull(), None)
elif np.issubdtype(dtype, np.integer):
# convert integers to objects; np.int is not JSON serializable
df[col_name] = df[col_name].astype(object)
elif np.issubdtype(dtype, np.floating):
# For floats, convert to Python float: np.float is not JSON serializable
# Also convert NaN/inf values to null, as they are not JSON serializable
col = df[col_name]
bad_values = col.isnull() | np.isinf(col)
df[col_name] = col.astype(object).where(~bad_values, None)
elif dtype == object:
# Convert numpy arrays saved as objects to lists
# Arrays are not JSON serializable
col = df[col_name].apply(to_list_if_array, convert_dtype=False)
df[col_name] = col.where(col.notnull(), None)
return df
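# Example of the float and datetime handling above (illustrative only):
#
#     df = pd.DataFrame({'x': [1.0, np.nan],
#                        't': pd.to_datetime(['2020-01-01'])})
#     sanitize_dataframe(df)['x'].tolist()  -> [1.0, None]
#     sanitize_dataframe(df)['t'][0]        -> '2020-01-01T00:00:00'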
def parse_shorthand(
shorthand,
data=None,
parse_aggregates=True,
parse_window_ops=False,
parse_timeunits=True,
parse_types=True,
):
"""General tool to parse shorthand values
These are of the form:
- "col_name"
- "col_name:O"
- "average(col_name)"
- "average(col_name):O"
Optionally, a dataframe may be supplied, from which the type
will be inferred if not specified in the shorthand.
Parameters
----------
shorthand : dict or string
The shorthand representation to be parsed
data : DataFrame, optional
If specified and of type DataFrame, then use these values to infer the
column type if not provided by the shorthand.
parse_aggregates : boolean
If True (default), then parse aggregate functions within the shorthand.
parse_window_ops : boolean
        If True, then parse window operations within the shorthand (default: False)
parse_timeunits : boolean
If True (default), then parse timeUnits from within the shorthand
parse_types : boolean
If True (default), then parse typecodes within the shorthand
Returns
-------
attrs : dict
a dictionary of attributes extracted from the shorthand
Examples
--------
>>> data = pd.DataFrame({'foo': ['A', 'B', 'A', 'B'],
... 'bar': [1, 2, 3, 4]})
>>> parse_shorthand('name') == {'field': 'name'}
True
>>> parse_shorthand('name:Q') == {'field': 'name', 'type': 'quantitative'}
True
>>> parse_shorthand('average(col)') == {'aggregate': 'average', 'field': 'col'}
True
>>> parse_shorthand('foo:O') == {'field': 'foo', 'type': 'ordinal'}
True
>>> parse_shorthand('min(foo):Q') == {'aggregate': 'min', 'field': 'foo', 'type': 'quantitative'}
True
>>> parse_shorthand('month(col)') == {'field': 'col', 'timeUnit': 'month', 'type': 'temporal'}
True
>>> parse_shorthand('year(col):O') == {'field': 'col', 'timeUnit': 'year', 'type': 'ordinal'}
True
>>> parse_shorthand('foo', data) == {'field': 'foo', 'type': 'nominal'}
True
>>> parse_shorthand('bar', data) == {'field': 'bar', 'type': 'quantitative'}
True
>>> parse_shorthand('bar:O', data) == {'field': 'bar', 'type': 'ordinal'}
True
>>> parse_shorthand('sum(bar)', data) == {'aggregate': 'sum', 'field': 'bar', 'type': 'quantitative'}
True
>>> parse_shorthand('count()', data) == {'aggregate': 'count', 'type': 'quantitative'}
True
"""
if not shorthand:
return {}
valid_typecodes = list(TYPECODE_MAP) + list(INV_TYPECODE_MAP)
units = dict(
field="(?P<field>.*)",
type="(?P<type>{})".format("|".join(valid_typecodes)),
agg_count="(?P<aggregate>count)",
op_count="(?P<op>count)",
aggregate="(?P<aggregate>{})".format("|".join(AGGREGATES)),
window_op="(?P<op>{})".format("|".join(AGGREGATES + WINDOW_AGGREGATES)),
timeUnit="(?P<timeUnit>{})".format("|".join(TIMEUNITS)),
)
patterns = []
if parse_aggregates:
patterns.extend([r"{agg_count}\(\)"])
patterns.extend([r"{aggregate}\({field}\)"])
if parse_window_ops:
patterns.extend([r"{op_count}\(\)"])
patterns.extend([r"{window_op}\({field}\)"])
if parse_timeunits:
patterns.extend([r"{timeUnit}\({field}\)"])
patterns.extend([r"{field}"])
if parse_types:
patterns = list(itertools.chain(*((p + ":{type}", p) for p in patterns)))
regexps = (
re.compile(r"\A" + p.format(**units) + r"\Z", re.DOTALL) for p in patterns
)
# find matches depending on valid fields passed
if isinstance(shorthand, dict):
attrs = shorthand
else:
attrs = next(
exp.match(shorthand).groupdict() for exp in regexps if exp.match(shorthand)
)
# Handle short form of the type expression
if "type" in attrs:
attrs["type"] = INV_TYPECODE_MAP.get(attrs["type"], attrs["type"])
# counts are quantitative by default
if attrs == {"aggregate": "count"}:
attrs["type"] = "quantitative"
# times are temporal by default
if "timeUnit" in attrs and "type" not in attrs:
attrs["type"] = "temporal"
# if data is specified and type is not, infer type from data
if isinstance(data, pd.DataFrame) and "type" not in attrs:
if "field" in attrs and attrs["field"] in data.columns:
attrs["type"] = infer_vegalite_type(data[attrs["field"]])
return attrs
def use_signature(Obj):
"""Apply call signature and documentation of Obj to the decorated method"""
def decorate(f):
# call-signature of f is exposed via __wrapped__.
# we want it to mimic Obj.__init__
f.__wrapped__ = Obj.__init__
f._uses_signature = Obj
# Supplement the docstring of f with information from Obj
if Obj.__doc__:
doclines = Obj.__doc__.splitlines()
if f.__doc__:
doc = f.__doc__ + "\n".join(doclines[1:])
else:
doc = "\n".join(doclines)
try:
f.__doc__ = doc
except AttributeError:
# __doc__ is not modifiable for classes in Python < 3.3
pass
return f
return decorate
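# Sketch of typical use (the wrapped class here is illustrative; inside
# altair the decorated body forwards its arguments to the wrapped class):
#
#     @use_signature(core.MarkDef)
#     def mark_bar(self, **kwargs):
#         ...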
def update_subtraits(obj, attrs, **kwargs):
"""Recursively update sub-traits without overwriting other traits"""
# TODO: infer keywords from args
if not kwargs:
return obj
# obj can be a SchemaBase object or a dict
if obj is Undefined:
obj = dct = {}
elif isinstance(obj, SchemaBase):
dct = obj._kwds
else:
dct = obj
if isinstance(attrs, str):
attrs = (attrs,)
if len(attrs) == 0:
dct.update(kwargs)
else:
attr = attrs[0]
trait = dct.get(attr, Undefined)
if trait is Undefined:
trait = dct[attr] = {}
dct[attr] = update_subtraits(trait, attrs[1:], **kwargs)
return obj
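# Worked example (traced through the function above):
#
#     update_subtraits({}, ('encoding', 'x'), field='foo')
#     -> {'encoding': {'x': {'field': 'foo'}}}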
def update_nested(original, update, copy=False):
"""Update nested dictionaries
Parameters
----------
original : dict
the original (nested) dictionary, which will be updated in-place
update : dict
the nested dictionary of updates
copy : bool, default False
if True, then copy the original dictionary rather than modifying it
Returns
-------
original : dict
a reference to the (modified) original dict
Examples
--------
>>> original = {'x': {'b': 2, 'c': 4}}
>>> update = {'x': {'b': 5, 'd': 6}, 'y': 40}
>>> update_nested(original, update) # doctest: +SKIP
{'x': {'b': 5, 'c': 4, 'd': 6}, 'y': 40}
>>> original # doctest: +SKIP
{'x': {'b': 5, 'c': 4, 'd': 6}, 'y': 40}
"""
if copy:
original = deepcopy(original)
for key, val in update.items():
if isinstance(val, Mapping):
orig_val = original.get(key, {})
if isinstance(orig_val, Mapping):
original[key] = update_nested(orig_val, val)
else:
original[key] = val
else:
original[key] = val
return original
def display_traceback(in_ipython=True):
exc_info = sys.exc_info()
if in_ipython:
from IPython.core.getipython import get_ipython
ip = get_ipython()
else:
ip = None
if ip is not None:
ip.showtraceback(exc_info)
else:
traceback.print_exception(*exc_info)
def infer_encoding_types(args, kwargs, channels):
"""Infer typed keyword arguments for args and kwargs
Parameters
----------
args : tuple
List of function args
kwargs : dict
Dict of function kwargs
channels : module
The module containing all altair encoding channel classes.
Returns
-------
kwargs : dict
All args and kwargs in a single dict, with keys and types
based on the channels mapping.
"""
# Construct a dictionary of channel type to encoding name
# TODO: cache this somehow?
channel_objs = (getattr(channels, name) for name in dir(channels))
channel_objs = (
c for c in channel_objs if isinstance(c, type) and issubclass(c, SchemaBase)
)
channel_to_name = {c: c._encoding_name for c in channel_objs}
name_to_channel = {}
for chan, name in channel_to_name.items():
chans = name_to_channel.setdefault(name, {})
key = "value" if chan.__name__.endswith("Value") else "field"
chans[key] = chan
# First use the mapping to convert args to kwargs based on their types.
for arg in args:
if isinstance(arg, (list, tuple)) and len(arg) > 0:
type_ = type(arg[0])
else:
type_ = type(arg)
encoding = channel_to_name.get(type_, None)
if encoding is None:
raise NotImplementedError("positional of type {}" "".format(type_))
if encoding in kwargs:
raise ValueError("encoding {} specified twice.".format(encoding))
kwargs[encoding] = arg
def _wrap_in_channel_class(obj, encoding):
try:
condition = obj["condition"]
except (KeyError, TypeError):
pass
else:
if condition is not Undefined:
obj = obj.copy()
obj["condition"] = _wrap_in_channel_class(condition, encoding)
if isinstance(obj, SchemaBase):
return obj
if isinstance(obj, str):
obj = {"shorthand": obj}
if isinstance(obj, (list, tuple)):
return [_wrap_in_channel_class(subobj, encoding) for subobj in obj]
if encoding not in name_to_channel:
warnings.warn("Unrecognized encoding channel '{}'".format(encoding))
return obj
classes = name_to_channel[encoding]
cls = classes["value"] if "value" in obj else classes["field"]
try:
# Don't force validation here; some objects won't be valid until
# they're created in the context of a chart.
return cls.from_dict(obj, validate=False)
except jsonschema.ValidationError:
# our attempts at finding the correct class have failed
return obj
return {
encoding: _wrap_in_channel_class(obj, encoding)
for encoding, obj in kwargs.items()
}
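# Sketch of a call as made from ``Chart.encode()``; ``channels`` is the
# module of channel classes described in the docstring (its exact import
# path depends on the altair version), and the values are illustrative:
#
#     infer_encoding_types((alt.X('a'),), {'y': 'b:Q'}, channels)
#     -> {'x': X('a'), 'y': Y(shorthand='b:Q')}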
| bsd-3-clause | 6,587,024,043,765,292,000 | 29.269565 | 114 | 0.584123 | false | 3.893736 | false | false | false |
seanbell/opensurfaces | server/photos/migrations/0011_auto__del_field_flickruser_displayname__del_field_flickruser_subname__.py | 1 | 29209 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'FlickrUser.displayname'
db.delete_column(u'photos_flickruser', 'displayname')
# Deleting field 'FlickrUser.subname'
db.delete_column(u'photos_flickruser', 'subname')
# Adding field 'FlickrUser.display_name'
db.add_column(u'photos_flickruser', 'display_name',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'FlickrUser.sub_name'
db.add_column(u'photos_flickruser', 'sub_name',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'FlickrUser.displayname'
db.add_column(u'photos_flickruser', 'displayname',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'FlickrUser.subname'
db.add_column(u'photos_flickruser', 'subname',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Deleting field 'FlickrUser.display_name'
db.delete_column(u'photos_flickruser', 'display_name')
# Deleting field 'FlickrUser.sub_name'
db.delete_column(u'photos_flickruser', 'sub_name')
models = {
u'accounts.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'always_approve': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'blocked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'blocked_reason': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'exclude_from_aggregation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mturk_worker_id': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['auth.User']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'licenses.license': {
'Meta': {'object_name': 'License'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'cc_attribution': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cc_no_deriv': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cc_noncommercial': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cc_share_alike': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'creative_commons': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'publishable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'})
},
u'mturk.experiment': {
'Meta': {'ordering': "['slug', 'variant']", 'unique_together': "(('slug', 'variant'),)", 'object_name': 'Experiment'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'completed_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'cubam_dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'examples_group_attr': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'has_tutorial': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'module': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'new_hit_settings': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'experiments'", 'null': 'True', 'to': u"orm['mturk.ExperimentSettings']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'template_dir': ('django.db.models.fields.CharField', [], {'default': "'mturk/experiments'", 'max_length': '255'}),
'test_contents_per_assignment': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'variant': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'mturk.experimentsettings': {
'Meta': {'object_name': 'ExperimentSettings'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'auto_add_hits': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_approval_delay': ('django.db.models.fields.IntegerField', [], {'default': '2592000'}),
'content_filter': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'experiment_settings_in'", 'to': u"orm['contenttypes.ContentType']"}),
'contents_per_hit': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '1800'}),
'feedback_bonus': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'frame_height': ('django.db.models.fields.IntegerField', [], {'default': '800'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'lifetime': ('django.db.models.fields.IntegerField', [], {'default': '2678400'}),
'max_active_hits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'max_total_hits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'min_output_consensus': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'num_outputs_max': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'out_content_attr': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
'out_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'experiment_settings_out'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'out_count_ratio': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'qualifications': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'requirements': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'reward': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '4'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'mturk.experimenttestcontent': {
'Meta': {'ordering': "['-id']", 'object_name': 'ExperimentTestContent'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_contents'", 'to': u"orm['mturk.Experiment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'priority': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'})
},
u'mturk.mtassignment': {
'Meta': {'object_name': 'MtAssignment'},
'accept_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'action_log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approval_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approve_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'auto_approval_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'bonus': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'bonus_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'deadline': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'feedback': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'feedback_bonus_given': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_feedback': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': u"orm['mturk.MtHit']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'primary_key': 'True'}),
'manually_rejected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'num_test_contents': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_test_correct': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_test_incorrect': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'partially_completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'post_meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'reject_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'rejection_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'screen_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'screen_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'submission_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'submit_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'test_contents': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'assignments'", 'symmetrical': 'False', 'to': u"orm['mturk.ExperimentTestContent']"}),
'time_active_ms': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_load_ms': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_ms': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_agent': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'wage': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']", 'null': 'True', 'blank': 'True'})
},
u'mturk.mthit': {
'Meta': {'object_name': 'MtHit'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'all_submitted_assignments': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'any_submitted_assignments': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'compatible_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hit_status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'hit_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hits'", 'to': u"orm['mturk.MtHitType']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'primary_key': 'True'}),
'incompatible_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lifetime': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_assignments': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'num_assignments_available': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_assignments_completed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_assignments_pending': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_contents': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'out_count_ratio': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'review_status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'sandbox': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'mturk.mthittype': {
'Meta': {'object_name': 'MtHitType'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'auto_approval_delay': ('django.db.models.fields.IntegerField', [], {'default': '2592000'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '3600'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hit_types'", 'to': u"orm['mturk.Experiment']"}),
'experiment_settings': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hit_types'", 'to': u"orm['mturk.ExperimentSettings']"}),
'external_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'feedback_bonus': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'frame_height': ('django.db.models.fields.IntegerField', [], {'default': '800'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'reward': ('django.db.models.fields.DecimalField', [], {'default': "'0.01'", 'max_digits': '8', 'decimal_places': '4'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'photos.flickruser': {
'Meta': {'ordering': "['-id']", 'object_name': 'FlickrUser'},
'blacklisted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'family_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'given_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sub_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '127'}),
'website_name': ('django.db.models.fields.CharField', [], {'max_length': '1023', 'blank': 'True'}),
'website_url': ('django.db.models.fields.URLField', [], {'max_length': '1023', 'blank': 'True'})
},
u'photos.photo': {
'Meta': {'ordering': "['aspect_ratio', '-id']", 'object_name': 'Photo'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'aspect_ratio': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'exif': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'flickr_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'flickr_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['photos.FlickrUser']"}),
'focal_y': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'fov': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_orig': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'inappropriate': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['licenses.License']"}),
'light_stack': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['photos.PhotoLightStack']"}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'median_intrinsic_error': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'nonperspective': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'num_intrinsic_comparisons': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'num_intrinsic_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'num_shapes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'num_vertices': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'orig_height': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'orig_width': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'rotated': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'scene_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['photos.PhotoSceneCategory']"}),
'scene_category_correct': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'scene_category_correct_method': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'scene_category_correct_score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'stylized': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'synthetic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']"}),
'vanishing_length': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'vanishing_lines': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'vanishing_points': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'whitebalanced': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'whitebalanced_score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'photos.photolightstack': {
'Meta': {'ordering': "['-id']", 'object_name': 'PhotoLightStack'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'photos.photoscenecategory': {
'Meta': {'ordering': "['name']", 'object_name': 'PhotoSceneCategory'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '127'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['photos.PhotoSceneCategory']", 'null': 'True', 'blank': 'True'})
},
u'photos.photoscenequalitylabel': {
'Meta': {'ordering': "['photo', '-time_ms']", 'object_name': 'PhotoSceneQualityLabel'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'correct': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mturk_assignment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['mturk.MtAssignment']"}),
'photo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scene_qualities'", 'to': u"orm['photos.Photo']"}),
'quality_method': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'reward': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'sandbox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'time_active_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'time_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']"})
},
u'photos.photowhitebalancelabel': {
'Meta': {'ordering': "['photo', '-time_ms']", 'object_name': 'PhotoWhitebalanceLabel'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'chroma_median': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mturk_assignment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['mturk.MtAssignment']"}),
'num_points': ('django.db.models.fields.IntegerField', [], {}),
'photo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'whitebalances'", 'to': u"orm['photos.Photo']"}),
'points': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'quality_method': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'reward': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'sandbox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'time_active_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'time_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']"}),
'whitebalanced': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
}
}
complete_apps = ['photos'] | mit | 2,888,323,342,558,164,000 | 86.717718 | 209 | 0.561471 | false | 3.642474 | true | false | false |
sacharya/nova | nova/api/openstack/compute/schemas/v3/flavor_access_schema.py | 1 | 1776 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
add_tenant_access = {
'type': 'object',
'properties': {
'add_tenant_access': {
'type': 'object',
'properties': {
'tenant_id': {
# defined from project_id in instance_type_projects table
'type': 'string', 'minLength': 1, 'maxLength': 255,
},
},
'required': ['tenant_id'],
'additionalProperties': False,
},
},
'required': ['add_tenant_access'],
'additionalProperties': False,
}
remove_tenant_access = {
'type': 'object',
'properties': {
'remove_tenant_access': {
'type': 'object',
'properties': {
'tenant_id': {
# defined from project_id in instance_type_projects table
'type': 'string', 'minLength': 1, 'maxLength': 255,
},
},
'required': ['tenant_id'],
'additionalProperties': False,
},
},
'required': ['remove_tenant_access'],
'additionalProperties': False,
}
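# Example request bodies that validate against the schemas above
# (illustrative values, not from the original source):
#   {"add_tenant_access": {"tenant_id": "6f70656e737461636b"}}
#   {"remove_tenant_access": {"tenant_id": "6f70656e737461636b"}}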
| apache-2.0 | -7,900,809,347,432,931,000 | 31.888889 | 78 | 0.560248 | false | 4.238663 | false | false | false |
msg/g2ools | nord/g2/file.py | 1 | 33799 | #!/usr/bin/env python2
#
# Copyright (c) 2006,2007 Matt Gerassimoff
#
# This file is part of g2ools.
#
# g2ools is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# g2ools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with g2ools; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import string, sys
from struct import pack, unpack
import nord.g2.modules
from nord import printf
from nord.module import Module
from nord.file import hexdump, binhexdump
from nord.file import Patch, Performance, Note, Cable, Knob, Ctrl, MorphMap
from nord.g2 import modules
from nord.g2.crc import crc
from nord.g2.bits import setbits, getbits, BitStream
section_debug = 0 # outputs section debug
title_section = 0 # replace end of section with section title
NVARIATIONS = 9 # 1-8, init
NMORPHS = 8 # 8 morphs
NKNOBS = 120 # 120 knob settings
NMORPHMAPS = 25 # max morphmaps per variation
FX, VOICE, SETTINGS = 0, 1, 2
class G2Error(Exception):
'''G2Error - exception for throwing an unrecoverable error.'''
def __init__(self, value):
Exception.__init__(self)
self.value = value
def __str__(self):
return repr(self.value)
def read_string(bitstream, l, pad=False):
read_str = bitstream.read_str
if pad == True:
return read_str(l).strip('\0')
else:
s = bytearray(l)
for i in xrange(l):
s[i] = read_str(1)
if s[i] == 0:
return str(s[:i])
return str(s[:l])
def format_string(s, l, pad=False):
if len(s) < l:
s = s + '\0'
if pad == True:
s = s.ljust(l, '\0')
return s[:l] # in case the string is longer
def write_string(bitstream, s, l, pad=False):
bitstream.write_str(format_string(s, l, pad))
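# Illustrative behaviour of the string helpers above:
#   format_string('osc', 7, pad=True) -> 'osc\0\0\0\0' (null-padded to size)
#   format_string('oscillator', 7)    -> 'oscilla'     (truncated to size)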
def get_patch_area(patch, area):
return [patch.fx, patch.voice][area]
class Section(object):
'''Section abstract class that represents a section of .pch2 file.
  all section objects have parse() and format() methods.
'''
  default = [0] * (2 << 10) # 2k default (buffers allocated below are 64k)
def __init__(self, **kw):
self.__dict__ = kw
self.data = bytearray(64<<10)
class SectionManager(object):
def add(self, class_):
self.__dict__[class_.type] = class_
def get(self, type, default=None):
return self.__dict__.get(type, default)
section_manager = SectionManager()
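# Registry of Section subclasses keyed by their 'type' byte: each subclass
# below calls section_manager.add(Class) after its definition, and
# Pch2File.parse_patch() dispatches on the type byte read from the file,
# e.g. section_manager.get(0x4a) -> ModuleList.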
class Description(object):
'''Description class for patch/performance description.'''
pass
class PatchDescription(Section):
'''PatchDescription Section subclass'''
type = 0x21
description_attrs = [
['reserved', 5], ['voices', 5], ['height', 14], ['unk2', 3],
['red', 1], ['blue', 1], ['yellow', 1], ['orange', 1],
['green', 1], ['purple', 1], ['white', 1],
['monopoly', 2], ['variation', 8], ['category', 8],
]
def parse_description(self, description, bitstream):
for name, nbits in self.description_attrs:
setattr(description, name, bitstream.read_bits(nbits))
def format_description(self, description, bitstream):
for name, nbits in self.description_attrs:
bitstream.write_bits(nbits, getattr(description, name))
bitstream.write_bits(8, 0)
def parse(self, patch, data):
description = patch.description = Description() # G2Patch
bitstream = BitStream(data, 7*8)
self.parse_description(patch.description, bitstream)
def format(self, patch, data):
bitstream = BitStream(data, 7*8)
self.format_description(patch.description, bitstream)
return bitstream.tell_bit()
section_manager.add(PatchDescription)
class ModuleList(Section):
'''ModuleList Section subclass'''
type = 0x4a
  # NOTE: module.leds seems to be related to a group of modules. I cannot
  #       see the relationship, but I have a list of modules that require
  #       this to be set. This will probably be handled without this
  #       property, by adding it to the module types that set it.
  # Make sure the leds bit is set for specific modules
  #       - some earlier generated .pch2 files were different;
  #       these were empirically determined.
# NOTE2: these should be in nord/g2/modules.py
ledtypes = [
3, 4, 17, 38, 42, 48, 50, 57, 59, 60, 68, 69,
71, 75, 76, 81, 82, 83, 85,
105, 108, 112, 115, 141, 142, 143, 147, 148, 149, 150,
156, 157, 170, 171, 178, 188, 189, 198, 199, 208,
]
def fixleds(self, module):
module.leds = 0
#if module.type.id in ModuleList.ledtypes:
# module.leds = 1
#else:
# module.leds = 0
module_params = [
['index', 8 ], ['horiz', 7], ['vert', 7], ['color', 8],
['uprate', 1 ], ['leds', 1], ['reserved', 6],
]
def parse_area(self, area, bitstream):
read_bits = bitstream.read_bits
nmodules = read_bits(8)
area.modules = [ None ] * nmodules
for i in xrange(nmodules):
id = read_bits(8)
module = Module(modules.fromid(id), area)
area.modules[i] = module
for attr, nbits in self.module_params:
setattr(module, attr, read_bits(nbits))
nmodes = read_bits(4)
self.fixleds(module)
# mode data for module (if there is any)
for mode in module.modes:
mode.value = read_bits(6)
# add missing mode data. some .pch2 versions didn't contain
# all the modes in version 23 BUILD 266
module_type = module.type
if len(module.modes) < len(module_type.modes):
for mode in xrange(len(module.modes), len(module_type.modes)):
module.modes[mode].value = module_type.modes[mode].type.default
def format_area(self, area, bitstream):
write_bits = bitstream.write_bits
write_bits(2, self.area)
write_bits(8, len(area.modules))
for module in area.modules:
write_bits(8, module.type.id)
module.reserved = 0 # just in case is wasn't set
for attr, nbits in self.module_params:
bitstream.write_bits(nbits, getattr(module, attr))
self.fixleds(module)
write_bits(4, len(module.modes))
for mode in module.modes:
write_bits(6, mode.value)
def parse(self, patch, data):
bitstream = BitStream(data)
self.area = bitstream.read_bits(2)
area = get_patch_area(patch, self.area)
self.parse_area(area, bitstream)
def format(self, patch, data):
bitstream = BitStream(data)
area = get_patch_area(patch, self.area)
self.format_area(area, bitstream)
return bitstream.tell_bit()
section_manager.add(ModuleList)
class CurrentNote(Section):
'''CurrentNote Section subclass'''
type = 0x69
def parse(self, patch, data):
bitstream = BitStream(data)
lastnote = patch.lastnote = Note() # G2Patch
values = bitstream.read_bitsa([7] * 3)
lastnote.note, lastnote.attack, lastnote.release = values
nnotes = bitstream.read_bits(5) + 1
notes = patch.notes = [ Note() for i in xrange(nnotes) ] # G2Patch
for note in notes:
note.note, note.attack, note.release = bitstream.read_bitsa([7, 7, 7])
def format(self, patch, data):
bitstream = BitStream(data)
if len(patch.notes):
lastnote = patch.lastnote
if not lastnote:
values = [ 64, 0, 0 ]
else:
values = [ lastnote.note, lastnote.attack, lastnote.release ]
bitstream.write_bitsa([7, 7, 7], values)
bitstream.write_bits(5, len(patch.notes)-1)
for note in patch.notes:
bitstream.write_bitsa([7, 7, 7], [note.note, note.attack, note.release])
else:
bitstream.write_bits(24, 0x800000)
bitstream.write_bits(24, 0x200000)
return bitstream.tell_bit()
def invalid_cable(smodule, sconn, direction, dmodule, dconn):
  '''invalid_cable(smodule, sconn, direction, dmodule, dconn) -> int
  return 0 if the connection is valid, otherwise a nonzero error code.
  '''
if direction == 1: # verify from
if sconn >= len(smodule.outputs): # out -> in
return 1
elif sconn >= len(smodule.inputs): # in -> in
return 2
if dconn >= len(dmodule.inputs): # verify to
return 3
return 0 # if we got here, everything's cool.
section_manager.add(CurrentNote)
class CableList(Section):
'''CableList Section subclass'''
type = 0x52
def parse_area(self, area, bitstream):
_, ncables = bitstream.read_bitsa([6, 16])
area.cables = [ None ] * ncables
for i in xrange(ncables):
cable = Cable(area)
cable.color, source, src_conn, direction, dest, dest_conn = \
bitstream.read_bitsa([3, 8, 6, 1, 8, 6])
src_module = area.find_module(source)
dest_module = area.find_module(dest)
if invalid_cable(src_module, src_conn, direction, dest_module, dest_conn):
printf('Invalid cable %d: "%s"(%d,%d) -%d-> "%s"(%d,%d)\n',
i, src_module.type.shortnm, src_module.index, src_conn, direction,
dest_module.type.shortnm, dest_module.index, dest_conn)
continue
if direction == 1:
cable.source = src_module.outputs[src_conn]
else:
cable.source = src_module.inputs[src_conn]
cable.dest = dest_module.inputs[dest_conn]
area.cables[i] = cable
cable.source.cables.append(cable)
cable.dest.cables.append(cable)
area.netlist.add(cable.source, cable.dest)
def format_area(self, area, bitstream):
bitstream.write_bitsa([2, 6, 16], [area.index, 0, len(area.cables)])
for cable in area.cables:
bitstream.write_bitsa([3, 8, 6, 1, 8, 6],
[ cable.color, cable.source.module.index, cable.source.index,
cable.source.direction, cable.dest.module.index, cable.dest.index])
def parse(self, patch, data):
bitstream = BitStream(data)
self.area = bitstream.read_bits(2)
area = get_patch_area(patch, self.area)
self.parse_area(area, bitstream)
def format(self, patch, data):
bitstream = BitStream(data)
area = get_patch_area(patch, self.area)
self.format_area(area, bitstream)
return bitstream.tell_bit()
section_manager.add(CableList)
class SettingsArea(object):
def __init__(self):
self.index = SETTINGS
self.name = 'settings'
class ParameterModule(object):
def __init__(self, area, index):
self.area = area
self.index = index
class Parameter(object):
'''Parameter class for module parameters/settings.'''
def __init__(self, area, mod_index, index, default=0, name='', module=None):
self.area = area
self.index = index
self.variations = [default]*NVARIATIONS
self.name = name
self.module = ParameterModule(area, mod_index)
self.knob = None
self.mmap = None
self.ctrl = None
class Morph(object):
'''Morph class for morph settings.'''
def __init__(self, area, index):
self.name = 'morph%d' % (index+1)
self.maps = [[] for variation in xrange(NVARIATIONS) ]
self.index = index
self.area = area
# morph "module" has 2 parameters dial and mode
self.dial = Parameter(area, 1, index, 0, name='dial')
self.mode = Parameter(area, 1, index+NMORPHS, 1, name='mode')
class Settings(object):
'''Settings class for patch settings.'''
groups = [
[ 'patchvol', 'activemuted' ],
[ 'glide', 'glidetime' ],
[ 'bend', 'semi' ],
[ 'vibrato', 'cents', 'rate' ],
[ 'arpeggiator', 'arptime', 'arptype', 'octaves' ],
[ 'octaveshift', 'sustain' ],
]
def __init__(self):
self.area = SettingsArea()
for i, group in enumerate(self.groups, 2):
for j, name in enumerate(group):
setattr(self, name, Parameter(self.area, i, j, name=name))
self.morphs = [ Morph(self.area, morph+1) for morph in xrange(NMORPHS) ]
self.morphmaps = [ [] for variation in xrange(NVARIATIONS) ]
class Parameters(Section):
'''Parameters Section subclass'''
type = 0x4d
def parse_settings(self, settings, bitstream):
read_bits = bitstream.read_bits
read_bitsa = bitstream.read_bitsa
nsections, nvariations, section, nentries = read_bitsa([8, 8, 8, 7])
# nentries: 16 parameters per variation: 8 dials, 8 modes
for i in xrange(nvariations): # usually 9
variation = read_bits(8)
for morph in settings.morphs:
dial = read_bits(7)
if variation < NVARIATIONS:
morph.dial.variations[variation] = dial
for morph in settings.morphs:
mode = read_bits(7)
if variation < NVARIATIONS:
morph.mode.variations[variation] = mode
for group in settings.groups:
section, nentries = read_bitsa([8, 7])
for i in xrange(nvariations):
variation = read_bits(8)
for entry in xrange(nentries):
value = read_bits(7)
if variation < NVARIATIONS:
getattr(settings, group[entry]).variations[variation] = value
def format_settings(self, settings, bitstream):
write_bits = bitstream.write_bits
write_bitsa = bitstream.write_bitsa
# 1 for morph--. .-- 16/var
write_bitsa([2, 8, 8, 8, 7], [SETTINGS, 7, NVARIATIONS, 1, 16])
for variation in xrange(NVARIATIONS): # morph groups
write_bits(8, variation)
for morph in settings.morphs:
write_bits(7, morph.dial.variations[variation])
for morph in settings.morphs:
write_bits(7, morph.mode.variations[variation])
section = 2 # starts at 2 (above: morph is section 1)
for group in settings.groups:
nentries = len(group)
write_bitsa([8, 7], [section, nentries])
for variation in xrange(NVARIATIONS):
write_bits(8, variation)
for entry in xrange(nentries):
value = getattr(settings, group[entry]).variations[variation]
write_bits(7, value)
section += 1
return bitstream.tell_bit()
def parse_area(self, area, bitstream):
read_bits = bitstream.read_bits
nmodules, nvariations = bitstream.read_bitsa([8, 8])
for i in xrange(nmodules):
index, nparams = bitstream.read_bitsa([8, 7])
module = area.find_module(index)
params = module.params
for i in xrange(nvariations):
variation = read_bits(8)
for param in xrange(nparams):
value = read_bits(7)
if param < len(params) and variation < NVARIATIONS:
params[param].variations[variation] = value
def format_area(self, area, bitstream):
modules = []
    for module in area.modules:
      # some modules have no 'params' attribute at all; skip those too
      if not getattr(module, 'params', None):
        continue
      modules.append(module)
    modules.sort(key=lambda m: m.index)
write_bits = bitstream.write_bits
mlen = len(modules)
bitstream.write_bitsa([2, 8], [area.index, mlen])
if mlen == 0:
write_bits(8, 0)
return bitstream.tell_bit()
write_bits(8, NVARIATIONS)
for module in modules:
write_bits(8, module.index)
params = module.params
write_bits(7, len(params))
for variation in xrange(NVARIATIONS):
write_bits(8, variation)
for param in params:
write_bits(7, param.variations[variation])
def parse(self, patch, data):
bitstream = BitStream(data)
self.area = bitstream.read_bits(2)
if self.area == SETTINGS:
patch.settings = Settings() # G2Patch
self.parse_settings(patch.settings, bitstream)
else:
area = get_patch_area(patch, self.area)
self.parse_area(area, bitstream)
def format(self, patch, data):
bitstream = BitStream(data)
if self.area == SETTINGS:
self.format_settings(patch.settings, bitstream)
else:
area = get_patch_area(patch, self.area)
self.format_area(area, bitstream)
return bitstream.tell_bit()
section_manager.add(Parameters)
def get_settings_param(patch, index, param):
if index < 2:
morph = patch.settings.morphs[param & 7]
if param < 8:
return morph.dial
else:
return morph.mode
else:
group = patch.settings.groups[index - 2]
return getattr(patch.settings, group[param])
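# 'index' here is the settings section number written by format_settings
# above: section 1 holds the 16 morph parameters (params 0-7 are dials,
# 8-15 are modes) and sections 2+ map onto Settings.groups, so e.g.
# (index=2, param=0) -> patchvol.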
class MorphParameters(Section):
'''MorphParameters Section subclass'''
type = 0x65
def parse(self, patch, data):
bitstream = BitStream(data)
read_bits = bitstream.read_bits
nvariations, nmorphs, _, _ = bitstream.read_bitsa([8, 4, 10, 10])
# variations seem to be 9 bytes with first nibble variation # from 0 ~ 8
# number of morph parameters starts at byte 7-bit 0 for 5-bits
morphs = patch.settings.morphs
morphmaps = patch.settings.morphmaps
for i in xrange(nvariations):
variation = read_bits(4)
bitstream.seek_bit(4 + (6*8) + 4, 1) # zeros
nmorphs = read_bits(8)
for j in xrange(nmorphs):
morph_map = MorphMap()
area, index, param, morph = bitstream.read_bitsa([2, 8, 7, 4])
morph_map.range = read_bits(8, 1)
module = get_patch_area(patch, area).find_module(index)
morph_map.param = module.params[param]
morph_map.variation = variation
morph_map.morph = morphs[morph-1]
morph_map.morph.maps[variation].append(morph_map)
morphmaps[variation].append(morph_map)
reserved = read_bits(4) # always 0
def format(self, patch, data):
bitstream = BitStream(data)
write_bits = bitstream.write_bits
bitstream.write_bitsa([8, 4, 10, 10], [ NVARIATIONS, NMORPHS, 0, 0])
# variations seem to be 9 bytes with first nibble variation # from 0 ~ 8
# number of morph parameters starts at byte 7-bit 0 for 5-bits
morphs = patch.settings.morphs
for variation in xrange(NVARIATIONS):
write_bits(4, variation)
bitstream.seek_bit(4 + (6 * 8) + 4, 1)
# collect all morph_maps of this variation into 1 array
morph_maps = []
for morph in morphs:
morph_maps.extend(morph.maps[variation])
def mod_param_index_cmp(a, b):
return cmp(a.param.module.index, b.param.module.index)
morph_maps.sort(mod_param_index_cmp)
write_bits(8, len(morph_maps))
for morph_map in morph_maps:
values = [
morph_map.param.module.area.index, morph_map.param.module.index,
morph_map.param.index, morph_map.morph.index, morph_map.range,
] # range is signed
bitstream.write_bitsa([2, 8, 7, 4, 8], values)
write_bits(4, 0) # always 0
bitstream.seek_bit(-4, 1) # remove last 4-bits
return bitstream.tell_bit()
section_manager.add(MorphParameters)
class KnobAssignments(Section):
'''KnobAssignments Section subclass'''
type = 0x62
def parse(self, patch, data):
bitstream = BitStream(data)
nknobs = bitstream.read_bits(16)
patch.knobs = [ Knob() for i in xrange(nknobs)] # G2Patch / G2Performance
for knob in patch.knobs:
knob.assigned = bitstream.read_bits(1)
if not knob.assigned:
continue
area, index, knob.isled, param = bitstream.read_bitsa([2, 8, 2, 7])
if type(patch) == Performance:
knob.slot = bitstream.read_bits(2)
perf = patch
patch = perf.slots[knob.slot].patch
else:
knob.slot = 0
if area == SETTINGS:
knob.param = get_settings_param(patch, index, param)
else:
module = get_patch_area(patch, area).find_module(index)
if module:
knob.param = module.params[param]
else:
knob.assigned = 0
continue
knob.param.knob = knob
def format(self, patch, data):
bitstream = BitStream(data)
bitstream.write_bits(16, NKNOBS)
for knob in patch.knobs:
bitstream.write_bits(1, knob.assigned)
if not knob.assigned:
continue
module = knob.param.module
bitstream.write_bitsa([2, 8, 2, 7],
[ module.area.index, module.index, knob.isled, knob.param.index ])
if type(patch) == Performance:
bitstream.write_bits(2, knob.slot)
return bitstream.tell_bit()
section_manager.add(KnobAssignments)
class CtrlAssignments(Section):
'''CtrlAssignments Section subclass'''
type = 0x60
def parse(self, patch, data):
bitstream = BitStream(data)
nctrls = bitstream.read_bits(7)
patch.ctrls = [ Ctrl() for i in xrange(nctrls)] # G2Patch? / G2Ctrl?
for ctrl in patch.ctrls:
ctrl.midicc, area, index, param = bitstream.read_bitsa([7, 2, 8, 7])
if area == SETTINGS:
ctrl.param = get_settings_param(patch, index, param)
else:
module = get_patch_area(patch, area).find_module(index)
ctrl.param = module.params[param]
ctrl.param.ctrl = ctrl
def format(self, patch, data):
bitstream = BitStream(data)
bitstream.write_bits(7, len(patch.ctrls))
for ctrl in patch.ctrls:
param = ctrl.param
bitstream.write_bitsa([7, 2, 8, 7], [ ctrl.midicc,
param.module.area.index, param.module.index, param.index ])
return bitstream.tell_bit()
section_manager.add(CtrlAssignments)
class Labels(Section):
'''Labels Section subclass'''
type = 0x5b
def parse_morphs(self, morphs, bitstream):
nentries, entry, length = bitstream.read_bitsa([8, 8, 8]) # 1, 1, 0x50
for morph in morphs:
index, morphlen, entry = bitstream.read_bytes(3)
morph.label = read_string(bitstream, 7, pad=True)
def format_morphs(self, morphs, bitstream):
bitstream.write_bits(2, SETTINGS)
bitstream.write_str('\1\1\x50')
s = bytearray([1, 1, 0])
for morph in morphs:
s[2] = 8 + morph.index
bitstream.write_str(str(s))
write_string(bitstream, morph.label, 7, pad=True)
return bitstream.tell_bit()
def parse_module(self, module, bitstream):
modlen = bitstream.read_bits(8)
if module.type.id == 121: # SeqNote
# extra editor parameters
# [0, 1, mag, 0, 1, octave]
# mag: 0=3-octaves, 1=2-octaves, 2=1-octave
# octave: 0-9 (c0-c9)
module.editmodes = bitstream.read_bytes(modlen)
else:
while modlen > 0:
stri, paramlen, parami = bitstream.read_bitsa([8, 8, 8])
modlen -= 3
param = module.params[parami]
paramlen -= 1 # decrease because we got param index
if paramlen:
param.labels = [ read_string(bitstream, 7, pad=True)
for i in xrange(paramlen / 7) ]
modlen -= paramlen
else:
param.labels = ['']
if section_debug:
printf('%d %s %d %d %s\n', module.index, module.type.shortnm,
paramlen, parami, param.labels)
def parse_area(self, area, bitstream):
read_bits = bitstream.read_bits
nmodules = read_bits(8)
for i in xrange(nmodules):
index = read_bits(8)
module = area.find_module(index)
self.parse_module(module, bitstream)
def format_module(self, module, bitstream):
s = ''
if module.type.id == 121: # SeqNote
s += str(bytearray(module.editmodes))
else:
# build up the labels and then write them
for i, param in enumerate(module.params):
if not hasattr(param, 'labels'):
continue
if section_debug:
printf('%d %s %d %d %s\n', module.index, module.type.shortnm,
7*len(param.labels), i, param.labels)
labels = [format_string(lbl, 7, pad=True) for lbl in param.labels]
ps = chr(i) + ''.join(labels)
s += chr(1)+chr(len(ps))+ps
bitstream.write_bitsa([8, 8], [module.index, len(s)])
bitstream.write_str(s)
def format_area(self, area, bitstream):
# collect all modules with parameters that have labels
modules = []
for module in area.modules:
if hasattr(module, 'params'):
for param in module.params:
if hasattr(param, 'labels'):
modules.append(module)
break
elif hasattr(module, 'editmodes'):
modules.append(module)
bitstream.write_bitsa([2, 8], [area.index, len(modules)])
for module in modules:
self.format_module(module, bitstream)
return bitstream.tell_bit()
def parse(self, patch, data):
bitstream = BitStream(data)
self.area = bitstream.read_bits(2)
if self.area == SETTINGS:
self.parse_morphs(patch.settings.morphs, bitstream)
else:
area = get_patch_area(patch, self.area)
self.parse_area(area, bitstream)
def format(self, patch, data):
bitstream = BitStream(data)
if self.area == SETTINGS:
return self.format_morphs(patch.settings.morphs, bitstream)
else:
area = get_patch_area(patch, self.area)
return self.format_area(area, bitstream)
section_manager.add(Labels)
class ModuleNames(Section):
'''ModuleNames Section subclass'''
type = 0x5a
def parse_area(self, area, bitstream):
areai, nmodules = bitstream.read_bitsa([6, 8])
for i in xrange(nmodules):
module = area.find_module(bitstream.read_bits(8))
module.name = read_string(bitstream, 16)
def parse(self, patch, data):
bitstream = BitStream(data)
self.area = bitstream.read_bits(2)
area = get_patch_area(patch, self.area)
self.parse_area(area, bitstream)
def format_area(self, area, bitstream):
bitstream.write_bitsa([2, 6, 8], [area.index, self.area, len(area.modules)])
for module in area.modules:
bitstream.write_bits(8, module.index)
write_string(bitstream, module.name, 16)
def format(self, patch, data):
bitstream = BitStream(data)
area = get_patch_area(patch, self.area)
self.format_area(area, bitstream)
return bitstream.tell_bit()
section_manager.add(ModuleNames)
class TextPad(Section):
'''TextPad Section subclass'''
type = 0x6f
def parse(self, patch, data):
patch.textpad = data
def format(self, patch, data):
bitstream = BitStream(data)
bitstream.write_str(patch.textpad)
return bitstream.tell_bit()
section_manager.add(TextPad)
class PerformanceDescription(Section):
'''PerformanceDescription Section subclass'''
type = 0x11
description_attrs = [
['unk1', 8],
['unk2', 4], ['focus', 2], [ 'unk3', 2 ],
['rangesel', 8], ['bpm', 8],
['split', 8], ['clock', 8], ['unk4', 8], ['unk5', 8],
]
slot_attrs = [
['active', 8], ['keyboard', 8], ['hold', 8], ['bank', 8 ], [ 'patch', 8 ],
['keylow', 8], ['keyhigh', 8], ['unk3', 8], ['unk4', 8], ['unk5', 8],
]
def parse(self, performance, data):
description = performance.description = Description() # G2Performance
bitstream = BitStream(data)
read_bits = bitstream.read_bits
for name, nbits in self.description_attrs:
value = read_bits(nbits)
setattr(description, name, value)
for slot in performance.slots:
slot.description = Description()
slot.name = read_string(bitstream, 16)
for name, nbits in self.slot_attrs:
value = read_bits(nbits)
setattr(slot.description, name, value)
def format(self, performance, data):
bitstream = BitStream(data)
write_bits = bitstream.write_bits
description = performance.description
for name, nbits in self.description_attrs:
write_bits(nbits, getattr(description, name))
for slot in performance.slots:
write_string(bitstream, slot.name, 16)
for name, nbits in self.slot_attrs:
write_bits(nbits, getattr(slot.description, name))
return bitstream.tell_bit()
section_manager.add(PerformanceDescription)
class GlobalKnobAssignments(KnobAssignments):
'''GlobalKnobAssignments Section subclasss'''
type = 0x5f
class Pch2File(object):
'''Pch2File(filename) - main reading/writing object for .pch2 files
this may become generic G2 file for .pch2 and .prf2 files
just by handling the performance sections (and perhaps others)
and parsing all 4 patches within the .prf2 file.
'''
patch_sections = [
PatchDescription(),
ModuleList(area=1),
ModuleList(area=0),
CurrentNote(),
CableList(area=1),
CableList(area=0),
Parameters(area=2),
Parameters(area=1),
Parameters(area=0),
MorphParameters(area=2),
KnobAssignments(),
CtrlAssignments(),
Labels(area=2),
Labels(area=1),
Labels(area=0),
ModuleNames(area=1),
ModuleNames(area=0),
TextPad(),
]
standard_text_header = '''Version=Nord Modular G2 File Format 1\r
Type=%s\r
Version=%d\r
Info=BUILD %d\r
\0''' # needs the null byte
binary_version = 23
build_version = 266
def __init__(self, filename=None):
self.type = 'Patch'
self.binary_revision = 0
self.patch = Patch(nord.g2.modules.fromname)
if filename:
self.read(filename)
def parse_section(self, section, patch_or_perf, memview):
type, l = unpack('>BH', memview[:3])
l += 3
if section_debug:
nm = section.__class__.__name__
printf('0x%02x %-25s len:0x%04x\n', type, nm, l)
printf('%s\n', binhexdump(memview[:l].tobytes()))
section.parse(patch_or_perf, memview[3:l])
return memview[l:]
def parse_patch(self, patch, memview):
memview = self.parse_section(PatchDescription(), patch, memview)
while len(memview) > 0:
type = ord(memview[0])
if type == PatchDescription.type: # prf2 concats patches
break
section_class = section_manager.get(type, None)
if not section_class:
break
memview = self.parse_section(section_class(), patch, memview)
return memview
def parse(self, memview):
return self.parse_patch(self.patch, memview)
def parse_header(self, memview, filename):
header2x = bytearray(memview[:2*len(self.standard_text_header)])
null = header2x.find('\0')
if null < 0:
raise G2Error('Invalid G2File "%s" missing null terminator.' % filename)
self.txthdr = str(header2x[:null])
self.binhdr = header2x[null+1], header2x[null+2]
if self.binhdr[0] != self.binary_version:
printf('Warning: %s version %d\n', filename, self.binhdr[0])
printf(' version %d supported. it may fail to load.\n',
self.binary_version)
return memview[null+1:] # include binhdr for crc
# read - this is where the rubber meets the road. it start here....
def read(self, filename):
self.filename = filename
self.data = bytearray(open(filename, 'rb').read())
memview = self.parse_header(memoryview(self.data), filename)
bytes = len(self.parse(memview[2:-2]))
ecrc = unpack('>H', self.data[-2:])[0]
acrc = crc(memview[:-2])
if ecrc != acrc:
printf('Bad CRC 0x%x 0x%x\n' % (ecrc, acrc))
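  # On-disk layout, as inferred from read()/parse_header() above:
  #   text header ... '\0' | version byte | revision byte |
  #   sections, each [type:1][length:2, big-endian][payload] | CRC-16
  # where the trailing CRC covers everything from the version byte on.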
def format_section(self, section, patch_or_perf, memview):
#print section.__class__.__name__
bits = section.format(patch_or_perf, memview[3:]) # skip type, size
bytes = (bits + 7) >> 3
# write type, size
memview[:3] = pack('>BH', section.type, bytes)
if section_debug:
nm = section.__class__.__name__
printf('0x%02x %-25s len:0x%04x\n', section.type, nm, bytes)
tbl = string.maketrans(string.ascii_lowercase, ' '*26)
nm = nm.translate(tbl).replace(' ', '')
printf('%s\n', nm)
#if title_section and len(nm) < len(f):
# f = nm+f[len(nm):]
return memview[bytes + 3:]
def format_patch(self, patch, memview):
for section in Pch2File.patch_sections:
memview = self.format_section(section, patch, memview)
return memview
def format(self, memview):
return self.format_patch(self.patch, memview)
def format_file(self):
data = bytearray(64<<10)
memview = memoryview(data)
hdr = Pch2File.standard_text_header % (self.type,
self.binary_version, self.build_version)
memview[:len(hdr)] = hdr
memview = memview[len(hdr):]
#memview = self.format_header(memview)
memview[0] = chr(self.binary_version)
memview[1] = chr(self.binary_revision)
fmemview = self.format(memview[2:])
bytes = len(memview) - len(fmemview)
    data_crc = crc(memview[:bytes])
    memview[bytes:bytes+2] = pack('>H', data_crc)
bytes = len(data) - len(fmemview) + 2
return data[:bytes]
# write - this looks a lot easier then read ehhhh???
def write(self, filename=None):
out = open(filename, 'wb')
out.write(str(self.format_file()))
class Prf2File(Pch2File):
'''Prf2File(filename) -> load a nord modular g2 performance.'''
def __init__(self, filename=None):
self.type = 'Performance'
self.binary_revision = 1
self.performance = Performance(nord.g2.modules.fromname)
self.performance_section = PerformanceDescription()
self.globalknobs_section = GlobalKnobAssignments()
if filename:
self.read(filename)
def parse(self, memview):
performance = self.performance
performance_section = self.performance_section
globalknobs_section = self.globalknobs_section
memview = self.parse_section(performance_section, performance, memview)
for slot in performance.slots:
memview = self.parse_patch(slot.patch, memview)
memview = self.parse_section(globalknobs_section, performance, memview)
return memview
def format_performance(self, memview):
performance = self.performance
    performance_section = self.performance_section
globalknobs_section = self.globalknobs_section
memview = self.format_section(performance_section, performance, memview)
for slot in performance.slots:
memview = self.format_patch(slot.patch, memview)
memview = self.format_section(globalknobs_section, performance, memview)
return memview
def format(self, memview):
    return self.format_performance(memview)
if __name__ == '__main__':
prog = sys.argv.pop(0)
filename = sys.argv.pop(0)
printf('"%s"\n', filename)
pch2 = Pch2File(filename)
#pch2.write(sys.argv.pop(0))
| gpl-2.0 | -1,795,341,959,768,795,100 | 32.564052 | 80 | 0.641912 | false | 3.29425 | false | false | false |
rossant/spiky | spiky/colors.py | 1 | 2971 | import numpy as np
# from matplotlib.colors import hsv_to_rgb, rgb_to_hsv
__all__ = ['COLORMAP', 'HIGHLIGHT_COLORMAP', 'COLORS', 'COLORS_COUNT', 'generate_colors']
# Color creation routines
# -----------------------
def hue(H):
H = H.reshape((-1, 1))
    R = np.abs(H * 6 - 3) - 1
    G = 2 - np.abs(H * 6 - 2)
    B = 2 - np.abs(H * 6 - 4)
return np.clip(np.hstack((R,G,B)), 0, 1)
def hsv_to_rgb(HSV):
a = HSV[:,1].reshape((-1, 1))
b = HSV[:,2].reshape((-1, 1))
a = np.tile(a, (1, 3))
b = np.tile(b, (1, 3))
return ((hue(HSV[:,0]) - 1) * a + 1) * b
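# quick sanity check (illustrative):
#   hsv_to_rgb(np.array([[0., 1., 1.]])) ~ [[1., 0., 0.]] i.e. pure red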
def generate_hsv(n0=20):
H = np.linspace(0., 1., n0)
i = np.arange(n0)
H = H[~((i==5) | (i==7) | (i==10) | (i==12) | (i==15) |(i==17) | (i==18) | (i==19))]
# H = H[((i==15) |(i==17) | (i==18) | (i==19))]
H = np.repeat(H, 4)
n = len(H)
S = np.ones(n)
V = np.ones(n)
# change V for half of the colors
V[1::2] = .75
# change S for half of the colors
S[2::4] = .75
S[3::4] = .75
hsv = np.zeros((n, 3))
hsv[:,0] = H
hsv[:,1] = S
hsv[:,2] = V
return hsv
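# 20 candidate hues minus the 8 skipped indices leaves 12 base hues; each
# is repeated with 4 saturation/value variants, giving 48 colors in total.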
# Global variables with all colors
# --------------------------------
# generate a list of RGB values for each color
hsv = generate_hsv()
hsv = np.clip(hsv, 0, 1)
# hsv = hsv.reshape((1, -1, 3))
COLORS = hsv_to_rgb(hsv)
COLORS = np.clip(COLORS, 0, 1)
COLORS_COUNT = len(COLORS)
step = 17 # must be coprime to 24 so the permutation below hits every index
perm = np.mod(np.arange(0, step * 24, step), 24)
perm = np.hstack((2 * perm, 2 * perm + 1))
COLORMAP = COLORS[perm, ...]
# COLORMAP = np.array(COLORS)
# Highlight color map
# rgb = COLORMAP.reshape((1, -1, 3))
# hsv = rgb_to_hsv(rgb)
# decrease saturation, increase value
hsv[:,1] -= .5
hsv[:,2] += .5
hsv = np.clip(hsv, 0, 1)
hsv = hsv[perm, ...]
HIGHLIGHT_COLORMAP = hsv_to_rgb(hsv)
def generate_colors(n=None):
if n is None:
n = COLORS_COUNT
if n < COLORS_COUNT:
return COLORS[:n]
else:
return [COLORS[i % COLORS_COUNT] for i in xrange(n)]
if __name__ == "__main__":
def hsv_rect(hsv, coords):
col = hsv_to_rgb(hsv)
col = np.clip(col, 0, 1)
rgb_rect(col, coords)
def rgb_rect(rgb, coords):
x0, y0, x1, y1 = coords
a = 2./len(rgb)
c = np.zeros((len(rgb), 4))
c[:,0] = np.linspace(x0, x1-a, len(rgb))
c[:,1] = y0
c[:,2] = np.linspace(x0+a, x1, len(rgb))
c[:,3] = y1
rectangles(coordinates=c, color=rgb)
from galry import *
figure(constrain_navigation=False)
rgb_rect(COLORMAP, (-1,0,1,1))
rgb_rect(HIGHLIGHT_COLORMAP, (-1,-1,1,0))
ylim(-1,1)
show()
# hsv = generate_hsv()
# hsv_rect(hsv, (-1,0,1,1))
# highlight
# hsv[:,1] -= 0.5 # white -> color
# hsv[:,2] += 0.5 # black -> white
# hsv[:,1] -= 0.25 # white -> color
# hsv[:,2] += 0.5 # black -> white
# hsv_rect(hsv, (-1,-1,1,0)) | bsd-3-clause | -7,645,004,919,115,126,000 | 23.97479 | 89 | 0.499159 | false | 2.534983 | false | false | false |
foxmask/django-th | django_th/management/commands/read.py | 1 | 1331 | #!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
from concurrent.futures import ThreadPoolExecutor
# django
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.models import Q
# trigger happy
from django_th.models import TriggerService
from django_th.read import Read
from logging import getLogger
# create logger
logger = getLogger('django_th.trigger_happy')
class Command(BaseCommand):
help = 'Trigger all the services and put them in cache'
def handle(self, *args, **options):
"""
get all the triggers that need to be handled
"""
from django.db import connection
connection.close()
failed_tries = settings.DJANGO_TH.get('failed_tries', 10)
trigger = TriggerService.objects.filter(
Q(provider_failed__lte=failed_tries) |
Q(consumer_failed__lte=failed_tries),
status=True,
user__is_active=True,
provider__name__status=True,
consumer__name__status=True,
).select_related('consumer__name', 'provider__name')
with ThreadPoolExecutor(max_workers=settings.DJANGO_TH.get('processes')) as executor:
r = Read()
for t in trigger:
executor.submit(r.reading, t)
| bsd-3-clause | 6,266,242,189,755,051,000 | 31.463415 | 93 | 0.6574 | false | 4.185535 | false | false | false |
jmesteve/saas3 | openerp/addons_extra/l10n_es_payment_order/wizard/converter.py | 1 | 5200 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2006 ACYSOS S.L. (http://acysos.com) All Rights Reserved.
# Pedro Tarrafeta <[email protected]>
# Copyright (c) 2008 Pablo Rocandio. All Rights Reserved.
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jordi Esteve <[email protected]>
# Copyright (c) 2009 NaN (http://www.nan-tic.com) All Rights Reserved.
# Albert Cervera i Areny <[email protected]>
# $Id$
# Refactoring: Acysos S.L. (http://www.acysos.com) 2012
# Ignacio Ibeas <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
from log import *
class payment_converter_spain(osv.osv):
_name= 'payment.converter.spain'
_auto = False
def digits_only(self, cr, uid, cc_in):
"""Discards non-numeric chars"""
cc = ""
for i in cc_in or '':
try:
int(i)
cc += i
except ValueError:
pass
return cc
def to_ascii(self, cr, uid, text):
"""Converts special characters such as those with accents to their ASCII equivalents"""
old_chars = ['á','é','í','ó','ú','à','è','ì','ò','ù','ä','ë','ï','ö','ü','â','ê','î','ô','û','Á','É','Í','Ú','Ó','À','È','Ì','Ò','Ù','Ä','Ë','Ï','Ö','Ü','Â','Ê','Î','Ô','Û','ñ','Ñ','ç','Ç','ª','º','·','\n']
new_chars = ['a','e','i','o','u','a','e','i','o','u','a','e','i','o','u','a','e','i','o','u','A','E','I','U','O','A','E','I','O','U','A','E','I','O','U','A','E','I','O','U','n','N','c','C','a','o','.',' ']
for old, new in zip(old_chars, new_chars):
text = text.replace(unicode(old,'UTF-8'), new)
return text
def convert_text(self, cr, uid, text, size, justified='left'):
if justified == 'left':
return self.to_ascii(cr,uid,text)[:size].ljust(size)
else:
return self.to_ascii(cr,uid,text)[:size].rjust(size)
def convert_float(self, cr, uid, number, size, context):
text = str( int( round( number * 100, 0 ) ) )
if len(text) > size:
raise Log(_('Error:\n\nCan not convert float number %(number).2f to fit in %(size)d characters.') % {
'number': number,
'size': size
})
return text.zfill(size)
def convert_int(self, cr, uid, number, size, context):
text = str( number )
if len(text) > size:
raise Log( _('Error:\n\nCan not convert integer number %(number)d to fit in %(size)d characters.') % {
'number': number,
'size': size
})
return text.zfill(size)
def convert(self, cr, uid, value, size, context, justified='left'):
        # OpenERP returns False for empty fields; compare with 'is' so a
        # legitimate numeric 0 is still rendered as a number
        if value is False:
            return self.convert_text(cr, uid, '', size)
elif isinstance(value, float):
return self.convert_float(cr, uid, value, size, context)
elif isinstance(value, int):
return self.convert_int(cr, uid, value, size, context)
else:
return self.convert_text(cr, uid, value, size, justified)
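    # Illustrative examples (cr/uid/context elided):
    #   convert(.., 123.45, 10, ..)   -> '0000012345' (cents, zero-padded)
    #   convert(.., u'Ibanez', 8, ..) -> 'Ibanez  '   (left-justified text)
    #   convert(.., False, 4, ..)     -> '    '       (missing value -> blanks)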
def convert_bank_account(self, cr, uid, value, partner_name, context):
if not isinstance(value, basestring):
raise Log( _('User error:\n\nThe bank account number of %s is not defined.') % partner_name )
ccc = self.digits_only(cr, uid, value)
if len(ccc) != 20:
raise Log( _('User error:\n\nThe bank account number of %s does not have 20 digits.') % partner_name )
return ccc
def bank_account_parts(self, cr, uid, value, partner_name, context):
if not isinstance(value, basestring):
raise Log( _('User error:\n\nThe bank account number of %s is not defined.') % partner_name )
ccc = self.digits_only(cr, uid, value)
if len(ccc) != 20:
raise Log( _('User error:\n\nThe bank account number of %s does not have 20 digits.') % partner_name )
return {'bank':ccc[:4],
'office': ccc[4:8],
'dc': ccc[8:10],
'account': ccc[10:]}
payment_converter_spain()
| agpl-3.0 | -6,711,844,283,154,323,000 | 44.59292 | 214 | 0.542508 | false | 3.492881 | false | false | false |
veblush/PyAuParser | sample/tutorial2.py | 1 | 1053 | import os
import sys
import pyauparser
def main():
g = pyauparser.Grammar.load_file("data/operator.egt")
# every production has a lambda handler which evaluates value from childs.
# Because LALR is a bottom-up parser, handler would be called from bottom.
h = pyauparser.ProductionHandler({
'<E> ::= <E> + <M>': lambda c: c[0] + c[2],
'<E> ::= <E> - <M>': lambda c: c[0] - c[2],
'<E> ::= <M>': lambda c: c[0],
'<M> ::= <M> * <N>': lambda c: c[0] * c[2],
'<M> ::= <M> / <N>': lambda c: c[0] / c[2],
'<M> ::= <N>': lambda c: c[0],
'<N> ::= - <V>': lambda c: -c[1],
'<N> ::= <V>': lambda c: c[0],
'<V> ::= Num': lambda c: int(c[0].lexeme),
'<V> ::= ( <E> )': lambda c: c[1],
}, g)
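    # Evaluation is bottom-up: Num tokens become ints at <V>, then the
    # <N>/<M>/<E> handlers fold upward, so "-2*(3+4)-5" reduces to -19.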
try:
pyauparser.parse_string(g, "-2*(3+4)-5", handler=h)
print "Result = {0}".format(h.result)
except pyauparser.ParseError as e:
print e
if __name__ == "__main__":
main()
| mit | 3,278,430,495,155,954,700 | 30.90625 | 78 | 0.449193 | false | 2.877049 | false | false | false |
mattcaldwell/djangopypi | userpypi/migrations/0001_initial.py | 1 | 10447 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Classifier'
db.create_table('userpypi_classifier', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
))
db.send_create_signal('userpypi', ['Classifier'])
# Adding model 'Project'
db.create_table('userpypi_project', (
('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('license', self.gf('django.db.models.fields.TextField')(blank=True)),
('metadata_version', self.gf('django.db.models.fields.CharField')(default=1.0, max_length=64)),
('author', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('home_page', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('download_url', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('summary', self.gf('django.db.models.fields.TextField')(blank=True)),
('author_email', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='projects', to=orm['auth.User'])),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
))
db.send_create_signal('userpypi', ['Project'])
# Adding M2M table for field classifiers on 'Project'
db.create_table('userpypi_project_classifiers', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('project', models.ForeignKey(orm['userpypi.project'], null=False)),
('classifier', models.ForeignKey(orm['userpypi.classifier'], null=False))
))
db.create_unique('userpypi_project_classifiers', ['project_id', 'classifier_id'])
# Adding model 'Release'
db.create_table('userpypi_release', (
('upload_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('md5_digest', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('filetype', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('pyversion', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='releases', to=orm['userpypi.Project'])),
('platform', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('version', self.gf('django.db.models.fields.CharField')(max_length=128)),
('signature', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('distribution', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('userpypi', ['Release'])
# Adding unique constraint on 'Release', fields ['project', 'version', 'platform', 'distribution', 'pyversion']
db.create_unique('userpypi_release', ['project_id', 'version', 'platform', 'distribution', 'pyversion'])
def backwards(self, orm):
# Deleting model 'Classifier'
db.delete_table('userpypi_classifier')
# Deleting model 'Project'
db.delete_table('userpypi_project')
# Removing M2M table for field classifiers on 'Project'
db.delete_table('userpypi_project_classifiers')
# Deleting model 'Release'
db.delete_table('userpypi_release')
# Removing unique constraint on 'Release', fields ['project', 'version', 'platform', 'distribution', 'pyversion']
db.delete_unique('userpypi_release', ['project_id', 'version', 'platform', 'distribution', 'pyversion'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'userpypi.classifier': {
'Meta': {'object_name': 'Classifier'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'userpypi.project': {
'Meta': {'object_name': 'Project'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'author_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'classifiers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['userpypi.Classifier']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'download_url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'home_page': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'license': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'metadata_version': ('django.db.models.fields.CharField', [], {'default': '1.0', 'max_length': '64'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects'", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'userpypi.release': {
'Meta': {'unique_together': "(('project', 'version', 'platform', 'distribution', 'pyversion'),)", 'object_name': 'Release'},
'distribution': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'filetype': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5_digest': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'releases'", 'to': "orm['userpypi.Project']"}),
'pyversion': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'signature': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'upload_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['userpypi']
| bsd-3-clause | -2,172,781,878,352,863,500 | 67.281046 | 139 | 0.577965 | false | 3.740422 | false | false | false |
tecan/xchat-rt | plugins/scripts/ss-autoaway.py | 1 | 2072 | #!/usr/bin/env python
#
# screensaverAutoAway.py - X-Chat script to monitor for the DBUS message
# emitted when the screensaver is activated and de-activated and set the user
# away.
#
# To install:
# o Copy this file to your ~/.xchat2/ directory and it will be loaded on startup.
# o To load without restart, run: /py load screensaverAutoAway.py
# (If you don't want to put it in your ~/.xchat2, then specify the full path.)
#
# If running the '/py' command above results in a message 'py :Unknown command',
# then you do not have the Python plugin installed.
# Written by Wil Cooley <[email protected]>
# Began 26 Aug 2008
#
# $Id$
#todo idle sensor
import dbus
from dbus.mainloop.glib import DBusGMainLoop
try:
import xchat
except ImportError:
# Allow for external tests
pass
__author__ = 'Wil Cooley <wcooley at nakedape.cc>'
__module_name__ = 'screensaverAutoAway'
__module_version__ = '0.2'
__module_description__ = 'Sets user away when the GNOME screensaver is activated'
def screensaver_changed(state):
''' Called when screensaver stops or starts
state is either:
- True: Screensaver activated
- False: Screensaver deactivated
'''
if state:
set_away()
else:
set_back()
def set_away():
away_msg = '%s (Auto-away by %s, version %s)' % \
(xchat.get_prefs('away_reason'),
__module_name__ ,
__module_version__)
if xchat.get_info('away') is None:
xchat.command('away ' + away_msg)
def set_back():
if xchat.get_info('away') is not None:
xchat.command('back')
def setup_session():
DBusGMainLoop(set_as_default=True)
sesbus = dbus.SessionBus()
sesbus.add_signal_receiver(screensaver_changed,
'SessionIdleChanged', 'org.mate.ScreenSaver')
if __name__ == '__main__':
setup_session()
xchat.prnt('%s version %s by %s loaded' % \
(__module_name__, __module_version__, __author__) )
| gpl-2.0 | 1,878,460,759,486,318,300 | 27.383562 | 82 | 0.615347 | false | 3.482353 | false | false | false |
Bajoo/client-pc | bajoo/filesync/task_builder.py | 1 | 5313 | # -*- coding: utf-8 -*-
from ..index.folder_node import FolderNode
from ..index.hints import DeletedHint, DestMoveHint, SourceMoveHint
from ..index.hint_builder import HintBuilder
from .added_local_files_task import AddedLocalFilesTask
from .added_remote_files_task import AddedRemoteFilesTask
from .folder_task import FolderTask
from .moved_local_files_task import MovedLocalFilesTask
from .removed_local_files_task import RemovedLocalFilesTask
from .removed_remote_files_task import RemovedRemoteFilesTask
class TaskBuilder(object):
"""Create sync task by acquiring node and release node when task is done.
A task is created from the node, depending of the node's type and its
state (new or existing node, type of hints).
When the task has been executed, it can release the reserved node(s).
A task can reserve several nodes, by example when there is a "Move" hint.
"""
@classmethod
def build_from_node(cls, local_container, node):
"""Create the best suited sync task for the target node.
The type of task will depends of the type of node and the hints set by
external events.
After this call, the node is not yet acquired. `acquire_from_task()`
should be called before executing the task.
Note:
- This method must be called with the IndexTree's lock acquired.
Args:
local_container (LocalContainer): container owning the node.
node (BaseNode): node to sync.
Returns:
Task: sync task, executable by the filesync service.
"""
container = local_container.container
node_path = node.get_full_path()
if isinstance(node, FolderNode):
task = FolderTask(local_container, node)
else:
if node.local_hint:
if isinstance(node.local_hint, DestMoveHint):
node = node.local_hint.source_node
node_path = node.get_full_path()
if isinstance(node.local_hint, SourceMoveHint):
dest_path = node.local_hint.dest_node.get_full_path()
task = MovedLocalFilesTask(container,
(node_path, dest_path,),
local_container)
elif isinstance(node.local_hint, DeletedHint):
task = RemovedLocalFilesTask(container, (node_path,),
local_container)
else: # ModifiedHint
task = AddedLocalFilesTask(container, (node_path,),
local_container)
elif node.remote_hint:
if isinstance(node.remote_hint, DestMoveHint):
node = node.remote_hint.source_node
node_path = node.get_full_path()
# if isinstance(node.remote_hint, SourceMoveHint):
# # TODO: no support for remove Move events.
# dest_path = node.remote_hint.dest_node.get_full_path()
if isinstance(node.remote_hint, DeletedHint):
task = RemovedRemoteFilesTask(container, (node_path,),
local_container)
else: # ModifiedHint
task = AddedRemoteFilesTask(container, (node_path,),
local_container)
else:
task = AddedLocalFilesTask(container, (node_path,),
local_container)
return task
@classmethod
def acquire_from_task(cls, node, task):
"""Acquire the node and all related nodes used by the task.
For most of the tasks, only the primary node is acquired. If there are
some "Move" hints, hint pairs can be split in (Deleted, Modified)
couple.
If the task is of type "MovedLocalFilesTask", both source and
destination nodes are acquired by the task.
Note:
- This method must be called with the IndexTree's lock acquired.
- After an acquisition, nodes's hints are reset to None. If they
are needed by the task, they should be copied before that.
Args:
node (BaseNode): primary target of the task.
task: sync task that will take care of the node(s).
"""
if isinstance(task, MovedLocalFilesTask):
HintBuilder.break_coupled_hints(node, HintBuilder.SCOPE_REMOTE)
if isinstance(node.local_hint, SourceMoveHint):
# acquire destination node
dest_node = node.local_hint.dest_node
dest_node.task = task
dest_node.remote_hint = None
dest_node.local_hint = None
else:
# acquire source node
source_node = node.local_hint.source_node
source_node.task = task
source_node.remote_hint = None
source_node.local_hint = None
else:
HintBuilder.break_coupled_hints(node)
# acquire target node
node.task = task
node.remote_hint = None
node.local_hint = None
| gpl-3.0 | -5,430,126,019,375,362,000 | 41.166667 | 78 | 0.576699 | false | 4.664618 | false | false | false |
maximz/cooperate-without-looking | src/cwl.py | 1 | 24981 | # -*- coding: utf-8 -*-
"""Module cwl.
Produces simulation calculation and figures for the Cooperate With/Without Looking project.
Usage:
python cwl.py {recalculate?}
Examples:
python cwl.py run using pre-calculated saved data
python cwl.py recalculate run with freshly calculated data
@author: Maxim Zaslavsky <[email protected]>
@author: Erez Yoeli <[email protected]>
"""
### GENERAL
# system imports
import sys, os
import numpy as np
import matplotlib
matplotlib.use("pdf") # save as PDFs
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from collections import defaultdict
from random import sample as random_sample
from math import floor
import cPickle as pickle
# Choose whether to recalculate or to used saved data
Calculate = False
if __name__ == "__main__":
try:
if sys.argv[1] == 'recalculate':
Calculate = True
except: # Interactive mode
pass
output_dir = '../bin/'
print 'Welcome to the CW(O)L Simulations and Figures toolkit.'
print
#######################################################
#######################################################
# Game theory methods
def are_assumptions_valid(a, b, c1, c2, d, p, w):
#P1 and P2 prefer a cooperative interaction to no interaction
statement_1 = a > 0 and b > 0
#P1 gets short-term gains from defection
statement_2 = c1 > a and c2 > a
#P2 2 doesn't want to interact with 1 if he expects 1 to defect in either game.
statement_3 = b * p + d * (1 - p) < 0 and d * p + b * (1 - p) < 0
#wlog it is more tempting to defect in state 2.
statement_4 = c2 > c1
#all of this must hold
return statement_1 and statement_2 and statement_3 and statement_4
def get_game_population_1(a, b, c1, c2, d, p, w):
"""
Game for population 1 of CWL
"""
if not are_assumptions_valid(a, b, c1, c2, d, p, w):
raise ValueError("This parameters do not comply with assumptions")
A = np.empty(shape=(4, 3))
A[0, 0] = (a * p + a * (1.0 - p)) / (1.0 - w)
A[0, 1] = (a * p + a * (1.0 - p)) / (1.0 - w)
A[0, 2] = (a * p + a * (1.0 - p))
A[1, 0] = (a * p + a * (1.0 - p))
A[1, 1] = (a * p + a * (1.0 - p)) / (1 - w)
A[1, 2] = (a * p + a * (1.0 - p))
A[2, 0] = (a * p + c2 * (1.0 - p))
A[2, 1] = (a * p + c2 * (1.0 - p)) / (1 - p * w)
A[2, 2] = (a * p + c2 * (1.0 - p))
A[3, 0] = (c1 * p + c2 * (1.0 - p))
A[3, 1] = (c1 * p + c2 * (1.0 - p))
A[3, 2] = (c1 * p + c2 * (1.0 - p))
return A
def get_game_population_2(a, b, c1, c2, d, p, w):
"""
Game for population 2 of CWL
"""
if not are_assumptions_valid(a, b, c1, c2, d, p, w):
raise ValueError("This parameters do not comply with assumptions")
B = np.empty(shape=(4, 3))
B[0, 0] = (b * p + b * (1.0 - p)) / (1.0 - w)
B[0, 1] = (b * p + b * (1.0 - p)) / (1.0 - w)
B[0, 2] = (b * p + b * (1.0 - p))
B[1, 0] = (b * p + b * (1.0 - p))
B[1, 1] = (b * p + b * (1.0 - p)) / (1.0 - w)
B[1, 2] = (b * p + b * (1.0 - p))
B[2, 0] = (b * p + d * (1.0 - p))
B[2, 1] = (b * p + d * (1.0 - p)) / (1.0 - p * w)
B[2, 2] = (b * p + d * (1.0 - p))
B[3, 0] = (d * p + d * (1.0 - p))
B[3, 1] = (d * p + d * (1.0 - p))
B[3, 2] = (d * p + d * (1.0 - p))
return B.T
# replicator
def __replicator_equation_two_populations(x, t, game1, game2, number__of_strategies_population_1, number__of_strategies_population_2):
"""
This auxiliary function codes the replicator dynamics step. Typically it is only called from replicator_trajectory_two_populations()
Parameters
----------
x: ndarray initial state (concatenated from the two populations)
t: time
game1: ndarray, game for population 1
game2: ndarray, game for population 2
number__of_strategies_population_1: int
number__of_strategies_population_2: int
Returns:
out: ndarray next state (concatenated from the two populations)
"""
x_population_1 = x[0:number__of_strategies_population_1]
#the first piece of y corresponds to population 1
x_population_2 = x[number__of_strategies_population_1:number__of_strategies_population_1 + number__of_strategies_population_2] # the second piece of y corresponds to population 2
#First Ay
fitness_vector_1 = np.dot(game1, x_population_2)
# and Bx (see equation above)
fitness_vector_2 = np.dot(game2, x_population_1)
#Now xAy
average_fitness_1 = np.dot(x_population_1, fitness_vector_1)
#And yBx
average_fitness_2 = np.dot(x_population_2, fitness_vector_2)
#the next lines correspond to equations 10.5 and 10.6 of Hofbauer and Sigmund (page 116)
new_population_1 = x_population_1 * (fitness_vector_1 - average_fitness_1)
new_population_2 = x_population_2 * (fitness_vector_2 - average_fitness_2)
return np.array(new_population_1.tolist() + new_population_2.tolist())
def replicator_trajectory_two_populations(game_matrix_1, game_matrix_2, x_0, y_0, t_vector, **kwargs):
"""
Computes a replicator trajectory for two populations, given two games, starting points and time vector.
It uses scipy's odeint.
Parameters
----------
game_matrix_1: numpy matrix (for population 1)
game_matrix_2: numpy matrix (for population 2)
x_0: ndarray
y_0: ndarray
t_vector: time array
Returns
-------
out: list
Examples
--------
#TODO: Write examples
"""
#join initial populations to fit signature of replicator_equation
start = np.array(x_0.tolist() + y_0.tolist())
number__of_strategies_population_1 = len(x_0)
number__of_strategies_population_2 = len(y_0)
#solve
soln = odeint(__replicator_equation_two_populations, start, t_vector, args=(game_matrix_1, game_matrix_2, number__of_strategies_population_1, number__of_strategies_population_2), **kwargs)
return [soln[:, i] for i in xrange(number__of_strategies_population_1 + number__of_strategies_population_2)]
def get_random_point_inside_simplex(dimension):
"""
Returns a vector that sums up to one, where components have been uniformly chosen.
Parameters:
----------
dimension:int
"""
exponencial = np.random.exponential(size=dimension)
exponencial /= np.sum(exponencial, dtype=float)
return exponencial
def adjusted_solution(a, b, c1, c2, d, p, w, x_0, y_0, max_t, **kwargs):
"""
Returns a steady state, by ajusting dynamically the step size and total error.
"""
tolerance = 1e-4
added_factor_vector = [10.0, 20.0, 50.0, 100.0]
game_1 = get_game_population_1(a, b, c1, c2, d, p, w)
game_2 = get_game_population_2(a, b, c1, c2, d, p, w)
t = np.linspace(0.0, max_t, 2000)
if x_0 is None or y_0 is None:
(x_0, y_0) = (get_random_point_inside_simplex(4), get_random_point_inside_simplex(3))
for added_factor in added_factor_vector:
sol = replicator_trajectory_two_populations(added_factor + game_1, added_factor + game_2, x_0, y_0, t, atol=tolerance, **kwargs)
end_point = [sol[i][-1] for i in xrange(0, 7)]
if np.allclose(sum(end_point), 2.0, atol=tolerance):
return end_point
raise ValueError("Numerics: x = {}, y = {}, a = {}, b = {}, c1 = {}, c2 = {}, d = {}, p = {}, w = {}".format(x_0.tolist(), y_0.tolist(), a, b, c1, c2, d, p, w))
def determine_outcome(solution):
tolerance = 1e-3
if not np.allclose(np.sum(solution), 2.0, atol=tolerance):
raise ValueError("Probabilities don't add up: {} ".format(solution))
elif player1_CWOL(solution, atol=tolerance) and player2_sometimes_exits_if_looks_or_defects(solution, atol=tolerance):
return (1, solution)
elif player1_alwaysD(solution, atol=tolerance) and (player2_pure_strategy(solution, atol=tolerance) or player2_mixes(solution, atol=tolerance)):
return (2, solution)
elif player2_exitifdefect(solution, atol=tolerance) and (player1_CWOL(solution, atol=tolerance) or player1_CWL(solution, atol=tolerance) or player1_CWOL_or_CWL(solution, atol=tolerance)):
return (3, solution)
else:
return (4, solution)
def determine_random_outcome(a, b, c1, c2, d, p, w, max_t, **kwargs):
"""
Starting in a random point tries to determine the outcome, given parameters.
This is the main function to be called from montecarlo procedures
"""
x_0 = get_random_point_inside_simplex(4)
y_0 = get_random_point_inside_simplex(3)
solution = adjusted_solution(a, b, c1, c2, d, p, w, x_0, y_0, max_t)
return determine_outcome(solution)
def montecarlo(a, b, c1, c2, d, p, w, max_t=300, repetitions=5000):
"""
Takes samples for a given point in the space. Counting the occurrences
of different outcomes, and returns them in a dictionary with the
following indexes:
1 - Outcome 1
2 - Outcome 2
3 - Outcome 3
4 - No categorized
"""
ans = defaultdict(int)
sum_of_solution = np.zeros(7)
for i in xrange(0, repetitions):
try:
outcome, solution = determine_random_outcome(a, b, c1, c2, d, p, w, max_t)
ans[outcome] = ans[outcome]+1
sum_of_solution += solution
except ValueError, e:
print e
ans[5] = ans[5] + 1
avg_of_solution = sum_of_solution/repetitions
return (ans, sum_of_solution)
#--------- THEORY CHECKING FUNCTIONS ----------
def is_coop_wihtout_looking_an_equilibrium(a, b, c1, c2, d, p, w):
return c1*p+c2*(1.0 - p) < a / (1.0 - w)
def is_coop_looking_an_equilibrium(a, b, c1, c2, d, p, w):
return c2 < a / (1.0 - w)
def number_of_equlibria(a, b, c1, c2, d, p, w):
CWOL = is_coop_wihtout_looking_an_equilibrium(a, b, c1, c2, d, p, w)
CWL = is_coop_looking_an_equilibrium(a, b, c1, c2, d, p, w)
if CWOL and CWL:
return 3
elif CWOL or CWOL:
return 2
else:
return 1
#--- classifier functions
def player1_CWOL(solution, atol=1e-3):
player1_plays_desired_pure_strategy = np.allclose(solution[0], 1.0, atol)
return player1_plays_desired_pure_strategy
def player1_CWL(solution, atol=1e-3):
player1_plays_desired_pure_strategy = np.allclose(solution[1], 1.0, atol)
return player1_plays_desired_pure_strategy
def player1_Cin1(solution, atol=1e-3):
player1_plays_desired_pure_strategy = np.allclose(solution[2], 1.0, atol)
return player1_plays_desired_pure_strategy
def player1_alwaysD(solution, atol=1e-3):
player1_plays_desired_pure_strategy = np.allclose(solution[3], 1.0, atol)
return player1_plays_desired_pure_strategy
def player1_pure_strategy(solution, atol=1e-3):
return (player1_CWOL(solution, atol) or player1_CWL(solution, atol) or player1_Cin1(solution, atol) or player1_alwaysD(solution, atol))
def player1_CWOL_or_CWL(solution, atol=1e-3):
#solution[0:1] is now solution[0:2]
player1_mixes_CWL_CWOL = np.allclose(np.sum(solution[0:2]), 1.0, atol)
return player1_mixes_CWL_CWOL and not player1_pure_strategy(solution, atol)
def player1_mixes(solution, atol=1e-3):
#solution[0:3] is now solution[0:4]
player1_mixes = np.allclose(np.sum(solution[0:4]), 1.0, atol)
return player1_mixes and not player1_pure_strategy(solution, atol)
def player2_exitiflook(solution, atol=1e-3):
player2_plays_desired_pure_strategy = np.allclose(solution[4], 1.0, atol)
return player2_plays_desired_pure_strategy
def player2_exitifdefect(solution, atol=1e-3):
player2_plays_desired_pure_strategy = np.allclose(solution[5], 1.0, atol)
return player2_plays_desired_pure_strategy
def player2_alwaysexit(solution, atol=1e-3):
player2_plays_desired_pure_strategy = np.allclose(solution[6], 1.0, atol)
return player2_plays_desired_pure_strategy
def player2_pure_strategy(solution, atol=1e-3):
return (player2_exitifdefect(solution, atol=1e-3) or player2_exitiflook(solution, atol=atol) or player2_alwaysexit(solution, atol=atol))
def player2_mixes(solution, atol=1e-3):
#solution[4:6] is now changed to solution[4:7], please verify.
player2_mixes = np.allclose(np.sum(solution[4:7]), 1.0, atol)
return player2_mixes and not player2_pure_strategy(solution, atol=atol)
def player2_sometimes_exits_if_looks_or_defects(solution, atol=1e-3):
player2_sometimes_exits_if_looks = not np.allclose(solution[4], 0.0, atol)
player2_sometimes_exits_if_defects = not np.allclose(solution[5], 0.0, atol)
return player2_sometimes_exits_if_looks or player2_sometimes_exits_if_defects
# Additioanl plot beautifier functions:
def summarize_binary_list(lista):
"""
#determines edges of sequences of 1's in a binary list
"""
ans = []
x_0 = None
tamano = len(lista)
for i in xrange(tamano):
if lista[i] == 1 and x_0 is None:
x_0 = i
end_of_sequence = lista[i] == 0
end_of_array = i == (tamano-1) and lista[i] == 1
if (end_of_sequence or end_of_array) and x_0 is not None:
if end_of_sequence:
ans.append((x_0, i-1))
if end_of_array:
ans.append((x_0, i))
x_0 = None
return ans
#######################################################
#######################################################
### FIGURE 2 PREPARATION
def clear_past_figs():
plt.close()
plt.clf()
plt.cla()
plt.close()
#del f, fig_all
#gc.collect()
def export_graph(f_i, f_name):
#f_i.savefig(output_dir+f_name+'.png',dpi=300)
#f_i.savefig(output_dir+f_name+'.png',dpi=600)
f_i.savefig(output_dir+f_name+'.pdf', dpi=600) # This one looks the best
print f_name, 'exported as pdf at 600 dpi.' # 300dpi_png, 600dpi_png,
# Figure 2B and 2C calculations:
print 'Calculating or loading values for Figure 2B and Figure 2C'
p = 0.5 + 0.01
b = 1.0
c1 = 4.0
c2 = 12.0
d = -10.0
w = 7.0/8.0 + 0.02
repetitions = 10000
number_of_points = 50
if Calculate:
a_interval = np.linspace(0.0+0.1, 2.0, number_of_points, endpoint=False)
a_interval_tight = np.linspace(0.0+0.1, 2.0, number_of_points) # TODO: change to 300?
#lets plot the theory predictions first as a shade
calculated_equilibria=[number_of_equlibria(a, b, c1, c2, d, p, w) for a in a_interval_tight]
one_equilibrium_region = summarize_binary_list([ce == 1 for ce in calculated_equilibria])
two_equilibria_region = summarize_binary_list([ce == 2 for ce in calculated_equilibria])
three_equilibria_region = summarize_binary_list([ce == 3 for ce in calculated_equilibria])
#first the sampling
outcome_1 = []
outcome_2 = []
outcome_3 = []
outcome_4 = []
no_outcome = []
strategy_1 = []
strategy_2 = []
strategy_3 = []
strategy_4 = []
strategy_5 = []
strategy_6 = []
strategy_7 = []
for a in a_interval_tight: # TODO: should this be a_interval?
diccionario, avg_strategy_frequency = montecarlo(a, b, c1, c2, d, p, w, repetitions=repetitions)
outcome_1.append(diccionario[1])
outcome_2.append(diccionario[2])
outcome_3.append(diccionario[3])
outcome_4.append(diccionario[4])
no_outcome.append(diccionario[5])
strategy_1.append(avg_strategy_frequency[0])
strategy_2.append(avg_strategy_frequency[1])
strategy_3.append(avg_strategy_frequency[2])
strategy_4.append(avg_strategy_frequency[3])
strategy_5.append(avg_strategy_frequency[4])
strategy_6.append(avg_strategy_frequency[5])
strategy_7.append(avg_strategy_frequency[6])
stuff = [a_interval, a_interval_tight, one_equilibrium_region, two_equilibria_region, three_equilibria_region, outcome_1, outcome_2, outcome_3, outcome_4, no_outcome, strategy_1, strategy_2, strategy_3, strategy_4, strategy_5, strategy_6, strategy_7]
pickle.dump( stuff, open( output_dir+"Figure 2_B and C_strategy frequency.saved_data", "wb" ) )
else:
(a_interval, a_interval_tight, one_equilibrium_region, two_equilibria_region, three_equilibria_region, outcome_1, outcome_2, outcome_3, outcome_4, no_outcome, strategy_1, strategy_2, strategy_3, strategy_4, strategy_5, strategy_6, strategy_7) = pickle.load(open(output_dir+"Figure 2_B and C_strategy frequency.saved_data", "r"))
# Plotting:
clear_past_figs()
def process_ax(ax):
'''
Shades figure to correspond to equilibria regions.
'''
# hack to fill white space in the middle:
midpoint = (a_interval_tight[one_equilibrium_region[0][1]] + a_interval_tight[two_equilibria_region[0][0]])/2
midpoint1 = (a_interval_tight[two_equilibria_region[0][1]] + a_interval_tight[three_equilibria_region[0][0]])/2
for dupla in one_equilibrium_region:
#ax.axvspan(p_interval_tight[dupla[0]], p_interval_tight[dupla[1]], facecolor='red', alpha=0.2)
ax.axvspan(a_interval_tight[dupla[0]], midpoint, facecolor='white', alpha=1) # red, alpha=0.2
print 'one', dupla, a_interval_tight[dupla[0]], a_interval_tight[dupla[1]]
for dupla in two_equilibria_region:
#ax.axvspan(p_interval_tight[dupla[0]], p_interval_tight[dupla[1]], facecolor='blue', alpha=0.2)
ax.axvspan(midpoint, midpoint1, facecolor='0.50', alpha=0.2) # blue or .80
print 'two', dupla, a_interval_tight[dupla[0]], a_interval_tight[dupla[1]]
for dupla in three_equilibria_region:
ax.axvspan(midpoint1, a_interval_tight[dupla[1]], facecolor='0.10', alpha=0.2) # yellow or .20
print 'three', dupla, a_interval_tight[dupla[0]], a_interval_tight[dupla[1]]
avoid_end = -1 # remove last 1 point
#######################################################
#######################################################
### PLOT FIGURE 2(B): Frequency vs. a-value
print
print 'Plotting Figure 2B'
clear_past_figs()
f = plt.figure(figsize=(10,10))
process_ax(f.gca())
plt.plot(a_interval[:avoid_end], (np.array(outcome_1)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'bo-', label='Cooperate without looking')
plt.plot(a_interval[:avoid_end], (np.array(outcome_2)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'ro-', label='Always defect')
plt.plot(a_interval[:avoid_end], (np.array(outcome_3)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'yo-', label='Cooperate with looking')
plt.plot(a_interval[:avoid_end], (np.array(outcome_4)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'ko-', label='Other')
plt.grid()
plt.legend(loc='best')
plt.ylim((-0.01, 1.01))
plt.xlim((a_interval[0]-0.01, a_interval[-1]+0.01))
plt.xlabel('a')
plt.ylabel('Frequency')
plt.title('Frequency vs a')
export_graph(f, 'Figure_2B')
#######################################################
#######################################################
### PLOT FIGURE 2(C): Average frequency of strategies for players 1 and 2
print
print 'Plotting Figure 2C'
clear_past_figs()
fig_all, (ax1, ax2) = plt.subplots(2,1, sharex=False, sharey=False) # make 2x1 grid of subplots
fig_all.set_size_inches(10, 15)
#plt.subplots_adjust(wspace=0.30, hspace=0.15)
#prepare plots
for ax in (ax1, ax2):
ax.grid()
ax.legend(loc='best')
ax.set_ylim((-0.01, 1.01))
ax.set_xlim((a_interval[0]-0.01, a_interval[-1]+0.01))
ax.set_xlabel('a')
ax.set_ylabel('Frequency')
process_ax(ax)
plt.tight_layout()
#player1
ax1.plot(a_interval[:avoid_end], (np.array(strategy_1)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'bo-', label='P1 CWOL')
ax1.plot(a_interval[:avoid_end], (np.array(strategy_2)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'ro-', label='P1 CWL')
ax1.plot(a_interval[:avoid_end], (np.array(strategy_3)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'yo-', label='P1 C in 1')
ax1.plot(a_interval[:avoid_end], (np.array(strategy_4)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'ko-', label='P1 All D')
ax1.set_title('Average Frequency of Strategies - Player 1')
ax1.legend(loc='best')
#player2
ax2.plot(a_interval[:avoid_end], (np.array(strategy_5)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'co-', label='P2 Exit if Look')
ax2.plot(a_interval[:avoid_end], (np.array(strategy_6)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'mo-', label='P2 Exit if Defect')
ax2.plot(a_interval[:avoid_end], (np.array(strategy_7)/(float(repetitions)-np.array(no_outcome)))[:avoid_end], 'go-', label='P2 Always Exit')
ax2.set_title('Average Frequency of Strategies - Player 2')
ax2.legend(loc='best')
fig_all.tight_layout()
export_graph(fig_all, 'Figure_2C')
#######################################################
#######################################################
### PLOT FIGURE 2(A): Player 1 and 2 strategy replicator trajectories from single simulation run
print
print 'Calculating or loading values for Figure 2A'
# Decide which a-values to use and plot.
def get_a_value_from_interval(bounds):
for (bound_x, bound_y) in bounds:
i_chosen = int(floor((bound_x+bound_y)/2.0))
yield a_interval_tight[i_chosen]
a_selected = list(get_a_value_from_interval([one_equilibrium_region[0], two_equilibria_region[0], three_equilibria_region[0]]))
# This setup supports having multiple columns, i.e. one column for each a-value.
# The below is currently configured to hide all but the second column - however, we could easily disable this to return to all-column view, simply by commenting out the following line:
a_selected = a_selected[1:2]
print 'Using these a-values:', a_selected
# Randomly seed strategy frequencies:
if Calculate:
tolerance_current=1e-2 # previously, 1e-3. arbitrary designation.
x_0 = get_random_point_inside_simplex(4) # random frequency
y_0 = get_random_point_inside_simplex(3) # random frequency
t_vector = np.linspace(0.0, 30.0, 1000) # time values
parameters_saved = [x_0, y_0, t_vector, tolerance_current, b, c1, c2, d, p, w] # a_selected is not necessary
pickle.dump( parameters_saved, open( output_dir+"Figure 2_A_single simulation run of strategy replicator trajectories.saved_data", "wb" ) )
else: # load previous working version
(x_0, y_0, t_vector, tolerance_current, b, c1, c2, d, p, w) = pickle.load(open(output_dir+"Figure 2_A_single simulation run of strategy replicator trajectories.saved_data", "r"))
# Begin plot:
print
print 'Plotting Figure 2A'
clear_past_figs()
fig_all, ax_arr = plt.subplots(2,len(a_selected), sharex=False, sharey=False, figsize=(10,20)) # make 2 rows x 3 columns grid of subplots; (30, 20) size when 3x2
for i in range(len(a_selected)):
if len(a_selected) == 1: # Treat situation differently based on whether we are conmparing a-values or not.
(ax_p1, ax_p2) = (ax_arr[0], ax_arr[1])
else:
(ax_p1, ax_p2) = (ax_arr[0,i], ax_arr[1,i])
a_cur = a_selected[i]
solution = replicator_trajectory_two_populations(get_game_population_1(a_cur, b, c1, c2, d, p, w), get_game_population_2(a_cur, b, c1, c2, d, p, w), x_0, y_0, t_vector, atol=tolerance_current)
for ax in (ax_p1, ax_p2):
ax.set_ylim((-0.1, 1.1))
ax.set_xlim(0,10)
ax.set_ylabel('Frequency')
ax.set_xlabel('Time')
ax.grid(True)
ax_p1.plot(t_vector, solution[0], 'b-', label='P1 C wout looking', linewidth=2.0)
ax_p1.plot(t_vector, solution[1], 'g-', label='P1 Observe and C', linewidth=2.0)
ax_p1.plot(t_vector, solution[2], 'y-', label='P1 Observe and C only if 1 is chosen', linewidth=2.0)
ax_p1.plot(t_vector, solution[3], 'r-', label='P1 ALLD', linewidth=2.0)
ax_p2.plot(t_vector, solution[4], 'm--', label='P2 Continue iff P1 C wout looking', linewidth=2.0)
ax_p2.plot(t_vector, solution[5], 'y--', label='P2 Continue iff P1 C', linewidth=2.0)
ax_p2.plot(t_vector, solution[6], 'r--', label='P2 Exit', linewidth=2.0)
ax_p1.set_title('Player 1 Strategies') # 'Player 1. a = '+str(a_cur)+'.'
ax_p2.set_title('Player 2 Strategies') # 'Player 2. a = '+str(a_cur)+'.'
ax_p1.legend(loc='best')
ax_p2.legend(loc='best')
#fig_all.suptitle('Single simulation run, replicator trajectory; tolerance = '+str(tolerance_current)+'.', fontsize=24)
fig_all.tight_layout()
fig_all.subplots_adjust(top=0.85)
# fig_all.show()
export_graph(fig_all, 'Figure_2A')
#######################################################
#######################################################
print
print 'CW(O)L Simulation Calculations and Figures Complete.'
| mit | -9,027,010,238,727,414,000 | 38.818627 | 332 | 0.616148 | false | 2.931698 | false | false | false |
walkr/cryex | cryex/coins/poloniex.py | 1 | 2638 | POLONIEX_REPAIRS = {
"1cr": "1CR",
"aby": "ABY",
"adn": "ADN",
"amp": "AMP",
"arch": "ARCH",
"bbr": "BBR",
"bcn": "BCN",
"bcy": "BCY",
"bela": "BELA",
"bitcny": "BITCNY",
"bits": "BITS",
"bitusd": "BITUSD",
"blk": "BLK",
"block": "BLOCK",
"btcd": "BTCD",
"btm": "BTM",
"bts": "BTS",
"burst": "BURST",
"c2": "C2",
"cga": "CGA",
"clam": "CLAM",
"cnmt": "CNMT",
"cure": "CURE",
"dash": "DASH",
"dgb": "DGB",
"diem": "DIEM",
"doge": "DOGE",
"emc2": "EMC2",
"eth": "ETH",
"exe": "EXE",
"exp": "EXP",
"fct": "FCT",
"fibre": "FIBRE",
"fldc": "FLDC",
"flo": "FLO",
"flt": "FLT",
"gap": "GAP",
"gemz": "GEMZ",
"geo": "GEO",
"gmc": "GMC",
"grc": "GRC",
"grs": "GRS",
"huc": "HUC",
"hyp": "HYP",
"hz": "HZ",
"index": "INDEX",
"ioc": "IOC",
"lqd": "LQD",
"ltbc": "LTBC",
"ltc": "LTC",
"maid": "MAID",
"mcn": "MCN",
"mil": "MIL",
"mint": "MINT",
"mmc": "MMC",
"mmnxt": "MMNXT",
"mrs": "MRS",
"myr": "MYR",
"naut": "NAUT",
"nav": "NAV",
"nbt": "NBT",
"neos": "NEOS",
"nmc": "NMC",
"nobl": "NOBL",
"note": "NOTE",
"noxt": "NOXT",
"nsr": "NSR",
"nxt": "NXT",
"omni": "OMNI",
"piggy": "PIGGY",
"pink": "PINK",
"pot": "POT",
"ppc": "PPC",
"pts": "PTS",
"qbk": "QBK",
"qora": "QORA",
"qtl": "QTL",
"rads": "RADS",
"rby": "RBY",
"rdd": "RDD",
"ric": "RIC",
"sc": "SC",
"sdc": "SDC",
"silk": "SILK",
"sjcx": "SJCX",
"str": "STR",
"swarm": "SWARM",
"sync": "SYNC",
"sys": "SYS",
"unity": "UNITY",
"via": "VIA",
"vrc": "VRC",
"vtc": "VTC",
"wdc": "WDC",
"xbc": "XBC",
"xc": "XC",
"xch": "XCH",
"xcn": "XCN",
"xcp": "XCP",
"xcr": "XCR",
"xdn": "XDN",
"xdp": "XDP",
"xem": "XEM",
"xmg": "XMG",
"xmr": "XMR",
"xpb": "XPB",
"xpm": "XPM",
"xrp": "XRP",
"xst": "XST",
"xvc": "XVC",
"yacc": "YACC",
}
def update():
new_pairs = {}
# Add *_BTC pair
for down, up in POLONIEX_REPAIRS.items():
new_key = '_'.join((down, 'btc'))
new_value = '_'.join(('BTC', up))
new_pairs[new_key] = new_value
# Add *_USD pair
for down in ['btc', 'eth', 'ltc', 'xmr', 'dash', 'xrp', 'nxt', 'str']:
up = down.upper()
new_key = '_'.join((down, 'usd'))
new_value = '_'.join(('USDT', up))
new_pairs[new_key] = new_value
POLONIEX_REPAIRS.update(new_pairs)
update()
| mit | 6,310,330,788,044,527,000 | 18.686567 | 74 | 0.392722 | false | 2.278066 | false | false | false |
Droriel/python_training | generator/contact.py | 1 | 7547 | # -*- coding: utf-8 -*-
import random
import string
from model.contact import PersonalData, PhoneNumbers, Emails, Www, AdditionalData, Notes, ContactBaseData, \
ContactAllData, BirthDate, AnniversaryDate
import jsonpickle
import os.path
import sys
import getopt
try:
opts, args=getopt.getopt(sys.argv[1:], 'n:f:', ['number of contacts', 'file'])
except getopt.GetoptError as err:
getopt.usage()
sys.exit(2)
n = 5
f = 'data/contacts.json'
for o, a in opts:
if o == '-n':
n = int(a)
elif o == '-f':
f = a
def random_string(maxlen):
symbols = string.ascii_letters + ' '*13 + '-'*3 + '_'*3
# + "'"*3
return ''.join([random.choice(symbols) for i in range(random.randrange(maxlen))])
def random_string_with_new_line(prefix, maxlen):
symbols = string.ascii_letters + string.digits + ' '*15 + '\n'*5 + '-'*3 + '_'*3
# + string.punctuation
return prefix + ''.join([random.choice(symbols) for i in range(random.randrange(maxlen))])
def random_email(maxlen):
symbols = string.ascii_letters + '-' * 3 + '_' * 3
return ''.join([random.choice(symbols) for i in range(random.randrange(maxlen+5))]) + '@' +\
''.join([random.choice(symbols) for i in range(random.randrange(maxlen))]) + '.' +\
''.join([random.choice(string.ascii_letters) for i in range(random.randrange(2,4))])
def random_phone_number(maxlen):
symbols = str(string.digits) * 4 + '('+ ')' + '+' + '-' + ' '
return ''.join([random.choice(symbols) for i in range(random.randrange(maxlen))])
def random_www(maxlen):
symbols = string.ascii_letters + '-'
return 'www.' + ''.join([random.choice(symbols) for i in range(random.randrange(maxlen))]) + '.'+\
''.join([random.choice(string.ascii_letters) for i in range(random.randrange(2,4))])
def random_day():
return random.randrange(1, 31)
def random_month():
return random.randrange(1, 12)
def random_year():
symbols = string.digits
return ''.join([random.choice(symbols) for i in range(4)])
testData = [ContactAllData(contactBaseData=ContactBaseData(firstname=random_string(10), lastname=random_string(18)),
personalData=PersonalData(middlename=random_string(10), nickname=random_string(10),
title=random_string(10), company=random_string(20),
address=random_string_with_new_line('Adres podstawowy: ', 30)),
phoneNumbers=PhoneNumbers(home=random_phone_number(12), mobile=random_phone_number(16),
work=random_phone_number(12), fax=random_phone_number(10)),
emails=Emails(email1=random_email(8), email2=random_email(5), email3=random_email(6)),
www=Www(www=random_www(30)),
birthDate=BirthDate(day=random_day(), month=random_month(), year=random_year()),
anniversaryDate=AnniversaryDate(day=random_day(), month=random_month(), year=random_year()),
additionalData=AdditionalData(address=random_string_with_new_line('Adres dodatkowy: ', 30) ,
phone=random_phone_number(12)),
notes=Notes(notes=random_string_with_new_line('n', 100)))
for i in range(n)]\
+ \
[ContactAllData(contactBaseData=ContactBaseData(firstname='', lastname=''),
personalData=PersonalData(middlename='', nickname='',
title='', company='',
address=''),
phoneNumbers=PhoneNumbers(home='', mobile='',
work='', fax=''),
emails=Emails(email1='', email2='', email3=''),
www=Www(www=''),
birthDate=BirthDate(day=-1, month=0, year=''),
anniversaryDate=AnniversaryDate(day=-1, month=0, year=''),
additionalData=AdditionalData(address='' ,
phone=''),
notes=Notes(notes=''))]\
+\
[ContactAllData(contactBaseData=ContactBaseData(firstname=random_string(10), lastname=random_string(18)),
personalData=PersonalData(middlename=random_string(10), nickname=random_string(10),
title=random_string(10), company=random_string(20),
address=''),
phoneNumbers=PhoneNumbers(home='', mobile='',
work='', fax=''),
emails=Emails(email1='', email2='', email3=''),
www=Www(www=''),
birthDate=BirthDate(day=31, month=12, year='1999'),
anniversaryDate=AnniversaryDate(day=1, month=1, year='2010'),
additionalData=AdditionalData(address='',
phone=''),
notes=Notes(notes=random_string_with_new_line('n', 100)))]\
+ \
[ContactAllData(contactBaseData=ContactBaseData(firstname=' a ', lastname=' b '),
personalData=PersonalData(middlename='', nickname='',
title='', company='',
address=''),
phoneNumbers=PhoneNumbers(home='', mobile='',
work='', fax=''),
emails=Emails(email1='', email2='', email3=''),
www=Www(www=''),
birthDate=BirthDate(day=-1, month=0, year=''),
anniversaryDate=AnniversaryDate(day=-1, month=0, year=''),
additionalData=AdditionalData(address='',
phone=''),
notes=Notes(notes=''))] \
+ \
[ContactAllData(contactBaseData=ContactBaseData(firstname='a b', lastname='c d'),
personalData=PersonalData(middlename='', nickname='',
title='', company='',
address=''),
phoneNumbers=PhoneNumbers(home='', mobile='',
work='', fax=''),
emails=Emails(email1='', email2='', email3=''),
www=Www(www=''),
birthDate=BirthDate(day=-1, month=0, year=''),
anniversaryDate=AnniversaryDate(day=-1, month=0, year=''),
additionalData=AdditionalData(address='',
phone=''),
notes=Notes(notes=''))]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..' , f)
with open(file, 'w', encoding='utf8') as out:
jsonpickle.set_encoder_options('json', indent=2)
out.write(jsonpickle.encode(testData)) | apache-2.0 | -4,843,098,957,106,365,000 | 50.69863 | 119 | 0.481118 | false | 4.476275 | false | false | false |
asoliveira/NumShip | source/Navio-back.py | 1 | 56391 | # -*- coding: utf-8 -*-
#
#This file is part of a program called NumShip
#Copyright (C) 2011,2012 Alex Sandro Oliveira
#NumShip is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from Casco import *
from Leme import *
import scipy as sp
from scipy import linalg
from scipy import stats
from Prop import *
class inte(object):
"""
Classe que realisa a integração no tempo
:version:191010
:author: Alex
"""
def __init__(self):
"""
"""
pass
def rk4(self, function, x, t0, dt, par = None):
"""
        Fourth-order Runge-Kutta integration step
"""
k1 = function(x, t0, par)
k2 = function(x + 1./2*dt*k1, t0 + 1./2*dt, par)
k3 = function(x + 1./2*dt*k2, t0 + 1./2*dt, par)
k4 = function(x + dt*k3, t0 + dt, par)
xt = x + 1./6*(k1+ 2.*k2+ 2.*k3+ k4)*dt
return xt
def euler(self, f, x, t0, dt, par= None ):
"""
"""
return x + f(x, t0, par)*dt
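def _demo_integrador():
    """
    Minimal usage sketch (not part of the original model): integrates the
    test equation dx/dt = -x with both steppers of ``inte``.  For a small
    dt both results should approach exp(-1) ~ 0.3679 at t = 1.
    """
    integrador = inte()
    dt = 0.01
    xe = sp.array([1.])  # state advanced with the Euler stepper
    xr = sp.array([1.])  # state advanced with the Runge-Kutta stepper
    f = lambda x, t, par: -x  # right-hand side of dx/dt = -x
    for t in sp.arange(0., 1., dt):
        xe = integrador.euler(f, xe, t, dt)
        xr = integrador.rk4(f, xr, t, dt)
    return xe, xr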
class navio:
"""
    Ship class
    """
    tipo = 'Chosen according to self.Tipo'
data = '10-11-2010'
autor = 'Alex'
def __init__(self, DicionarioDerivadas, Nome = 'Teste', Tipo = 'TP'):
""""
Construtor do navio
__________________________
Variáveis de entrada:
Nome (string)-- Nome do Navio. Não possui relevância;
Tipo ('TP')-- Tipo de modelo numérico adotado para a construção do Leme
"""
self.nome = Nome
self.vel = sp.zeros((6, 1))
self.acel = sp.zeros((6, 1))
self.pos = sp.zeros((6, 1))
self.dic = DicionarioDerivadas
self.tipo = Tipo
self.integrador = inte()
self.uc = sp.array(self.dic['unom'])
if Tipo == 'TP':
self.leme = lemeTris(DicionarioDerivadas)
self.casco = cascoTris(DicionarioDerivadas)
self.prop = prop()
elif Tipo == 'MARAD':
self.leme = lemeMarAd(DicionarioDerivadas)
self.casco = cascoMarAd(DicionarioDerivadas)
self.prop = propMarAd(DicionarioDerivadas)
    def MostraVel(self):
        """
        Returns the vessel velocity
        """
        return self.vel
    def MostraAcel(self):
        """
        Returns the vessel acceleration
        """
        return self.acel
    def MostraLeme(self):
        """
        Returns the vessel rudder angle in rad
        """
        return self.leme.MostraLeme()
    def MostraLemeCom(self):
        """
        Returns the commanded rudder angle in rad
        """
        return self.leme.MostraLemeCom()
    def MostraPos(self):
        """
        Returns the vessel position
        """
        return self.pos
    def MostraRotCom(self):
        """
        Returns the commanded propeller rotation
        """
        return self.prop.MostraRotCom()
    def MostraRot(self):
        """
        Returns the propeller rotation
        """
        return self.prop.MostraRot()
    def MostraVelCom(self):
        """
        Returns the commanded velocity
        """
        return self.uc
def MudaVelCom(self, uc):
"""
        Changes the commanded velocity
"""
self.uc = uc.copy()
self.prop.MudaVelCom(uc)
def MudaLemeCom(self, AngLeme):
"""
        Changes the commanded rudder angle of the vessel
        __________________________
        Input variables:
        AngLeme -- commanded rudder angle (rad)
"""
temp = AngLeme.copy()
self.leme.MudaLemeCom(temp)
def MudaVel(self, Velocidade):
"""
        Changes the vessel velocity
        __________________________
        Input variables:
        Velocidade -- velocity (m/s)
"""
temp = Velocidade.copy()
self.vel = temp
self.casco.MudaVel(temp)
self.leme.MudaVel(temp)
self.prop.MudaVel(temp)
def MudaPos(self, Posicao):
"""
        Changes the vessel position
        __________________________
        Input variables:
        Posicao -- position (m)
"""
temp = Posicao.copy()
self.pos = temp
self.casco.MudaPos(temp)
self.leme.MudaPos(temp)
self.prop.MudaPos(temp)
def MudaRotCom(self, Rot):
"""
        Changes the commanded propeller rotation of the vessel
"""
self.prop.MudaRotCom(Rot)
def CalcFx(self):
"""
        Computes the surge force
"""
m = self.dic['m']*(self.dic['rho']*(self.dic['lpp']**3)/2)
u = self.MostraVel()[0]
v = self.MostraVel()[1]
p = self.MostraVel()[3]
r = self.MostraVel()[5]
xg = self.dic['xg']
zg = self.dic['zg']
cori = m*(v*r + xg*(r**2) - zg*p*r)
if self.tipo == 'MARAD':
saida = (self.casco.Fx() + self.prop.Fx() +
self.leme.Fx(self.MostraRot(),
self.MostraVelCom()/self.MostraVel()[0]) + cori)
elif self.tipo == 'TP':
saida = self.casco.Fx() + self.leme.Fx() + self.prop.Fx() + cori
return saida
def CalcFy(self):
"""
        Computes the sway force
"""
m = self.dic['m']*(self.dic['rho']*(self.dic['lpp']**3)/2)
u = self.MostraVel()[0]
v = self.MostraVel()[1]
p = self.MostraVel()[3]
r = self.MostraVel()[5]
xg = self.dic['xg']
zg = self.dic['zg']
cori = -m*u*r
if self.tipo == 'MARAD':
saida = (self.casco.Fy() + self.leme.Fy(self.MostraRot()) +
self.prop.Fy() + cori)
elif self.tipo == 'TP':
saida = self.casco.Fy() + self.leme.Fy() + self.prop.Fy() + cori
return saida
def CalcK(self):
"""
        Computes the roll moment
"""
m = self.dic['m']*(self.dic['rho']*(self.dic['lpp']**3)/2)
u = self.MostraVel()[0]
v = self.MostraVel()[1]
p = self.MostraVel()[3]
r = self.MostraVel()[5]
xg = self.dic['xg']
zg = self.dic['zg']
cori = m*zg*u*r
if self.tipo == 'MARAD':
saida = (self.casco.K() + self.leme.K(self.MostraRot()) +
self.prop.K() + cori)
elif self.tipo == 'TP':
saida = self.casco.K() + self.leme.K() + self.prop.K() + cori
return saida
def CalcN(self):
"""
        Computes the yaw moment
"""
m = self.dic['m']*(self.dic['rho']*(self.dic['lpp']**3)/2)
u = self.MostraVel()[0]
v = self.MostraVel()[1]
p = self.MostraVel()[3]
r = self.MostraVel()[5]
xg = self.dic['xg']
zg = self.dic['zg']
cori = -m*xg*u*r
if self.tipo == 'MARAD':
saida = (self.casco.N() + self.leme.N(self.MostraRot()) +
self.prop.N() + cori)
elif self.tipo == 'TP':
saida = self.casco.N() + self.leme.N() + self.prop.N() + cori
return saida
def VetF(self, p=None):
"""
        Force vector
        _________________________
        Input variables:
        p -- tuple
        p[0] (integer)-- degrees of freedom
        p[1] (tuple)-- weights (optional)
"""
        if p is None:
            GrausDeLib = 4
            peso = None
        elif len(p) == 1:
            GrausDeLib = p[0]
            peso = None
        elif len(p) == 2:
            GrausDeLib = p[0]
            peso = p[1]
        if peso is None:
if GrausDeLib == 4:
saida = sp.array([self.CalcFx(), self.CalcFy(),
self.CalcK(), self.CalcN()])
elif GrausDeLib == 3:
saida = sp.array([self.CalcFx(), self.CalcFy(), self.CalcN()])
else:
lemearq = self.MostraLeme()
velarq = self.MostraVel()
uc = self.MostraVelCom()
####################
self.leme.MudaLemeDir(sp.array(0.))
            self.MudaVelCom(velarq[0])  # eta = 1 condition
## ####################
            ## The contribution that depends only on u
##
## ####################
veltemp = sp.zeros((6, 1))
veltemp[0] = velarq[0]
self.MudaVel(veltemp)
fu = self.VetF((GrausDeLib, ))
####################
veltemp = sp.zeros((6, 1))
veltemp[0] = velarq[0]
veltemp[1] = velarq[1]
self.MudaVel(veltemp)
            # rudder = 0 and eta = 1
fbeta = self.VetF((GrausDeLib, )) - fu
it = 0
fbeta1 = fbeta.copy()
for arg in peso[0]:
fbeta[it] = arg* fbeta[it]
it +=1
####################
veltemp = sp.zeros((6, 1))
veltemp[5] = velarq[5]
veltemp[0] = velarq[0]
self.MudaVel(veltemp)
fr = self.VetF((GrausDeLib, )) - fu
fr1 = fr.copy()
it = 0
for arg in peso[1]:
fr[it] = arg* fr[it]
it +=1
####################
self.leme.MudaLemeDir(lemearq)
veltemp = sp.zeros((6, 1))
veltemp[0] = velarq[0]
self.MudaVel(veltemp)
fleme = self.VetF((GrausDeLib, )) - fu
fleme1 = fleme.copy()
it = 0
for arg in peso[2]:
fleme[it] = arg* fleme[it]
it +=1
####################
self.MudaVel(velarq)
self.MudaVelCom(uc)
fbetarl = self.VetF((GrausDeLib, )) - (fbeta1 + fr1 + fleme1)
it = 0
for arg in peso[3]:
fbetarl[it] = arg* fbetarl[it]
it +=1
del it
saida = fbeta + fr + fleme + fbetarl
return saida
    def H(self, GrausDeLib=4):
        """
        Mass matrix minus the added-mass matrix
        _________________________
        Input variables:
        GrausDeLib (integer)-- degrees of freedom
        """
        H = self.casco.M(GrausDeLib) - self.casco.Ma(GrausDeLib)
return sp.mat(H)
def MatRot(self, p=None):
"""
        Returns the rotation matrix from the body-fixed frame to the
        inertial frame
"""
        if p is None:
roll= self.MostraPos()[3]
pitch = self.MostraPos()[4]
yaw = self.MostraPos()[5]
else:
roll= p[0]
pitch = p[1]
yaw = p[2]
Rot = sp.array([[sp.cos(yaw)*sp.cos(pitch),
-sp.sin(yaw)*sp.cos(roll) + sp.cos(yaw)*sp.sin(pitch)*sp.sin(roll),
sp.sin(yaw)*sp.sin(roll) + sp.cos(yaw)*sp.cos(roll)*sp.sin(pitch) ],
[sp.sin(yaw)*sp.cos(pitch),
sp.cos(yaw)*sp.cos(roll) + sp.sin(roll)*sp.sin(pitch)*sp.sin(yaw),
-sp.cos(yaw)*sp.sin(roll) + sp.sin(yaw)*sp.cos(roll)*sp.sin(pitch) ],
[-sp.sin(pitch), sp.cos(pitch)*sp.sin(roll),
sp.cos(pitch)*sp.cos(roll)] ])
Rot.shape = (3, 3)
Rot= sp.matrix(Rot)
return Rot
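    # Quick sanity check (sketch, assuming a 'ship' instance built
    # elsewhere): with zero roll, pitch and yaw, MatRot() reduces to the
    # 3x3 identity, so body-fixed and inertial linear velocities coincide:
    #
    #   ship.MudaPos(sp.zeros((6, 1)))
    #   ship.MatRot()   # -> identity matrix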
    def f2(self, VetF, H):
        """
        Computes f(x) in the equation
        x' = f(x)
        where x is the velocity vector in the body-fixed frame
        _________________________
        Input variables:
        VetF -- force vector; H -- mass matrix (see self.H())
"""
GrausDeLib = len(VetF)
if GrausDeLib == 4:
a= sp.zeros((6, 6))
a[5, 5] = 1.
a[4, 4] = 1.
a[:4, :4]= H
b= sp.zeros((6, 1))
b [4, 0] = self.vel[3]
b [5, 0] = self.vel[5]*sp.cos(self.MostraPos()[3])
b[:4, :]= VetF
elif GrausDeLib == 3:
a= sp.zeros((4, 4))
a[3, 3] = 1.
a[:3, :3]= H
b= sp.zeros((4, 1))
b[:3, :]= VetF
b[3, 0] = self.MostraVel()[5]
saida = linalg.solve(a, b )
return saida
def f(self, velocidade=None, t=None, p=(4, )):
"""
        p is a tuple whose first element is the number of degrees of freedom
"""
GrausDeLib = p[0]
        if velocidade is not None:
velarq = self.MostraVel()
posarq = self.MostraPos()
veltemp = sp.zeros((6, 1))
postemp = sp.zeros((6, 1))
if GrausDeLib==3:
veltemp[:2] = velocidade[:2]
veltemp[5] = velocidade[2]
postemp[5] = velocidade[3]
elif GrausDeLib==4:
veltemp[:2] = velocidade[:2]
veltemp[3] = velocidade[2]
veltemp[5] = velocidade[3]
postemp[3] = velocidade[4]
postemp[5] = velocidade[5]
self.MudaVel(veltemp)
self.MudaPos(postemp)
if GrausDeLib == 4:
a= sp.zeros((6, 6))
a[5, 5] = 1.
a[4, 4] = 1.
a[:4, :4]= self.H(GrausDeLib)
b= sp.zeros((6, 1))
b [4, 0] = self.vel[3]
b [5, 0] = self.vel[5]*sp.cos(self.MostraPos()[3])
b[:4, :]= self.VetF(p)
elif GrausDeLib == 3:
a= sp.zeros((4, 4))
a[3, 3] = 1.
a[:3, :3]= self.H(GrausDeLib)
b= sp.zeros((4, 1))
b[:3, :]= self.VetF(p)
b[3, 0] = self.MostraVel()[5]
saida = linalg.solve(a, b)
        if velocidade is not None:
self.MudaVel(velarq)
self.MudaPos(posarq)
return saida
def fvein(self, x, t, p):
"""
        x -- inertial position (unused in the derivative itself)
        t -- time (unused)
        p -- tuple: p[0] = (roll, pitch, yaw), p[1] = body-frame linear
        velocity vector
"""
return sp.array(self.MatRot(p[0])*p[1])
def simula (self, met='euler', t0=0., dt=0.5, t=100., GrausDeLib=4,
velocidade=None, tipo='ZigZag', leme=sp.array(20.),
proa=sp.array(20.), RotCom =sp.array(1), osa=sp.array(0.05),
ospath=sp.array(150), erro=sp.array(0.05),
errotf=sp.array(0.05), errotd=sp.array(0.05)):
"""
        Standard maneuver simulator
        _________________________
        Input variables:
        GrausDeLib (integer)-- degrees of freedom;
        met -- integration method. Default- Euler;
        t0 -- initial time;
        dt -- time step;
        t -- final time
        tipo - type of maneuver: 'ZigZag', 'Curva_de_Giro_port' or
        'Curva_de_Giro_starboard'. Default- 'ZigZag'
        __________________________
        Output:
        Tuple of sp.array
        (veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis, propHis,
        dados)
        In each array the first column is the time step and the remaining
        columns are the variables:
        veloHis -- velocity history;
        posHis -- position history
        acelHis -- acceleration history
        fHis -- force history
        veloInerHis -- velocity history in the inertial frame
        lemeHis -- rudder command history
        propHis -- propeller rotation history
        dados -- list of dictionaries with the maneuver parameters
"""
#
        # Type of simulation to be performed:
#
self.MudaPos( sp.array([ [0.], [0.], [0.], [0.], [0.], [0.] ]))
self.MudaVel(sp.array([ [self.dic['unom']], [0.], [0.], [0.], [0.],
[0.] ]))
self.MudaRotCom(RotCom)
self.MudaVelCom(self.dic['unom'])
        #log flags whether the simulation has already stored the report data
if tipo == 'Curva_de_Giro_port':
self.MudaLemeCom(sp.array(leme*sp.pi/180))
log = False
elif tipo == 'Curva_de_Giro_starboard':
self.MudaLemeCom(sp.array(-leme*sp.pi/180))
log = False
elif tipo == 'ZigZag':
self.MudaLemeCom(sp.array(leme*sp.pi/180))
exe = 0
        ###############################
        ##
        ## Allocating memory for the simulation histories
        ##
        ###############################
        nlin = len(sp.arange(t0, t, dt))   # number of time steps
        lemeHis = sp.zeros((nlin, 2))      # rudder history
        veloHis = sp.zeros((nlin, 7))      # body-frame velocity history
        veloInerHis = sp.zeros((nlin, 4))  # inertial velocity history
        posHis = sp.zeros([nlin, 7])       # position history
        fHis = sp.zeros((nlin, 5))         # force history
        acelHis = sp.zeros((nlin, 7))      # acceleration history
        propHis = sp.zeros((nlin, 2))      # propeller history
        dados = []                         # maneuver report data
        dic = {}
        PosIni = self.MostraPos().copy()
        cont = 0                           # counter
        for tp in sp.arange(t0, t, dt):
            ###############################
            ##
            ## Zig-zag maneuver data
            ##
            ###############################
            if (tipo == 'ZigZag' and (((exe % 2 == 0) and
                self.MostraPos()[5] <= -(proa*sp.pi/180)) or
                (exe % 2 != 0 and
                self.MostraPos()[5] >= (proa*sp.pi/180)))):
                self.MudaLemeCom(self.MostraLeme()*(-1))
                if exe != 0:
                    dic['reach'] = erro
                    dic['ospath'] = ospath
                    dic['osangle'] = abs(osa - dic['proa'])
                    dados.append(dic.copy())
                exe += 1
                dic['exeNummber'] = exe
                dic['time'] = tp - sp.array(dt)
                dic['path'] = self.MostraPos()[1]
                dic['proa'] = self.MostraPos()[5]
            if tipo == 'ZigZag' and exe != 0:
                if abs(self.MostraPos()[1] - dic['path']) > ospath:
                    ospath = abs(self.MostraPos()[1] - dic['path'])
                if abs(self.MostraPos()[5]) > abs(osa):
                    osa = self.MostraPos()[5]
                if abs(self.MostraPos()[5] - PosIni[5]) < erro:
                    erro = abs(self.MostraPos()[5] - PosIni[5])
            ###############################
            ##
            ## Turning-circle data
            ##
            ###############################
            if ((tipo == 'Curva_de_Giro_port' or
                 tipo == 'Curva_de_Giro_starboard') and not log):
                if (abs(abs(self.MostraPos()[5] - PosIni[5]) -
                    (sp.array(90)*sp.pi/180)) <= errotf):
                    errotf = (abs(abs(self.MostraPos()[5] - PosIni[5]) -
                              (sp.array(90)*sp.pi/180)))
                    dic['transfer'] = abs(self.MostraPos()[1] - PosIni[1])
                    dic['advance'] = abs(self.MostraPos()[0] - PosIni[0])
                if abs(abs(self.MostraPos()[5] - PosIni[5]) - sp.pi) <= errotd:
                    errotd = abs(abs(self.MostraPos()[5] - PosIni[5]) - sp.pi)
                    dic['taticalDiameter'] = abs(self.MostraPos()[1] -
                                                 PosIni[1])
                if abs(self.MostraPos()[5] - PosIni[5]) > sp.pi:
                    log = True
                    dados.append(dic)
            Rot = self.MatRot()
            #
            # VelIn = linear velocities in the inertial frame
            #
            VelIn = Rot*sp.matrix(self.vel[0:3])
            PosIne = self.MostraPos()[0:3]
            ##################################
            #
            # Storing the parameters
            #
            ##################################
            # Inertial velocity
            d = sp.hstack(VelIn)
            veloInerHis[cont, 1:] = d
            veloInerHis[cont, 0] = tp
            # Rudder history
            lemeHis[cont, 0] = tp
            lemeHis[cont, 1] = self.MostraLeme()
            # Position history
            temp = sp.hstack(self.MostraPos())
            posHis[cont, :] = sp.hstack((tp, temp))
            # Velocity history
            temp = sp.hstack(self.MostraVel())
            veloHis[cont, :] = sp.hstack((tp, temp))
            # Force history
            temp = sp.hstack(sp.array(self.VetF((GrausDeLib, ))))
            if GrausDeLib == 4:
                fHis[cont, :] = sp.hstack((tp, temp))
            elif GrausDeLib == 3:
                fHis[cont, :3] = sp.hstack((tp, temp[:2]))
                fHis[cont, 4] = temp[2]
            # Propeller history
            propHis[cont, :] = sp.hstack((tp, self.MostraRot()))
            # Acceleration history
            Acel = self.f(p=(GrausDeLib, ))
            if GrausDeLib == 4:
                vetor = sp.zeros((6, 1))
                vetor[:2] = Acel[:2]
                vetor[3] = Acel[2]
                vetor[5] = Acel[3]
            elif GrausDeLib == 3:
                vetor = sp.zeros((6, 1))
                vetor[:2] = Acel[:2]
                vetor[5] = Acel[2]
            acelHis[cont, :] = sp.hstack((tp, sp.hstack(vetor)))
            del temp
            ##############################
            #
            # Building the reduced state vector with the chosen degrees of
            # freedom
            #
            ##############################
            if GrausDeLib == 4:
                xIn = sp.zeros([6, 1])
                xIn[0] = self.MostraVel()[0]
                xIn[1] = self.MostraVel()[1]
                xIn[2] = self.MostraVel()[3]
                xIn[3] = self.MostraVel()[5]
                xIn[4] = self.MostraPos()[3]
                xIn[5] = self.MostraPos()[5]
            elif GrausDeLib == 3:
                xIn = sp.zeros([4, 1])
                xIn[0] = self.MostraVel()[0]
                xIn[1] = self.MostraVel()[1]
                xIn[2] = self.MostraVel()[5]
                xIn[3] = self.MostraPos()[5]
            ##################################
            #
            # Integrating the body-frame accelerations
            #
            ##################################
            if met == 'euler':
                xIn = self.integrador.euler(self.f, xIn, tp, dt,
                                            (GrausDeLib, ))
            elif met == 'rk4':
                xIn = self.integrador.rk4(self.f, xIn, tp, dt,
                                          (GrausDeLib, ))
            ##################################
            if GrausDeLib == 4:
                x = sp.zeros((6, 1))
                x[0] = xIn[0]
                x[1] = xIn[1]
                x[3] = xIn[2]
                x[5] = xIn[3]
            elif GrausDeLib == 3:
                x = sp.zeros((6, 1))
                x[0] = xIn[0]
                x[1] = xIn[1]
                x[5] = xIn[2]
            self.MudaVel(x)
            del x
            ##################################
            ##
            ## Integrating the inertial velocity to update the position
            ##
            ###################################
            posfutura = sp.zeros((6, 1))
            posfutura[:3] = self.integrador.euler(self.fvein, PosIne, tp, dt,
                                                  (self.MostraPos()[3:],
                                                   self.MostraVel()[0:3]))
            ##################################
            if GrausDeLib == 4:
                posfutura[3] = xIn[4]
                posfutura[5] = xIn[5]
            elif GrausDeLib == 3:
                posfutura[5] = xIn[3]
            self.MudaPos(posfutura)
            cont += 1
            del posfutura
            self.prop.MudaRot(tp)
            self.leme.MudaLeme(tp)
return (veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis, propHis,
dados)
def getCurvaGiro(self, peso=None, met='euler', t0=0., dt=0.5, t=100.,
GrausDeLib=3, tipo='port', leme=sp.array(20.),
RotCom=None, VelCom= None, Vel=None, Eta='vel',
PosIne=sp.array([[0.], [0.], [0.], [0.], [0.], [0.] ]),
errotf=sp.array(0.05), errotd=sp.array(0.05),
errosr=sp.array(0.001), saida='txt'):
"""
"""
        if RotCom is None:
            RotCom = self.dic['rotnom']
        if VelCom is None:
            VelCom = self.dic['unom']
        if Vel is None:
            Vel = sp.array([[self.dic['unom']], [0.], [0.], [0.], [0.],
                            [0.]])
self.MudaPos( PosIne)
self.MudaVel(Vel)
self.MudaRotCom(RotCom)
self.MudaVelCom(VelCom)
        #log flags whether the simulation has already stored the report data
if tipo == 'port':
self.MudaLemeCom(sp.array(leme*sp.pi/180))
log = False
log1 = False
elif tipo == 'starboard':
self.MudaLemeCom(sp.array(-leme*sp.pi/180))
log = False
log1 = False
#
        # Allocating memory to store the maneuver histories
        #
        nlin = len(sp.arange(t0, t, dt))  # number of time steps
        if saida == 'mem':
            lemeHis = sp.zeros((nlin, 2))      # rudder history
            veloHis = sp.zeros((nlin, 7))      # body-frame velocity history
            veloInerHis = sp.zeros((nlin, 4))  # inertial-frame velocity
                                               # history (check if needed)
            posHis = sp.zeros([nlin, 7])       # position history in the
                                               # inertial frame
            fHis = sp.zeros((nlin, 5))         # force history
            acelHis = sp.zeros((nlin, 7))      # acceleration history
            propHis = sp.zeros((nlin, 2))      # propeller history
            EtaHis = sp.zeros((nlin, 2))       # Eta history
            betaHis = sp.zeros((nlin, 2))      # beta history
elif saida == 'txt':
os.makedirs('./saida/CurvaGiro')
os.chdir('./saida/CurvaGiro')
lemeHis = open('leme.dat', 'w')#historico do leme
lemeHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva
Giro\n#\n')
lemeHis.write('#Valor do leme em rad\n')
lemeHis.write('#temp'.center(5) + ' ' + 'leme'.rjust(8) + ' ' +
'\n')
veloHis = open('velo.dat', 'w') #histórico da velocidade
veloHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva
Giro\n#\n')
veloHis.write('#Velocidade Sistema Solidário \n#\n')
veloHis.write('#temp'.center(5) + ' ' + 'u'.rjust(11) + ' ' +
'v'.rjust(11) + ' ' + 'w'.rjust(11) + ' ' + 'dot roll'.rjust(11) + ' ' + '
dot pitch'.rjust(11) + ' ' + 'dot yaw'.rjust(11) + ' ' + '\n')
veloInerHis = open('veloiner.dat', 'w')#histórico da velocidade no
#sistema inercial Verificar depois a necessidade
veloInerHis.write('#Navio ' + self.nome + '\n' + '#Manobra de
Curva Giro\n#\n')
veloInerHis.write('#Velocidade Inercial\n#\n')
veloInerHis.write('#temp'.center(5) + ' ' + 'u'.rjust(11) + ' ' +
'v'.rjust(11) + ' ' + 'r'.rjust(11) + '\n')
posHis = open('pos.dat', 'w')#histórico da posição no sistema
#inercial
posHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva
Giro\n#\n')
posHis.write('#Posição e Orientação\n#\n')
posHis.write('#temp'.center(5) + ' ' + 'x'.rjust(11) + ' ' +
'y'.rjust(11) + ' ' + 'z'.rjust(11) + ' ' + 'roll'.rjust(11) + ' ' +
'pitch'.rjust(11) + ' ' + 'yaw'.rjust(11) + ' ' + '\n')
fHis = open('forcas.dat', 'w') #histórico de forças
fHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva
Giro\n#\n')
fHis.write('#Forças e Momentos\n#\n')
fHis.write('#temp'.center(5) + ' ' + 'X'.rjust(11) + ' ' +
'Y'.rjust(11) + ' ' + 'K'.rjust(11) + ' ' + 'N'.rjust(11) + ' ' + '\n')
acelHis = open('acel.dat', 'w') # acceleration history
acelHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Giro\n#\n')
acelHis.write('#Aceleração\n#\n')
acelHis.write('#temp'.center(5) + ' ' + 'u'.rjust(11) + ' ' +
'v'.rjust(11) + ' ' + 'w'.rjust(11) + ' ' + 'ddotroll'.rjust(11) + ' ' +
' ddotpitch'.rjust(11) + ' ' + 'ddotyaw'.rjust(11) + ' ' + '\n')
propHis = open('propulsor.dat', 'w') # engine history
propHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Giro\n#\n')
propHis.write('#Rotações do propulsor\n#\n')
propHis.write('#temp'.center(5) + ' ' + 'rot'.rjust(8) + '\n')
EtaHis = open('Eta.dat', 'w') # Eta history
EtaHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Giro\n#\n')
EtaHis.write('#Eta \n#\n')
EtaHis.write('#temp'.center(5) + ' ' + 'rot'.rjust(8) + ' ' + '\n')
betaHis = open('beta.dat', 'w') # beta history
betaHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Giro\n#\n')
betaHis.write('#Beta \n#\n')
betaHis.write('#temp'.center(5) + ' ' + 'rot'.rjust(8) + ' ' + '\n')
os.chdir('..')
os.chdir('..')
dados = []
dic = {}
PosIni = self.MostraPos().copy()
del nlin # no longer needed
cont = 0 # counter
if peso is None:
par = (GrausDeLib, )
else:
par = (GrausDeLib, peso)
#
# Iteration
#
for tp in sp.arange(t0, t, dt):
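# The last four speed samples V1..V4 form a sliding window; once their
# standard deviation drops below errosr the turn is taken as steady and
# the steady turning rate is recorded (see the stats.tstd test below).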
if not log1:
if cont == 0:
V1 = sp.sqrt(self.MostraVel()[0]**2 +
self.MostraVel()[1]**2)
elif cont == 1:
V2 = sp.sqrt(self.MostraVel()[0]**2 +
self.MostraVel()[1]**2)
elif cont == 2:
V3 = sp.sqrt(self.MostraVel()[0]**2 +
self.MostraVel()[1]**2)
elif cont == 3:
V4 = sp.sqrt(self.MostraVel()[0]**2 +
self.MostraVel()[1]**2)
else:
V1 = V2
V2 = V3
V3 = V4
V4 = sp.sqrt(self.MostraVel()[0]**2 +
self.MostraVel()[1]**2)
if log:
if stats.tstd((V1, V2, V3, V4)) < errosr:
dic['steadytr'] = (sp.sqrt(self.MostraVel()[0]**2 +
self.MostraVel()[1]**2) /
self.MostraVel()[5])
dados.append(dic.copy())
log1 = True
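# Transfer and advance are recorded when the heading has changed by 90
# degrees, the tactical diameter when it has changed by 180 degrees,
# within the tolerances errotf and errotd.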
if not log:
if (abs(abs(self.MostraPos()[5] - PosIni[5]) - (sp.pi/2))
<= errotf):
errotf = (abs(abs(self.MostraPos()[5] - PosIni[5]) -
(sp.pi/2)))
dic['transfer'] = abs(self.MostraPos()[1] - PosIni[1])
dic['advance'] = abs(self.MostraPos()[0] - PosIni[0])
if (abs(abs(self.MostraPos()[5] - PosIni[5]) - sp.pi) <=
errotd):
errotd = abs(abs(self.MostraPos()[5] - PosIni[5]) -
sp.pi)
dic['taticalDiameter'] = abs(self.MostraPos()[1] -
PosIni[1])
if abs(self.MostraPos()[5] - PosIni[5]) > sp.pi:
log = True
###################################
ft = self.VetF(par)
###################################
##
## inc = linear velocities in the inertial frame
##
###################################
MatRot = self.MatRot()
VelIn = sp.array(MatRot*self.MostraVel()[0:3])
PosIne = self.MostraPos()[0:3]
##################################
##
## Storing the parameters
##
##################################
# Inertial velocity
if saida == 'txt':
veloInerHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in VelIn:
veloInerHis.write('%.5e'.rjust(11)%(arg) + ' ')
veloInerHis.write('\n')
elif saida == 'mem':
d = sp.hstack(VelIn)
veloInerHis[cont, 1:] = d #
veloInerHis[cont, 0] = tp #
# Rudder history
if saida == 'txt':
lemeHis.write('%.2f'.rjust(5)%(tp) + ' ')
lemeHis.write('%.2f'.rjust(5)%(self.MostraLeme()) + '\n')
elif saida == 'mem':
lemeHis[cont, 0] = tp
lemeHis[cont, 1] = self.MostraLeme()
# Position history
if saida == 'txt':
posHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in self.MostraPos():
posHis.write('%.5e'.rjust(11)%(arg) + ' ')
posHis.write('\n')
elif saida == 'mem':
temp = sp.hstack(self.MostraPos())
posHis[cont, :] = sp.hstack((tp, temp))
del temp
# Velocity history
if saida == 'txt':
veloHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in self.MostraVel():
veloHis.write('%.5e'.rjust(11)%(arg) + ' ')
veloHis.write('\n')
elif saida == 'mem':
temp = sp.hstack(self.MostraVel())
veloHis[cont, :] = sp.hstack((tp, temp))
del temp
# Force history
if saida == 'txt':
temp = sp.zeros((4, 1))
if GrausDeLib == 4:
temp = ft
elif GrausDeLib == 3:
temp[:2] = ft[:2]
temp[3] = ft[2]
fHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in temp:
fHis.write('%.5e'.rjust(11)%(arg) + ' ')
fHis.write('\n')
elif saida == 'mem':
temp = sp.hstack(sp.array(ft))
if GrausDeLib == 4:
fHis[cont, :] = sp.hstack((tp, temp))
elif GrausDeLib == 3:
fHis[cont, :3] = sp.hstack((tp, temp[:2]))
fHis[cont, 4] = temp[2]
# Propeller history
if saida == 'txt':
propHis.write('%.2f'.rjust(5)%(tp) + ' ')
propHis.write('%.2f'.rjust(5)%self.MostraRot() + '\n')
elif saida == 'mem':
propHis[cont, :] = sp.hstack((tp, self.MostraRot()))
# Eta history
if saida == 'txt':
EtaHis.write('%.2f'.rjust(5)%(tp) + ' ')
if Eta == 'rot':
EtaHis.write('%.2f'.rjust(5) % (self.MostraRotCom() /
self.MostraRot()) + '\n')
elif Eta == 'vel':
EtaHis.write('%.2f'.rjust(5) %
(self.MostraVelCom() / self.MostraVel()[0]) +
'\n')
elif saida == 'mem':
if Eta == 'rot':
EtaHis[cont, :] = sp.hstack((tp, self.MostraRotCom() /
self.MostraRot()))
elif Eta == 'vel':
EtaHis[cont, :] = sp.hstack((tp,
self.MostraVelCom() /
self.MostraVel()[0]))
# Beta history
if saida == 'txt':
betaHis.write('%.2f'.rjust(5)%(tp) + ' ')
betaHis.write('%.2f'.rjust(5)%(sp.arctan(-self.MostraVel()[1]
/ self.MostraVel()[0])) + '\n')
elif saida == 'mem':
betaHis[cont, :] = sp.hstack((tp,
sp.arctan(-self.MostraVel()[1] /
self.MostraVel()[0])))
# Acceleration history
Acel = self.f2(ft, self.H(GrausDeLib))
vetor = sp.zeros((6, 1))
if GrausDeLib == 4:
vetor[:2] = Acel[:2]
vetor[3] = Acel[2]
vetor[5] = Acel[3]
elif GrausDeLib == 3:
vetor[:2] = Acel[:2]
vetor[5] = Acel[2]
if saida == 'txt':
acelHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in vetor:
acelHis.write('%.5e'.rjust(11)%(arg[0]) + ' ')
acelHis.write('\n')
elif saida == 'mem':
acelHis[cont, :] = sp.hstack((tp, sp.hstack(vetor)))
del vetor
##############################
#
# Building the degrees-of-freedom state vector
#
##############################
if GrausDeLib == 4:
vt = sp.zeros([6, 1])
vt[0] = self.MostraVel()[0]
vt[1] = self.MostraVel()[1]
vt[2] = self.MostraVel()[3]
vt[3] = self.MostraVel()[5]
vt[4] = self.MostraPos()[3]
vt[5] = self.MostraPos()[5]
elif GrausDeLib == 3:
vt = sp.zeros([4, 1])
vt[0] = self.MostraVel()[0]
vt[1] = self.MostraVel()[1]
vt[2] = self.MostraVel()[5]
vt[3] = self.MostraPos()[5]
##################################
##
## Integration of the body-fixed acceleration
##
##################################
if met == 'euler':
vt = self.integrador.euler(self.f, vt, tp, dt, par)
elif met == 'rk4':
vt = self.integrador.rk4(self.f, vt, tp, dt, par)
##################################
if GrausDeLib == 4:
v = sp.zeros((6, 1))
v[0] = vt[0]
v[1] = vt[1]
v[3] = vt[2]
v[5] = vt[3]
elif GrausDeLib == 3:
v = sp.zeros((6, 1))
v[0] = vt[0]
v[1] = vt[1]
v[5] = vt[2]
self.MudaVel(v)
del v
##################################
##
## Integration of the inertial velocity
##
###################################
x = sp.zeros((6, 1))
if met == 'euler':
x[:3] = self.integrador.euler(self.fvein ,
self.MostraPos()[:3], tp, dt ,
(self.MostraPos()[3:] ,
self.MostraVel()[:3]))
elif met == 'rk4':
x[:3] = self.integrador.rk4(self.fvein, self.MostraPos()[:3],
tp, dt, (self.MostraPos()[3:],
self.MostraVel()[:3]))
##################################
if GrausDeLib == 4:
x[3] = vt[4]
x[5] = vt[5]
elif GrausDeLib == 3:
x[5] = vt[3]
self.MudaPos(x)
del x
cont += 1
self.prop.MudaRot(tp)
self.leme.MudaLeme(tp)
if saida == 'txt':
arq = [veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis,
propHis, EtaHis]
for arg in arq:
arg.close()
return dados
elif saida == 'mem':
return (veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis,
propHis, EtaHis, dados, betaHis)
def getCurvaZigZag(self, peso=None, met='euler', t0=0., dt=0.5, t=100.,
GrausDeLib=3, tipo='port', leme=sp.array(20.),
RotCom=None, VelCom=None, Vel=None,
proa=sp.array([20.]), Eta='vel',
PosIne=sp.array([[0.], [0.], [0.], [0.], [0.], [0.]]),
osa=sp.array(0.0), ospath=sp.array(0.0),
erro=sp.array(0.005), saida='txt'):
"""
Simulador de manobras padrão
_________________________
Variáveis de entrada:
GrausDeLib (integer)-- Graus de liberdade;
met -- Método de integração. Default- Euler;
t0 -- Tempo inicial;
dt -- Passo no tempo;
t -- Tempo final
tipo - tipo de manobra simulada. Zig-Zag10/10 e Curva_de_Giro_port ou
Curva_de_Giro_starboard . Default -Zig-Zag
__________________________
Saída:
Tupla de sp.array
(veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis)
Em cada elemento da tupla:
A primeira coluna é o passo de tempo e as demais são as variáveis
veloHis -- histórico de velocidades;
posHis -- histórico de posições
acelHis --- histórico de acelerações
fHis -- histórico de forças
veloInerHis -- histórico de velocidades no sistema inercial
lemeHis -- histórico do comando de leme
"""
if RotCom is None:
RotCom = self.dic['rotnom']
if VelCom is None:
VelCom = self.dic['unom']
if Vel is None:
Vel = sp.array([[self.dic['unom']], [0.], [0.], [0.], [0.], [0.]])
self.MudaPos( PosIne)
self.MudaVel(Vel)
self.MudaRotCom(RotCom)
self.MudaVelCom(VelCom)
if tipo == 'port':
self.MudaLemeCom(sp.array(leme*sp.pi/180))
exe = 0
elif tipo == 'starboard':
self.MudaLemeCom(sp.array(-leme*sp.pi/180))
exe = 1
#
# Allocating memory to store the manoeuvre parameters
#
# number of rows of the columns to be created
nlin = len(sp.arange(t0, t, dt))
if saida == 'mem':
lemeHis = sp.zeros((nlin, 2)) # rudder history
veloHis = sp.zeros((nlin, 7)) # velocity history
veloInerHis = sp.zeros((nlin, 4)) # velocity history in the inertial
# frame (check the need for this later)
posHis = sp.zeros([nlin, 7]) # position history in the inertial frame
fHis = sp.zeros((nlin, 5)) # force history
acelHis = sp.zeros((nlin, 7)) # acceleration history
propHis = sp.zeros((nlin, 2)) # engine history
EtaHis = sp.zeros((nlin, 2)) # Eta history
elif saida == 'txt':
os.makedirs('./saida/ZigZag')
os.chdir('./saida/ZigZag')
lemeHis = open('leme.dat', 'w') # rudder history
lemeHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
lemeHis.write('#Valor do leme em rad\n')
lemeHis.write('#temp'.center(5) + ' ' + 'leme'.rjust(8) + ' ' + '\n')
veloHis = open('velo.dat', 'w') # velocity history
veloHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
veloHis.write('#Velocidade Sistema Solidário \n#\n')
veloHis.write('#temp'.center(5) + ' ' + 'u'.rjust(11) + ' ' +
'v'.rjust(11) + ' ' + 'w'.rjust(11) + ' ' + 'dot roll'.rjust(11) + ' ' +
' dot pitch'.rjust(11) + ' ' + 'dot yaw'.rjust(11) + ' ' + '\n')
veloInerHis = open('veloiner.dat', 'w') # velocity history in the
# inertial frame (check the need for this later)
veloInerHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
veloInerHis.write('#Velocidade Inercial\n#\n')
veloInerHis.write('#temp'.center(5) + ' ' + 'u'.rjust(11) + ' ' +
'v'.rjust(11) + ' ' + 'r'.rjust(11) + '\n')
posHis = open('pos.dat', 'w') # position history in the inertial frame
posHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
posHis.write('#Posição e Orientação\n#\n')
posHis.write('#temp'.center(5) + ' ' + 'x'.rjust(11) + ' ' +
'y'.rjust(11) + ' ' + 'z'.rjust(11) + ' ' + 'roll'.rjust(11) + ' ' +
'pitch'.rjust(11) + ' ' + 'yaw'.rjust(11) + ' ' + '\n')
fHis = open('forcas.dat', 'w') # force history
fHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
fHis.write('#Forças e Momentos\n#\n')
fHis.write('#temp'.center(5) + ' ' + 'X'.rjust(11) + ' ' +
'Y'.rjust(11) + ' ' + 'K'.rjust(11) + ' ' + 'N'.rjust(11) + ' ' + '\n')
acelHis = open('acel.dat', 'w') # acceleration history
acelHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
acelHis.write('#Aceleração\n#\n')
acelHis.write('#temp'.center(5) + ' ' + 'u'.rjust(11) + ' ' +
'v'.rjust(11) + ' ' + 'w'.rjust(11) + ' ' + 'ddotroll'.rjust(11) + ' ' +
' ddotpitch'.rjust(11) + ' ' + 'ddotyaw'.rjust(11) + ' ' + '\n')
propHis = open('propulsor.dat', 'w') # engine history
propHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
propHis.write('#Rotações do propulsor\n#\n')
propHis.write('#temp'.center(5) + ' ' + 'rot'.rjust(8) + '\n')
EtaHis = open('Eta.dat', 'w') # Eta history
EtaHis.write('#Navio ' + self.nome + '\n' + '#Manobra de Curva Zig-Zag\n#\n')
EtaHis.write('#Eta \n#\n')
EtaHis.write('#temp'.center(5) + ' ' + 'rot'.rjust(8) + ' ' + '\n')
os.chdir('..')
os.chdir('..')
dados = []
dic = {}
PosIni = self.MostraPos().copy()
del nlin # no longer needed
cont = 0 # counter
if peso is None:
par = (GrausDeLib, )
else:
par = (GrausDeLib, peso)
#
# Iteration
#
for tp in sp.arange(t0, t, dt):
###############################
##
## Checking the moment at which the rudder change takes place
##
###############################
if (((exe%2 == 0) and self.MostraPos()[5] <=
-(proa * sp.pi / 180)) or (exe%2 != 0 and
self.MostraPos()[5] >= (proa * sp.pi / 180))):
self.MudaLemeCom(self.MostraLeme() * (-1))
if ((exe != 0 and tipo == 'port') or (exe != 1
and tipo == 'starboard')):
dic['reach'] = erro
dic['ospath'] = ospath
dic['osangle'] = osa
dados.append(dic.copy())
osa = sp.array(0.0)
ospath = sp.array(0)
erro = sp.array(0.05)
logospath = False
logosa = False
exe += 1
if tipo == 'port':
dic['exeNummber'] = exe
elif tipo == 'starboard':
dic['exeNummber'] = exe - 1
dic['time'] = tp - sp.array(dt)
dic['path'] = self.MostraPos()[1]
dic['proa'] = self.MostraPos()[5]
###############################
##
## Updating the parameters
##
###############################
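# ospath and osa track the overshoot in path and heading after each
# rudder reversal: they keep growing while the response still moves away
# from the switching value and freeze once it turns back
# (logospath/logosa flags).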
if ((exe != 0 and tipo == 'port') or
(exe != 1 and tipo == 'starboard')):
if (not logospath and
(abs(self.MostraPos()[1] - dic['path']) >= ospath)):
ospath = abs(self.MostraPos()[1] - dic['path'])
else:
logospath = True
if (not logosa and
(abs(self.MostraPos()[5] - dic['proa']) >= osa)):
osa = abs(self.MostraPos()[5] - dic['proa'])
else:
logosa = True
if abs(abs(self.MostraPos()[5]) - abs(PosIni[5])) < erro:
erro = abs(self.MostraPos()[5] - PosIni[5])
#
# inc = linear velocities in the inertial frame
#
MatRot = self.MatRot()
VelIn = MatRot * sp.matrix(self.vel[0:3])
PosIne = self.MostraPos()[0:3]
###################################
#################################
##
## Modular computation of the forces
##
###################################
ft = self.VetF(par)
##################################
##################################
##
## Storing the parameters
##
##################################
# Inertial velocity
if saida == 'txt':
veloInerHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in VelIn:
veloInerHis.write('%.5e'.rjust(11)%(arg) + ' ')
veloInerHis.write('\n')
elif saida == 'mem':
d = sp.hstack(VelIn)
veloInerHis[cont, 1:] = d #
veloInerHis[cont, 0] = tp #
# Rudder history
if saida == 'txt':
lemeHis.write('%.2f'.rjust(5)%(tp) + ' ')
lemeHis.write('%.2f'.rjust(5)%(self.MostraLeme()) + '\n')
elif saida == 'mem':
lemeHis[cont, 0] = tp
lemeHis[cont, 1] = self.MostraLeme()
# Position history
if saida == 'txt':
posHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in self.MostraPos():
posHis.write('%.5e'.rjust(11)%(arg) + ' ')
posHis.write('\n')
elif saida == 'mem':
temp = sp.hstack(self.MostraPos())
posHis[cont, :] = sp.hstack((tp, temp))
del temp
# Velocity history
if saida == 'txt':
veloHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in self.MostraVel():
veloHis.write('%.5e'.rjust(11)%(arg) + ' ')
veloHis.write('\n')
elif saida == 'mem':
temp = sp.hstack(self.MostraVel())
veloHis[cont, :] = sp.hstack((tp, temp))
del temp
# Force history
if saida == 'txt':
temp = sp.zeros((4, 1))
if GrausDeLib == 4:
temp = ft
elif GrausDeLib == 3:
temp[:2] = ft[:2]
temp[3] = ft[2]
fHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in temp:
fHis.write('%.5e'.rjust(11)%(arg) + ' ')
fHis.write('\n')
elif saida == 'mem':
temp = sp.hstack(sp.array(ft))
if GrausDeLib == 4:
fHis[cont, :] = sp.hstack((tp, temp))
elif GrausDeLib == 3:
fHis[cont, :3] = sp.hstack((tp, temp[:2]))
fHis[cont, 4] = temp[2]
# Propeller history
if saida == 'txt':
propHis.write('%.2f'.rjust(5)%(tp) + ' ')
propHis.write('%.2f'.rjust(5)%self.MostraRot() + '\n')
elif saida == 'mem':
propHis[cont, :] = sp.hstack((tp, self.MostraRot()))
# Eta history
if saida == 'txt':
EtaHis.write('%.2f'.rjust(5)%(tp) + ' ')
if Eta == 'rot':
EtaHis.write('%.2f'.rjust(5) % (self.MostraRotCom() /
self.MostraRot()) + '\n')
elif Eta == 'vel':
EtaHis.write('%.2f'.rjust(5) % (self.MostraVelCom() /
self.MostraVel()[0]) + '\n')
elif saida == 'mem':
if Eta == 'rot':
EtaHis[cont, :] = sp.hstack((tp, self.MostraRotCom() /
self.MostraRot()))
elif Eta == 'vel':
EtaHis[cont, :] = sp.hstack((tp, self.MostraVelCom() /
self.MostraVel()[0]))
# Acceleration history
Acel = self.f2(ft, self.H(GrausDeLib))
vetor = sp.zeros((6, 1))
if GrausDeLib == 4:
vetor[:2] = Acel[:2]
vetor[3] = Acel[2]
vetor[5] = Acel[3]
elif GrausDeLib == 3:
vetor[:2] = Acel[:2]
vetor[5] = Acel[2]
if saida == 'txt':
acelHis.write('%.2f'.rjust(5)%(tp) + ' ')
for arg in vetor:
acelHis.write('%.5e'.rjust(11)%(arg[0]) + ' ')
acelHis.write('\n')
elif saida == 'mem':
acelHis[cont, :] = sp.hstack((tp, sp.hstack(vetor)))
del vetor
##############################
##
## Building the degrees-of-freedom state vector
##
##############################
if GrausDeLib == 4:
vt = sp.zeros([6, 1])
vt[0] = self.MostraVel()[0]
vt[1] = self.MostraVel()[1]
vt[2] = self.MostraVel()[3]
vt[3] = self.MostraVel()[5]
vt[4] = self.MostraPos()[3]
vt[5] = self.MostraPos()[5]
elif GrausDeLib == 3:
vt = sp.zeros([4, 1])
vt[0] = self.MostraVel()[0]
vt[1] = self.MostraVel()[1]
vt[2] = self.MostraVel()[5]
vt[3] = self.MostraPos()[5]
##################################
##
## Integration of the body-fixed acceleration
##
##################################
if met == 'euler':
vt = self.integrador.euler(self.f, vt, tp, dt, par)
elif met == 'rk4':
vt = self.integrador.rk4(self.f, vt, tp, dt, par)
##################################
if GrausDeLib == 4:
v = sp.zeros((6, 1))
v[0] = vt[0]
v[1] = vt[1]
v[3] = vt[2]
v[5] = vt[3]
elif GrausDeLib == 3:
v = sp.zeros((6, 1))
v[0] = vt[0]
v[1] = vt[1]
v[5] = vt[2]
self.MudaVel(v)
del v
##################################
##
## Integration of the inertial velocity
##
###################################
x = sp.zeros((6, 1))
if met == 'euler':
x[:3] = self.integrador.euler(self.fvein, self.MostraPos()[:3],
tp, dt, (self.MostraPos()[3:],
self.MostraVel()[:3]))
elif met == 'rk4':
x[:3] = self.integrador.rk4(self.fvein, self.MostraPos()[:3],
tp, dt, (self.MostraPos()[3:],
self.MostraVel()[:3]))
##################################
if GrausDeLib == 4:
x[3] = vt[4]
x[5] = vt[5]
elif GrausDeLib == 3:
x[5] = vt[3]
self.MudaPos(x)
cont += 1
del x
self.prop.MudaRot(tp)
self.leme.MudaLeme(tp)
if saida == 'txt':
arq = [veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis,
propHis, EtaHis]
for arg in arq:
arg.close()
return dados
elif saida == 'mem':
return (veloHis, posHis, acelHis, fHis, veloInerHis, lemeHis,
propHis, EtaHis, dados)
def simulaTestb(self, p, intervalo=sp.array(5.), V=None):
"""
Retorna uma matrix com o valor das forças variando de acordo com que
varia a velocidade
u= Vcos(beta) v = Vsen(beta) com beta variando de 0 a 180 graus em um
intervalo = intervalo
"""
if V is None:
V = self.dic['unom']
Velocidade = sp.zeros((6, 1))
saida = sp.zeros([len( sp.arange(0., sp.pi, intervalo * sp.pi / 180)),
5])
contlinha = 0
for beta in sp.arange(0., sp.pi, intervalo * sp.pi / 180):
Velocidade[0] = sp.array(V) * sp.cos(beta)
Velocidade[1] = -sp.array(V) * sp.sin(beta)
self.MudaVelCom(Velocidade[0]) # condition that forces \eta = 1
self.MudaVel(Velocidade)
v = sp.sqrt(Velocidade[0] ** 2 + Velocidade[1] ** 2)
rho = self.dic['rho']
lpp = self.dic['lpp']
vetF = self.VetF((4, p))
# vetF = sp.hstack(vetF)
saida[contlinha, :] = sp.hstack([beta, vetF[0] * (2 / (rho * (lpp *
(v ** 2)))), vetF[1] * (2 / (rho *
(lpp* (v ** 2)))), vetF[2] *
(2 / (rho * ((lpp * v) ** 2))),
vetF[3] * (2 / (rho * ((lpp * v) **
2)))])
contlinha += 1
return saida
| gpl-3.0 | -2,103,145,341,740,217,300 | 33.131995 | 79 | 0.426835 | false | 3.087203 | false | false | false |
CCS-Lab/hBayesDM | Python/hbayesdm/models/_dd_hyperbolic_single.py | 1 | 9923 | from typing import Sequence, Union, Any
from collections import OrderedDict
from numpy import Inf, exp
import pandas as pd
from hbayesdm.base import TaskModel
from hbayesdm.preprocess_funcs import dd_single_preprocess_func
__all__ = ['dd_hyperbolic_single']
class DdHyperbolicSingle(TaskModel):
def __init__(self, **kwargs):
super().__init__(
task_name='dd',
model_name='hyperbolic',
model_type='single',
data_columns=(
'subjID',
'delay_later',
'amount_later',
'delay_sooner',
'amount_sooner',
'choice',
),
parameters=OrderedDict([
('k', (0, 0.1, 1)),
('beta', (0, 1, 5)),
]),
regressors=OrderedDict([
]),
postpreds=['y_pred'],
parameters_desc=OrderedDict([
('k', 'discounting rate'),
('beta', 'inverse temperature'),
]),
additional_args_desc=OrderedDict([
]),
**kwargs,
)
_preprocess_func = dd_single_preprocess_func
def dd_hyperbolic_single(
data: Union[pd.DataFrame, str, None] = None,
niter: int = 4000,
nwarmup: int = 1000,
nchain: int = 4,
ncore: int = 1,
nthin: int = 1,
inits: Union[str, Sequence[float]] = 'vb',
ind_pars: str = 'mean',
model_regressor: bool = False,
vb: bool = False,
inc_postpred: bool = False,
adapt_delta: float = 0.95,
stepsize: float = 1,
max_treedepth: int = 10,
**additional_args: Any) -> TaskModel:
"""Delay Discounting Task - Hyperbolic Model
Individual Bayesian Modeling of the Delay Discounting Task
using Hyperbolic Model [Mazur1987]_ with the following parameters:
"k" (discounting rate), "beta" (inverse temperature).
.. [Mazur1987] Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement.
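    As a sketch of the underlying model (a summary, not quoted from the
    Stan code): the subjective value of a delayed amount is assumed to
    follow SV = amount / (1 + k * delay), and the probability of choosing
    the later option is given by a logistic (softmax) rule on the
    difference in subjective values, scaled by the inverse temperature
    "beta".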
User data should contain the behavioral data-set of all subjects of interest for
the current analysis. When loading from a file, the datafile should be a
**tab-delimited** text file, whose rows represent trial-by-trial observations
and columns represent variables.
For the Delay Discounting Task, there should be 6 columns of data
with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be
in this particular order; however, it is necessary that they be labeled
correctly and contain the information below:
- "subjID": A unique identifier for each subject in the data-set.
- "delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28).
- "amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
- "delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0).
- "amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10).
- "choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.
.. note::
User data may contain other columns of data (e.g. ``ReactionTime``,
``trial_number``, etc.), but only the data within the column names listed
above will be used during the modeling. As long as the necessary columns
mentioned above are present and labeled correctly, there is no need to
remove other miscellaneous data columns.
.. note::
``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that
only advanced users change the default values, as alterations can profoundly
change the sampler's behavior. See [Hoffman2014]_ for more information on the
sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
Parameters' of the `Stan User's Guide and Reference Manual`__.
.. [Hoffman2014]
Hoffman, M. D., & Gelman, A. (2014).
The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
Journal of Machine Learning Research, 15(1), 1593-1623.
__ https://mc-stan.org/users/documentation/
Parameters
----------
data
Data to be modeled. It should be given as a Pandas DataFrame object,
a filepath for a data file, or ``"example"`` for example data.
Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice".
niter
Number of iterations, including warm-up. Defaults to 4000.
nwarmup
Number of iterations used for warm-up only. Defaults to 1000.
``nwarmup`` is a numerical value that specifies how many MCMC samples
should not be stored upon the beginning of each chain. For those
familiar with Bayesian methods, this is equivalent to burn-in samples.
Due to the nature of the MCMC algorithm, initial values (i.e., where the
sampling chains begin) can have a heavy influence on the generated
posterior distributions. The ``nwarmup`` argument can be set to a
higher number in order to curb the effects that initial values have on
the resulting posteriors.
nchain
Number of Markov chains to run. Defaults to 4.
``nchain`` is a numerical value that specifies how many chains (i.e.,
independent sampling sequences) should be used to draw samples from
the posterior distribution. Since the posteriors are generated from a
sampling process, it is good practice to run multiple chains to ensure
that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for
convergence by running the following line of code:
.. code:: python
output.plot(type='trace')
ncore
Number of CPUs to be used for running. Defaults to 1.
nthin
Every ``nthin``-th sample will be used to generate the posterior
distribution. Defaults to 1. A higher number can be used when
auto-correlation within the MCMC sampling is high.
``nthin`` is a numerical value that specifies the "skipping" behavior
of the MCMC sampler. That is, only every ``nthin``-th sample is used to
generate posterior distributions. By default, ``nthin`` is equal to 1,
meaning that every sample is used to generate the posterior.
inits
String or list specifying how the initial values should be generated.
Options are ``'fixed'`` or ``'random'``, or your own initial values.
ind_pars
String specifying how to summarize the individual parameters.
Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
model_regressor
Whether to export model-based regressors. Currently not available for this model.
vb
Whether to use variational inference to approximately draw from a
posterior distribution. Defaults to ``False``.
inc_postpred
Include trial-level posterior predictive simulations in
model output (may greatly increase file size). Defaults to ``False``.
adapt_delta
Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See note below.
stepsize
Integer value specifying the size of each leapfrog step that the MCMC sampler
can take on each new iteration. See note below.
max_treedepth
Integer value specifying how many leapfrog steps the MCMC sampler can take
on each new iteration. See note below.
**additional_args
Not used for this model.
Returns
-------
model_data
An ``hbayesdm.TaskModel`` instance with the following components:
- ``model``: String value that is the name of the model ('dd_hyperbolic_single').
- ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
(as specified by ``ind_pars``) for each subject.
- ``par_vals``: OrderedDict holding the posterior samples over different parameters.
- ``fit``: A PyStan StanFit object that contains the fitted Stan model.
- ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
as specified by the user.
Examples
--------
.. code:: python
from hbayesdm import rhat, print_fit
from hbayesdm.models import dd_hyperbolic_single
# Run the model and store results in "output"
output = dd_hyperbolic_single(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4)
# Visually check convergence of the sampling chains (should look like "hairy caterpillars")
output.plot(type='trace')
# Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
output.plot()
# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output, less=1.1)
# Show the LOOIC and WAIC model fit estimates
print_fit(output)
"""
return DdHyperbolicSingle(
data=data,
niter=niter,
nwarmup=nwarmup,
nchain=nchain,
ncore=ncore,
nthin=nthin,
inits=inits,
ind_pars=ind_pars,
model_regressor=model_regressor,
vb=vb,
inc_postpred=inc_postpred,
adapt_delta=adapt_delta,
stepsize=stepsize,
max_treedepth=max_treedepth,
**additional_args)
| gpl-3.0 | 7,724,618,514,999,984,000 | 40.518828 | 145 | 0.640331 | false | 4.244226 | false | false | false |
kashev/pysc | util/anagram_dict_builder.py | 1 | 1580 | #!/usr/bin/env python3
# pysc
# Kashev Dalmia | @kashev | [email protected]
# anagram_dict_builder.py
""" A script which builds an anagram dictionary from a dictionary. """
# Credit: Jeff Knupp
# https://github.com/jeffknupp/presser/blob/master/make_anagrams.py
import collections
import os
import string
def build_anagram_dict(infile, outfile):
with open(infile, 'r') as file_handle:
words = collections.defaultdict(list)
letters = set(string.ascii_lowercase + '\n')
for word in file_handle:
# Check to see if the word contains only letters in the set
# (no apostrophes, only an issue if using a poor dictionary)
# and that the word is of a reasonable length
if len(set(word) - letters) == 0 and len(word) < 20:
word = word.strip()
letter_key = ''.join(sorted(word))
words[letter_key].append(word)
anagram_dictionary = [' '.join([key] + value)
for key, value in words.items()]
anagram_dictionary.sort()
with open(outfile, 'w') as file_handle:
file_handle.write('\n'.join(anagram_dictionary))
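# Example of the resulting file format (hypothetical words): each line is
# the sorted letter key followed by all words sharing those letters, e.g.
# "aest east eats sate seat teas"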
def main():
""" main function. """
# Change to script directory.
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
for sd in ["sowpods", "twl", "wwf"]:
infile = '../dict/{}.txt'.format(sd)
outfile = '../dict/{}_anagram.txt'.format(sd)
build_anagram_dict(infile, outfile)
if __name__ == '__main__':
main()
| mit | 3,450,408,199,887,984,000 | 31.244898 | 73 | 0.605063 | false | 3.487859 | false | false | false |
guillaume-havard/testdjango | sitetest/stats/middleware.py | 1 | 1123 | from django.db.models import F
from stats.models import Page
class StatsMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
""" Incrémente le nombre de page vues à chaque appel de vues """
try:
# Fetch the counter tied to the page and increment it
p = Page.objects.get(url=request.path)
p.nb_visites = F('nb_visites') + 1
p.save()
except Page.DoesNotExist:
# A new counter defaulting to 1 is created
Page(url=request.path).save()
def process_response(self, request, response):
""" Affiche le nombre de fois que la page a été vue """
if response.status_code == 200:
p = Page.objects.get(url=request.path)
response.content += bytes(
"Cette page a été vue {0} fois.".format(p.nb_visites), "utf8")
return response
# Here the F object lets the increment run directly in the database (faster)
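# A rough sketch of the SQL the F() expression boils down to (illustrative,
# not the exact statement Django emits):
# UPDATE stats_page SET nb_visites = nb_visites + 1 WHERE url = '/some/path';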
# Note: normally you should not modify the response content in a middleware! | mit | -3,092,915,929,755,777,500 | 39.925926 | 92 | 0.631341 | false | 3.275964 | false | false | false
priyom/priyomdb | Schema/Patches/patch_3.py | 1 | 1691 | """
File name: patch_3.py
This file is part of: priyomdb
LICENSE
The contents of this file are subject to the Mozilla Public License
Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at
http://www.mozilla.org/MPL/
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations under
the License.
Alternatively, the contents of this file may be used under the terms of
the GNU General Public license (the "GPL License"), in which case the
provisions of GPL License are applicable instead of those above.
FEEDBACK & QUESTIONS
For feedback and questions about priyomdb please e-mail one of the
authors:
Jonas Wielicki <[email protected]>
"""
def apply(store):
statements = [
"""CREATE TABLE `eventClass` (
`ID` INT NOT NULL AUTO_INCREMENT,
`Title` VARCHAR(255) NOT NULL COMMENT 'title of the event class',
PRIMARY KEY (`ID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8""",
"""CREATE TABLE `events` (
`ID` INT NOT NULL AUTO_INCREMENT,
`Created` BIGINT NOT NULL COMMENT 'creation date of row',
`Modified` BIGINT NOT NULL COMMENT 'last modification date of row',
`StationID` INT NOT NULL COMMENT 'station to which the ID is associated',
`EventClassID` INT DEFAULT NULL COMMENT 'event class, NULL for raw event',
`Description` TEXT NOT NULL COMMENT 'descriptive text of the event',
PRIMARY KEY (`ID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8"""
]
for statement in statements:
store.execute(statement)
| gpl-3.0 | -6,247,857,135,208,938,000 | 34.978723 | 78 | 0.735068 | false | 3.782998 | false | false | false |
pedrohml/smartbot | smartbot/joke_behaviour.py | 1 | 2199 | # coding: utf-8
from smartbot import Behaviour
from smartbot import Utils
from smartbot import ExternalAPI
import re
import os
import random
class JokeBehaviour(Behaviour):
def __init__(self, bot):
super(JokeBehaviour, self).__init__(bot)
self.language = self.bot.config.get('main', 'language') if self.bot.config.has_option('main', 'language') else 'en-US'
def addHandlers(self):
self.bot.addCommandHandler('joke', self.jokeSearch)
self.bot.addCommandHandler('jalk', self.jalkSearch)
def removeHandlers(self):
self.bot.removeCommandHandler('joke', self.jokeSearch)
self.bot.removeCommandHandler('jalk', self.jalkSearch)
def jokeSearch(self, telegramBot, update):
p = re.compile('([^ ]*) (.*)')
query = (p.match(update.message.text).groups()[1] or '').strip()
self.logDebug(u'Joke search (chat_id: %s, query: %s)' % (update.message.chat_id, query or 'None'))
jokes = ExternalAPI.searchJoke(query)
if jokes:
self.bot.sendMessage(chat_id=update.message.chat_id, text=random.choice(jokes))
def jalkSearch(self, telegramBot, update):
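# Like jokeSearch, but picks the shortest matching joke and sends it as
# synthesized speech (text-to-speech audio) instead of plain text.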
p = re.compile('([^ ]*) (.*)')
query = (p.match(update.message.text).groups()[1] or '').strip()
self.logDebug(u'Jalk search (chat_id: %s, query: %s)' % (update.message.chat_id, query or 'None'))
jokes = ExternalAPI.searchJoke(query)
if jokes:
jokes = filter(lambda c: len(re.split(r'\W+', c)) < 200, jokes)
jokes = sorted(jokes, key=len)
if jokes:
joke = jokes[0]
audioFile = ExternalAPI.textToSpeech(joke, language=self.language, encode='mp3')
if os.path.exists(audioFile) and os.path.getsize(audioFile) > 0:
self.bot.sendAudio(chat_id=update.message.chat_id, audio=audioFile, performer=self.bot.getInfo().username)
else:
self.bot.sendMessage(chat_id=update.message.chat_id, text=u'Não consigo contar')
else:
self.bot.sendMessage(chat_id=update.message.chat_id, text=u'Não encontrei piada curta')
| mit | 4,034,820,672,840,083,500 | 44.770833 | 126 | 0.622667 | false | 3.288922 | false | false | false |
mhubig/intelhex | scripts/hex2dump.py | 1 | 3960 | #!/usr/bin/python
# Copyright (c) 2008,2010,2011,2012,2013 Alexander Belchenko
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain
# the above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce
# the above copyright notice, this list of conditions
# and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the author nor the names
# of its contributors may be used to endorse
# or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Show content of hex file as hexdump."""
VERSION = '1.5.1'
USAGE = '''hex2dump: show content of hex file as hexdump.
Usage:
python hex2dump.py [options] HEXFILE
Options:
-h, --help this help message.
-v, --version version info.
-r, --range=START:END specify address range for dumping
(ascii hex value).
Range can be in form 'START:' or ':END'.
Arguments:
HEXFILE name of hex file for processing (use '-' to read
from stdin)
'''
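# Example invocation (hypothetical file name):
# python hex2dump.py -r 0100:0200 firmware.hex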
import sys
def hex2dump(hexfile, start=None, end=None):
import intelhex
if hexfile == '-':
hexfile = sys.stdin
try:
ih = intelhex.IntelHex(hexfile)
except (IOError, intelhex.IntelHexError), e:
sys.stderr.write('Error reading file: %s\n' % e)
return 1
if not (start is None and end is None):
ih = ih[slice(start,end)]
ih.dump()
return 0
def main(argv=None):
import getopt
if argv is None:
argv = sys.argv[1:]
start = None
end = None
try:
opts, args = getopt.getopt(argv, "hvp:r:",
["help", "version", "range="])
for o, a in opts:
if o in ("-h", "--help"):
print(USAGE)
return 0
elif o in ("-v", "--version"):
print(VERSION)
return 0
elif o in ("-r", "--range"):
try:
l = a.split(":")
if l[0] != '':
start = int(l[0], 16)
if l[1] != '':
end = int(l[1], 16)
except:
raise getopt.GetoptError('Bad range value(s)')
if not args:
raise getopt.GetoptError('Hex file is not specified')
if len(args) > 1:
raise getopt.GetoptError('Too many arguments')
except getopt.GetoptError, msg:
txt = 'ERROR: '+str(msg) # that's required to get not-so-dumb result from 2to3 tool
print(txt)
print(USAGE)
return 2
try:
return hex2dump(args[0], start, end)
except IOError, e:
import errno
if e.errno not in (0, errno.EPIPE):
raise
if __name__ == '__main__':
import sys
sys.exit(main())
| bsd-3-clause | -4,853,036,239,676,083,000 | 31.727273 | 92 | 0.603283 | false | 4.120708 | false | false | false |
oliver-sanders/cylc | cylc/flow/network/schema.py | 1 | 52672 | # THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2019 NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""GraphQL API schema via Graphene implementation."""
import asyncio
from functools import partial
import logging
from textwrap import dedent
from typing import Callable, AsyncGenerator, Any
from graphene import (
Boolean, Field, Float, ID, InputObjectType, Int,
List, Mutation, ObjectType, Schema, String, Union, Enum
)
from graphene.types.generic import GenericScalar
from graphene.utils.str_converters import to_snake_case
from cylc.flow.task_state import (
TASK_STATUSES_ORDERED,
TASK_STATUS_DESC,
# TASK_STATUS_RUNAHEAD,
TASK_STATUS_WAITING,
TASK_STATUS_QUEUED,
TASK_STATUS_EXPIRED,
TASK_STATUS_READY,
TASK_STATUS_SUBMIT_FAILED,
TASK_STATUS_SUBMIT_RETRYING,
TASK_STATUS_SUBMITTED,
TASK_STATUS_RETRYING,
TASK_STATUS_RUNNING,
TASK_STATUS_FAILED,
TASK_STATUS_SUCCEEDED
)
from cylc.flow.data_store_mgr import (
ID_DELIM, FAMILIES, FAMILY_PROXIES,
JOBS, TASKS, TASK_PROXIES
)
from cylc.flow.suite_status import StopMode
def sstrip(text):
"""Simple function to dedent and strip text.
Examples:
>>> print(sstrip('''
... foo
... bar
... baz
... '''))
foo
bar
baz
"""
return dedent(text).strip()
PROXY_NODES = 'proxy_nodes'
NODE_MAP = {
'Task': TASKS,
'TaskProxy': TASK_PROXIES,
'Family': FAMILIES,
'FamilyProxy': FAMILY_PROXIES,
'Job': JOBS,
'Node': PROXY_NODES,
}
CYCLING_TYPES = [
'family_proxies',
'family_proxy',
'jobs',
'job',
'task_proxies',
'task_proxy',
]
PROXY_TYPES = [
'family_proxies',
'family_proxy',
'task_proxies',
'task_proxy',
]
DEF_TYPES = [
'families',
'family',
'tasks',
'task',
]
def parse_workflow_id(item):
"""Split workflow id argument to individual workflow attributes.
Args:
item (owner|workflow:status):
It's possible to traverse workflows,
defaults to UI Server owner, and ``*`` glob for workflow.
Returns:
A tuple of id components in respective order. For example:
(owner, name, status)
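        Illustrative doctests (with placeholder owner/workflow names):

        >>> parse_workflow_id('owner|workflow:running')
        ('owner', 'workflow', 'running')
        >>> parse_workflow_id('workflow')
        (None, 'workflow', None)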
"""
owner, workflow, status = (None, None, None)
if ':' in item:
head, status = item.rsplit(':', 1)
else:
head, status = (item, None)
if head.count(ID_DELIM):
owner, workflow = head.split(ID_DELIM, 1)
else:
# more common to filter on workflow (with owner constant)
workflow = head
return (owner, workflow, status)
def parse_node_id(item, node_type=None):
"""Parse definition, job, or proxy id argument returning components.
Args:
item (str): A string representing a node ID. Jobs fill out
cycle|name|num first, cycle is irrelevant to Def
owner|workflow is always last.
For example:
name
cycle|na*
workflow|cycle|name
owner|workflow|cycle|name|submit_num:state
cycle|*|submit_num
node_type (str):
the type of the node to be parsed.
Returns:
A tuple of string id components in respective order. For example:
(owner, workflow, cycle, name, submit_num, state)
None type is set for missing components.
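        Illustrative doctests (with placeholder ids):

        >>> parse_node_id('cycle|name', 'task_proxies')
        (None, None, 'cycle', 'name', None, None)
        >>> parse_node_id('cycle|name|01', 'jobs')
        (None, None, 'cycle', 'name', '01', None)
        >>> parse_node_id('workflow|name', 'tasks')
        (None, 'workflow', None, 'name', None, None)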
"""
if ':' in item:
head, state = item.rsplit(':', 1)
else:
head, state = (item, None)
if ID_DELIM in head:
dil_count = head.count(ID_DELIM)
parts = head.split(ID_DELIM, dil_count)
else:
return (None, None, None, head, None, state)
if node_type in DEF_TYPES:
owner, workflow, name = [None] * (2 - dil_count) + parts
parts = [owner, workflow, None, name, None]
elif node_type in PROXY_TYPES:
parts = [None] * (3 - dil_count) + parts + [None]
elif dil_count < 4:
if dil_count < 3:
parts = [None, None] + parts + [None] * (2 - dil_count)
else:
parts = [None] * (4 - dil_count) + parts
parts += [state]
return tuple(parts)
# ** Query Related **#
# Field args (i.e. for queries etc):
class SortArgs(InputObjectType):
keys = List(String, default_value=['id'])
reverse = Boolean(default_value=False)
jobs_args = dict(
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
sort=SortArgs(default_value=None),
)
all_jobs_args = dict(
workflows=List(ID, default_value=[]),
exworkflows=List(ID, default_value=[]),
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
sort=SortArgs(default_value=None),
)
def_args = dict(
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
all_def_args = dict(
workflows=List(ID, default_value=[]),
exworkflows=List(ID, default_value=[]),
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
proxy_args = dict(
ghosts=Boolean(default_value=False),
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
is_held=Boolean(),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
all_proxy_args = dict(
ghosts=Boolean(default_value=False),
workflows=List(ID, default_value=[]),
exworkflows=List(ID, default_value=[]),
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
is_held=Boolean(),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
edge_args = dict(
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
all_edge_args = dict(
workflows=List(ID, default_value=[]),
exworkflows=List(ID, default_value=[]),
sort=SortArgs(default_value=None),
)
nodes_edges_args = dict(
ghosts=Boolean(default_value=False),
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
is_held=Boolean(),
distance=Int(default_value=1),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
nodes_edges_args_all = dict(
ghosts=Boolean(default_value=False),
workflows=List(ID, default_value=[]),
exworkflows=List(ID, default_value=[]),
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
states=List(String, default_value=[]),
exstates=List(String, default_value=[]),
is_held=Boolean(),
distance=Int(default_value=1),
mindepth=Int(default_value=-1),
maxdepth=Int(default_value=-1),
sort=SortArgs(default_value=None),
)
# Resolvers are used to collate data needed for query resolution.
# Treated as implicit static methods;
# https://docs.graphene-python.org/en/latest/types
# /objecttypes/#implicit-staticmethod
# they can exist inside or outside the query object types.
#
# Here we define them outside the queries so they can be used with
# multiple resolution calls, both at root query or object field level.
#
# The first argument has a naming convention;
# https://docs.graphene-python.org/en/latest/types
# /objecttypes/#naming-convention
# with name 'root' used here, it provides context to the resolvers.
# Resolvers:
async def get_workflows(root, info, **args):
args['workflows'] = [parse_workflow_id(w_id) for w_id in args['ids']]
args['exworkflows'] = [parse_workflow_id(w_id) for w_id in args['exids']]
resolvers = info.context.get('resolvers')
return await resolvers.get_workflows(args)
async def get_nodes_all(root, info, **args):
"""Resolver for returning job, task, family nodes"""
field_name = to_snake_case(info.field_name)
field_ids = getattr(root, field_name, None)
if 'id' in args:
args['ids'] = [args.get('id')]
if field_ids:
args['ids'] = field_ids
elif field_ids == []:
return []
try:
obj_type = str(info.return_type.of_type).replace('!', '')
except AttributeError:
obj_type = str(info.return_type)
node_type = NODE_MAP[obj_type]
args['ids'] = [parse_node_id(n_id, node_type) for n_id in args['ids']]
args['exids'] = [parse_node_id(n_id, node_type) for n_id in args['exids']]
args['workflows'] = [
parse_workflow_id(w_id) for w_id in args['workflows']]
args['exworkflows'] = [
parse_workflow_id(w_id) for w_id in args['exworkflows']]
resolvers = info.context.get('resolvers')
return await resolvers.get_nodes_all(node_type, args)
async def get_nodes_by_ids(root, info, **args):
"""Resolver for returning job, task, family node"""
field_name = to_snake_case(info.field_name)
field_ids = getattr(root, field_name, None)
if 'id' in args:
args['ids'] = [args.get('id')]
if field_ids:
if isinstance(field_ids, str):
field_ids = [field_ids]
args['native_ids'] = field_ids
elif field_ids == []:
return []
try:
obj_type = str(info.return_type.of_type).replace('!', '')
except AttributeError:
obj_type = str(info.return_type)
node_type = NODE_MAP[obj_type]
args['ids'] = [parse_node_id(n_id, node_type) for n_id in args['ids']]
args['exids'] = [parse_node_id(n_id, node_type) for n_id in args['exids']]
resolvers = info.context.get('resolvers')
return await resolvers.get_nodes_by_ids(node_type, args)
async def get_node_by_id(root, info, **args):
"""Resolver for returning job, task, family node"""
field_name = to_snake_case(info.field_name)
if field_name == 'source_node':
field_id = getattr(root, 'source', None)
elif field_name == 'target_node':
field_id = getattr(root, 'target', None)
else:
field_id = getattr(root, field_name, None)
if field_id:
args['id'] = field_id
if args.get('id', None) is None:
return None
try:
obj_type = str(info.return_type.of_type).replace('!', '')
except AttributeError:
obj_type = str(info.return_type)
resolvers = info.context.get('resolvers')
return await resolvers.get_node_by_id(NODE_MAP[obj_type], args)
async def get_edges_all(root, info, **args):
args['workflows'] = [
parse_workflow_id(w_id) for w_id in args['workflows']]
args['exworkflows'] = [
parse_workflow_id(w_id) for w_id in args['exworkflows']]
resolvers = info.context.get('resolvers')
return await resolvers.get_edges_all(args)
async def get_edges_by_ids(root, info, **args):
field_name = to_snake_case(info.field_name)
field_ids = getattr(root, field_name, None)
if field_ids:
args['native_ids'] = list(field_ids)
elif field_ids == []:
return []
resolvers = info.context.get('resolvers')
return await resolvers.get_edges_by_ids(args)
async def get_nodes_edges(root, info, **args):
"""Resolver for returning job, task, family nodes"""
node_type = NODE_MAP['TaskProxy']
workflow = getattr(root, 'id', None)
if workflow:
args['workflows'] = [parse_workflow_id(workflow)]
args['exworkflows'] = []
else:
args['workflows'] = [
parse_workflow_id(w_id) for w_id in args['workflows']]
args['exworkflows'] = [
parse_workflow_id(w_id) for w_id in args['exworkflows']]
args['ids'] = [parse_node_id(n_id, node_type) for n_id in args['ids']]
args['exids'] = [parse_node_id(n_id, node_type) for n_id in args['exids']]
resolvers = info.context.get('resolvers')
root_nodes = await resolvers.get_nodes_all(node_type, args)
return await resolvers.get_nodes_edges(root_nodes, args)
def resolve_state_totals(root, info, **args):
state_totals = {state: 0 for state in TASK_STATUSES_ORDERED}
# Update with converted protobuf map container
state_totals.update(
dict(getattr(root, to_snake_case(info.field_name), {})))
return state_totals
# Types:
class DefMeta(ObjectType):
class Meta:
description = """
Meta data fields,
including custom fields in a generic user-defined dump"""
title = String(default_value=None)
description = String(default_value=None)
URL = String(default_value=None)
user_defined = List(String, default_value=[])
class TimeZone(ObjectType):
class Meta:
description = """Time zone info."""
hours = Int()
minutes = Int()
string_basic = String()
string_extended = String()
class Workflow(ObjectType):
class Meta:
description = """Global workflow info."""
id = ID(required=True)
name = String()
status = String()
status_msg = String()
host = String()
port = Int()
owner = String()
tasks = List(
lambda: Task,
description="""Task definitions.""",
args=def_args,
resolver=get_nodes_by_ids)
families = List(
lambda: Family,
description="""Family definitions.""",
args=def_args,
resolver=get_nodes_by_ids)
task_proxies = List(
lambda: TaskProxy,
description="""Task cycle instances.""",
args=proxy_args,
resolver=get_nodes_by_ids)
family_proxies = List(
lambda: FamilyProxy,
description="""Family cycle instances.""",
args=proxy_args,
resolver=get_nodes_by_ids)
edges = Field(
lambda: Edges,
args=edge_args,
description="""Graph edges""")
nodes_edges = Field(
lambda: NodesEdges,
args=nodes_edges_args,
resolver=get_nodes_edges)
api_version = Int()
cylc_version = String()
last_updated = Float()
meta = Field(DefMeta)
newest_runahead_cycle_point = String()
newest_cycle_point = String()
oldest_cycle_point = String()
reloaded = Boolean()
run_mode = String()
is_held_total = Int()
state_totals = GenericScalar(resolver=resolve_state_totals)
workflow_log_dir = String()
time_zone_info = Field(TimeZone)
tree_depth = Int()
ns_defn_order = List(String)
job_log_names = List(String)
states = List(String)
class Job(ObjectType):
class Meta:
description = """Jobs."""
id = ID(required=True)
submit_num = Int()
state = String()
# name and cycle_point for filtering/sorting
name = String(required=True)
cycle_point = String(required=True)
task_proxy = Field(
lambda: TaskProxy,
description="""Associated Task Proxy""",
required=True,
resolver=get_node_by_id)
submitted_time = String()
started_time = String()
finished_time = String()
batch_sys_job_id = ID()
batch_sys_name = String()
env_script = String()
err_script = String()
exit_script = String()
execution_time_limit = Float()
host = String()
init_script = String()
job_log_dir = String()
owner = String()
post_script = String()
pre_script = String()
script = String()
work_sub_dir = String()
batch_sys_conf = List(String)
environment = List(String)
directives = List(String)
param_env_tmpl = List(String)
param_var = List(String)
extra_logs = List(String)
messages = List(String)
class Task(ObjectType):
class Meta:
description = """Task definition, static fields"""
id = ID(required=True)
name = String(required=True)
meta = Field(DefMeta)
mean_elapsed_time = Float()
depth = Int()
proxies = List(
lambda: TaskProxy,
description="""Associated cycle point proxies""",
args=proxy_args,
resolver=get_nodes_by_ids)
namespace = List(String, required=True)
class PollTask(ObjectType):
class Meta:
description = """Polling task edge"""
local_proxy = ID(required=True)
workflow = String()
remote_proxy = ID(required=True)
req_state = String()
graph_string = String()
class Condition(ObjectType):
class Meta:
description = """Prerequisite conditions."""
task_proxy = Field(
lambda: TaskProxy,
description="""Associated Task Proxy""",
resolver=get_node_by_id)
expr_alias = String()
req_state = String()
satisfied = Boolean()
message = String()
class Prerequisite(ObjectType):
class Meta:
description = """Task prerequisite."""
expression = String()
conditions = List(
Condition,
description="""Condition monomers of a task prerequisites.""")
cycle_points = List(String)
satisfied = Boolean()
class TaskProxy(ObjectType):
class Meta:
description = """Task cycle instance."""
id = ID(required=True)
task = Field(
Task,
description="""Task definition""",
required=True,
resolver=get_node_by_id)
state = String()
cycle_point = String()
is_held = Boolean()
spawned = Boolean()
depth = Int()
job_submits = Int()
latest_message = String()
outputs = List(String, default_value=[])
broadcasts = List(String, default_value=[])
# name & namespace for filtering/sorting
name = String(required=True)
namespace = List(String, required=True)
prerequisites = List(Prerequisite)
jobs = List(
Job,
description="""Task jobs.""",
args=jobs_args,
resolver=get_nodes_by_ids)
parents = List(
lambda: FamilyProxy,
description="""Task parents.""",
args=proxy_args,
resolver=get_nodes_by_ids)
first_parent = Field(
lambda: FamilyProxy,
description="""Task first parent.""",
args=proxy_args,
resolver=get_node_by_id)
ancestors = List(
lambda: FamilyProxy,
description="""First parent ancestors.""",
args=proxy_args,
resolver=get_nodes_by_ids)
class Family(ObjectType):
class Meta:
description = """Task definition, static fields"""
id = ID(required=True)
name = String(required=True)
meta = Field(DefMeta)
depth = Int()
proxies = List(
lambda: FamilyProxy,
description="""Associated cycle point proxies""",
args=proxy_args,
resolver=get_nodes_by_ids)
parents = List(
lambda: Family,
description="""Family definition parent.""",
args=def_args,
resolver=get_nodes_by_ids)
child_tasks = List(
Task,
description="""Descendant definition tasks.""",
args=def_args,
resolver=get_nodes_by_ids)
child_families = List(
lambda: Family,
description="""Descendant desc families.""",
args=def_args,
resolver=get_nodes_by_ids)
class FamilyProxy(ObjectType):
class Meta:
description = """Family composite."""
id = ID(required=True)
cycle_point = String()
# name & namespace for filtering/sorting
name = String(required=True)
family = Field(
Family,
description="""Family definition""",
required=True,
resolver=get_node_by_id)
state = String()
is_held = Boolean()
depth = Int()
parents = List(
lambda: FamilyProxy,
description="""Family parent proxies.""",
args=proxy_args,
resolver=get_nodes_by_ids)
child_tasks = List(
TaskProxy,
description="""Descendant task proxies.""",
args=proxy_args,
resolver=get_nodes_by_ids)
child_families = List(
lambda: FamilyProxy,
description="""Descendant family proxies.""",
args=proxy_args,
resolver=get_nodes_by_ids)
first_parent = Field(
lambda: FamilyProxy,
description="""Task first parent.""",
args=proxy_args,
resolver=get_node_by_id)
ancestors = List(
lambda: FamilyProxy,
description="""First parent ancestors.""",
args=proxy_args,
resolver=get_nodes_by_ids)
class Node(Union):
class Meta:
types = (TaskProxy, FamilyProxy)
@classmethod
def resolve_type(cls, instance, info):
if hasattr(instance, 'task'):
return TaskProxy
return FamilyProxy
class Edge(ObjectType):
class Meta:
description = """Dependency edge task/family proxies"""
id = ID(required=True)
source = ID()
source_node = Field(
Node,
resolver=get_node_by_id)
target = ID()
target_node = Field(
Node,
resolver=get_node_by_id)
suicide = Boolean()
cond = Boolean()
class Edges(ObjectType):
class Meta:
description = """Dependency edge"""
edges = List(
Edge,
required=True,
args=edge_args,
resolver=get_edges_by_ids)
workflow_polling_tasks = List(PollTask)
leaves = List(String)
feet = List(String)
class NodesEdges(ObjectType):
class Meta:
description = """Related Nodes & Edges."""
nodes = List(
TaskProxy,
description="""Task nodes from and including root.""")
edges = List(
Edge,
description="""Edges associated with the nodes.""")
# Query declaration
class Queries(ObjectType):
class Meta:
description = """Multi-Workflow root level queries."""
workflows = List(
Workflow,
description=Workflow._meta.description,
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
resolver=get_workflows)
job = Field(
Job,
description=Job._meta.description,
id=ID(required=True),
resolver=get_node_by_id)
jobs = List(
Job,
description=Job._meta.description,
args=all_jobs_args,
resolver=get_nodes_all)
task = Field(
Task,
description=Task._meta.description,
id=ID(required=True),
resolver=get_node_by_id)
tasks = List(
Task,
description=Task._meta.description,
args=all_def_args,
resolver=get_nodes_all)
task_proxy = Field(
TaskProxy,
description=TaskProxy._meta.description,
id=ID(required=True),
resolver=get_node_by_id)
task_proxies = List(
TaskProxy,
description=TaskProxy._meta.description,
args=all_proxy_args,
resolver=get_nodes_all)
family = Field(
Family,
description=Family._meta.description,
id=ID(required=True),
resolver=get_node_by_id)
families = List(
Family,
description=Family._meta.description,
args=all_def_args,
resolver=get_nodes_all)
family_proxy = Field(
FamilyProxy,
description=FamilyProxy._meta.description,
id=ID(required=True),
resolver=get_node_by_id)
family_proxies = List(
FamilyProxy,
description=FamilyProxy._meta.description,
args=all_proxy_args,
resolver=get_nodes_all)
edges = List(
Edge,
description=Edge._meta.description,
args=all_edge_args,
resolver=get_edges_all)
nodes_edges = Field(
NodesEdges,
description=NodesEdges._meta.description,
args=nodes_edges_args_all,
resolver=get_nodes_edges)
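# Illustrative only (a hypothetical client document, not used by the schema):
# a query against the Queries root above. Graphene exposes the snake_case
# fields in camelCase, so `task_proxies` is queried as `taskProxies`.
_EXAMPLE_QUERY = '''
{
  taskProxies {
    id
    state
    cyclePoint
  }
}
'''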
# ** Mutation Related ** #
# Generic containers
class GenericResponse(ObjectType):
class Meta:
description = """Container for command queued response"""
result = GenericScalar()
# Mutators are used to call the internals of the parent program in the
# resolution of mutation requests (or can make external calls themselves).
# Like query resolvers (read above), they are treated as implicit
# static methods, with object context pass in as the first argument.
# Mutators:
async def mutator(root, info, command=None, workflows=None,
exworkflows=None, **args):
"""Call the resolver method that act on the workflow service
via the internal command queue."""
if workflows is None:
workflows = []
if exworkflows is None:
exworkflows = []
w_args = {}
w_args['workflows'] = [parse_workflow_id(w_id) for w_id in workflows]
w_args['exworkflows'] = [parse_workflow_id(w_id) for w_id in exworkflows]
if args.get('args', False):
args.update(args.get('args', {}))
args.pop('args')
resolvers = info.context.get('resolvers')
res = await resolvers.mutator(info, command, w_args, args)
return GenericResponse(result=res)
async def nodes_mutator(root, info, command, ids, workflows=None,
exworkflows=None, **args):
"""Call the resolver method, dealing with multiple node id arguments,
which acts on the workflow service via the internal command queue."""
if command == 'put_messages':
node_type = 'jobs'
else:
node_type = 'task_proxy'
ids = [parse_node_id(n_id, node_type) for n_id in ids]
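    # Each parsed id is a 6-tuple whose leading fields are (owner, workflow);
    # only those two are inspected below to infer the target workflows when
    # the caller supplied none.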
# if the workflows arg is empty extract from proxy args
if workflows is None:
workflows = set()
for owner, workflow, _, _, _, _ in ids:
if owner and workflow:
workflows.add(f'{owner}{ID_DELIM}{workflow}')
elif workflow:
workflows.add(workflow)
if not workflows:
        return GenericResponse(result="Error: No workflow(s) given")
if exworkflows is None:
exworkflows = []
w_args = {}
w_args['workflows'] = [parse_workflow_id(w_id) for w_id in workflows]
w_args['exworkflows'] = [parse_workflow_id(w_id) for w_id in exworkflows]
if args.get('args', False):
args.update(args.get('args', {}))
args.pop('args')
resolvers = info.context.get('resolvers')
res = await resolvers.nodes_mutator(info, command, ids, w_args, args)
return GenericResponse(result=res)
# Input types:
class WorkflowID(String):
"""A registered workflow."""
class CyclePoint(String):
"""An integer or date-time cyclepoint."""
class CyclePointGlob(String):
"""A glob for integer or date-time cyclepoints.
The wildcard character (`*`) can be used to perform globbing.
For example `2000*` might match `2000-01-01T00:00Z`.
"""
class RuntimeConfiguration(String):
"""A configuration item for a task or family e.g. `script`."""
class BroadcastSetting(InputObjectType):
"""A task/family runtime setting as a key, value pair."""
key = RuntimeConfiguration(
description=sstrip('''
The cylc namespace for the setting to modify.
e.g. `[environment]variable_name`.
'''),
required=True
)
value = String(
description='The value of the modification',
required=True
)
class BroadcastMode(Enum):
Set = 'put_broadcast'
Clear = 'clear_broadcast'
@property
def description(self):
if self == BroadcastMode.Set:
return 'Create a new broadcast.'
if self == BroadcastMode.Clear:
return 'Revoke an existing broadcast.'
return ''
class TaskStatus(Enum):
"""The status of a task in a workflow."""
# NOTE: this is an enumeration purely for the GraphQL schema
# TODO: the task statuses should be formally declared in a Python
# enumeration rendering this class unnecessary
# NOTE: runahead purposefully omitted to hide users from the task pool
# Runahead = TASK_STATUS_RUNAHEAD
Waiting = TASK_STATUS_WAITING
Queued = TASK_STATUS_QUEUED
Expired = TASK_STATUS_EXPIRED
Ready = TASK_STATUS_READY
SubmitFailed = TASK_STATUS_SUBMIT_FAILED
SubmitRetrying = TASK_STATUS_SUBMIT_RETRYING
Submitted = TASK_STATUS_SUBMITTED
Retrying = TASK_STATUS_RETRYING
Running = TASK_STATUS_RUNNING
Failed = TASK_STATUS_FAILED
Succeeded = TASK_STATUS_SUCCEEDED
@property
def description(self):
return TASK_STATUS_DESC.get(self.value, '')
class TaskState(InputObjectType):
"""The state of a task, a combination of status and other fields."""
status = TaskStatus()
is_held = Boolean(description=sstrip('''
If a task is held no new job submissions will be made
'''))
class TaskName(String):
"""The name a task.
* Must be a task not a family.
* Does not include the cycle point.
* Any parameters must be expanded (e.g. can't be `foo<bar>`).
"""
class NamespaceName(String):
"""The name of a task or family."""
class NamespaceIDGlob(String):
"""A glob search for an active task or family.
    Can use the wildcard character (`*`), e.g. `foo*` might match `foot`.
"""
class TaskID(String):
"""The name of an active task."""
class JobID(String):
"""A job submission from an active task."""
class TimePoint(String):
"""A date-time in the ISO8601 format."""
LogLevels = Enum(
'LogLevels',
list(logging._nameToLevel.items()),
description=lambda x: f'Python logging level: {x.name} = {x.value}.'
if x else ''
)
class SuiteStopMode(Enum):
"""The mode used to stop a running workflow."""
# Note: contains only the REQUEST_* values from StopMode
Clean = StopMode.REQUEST_CLEAN
Now = StopMode.REQUEST_NOW
NowNow = StopMode.REQUEST_NOW_NOW
@property
def description(self):
return StopMode(self.value).describe()
# Mutations:
# TODO: re-instate:
# - get-broadcast (can just use GraphQL query BUT needs CLI access too)
# - expire-broadcast
class Broadcast(Mutation):
class Meta:
description = sstrip('''
Override or add new [runtime] config in targeted namespaces in
a running suite.
Uses for broadcast include making temporary changes to task
behaviour, and task-to-downstream-task communication via
environment variables.
A broadcast can target any [runtime] namespace for all cycles or
for a specific cycle. If a task is affected by specific-cycle and
all-cycle broadcasts at once, the specific takes precedence. If
a task is affected by broadcasts to multiple ancestor
namespaces, the result is determined by normal [runtime]
inheritance. In other words, it follows this order:
`all:root -> all:FAM -> all:task -> tag:root -> tag:FAM ->
tag:task`
Broadcasts persist, even across suite restarts, until they expire
when their target cycle point is older than the oldest current in
the suite, or until they are explicitly cancelled with this
command. All-cycle broadcasts do not expire.
For each task the final effect of all broadcasts to all namespaces
is computed on the fly just prior to job submission. The
`--cancel` and `--clear` options simply cancel (remove) active
broadcasts, they do not act directly on the final task-level
result. Consequently, for example, you cannot broadcast to "all
cycles except Tn" with an all-cycle broadcast followed by a cancel
to Tn (there is no direct broadcast to Tn to cancel); and you
cannot broadcast to "all members of FAMILY except member_n" with a
general broadcast to FAMILY followed by a cancel to member_n (there
is no direct broadcast to member_n to cancel).
''')
resolver = partial(mutator, command='broadcast')
class Arguments:
workflows = List(WorkflowID, required=True)
mode = BroadcastMode(
default_value=1,
required=True
)
cycle_points = List(
CyclePoint,
description=sstrip('''
List of cycle points to target or `*` to cancel all all-cycle
broadcasts without canceling all specific-cycle broadcasts.
'''),
default_value=['*'])
tasks = List(
NamespaceName,
description='Target namespaces.',
default_value=['root']
)
settings = List(
BroadcastSetting,
description='Target settings.'
)
# TODO: work out how to implement this feature, it needs to be
# handled client-side which makes it slightly awkward in
# api-on-the-fly land
# files = List(
# String,
# description=sstrip('''
# File with config to broadcast. Can be used multiple times
# ''')
# )
result = GenericScalar()
class Hold(Mutation):
class Meta:
description = sstrip('''
Hold a workflow or tasks within it.
''')
resolver = partial(mutator, command='hold')
class Arguments:
workflows = List(WorkflowID, required=True)
tasks = List(
NamespaceIDGlob,
description='Hold the specified tasks rather than the workflow.'
)
time = TimePoint(description=sstrip('''
Get the workflow to hold after the specified wallclock time
has passed.
'''))
result = GenericScalar()
class Nudge(Mutation):
class Meta:
description = sstrip('''
Cause the Cylc task processing loop to be invoked on a running
suite.
This happens automatically when the state of any task changes
such that task processing (dependency negotiation etc.)
is required, or if a clock-trigger task is ready to run.
''')
resolver = partial(mutator, command='nudge')
class Arguments:
workflows = List(WorkflowID, required=True)
result = GenericScalar()
class Ping(Mutation):
class Meta:
description = sstrip('''
Send a test message to a running suite.
''')
resolver = partial(mutator, command='ping_suite')
class Arguments:
workflows = List(WorkflowID, required=True)
result = GenericScalar()
class Message(Mutation):
class Meta:
description = sstrip('''
Record task job messages.
Send task job messages to:
- The job stdout/stderr.
- The job status file, if there is one.
- The suite server program, if communication is possible.
Task jobs use this to record and report status such
as success and failure. Applications run by task jobs can use
this command to report messages and to report registered task
outputs.
''')
resolver = partial(nodes_mutator, command='put_messages')
class Arguments:
workflows = List(WorkflowID, required=True)
task_job = String(required=True)
event_time = String(default_value=None)
messages = List(
List(String),
description="""List in the form `[[severity, message], ...]`.""",
default_value=None
)
result = GenericScalar()
class Release(Mutation):
class Meta:
description = sstrip('''
Release a held workflow or tasks within it.
See also the opposite command `hold`.
''')
resolver = partial(mutator, command='release')
class Arguments:
workflows = List(WorkflowID, required=True)
tasks = List(
NamespaceIDGlob,
description=sstrip('''
                Release matching tasks rather than the workflow as a whole.
''')
)
result = GenericScalar()
class Reload(Mutation):
class Meta:
description = sstrip('''
Tell a suite to reload its definition at run time.
All settings including task definitions, with the
exception of suite log configuration, can be changed on reload.
            Note that defined tasks can be added to or removed from a
running suite using "insert" and "remove" without reloading. This
command also allows addition and removal of actual task
definitions, and therefore insertion of tasks that were not defined
at all when the suite started (you will still need to manually
insert a particular instance of a newly defined task). Live task
proxies that are orphaned by a reload (i.e. their task definitions
have been removed) will be removed from the task pool if they have
not started running yet. Changes to task definitions take effect
immediately, unless a task is already running at reload time.
If the suite was started with Jinja2 template variables
set on the command line (cylc run --set FOO=bar REG) the same
template settings apply to the reload (only changes to the suite.rc
file itself are reloaded).
If the modified suite definition does not parse,
failure to reload will be reported but no harm will be done to the
running suite.
''')
resolver = partial(mutator, command='reload_suite')
class Arguments:
workflows = List(WorkflowID, required=True)
result = GenericScalar()
class SetVerbosity(Mutation):
class Meta:
description = sstrip('''
Change the logging severity level of a running suite.
Only messages at or above the chosen severity level will be logged;
for example, if you choose `WARNING`, only warnings and critical
messages will be logged.
''')
resolver = partial(mutator, command='set_verbosity')
class Arguments:
workflows = List(WorkflowID, required=True)
level = LogLevels(required=True)
result = GenericScalar()
class Stop(Mutation):
class Meta:
        description = sstrip('''
Tell a suite server program to shut down.
By default suites wait for all submitted and running tasks to
complete before shutting down. You can change this behaviour
with the "mode" option.
''')
resolver = partial(mutator, command='stop_workflow')
class Arguments:
workflows = List(WorkflowID, required=True)
mode = SuiteStopMode(
# TODO default
)
cycle_point = CyclePoint(
description='Stop after the suite reaches this cycle.'
)
clock_time = TimePoint(
description='Stop after wall-clock time passes this point.'
)
task = TaskID(
description='Stop after this task succeeds.'
)
result = GenericScalar()
class Checkpoint(Mutation):
class Meta:
description = 'Tell the suite to checkpoint its current state.'
resolver = partial(mutator, command='take_checkpoints')
class Arguments:
workflows = List(WorkflowID, required=True)
name = String(
description='The checkpoint name.',
required=True
)
result = GenericScalar()
class ExtTrigger(Mutation):
class Meta:
description = sstrip('''
Report an external event message to a suite server program.
It is expected that a task in the suite has registered the same
message as an external trigger - a special prerequisite to be
satisfied by an external system, via this command, rather than by
triggering off other tasks.
The ID argument should uniquely distinguish one external trigger
event from the next. When a task's external trigger is satisfied by
an incoming message, the message ID is broadcast to all downstream
tasks in the cycle point as `$CYLC_EXT_TRIGGER_ID` so that they can
use it - e.g. to identify a new data file that the external
triggering system is responding to.
Use the retry options in case the target suite is down or out of
contact.
Note: To manually trigger a task use "Trigger" not
"ExtTrigger".
''')
resolver = partial(mutator, command='put_ext_trigger')
class Arguments:
workflows = List(WorkflowID, required=True)
message = String(
description='External trigger message.',
required=True
)
id = String(
description='Unique trigger ID.',
required=True
)
result = GenericScalar()
class TaskMutation:
class Arguments:
workflows = List(
WorkflowID,
required=True
)
tasks = List(
NamespaceIDGlob,
required=True
)
result = GenericScalar()
class DryRun(Mutation, TaskMutation):
class Meta:
description = sstrip('''
[For internal use] Prepare the job file for a task.
''')
resolver = partial(mutator, command='dry_run_tasks')
class Arguments(TaskMutation.Arguments):
check_syntax = Boolean(
description='Check shell syntax.',
default_value=True
)
class Insert(Mutation, TaskMutation):
class Meta:
description = sstrip('''
Insert new task proxies into the task pool of a running workflow.
For example to enable re-triggering earlier tasks already removed
from the pool.
Note: inserted cycling tasks cycle on as normal, even if another
instance of the same task exists at a later cycle (instances of the
same task at different cycles can coexist, but a newly spawned task
will not be added to the pool if it catches up to another task with
the same ID).
See also "Submit", for running tasks without the scheduler.
''')
resolver = partial(mutator, command='insert_tasks')
class Arguments(TaskMutation.Arguments):
check_point = Boolean(
description=sstrip('''
Check that the provided cycle point is on one of the task's
recurrences as defined in the suite configuration before
inserting.
'''),
default_value=True
)
stop_point = CyclePoint(
description='hold/stop cycle point for inserted task.'
)
class Kill(Mutation, TaskMutation):
# TODO: This should be a job mutation?
class Meta:
description = sstrip('''
Kill jobs of active tasks and update their statuses accordingly.
''')
resolver = partial(mutator, command='kill_tasks')
class Poll(Mutation, TaskMutation):
class Meta:
description = sstrip('''
Poll (query) task jobs to verify and update their statuses.
''')
resolver = partial(mutator, command='poll_tasks')
class Arguments(TaskMutation.Arguments):
poll_succeeded = Boolean(
description='Allow polling of succeeded tasks.',
default_value=False
)
class Remove(Mutation, TaskMutation):
class Meta:
description = sstrip('''
Remove one or more task instances from a running workflow.
Tasks will be forced to spawn successors before removal if they
have not done so already, unless you change the `spawn` option.
''')
resolver = partial(mutator, command='remove_tasks')
class Arguments(TaskMutation.Arguments):
spawn = Boolean(
description='Spawn successors before removal.',
default_value=True
)
class Reset(Mutation, TaskMutation):
class Meta:
description = sstrip(f'''
Force task instances to a specified state.
Outputs are automatically updated to reflect the new task state,
except for custom message outputs which can be manipulated directly
with `outputs`.
Prerequisites reflect the state of other tasks; they are not
changed except to unset them on resetting state to
`{TASK_STATUS_WAITING}` or earlier.
Note: To hold and release tasks use "Hold" and "Release", not this
command.
''')
resolver = partial(mutator, command='reset_task_states')
class Arguments(TaskMutation.Arguments):
state = TaskStatus(
description='Reset the task status to this.'
)
outputs = List(
String,
description=sstrip('''
Find task output by message string or trigger string, set
complete or incomplete with `!OUTPUT`, `*` to set all
complete, `!*` to set all incomplete.
''')
)
class Spawn(Mutation, TaskMutation):
class Meta:
description = sstrip(f'''
Force task proxies to spawn successors at their own next cycle
point.
Tasks normally spawn on reaching the {TASK_STATUS_SUBMITTED}
status. Spawning them early allows running successive instances of
the same task out of order. See also the `spawn to max active
cycle points` workflow configuration.
Note this command does not operate on tasks at any arbitrary point
in the abstract workflow graph - tasks not already in the pool must
be inserted first with "Insert".
''')
resolver = partial(mutator, command='spawn_tasks')
class Trigger(Mutation, TaskMutation):
class Meta:
description = sstrip('''
Manually trigger tasks.
            TODO: re-implement edit functionality!
For single tasks you can use `edit` to edit the generated job
script before it submits, to apply one-off changes. A diff between
the original and edited job script will be saved to the task job
log directory.
Warning: waiting tasks that are queue-limited will be queued if
triggered, to submit as normal when released by the queue; queued
tasks will submit immediately if triggered, even if that violates
the queue limit (so you may need to trigger a queue-limited task
twice to get it to submit immediately).
Note: tasks not already in the pool must be inserted first with
"Insert" in order to be matched.
''')
resolver = partial(mutator, command='trigger_tasks')
class Arguments(TaskMutation.Arguments):
# back_out = Boolean()
# TODO: remove or re-implement?
pass
# Mutation declarations
class Mutations(ObjectType):
# workflow actions
    broadcast = Broadcast.Field(description=Broadcast._meta.description)
ext_trigger = ExtTrigger.Field(
description=ExtTrigger._meta.description)
hold = Hold.Field(description=Hold._meta.description)
nudge = Nudge.Field(description=Nudge._meta.description)
message = Message.Field(description=Message._meta.description)
ping = Ping.Field(description=Ping._meta.description)
release = Release.Field(description=Release._meta.description)
reload = Reload.Field(description=Reload._meta.description)
set_verbosity = SetVerbosity.Field(
description=SetVerbosity._meta.description)
stop = Stop.Field(description=Stop._meta.description)
checkpoint = Checkpoint.Field(
description=Checkpoint._meta.description)
# task actions
dry_run = DryRun.Field(description=DryRun._meta.description)
insert = Insert.Field(description=Insert._meta.description)
kill = Kill.Field(description=Kill._meta.description)
poll = Poll.Field(description=Poll._meta.description)
remove = Remove.Field(description=Remove._meta.description)
reset = Reset.Field(description=Reset._meta.description)
spawn = Spawn.Field(description=Spawn._meta.description)
trigger = Trigger.Field(description=Trigger._meta.description)
# job actions
# TODO
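# Illustrative only (a hypothetical client document, not used by the schema):
# invoking the `hold` mutation above. The workflow id format shown is an
# assumption for the example.
_EXAMPLE_MUTATION = '''
mutation {
  hold(workflows: ["owner|workflow"]) {
    result
  }
}
'''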
# ** Subscription Related ** #
def to_subscription(func: Callable, sleep_seconds: float = 5.) -> Callable:
"""Wraps a function in a while-true-sleep, transforming
the function into an async-generator, used by the
websockets/subscriptions.
Args:
func (Callable): a callable.
sleep_seconds (float): asyncio sleep interval in seconds.
Returns:
Callable: a callable async-generator wrapping the original callable.
"""
async def gen(*args: Any, **kwargs: Any) -> AsyncGenerator[Any, None]:
"""
Args:
*args: Variable length argument list, varies as per schema.
**kwargs: Arbitrary keyword arguments, varies as per schema.
Returns:
AsyncGenerator[Any, None]: an async generator that will
yield values from resolvers.
"""
while True:
yield await func(*args, **kwargs)
await asyncio.sleep(sleep_seconds)
return gen
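# Illustrative sketch (hypothetical helper, not part of the schema): how the
# async generator produced by `to_subscription` would be consumed. `resolver`
# stands for any schema resolver such as `get_workflows`.
async def _example_consume_subscription(resolver, *args, **kwargs):
    """Drain the first two snapshots from a subscription wrapper."""
    subscribe = to_subscription(resolver, sleep_seconds=0.1)
    snapshots = []
    async for snapshot in subscribe(*args, **kwargs):
        snapshots.append(snapshot)
        if len(snapshots) == 2:
            break
    return snapshots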
class Subscriptions(ObjectType):
"""Defines the subscriptions available in the schema."""
class Meta:
description = """Multi-Workflow root level subscriptions."""
workflows = List(
Workflow,
description=Workflow._meta.description,
ids=List(ID, default_value=[]),
exids=List(ID, default_value=[]),
resolver=to_subscription(get_workflows))
job = Field(
Job,
description=Job._meta.description,
id=ID(required=True),
resolver=to_subscription(get_node_by_id))
jobs = List(
Job,
description=Job._meta.description,
args=all_jobs_args,
resolver=to_subscription(get_nodes_all))
task = Field(
Task,
description=Task._meta.description,
id=ID(required=True),
resolver=to_subscription(get_node_by_id))
tasks = List(
Task,
description=Task._meta.description,
args=all_def_args,
resolver=to_subscription(get_nodes_all))
task_proxy = Field(
TaskProxy,
description=TaskProxy._meta.description,
id=ID(required=True),
resolver=to_subscription(get_node_by_id))
task_proxies = List(
TaskProxy,
description=TaskProxy._meta.description,
args=all_proxy_args,
resolver=to_subscription(get_nodes_all))
family = Field(
Family,
description=Family._meta.description,
id=ID(required=True),
resolver=to_subscription(get_node_by_id))
families = List(
Family,
description=Family._meta.description,
args=all_def_args,
resolver=to_subscription(get_nodes_all))
family_proxy = Field(
FamilyProxy,
description=FamilyProxy._meta.description,
id=ID(required=True),
resolver=to_subscription(get_node_by_id))
family_proxies = List(
FamilyProxy,
description=FamilyProxy._meta.description,
args=all_proxy_args,
resolver=to_subscription(get_nodes_all))
edges = List(
Edge,
description=Edge._meta.description,
args=all_edge_args,
resolver=to_subscription(get_edges_all))
nodes_edges = Field(
NodesEdges,
description=NodesEdges._meta.description,
args=nodes_edges_args_all,
resolver=to_subscription(get_nodes_edges))
schema = Schema(query=Queries, subscription=Subscriptions, mutation=Mutations)
| gpl-3.0 | 7,159,439,198,366,642,000 | 30.371054 | 79 | 0.621867 | false | 4.039264 | false | false | false |
yuanming-hu/taichi | examples/mgpcg_advanced.py | 1 | 9080 | import math
import time
import numpy as np
import taichi as ti
@ti.data_oriented
class MGPCG:
'''
    Grid-based MGPCG solver for the Poisson equation.
See `examples/stable_fluid.py <https://github.com/taichi-dev/taichi/blob/master/examples/stable_fluid.py>`_ for a usage example.
.. note::
This solver only runs on CPU and CUDA backends since it requires the
``pointer`` SNode.
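    Illustrative usage (a sketch, not executed here; assumes the caller has
    created ``ti.field``s of shape ``[N] * dim`` named ``rhs`` and ``out``)::

        solver = MGPCG(dim=2, N=256)
        solver.init(rhs, 1.0)        # load the right-hand side
        solver.solve(rel_tol=1e-12)
        solver.get_result(out)       # copy the solution out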
'''
def __init__(self, dim=2, N=512, n_mg_levels=6, real=float):
'''
:parameter dim: Dimensionality of the fields.
:parameter N: Grid resolution.
:parameter n_mg_levels: Number of multigrid levels.
'''
# grid parameters
self.use_multigrid = True
self.N = N
self.n_mg_levels = n_mg_levels
self.pre_and_post_smoothing = 2
self.bottom_smoothing = 50
self.dim = dim
self.real = real
        self.N_ext = self.N // 2  # number of ext cells, set so that the total grid size is still a power of 2
self.N_tot = 2 * self.N
# setup sparse simulation data arrays
self.r = [ti.field(dtype=self.real)
for _ in range(self.n_mg_levels)] # residual
self.z = [ti.field(dtype=self.real)
for _ in range(self.n_mg_levels)] # M^-1 self.r
self.x = ti.field(dtype=self.real) # solution
self.p = ti.field(dtype=self.real) # conjugate gradient
self.Ap = ti.field(dtype=self.real) # matrix-vector product
self.alpha = ti.field(dtype=self.real) # step size
self.beta = ti.field(dtype=self.real) # step size
self.sum = ti.field(dtype=self.real) # storage for reductions
indices = ti.ijk if self.dim == 3 else ti.ij
self.grid = ti.root.pointer(indices, [self.N_tot // 4]).dense(
indices, 4).place(self.x, self.p, self.Ap)
for l in range(self.n_mg_levels):
self.grid = ti.root.pointer(indices,
[self.N_tot // (4 * 2**l)]).dense(
indices,
4).place(self.r[l], self.z[l])
ti.root.place(self.alpha, self.beta, self.sum)
@ti.func
def init_r(self, I, r_I):
I = I + self.N_ext
self.r[0][I] = r_I
self.z[0][I] = 0
self.Ap[I] = 0
self.p[I] = 0
self.x[I] = 0
@ti.kernel
def init(self, r: ti.template(), k: ti.template()):
        r'''
Set up the solver for $\nabla^2 x = k r$, a scaled Poisson problem.
:parameter k: (scalar) A scaling factor of the right-hand side.
:parameter r: (ti.field) Unscaled right-hand side.
'''
for I in ti.grouped(ti.ndrange(*[self.N] * self.dim)):
self.init_r(I, r[I] * k)
@ti.func
def get_x(self, I):
I = I + self.N_ext
return self.x[I]
@ti.kernel
def get_result(self, x: ti.template()):
'''
Get the solution field.
:parameter x: (ti.field) The field to store the solution
'''
for I in ti.grouped(ti.ndrange(*[self.N] * self.dim)):
x[I] = self.get_x(I)
@ti.func
def neighbor_sum(self, x, I):
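        # Sum the 2*dim face neighbours of cell I -- the off-diagonal part of
        # the standard second-order Laplacian stencil.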
ret = ti.cast(0.0, self.real)
for i in ti.static(range(self.dim)):
offset = ti.Vector.unit(self.dim, i)
ret += x[I + offset] + x[I - offset]
return ret
@ti.kernel
def compute_Ap(self):
for I in ti.grouped(self.Ap):
self.Ap[I] = 2 * self.dim * self.p[I] - self.neighbor_sum(
self.p, I)
@ti.kernel
def reduce(self, p: ti.template(), q: ti.template()):
self.sum[None] = 0
for I in ti.grouped(p):
self.sum[None] += p[I] * q[I]
@ti.kernel
def update_x(self):
for I in ti.grouped(self.p):
self.x[I] += self.alpha[None] * self.p[I]
@ti.kernel
def update_r(self):
for I in ti.grouped(self.p):
self.r[0][I] -= self.alpha[None] * self.Ap[I]
@ti.kernel
def update_p(self):
for I in ti.grouped(self.p):
self.p[I] = self.z[0][I] + self.beta[None] * self.p[I]
@ti.kernel
def restrict(self, l: ti.template()):
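        # Compute the level-l residual of the current guess and accumulate it
        # onto the next coarser grid (each coarse cell gathers from 2**dim
        # fine cells, weighted by 0.5).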
for I in ti.grouped(self.r[l]):
res = self.r[l][I] - (2 * self.dim * self.z[l][I] -
self.neighbor_sum(self.z[l], I))
self.r[l + 1][I // 2] += res * 0.5
@ti.kernel
def prolongate(self, l: ti.template()):
for I in ti.grouped(self.z[l]):
self.z[l][I] = self.z[l + 1][I // 2]
@ti.kernel
def smooth(self, l: ti.template(), phase: ti.template()):
# phase = red/black Gauss-Seidel phase
for I in ti.grouped(self.r[l]):
if (I.sum()) & 1 == phase:
self.z[l][I] = (self.r[l][I] + self.neighbor_sum(
self.z[l], I)) / (2 * self.dim)
def apply_preconditioner(self):
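        # One multigrid V-cycle, used as the preconditioner z = M^-1 r:
        # pre-smooth and restrict the residual down the hierarchy, smooth
        # heavily on the coarsest grid, then prolongate back up with
        # post-smoothing.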
self.z[0].fill(0)
for l in range(self.n_mg_levels - 1):
for i in range(self.pre_and_post_smoothing << l):
self.smooth(l, 0)
self.smooth(l, 1)
self.z[l + 1].fill(0)
self.r[l + 1].fill(0)
self.restrict(l)
for i in range(self.bottom_smoothing):
self.smooth(self.n_mg_levels - 1, 0)
self.smooth(self.n_mg_levels - 1, 1)
for l in reversed(range(self.n_mg_levels - 1)):
self.prolongate(l)
for i in range(self.pre_and_post_smoothing << l):
self.smooth(l, 1)
self.smooth(l, 0)
def solve(self,
max_iters=-1,
eps=1e-12,
abs_tol=1e-12,
rel_tol=1e-12,
verbose=False):
'''
Solve a Poisson problem.
:parameter max_iters: Specify the maximal iterations. -1 for no limit.
:parameter eps: Specify a non-zero value to prevent ZeroDivisionError.
:parameter abs_tol: Specify the absolute tolerance of loss.
:parameter rel_tol: Specify the tolerance of loss relative to initial loss.
'''
self.reduce(self.r[0], self.r[0])
initial_rTr = self.sum[None]
tol = max(abs_tol, initial_rTr * rel_tol)
# self.r = b - Ax = b since self.x = 0
# self.p = self.r = self.r + 0 self.p
if self.use_multigrid:
self.apply_preconditioner()
else:
self.z[0].copy_from(self.r[0])
self.update_p()
self.reduce(self.z[0], self.r[0])
old_zTr = self.sum[None]
# Conjugate gradients
iter = 0
while max_iters == -1 or iter < max_iters:
# self.alpha = rTr / pTAp
self.compute_Ap()
self.reduce(self.p, self.Ap)
pAp = self.sum[None]
self.alpha[None] = old_zTr / (pAp + eps)
# self.x = self.x + self.alpha self.p
self.update_x()
# self.r = self.r - self.alpha self.Ap
self.update_r()
# check for convergence
self.reduce(self.r[0], self.r[0])
rTr = self.sum[None]
if verbose:
print(f'iter {iter}, |residual|_2={math.sqrt(rTr)}')
if rTr < tol:
break
# self.z = M^-1 self.r
if self.use_multigrid:
self.apply_preconditioner()
else:
self.z[0].copy_from(self.r[0])
# self.beta = new_rTr / old_rTr
self.reduce(self.z[0], self.r[0])
new_zTr = self.sum[None]
self.beta[None] = new_zTr / (old_zTr + eps)
# self.p = self.z + self.beta self.p
self.update_p()
old_zTr = new_zTr
iter += 1
class MGPCG_Example(MGPCG):
def __init__(self):
super().__init__(dim=3, N=128, n_mg_levels=4)
self.N_gui = 512 # gui resolution
self.pixels = ti.field(dtype=float,
shape=(self.N_gui, self.N_gui)) # image buffer
@ti.kernel
def init(self):
for I in ti.grouped(ti.ndrange(*[self.N] * self.dim)):
r_I = 5.0
for k in ti.static(range(self.dim)):
r_I *= ti.cos(5 * np.pi * I[k] / self.N)
self.init_r(I, r_I)
@ti.kernel
def paint(self):
if ti.static(self.dim == 3):
kk = self.N_tot * 3 // 8
for i, j in self.pixels:
ii = int(i * self.N / self.N_gui) + self.N_ext
jj = int(j * self.N / self.N_gui) + self.N_ext
self.pixels[i, j] = self.x[ii, jj, kk] / self.N_tot
def run(self, verbose=False):
self.init()
self.solve(max_iters=400, verbose=verbose)
self.paint()
ti.imshow(self.pixels)
ti.kernel_profiler_print()
if __name__ == '__main__':
ti.init(kernel_profiler=True)
solver = MGPCG_Example()
t = time.time()
solver.run(verbose=True)
print(f'Solver time: {time.time() - t:.3f} s')
| mit | 2,937,493,401,941,731,300 | 30.527778 | 128 | 0.508921 | false | 3.21075 | false | false | false |
Brunel-Visualization/Brunel | python/brunel/brunel_util.py | 1 | 1472 | # Copyright (c) 2015 IBM Corporation and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# If the JVM cannot be located automatically, set this variable (directly, or
# via the BRUNEL_CONFIG environment variable parsed below) to the fully
# qualified path to the JVM: typically jvm.dll on Windows or libjvm.so on Unix.
import os
JVM_PATH = ""
D3_LOC = "https://cdnjs.cloudflare.com/ajax/libs/d3/4.13.0/d3.min"
TOPO_JSON_LOC = "https://cdnjs.cloudflare.com/ajax/libs/topojson/1.6.20/topojson.min"
JS_LOC = "/nbextensions/brunel_ext"
BRUNEL_CONFIG = os.getenv("BRUNEL_CONFIG", "")
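# Example (hypothetical values): the variable holds semicolon-separated
# key=value pairs, e.g.
#   BRUNEL_CONFIG="jvm=/usr/lib/jvm/jre/lib/server/libjvm.so;locjavascript=/custom/brunel_ext"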
opts = BRUNEL_CONFIG.strip().split(";")
for opt in opts:
keyval = opt.strip().split("=");
if keyval[0].strip().lower() == "jvm":
JVM_PATH = keyval[1]
elif keyval[0].strip().lower() == "locd3":
D3_LOC = keyval[1]
elif keyval[0].strip().lower() == "locjavascript":
JS_LOC = keyval[1]
elif keyval[0].strip().lower() == "loctopojson":
TOPO_JSON_LOC = keyval[1]
| apache-2.0 | 7,878,519,849,403,592,000 | 39.888889 | 137 | 0.69769 | false | 3.213974 | false | false | false |
scenarios/tensorflow | tensorflow/contrib/distributions/python/ops/bijector.py | 2 | 92884 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Bijector Ops.
An API for invertible, differentiable transformations of random variables.
## Background
Differentiable, bijective transformations of continuous random variables alter
the calculations made in the cumulative/probability distribution functions and
sample function. This module provides a standard interface for making these
manipulations.
For more details and examples, see the `Bijector` docstring.
To apply a `Bijector`, use `distributions.TransformedDistribution`.
## Bijectors
@@Affine
@@AffineLinearOperator
@@Bijector
@@Chain
@@CholeskyOuterProduct
@@Exp
@@Identity
@@Inline
@@Invert
@@SigmoidCentered
@@SoftmaxCentered
@@Softplus
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import contextlib
import itertools
import math
import re
import numpy as np
import six
from tensorflow.contrib import framework as contrib_framework
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.distributions.python.ops import operator_pd_cholesky
from tensorflow.contrib.distributions.python.ops import operator_pd_diag
from tensorflow.contrib.distributions.python.ops import operator_pd_identity
from tensorflow.contrib.distributions.python.ops import operator_pd_vdvt_update
from tensorflow.contrib.distributions.python.ops.shape import _DistributionShape
from tensorflow.contrib.linalg.python.ops import linear_operator
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
__all__ = [
"Affine",
"AffineLinearOperator",
"Bijector",
"Chain",
"CholeskyOuterProduct",
"Exp",
"Identity",
"Inline",
"Invert",
"PowerTransform",
"SigmoidCentered",
"SoftmaxCentered",
"Softplus",
]
def _as_tensor(x, name):
"""Convenience to convert to `Tensor` or leave as `None`."""
return None if x is None else ops.convert_to_tensor(x, name=name)
class _Mapping(collections.namedtuple("_Mapping",
["x", "y", "ildj", "condition_kwargs"])):
"""Helper class to make it easier to manage caching in `Bijector`."""
def __new__(cls, x=None, y=None, ildj=None, condition_kwargs=None):
"""Custom __new__ so namedtuple items have defaults.
Args:
x: `Tensor`. Forward.
y: `Tensor`. Inverse.
ildj: `Tensor`. Inverse log det Jacobian.
condition_kwargs: Python dictionary. Extra args supplied to
forward/inverse/etc functions.
Returns:
mapping: New instance of _Mapping.
"""
return super(_Mapping, cls).__new__(cls, x, y, ildj, condition_kwargs)
@property
def x_key(self):
"""Returns key used for caching Y=g(X)."""
return (self.x,) + self._deep_tuple(tuple(sorted(
self.condition_kwargs.items())))
@property
def y_key(self):
"""Returns key used for caching X=g^{-1}(Y)."""
return (self.y,) + self._deep_tuple(tuple(sorted(
self.condition_kwargs.items())))
def merge(self, x=None, y=None, ildj=None,
condition_kwargs=None, mapping=None):
"""Returns new _Mapping with args merged with self.
Args:
x: `Tensor`. Forward.
y: `Tensor`. Inverse.
ildj: `Tensor`. Inverse log det Jacobian.
condition_kwargs: Python dictionary. Extra args supplied to
forward/inverse/etc functions.
mapping: Instance of _Mapping to merge. Can only be specified if no other
arg is specified.
Returns:
mapping: New instance of `_Mapping` which has inputs merged with self.
Raises:
ValueError: if mapping and any other arg is not `None`.
"""
if mapping is None:
mapping = _Mapping(x=x, y=y, ildj=ildj,
condition_kwargs=condition_kwargs)
elif not all(arg is None for arg in [x, y, ildj, condition_kwargs]):
raise ValueError("Cannot specify mapping and individual args.")
return _Mapping(
x=self._merge(self.x, mapping.x),
y=self._merge(self.y, mapping.y),
ildj=self._merge(self.ildj, mapping.ildj),
condition_kwargs=self._merge(self.condition_kwargs,
mapping.condition_kwargs))
def _merge(self, old, new):
"""Helper to merge which handles merging one value."""
if old is None:
return new
elif new is not None and old != new:
raise ValueError("Incompatible values: %s != %s" % (old, new))
return old
def _deep_tuple(self, x):
"""Converts lists of lists to tuples of tuples."""
return (tuple(map(self._deep_tuple, x))
if isinstance(x, (list, tuple)) else x)
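# Illustrative sketch (hypothetical helper, not used by the library): how
# `_Mapping.merge` accumulates cached values for one X<->Y pair. Placeholder
# strings stand in for real `Tensor`s.
def _example_mapping_merge():
  mapping = _Mapping(x="x", condition_kwargs={})
  mapping = mapping.merge(y="g(x)")  # Cache the forward evaluation Y=g(X).
  mapping = mapping.merge(ildj="ildj")  # Later, cache the inverse Jacobian.
  # mapping.x_key and mapping.y_key index the forward/inverse cache dicts.
  return mapping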
@six.add_metaclass(abc.ABCMeta)
class Bijector(object):
"""Interface for transforming a `Distribution` sample.
A `Bijector` implements a
[diffeomorphism](https://en.wikipedia.org/wiki/Diffeomorphism), i.e., a
bijective, differentiable function. A `Bijector` is used by
`TransformedDistribution` but can be generally used for transforming a
`Distribution` generated `Tensor`. A `Bijector` is characterized by three
operations:
1. Forward Evaluation
Useful for turning one random outcome into another random outcome from a
different distribution.
2. Inverse Evaluation
Useful for "reversing" a transformation to compute one probability in
terms of another.
3. (log o det o Jacobian o inverse)(x)
"The log of the determinant of the matrix of all first-order partial
derivatives of the inverse function."
Useful for inverting a transformation to compute one probability in terms
of another. Geometrically, the det(Jacobian) is the volume of the
transformation and is used to scale the probability.
By convention, transformations of random variables are named in terms of the
forward transformation. The forward transformation creates samples, the
inverse is useful for computing probabilities.
Example Use:
- Basic properties:
```python
x = ... # A tensor.
# Evaluate forward transformation.
fwd_x = my_bijector.forward(x)
x == my_bijector.inverse(fwd_x)
x != my_bijector.forward(fwd_x) # Not equal because g(x) != g(g(x)).
```
- Computing a log-likelihood:
```python
def transformed_log_pdf(bijector, log_pdf, x):
return (bijector.inverse_log_det_jacobian(x) +
log_pdf(bijector.inverse(x)))
```
- Transforming a random outcome:
```python
def transformed_sample(bijector, x):
return bijector.forward(x)
```
Example transformations:
- "Exponential"
```
Y = g(X) = exp(X)
X ~ Normal(0, 1) # Univariate.
```
Implies:
```
g^{-1}(Y) = log(Y)
|Jacobian(g^{-1})(y)| = 1 / y
Y ~ LogNormal(0, 1), i.e.,
prob(Y=y) = |Jacobian(g^{-1})(y)| * prob(X=g^{-1}(y))
= (1 / y) Normal(log(y); 0, 1)
```
Here is an example of how one might implement the `Exp` bijector:
```
class Exp(Bijector):
def __init__(self, event_ndims=0, validate_args=False, name="exp"):
super(Exp, self).__init__(batch_ndims=0, event_ndims=event_ndims,
validate_args=validate_args, name=name)
def _forward(self, x):
return math_ops.exp(x)
def _inverse_and_inverse_log_det_jacobian(self, y):
x = math_ops.log(y)
return x, -self._forward_log_det_jacobian(x)
def _forward_log_det_jacobian(self, x):
if self.shaper is None:
raise ValueError("Jacobian requires known event_ndims.")
_, _, event_dims = self.shaper.get_dims(x)
return math_ops.reduce_sum(x, reduction_indices=event_dims)
```
- "Affine"
```
Y = g(X) = sqrtSigma * X + mu
X ~ MultivariateNormal(0, I_d)
```
Implies:
```
g^{-1}(Y) = inv(sqrtSigma) * (Y - mu)
|Jacobian(g^{-1})(y)| = det(inv(sqrtSigma))
Y ~ MultivariateNormal(mu, sqrtSigma) , i.e.,
prob(Y=y) = |Jacobian(g^{-1})(y)| * prob(X=g^{-1}(y))
= det(sqrtSigma)^(-d) *
MultivariateNormal(inv(sqrtSigma) * (y - mu); 0, I_d)
```
Example of why a `Bijector` needs to understand sample, batch, event
partitioning:
- Consider the `Exp` `Bijector` applied to a `Tensor` which has sample, batch,
and event (S, B, E) shape semantics. Suppose
the `Tensor`'s partitioned-shape is `(S=[4], B=[2], E=[3, 3])`.
For `Exp`, the shape of the `Tensor` returned by `forward` and `inverse` is
unchanged, i.e., `[4, 2, 3, 3]`. However the shape returned by
`inverse_log_det_jacobian` is `[4, 2]` because the Jacobian is a reduction
over the event dimensions.
Subclass Requirements:
- Typically subclasses implement `_forward` and one or both of:
- `_inverse`, `_inverse_log_det_jacobian`,
- `_inverse_and_inverse_log_det_jacobian`.
- If the `Bijector`'s use is limited to `TransformedDistribution` (or friends
like `QuantizedDistribution`) then depending on your use, you may not need
to implement all of `_forward` and `_inverse` functions. Examples:
1. Sampling (e.g., `sample`) only requires `_forward`.
2. Probability functions (e.g., `prob`, `cdf`, `survival`) only require
`_inverse` (and related).
3. Only calling probability functions on the output of `sample` means
`_inverse` can be implemented as a cache lookup.
See `Example Use` [above] which shows how these functions are used to
transform a distribution. (Note: `_forward` could theoretically be
implemented as a cache lookup but this would require controlling the
underlying sample generation mechanism.)
- If computation can be shared among `_inverse` and
`_inverse_log_det_jacobian` it is preferable to implement
`_inverse_and_inverse_log_det_jacobian`. This usually reduces
graph-construction overhead because a `Distribution`'s implementation of
`log_prob` will need to evaluate both the inverse Jacobian as well as the
inverse function.
- If an additional use case needs just `inverse` or just
`inverse_log_det_jacobian` then he or she may also wish to implement these
functions to avoid computing the `inverse_log_det_jacobian` or the
`inverse`, respectively.
- Subclasses should implement `_get_forward_event_shape`,
`_forward_event_shape` (and `inverse` counterparts) if the transformation is
shape-changing. By default the event-shape is assumed unchanged from input.
Tips for implementing `_inverse` and `_inverse_log_det_jacobian`:
- As case 3 [above] indicates, under some circumstances the inverse function
can be implemented as a cache lookup.
- The inverse `log o det o Jacobian` can be implemented as the negative of the
forward `log o det o Jacobian`. This is useful if the `inverse` is
implemented as a cache or the inverse Jacobian is computationally more
expensive (e.g., `CholeskyOuterProduct` `Bijector`). The following
demonstrates the suggested implementation.
```python
  def _inverse_and_inverse_log_det_jacobian(self, y):
     x = # ... implement inverse, possibly via cache.
     return x, -self._forward_log_det_jacobian(x)  # Note negation.
  ```
  By overriding the `_inverse_and_inverse_log_det_jacobian` function we have access to
the inverse in one call.
The correctness of this approach can be seen from the following claim.
- Claim:
Assume `Y=g(X)` is a bijection whose derivative exists and is nonzero
for its domain, i.e., `d/dX g(X)!=0`. Then:
```none
(log o det o jacobian o g^{-1})(Y) = -(log o det o jacobian o g)(X)
```
- Proof:
From the bijective, nonzero differentiability of `g`, the
[inverse function theorem](
https://en.wikipedia.org/wiki/Inverse_function_theorem)
implies `g^{-1}` is differentiable in the image of `g`.
Applying the chain rule to `y = g(x) = g(g^{-1}(y))` yields
`I = g'(g^{-1}(y))*g^{-1}'(y)`.
    The same theorem also implies `g^{-1}'` is non-singular, therefore:
`inv[ g'(g^{-1}(y)) ] = g^{-1}'(y)`.
The claim follows from [properties of determinant](
https://en.wikipedia.org/wiki/Determinant#Multiplicativity_and_matrix_groups).
- If possible, prefer a direct implementation of the inverse Jacobian. This
should have superior numerical stability and will often share subgraphs with
the `_inverse` implementation.
"""
@abc.abstractmethod
def __init__(self,
batch_ndims=None,
event_ndims=None,
graph_parents=None,
is_constant_jacobian=False,
validate_args=False,
dtype=None,
name=None):
"""Constructs Bijector.
A `Bijector` transforms random variables into new random variables.
Examples:
```python
# Create the Y = g(X) = X transform which operates on 4-Tensors of vectors.
identity = Identity(batch_ndims=4, event_ndims=1)
# Create the Y = g(X) = exp(X) transform which operates on matrices.
exp = Exp(batch_ndims=0, event_ndims=2)
```
See `Bijector` subclass docstring for more details and specific examples.
Args:
batch_ndims: number of dimensions associated with batch coordinates.
event_ndims: number of dimensions associated with event coordinates.
graph_parents: Python list of graph prerequisites of this `Bijector`.
is_constant_jacobian: `Boolean` indicating that the Jacobian is not a
function of the input.
validate_args: `Boolean`, default `False`. Whether to validate input with
asserts. If `validate_args` is `False`, and the inputs are invalid,
correct behavior is not guaranteed.
dtype: `tf.dtype` supported by this `Bijector`. `None` means dtype is not
enforced.
name: The name to give Ops created by the initializer.
"""
if batch_ndims is None or event_ndims is None:
self._shaper = None # Apparently subclass will create.
else:
self._shaper = _DistributionShape(
batch_ndims=batch_ndims,
event_ndims=event_ndims,
validate_args=validate_args)
self._graph_parents = graph_parents or []
self._is_constant_jacobian = is_constant_jacobian
self._validate_args = validate_args
self._dtype = dtype
self._from_y = {}
self._from_x = {}
# Using abbreviation ildj for "inverse log det Jacobian."
# This variable is not `None` iff is_constant_jacobian is `True`.
self._constant_ildj = None
if name:
self._name = name
else:
# We want the default convention to be snake_case rather than CamelCase
# since `Chain` uses bijector.name as the condition_kwargs dictionary key.
def camel_to_snake(name):
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
self._name = camel_to_snake(type(self).__name__)
@property
def shaper(self):
"""Returns shape object used to manage shape constraints."""
return self._shaper
@property
def graph_parents(self):
"""Returns this `Bijector`'s graph_parents as a Python list."""
return self._graph_parents
@property
def is_constant_jacobian(self):
"""Returns true iff the Jacobian is not a function of x.
Note: Jacobian is either constant for both forward and inverse or neither.
Returns:
`Boolean`.
"""
return self._is_constant_jacobian
@property
def validate_args(self):
"""Returns True if Tensor arguments will be validated."""
return self._validate_args
@property
def dtype(self):
"""dtype of `Tensor`s transformable by this distribution."""
return self._dtype
@property
def name(self):
"""Returns the string name of this `Bijector`."""
return self._name
def _forward_event_shape(self, input_shape):
"""Subclass implementation for `forward_event_shape` public function."""
return input_shape
def forward_event_shape(self, input_shape, name="forward_event_shape"):
"""Shape of a single sample from a single batch as an `int32` 1D `Tensor`.
Args:
input_shape: `Tensor`, `int32` vector indicating event-portion shape
passed into `forward` function.
name: name to give to the op
Returns:
forward_event_shape: `Tensor`, `int32` vector indicating event-portion
shape after applying `forward`.
"""
with self._name_scope(name, [input_shape]):
input_shape = ops.convert_to_tensor(input_shape, dtype=dtypes.int32,
name="input_shape")
return self._forward_event_shape(input_shape)
def _get_forward_event_shape(self, input_shape):
"""Subclass implementation for `get_forward_event_shape` public function."""
return input_shape
def get_forward_event_shape(self, input_shape):
"""Shape of a single sample from a single batch as a `TensorShape`.
Same meaning as `forward_event_shape`. May be only partially defined.
Args:
input_shape: `TensorShape` indicating event-portion shape passed into
`forward` function.
Returns:
forward_event_shape: `TensorShape` indicating event-portion shape after
applying `forward`. Possibly unknown.
"""
return self._get_forward_event_shape(tensor_shape.TensorShape(input_shape))
def _inverse_event_shape(self, output_shape):
"""Subclass implementation for `inverse_event_shape` public function."""
return output_shape
def inverse_event_shape(self, output_shape, name="inverse_event_shape"):
"""Shape of a single sample from a single batch as an `int32` 1D `Tensor`.
Args:
output_shape: `Tensor`, `int32` vector indicating event-portion shape
passed into `inverse` function.
name: name to give to the op
Returns:
inverse_event_shape: `Tensor`, `int32` vector indicating event-portion
shape after applying `inverse`.
"""
with self._name_scope(name, [output_shape]):
output_shape = ops.convert_to_tensor(output_shape, dtype=dtypes.int32,
name="output_shape")
return self._inverse_event_shape(output_shape)
def _get_inverse_event_shape(self, output_shape):
"""Subclass implementation for `get_inverse_event_shape` public function."""
    return output_shape
def get_inverse_event_shape(self, output_shape):
"""Shape of a single sample from a single batch as a `TensorShape`.
Same meaning as `inverse_event_shape`. May be only partially defined.
Args:
output_shape: `TensorShape` indicating event-portion shape passed into
`inverse` function.
Returns:
inverse_event_shape: `TensorShape` indicating event-portion shape after
applying `inverse`. Possibly unknown.
"""
    return self._get_inverse_event_shape(tensor_shape.TensorShape(output_shape))
def _forward(self, x):
"""Subclass implementation for `forward` public function."""
raise NotImplementedError("forward not implemented.")
def forward(self, x, name="forward", **condition_kwargs):
"""Returns the forward `Bijector` evaluation, i.e., X = g(Y).
Args:
x: `Tensor`. The input to the "forward" evaluation.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
`Tensor`.
Raises:
TypeError: if `self.dtype` is specified and `x.dtype` is not
`self.dtype`.
NotImplementedError: if `_forward` is not implemented.
"""
with self._name_scope(name, [x]):
x = ops.convert_to_tensor(x, name="x")
self._maybe_assert_dtype(x)
mapping = self._lookup(x=x, condition_kwargs=condition_kwargs)
if mapping.y is not None:
return mapping.y
mapping = mapping.merge(y=self._forward(x, **condition_kwargs))
self._cache(mapping)
return mapping.y
def _inverse(self, y):
"""Subclass implementation for `inverse` public function."""
raise NotImplementedError("inverse not implemented")
def inverse(self, y, name="inverse", **condition_kwargs):
"""Returns the inverse `Bijector` evaluation, i.e., X = g^{-1}(Y).
Args:
y: `Tensor`. The input to the "inverse" evaluation.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
`Tensor`.
Raises:
TypeError: if `self.dtype` is specified and `y.dtype` is not
`self.dtype`.
NotImplementedError: if neither `_inverse` nor
`_inverse_and_inverse_log_det_jacobian` are implemented.
"""
with self._name_scope(name, [y]):
y = ops.convert_to_tensor(y, name="y")
self._maybe_assert_dtype(y)
mapping = self._lookup(y=y, condition_kwargs=condition_kwargs)
if mapping.x is not None:
return mapping.x
ildj = None
try:
x = self._inverse(y, **condition_kwargs)
except NotImplementedError as original_error:
# Since _inverse was not implemented, try to see if it's implemented
# by the _inverse_and_inverse_log_det_jacobian member.
try:
x, ildj = self._inverse_and_inverse_log_det_jacobian(
y, **condition_kwargs)
except NotImplementedError:
raise original_error
if self._constant_ildj is not None:
ildj = self._constant_ildj # Use the "global" result.
elif self.is_constant_jacobian:
self._constant_ildj = ildj
x = x if mapping.x is None else mapping.x
mapping = mapping.merge(x=x, ildj=ildj)
self._cache(mapping)
return mapping.x
def _inverse_log_det_jacobian(self, y):
"""Subclass implementation for `inverse_log_det_jacobian` public function.""" # pylint: disable=line-too-long
raise NotImplementedError("inverse_log_det_jacobian not implemented.")
def inverse_log_det_jacobian(
self, y, name="inverse_log_det_jacobian", **condition_kwargs):
"""Returns the (log o det o Jacobian o inverse)(y).
Mathematically, returns: `log(det(dX/dY))(Y)`. (Recall that: `X=g^{-1}(Y)`.)
Note that `forward_log_det_jacobian` is the negative of this function.
Args:
y: `Tensor`. The input to the "inverse" Jacobian evaluation.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
`Tensor`.
Raises:
TypeError: if `self.dtype` is specified and `y.dtype` is not
`self.dtype`.
NotImplementedError: if neither `_inverse_log_det_jacobian` nor
`_inverse_and_inverse_log_det_jacobian` are implemented.
"""
with self._name_scope(name, [y]):
if self._constant_ildj is not None:
return self._constant_ildj
y = ops.convert_to_tensor(y, name="y")
self._maybe_assert_dtype(y)
mapping = self._lookup(y=y, condition_kwargs=condition_kwargs)
if mapping.ildj is not None:
return mapping.ildj
try:
x = mapping.x
ildj = self._inverse_log_det_jacobian(y, **condition_kwargs)
except NotImplementedError as original_error:
# Since _inverse_log_det_jacobian was not implemented, try to see if
# it's implemented by the _inverse_and_inverse_log_det_jacobian member.
try:
x, ildj = self._inverse_and_inverse_log_det_jacobian(
y, **condition_kwargs)
except NotImplementedError:
raise original_error
if mapping.x is not None:
x = mapping.x
if self.is_constant_jacobian:
self._constant_ildj = ildj
x = x if mapping.x is None else mapping.x
mapping = mapping.merge(x=x, ildj=ildj)
self._cache(mapping)
return mapping.ildj
def _inverse_and_inverse_log_det_jacobian(self, y):
"""Subclass implementation for `inverse_and_inverse_log_det_jacobian` public function.""" # pylint: disable=line-too-long
raise NotImplementedError(
"inverse_and_inverse_log_det_jacobian not implemented.")
def inverse_and_inverse_log_det_jacobian(
self, y, name="inverse_and_inverse_log_det_jacobian", **condition_kwargs):
"""Returns both the inverse evaluation and inverse_log_det_jacobian.
Enables possibly more efficient calculation when both inverse and
corresponding Jacobian are needed.
See `inverse()`, `inverse_log_det_jacobian()` for more details.
Args:
y: `Tensor`. The input to the "inverse" Jacobian evaluation.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
    Returns:
      A tuple of `Tensor`s: `(inverse(y), inverse_log_det_jacobian(y))`.
Raises:
TypeError: if `self.dtype` is specified and `y.dtype` is not
`self.dtype`.
NotImplementedError: if neither `_inverse_and_inverse_log_det_jacobian`
nor {`_inverse`, `_inverse_log_det_jacobian`} are implemented.
"""
with self._name_scope(name, [y]):
y = ops.convert_to_tensor(y, name="y")
self._maybe_assert_dtype(y)
mapping = self._lookup(y=y, condition_kwargs=condition_kwargs)
if mapping.x is not None and mapping.ildj is not None:
return mapping.x, mapping.ildj
try:
x, ildj = self._inverse_and_inverse_log_det_jacobian(
y, **condition_kwargs)
except NotImplementedError as original_error:
# Since _inverse_and_inverse_log_det_jacobian was not implemented, try
# to see if we can separately use _inverse and
# _inverse_log_det_jacobian members.
try:
# We want this same try/except to catch either NotImplementedError.
x = self._inverse(y, **condition_kwargs)
if self._constant_ildj is None:
ildj = self._inverse_log_det_jacobian(y, **condition_kwargs)
except NotImplementedError:
raise original_error
if self._constant_ildj is not None:
ildj = self._constant_ildj # Ignore any ildj we may/not have.
elif self.is_constant_jacobian:
self._constant_ildj = ildj
# We use the mapped version of x, even if we re-computed x above with a
# call to self._inverse_and_inverse_log_det_jacobian. This prevents
# re-evaluation of the inverse in a common case.
x = x if mapping.x is None else mapping.x
mapping = mapping.merge(x=x, ildj=ildj)
self._cache(mapping)
return mapping.x, mapping.ildj
def _forward_log_det_jacobian(self, x):
"""Subclass implementation for `forward_log_det_jacobian` public function.""" # pylint: disable=line-too-long
raise NotImplementedError(
"forward_log_det_jacobian not implemented.")
def forward_log_det_jacobian(
self, x, name="forward_log_det_jacobian", **condition_kwargs):
"""Returns both the forward_log_det_jacobian.
Args:
x: `Tensor`. The input to the "forward" Jacobian evaluation.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
`Tensor`.
Raises:
      TypeError: if `self.dtype` is specified and `x.dtype` is not
        `self.dtype`.
NotImplementedError: if neither `_forward_log_det_jacobian`
nor {`_inverse`, `_inverse_log_det_jacobian`} are implemented.
"""
with self._name_scope(name, [x]):
if self._constant_ildj is not None:
# Need "-1. *" to avoid invalid-unary-operand-type linter warning.
return -1. * self._constant_ildj
x = ops.convert_to_tensor(x, name="x")
self._maybe_assert_dtype(x)
mapping = self._lookup(x=x, condition_kwargs=condition_kwargs)
if mapping.ildj is not None:
return -mapping.ildj
y = None
try:
ildj = -self._forward_log_det_jacobian(x, **condition_kwargs)
except NotImplementedError as original_error:
try:
# We want this same try/except to catch either NotImplementedError.
          y = self.forward(x, **condition_kwargs) if y is None else y
ildj = self.inverse_log_det_jacobian(y, **condition_kwargs)
except NotImplementedError:
raise original_error
if self.is_constant_jacobian:
self._constant_ildj = ildj
y = y if mapping.y is None else mapping.y
mapping = mapping.merge(y=y, ildj=ildj)
self._cache(mapping)
return -mapping.ildj
@contextlib.contextmanager
def _name_scope(self, name=None, values=None):
"""Helper function to standardize op scope."""
with ops.name_scope(self.name):
with ops.name_scope(
name, values=(values or []) + self.graph_parents) as scope:
yield scope
def _maybe_assert_dtype(self, x):
"""Helper to check dtype when self.dtype is known."""
if self.dtype is not None and self.dtype.base_dtype != x.dtype.base_dtype:
raise TypeError("Input had dtype %s but expected %s." %
(self.dtype, x.dtype))
def _cache(self, mapping):
"""Helper which stores mapping info in forward/inverse dicts."""
if self._constant_ildj is not None:
# Fold in ildj if known constant Jacobian.
mapping = mapping.merge(ildj=self._constant_ildj)
# Merging from lookup is an added check that we're not overwriting anything
# which is not None.
mapping = mapping.merge(mapping=self._lookup(
mapping.x, mapping.y, mapping.condition_kwargs))
if mapping.x is None and mapping.y is None:
raise ValueError("Caching expects at least one of (x,y) to be known, "
"i.e., not None.")
self._from_x[mapping.x_key] = mapping
self._from_y[mapping.y_key] = mapping
def _lookup(self, x=None, y=None, condition_kwargs=None):
"""Helper which retrieves mapping info from forward/inverse dicts."""
mapping = _Mapping(x=x, y=y, condition_kwargs=condition_kwargs)
# Since _cache requires both x,y to be set, we only need to do one cache
# lookup since the mapping is always in both or neither.
if mapping.x is not None:
return self._from_x.get(mapping.x_key, mapping)
if mapping.y is not None:
return self._from_y.get(mapping.y_key, mapping)
return mapping
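# A hedged illustration of the forward/inverse caching above (assumed usage,
# not executed here):
#   b = Exp()
#   y = b.forward(x)  # stores _Mapping(x=x, y=y) in _from_x/_from_y
#   b.inverse(y)      # cache hit: returns the original `x` without recompute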
class Inline(Bijector):
# pylint: disable=line-too-long
"""Bijector constructed from callables implementing forward, inverse, and inverse_log_det_jacobian.
Example Use:
```python
exp = Inline(
forward_fn=tf.exp,
inverse_fn=tf.log,
inverse_log_det_jacobian_fn=(
lambda y: -tf.reduce_sum(tf.log(y), reduction_indices=-1)),
name="exp")
```
The above example is equivalent to the `Bijector` `Exp(event_ndims=1)`.
"""
# pylint: enable=line-too-long
def __init__(self,
forward_fn=None,
inverse_fn=None,
inverse_log_det_jacobian_fn=None,
forward_log_det_jacobian_fn=None,
get_forward_event_shape_fn=None,
forward_event_shape_fn=None,
get_inverse_event_shape_fn=None,
inverse_event_shape_fn=None,
is_constant_jacobian=False,
validate_args=False,
name="inline"):
"""Creates a `Bijector` from callables.
Args:
forward_fn: Python callable implementing the forward transformation.
inverse_fn: Python callable implementing the inverse transformation.
inverse_log_det_jacobian_fn: Python callable implementing the
log o det o jacobian of the inverse transformation.
forward_log_det_jacobian_fn: Python callable implementing the
log o det o jacobian of the forward transformation.
get_forward_event_shape_fn: Python callable implementing non-identical
static event shape changes. Default: shape is assumed unchanged.
forward_event_shape_fn: Python callable implementing non-identical event
shape changes. Default: shape is assumed unchanged.
get_inverse_event_shape_fn: Python callable implementing non-identical
static event shape changes. Default: shape is assumed unchanged.
inverse_event_shape_fn: Python callable implementing non-identical event
shape changes. Default: shape is assumed unchanged.
is_constant_jacobian: `Boolean` indicating that the Jacobian is constant
for all input arguments.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String`, name given to ops managed by this object.
"""
super(Inline, self).__init__(
batch_ndims=0,
event_ndims=0,
is_constant_jacobian=is_constant_jacobian,
validate_args=validate_args,
name=name)
self._forward_fn = forward_fn
self._inverse_fn = inverse_fn
self._inverse_log_det_jacobian_fn = inverse_log_det_jacobian_fn
self._forward_log_det_jacobian_fn = forward_log_det_jacobian_fn
self._get_forward_event_shape_fn = get_forward_event_shape_fn
self._forward_event_shape_fn = forward_event_shape_fn
self._get_inverse_event_shape_fn = get_inverse_event_shape_fn
self._inverse_event_shape_fn = inverse_event_shape_fn
def _get_forward_event_shape(self, input_shape):
if self._get_forward_event_shape_fn is None:
# By default assume shape doesn't change.
return input_shape
return self._get_forward_event_shape_fn(input_shape)
def _forward_event_shape(self, input_shape):
if self._forward_event_shape_fn is None:
# By default assume shape doesn't change.
return input_shape
return self._forward_event_shape_fn(input_shape)
def _get_inverse_event_shape(self, output_shape):
if self._get_inverse_event_shape_fn is None:
# By default assume shape doesn't change.
return output_shape
return self._get_inverse_event_shape_fn(output_shape)
def _inverse_event_shape(self, output_shape):
if self._inverse_event_shape_fn is None:
# By default assume shape doesn't change.
return output_shape
return self._inverse_event_shape_fn(output_shape)
def _forward(self, x, **condition_kwargs):
if not callable(self._forward_fn):
raise NotImplementedError(
"forward_fn is not a callable function.")
return self._forward_fn(x, **condition_kwargs)
def _inverse(self, y, **condition_kwargs):
if not callable(self._inverse_fn):
raise NotImplementedError(
"inverse_fn is not a callable function.")
return self._inverse_fn(y, **condition_kwargs)
def _inverse_log_det_jacobian(self, y, **condition_kwargs):
if not callable(self._inverse_log_det_jacobian_fn):
raise NotImplementedError(
"inverse_log_det_jacobian_fn is not a callable function.")
return self._inverse_log_det_jacobian_fn(y, **condition_kwargs)
def _forward_log_det_jacobian(self, y, **condition_kwargs):
if not callable(self._forward_log_det_jacobian_fn):
raise NotImplementedError(
"forward_log_det_jacobian_fn is not a callable function.")
return self._forward_log_det_jacobian_fn(y, **condition_kwargs)
class Invert(Bijector):
"""Bijector which inverts another Bijector.
Example Use: [ExpGammaDistribution (see Background & Context)](
https://reference.wolfram.com/language/ref/ExpGammaDistribution.html)
models `Y=log(X)` where `X ~ Gamma`.
```python
exp_gamma_distribution = TransformedDistribution(
Gamma(alpha=1., beta=2.),
    bijector.Invert(bijector.Exp()))
```
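  A hedged sketch of the swap semantics (assuming the `Exp` bijector from this
  module):
  ```python
  inv_exp = bijector.Invert(bijector.Exp())
  inv_exp.forward(y)  # == bijector.Exp().inverse(y), i.e., tf.log(y)
  inv_exp.inverse(x)  # == bijector.Exp().forward(x), i.e., tf.exp(x)
  ```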
"""
def __init__(self, bijector, validate_args=False, name=None):
"""Creates a `Bijector` which swaps the meaning of `inverse` and `forward`.
Note: An inverted bijector's `inverse_log_det_jacobian` is often more
efficient if the base bijector implements `_forward_log_det_jacobian`. If
`_forward_log_det_jacobian` is not implemented then the following code is
used:
```python
y = self.inverse(x, **condition_kwargs)
return -self.inverse_log_det_jacobian(y, **condition_kwargs)
```
Args:
bijector: Bijector instance.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String`, name given to ops managed by this object.
"""
self._bijector = bijector
super(Invert, self).__init__(
graph_parents=bijector.graph_parents,
is_constant_jacobian=bijector.is_constant_jacobian,
validate_args=validate_args,
dtype=bijector.dtype,
name=name or "_".join(["invert", bijector.name]))
self._shaper = bijector.shaper
def _get_forward_event_shape(self, input_shape):
return self.bijector.get_inverse_event_shape(input_shape)
def _forward_event_shape(self, input_shape):
return self.bijector.inverse_event_shape(input_shape)
def _get_inverse_event_shape(self, output_shape):
return self.bijector.get_forward_event_shape(output_shape)
def _inverse_event_shape(self, output_shape):
return self.bijector.forward_event_shape(output_shape)
@property
def bijector(self):
return self._bijector
def _forward(self, x, **condition_kwargs):
return self.bijector.inverse(x, **condition_kwargs)
def _inverse_and_inverse_log_det_jacobian(self, y, **condition_kwargs):
return (self.bijector.forward(y, **condition_kwargs),
self.bijector.forward_log_det_jacobian(y, **condition_kwargs))
def _forward_log_det_jacobian(self, x, **condition_kwargs):
return self.bijector.inverse_log_det_jacobian(x, **condition_kwargs)
class Chain(Bijector):
"""Bijector which applies a sequence of bijectors.
Example Use:
```python
chain = Chain([Exp(), Softplus()], name="one_plus_exp")
```
Results in:
* Forward:
```python
exp = Exp()
softplus = Softplus()
Chain([exp, softplus]).forward(x)
= exp.forward(softplus.forward(x))
= tf.exp(tf.log(1. + tf.exp(x)))
= 1. + tf.exp(x)
```
* Inverse:
```python
exp = Exp()
softplus = Softplus()
Chain([exp, softplus]).inverse(y)
= softplus.inverse(exp.inverse(y))
= tf.log(tf.exp(tf.log(y)) - 1.)
= tf.log(y - 1.)
```
"""
def __init__(self, bijectors=(), validate_args=False, name=None):
"""Instantiates `Chain` bijector.
Args:
bijectors: Python list of bijector instances. An empty list makes this
bijector equivalent to the `Identity` bijector.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String`, name given to ops managed by this object. Default: E.g.,
`Chain([Exp(), Softplus()]).name == "chain_of_exp_of_softplus"`.
Raises:
ValueError: if bijectors have different dtypes.
"""
self._bijectors = bijectors
dtype = list(set([b.dtype for b in bijectors]))
    if len(dtype) > 2:
      raise ValueError("incompatible dtypes: %s" % dtype)
    elif len(dtype) == 2:
      # Two entries are compatible only when one of them is None (i.e., that
      # bijector is dtype-agnostic); two distinct concrete dtypes conflict.
      if None not in dtype:
        raise ValueError("incompatible dtypes: %s" % dtype)
      dtype = dtype[1] if dtype[0] is None else dtype[0]
elif len(dtype) == 1:
dtype = dtype[0]
else:
dtype = None
super(Chain, self).__init__(
graph_parents=list(itertools.chain.from_iterable(
b.graph_parents for b in bijectors)),
is_constant_jacobian=all(b.is_constant_jacobian for b in bijectors),
validate_args=validate_args,
dtype=dtype,
name=name or ("identity" if not bijectors else
"_of_".join(["chain"] + [b.name for b in bijectors])))
@property
def bijectors(self):
return self._bijectors
def _shape_helper(self, func_name, input_shape, reverse):
new_shape = input_shape
for b in reversed(self.bijectors) if reverse else self.bijectors:
func = getattr(b, func_name, None)
if func is None:
raise ValueError("unable to call %s on bijector %s (%s)" %
(func_name, b.name, func))
new_shape = func(new_shape)
return new_shape
def _get_forward_event_shape(self, input_shape):
return self._shape_helper("get_forward_event_shape", input_shape,
reverse=True)
def _forward_event_shape(self, input_shape):
return self._shape_helper("forward_event_shape", input_shape, reverse=True)
def _get_inverse_event_shape(self, output_shape):
return self._shape_helper("get_inverse_event_shape", output_shape,
reverse=False)
def _inverse_event_shape(self, output_shape):
return self._shape_helper("inverse_event_shape", output_shape,
reverse=False)
def _forward(self, x, **condition_kwargs):
y = x
for b in reversed(self.bijectors):
y = b.forward(y, **condition_kwargs.get(b.name, {}))
return y
def _inverse_and_inverse_log_det_jacobian(self, y, **condition_kwargs):
x = y
ildj = constant_op.constant(0., dtype=x.dtype,
name="inverse_log_det_jacobian")
for b in self.bijectors:
x, j = b.inverse_and_inverse_log_det_jacobian(
x, **condition_kwargs.get(b.name, {}))
ildj += j
return x, ildj
def _forward_log_det_jacobian(self, x, **condition_kwargs):
y = x
fldj = constant_op.constant(0., dtype=x.dtype,
name="forward_log_det_jacobian")
for b in reversed(self.bijectors):
bijector_condition_kwargs = condition_kwargs.get(b.name, {})
fldj += b.forward_log_det_jacobian(y, **bijector_condition_kwargs)
y = b.forward(y, **bijector_condition_kwargs)
return fldj
class Identity(Bijector):
"""Bijector which computes Y = g(X) = X.
Example Use:
```python
  # Create the Y=g(X)=X transform. `Identity` takes no shape arguments; it
  # applies elementwise to inputs of any shape.
  identity = Identity()
x = [[1., 2],
[3, 4]]
x == identity.forward(x) == identity.inverse(x)
```
"""
def __init__(self, validate_args=False, name="identity"):
super(Identity, self).__init__(
is_constant_jacobian=True,
validate_args=validate_args,
name=name)
def _forward(self, x):
return x
def _inverse_and_inverse_log_det_jacobian(self, y):
return y, constant_op.constant(0., dtype=y.dtype)
def _forward_log_det_jacobian(self, x):
return constant_op.constant(0., dtype=x.dtype)
class PowerTransform(Bijector):
"""Bijector which computes `Y = g(X) = (1 + X * c)**(1 / c), X >= -1 / c`.
The [power transform](https://en.wikipedia.org/wiki/Power_transform) maps
inputs from `[0, inf]` to `[-1/c, inf]`; this is equivalent to the `inverse`
of this bijector.
This bijector is equivalent to the `Exp` bijector when `c=0`.
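  Example Use (a hedged sketch; assumes this module is imported as
  `bijector`):
  ```python
  # With power c=0.5 the forward map is Y = (1 + X/2)**2.
  power_xform = bijector.PowerTransform(power=0.5)
  power_xform.forward([0., 1., 2.])
  # Result: approximately [1., 2.25, 4.].
  ```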
"""
def __init__(self,
power=0.,
event_ndims=0,
validate_args=False,
name="power_transform"):
"""Instantiates the `PowerTransform` bijector.
Args:
power: Python `float` scalar indicating the transform power, i.e.,
`Y = g(X) = (1 + X * c)**(1 / c)` where `c` is the `power`.
event_ndims: Python scalar indicating the number of dimensions associated
with a particular draw from the distribution.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String` name given to ops managed by this object.
Raises:
ValueError: if `power < 0` or is not known statically.
"""
self._graph_parents = []
self._name = name
self._validate_args = validate_args
with self._name_scope("init", values=[power]):
power = tensor_util.constant_value(
ops.convert_to_tensor(power, name="power"))
if power is None or power < 0:
raise ValueError("`power` must be a non-negative TF constant.")
self._power = power
super(PowerTransform, self).__init__(
batch_ndims=0,
event_ndims=event_ndims,
validate_args=validate_args,
name=name)
@property
def power(self):
"""The `c` in: `Y = g(X) = (1 + X * c)**(1 / c)`."""
return self._power
def _forward(self, x):
x = self._maybe_assert_valid_x(x)
if self.power == 0.:
return math_ops.exp(x)
# TODO(jvdillon): If large x accuracy is an issue, consider using
# (1. + x * self.power)**(1. / self.power) when x >> 1.
return math_ops.exp(math_ops.log1p(x * self.power) / self.power)
def _inverse_and_inverse_log_det_jacobian(self, y):
y = self._maybe_assert_valid_y(y)
if self.shaper is None:
raise ValueError("Jacobian cannot be computed with unknown event_ndims")
_, _, event_dims = self.shaper.get_dims(y)
if self.power == 0.:
x = math_ops.log(y)
ildj = -math_ops.reduce_sum(x, reduction_indices=event_dims)
return x, ildj
# TODO(jvdillon): If large y accuracy is an issue, consider using
# (y**self.power - 1.) / self.power when y >> 1.
x = math_ops.expm1(math_ops.log(y) * self.power) / self.power
ildj = (self.power - 1.) * math_ops.reduce_sum(
math_ops.log(y),
reduction_indices=event_dims)
return x, ildj
def _forward_log_det_jacobian(self, x):
x = self._maybe_assert_valid_x(x)
if self.shaper is None:
raise ValueError("Jacobian cannot be computed with unknown event_ndims")
_, _, event_dims = self.shaper.get_dims(x)
if self.power == 0.:
return math_ops.reduce_sum(x, reduction_indices=event_dims)
return (1. / self.power - 1.) * math_ops.reduce_sum(
math_ops.log1p(x * self.power),
reduction_indices=event_dims)
def _maybe_assert_valid_x(self, x):
if not self.validate_args or self.power == 0.:
return x
is_valid = check_ops.assert_non_negative(
1. + self.power * x,
message="Forward transformation input must be at least {}.".format(
-1. / self.power))
return control_flow_ops.with_dependencies([is_valid], x)
def _maybe_assert_valid_y(self, y):
if not self.validate_args:
return y
is_valid = check_ops.assert_positive(
y, message="Inverse transformation input must be greater than 0.")
return control_flow_ops.with_dependencies([is_valid], y)
class Exp(PowerTransform):
"""Bijector which computes Y = g(X) = exp(X).
Example Use:
```python
  # Create the Y=g(X)=exp(X) transform which works on Tensors with 2 event
  # ndims (i.e., vector of matrices).
  exp = Exp(event_ndims=2)
x = [[[1., 2],
[3, 4]],
[[5, 6],
[7, 8]]]
exp(x) == exp.forward(x)
log(x) == exp.inverse(x)
```
Note: the exp(.) is applied element-wise but the Jacobian is a reduction
over the event space.
"""
def __init__(self,
event_ndims=0,
validate_args=False,
name="exp"):
"""Instantiates the `Exp` bijector.
Args:
event_ndims: Scalar `int32` `Tensor` indicating the number of dimensions
associated with a particular draw from the distribution.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String` name given to ops managed by this object.
"""
super(Exp, self).__init__(
event_ndims=event_ndims,
validate_args=validate_args,
name=name)
# TODO(srvasude): Deprecate this class with a dedicated Linear Operator
# corresponding to TriL + V D V.T.
class _TriLPlusVDVTLightweightOperatorPD(object):
"""Helper/hidden class fake an OperatorPD for TriL+VDV.T."""
def __init__(self, tril, v, diag=None, validate_args=False):
"""Creates an instance of _TriLPlusVDVTLightweightOperatorPD.
WARNING: This object is not to be used outside of `Affine` where it is
currently being temporarily used for refactoring purposes.
Args:
tril: `Tensor` of shape `[B1,..,Bb, d, d]`.
v: `Tensor` of shape `[B1,...,Bb, d, k]`.
diag: `Tensor` of shape `[B1,...,Bb, k, k]` or None
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
"""
self._m = tril
self._v = v
self._validate_args = validate_args
self._inputs = [tril, v]
if diag is not None:
self._inputs += [diag]
self._d = operator_pd_diag.OperatorPDDiag(diag, verify_pd=validate_args)
self._d_inv = operator_pd_diag.OperatorPDDiag(1. / diag,
verify_pd=validate_args)
return
if v.get_shape().is_fully_defined():
v_shape = v.get_shape().as_list()
id_shape = v_shape[:-2] + [v_shape[-1], v_shape[-1]]
else:
v_shape = array_ops.shape(v)
id_shape = array_ops.concat([v_shape[:-2], [v_shape[-1], v_shape[-1]]], 0)
self._d = operator_pd_identity.OperatorPDIdentity(
id_shape, v.dtype, verify_pd=self.validate_args)
self._d_inv = self._d
@property
def inputs(self):
return self._inputs
@property
def dtype(self):
return self._m.dtype.base_dtype
@property
def validate_args(self):
return self._validate_args
def rank(self):
"""Returns `rank(self)`."""
return array_ops.rank(self._m)
def sqrt_matmul(self, x):
"""Computes `matmul(self, x)`.
Doesn't actually do the sqrt! Named as such to agree with API.
Args:
x: `Tensor`
Returns:
self_times_x: `Tensor`
"""
m_x = math_ops.matmul(self._m, x)
vt_x = math_ops.matmul(self._v, x, adjoint_a=True)
d_vt_x = self._d.matmul(vt_x)
v_d_vt_x = math_ops.matmul(self._v, d_vt_x)
return m_x + v_d_vt_x
def sqrt_solve(self, x):
"""Computes `solve(self, x)`.
Doesn't actually do the sqrt! Named as such to agree with API.
    To apply inv(M + V D V.T), we use the Woodbury matrix identity:
inv(M + V D V.T) = inv(M) - inv(M) V inv(C) V.T inv(M)
where,
C = inv(D) + V.T inv(M) V.
See: https://en.wikipedia.org/wiki/Woodbury_matrix_identity
Args:
x: `Tensor`
Returns:
inv_of_self_times_x: `Tensor`
"""
minv_x = linalg_ops.matrix_triangular_solve(self._m, x)
vt_minv_x = math_ops.matmul(self._v, minv_x, transpose_a=True)
cinv_vt_minv_x = linalg_ops.matrix_solve(
self._woodbury_sandwiched_term(), vt_minv_x)
v_cinv_vt_minv_x = math_ops.matmul(self._v, cinv_vt_minv_x)
minv_v_cinv_vt_minv_x = linalg_ops.matrix_triangular_solve(
self._m, v_cinv_vt_minv_x)
return minv_x - minv_v_cinv_vt_minv_x
def sqrt_log_abs_det(self):
"""Computes (log o abs o det)(X) for matrix X.
Doesn't actually do the sqrt! Named as such to agree with API.
To compute det(M + V D V.T), we use the matrix determinant lemma:
det(Tril + V D V.T) = det(C) det(D) det(M)
where C is defined as in `_inverse`, ie,
C = inv(D) + V.T inv(M) V.
See: https://en.wikipedia.org/wiki/Matrix_determinant_lemma
Returns:
log_abs_det: `Tensor`.
"""
log_det_c = math_ops.log(math_ops.abs(
linalg_ops.matrix_determinant(self._woodbury_sandwiched_term())))
# Reduction is ok because we always prepad inputs to this class.
log_det_m = math_ops.reduce_sum(math_ops.log(math_ops.abs(
array_ops.matrix_diag_part(self._m))), reduction_indices=[-1])
return log_det_c + 2. * self._d.sqrt_log_abs_det() + log_det_m
def _woodbury_sandwiched_term(self):
"""Computes the sandwiched term in the Woodbury identity.
Computes the "`C`" in the the identity:
inv(M + V D V.T) = inv(M) - inv(M) V inv(C) V.T inv(M)
where,
C = inv(D) + V.T inv(M) V.
See: https://en.wikipedia.org/wiki/Woodbury_matrix_identity
Returns:
woodbury_sandwich_term: A `Tensor` to be used like `C`, above.
"""
minv_v = linalg_ops.matrix_triangular_solve(self._m, self._v)
vt_minv_v = math_ops.matmul(self._v, minv_v, adjoint_a=True)
return self._d_inv.add_to_tensor(vt_minv_v)
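# A hedged sanity check of the Woodbury-based solve above (illustrative only;
# the names below are assumptions, not part of this module):
#   import numpy as np
#   m = np.tril(np.random.rand(3, 3)) + np.eye(3)  # well-conditioned TriL
#   v = np.random.rand(3, 2)
#   d_vec = np.random.rand(2) + 1.
#   x = np.random.rand(3, 1)
#   direct = np.linalg.solve(m + v.dot(np.diag(d_vec)).dot(v.T), x)
#   # `direct` should match sqrt_solve(x) with tril=m, v=v, diag=d_vec.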
class Affine(Bijector):
# pylint: disable=line-too-long
"""Bijector which computes `Y = g(X; shift, scale) = matmul(scale, X) + shift` where `scale = c * I + diag(D1) + tril(L) + V @ diag(D2) @ V.T`.
Write `A @ X` for `matmul(A, X)`. In TF parlance, the `scale` term is
logically equivalent to:
```python
scale = (
scale_identity_multiplier * tf.diag(tf.ones(d)) +
tf.diag(scale_diag) +
scale_tril +
scale_perturb_factor @ diag(scale_perturb_diag) @
      tf.transpose(scale_perturb_factor)
)
```
The `scale` term is applied without necessarily materializing constituent
matrices, i.e., the matmul is [matrix-free](
https://en.wikipedia.org/wiki/Matrix-free_methods) when possible.
Examples:
```python
# Y = X
b = Affine()
# Y = X + shift
b = Affine(shift=[1., 2, 3])
# Y = 2 * I @ X.T + shift
b = Affine(shift=[1., 2, 3],
scale_identity_multiplier=2.)
# Y = tf.diag(d1) @ X.T + shift
b = Affine(shift=[1., 2, 3],
scale_diag=[-1., 2, 1]) # Implicitly 3x3.
# Y = (I + v * v.T) @ X.T + shift
b = Affine(shift=[1., 2, 3],
scale_perturb_factor=[[1., 0],
[0, 1],
[1, 1]])
# Y = (diag(d1) + v * diag(d2) * v.T) @ X.T + shift
b = Affine(shift=[1., 2, 3],
scale_diag=[1., 3, 3], # Implicitly 3x3.
scale_perturb_diag=[2., 1], # Implicitly 2x2.
scale_perturb_factor=[[1., 0],
[0, 1],
[1, 1]])
```
"""
# pylint: enable=line-too-long
def __init__(self,
shift=None,
scale_identity_multiplier=None,
scale_diag=None,
scale_tril=None,
scale_perturb_factor=None,
scale_perturb_diag=None,
event_ndims=1,
validate_args=False,
name="affine"):
"""Instantiates the `Affine` bijector.
This `Bijector` is initialized with `shift` `Tensor` and `scale` arguments,
giving the forward operation:
```none
Y = g(X) = scale @ X + shift
```
where the `scale` term is logically equivalent to:
```python
scale = (
scale_identity_multiplier * tf.diag(tf.ones(d)) +
tf.diag(scale_diag) +
scale_tril +
scale_perturb_factor @ diag(scale_perturb_diag) @
        tf.transpose(scale_perturb_factor)
)
```
If none of `scale_identity_multiplier`, `scale_diag`, or `scale_tril` are
specified then `scale += IdentityMatrix`. Otherwise specifying a
`scale` argument has the semantics of `scale += Expand(arg)`, i.e.,
`scale_diag != None` means `scale += tf.diag(scale_diag)`.
Args:
shift: Numeric `Tensor`. If this is set to `None`, no shift is applied.
scale_identity_multiplier: floating point rank 0 `Tensor` representing a
scaling done to the identity matrix.
        When `scale_identity_multiplier = scale_diag = scale_tril = None` then
        `scale += IdentityMatrix`; otherwise a scaled identity is added to
        `scale` only when `scale_identity_multiplier` is given explicitly.
scale_diag: Numeric `Tensor` representing the diagonal matrix.
`scale_diag` has shape [N1, N2, ... k], which represents a k x k
diagonal matrix.
When `None` no diagonal term is added to `scale`.
      scale_tril: Numeric `Tensor` representing the lower triangular matrix.
        `scale_tril` has shape [N1, N2, ... k, k], which represents a k x k
lower triangular matrix.
When `None` no `scale_tril` term is added to `scale`.
The upper triangular elements above the diagonal are ignored.
scale_perturb_factor: Numeric `Tensor` representing factor matrix with
last two dimensions of shape `(k, r)`.
When `None`, no rank-r update is added to `scale`.
scale_perturb_diag: Numeric `Tensor` representing the diagonal matrix.
`scale_perturb_diag` has shape [N1, N2, ... r], which represents an
r x r Diagonal matrix.
When `None` low rank updates will take the form `scale_perturb_factor *
scale_perturb_factor.T`.
event_ndims: Scalar `int32` `Tensor` indicating the number of dimensions
associated with a particular draw from the distribution. Must be 0 or 1.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String` name given to ops managed by this object.
Raises:
ValueError: if `perturb_diag` is specified but not `perturb_factor`.
TypeError: if `shift` has different `dtype` from `scale` arguments.
"""
self._graph_parents = []
self._name = name
self._validate_args = validate_args
# Ambiguous definition of low rank update.
if scale_perturb_diag is not None and scale_perturb_factor is None:
raise ValueError("When scale_perturb_diag is specified, "
"scale_perturb_factor must be specified.")
# Special case, only handling a scaled identity matrix. We don't know its
# dimensions, so this is special cased.
# We don't check identity_multiplier, since below we set it to 1. if all
# other scale args are None.
self._is_only_identity_multiplier = (scale_tril is None and
scale_diag is None and
scale_perturb_factor is None)
# When no args are specified, pretend the scale matrix is the identity
# matrix.
if self._is_only_identity_multiplier and scale_identity_multiplier is None:
scale_identity_multiplier = 1.
with self._name_scope("init", values=[
shift, scale_identity_multiplier, scale_diag, scale_tril,
scale_perturb_diag, scale_perturb_factor, event_ndims]):
event_ndims = ops.convert_to_tensor(event_ndims, name="event_ndims")
if validate_args:
is_less_than_two = check_ops.assert_less(
event_ndims, 2,
message="event_ndims must be 0 or 1")
event_ndims = control_flow_ops.with_dependencies(
[is_less_than_two], event_ndims)
self._shift = _as_tensor(shift, "shift")
# self._create_scale_operator returns an OperatorPD in all cases except if
# self._is_only_identity_multiplier; in which case it returns a scalar
# Tensor.
self._scale = self._create_scale_operator(
identity_multiplier=scale_identity_multiplier,
diag=scale_diag,
tril=scale_tril,
perturb_diag=scale_perturb_diag,
perturb_factor=scale_perturb_factor,
event_ndims=event_ndims,
validate_args=validate_args)
if (self._shift is not None and
self._shift.dtype.base_dtype != self._scale.dtype.base_dtype):
raise TypeError("shift.dtype({}) does not match scale.dtype({})".format(
self._shift.dtype, self._scale.dtype))
super(Affine, self).__init__(
batch_ndims=self._infer_batch_ndims(),
event_ndims=event_ndims,
        graph_parents=(
            [event_ndims] +
            ([self._scale] if contrib_framework.is_tensor(self._scale)
             else self._scale.inputs) +
            ([self._shift] if self._shift is not None else [])),
is_constant_jacobian=True,
validate_args=validate_args,
name=name)
def _create_scale_operator(self, identity_multiplier, diag, tril,
perturb_diag, perturb_factor, event_ndims,
validate_args):
"""Construct `scale` from various components.
Args:
identity_multiplier: floating point rank 0 `Tensor` representing a scaling
done to the identity matrix.
diag: Numeric `Tensor` representing the diagonal matrix. `scale_diag` has
shape [N1, N2, ... k], which represents a k x k diagonal matrix.
      tril: Numeric `Tensor` representing the lower triangular matrix.
        `scale_tril` has shape [N1, N2, ... k, k], which represents a k x k
        lower triangular matrix.
perturb_diag: Numeric `Tensor` representing the diagonal matrix of the
low rank update.
perturb_factor: Numeric `Tensor` representing factor matrix.
event_ndims: Scalar `int32` `Tensor` indicating the number of dimensions
associated with a particular draw from the distribution. Must be 0 or 1
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
    Returns:
      scale. In the case of scaling by a constant, scale is a floating point
      `Tensor`. Otherwise, scale is an `OperatorPD` (or the lightweight
      TriL+VDV.T operator defined above).
Raises:
ValueError: if all of `tril`, `diag` and `identity_multiplier` are `None`.
"""
identity_multiplier = _as_tensor(identity_multiplier, "identity_multiplier")
diag = _as_tensor(diag, "diag")
tril = _as_tensor(tril, "tril")
perturb_diag = _as_tensor(perturb_diag, "perturb_diag")
perturb_factor = _as_tensor(perturb_factor, "perturb_factor")
identity_multiplier = self._maybe_validate_identity_multiplier(
identity_multiplier, validate_args)
if perturb_factor is not None:
perturb_factor = self._process_matrix(
perturb_factor, min_rank=2, event_ndims=event_ndims)
if perturb_diag is not None:
perturb_diag = self._process_matrix(
perturb_diag, min_rank=1, event_ndims=event_ndims)
    # The following if-statements are ordered by increasingly stronger
# assumptions in the base matrix, i.e., we process in the order:
# TriL, Diag, Identity.
if tril is not None:
tril = self._preprocess_tril(
identity_multiplier, diag, tril, event_ndims)
if perturb_factor is None:
return operator_pd_cholesky.OperatorPDCholesky(
tril, verify_pd=validate_args)
return _TriLPlusVDVTLightweightOperatorPD(
tril=tril, v=perturb_factor, diag=perturb_diag,
validate_args=validate_args)
if diag is not None:
diag = self._preprocess_diag(identity_multiplier, diag, event_ndims)
if perturb_factor is None:
return operator_pd_diag.OperatorPDSqrtDiag(
diag, verify_pd=validate_args)
return operator_pd_vdvt_update.OperatorPDSqrtVDVTUpdate(
operator=operator_pd_diag.OperatorPDDiag(
diag, verify_pd=validate_args),
v=perturb_factor,
diag=perturb_diag,
verify_pd=validate_args)
if identity_multiplier is not None:
if perturb_factor is None:
return identity_multiplier
# Infer the shape from the V and D.
v_shape = array_ops.shape(perturb_factor)
identity_shape = array_ops.concat((v_shape[:-1], (v_shape[-2],)), 0)
scaled_identity = operator_pd_identity.OperatorPDIdentity(
identity_shape,
perturb_factor.dtype.base_dtype,
scale=identity_multiplier,
verify_pd=validate_args)
return operator_pd_vdvt_update.OperatorPDSqrtVDVTUpdate(
operator=scaled_identity,
v=perturb_factor,
diag=perturb_diag,
verify_pd=validate_args)
raise ValueError("One of tril, diag and/or identity_multiplier must be "
"specified.")
def _maybe_validate_identity_multiplier(self, identity_multiplier,
validate_args):
"""Check that the init arg `identity_multiplier` is valid."""
    if identity_multiplier is None or not validate_args:
      return identity_multiplier
    # validate_args is necessarily True here, so attach the assertion.
    identity_multiplier = control_flow_ops.with_dependencies(
        [check_ops.assert_positive(identity_multiplier)],
        identity_multiplier)
    return identity_multiplier
def _preprocess_tril(self, identity_multiplier, diag, tril, event_ndims):
"""Helper to preprocess a lower triangular matrix."""
tril = array_ops.matrix_band_part(tril, -1, 0) # Zero out TriU.
if identity_multiplier is None and diag is None:
return self._process_matrix(tril, min_rank=2, event_ndims=event_ndims)
new_diag = array_ops.matrix_diag_part(tril)
if identity_multiplier is not None:
new_diag += identity_multiplier
if diag is not None:
new_diag += diag
tril = array_ops.matrix_set_diag(tril, new_diag)
return self._process_matrix(tril, min_rank=2, event_ndims=event_ndims)
def _preprocess_diag(self, identity_multiplier, diag, event_ndims):
"""Helper to preprocess a diagonal matrix."""
if identity_multiplier is not None:
diag += identity_multiplier
return self._process_matrix(diag, min_rank=1, event_ndims=event_ndims)
def _process_matrix(self, matrix, min_rank, event_ndims):
"""Helper to __init__ which gets matrix in batch-ready form."""
# Pad the matrix so that matmul works in the case of a matrix and vector
# input. Keep track if the matrix was padded, to distinguish between a
# rank 3 tensor and a padded rank 2 tensor.
    # TODO(srvasude): Remove side-effects from functions. It's currently
    # unbroken, but error-prone since the function call order may change in
    # the future.
self._rank_two_event_ndims_one = math_ops.logical_and(
math_ops.equal(array_ops.rank(matrix), min_rank),
math_ops.equal(event_ndims, 1))
left = array_ops.where(self._rank_two_event_ndims_one, 1, 0)
pad = array_ops.concat(
[array_ops.ones(
[left], dtype=dtypes.int32), array_ops.shape(matrix)],
0)
return array_ops.reshape(matrix, pad)
def _infer_batch_ndims(self):
"""Return batch_ndims."""
if self._is_only_identity_multiplier:
return 0
    # The real number of batch dims is one less when event_ndims = 1 and the
    # rank of the underlying scale is 2, because _process_matrix padded a
    # leading dimension. This allows us to have non-negative sample dims.
return (self._scale.rank() - 2 -
array_ops.where(self._rank_two_event_ndims_one, 1, 0))
@property
def shift(self):
return self._shift
@property
def scale(self):
# TODO(srvasude): Remove this exception once TriLPlusVDVT is properly
# implemented.
if isinstance(self._scale, _TriLPlusVDVTLightweightOperatorPD):
raise NotImplementedError("Cannot access scale when Tril+VDV.T.")
return self._scale
def _forward(self, x):
y = x
if self._is_only_identity_multiplier:
y *= self._scale
if self.shift is not None:
return y + self.shift
return y
y, sample_shape = self.shaper.make_batch_of_event_sample_matrices(y)
y = self._scale.sqrt_matmul(y)
y = self.shaper.undo_make_batch_of_event_sample_matrices(y, sample_shape)
if self.shift is not None:
return y + self.shift
return y
def _inverse(self, y):
x = y
if self.shift is not None:
x -= self.shift
if self._is_only_identity_multiplier:
return x / self._scale
x, sample_shape = self.shaper.make_batch_of_event_sample_matrices(x)
x = self._scale.sqrt_solve(x)
x = self.shaper.undo_make_batch_of_event_sample_matrices(x, sample_shape)
return x
def _inverse_log_det_jacobian(self, y):
return -self._forward_log_det_jacobian(y)
def _forward_log_det_jacobian(self, x):
if self._is_only_identity_multiplier:
# TODO(jvdillon): We don't pad in this case and instead let the fldj be
# applied via broadcast.
d = math_ops.cast(array_ops.shape(x)[-1], dtype=self._scale.dtype)
return math_ops.log(math_ops.abs(self._scale)) * array_ops.where(
math_ops.equal(self.shaper.event_ndims, 0), 1., d)
fldj = self._scale.sqrt_log_abs_det()
# We need to squeeze off the padded dimension.
start = array_ops.where(self._rank_two_event_ndims_one, 1, 0)
return array_ops.reshape(fldj, array_ops.shape(fldj)[start:])
class AffineLinearOperator(Bijector):
"""Bijector which computes `Y = g(X; shift, scale) = scale @ X.T + shift`.
`shift` is a numeric `Tensor` and `scale` is a `LinearOperator`.
If `X` is a scalar then the forward transformation is: `scale * X + shift`
where `*` denotes the scalar product.
Note: we don't always simply transpose `X` (but write it this way for
brevity). Actually the input `X` undergoes the following transformation
before being premultiplied by `scale`:
1. If there are no sample dims, we call `X = tf.expand_dims(X, 0)`, i.e.,
`new_sample_shape = [1]`. Otherwise do nothing.
2. The sample shape is flattened to have one dimension, i.e.,
`new_sample_shape = [n]` where `n = tf.reduce_prod(old_sample_shape)`.
3. The sample dim is cyclically rotated left by 1, i.e.,
`new_shape = [B1,...,Bb, k, n]` where `n` is as above, `k` is the
event_shape, and `B1,...,Bb` are the batch shapes for each of `b` batch
dimensions.
(For more details see `shape.make_batch_of_event_sample_matrices`.)
The result of the above transformation is that `X` can be regarded as a batch
of matrices where each column is a draw from the distribution. After
premultiplying by `scale`, we take the inverse of this procedure. The input
`Y` also undergoes the same transformation before/after premultiplying by
`inv(scale)`.
Example Use:
```python
linalg = tf.contrib.linalg
x = [1., 2, 3]
shift = [-1., 0., 1]
diag = [1., 2, 3]
scale = linalg.LinearOperatorDiag(diag)
affine = AffineLinearOperator(shift, scale)
# In this case, `forward` is equivalent to:
  # diag * x + shift
y = affine.forward(x) # [0., 4, 10]
shift = [2., 3, 1]
tril = [[1., 0, 0],
[2, 1, 0],
[3, 2, 1]]
scale = linalg.LinearOperatorTriL(tril)
affine = AffineLinearOperator(shift, scale)
# In this case, `forward` is equivalent to:
# np.squeeze(np.matmul(tril, np.expand_dims(x, -1)), -1) + shift
y = affine.forward(x) # [3., 7, 11]
```
"""
def __init__(self,
shift=None,
scale=None,
event_ndims=1,
validate_args=False,
name="affine_linear_operator"):
"""Instantiates the `AffineLinearOperator` bijector.
Args:
shift: Numeric `Tensor`.
scale: Subclass of `LinearOperator`. Represents the (batch) positive
definite matrix `M` in `R^{k x k}`.
event_ndims: Scalar `integer` `Tensor` indicating the number of dimensions
associated with a particular draw from the distribution. Must be 0 or 1.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String` name given to ops managed by this object.
Raises:
ValueError: if `event_ndims` is not 0 or 1.
TypeError: if `scale` is not a `LinearOperator`.
TypeError: if `shift.dtype` does not match `scale.dtype`.
ValueError: if not `scale.is_non_singular`.
"""
self._graph_parents = []
self._name = name
self._validate_args = validate_args
graph_parents = []
with self._name_scope("init", values=[shift]):
event_ndims = ops.convert_to_tensor(event_ndims, name="event_ndims")
if tensor_util.constant_value(event_ndims) is not None:
event_ndims = tensor_util.constant_value(event_ndims)
if event_ndims not in (0, 1):
raise ValueError("event_ndims({}) was not 0 or 1".format(event_ndims))
else:
if validate_args:
# Shape tool will catch if event_ndims is negative.
event_ndims = control_flow_ops.with_dependencies(
[check_ops.assert_less(
event_ndims, 2, message="event_ndims must be 0 or 1")],
event_ndims)
graph_parents += [event_ndims]
if shift is not None:
shift = ops.convert_to_tensor(shift, name="shift")
graph_parents += [shift]
self._shift = shift
if scale is not None:
if (shift is not None and
shift.dtype.base_dtype != scale.dtype.base_dtype):
raise TypeError(
"shift.dtype({}) is incompatible with scale.dtype({}).".format(
shift.dtype, scale.dtype))
if not isinstance(scale, linear_operator.LinearOperator):
raise TypeError("scale is not an instance of tf.LinearOperator")
if validate_args and not scale.is_non_singular:
raise ValueError("Scale matrix must be non-singular.")
graph_parents += scale.graph_parents
if scale.tensor_rank is not None:
batch_ndims = scale.tensor_rank - 2
else:
batch_ndims = scale.tensor_rank_tensor() - 2
graph_parents += [batch_ndims]
else:
batch_ndims = 0 # We won't need shape inference when scale is None.
self._scale = scale
super(AffineLinearOperator, self).__init__(
batch_ndims=batch_ndims,
event_ndims=event_ndims,
graph_parents=graph_parents,
is_constant_jacobian=True,
validate_args=validate_args,
name=name)
@property
def shift(self):
"""The `shift` `Tensor` in `Y = scale @ X.T + shift`."""
return self._shift
@property
def scale(self):
"""The `scale` `LinearOperator` in `Y = scale @ X.T + shift`."""
return self._scale
def _forward(self, x):
y = x
if self.scale is not None:
y, sample_shape = self.shaper.make_batch_of_event_sample_matrices(
y, expand_batch_dim=False)
with ops.control_dependencies([self.scale.assert_non_singular()] if
self.validate_args else []):
y = self.scale.apply(y)
y = self.shaper.undo_make_batch_of_event_sample_matrices(
y, sample_shape, expand_batch_dim=False)
if self.shift is not None:
y += self.shift
return y
def _inverse(self, y):
x = y
if self.shift is not None:
x -= self.shift
if self.scale is not None:
x, sample_shape = self.shaper.make_batch_of_event_sample_matrices(
x, expand_batch_dim=False)
# Solve fails if the op is singular so we may safely skip this assertion.
x = self.scale.solve(x)
x = self.shaper.undo_make_batch_of_event_sample_matrices(
x, sample_shape, expand_batch_dim=False)
return x
def _inverse_log_det_jacobian(self, y):
return -self._forward_log_det_jacobian(y)
def _forward_log_det_jacobian(self, x): # pylint: disable=unused-argument
if self.scale is None:
return constant_op.constant(0, dtype=x.dtype.base_dtype)
with ops.control_dependencies([self.scale.assert_non_singular()] if
self.validate_args else []):
return self.scale.log_abs_determinant()
class Softplus(Bijector):
"""Bijector which computes `Y = g(X) = Log[1 + exp(X)]`.
The softplus `Bijector` has the following two useful properties:
* The domain is the positive real numbers
* `softplus(x) approx x`, for large `x`, so it does not overflow as easily as
the `Exp` `Bijector`.
Example Use:
```python
  # Create the Y=g(X)=softplus(X) transform which works on Tensors with 2
  # event ndims (i.e., vector of matrices).
  softplus = Softplus(event_ndims=2)
x = [[[1., 2],
[3, 4]],
[[5, 6],
[7, 8]]]
log(1 + exp(x)) == softplus.forward(x)
log(exp(x) - 1) == softplus.inverse(x)
```
Note: log(.) and exp(.) are applied element-wise but the Jacobian is a
reduction over the event space.
"""
def __init__(self,
event_ndims=0,
validate_args=False,
name="softplus"):
super(Softplus, self).__init__(
batch_ndims=0,
event_ndims=event_ndims,
validate_args=validate_args,
name=name)
def _forward(self, x):
return nn_ops.softplus(x)
def _inverse_and_inverse_log_det_jacobian(self, y):
if self.shaper is None:
raise ValueError("Jacobian cannot be computed with unknown event_ndims")
_, _, event_dims = self.shaper.get_dims(y)
# Could also do:
# ildj = math_ops.reduce_sum(y - distribution_util.softplus_inverse(y),
# reduction_indices=event_dims)
# but the following is more numerically stable. Ie,
# Y = Log[1 + exp{X}] ==> X = Log[exp{Y} - 1]
# ==> dX/dY = exp{Y} / (exp{Y} - 1)
# = 1 / (1 - exp{-Y}),
# which is the most stable for large Y > 0. For small Y, we use
# 1 - exp{-Y} approx Y.
ildj = -math_ops.reduce_sum(math_ops.log(-math_ops.expm1(-y)),
reduction_indices=event_dims)
return distribution_util.softplus_inverse(y), ildj
def _forward_log_det_jacobian(self, x): # pylint: disable=unused-argument
if self.shaper is None:
raise ValueError("Jacobian cannot be computed with unknown event_ndims")
_, _, event_dims = self.shaper.get_dims(x)
return -math_ops.reduce_sum(
nn_ops.softplus(-x), reduction_indices=event_dims)
class SoftmaxCentered(Bijector):
"""Bijector which computes `Y = g(X) = exp([X 0]) / sum(exp([X 0]))`.
To implement [softmax](https://en.wikipedia.org/wiki/Softmax_function) as a
bijection, the forward transformation appends a value to the input and the
inverse removes this coordinate. The appended coordinate represents a pivot,
e.g., `softmax(x) = exp(x-c) / sum(exp(x-c))` where `c` is the implicit last
coordinate.
  Because we append a coordinate, this bijector only supports `event_ndims` in
  `[0, 1]`, i.e., scalars and vectors.
Example Use:
```python
bijector.SoftmaxCentered(event_ndims=1).forward(tf.log([2, 3, 4]))
# Result: [0.2, 0.3, 0.4, 0.1]
# Extra result: 0.1
bijector.SoftmaxCentered(event_ndims=1).inverse([0.2, 0.3, 0.4, 0.1])
# Result: tf.log([2, 3, 4])
# Extra coordinate removed.
```
At first blush it may seem like the [Invariance of domain](
https://en.wikipedia.org/wiki/Invariance_of_domain) theorem implies this
implementation is not a bijection. However, the appended dimension
makes the (forward) image non-open and the theorem does not directly apply.
"""
def __init__(self,
event_ndims=0,
validate_args=False,
name="softmax_centered"):
self._graph_parents = []
self._name = name
with self._name_scope("init", values=[event_ndims]):
event_ndims = ops.convert_to_tensor(event_ndims, name="event_ndims")
event_ndims = tensor_util.constant_value(event_ndims)
if event_ndims is None or event_ndims not in [0, 1]:
raise ValueError("`event_ndims` must be a TF constant which is 0 or 1")
self._static_event_ndims = event_ndims
super(SoftmaxCentered, self).__init__(
batch_ndims=0, # We'll regard all non-event dims as sample dims.
event_ndims=event_ndims,
validate_args=validate_args,
name=name)
def _get_forward_event_shape(self, input_shape):
if input_shape.ndims is None:
return input_shape
if input_shape.ndims != self._static_event_ndims:
raise ValueError("input_shape.dims = %d != %d" %
(input_shape.ndims, self._static_event_ndims))
if input_shape.ndims == 0:
return tensor_shape.TensorShape([2])
if input_shape.ndims == 1:
return tensor_shape.TensorShape(input_shape[0] + 1)
# Unreachable code:
raise ValueError("event_ndims = %d must be 0 or 1" % input_shape.ndims)
def _forward_event_shape(self, input_shape):
ndims = array_ops.shape(input_shape)
if self.validate_args:
      # A shape cannot be negative, so we need only check the upper bound.
is_zero_or_one = check_ops.assert_equal(
ndims, 0 if self._static_event_ndims == 0 else 1,
message="event_ndims must be 0 or 1")
ndims = control_flow_ops.with_dependencies([is_zero_or_one], ndims)
if self._static_event_ndims == 0:
return ops.convert_to_tensor(
[2], dtype=dtypes.int32, name="output_shape")
return input_shape + 1
def _get_inverse_event_shape(self, output_shape):
if output_shape.ndims is None:
return output_shape
if output_shape.ndims != 1:
raise ValueError("output_shape.ndims = %d != 1" % output_shape.ndims)
if self._static_event_ndims == 0:
return tensor_shape.TensorShape([])
return tensor_shape.TensorShape(output_shape[0] - 1)
def _inverse_event_shape(self, output_shape):
ndims = array_ops.shape(output_shape)[0]
if self.validate_args:
      # A shape cannot be negative, so we need only check the upper bound.
is_one = check_ops.assert_equal(
ndims, 1, message="event_ndims must be 1")
ndims = control_flow_ops.with_dependencies([is_one], ndims)
if self._static_event_ndims == 0:
return ops.convert_to_tensor([], dtype=dtypes.int32, name="output_shape")
return array_ops.expand_dims(output_shape[0] - 1, dim=0)
def _forward(self, x):
# Pad the last dim with a zeros vector. We need this because it lets us
# infer the scale in the inverse function.
y = array_ops.expand_dims(x, dim=-1) if self._static_event_ndims == 0 else x
ndims = (y.get_shape().ndims if y.get_shape().ndims is not None
else array_ops.rank(y))
y = array_ops.pad(y,
paddings=array_ops.concat(
(array_ops.zeros(
(ndims - 1, 2), dtype=dtypes.int32), [[0, 1]]),
0))
# Set shape hints.
if x.get_shape().ndims is not None:
shape = x.get_shape().as_list()
if self._static_event_ndims == 0:
shape += [2]
elif shape[-1] is not None:
shape[-1] += 1
shape = tensor_shape.TensorShape(shape)
y.get_shape().assert_is_compatible_with(shape)
y.set_shape(shape)
# Since we only support event_ndims in [0, 1] and we do padding, we always
# reduce over the last dimension, i.e., dim=-1 (which is the default).
return nn_ops.softmax(y)
def _inverse(self, y):
# To derive the inverse mapping note that:
# y[i] = exp(x[i]) / normalization
# and
# y[end] = 1 / normalization.
# Thus:
# x[i] = log(exp(x[i])) - log(y[end]) - log(normalization)
# = log(exp(x[i])/normalization) - log(y[end])
# = log(y[i]) - log(y[end])
shape = (np.asarray(y.get_shape().as_list(), dtype=np.int32)
if y.get_shape().is_fully_defined()
else array_ops.shape(y, name="shape"))
ndims = y.get_shape().ndims or math_ops.rank(y, name="ndims")
# Do this first to make sure CSE catches that it'll happen again in
# _inverse_log_det_jacobian.
x = math_ops.log(y)
# We now extract the last coordinate of the rightmost dimension.
# Our trick is to slice from [0,0,...,shape[-1]-1] to shape[:-1]+[1].
begin = array_ops.one_hot(indices=ndims-1,
depth=ndims,
on_value=shape[-1]-np.array(1, dtype=shape.dtype),
dtype=shape.dtype)
size = array_ops.concat((shape[:-1], np.asarray([1], dtype=shape.dtype)), 0)
log_normalization = -array_ops.strided_slice(x, begin, begin + size)
# Here we slice out all but the last coordinate; see above for idea.
begin = array_ops.zeros_like(shape)
size = array_ops.concat((shape[:-1], [shape[-1] - 1]), 0)
x = array_ops.strided_slice(x, begin, begin + size)
x += log_normalization
if self._static_event_ndims == 0:
x = array_ops.squeeze(x, squeeze_dims=[ndims-1])
# Set shape hints.
if y.get_shape().ndims is not None:
shape = y.get_shape().as_list()
if self._static_event_ndims == 0:
shape = shape[:-1]
elif shape[-1] is not None:
shape[-1] -= 1
shape = tensor_shape.TensorShape(shape)
x.get_shape().assert_is_compatible_with(shape)
x.set_shape(shape)
return x
def _inverse_log_det_jacobian(self, y):
# WLOG, consider the vector case:
# x = log(y[:-1]) - log(y[-1])
# where,
# y[-1] = 1 - sum(y[:-1]).
# We have:
# det{ dX/dY } = det{ diag(1 ./ y[:-1]) + 1 / y[-1] }
# = det{ inv{ diag(y[:-1]) - y[:-1]' y[:-1] } } (1)
# = 1 / det{ diag(y[:-1]) - y[:-1]' y[:-1] }
# = 1 / { (1 + y[:-1]' inv(diag(y[:-1])) y[:-1]) *
# det(diag(y[:-1])) } (2)
# = 1 / { y[-1] prod(y[:-1]) }
# = 1 / prod(y)
# (1) - https://en.wikipedia.org/wiki/Sherman%E2%80%93Morrison_formula
# or by noting that det{ dX/dY } = 1 / det{ dY/dX } from Bijector
# docstring "Tip".
# (2) - https://en.wikipedia.org/wiki/Matrix_determinant_lemma
return -math_ops.reduce_sum(math_ops.log(y), reduction_indices=-1)
def _forward_log_det_jacobian(self, x):
if self._static_event_ndims == 0:
return x - 2. * nn_ops.softplus(x)
else:
# This code is similar to nn_ops.log_softmax but different because we have
# an implicit zero column to handle. I.e., instead of:
# reduce_sum(logits - reduce_sum(exp(logits), dim))
# we must do:
# log_normalization = 1 + reduce_sum(exp(logits))
# -log_normalization + reduce_sum(logits - log_normalization)
log_normalization = nn_ops.softplus(
math_ops.reduce_logsumexp(x, reduction_indices=-1, keep_dims=True))
fldj = (-log_normalization +
math_ops.reduce_sum(x - log_normalization,
reduction_indices=-1,
keep_dims=True))
return array_ops.squeeze(fldj, squeeze_dims=-1)
class SigmoidCentered(SoftmaxCentered):
"""Bijector which computes Y = g(X) = exp([X 0]) / (1 + exp(-X)).
Equivalent to: `bijector.SoftmaxCentered(event_ndims=0)`.
See `bijector.SoftmaxCentered` for more details.
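  Example Use (a hedged sketch; assumes this module is imported as
  `bijector`):
  ```python
  bijector.SigmoidCentered().forward(0.)
  # Result: [0.5, 0.5], i.e., softmax([0., 0.]).
  ```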
"""
def __init__(self, validate_args=False, name="sigmoid_centered"):
super(SigmoidCentered, self).__init__(
validate_args=validate_args, name=name)
class CholeskyOuterProduct(Bijector):
# pylint: disable=line-too-long
"""Bijector which computes Y = g(X) = X X.T where X is a lower-triangular, positive-diagonal matrix.
`event_ndims` must be 0 or 2, i.e., scalar or matrix.
  Note: the upper-triangular part of X is ignored (whether or not it is zero).
Examples:
```python
  bijector.CholeskyOuterProduct(event_ndims=2).forward(x=[[1., 0], [2, 1]])
  # Result: [[1, 2], [2, 5]], i.e., x x.T
  bijector.CholeskyOuterProduct(event_ndims=2).inverse(y=[[1., 2], [2, 5]])
  # Result: [[1, 0], [2, 1]], i.e., chol(y).
```
"""
# pylint: enable=line-too-long
def __init__(self, event_ndims=2, validate_args=False,
name="cholesky_outer_product"):
"""Instantiates the `CholeskyOuterProduct` bijector.
Args:
event_ndims: `constant` `int32` scalar `Tensor` indicating the number of
dimensions associated with a particular draw from the distribution. Must
be 0 or 2.
validate_args: `Boolean` indicating whether arguments should be checked
for correctness.
name: `String` name given to ops managed by this object.
Raises:
      ValueError: if event_ndims is neither 0 nor 2.
"""
self._graph_parents = []
self._name = name
with self._name_scope("init", values=[event_ndims]):
event_ndims = ops.convert_to_tensor(event_ndims, name="event_ndims")
event_ndims = tensor_util.constant_value(event_ndims)
if event_ndims is None or event_ndims not in [0, 2]:
raise ValueError("`event_ndims` must be a TF constant which is 0 or 2")
self._static_event_ndims = event_ndims
super(CholeskyOuterProduct, self).__init__(
validate_args=validate_args,
name=name)
def _forward(self, x):
if self._static_event_ndims == 0:
return math_ops.square(x)
if self.validate_args:
is_matrix = check_ops.assert_rank_at_least(x, 2)
shape = array_ops.shape(x)
is_square = check_ops.assert_equal(shape[-2], shape[-1])
x = control_flow_ops.with_dependencies([is_matrix, is_square], x)
# For safety, explicitly zero-out the upper triangular part.
x = array_ops.matrix_band_part(x, -1, 0)
return math_ops.matmul(x, x, adjoint_b=True)
def _inverse_and_inverse_log_det_jacobian(self, y):
x = (math_ops.sqrt(y) if self._static_event_ndims == 0
else linalg_ops.cholesky(y))
return x, -self._forward_log_det_jacobian(x)
def _forward_log_det_jacobian(self, x):
# Let Y be a symmetric, positive definite matrix and write:
# Y = X X.T
# where X is lower-triangular.
#
# Observe that,
# dY[i,j]/dX[a,b]
# = d/dX[a,b] { X[i,:] X[j,:] }
# = sum_{d=1}^p { I[i=a] I[d=b] X[j,d] + I[j=a] I[d=b] X[i,d] }
#
# To compute the Jacobian dX/dY we must represent X,Y as vectors. Since Y is
# symmetric and X is lower-triangular, we need vectors of dimension:
# d = p (p + 1) / 2
# where X, Y are p x p matrices, p > 0. We use a row-major mapping, i.e.,
# k = { i (i + 1) / 2 + j i>=j
# { undef i<j
# and assume zero-based indexes. When k is undef, the element is dropped.
# Example:
# j k
# 0 1 2 3 /
# 0 [ 0 . . . ]
# i 1 [ 1 2 . . ]
# 2 [ 3 4 5 . ]
# 3 [ 6 7 8 9 ]
# Write vec[.] to indicate transforming a matrix to vector via k(i,j). (With
# slight abuse: k(i,j)=undef means the element is dropped.)
#
# We now show d vec[Y] / d vec[X] is lower triangular. Assuming both are
# defined, observe that k(i,j) < k(a,b) iff (1) i<a or (2) i=a and j<b.
# In both cases dvec[Y]/dvec[X]@[k(i,j),k(a,b)] = 0 since:
# (1) j<=i<a thus i,j!=a.
# (2) i=a>j thus i,j!=a.
#
# Since the Jacobian is lower-triangular, we need only compute the product
# of diagonal elements:
# d vec[Y] / d vec[X] @[k(i,j), k(i,j)]
# = X[j,j] + I[i=j] X[i,j]
# = 2 X[j,j].
# Since there is a 2 X[j,j] term for every lower-triangular element of X we
# conclude:
# |Jac(d vec[Y]/d vec[X])| = 2^p prod_{j=0}^{p-1} X[j,j]^{p-j}.
if self._static_event_ndims == 0:
if self.validate_args:
is_positive = check_ops.assert_positive(
x, message="All elements must be positive.")
x = control_flow_ops.with_dependencies([is_positive], x)
return math.log(2.) + math_ops.log(x)
diag = array_ops.matrix_diag_part(x)
if self.validate_args:
is_matrix = check_ops.assert_rank_at_least(
x, 2, message="Input must be a (batch of) matrix.")
shape = array_ops.shape(x)
is_square = check_ops.assert_equal(
shape[-2], shape[-1],
message="Input must be a (batch of) square matrix.")
# Assuming lower-triangular means we only need check diag>0.
is_positive_definite = check_ops.assert_positive(
diag, message="Input must be positive definite.")
x = control_flow_ops.with_dependencies(
[is_matrix, is_square, is_positive_definite], x)
# Create a column vector equal to: [p, p-1, ..., 2, 1].T.
if x.get_shape().ndims is None or x.get_shape()[-1].value is None:
p = array_ops.shape(x)[-1]
else:
p = x.get_shape()[-1].value
exponents = array_ops.expand_dims(
math_ops.linspace(math_ops.cast(p, dtype=x.dtype), 1., p),
dim=1)
sum_weighted_log_diag = array_ops.squeeze(
math_ops.matmul(math_ops.log(diag), exponents), squeeze_dims=-1)
fldj = p * math.log(2.) + sum_weighted_log_diag
if x.get_shape().ndims is not None:
fldj.set_shape(x.get_shape()[:-2])
return fldj
| apache-2.0 | -2,422,548,313,249,080,300 | 36.574434 | 145 | 0.632617 | false | 3.590414 | false | false | false |
byronrau/tweepyScripts | twitterUserTimeline/twitterUserTimeline.py | 1 | 3599 | #!/usr/bin/python
import tweepy
import sys
import os
import codecs
import unicodecsv as csv
# API and ACCESS KEYS
API_KEY = 'jz3feMK2gN0kaN377FsTXY7uY'
API_SECRET = 'sGfCEayfwORloC9SvHy6BmDjifUsUEIF0EF51SgiYUgs054n7H'
# Don't buffer stdout, so we can tail the log output redirected to a file
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
# Max Tweets
maxTweets = int(sys.argv[1])
# Filename
fName = sys.argv[2]
tweetsPerQry = 200
# List of users read from users.txt
users = []
# Open users.txt and read the list of users
with open('users.txt', 'r') as f:
for line in f:
users.append(line.strip())
sinceId = None
if(len(sys.argv) > 3):
if(sys.argv[3] != '-1'):
sinceId = sys.argv[3]
last_id = -1L
if(len(sys.argv) > 4):
last_id = long(sys.argv[4])
def getHashtags(hashes):
hashStr = ''
for i, h in enumerate(hashes):
if i == len(hashes)-1:
hashStr = hashStr + h['text']
else:
hashStr = hashStr + h['text'] + ','
return hashStr
def getMentions(mentions):
mentionStr = ''
for i, m in enumerate(mentions):
if i == len(mentions)-1:
mentionStr = mentionStr + m['screen_name']
else:
mentionStr = mentionStr + m['screen_name'] + ','
return mentionStr
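# Note (added): both helpers above are plain join operations; an equivalent,
# more idiomatic Python form would be:
#
#     def getHashtags(hashes):
#         return ','.join(h['text'] for h in hashes)
#
#     def getMentions(mentions):
#         return ','.join(m['screen_name'] for m in mentions)
#
# The originals are kept unchanged above so the script's behavior is identical.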
auth = tweepy.AppAuthHandler(API_KEY, API_SECRET)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
if (not api):
print ("Can't Authenticate Bye!")
sys.exit(-1)
tweetCount = 0
print("Downloading max {0} tweets".format(maxTweets))
with open(fName, 'ab') as f:
writer = csv.writer(f, encoding='utf-8')
writer.writerow(['Username','Created_at','ID','Tweet','RetweetCount','Name','Location','URL','Description','TweetCount','FollowersCount','FriendsCount','hashtags','mentions'])
for user in users:
tweetCount = 0
last_id = 0
while tweetCount < maxTweets:
print 'User is ' + user + ' Tweet count ' + str(tweetCount) + ' max Tweets ' + str(maxTweets) + ' SinceId ' + str(sinceId) + ' last_id ' + str(last_id)
try:
if (last_id <= 0):
if (not sinceId):
new_tweets = api.user_timeline(screen_name=user, count=tweetsPerQry)
else:
new_tweets = api.user_timeline(screen_name=user, count=tweetsPerQry, since_id=sinceId)
else:
if (not sinceId):
new_tweets = api.user_timeline(screen_name=user, count=tweetsPerQry, max_id=str(last_id - 1))
else:
new_tweets = api.user_timeline(screen_name=user, count=tweetsPerQry, max_id=str(last_id - 1), since_id=sinceId)
if not new_tweets:
print("No more tweets found")
break
for tweet in new_tweets:
try:
hashTags = getHashtags(tweet.entities.get('hashtags'))
mentions = getMentions(tweet.entities.get('user_mentions'))
writer.writerow([tweet.user.screen_name,tweet.created_at,tweet.id_str,tweet.text,str(tweet.retweet_count),tweet.user.name, tweet.user.location, str(tweet.user.url),tweet.user.description,str(tweet.user.statuses_count),str(tweet.user.followers_count),str(tweet.user.friends_count),hashTags,mentions])
except tweepy.TweepError as e:
print("some error : " + str(e) + " for user: " + user)
break
tweetCount += len(new_tweets)
print("Downloaded {0} tweets".format(tweetCount))
last_id = new_tweets[-1].id
except tweepy.TweepError as e:
# Just exit if any error
print("some error : " + str(e))
break
print ("Downloaded {0} tweets, Saved to {1}".format(tweetCount, fName)) | mit | 5,696,373,822,382,111,000 | 30.578947 | 311 | 0.639066 | false | 3.039696 | false | false | false |
djfroofy/beatlounge | tutor/song5.py | 1 | 2172 | from itertools import cycle
from bl.ugen import W
from bl.arp import ChordPatternArp, OrderedArp, RandomArp, ArpMap
from bl.scheduler import clock
from bl.orchestra.midi import ChordPlayer
from tutor.complib import piano_f
pattern = [3, 3, [3, 1], 1, 2, 1, 2, 1, [3, 2, 1, 0, 4], 0, 1, 2, 3, 4, 3, 2,
[3, 2], 0, 0, [0, 1, 2], 2, 1, 2, 0, [0, 1, 2, 3], 3, 2, 1, 0,
[5, 4, 1], 5, 4, 3, 4, 2, 1, 5, 0, [5, 0]]
notes = cycle([[38, 50, 62, 65, 69, 80],
[38, 50, 62, 65, 69, 84],
[38, 50, 62, 65, 67, 84],
[38, 50, 62, 65, 69, 84],
[36, 50, 62, 65, 69, 84],
[36, 55, 62, 65, 69, 84],
[36, 55, 62, 67, 69, 84],
[36, 55, 60, 67, 69, 84],
[36, 53, 55, 67, 69, 84],
[36, 53, 55, 67, 69, 81],
[36, 53, 55, 65, 69, 81],
[36, 53, 55, 65, 67, 81],
[38, 53, 55, 65, 67, 81],
[38, 53, 55, 67, 69, 81],
[38, 53, 55, 67, 69, 74],
[38, 53, 55, 65, 67, 74],
[36, 53, 55, 65, 67, 74],
[36, 55, 57, 65, 67, 74],
[36, 55, 57, 60, 67, 74],
[36, 55, 57, 60, 64, 74],
[36, 55, 57, 60, 64, 80],
[36, 55, 57, 60, 64, 81],
[36, 55, 57, 60, 64, 84],
[36, 55, 57, 60, 63, 84],
[36, 55, 57, 60, 64, 84],
[36, 55, 57, 60, 69, 84],
[36, 55, 57, 60, 69, 81],
[36, 55, 57, 60, 69, 78],
[36, 53, 55, 60, 69, 78],
[36, 53, 55, 62, 69, 78]])
piano = piano_f()
piano.controlChange(reverb=120, sustain=100, chorus=50, vibrato=15)
r = W((0, 5), (12, 2), (-12, 3))
f = lambda chord: [r() + n for n in chord]
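# (Added note) W above presumably draws a weighted random value: each call to
# r() would yield an octave shift of 0, +12 or -12 semitones with weights
# 5, 2 and 3, so f transposes each note of the chord by an independently drawn
# octave. This reading of bl.ugen.W is an assumption, not confirmed here.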
arp = ArpMap(f, ChordPatternArp(notes.next(), pattern))
player = ChordPlayer(piano, arp,
velocity=OrderedArp([127, 80, 90, 80, 90, 120, 120, 80]),
release=RandomArp([11, 10, 9, 8]))
resetter = clock.schedule(lambda: arp.reset(notes.next())
).startAfter((2, 1), (2, 1))
player.resumePlaying()
| mit | 760,528,155,715,385,200 | 36.448276 | 78 | 0.422652 | false | 2.721805 | false | false | false |
MathGen/oppgavegenerator | oppgavegen/views/game_views.py | 1 | 8226 | from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response, redirect
from django.http import HttpResponseForbidden
from django.template import RequestContext
from oppgavegen.generation_folder.generation import generate_level
from oppgavegen.view_logic.rating import *
from oppgavegen.view_logic.progress import *
from oppgavegen.models import Set, Chapter, Level
from oppgavegen.forms import QuestionForm
from oppgavegen.view_logic.submit_and_answer import *
@login_required
def game(request, set_id):
context = RequestContext(request)
set = Set.objects.get(pk=set_id)
set_title = set.name
if request.user in set.users.all():
goto = render_to_response('game/screen.html', {'set_id': set_id, 'set_title': set_title}, context)
else:
goto = render_to_response('game/set_notmember.html', {'set': set}, context)
return goto
def chapters(request, set_id):
game_set = Set.objects.get(pk=set_id)
set_title = game_set.name
is_requirement = game_set.is_requirement
is_password_protected = game_set.password_protected
set_chapters = game_set.chapters.all()
context = RequestContext(request)
medals = [] # Both lists get updated in chapter_progress
completed = []
if is_requirement:
# In case we want to do something special if the set is a requirement type set
progress_number = chapter_progress(request.user, game_set, medals, completed)
else:
progress_number = chapter_progress(request.user, game_set, medals, completed)
order = game_set.order
set_chapters_ordered = []
for x in order.split(','):
for chapter in set_chapters:
if chapter.pk == int(x):
set_chapters_ordered.append(chapter)
break
if request.is_ajax():
response = render_to_response('game/chapters.html',
{'chapters': set_chapters_ordered, 'medals': json.dumps(medals),
'completed': json.dumps(completed), 'progress_number': progress_number,
'set_id': set_id, 'is_requirement': is_requirement,
'is_password_protected': is_password_protected}, context)
else:
response = render_to_response('game/chapters_noajax.html',
{'chapters': set_chapters_ordered, 'medals': json.dumps(medals),
'completed': json.dumps(completed), 'progress_number': progress_number,
'set_id': set_id, "set_title": set_title, "is_requirement": is_requirement,
'is_password_protected': is_password_protected}, context)
return response
def levels(request, chapter_id):
game_chapter = Chapter.objects.get(pk=chapter_id)
in_requirement_set = game_chapter.in_requirement_set
chapter_levels = game_chapter.levels.all()
chapter_title = game_chapter.name
context = RequestContext(request)
if in_requirement_set:
progress_number = len(chapter_levels)
else:
progress_number = calculate_progress(request.user, game_chapter)
star_per_level = get_stars_per_level(request.user, game_chapter)
order = game_chapter.order
chapter_levels_ordered = []
for x in order.split(','):
        for level in chapter_levels:
            if level.pk == int(x):
                chapter_levels_ordered.append(level)
                break
if request.is_ajax():
return render_to_response('game/levels.html',
{'levels': chapter_levels_ordered, 'chapter_title': chapter_title,
'progress_number': progress_number, 'spl': star_per_level, 'chapter_id': chapter_id,
'in_requirement_set':in_requirement_set},
context)
else:
return render_to_response('game/levels_noajax.html',
{'levels': chapter_levels_ordered, 'chapter_title': chapter_title,
'progress_number': progress_number, 'spl': star_per_level, 'chapter_id': chapter_id,
'in_requirement_set':in_requirement_set},
context)
@login_required
def get_template(request):
"""Gets a template for a given level"""
context = RequestContext(request)
#if request.method == 'POST':
    context_dict = {'message': 'Something went wrong.'}
form = request.POST
    if form.get('level_id') is None:
return redirect('/')
level_id = int(form.get('level_id'))
chapter_id = int(form.get('chapter_id'))
set_id = int(form.get('set_id'))
set = Set.objects.get(pk=set_id)
#if check_for_level_skip(request.user, Chapter.objects.get(pk=chapter_id), level_id):
# return render_to_response('game/template.html', context_dict, context)
context['set_title'] = set.name
context['set_id'] = set_id
context['chapter_id'] = chapter_id
context['chapter_title'] = Chapter.objects.get(pk=chapter_id).name
context['level_title'] = Level.objects.get(pk=level_id).name
context['level_id'] = level_id
context_dict = generate_level(request.user, level_id)
context_dict['rating'] = get_user_rating(request.user)
level = Level.objects.get(pk=level_id)
context_dict['stars'] = get_user_stars_for_level(request.user, level)
context_dict['ulp'] = get_user_rating_for_level(request.user, level)
if request.is_ajax():
return render_to_response('game/template.html', context_dict, context)
else:
return render_to_response('game/template_noajax.html', context_dict, context)
def get_solution(request, level=1):
"""Returns a render of answers.html"""
context = RequestContext(request)
    cheat_message = '\\text{An illegal character was used in the answer}'
    required_message = '\\text{Your answer does not fulfil all requirements}'
render_to = 'game/answer.html'
if request.method == 'POST':
form = QuestionForm(request.POST)
if form.is_valid():
form_values = form.process()
template = Template.objects.get(pk=form_values['primary_key'])
user_answer = form_values['user_answer']
try:
disallowed = json.loads(template.disallowed)
except ValueError:
disallowed = []
try:
required = json.loads(template.required)
except ValueError:
required = []
context_dict = make_answer_context_dict(form_values)
if (cheat_check(user_answer, disallowed, form_values['variable_dictionary'].split('§'))) and\
(form_values['template_type'] == 'normal') and (context_dict['user_won']):
context_dict['answer'] = cheat_message
return render_to_response(render_to, context_dict, context)
elif (required_check(user_answer, required, form_values['variable_dictionary'].split('§'))) and \
(form_values['template_type'] == 'normal') and (context_dict['user_won']):
context_dict['answer'] = required_message
return render_to_response(render_to, context_dict, context)
if request.is_ajax():
new_user_rating, new_star = change_level_rating(template, request.user, context_dict['user_won'],
form_values['template_type'], level)
context_dict['chapter_id'] = request.POST['chapter_id']
context_dict['ulp'] = int(new_user_rating)
context_dict['new_star'] = new_star
context_dict['stars'] = get_user_stars_for_level(request.user, Level.objects.get(pk=level))
return render_to_response(render_to, context_dict, context)
else:
change_elo(template, request.user, context_dict['user_won'], form_values['template_type'])
            return render_to_response(render_to, context_dict, context)
else:
print(form.errors)
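# Hypothetical URLconf sketch (added for illustration; the project's real
# urls.py is not part of this file, so names and patterns below are assumed):
#
#     from django.conf.urls import url
#     from oppgavegen.views import game_views
#
#     urlpatterns = [
#         url(r'^game/(?P<set_id>\d+)/$', game_views.game),
#         url(r'^chapters/(?P<set_id>\d+)/$', game_views.chapters),
#         url(r'^levels/(?P<chapter_id>\d+)/$', game_views.levels),
#         url(r'^template/$', game_views.get_template),
#         url(r'^solution/(?P<level>\d+)/$', game_views.get_solution),
#     ]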
| bsd-3-clause | -671,454,598,317,702,500 | 44.181319 | 119 | 0.60501 | false | 3.899004 | false | false | false |
mtommasi/pygiftparser | setup.py | 1 | 1462 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
from setuptools import setup
try:
long_description = open("README.rst").read()
except IOError:
long_description = ""
LOCALEDIR = os.path.join('share', 'locale')
setup(
name="pygiftparser",
version="1.1",
url="https://github.com/mtommasi/pygiftparser",
description="GIFT parser in python that parses a Gift source code and loads data in a Question/Answer model for further use in an application",
license="MIT",
author="Marc Tommasi - UdL/INRIA",
author_email="[email protected]",
py_modules=['pygiftparser.parser',
'pygiftparser.i18n',
'pygiftparser.answer',
'pygiftparser.question',
'pygiftparser.utils'],
install_requires=['yattag', 'markdown', 'MarkdownSuperscript'],
long_description=long_description,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3.4",
"Topic :: Text Processing"
],
data_files=[(os.path.join('share',
'locale',
lang,
'LC_MESSAGES'),
[os.path.join('share',
'locale',
lang,
'LC_MESSAGES',
'pygiftparser.mo')])
for lang in os.listdir(LOCALEDIR)]
)
| mit | -5,657,474,728,144,140,000 | 33 | 147 | 0.53078 | false | 4.11831 | false | false | false |
agartland/utils | custom_legends.py | 1 | 2291 | import matplotlib.pyplot as plt
import numpy as np
__all__ = ['colorLegend',
'symbolLegend']
def colorLegend(colors, labels, alphas=None, edgecolor='black', loc='best', axh=None, **legendKwargs):
"""Custom matplotlib legend with colors and labels etc.
Useful in cases where it is awkward to include labels on the appropriate plot() objects.
Parameters specify the characteristics of each line in the legend.
Parameters
----------
colors : list of valid matplotlib colors
labels : list of strings
alphas : list of alpha values
edgecolor : single valid matplotlib color
All remaining kwargs are passed to legend()
"""
if axh is None:
axh = plt.gca()
if alphas is None:
alphas = np.ones(len(colors))
circles = (plt.Circle((0, 0), fc=c, ec=edgecolor, alpha=a) for c, a in zip(colors, alphas))
lh = axh.legend(circles,
labels,
loc=loc,
**legendKwargs)
return lh
def symbolLegend(symbols, labels, facecolors=None, edgecolors=None, alphas=None, loc='best', **legendKwargs):
"""Custom matplotlib legend with lines, symbols and labels etc.
Useful in cases where it is awkward to include labels on the appropriate plot() objects.
Parameters specify the characteristics of each line in the legend.
Parameters
----------
symbols : list of valid matplotlib symbols
E.g. 'xs^*.<>' or other matplotlib.markers
labels : list of strings
facecolors : list of valid matplotlib colors
edgecolors : list of valid matplotlib colors
alphas : list of alpha values
All remaining kwargs are passed to legend()
"""
if alphas is None:
alphas = np.ones(len(symbols))
if edgecolors is None:
edgecolors = ['black'] * len(symbols)
if facecolors is None:
facecolors = ['white'] * len(symbols)
lh = plt.legend((plt.Line2D([0], [0], ls = '', marker = s, markerfacecolor = mfc, markeredgecolor = ec, alpha = a) for s, mfc, ec, a in zip(symbols, facecolors, edgecolors, alphas)),
labels,
loc,
numpoints=1,
**legendKwargs)
return lh
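# Minimal usage sketch (added; not part of the original module). Data and
# labels below are illustrative.
if __name__ == '__main__':
    plt.scatter(np.arange(5), np.arange(5) ** 2, c='red')
    plt.scatter(np.arange(5), np.arange(5) + 2.0, c='blue')
    colorLegend(['red', 'blue'], ['quadratic', 'linear'], loc='upper left')
    plt.show()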
| mit | 6,611,612,067,164,619,000 | 34.951613 | 186 | 0.61196 | false | 4.1883 | false | false | false |
radez/packstack | packstack/installer/utils/shell.py | 1 | 4512 | # -*- coding: utf-8 -*-
import re
import types
import logging
import subprocess
from ..exceptions import (ExecuteRuntimeError, ScriptRuntimeError,
NetworkError)
from .strings import mask_string
block_fmt = ("\n============= %(title)s ==========\n%(content)s\n"
"======== END OF %(title)s ========")
def execute(cmd, workdir=None, can_fail=True, mask_list=None,
use_shell=False, log=True):
"""
Runs shell command cmd. If can_fail is set to False
ExecuteRuntimeError is raised if command returned non-zero return
code. Otherwise
"""
mask_list = mask_list or []
repl_list = [("'", "'\\''")]
if not isinstance(cmd, types.StringType):
import pipes
masked = ' '.join((pipes.quote(i) for i in cmd))
else:
masked = cmd
masked = mask_string(masked, mask_list, repl_list)
if log:
logging.info("Executing command:\n%s" % masked)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, cwd=workdir,
shell=use_shell, close_fds=True)
out, err = proc.communicate()
masked_out = mask_string(out, mask_list, repl_list)
masked_err = mask_string(err, mask_list, repl_list)
if log:
logging.debug(block_fmt % {'title': 'STDOUT', 'content': masked_out})
if proc.returncode:
if log:
logging.debug(block_fmt % {'title': 'STDERR',
'content': masked_err})
if can_fail:
msg = 'Failed to execute command: %s' % masked_out
raise ExecuteRuntimeError(msg, stdout=out, stderr=err)
return proc.returncode, out
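# Example usage (added, illustrative only):
#
#     rc, out = execute(['ls', '-l', '/tmp'])
#     rc, out = execute('echo $HOME', use_shell=True, can_fail=False)
#
# Passwords can be kept out of the logs via mask_list, e.g.:
#
#     execute(['mysql', '-u', 'root', '-p%s' % password],
#             mask_list=[password])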
class ScriptRunner(object):
_pkg_search = 'rpm -q'
def __init__(self, ip=None):
self.script = []
self.ip = ip
def append(self, s):
self.script.append(s)
def clear(self):
self.script = []
def execute(self, can_fail=True, mask_list=None, log=True):
mask_list = mask_list or []
repl_list = [("'", "'\\''")]
script = "\n".join(self.script)
masked = mask_string(script, mask_list, repl_list)
if log:
logging.info("[%s] Executing script:\n%s" %
(self.ip or 'localhost', masked))
_PIPE = subprocess.PIPE # pylint: disable=E1101
if self.ip:
cmd = ["ssh", "-o", "StrictHostKeyChecking=no",
"-o", "UserKnownHostsFile=/dev/null",
"root@%s" % self.ip, "bash -x"]
else:
cmd = ["bash", "-x"]
obj = subprocess.Popen(cmd, stdin=_PIPE, stdout=_PIPE, stderr=_PIPE,
close_fds=True, shell=False)
script = "function t(){ exit $? ; } \n trap t ERR \n" + script
out, err = obj.communicate(script)
masked_out = mask_string(out, mask_list, repl_list)
masked_err = mask_string(err, mask_list, repl_list)
if log:
logging.debug(block_fmt % {'title': 'STDOUT',
'content': masked_out})
if obj.returncode:
if log:
logging.debug(block_fmt % {'title': 'STDERR',
'content': masked_err})
if can_fail:
                pattern = r'^ssh:'
if re.search(pattern, err):
raise NetworkError(masked_err, stdout=out, stderr=err)
else:
msg = 'Failed to run remote script: %s' % masked_out
raise ScriptRuntimeError(msg, stdout=out, stderr=err)
return obj.returncode, out
def template(self, src, dst, varsdict):
with open(src) as fp:
content = fp.read() % varsdict
self.append("cat > %s <<- EOF\n%s\nEOF\n" % (dst, content))
def if_not_exists(self, path, command):
self.append("[ -e %s ] || %s" % (path, command))
def if_exists(self, path, command):
self.append("[ -e %s ] && %s" % (path, command))
def if_installed(self, pkg, command):
self.append("%s %s && %s" % (self._pkg_search, pkg, command))
def if_not_installed(self, pkg, command):
self.append("%s %s || %s" % (self._pkg_search, pkg, command))
def chown(self, target, uid, gid):
self.append("chown %s:%s %s" % (uid, gid, target))
def chmod(self, target, mode):
self.append("chmod %s %s" % (mode, target))
| apache-2.0 | -8,818,339,688,155,416,000 | 33.707692 | 77 | 0.526596 | false | 3.750623 | false | false | false |
hivesolutions/netius | src/netius/clients/mjpg.py | 1 | 4936 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Netius System
# Copyright (c) 2008-2020 Hive Solutions Lda.
#
# This file is part of Hive Netius System.
#
# Hive Netius System is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Netius System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Netius System. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <[email protected]>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2020 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import os
import netius
from . import http
class MJPGProtocol(http.HTTPProtocol):
MAGIC_JPEG = b"\xff\xd8\xff\xe0"
""" The magic signature for the JPEG infra-structure, this
sequence of bytes is going to be used to detect new frames
coming from the HTTP based stream """
EOI_JPEG = b"\xff\xd9"
""" The sequence of bytes that indicate the end of the current
image, when these bytes are detected on the stream the message
should be "flushed" to the current output (emit) """
def __init__(self, *args, **kwargs):
http.HTTPProtocol.__init__(self, *args, **kwargs)
self.buffer_l = []
def add_buffer(self, data):
self.buffer_l.append(data)
def get_buffer(self, delete = True):
if not self.buffer_l: return b""
buffer = b"".join(self.buffer_l)
if delete: del self.buffer_l[:]
return buffer
def on_partial(self, data):
http.HTTPProtocol.on_partial(self, data)
# retrieves the reference to the top class that is going to
# be used for the correct parsing of the image
cls = self.__class__
# tries to find the end of image (EOI) indicator in the current
# received data, and in case it's not found add the (partial)
# data to the current buffer, to be latter processed
eoi_index = data.find(cls.EOI_JPEG)
if eoi_index == -1: self.buffer_l.append(data); return
# calculates the size of the end of image (EOI) token so that
# this value will be used for the calculus of the image data
eoi_size = len(cls.EOI_JPEG)
# adds the partial valid data of the current chunk to the buffer
# and then joins the current buffer as the frame data, removing
# the multipart header from it (to become a valid image)
self.buffer_l.append(data[:eoi_index + eoi_size])
frame = b"".join(self.buffer_l)
multipart_index = frame.find(b"\r\n\r\n")
frame = frame[multipart_index + 4:]
# clears the current buffer and adds the remaining part of the
# current chunk, that may be already part of a new image
del self.buffer_l[:]
self.buffer_l.append(data[eoi_index + eoi_size:])
# calls the proper event handler for the new frame data that has
# just been received, triggering the processing of the frame
self.on_frame_mjpg(frame)
def on_frame_mjpg(self, data):
self.trigger("frame", self, data)
class MJPGClient(http.HTTPClient):
protocol = MJPGProtocol
if __name__ == "__main__":
index = 0
limit = 30
def on_frame(protocol, data):
global index
index += 1
if index >= limit: return protocol.close()
base_path = netius.conf("IMAGES_PATH", "images")
base_path = os.path.abspath(base_path)
base_path = os.path.normpath(base_path)
if not os.path.exists(base_path): os.makedirs(base_path)
path = os.path.join(base_path, "%08d.jpg" % index)
file = open(path, "wb")
try: file.write(data)
finally: file.close()
print("Saved frame %08d of %d bytes" % (index, len(data)))
def on_finish(protocol):
netius.compat_loop(loop).stop()
url = netius.conf("MJPG_URL", "http://euglena.stanford.edu:20005/?action=stream")
client = MJPGClient()
loop, protocol = client.get(url)
protocol.bind("frame", on_frame)
protocol.bind("finish", on_finish)
loop.run_forever()
loop.close()
else:
__path__ = []
| apache-2.0 | 5,063,929,398,906,030,000 | 32.263889 | 85 | 0.625659 | false | 3.612006 | false | false | false |
gameduell/pysupplies | tests/test_params.py | 1 | 3109 | import pytest
from supplies.annotate import delay
from supplies.params import param, Params
__author__ = 'dwae'
class Foo(Params):
@param
def bar(self, val: (1, 42)=23):
return val
@delay
def bla(self):
return ...
class Bar(Foo, Params):
@param
def baz(self, val: str='f00'):
return val
def test_basic():
foo = Foo()
assert foo.bar == 23
foo.bar = 1
assert foo.bar == 1
foo.bar = 42
assert foo.bar == 42
foo = Foo(bar=13)
assert foo.bar == 13
foo.bar = 37
assert foo.bar == 37
bar = Bar()
assert bar.bar == 23
assert bar.baz == 'f00'
bar = Bar(baz='')
assert bar.bar == 23
assert bar.baz == ''
bar = Bar(bar=6)
assert bar.bar == 6
assert bar.baz == 'f00'
bar = Bar(bar=12, baz='foo')
assert bar.bar == 12
assert bar.baz == 'foo'
bar.bar = 2
bar.baz = 'to'
assert bar.bar == 2
assert bar.baz == 'to'
with pytest.raises(TypeError):
Bar(bar=1, nil=None)
def test_export():
bar = Bar(bar=42, baz='foo')
params = bar.params
assert {'bar', 'baz'} == params.names
assert params.bar.name == 'bar'
assert params['baz'].name == 'baz'
assert params['bar'].value == 42
assert params.baz.value == 'foo'
assert params.bar.default == 23
assert params.baz.default == 'f00'
assert 'bar=42' in str(bar)
assert "baz='foo'" in repr(bar)
assert bar.bla is ...
with pytest.raises(KeyError):
params['bla']
with pytest.raises(AttributeError):
params.bla
class Convert(Params):
@param
def a(self, val=1):
return int(val)
@param
def b(self, val=''):
return str(val)
def test_convert():
conv = Convert()
assert conv.a == 1
assert conv.b == ''
conv = Convert(a='13', b=37)
assert conv.a == 13
assert conv.b == '37'
conv.a = '42'
assert conv.a == 42
conv.b = None
assert conv.b == str(None)
class Dependent(Params):
@param
def a(self, val=1):
return val
@param
def b(self, val=None):
if val is None:
return self.a + 1
else:
return val
@param
def c(self, val):
return self.a + val
@param
def d(self, val=3):
return self.a + val
def test_depend():
dep = Dependent()
assert dep.a == 1
assert dep.b == 2
dep.a = 2
assert dep.a == 2
assert dep.b == 3
dep.a = 1
assert dep.a == 1
assert dep.b == 2
dep.b = 4
dep.a = 5
assert dep.a == 5
assert dep.b == 4
dep.c = 3
assert dep.c == 8
dep.a = 3
dep.b = 2
assert dep.c == 6
assert dep.b == 2
del dep.b
assert dep.b == 4
del dep.a
assert dep.b == 2
del dep.c
with pytest.raises(TypeError):
dep.c
assert dep.d == 4
dep.a = 3
assert dep.d == 6
del dep.a
assert dep.d == 4
dep.d = 4
assert dep.d == 5
dep.a = 4
assert dep.d == 8
del dep.d
assert dep.d == 7
del dep.a
assert dep.d == 4
| mit | -5,483,111,285,384,543,000 | 15.537234 | 41 | 0.525893 | false | 3.248694 | true | false | false |
ddico/odoo | addons/fleet/models/fleet_vehicle_cost.py | 1 | 9422 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from dateutil.relativedelta import relativedelta
class FleetVehicleLogContract(models.Model):
_inherit = ['mail.thread', 'mail.activity.mixin']
_name = 'fleet.vehicle.log.contract'
_description = 'Contract information on a vehicle'
_order = 'state desc,expiration_date'
def compute_next_year_date(self, strdate):
oneyear = relativedelta(years=1)
start_date = fields.Date.from_string(strdate)
return fields.Date.to_string(start_date + oneyear)
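    # Example (added, illustrative): compute_next_year_date('2019-03-01')
    # returns '2020-03-01'; a leap-day input '2020-02-29' yields '2021-02-28'
    # because relativedelta clamps to the last valid day of the month.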
vehicle_id = fields.Many2one('fleet.vehicle', 'Vehicle', default=1, required=True, help='Vehicle concerned by this log')
cost_subtype_id = fields.Many2one('fleet.service.type', 'Type', help='Cost type purchased with this cost', domain=[('category', '=', 'contract')])
amount = fields.Float('Cost')
date = fields.Date(help='Date when the cost has been executed')
company_id = fields.Many2one('res.company', 'Company', default=lambda self: self.env.company)
currency_id = fields.Many2one('res.currency', related='company_id.currency_id')
name = fields.Char(string='Name', compute='_compute_contract_name', store=True)
active = fields.Boolean(default=True)
user_id = fields.Many2one('res.users', 'Responsible', default=lambda self: self.env.user, index=True)
start_date = fields.Date('Contract Start Date', default=fields.Date.context_today,
help='Date when the coverage of the contract begins')
expiration_date = fields.Date('Contract Expiration Date', default=lambda self:
self.compute_next_year_date(fields.Date.context_today(self)),
help='Date when the coverage of the contract expirates (by default, one year after begin date)')
days_left = fields.Integer(compute='_compute_days_left', string='Warning Date')
insurer_id = fields.Many2one('res.partner', 'Vendor')
purchaser_id = fields.Many2one(related='vehicle_id.driver_id', string='Driver')
ins_ref = fields.Char('Reference', size=64, copy=False)
state = fields.Selection([
('futur', 'Incoming'),
('open', 'In Progress'),
('expired', 'Expired'),
('closed', 'Closed')
], 'Status', default='open', readonly=True,
help='Choose whether the contract is still valid or not',
tracking=True,
copy=False)
notes = fields.Text('Terms and Conditions', help='Write here all supplementary information relative to this contract', copy=False)
cost_generated = fields.Float('Recurring Cost')
cost_frequency = fields.Selection([
('no', 'No'),
('daily', 'Daily'),
('weekly', 'Weekly'),
('monthly', 'Monthly'),
('yearly', 'Yearly')
], 'Recurring Cost Frequency', default='monthly', help='Frequency of the recuring cost', required=True)
service_ids = fields.Many2many('fleet.service.type', string="Included Services")
@api.depends('vehicle_id', 'cost_subtype_id')
def _compute_contract_name(self):
for record in self:
name = record.vehicle_id.name
if record.cost_subtype_id.name:
name = record.cost_subtype_id.name + ' ' + name
record.name = name
@api.depends('expiration_date', 'state')
def _compute_days_left(self):
"""return a dict with as value for each contract an integer
if contract is in an open state and is overdue, return 0
if contract is in a closed state, return -1
otherwise return the number of days before the contract expires
"""
for record in self:
if record.expiration_date and record.state in ['open', 'expired']:
today = fields.Date.from_string(fields.Date.today())
renew_date = fields.Date.from_string(record.expiration_date)
diff_time = (renew_date - today).days
record.days_left = diff_time > 0 and diff_time or 0
else:
record.days_left = -1
def write(self, vals):
res = super(FleetVehicleLogContract, self).write(vals)
if vals.get('expiration_date') or vals.get('user_id'):
self.activity_reschedule(['fleet.mail_act_fleet_contract_to_renew'], date_deadline=vals.get('expiration_date'), new_user_id=vals.get('user_id'))
return res
def contract_close(self):
for record in self:
record.state = 'closed'
def contract_draft(self):
for record in self:
record.state = 'futur'
def contract_open(self):
for record in self:
record.state = 'open'
@api.model
def scheduler_manage_contract_expiration(self):
# This method is called by a cron task
# It manages the state of a contract, possibly by posting a message on the vehicle concerned and updating its status
params = self.env['ir.config_parameter'].sudo()
delay_alert_contract = int(params.get_param('hr_fleet.delay_alert_contract', default=30))
date_today = fields.Date.from_string(fields.Date.today())
outdated_days = fields.Date.to_string(date_today + relativedelta(days=+delay_alert_contract))
nearly_expired_contracts = self.search([('state', '=', 'open'), ('expiration_date', '<', outdated_days)])
for contract in nearly_expired_contracts.filtered(lambda contract: contract.user_id):
contract.activity_schedule(
'fleet.mail_act_fleet_contract_to_renew', contract.expiration_date,
user_id=contract.user_id.id)
        expired_contracts = self.search([('state', 'not in', ['expired', 'closed']), ('expiration_date', '<', fields.Date.today())])
expired_contracts.write({'state': 'expired'})
futur_contracts = self.search([('state', 'not in', ['futur', 'closed']), ('start_date', '>', fields.Date.today())])
futur_contracts.write({'state': 'futur'})
now_running_contracts = self.search([('state', '=', 'futur'), ('start_date', '<=', fields.Date.today())])
now_running_contracts.write({'state': 'open'})
def run_scheduler(self):
self.scheduler_manage_contract_expiration()
class FleetVehicleLogServices(models.Model):
_name = 'fleet.vehicle.log.services'
_inherit = ['mail.thread', 'mail.activity.mixin']
_rec_name = 'service_type_id'
_description = 'Services for vehicles'
@api.model
def default_get(self, default_fields):
res = super(FleetVehicleLogServices, self).default_get(default_fields)
service = self.env.ref('fleet.type_service_service_8', raise_if_not_found=False)
res.update({
'date': fields.Date.context_today(self),
'service_type_id': service.id if service else None,
})
return res
active = fields.Boolean(default=True)
vehicle_id = fields.Many2one('fleet.vehicle', 'Vehicle', default=1, required=True, help='Vehicle concerned by this log')
amount = fields.Float('Cost')
description = fields.Char('Description')
odometer_id = fields.Many2one('fleet.vehicle.odometer', 'Odometer', help='Odometer measure of the vehicle at the moment of this log')
odometer = fields.Float(compute="_get_odometer", inverse='_set_odometer', string='Odometer Value',
help='Odometer measure of the vehicle at the moment of this log')
odometer_unit = fields.Selection(related='vehicle_id.odometer_unit', string="Unit", readonly=True)
date = fields.Date(help='Date when the cost has been executed')
company_id = fields.Many2one('res.company', 'Company', default=lambda self: self.env.company)
currency_id = fields.Many2one('res.currency', related='company_id.currency_id')
purchaser_id = fields.Many2one(related='vehicle_id.driver_id', string="Driver")
inv_ref = fields.Char('Vendor Reference')
vendor_id = fields.Many2one('res.partner', 'Vendor')
notes = fields.Text()
service_type_id = fields.Many2one('fleet.service.type', 'Service Type', required=True)
state = fields.Selection([
('todo', 'To Do'),
('running', 'Running'),
('done', 'Done'),
('cancelled', 'Cancelled'),
], default='todo', string='Stage')
def _get_odometer(self):
self.odometer = 0
for record in self:
if record.odometer_id:
record.odometer = record.odometer_id.value
def _set_odometer(self):
for record in self:
if not record.odometer:
raise UserError(_('Emptying the odometer value of a vehicle is not allowed.'))
odometer = self.env['fleet.vehicle.odometer'].create({
'value': record.odometer,
'date': record.date or fields.Date.context_today(record),
'vehicle_id': record.vehicle_id.id
})
self.odometer_id = odometer
@api.model_create_multi
def create(self, vals_list):
for data in vals_list:
if 'odometer' in data and not data['odometer']:
# if received value for odometer is 0, then remove it from the
# data as it would result to the creation of a
# odometer log with 0, which is to be avoided
del data['odometer']
return super(FleetVehicleLogServices, self).create(vals_list) | agpl-3.0 | 1,315,773,969,023,667,500 | 48.335079 | 156 | 0.641159 | false | 3.737406 | false | false | false |
bird-house/bird-feeder | birdfeeder/walker.py | 1 | 5030 | import os
from netCDF4 import Dataset as NCDataset
from dateutil import parser as dateparser
from datetime import datetime
from birdfeeder.utils import humanize_filesize
import logging
logger = logging.getLogger(__name__)
SPATIAL_VARIABLES = [
'longitude', 'lon',
'latitude', 'lat',
'altitude', 'alt', 'level', 'height',
'rotated_pole',
'rotated_latitude_longitude',
'time']
class Dataset(object):
def __init__(self, filepath, basedir='/'):
self.filepath = filepath
self.path = os.path.sep + os.path.relpath(filepath, basedir)
self.bytes = os.path.getsize(filepath)
self.size = humanize_filesize(self.bytes)
self.name = os.path.basename(filepath)
self.url = 'file://' + filepath
self.content_type = 'application/netcdf'
self.resourcename = filepath
self._last_modified = None
self.attributes = {}
self._parse(filepath)
def __str__(self):
return "attributes={0}".format(self.attributes)
@property
def last_modified(self):
if self._last_modified is None:
mtime = os.path.getmtime(self.filepath)
self._last_modified = datetime.fromtimestamp(mtime).strftime('%Y-%m-%dT%H:%M:%SZ')
return self._last_modified
@property
def variable(self):
return self.attributes.get('variable')
@property
def variable_long_name(self):
return self.attributes.get('variable_long_name')
@property
def cf_standard_name(self):
return self.attributes.get('cf_standard_name')
@property
def units(self):
return self.attributes.get('units')
@property
def comments(self):
return self.attributes.get('comments')
@property
def institute(self):
return self.attributes.get('institute_id')
@property
def experiment(self):
return self.attributes.get('experiment_id')
@property
def project(self):
return self.attributes.get('project_id')
@property
def model(self):
return self.attributes.get('model_id')
@property
def frequency(self):
return self.attributes.get('frequency')
@property
def creation_date(self):
if 'creation_date' in self.attributes:
return self.attributes['creation_date'][0]
else:
return None
def _add_attribute(self, key, value):
        if key not in self.attributes:
self.attributes[key] = []
self.attributes[key].append(value)
def _parse(self, filepath):
filepath = os.path.abspath(filepath)
logger.debug("parse %s", filepath)
try:
ds = NCDataset(filepath, 'r')
# loop over global attributes
for attname in ds.ncattrs():
attvalue = getattr(ds, attname)
if 'date' in attname.lower():
# must format dates in Solr format, if possible
try:
solr_dt = dateparser.parse(attvalue)
self._add_attribute(attname, solr_dt.strftime('%Y-%m-%dT%H:%M:%SZ') )
except:
pass # disregard this attribute
else:
self._add_attribute(attname, attvalue)
# loop over dimensions
for key, dim in ds.dimensions.items():
self._add_attribute('dimension', "%s:%s" % (key, len(dim)) )
# loop over variable attributes
for key, variable in ds.variables.items():
if key.lower() in ds.dimensions:
# skip dimension variables
continue
if '_bnds' in key.lower():
continue
if key.lower() in SPATIAL_VARIABLES:
continue
self._add_attribute('variable', key)
self._add_attribute('variable_long_name', getattr(variable, 'long_name', None) )
cf_standard_name = getattr(variable, 'standard_name', None)
if cf_standard_name is not None:
self._add_attribute('cf_standard_name', getattr(variable, 'standard_name', None) )
self._add_attribute('units', getattr(variable, 'units', None) )
except Exception as e:
            logger.error(e)
finally:
try:
ds.close()
except:
pass
def crawl(start_dir):
if not os.path.isdir(start_dir):
raise Exception("Invalid start directory: %s", start_dir)
logger.info('start directory = %s', start_dir)
for directory, subdirs, files in os.walk(start_dir):
# loop over files in this directory
for filename in files:
# only parse .nc files
if filename.endswith('.nc'):
filepath = os.path.join(directory, filename)
yield Dataset(filepath, basedir=start_dir)
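# Example usage (added, illustrative only):
#
#     for ds in crawl('/path/to/netcdf/archive'):
#         print(ds.name, ds.size, ds.variable)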
| apache-2.0 | -8,682,199,717,071,598,000 | 28.763314 | 102 | 0.56501 | false | 4.273577 | false | false | false |
dvcolgan/ludumdare27 | game/management/commands/generate_map_pngs.py | 1 | 2676 | from django.core.management.base import BaseCommand, CommandError
from game.models import *
from settings import MIN_COL, MAX_COL, MIN_ROW, MAX_ROW, GRID_SIZE
from PIL import Image
from PIL import ImageDraw
def hex_to_rgb(value):
value = value.lstrip('#')
lv = len(value)
if lv == 1:
v = int(value, 16)*17
return v, v, v
if lv == 3:
return tuple(int(value[i:i+1], 16)*17 for i in range(0, 3))
    return tuple(int(value[i:i + lv / 3], 16) for i in range(0, lv, lv / 3))
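# Examples (added, illustrative):
#   hex_to_rgb('#fff')    -> (255, 255, 255)
#   hex_to_rgb('#ff0000') -> (255, 0, 0)
#   hex_to_rgb('8')       -> (136, 136, 136)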
class Command(BaseCommand):
args = ''
help = 'Generate pngs of the board for zooming'
def handle(self, *args, **options):
squares = Square.objects.order_by('row', 'col')
width = (20 + MAX_COL - MIN_COL) * GRID_SIZE
height = (20 + MAX_ROW - MIN_ROW) * GRID_SIZE
im = Image.new('RGB', (width, height), 'black')
#http://effbot.org/imagingbook/imagedraw.htm
draw = ImageDraw.Draw(im)
for square in squares:
            if square.owner is not None:
fill_color = square.owner.color
else:
fill_color = '#ffffff'
x1 = square.col*GRID_SIZE+width/2
y1 = square.row*GRID_SIZE+height/2
x2 = square.col*GRID_SIZE+GRID_SIZE+width/2
y2 = square.row*GRID_SIZE+GRID_SIZE+height/2
draw.rectangle(((x1, y1), (x2, y2)), fill=fill_color)
for i, unit in enumerate(square.units.all()):
if i == 0:
ax1 = x1 + GRID_SIZE/4 - GRID_SIZE/8
ay1 = y1 + GRID_SIZE/4 - GRID_SIZE/8
ax2 = x1 + GRID_SIZE/4 + GRID_SIZE/8
ay2 = y1 + GRID_SIZE/4 + GRID_SIZE/8
                if i == 1:
                    ax1 = x1 + 3*GRID_SIZE/4 - GRID_SIZE/8
                    ay1 = y1 + GRID_SIZE/4 - GRID_SIZE/8
                    ax2 = x1 + 3*GRID_SIZE/4 + GRID_SIZE/8
                    ay2 = y1 + GRID_SIZE/4 + GRID_SIZE/8
if i == 2:
ax1 = x1 + 3*GRID_SIZE/4 - GRID_SIZE/8
ay1 = y1 + 3*GRID_SIZE/4 - GRID_SIZE/8
ax2 = x1 + 3*GRID_SIZE/4 + GRID_SIZE/8
ay2 = y1 + 3*GRID_SIZE/4 + GRID_SIZE/8
                if i == 3:
                    ax1 = x1 + GRID_SIZE/4 - GRID_SIZE/8
                    ay1 = y1 + 3*GRID_SIZE/4 - GRID_SIZE/8
                    ax2 = x1 + GRID_SIZE/4 + GRID_SIZE/8
                    ay2 = y1 + 3*GRID_SIZE/4 + GRID_SIZE/8
draw.ellipse(((ax1, ay1, ax2, ay2)), outline='#000000', fill=unit.owner.color)
im.save('static/images/minimap.png', 'PNG')
print 'Saved full image'
| mit | -4,220,150,537,329,599,500 | 36.690141 | 94 | 0.501495 | false | 3.111628 | false | false | false |
petr-kalinin/progrobot | tools/import_python3.py | 1 | 9519 | #!/usr/bin/python3
from pymongo import MongoClient
import os
import os.path
import re
import bs4
import itertools
from bs4 import BeautifulSoup
import utils
class ReferenceItem:
def __init__(self):
self.name = ""
self.module = ""
self.usage = ""
self.short = ""
self.full = ""
self.fullest = ""
self.href = ""
self.copyright = ""
self.subitems = []
def __str__(self):
return ("name: " + self.name + "\n"
+ "href: " + self.href + "\n"
+ "module: " + str(self.module) + "\n"
+ "usage: " + str(self.usage) + "\n"
+ "short: " + self.short + "\n\n"
#+ "full: " + self.full + "\n\n"
#+ "fullest: " + self.fullest + "\n\n"
+ "subitems: " + str(self.subitems)
+ "copyright: " + self.copyright)
def to_dict(self):
return {"name" : self.name,
"href": self.href,
"module" : self.module,
"usage" : self.usage,
"short" : self.short,
"full" : self.full,
"fullest" : self.fullest,
"subitems" : self.subitems,
"copyright": self.copyright}
def hasclass(tag, classes):
for cl in tag.get("class", []):
if cl in classes:
return True
return False
def create_ref(refs, name, module, base_href):
    if name not in refs:
refs[name] = ReferenceItem()
refs[name].name = name
if module:
refs[name].module = "import " + module
refs[name].href = base_href + "#" + name
refs[name].copyright = "ⓒ Python developers, " + refs[name].href
parent = ".".join(name.split(".")[:-1])
if parent != "" and parent[0] == "@":
parent = parent[1:]
        if parent not in refs:
refs[parent] = ReferenceItem()
subitem = (name, "")
if not subitem in refs[parent].subitems:
refs[parent].subitems.append(subitem)
def can_be_short(text):
#print("Testing string `" + text + "`")
if re.match("New in version", text):
return False
if re.match("Source code:", text):
return False
return True
def next_tag(tag):
while not tag.next_sibling and tag.parent:
tag = tag.parent
if tag.next_sibling:
return tag.next_sibling
else:
return None
def parse_file(filename, refs):
base_href = "https://docs.python.org/" + filename[2:]
soup = BeautifulSoup(open(filename), 'lxml')
module_a = soup.h1.a
if not "headerlink" in module_a.get("class"):
module = module_a.string
else:
module = None
#print("found module", module)
currentName = module
if currentName:
create_ref(refs, currentName, module, base_href)
tag = soup.h1.next_sibling
while tag is not None:
#print("Tag: `", tag, "`")
if isinstance(tag, bs4.element.Comment):
tag = tag.next_element
continue
if isinstance(tag, bs4.element.NavigableString):
text = tag.strip()
if text != "" and currentName:
if refs[currentName].short == "":
if can_be_short(text):
refs[currentName].short = text
refs[currentName].full += text
tag = tag.next_element
continue
#if currentName:
# print(currentName, tag.name, "`"+refs[currentName].full+"`", "\n\n")
if hasclass(tag, ["sphinxsidebar"]):
break
elif hasclass(tag, ["section", "seealso"]):
currentName = None
tag = tag.next_element
elif hasclass(tag, ['class', 'classmethod', 'method', 'function', 'data', 'exception', 'attribute', 'staticmethod', 'cmdoption']):
currentName = tag.dt.get('id')
usage = "".join(tag.dt.strings).strip()
if currentName and usage[0] == "@":
currentName = "@" + currentName
if currentName:
create_ref(refs, currentName, module, base_href)
refs[currentName].usage = usage[:-1].strip()
tag = tag.dd.next_element
elif tag.name in ('p', 'pre', 'code', 'li', 'dt', 'dd', 'tr', 'td', 'th'):
if (tag.name == 'p'
and len(tag.contents) == 1
and isinstance(tag.contents[0], bs4.element.Tag)
and tag.contents[0].name=="strong"):
currentName = None
if currentName:
if refs[currentName].short == "":
text = "".join(tag.strings)
if can_be_short(text):
refs[currentName].short = "".join(str(x) for x in tag.contents)
refs[currentName].full += str(tag)
tag = next_tag(tag)
if not tag:
break
else:
tag = tag.next_element
return refs
def insert_ref(ref, reference, index):
result = reference.insert_one(ref.to_dict())
#print("insert: ", ref.to_dict())
names = [ref.name]
for name in names:
split_name = name.strip('@ ').split(".")
if len(split_name) > 3:
print(split_name," --- ", ref.name)
for i in range(len(split_name)):
perm = [x.lower() for x in split_name[i:]]
subname = " ".join(sorted(perm))
doc = {
"reference_id" : result.inserted_id,
"name" : subname,
"relevance" : 1-i/len(split_name),
"full_name" : ref.name
}
#print("index: ", doc)
index.insert_one(doc)
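    # Worked example (added): for name "datetime.datetime.utcnow" the loop
    # above indexes three keys with decreasing relevance:
    #   "datetime datetime utcnow" -> relevance 1.0
    #   "datetime utcnow"          -> relevance 2/3
    #   "utcnow"                   -> relevance 1/3
    # (each key is the sorted, lower-cased dotted-name suffix).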
def process_file(filename, refs):
print("\n-----------\n" + filename)
print(".", end="", flush=True)
parse_file(filename, refs)
def finalize(refs):
for ref_name, ref in refs.items():
if ref.name == "":
ref.name = ref_name
new_subitems = []
for item in ref.subitems:
new_subitems.append((item[0], utils.first_sentence(refs[item[0]].short)))
ref.subitems = new_subitems
os.chdir("../raw_data/python3/docs.python.org")
client = MongoClient()
client.drop_database("python3")
db = client.python3
reference = db.reference
index = db.index
index.create_index("name")
refs = {}
for directory, subdirs, files in os.walk("."):
for f in files:
process_file(os.path.join(directory, f), refs)
#process_file("3/library/itertools.html", refs)
#process_file("3/library/re.html", refs)
#process_file("3/library/json.html", refs)
#process_file("3/library/pprint.html", refs)
#process_file("3/library/unittest.html", refs)
#process_file("3/library/ctypes.html", refs)
finalize(refs)
#print(refs['datetime.datetime'].subitems)
for ref in refs.values():
if ref.name != "":
#print(ref)
#print("\n")
insert_ref(ref, reference, index)
#------- Testing
def assert_starts_with(text, start):
if not text.startswith(start):
print("Text `" + text + "` does not start with `" + start + "`")
raise AssertionError()
def assert_ends_with(text, start):
if not text.endswith(start):
print("Text `" + text + "` does not end with `" + start + "`")
raise AssertionError()
def find_subitem(ref, subitem):
found = None
for item in ref.subitems:
if item[0] == subitem:
assert not found
found = item
return found
def check_urllib_parse():
assert_starts_with(refs["urllib.parse"].short, "This module")
item = find_subitem(refs["urllib"], "urllib.parse")
assert_starts_with(item[1], "This module")
assert_ends_with(item[1], "“base URL.”")
def check_unittest_mock():
assert_starts_with(refs["unittest.mock"].short, '<a class="reference internal"')
item = find_subitem(refs["unittest"], "unittest.mock")
assert_starts_with(item[1], '<a class="reference internal"')
def check_urllib():
assert_ends_with(refs["urllib"].full, "files</li>")
def check_re():
assert len(refs["re"].subitems) > 0
assert "re.match" in refs
assert refs["re"].subitems[0][0] == "re.compile"
assert_ends_with(refs["re"].subitems[0][1], "described below.")
assert len(refs["re"].subitems[0][1].strip()) > 0
def check_unittest():
assert_ends_with(refs["unittest"].full, "executing the tests.</dd>")
def check_unittest_skip():
assert "@unittest.skip" in refs
assert find_subitem(refs["unittest"], "@unittest.skip")
def check_utcnow():
assert "datetime.datetime.utcnow" in refs
assert find_subitem(refs["datetime.datetime"], "datetime.datetime.utcnow")
def check_pprint():
assert "pprint.pprint" in refs
assert_ends_with(refs["pprint.pprint"].full, "</pre>")
def check_itertools():
assert_ends_with(refs['itertools'].full, 'vector2))</span></code>.</p>')
def check_ctypes():
assert "ctypes.Array._length_" in refs
assert find_subitem(refs["ctypes.Array"], "ctypes.Array._length_")
def check_paragraph_signs():
found = False
for ref in refs:
if "¶" in refs[ref].full:
print("¶ found in ", ref)
found = True
assert not found
check_paragraph_signs()
check_ctypes()
check_itertools()
check_re()
check_pprint()
check_utcnow()
check_urllib_parse()
check_unittest_mock()
check_urllib()
check_unittest()
check_unittest_skip()
| agpl-3.0 | 1,616,040,906,343,496,200 | 30.916107 | 138 | 0.551887 | false | 3.639878 | true | false | false |
vitobasso/audio-ml | src/train_raw.py | 1 | 2372 | __author__ = 'victor'
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import RPropMinusTrainer
from pybrain import FeedForwardNetwork, FullConnection, IdentityConnection, TanhLayer
from datasource import *
# dataset
timeWidth = 5140 # num of samples to input to the net
mixer = MixedStream('piano', 'acapella', timeWidth)
# training
batchsize = 100
epochs = 1000
def build_net(width):
net = FeedForwardNetwork()
# layers
net.addInputModule(TanhLayer(width, name='in'))
net.addOutputModule(TanhLayer(width, name='out'))
net.addModule(TanhLayer(100, name='h1'))
net.addModule(TanhLayer(50, name='h2'))
net.addModule(TanhLayer(100, name='h3'))
# connections
net.addConnection(FullConnection(net['in'], net['h1']))
net.addConnection(FullConnection(net['h1'], net['h2']))
# net.addConnection(FullConnection(net['h1'], net['h3']))
# net.addConnection(FullConnection(net['h1'], net['out']))
net.addConnection(FullConnection(net['h2'], net['h3']))
# net.addConnection(FullConnection(net['h2'], net['out']))
net.addConnection(FullConnection(net['h3'], net['out']))
net.addConnection(IdentityConnection(net['in'], net['out']))
net.sortModules()
return net
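# (Added note) The IdentityConnection from 'in' to 'out' above acts as a
# residual-style skip path: the tanh hidden stack only needs to learn a
# correction to the mixed signal instead of reconstructing it from scratch.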
def train(mix, target):
print 'preparing to train, netwidth=%d, batchsize=%d, epochs=%d' % (timeWidth, batchsize, epochs)
net = build_net(timeWidth)
trainer = RPropMinusTrainer(net, batchlearning=True, learningrate=0.1, lrdecay=1, momentum=0.03, weightdecay=0.01)
def train_batch(i):
batch = SupervisedDataSet(timeWidth, timeWidth)
begin = i * batchsize
end = begin + batchsize
for i in np.arange(begin, end):
batch.addSample(mix[i], target[i])
trainer.setData(batch)
err = trainer.train()
return err
print 'training...'
plot_cont(train_batch, epochs)
# print 'saving net...'
# err = trainer.train() # train an extra time just to get the final error
# savenet(trainer.module, partlen, err)
return net
def test(net, mix):
print 'testing...'
result = np.empty(timeWidth)
for i in np.arange(500):
netout = net.activate(mix[i])
result = np.append(result, netout, axis=0)
wavwrite(result, outputfile='output.wav')
net = train(mixer, mixer.stream1)
test(net, mixer) | gpl-2.0 | -8,458,553,163,728,513,000 | 28.296296 | 118 | 0.670742 | false | 3.308229 | false | false | false |
olivierverdier/sfepy | sfepy/postprocess/dataset_manager.py | 1 | 9486 | """
Code to help with managing a TVTK data set in Pythonic ways.
"""
# Author: Prabhu Ramachandran <[email protected]>
# Copyright (c) 2008, Enthought, Inc.
# License: BSD Style.
from enthought.traits.api import (HasTraits, Instance, Array, Str,
Property, Dict)
from enthought.tvtk.api import tvtk
from enthought.tvtk.array_handler import array2vtk
######################################################################
# Utility functions.
######################################################################
def get_array_type(arr):
"""Returns if the array is a scalar ('scalars'), vector
('vectors') or tensor ('tensors'). It looks at the number of
components to decide. If it has a wierd number of components it
returns the empty string.
"""
n = arr.number_of_components
    ret = {1: 'scalars', 3: 'vectors', 4: 'scalars', 9: 'tensors'}
return ret.get(n) or ''
def get_attribute_list(data):
""" Gets scalar, vector and tensor information from the given data
(either cell or point data).
"""
attr = {'scalars':[], 'vectors':[], 'tensors':[]}
if data is not None:
n = data.number_of_arrays
for i in range(n):
name = data.get_array_name(i)
t = get_array_type(data.get_array(i))
if len(t) > 0 and name is not None:
attr[t].extend([name])
def _mk_first(lst, value):
"""Makes the specified `value` the first item in `lst`."""
lst.remove(value)
lst.insert(0, value)
attr1 = attr.copy()
for a in attr:
v = getattr(data, a)
if v is not None:
name = v.name
if name is not None:
try:
_mk_first(attr[a], v.name)
except ValueError:
# Sometimes we have a multi-component scalar.
attr1[a].insert(0, name)
return attr1
def get_all_attributes(obj):
"""Gets the scalar, vector and tensor attributes that are
available in the given VTK data object.
"""
point_attr = get_attribute_list(obj.point_data)
cell_attr = get_attribute_list(obj.cell_data)
return point_attr, cell_attr
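# Example usage (added, illustrative only):
#
#     point_attr, cell_attr = get_all_attributes(dataset)
#     # point_attr might look like:
#     #   {'scalars': ['temperature'], 'vectors': ['velocity'], 'tensors': []}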
################################################################################
# `DatasetManager` class.
################################################################################
class DatasetManager(HasTraits):
# The TVTK dataset we manage.
dataset = Instance(tvtk.DataSet)
# Our output, this is the dataset modified by us with different
# active arrays.
output = Property(Instance(tvtk.DataSet))
# The point scalars for the dataset. You may manipulate the arrays
# in-place. However adding new keys in this dict will not set the
# data in the `dataset` for that you must explicitly call
# `add_array`.
point_scalars = Dict(Str, Array)
# Point vectors.
point_vectors = Dict(Str, Array)
# Point tensors.
point_tensors = Dict(Str, Array)
# The cell scalars for the dataset.
cell_scalars = Dict(Str, Array)
cell_vectors = Dict(Str, Array)
cell_tensors = Dict(Str, Array)
# This filter allows us to change the attributes of the data
# object and will ensure that the pipeline is properly taken care
# of. Directly setting the array in the VTK object will not do
# this.
_assign_attribute = Instance(tvtk.AssignAttribute, args=(),
allow_none=False)
######################################################################
# Public interface.
######################################################################
def add_array(self, array, name, category='point'):
"""
Add an array to the dataset to specified category ('point' or
'cell').
"""
assert len(array.shape) <= 2, "Only 2D arrays can be added."
data = getattr(self.dataset, '%s_data'%category)
if len(array.shape) == 2:
assert array.shape[1] in [1, 3, 4, 9], \
"Only Nxm arrays where (m in [1,3,4,9]) are supported"
va = tvtk.to_tvtk(array2vtk(array))
va.name = name
data.add_array(va)
mapping = {1:'scalars', 3: 'vectors', 4: 'scalars',
9: 'tensors'}
dict = getattr(self, '%s_%s'%(category,
mapping[array.shape[1]]))
dict[name] = array
else:
va = tvtk.to_tvtk(array2vtk(array))
va.name = name
data.add_array(va)
dict = getattr(self, '%s_scalars'%(category))
dict[name] = array
def remove_array(self, name, category='point'):
"""Remove an array by its name and optional category (point and
cell). Returns the removed array.
"""
type = self._find_array(name, category)
data = getattr(self.dataset, '%s_data'%category)
data.remove_array(name)
d = getattr(self, '%s_%s'%(category, type))
return d.pop(name)
def rename_array(self, name1, name2, category='point'):
"""Rename a particular array from `name1` to `name2`.
"""
type = self._find_array(name1, category)
data = getattr(self.dataset, '%s_data'%category)
arr = data.get_array(name1)
arr.name = name2
d = getattr(self, '%s_%s'%(category, type))
d[name2] = d.pop(name1)
def activate(self, name, category='point'):
"""Make the specified array the active one.
"""
type = self._find_array(name, category)
self._activate_data_array(type, category, name)
def update(self):
"""Update the dataset when the arrays are changed.
"""
self.dataset.modified()
self._assign_attribute.update()
######################################################################
# Non-public interface.
######################################################################
def _dataset_changed(self, value):
self._setup_data()
self._assign_attribute.input = value
def _get_output(self):
return self._assign_attribute.output
def _setup_data(self):
"""Updates the arrays from what is available in the input data.
"""
input = self.dataset
pnt_attr, cell_attr = get_all_attributes(input)
self._setup_data_arrays(cell_attr, 'cell')
self._setup_data_arrays(pnt_attr, 'point')
def _setup_data_arrays(self, attributes, d_type):
"""Given the dict of the attributes from the
`get_all_attributes` function and the data type (point/cell)
data this will setup the object and the data.
"""
attrs = ['scalars', 'vectors', 'tensors']
aa = self._assign_attribute
input = self.dataset
data = getattr(input, '%s_data'%d_type)
for attr in attrs:
values = attributes[attr]
# Get the arrays from VTK, create numpy arrays and setup our
# traits.
arrays = {}
for name in values:
va = data.get_array(name)
npa = va.to_array()
                # Now test if changes to the numpy array are reflected
                # in the VTK array; if they are, we are set, else we
                # have to set the VTK array back to the numpy array.
if len(npa.shape) > 1:
old = npa[0,0]
npa[0][0] = old - 1
if abs(va[0][0] - npa[0,0]) > 1e-8:
va.from_array(npa)
npa[0][0] = old
else:
old = npa[0]
npa[0] = old - 1
if abs(va[0] - npa[0]) > 1e-8:
va.from_array(npa)
npa[0] = old
arrays[name] = npa
setattr(self, '%s_%s'%(d_type, attr), arrays)
def _activate_data_array(self, data_type, category, name):
"""Activate (or deactivate) a particular array.
Given the nature of the data (scalars, vectors etc.) and the
type of data (cell or points) it activates the array given by
its name.
Parameters:
-----------
data_type: one of 'scalars', 'vectors', 'tensors'
category: one of 'cell', 'point'.
name: string of array name to activate.
"""
input = self.dataset
data = None
data = getattr(input, category + '_data')
method = getattr(data, 'set_active_%s'%data_type)
if len(name) == 0:
# If the value is empty then we deactivate that attribute.
method(None)
else:
aa = self._assign_attribute
method(name)
aa.assign(name, data_type.upper(), category.upper() +'_DATA')
aa.update()
def _find_array(self, name, category='point'):
"""Return information on which kind of attribute contains the
specified named array in a particular category."""
        kinds = ('scalars', 'vectors', 'tensors')
        for kind in kinds:
            attr = '%s_%s'%(category, kind)
            d = getattr(self, attr)
            if name in d:
                return kind
raise KeyError('No %s array named %s available in dataset'
%(category, name))
| bsd-3-clause | -8,936,205,743,073,525,000 | 35.767442 | 81 | 0.519713 | false | 4.127937 | false | false | false |
iproduct/course-social-robotics | 11-dnn-keras/venv/Lib/site-packages/matplotlib/sphinxext/mathmpl.py | 1 | 3759 | import hashlib
from pathlib import Path
from docutils import nodes
from docutils.parsers.rst import Directive, directives
import sphinx
import matplotlib as mpl
from matplotlib import cbook
from matplotlib.mathtext import MathTextParser
mathtext_parser = MathTextParser("Bitmap")
# Define LaTeX math node:
class latex_math(nodes.General, nodes.Element):
pass
def fontset_choice(arg):
return directives.choice(arg, MathTextParser._font_type_mapping)
def math_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
i = rawtext.find('`')
latex = rawtext[i+1:-1]
node = latex_math(rawtext)
node['latex'] = latex
node['fontset'] = options.get('fontset', 'cm')
return [node], []
math_role.options = {'fontset': fontset_choice}
class MathDirective(Directive):
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {'fontset': fontset_choice}
def run(self):
latex = ''.join(self.content)
node = latex_math(self.block_text)
node['latex'] = latex
node['fontset'] = self.options.get('fontset', 'cm')
return [node]
# This uses mathtext to render the expression
def latex2png(latex, filename, fontset='cm'):
latex = "$%s$" % latex
with mpl.rc_context({'mathtext.fontset': fontset}):
if Path(filename).exists():
depth = mathtext_parser.get_depth(latex, dpi=100)
else:
try:
depth = mathtext_parser.to_png(filename, latex, dpi=100)
except Exception:
cbook._warn_external(
f"Could not render math expression {latex}")
depth = 0
return depth
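# A small usage sketch (hypothetical file name): render one expression to a
# PNG at 100 dpi and get back the baseline depth in pixels.
#
#   depth = latex2png(r"\frac{a}{b}", "frac.png", fontset="cm")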
# LaTeX to HTML translation stuff:
def latex2html(node, source):
inline = isinstance(node.parent, nodes.TextElement)
latex = node['latex']
fontset = node['fontset']
name = 'math-{}'.format(
hashlib.md5((latex + fontset).encode()).hexdigest()[-10:])
destdir = Path(setup.app.builder.outdir, '_images', 'mathmpl')
destdir.mkdir(parents=True, exist_ok=True)
dest = destdir / f'{name}.png'
depth = latex2png(latex, dest, fontset)
if inline:
cls = ''
else:
cls = 'class="center" '
if inline and depth != 0:
style = 'style="position: relative; bottom: -%dpx"' % (depth + 1)
else:
style = ''
return (f'<img src="{setup.app.builder.imgpath}/mathmpl/{name}.png"'
f' {cls}{style}/>')
def setup(app):
setup.app = app
# Add visit/depart methods to HTML-Translator:
def visit_latex_math_html(self, node):
source = self.document.attributes['source']
self.body.append(latex2html(node, source))
def depart_latex_math_html(self, node):
pass
# Add visit/depart methods to LaTeX-Translator:
def visit_latex_math_latex(self, node):
inline = isinstance(node.parent, nodes.TextElement)
if inline:
self.body.append('$%s$' % node['latex'])
else:
self.body.extend(['\\begin{equation}',
node['latex'],
'\\end{equation}'])
def depart_latex_math_latex(self, node):
pass
app.add_node(latex_math,
html=(visit_latex_math_html, depart_latex_math_html),
latex=(visit_latex_math_latex, depart_latex_math_latex))
app.add_role('mathmpl', math_role)
app.add_directive('mathmpl', MathDirective)
if sphinx.version_info < (1, 8):
app.add_role('math', math_role)
app.add_directive('math', MathDirective)
metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
return metadata
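# Hedged usage note (assumed Sphinx project layout): after listing this
# extension in conf.py, the role and directive registered above become
# available in reST sources, e.g.
#
#   extensions = ['matplotlib.sphinxext.mathmpl']
#
#   Inline math :mathmpl:`\alpha > \beta`, or as a block:
#
#   .. mathmpl::
#      :fontset: stix
#
#      \int_0^1 x^2\,dx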
| gpl-2.0 | 8,906,122,994,853,540,000 | 28.833333 | 73 | 0.611865 | false | 3.721782 | false | false | false |
mitodl/bootcamp-ecommerce | profiles/utils_test.py | 1 | 2363 | """User utils tests"""
import pytest
from profiles.utils import ensure_active_user, is_duplicate_username_error, usernameify
@pytest.mark.parametrize(
"full_name,email,expected_username",
[
[" John Doe ", None, "john-doe"],
["Tabby Tabberson", None, "tabby-tabberson"],
["Àccèntèd Ñame, Ësq.", None, "àccèntèd-ñame-ësq"],
["-Dashy_St._Underscores-", None, "dashy-st-underscores"],
["Repeated-----Chars___Jr.", None, "repeated-chars-jr"],
["Numbers123 !$!@ McStrange!!##^", None, "numbers-mcstrange"],
["Кирил Френков", None, "кирил-френков"],
["年號", None, "年號"],
["abcdefghijklmnopqrstuvwxyz", None, "abcdefghijklmnopqrst"],
["ai bi cı dI eİ fI", None, "ai-bi-ci-di-ei-fi"],
["", "[email protected]", "someemail"],
],
)
def test_usernameify(mocker, full_name, email, expected_username):
"""usernameify should turn a user's name into a username, or use the email if necessary"""
# Change the username max length to 20 for test data simplicity's sake
temp_username_max_len = 20
mocker.patch("profiles.utils.USERNAME_MAX_LEN", temp_username_max_len)
patched_log_error = mocker.patch("profiles.utils.log.error")
assert usernameify(full_name, email=email) == expected_username
assert patched_log_error.called == bool(email and not full_name)
def test_usernameify_fail():
"""usernameify should raise an exception if the full name and email both fail to produce a username"""
with pytest.raises(ValueError):
assert usernameify("!!!", email="[email protected]")
@pytest.mark.parametrize(
"exception_text,expected_value",
[
["DETAILS: (username)=(ABCDEFG) already exists", True],
["DETAILS: (email)=(ABCDEFG) already exists", False],
],
)
def test_is_duplicate_username_error(exception_text, expected_value):
"""
is_duplicate_username_error should return True if the exception text provided indicates a duplicate username error
"""
assert is_duplicate_username_error(exception_text) is expected_value
def test_ensure_active_user(user):
"""
Test that ensure_active_user activates
"""
user.is_active = False
user.save()
assert not user.is_active
ensure_active_user(user)
assert user.is_active
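# To run just these tests (assuming a configured pytest environment for the
# project, including its user fixture):
#
#   pytest profiles/utils_test.py -k "usernameify or duplicate or active"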
| bsd-3-clause | -5,640,492,194,927,244,000 | 35.809524 | 118 | 0.660198 | false | 3.420354 | true | false | false |
akshayms/eho | eho/server/storage/models.py | 1 | 8401 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from uuid import uuid4
from eho.server.storage.storage import DB
class NodeTemplate(DB.Model):
__tablename__ = 'NodeTemplate'
id = DB.Column(DB.String(36), primary_key=True)
name = DB.Column(DB.String(80), unique=True, nullable=False)
node_type_id = DB.Column(DB.String(36), DB.ForeignKey('NodeType.id'),
nullable=False)
tenant_id = DB.Column(DB.String(36), nullable=False) # is it needed?
flavor_id = DB.Column(DB.String(36), nullable=False)
node_template_configs = DB.relationship('NodeTemplateConfig',
cascade="all,delete",
backref='node_template')
cluster_node_counts = DB.relationship('ClusterNodeCount',
cascade="all,delete",
backref='node_template')
nodes = DB.relationship('Node', cascade="all,delete",
backref='node_template')
def __init__(self, name, node_type_id, tenant_id, flavor_id):
self.id = uuid4().hex
self.name = name
self.node_type_id = node_type_id
self.tenant_id = tenant_id
self.flavor_id = flavor_id
def __repr__(self):
return '<NodeTemplate %s / %s>' % (self.name, self.node_type_id)
class Cluster(DB.Model):
__tablename__ = 'Cluster'
id = DB.Column(DB.String(36), primary_key=True)
name = DB.Column(DB.String(80), unique=True, nullable=False)
base_image_id = DB.Column(DB.String(36), nullable=False)
status = DB.Column(DB.String(80))
tenant_id = DB.Column(DB.String(36), nullable=False)
nodes = DB.relationship('Node', cascade="all,delete", backref='cluster')
service_urls = DB.relationship('ServiceUrl', cascade="all,delete",
backref='cluster')
node_counts = DB.relationship('ClusterNodeCount', cascade="all,delete",
backref='cluster')
# node_templates: [(node_template_id, count), ...]
def __init__(self, name, base_image_id, tenant_id, status=None):
self.id = uuid4().hex
self.name = name
self.base_image_id = base_image_id
if not status:
status = 'Starting'
self.status = status
self.tenant_id = tenant_id
def __repr__(self):
return '<Cluster %s / %s>' % (self.name, self.status)
NODE_TYPE_NODE_PROCESS = DB.Table('NodeType_NodeProcess', DB.metadata,
DB.Column('node_type_id', DB.String(36),
DB.ForeignKey('NodeType.id')),
DB.Column('node_process_id', DB.String(36),
DB.ForeignKey('NodeProcess.id')))
class NodeType(DB.Model):
__tablename__ = 'NodeType'
id = DB.Column(DB.String(36), primary_key=True)
name = DB.Column(DB.String(80), unique=True, nullable=False)
processes = DB.relationship('NodeProcess',
cascade="all,delete",
secondary=NODE_TYPE_NODE_PROCESS,
backref='node_types')
node_templates = DB.relationship('NodeTemplate', cascade="all,delete",
backref='node_type')
def __init__(self, name):
self.id = uuid4().hex
self.name = name
def __repr__(self):
return '<NodeType %s>' % self.name
class NodeProcess(DB.Model):
__tablename__ = 'NodeProcess'
id = DB.Column(DB.String(36), primary_key=True)
name = DB.Column(DB.String(80), unique=True, nullable=False)
node_process_properties = DB.relationship('NodeProcessProperty',
cascade="all,delete",
backref='node_process')
def __init__(self, name):
self.id = uuid4().hex
self.name = name
def __repr__(self):
return '<NodeProcess %s>' % self.name
class NodeProcessProperty(DB.Model):
__tablename__ = 'NodeProcessProperty'
__table_args__ = (
DB.UniqueConstraint('node_process_id', 'name'),
)
id = DB.Column(DB.String(36), primary_key=True)
node_process_id = DB.Column(DB.String(36), DB.ForeignKey('NodeProcess.id'))
name = DB.Column(DB.String(80), nullable=False)
required = DB.Column(DB.Boolean, nullable=False)
default = DB.Column(DB.String(36))
node_template_configs = DB.relationship('NodeTemplateConfig',
cascade="all,delete",
backref='node_process_property')
def __init__(self, node_process_id, name, required=True, default=None):
self.id = uuid4().hex
self.node_process_id = node_process_id
self.name = name
self.required = required
self.default = default
def __repr__(self):
return '<NodeProcessProperty %s>' % self.name
class NodeTemplateConfig(DB.Model):
__tablename__ = 'NodeTemplateConfig'
__table_args__ = (
DB.UniqueConstraint('node_template_id', 'node_process_property_id'),
)
id = DB.Column(DB.String(36), primary_key=True)
node_template_id = DB.Column(
DB.String(36),
DB.ForeignKey('NodeTemplate.id'))
node_process_property_id = DB.Column(
DB.String(36),
DB.ForeignKey('NodeProcessProperty.id'))
value = DB.Column(DB.String(36))
def __init__(self, node_template_id, node_process_property_id, value):
self.id = uuid4().hex
self.node_template_id = node_template_id
self.node_process_property_id = node_process_property_id
self.value = value
def __repr__(self):
return '<NodeTemplateConfig %s.%s / %s>' \
% (self.node_template_id, self.node_process_property_id,
self.value)
class ClusterNodeCount(DB.Model):
__tablename__ = 'ClusterNodeCount'
__table_args__ = (
DB.UniqueConstraint('cluster_id', 'node_template_id'),
)
id = DB.Column(DB.String(36), primary_key=True)
cluster_id = DB.Column(DB.String(36), DB.ForeignKey('Cluster.id'))
node_template_id = DB.Column(DB.String(36),
DB.ForeignKey('NodeTemplate.id'))
count = DB.Column(DB.Integer, nullable=False)
def __init__(self, cluster_id, node_template_id, count):
self.id = uuid4().hex
self.cluster_id = cluster_id
self.node_template_id = node_template_id
self.count = count
def __repr__(self):
return '<ClusterNodeCount %s / %s>' \
% (self.node_template_id, self.count)
class Node(DB.Model):
__tablename__ = 'Node'
# do we need own id?
vm_id = DB.Column(DB.String(36), primary_key=True)
cluster_id = DB.Column(DB.String(36), DB.ForeignKey('Cluster.id'))
node_template_id = DB.Column(DB.String(36),
DB.ForeignKey('NodeTemplate.id'))
def __init__(self, vm_id, cluster_id, node_template_id):
self.vm_id = vm_id
self.cluster_id = cluster_id
self.node_template_id = node_template_id
def __repr__(self):
return '<Node based on %s>' % self.node_template.name
class ServiceUrl(DB.Model):
__tablename__ = 'ServiceUrl'
id = DB.Column(DB.String(36), primary_key=True)
cluster_id = DB.Column(DB.String(36), DB.ForeignKey('Cluster.id'))
name = DB.Column(DB.String(80))
url = DB.Column(DB.String(80), nullable=False)
def __init__(self, cluster_id, name, url):
self.id = uuid4().hex
self.cluster_id = cluster_id
self.name = name
self.url = url
def __repr__(self):
return '<ServiceUrl %s / %s>' % (self.name, self.url)
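# A minimal usage sketch (hypothetical values; assumes an application context
# with the DB initialized): create a node type and a template referencing it.
#
#   nt = NodeType('worker')
#   DB.session.add(nt)
#   DB.session.commit()
#   tmpl = NodeTemplate('small-worker', nt.id, 'tenant-1', 'flavor-1')
#   DB.session.add(tmpl)
#   DB.session.commit()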
| apache-2.0 | 8,855,471,069,699,424,000 | 34.901709 | 79 | 0.578145 | false | 3.767265 | true | false | false |
VJftw/invoke-tools | invoke_tools/lxc/docker.py | 1 | 5987 | """
invoke_tools.lxc.docker
"""
from __future__ import print_function
import os
import json
import getpass
class Docker:
"""
Docker
"""
@staticmethod
def __print_line(line):
"""
"""
try:
line = line.decode('utf-8')
except:
print("Could not decode line")
return
try:
line = json.loads(line)
if "stream" in line:
line = line["stream"]
pass
elif "status" in line:
o = line["status"]
if "progress" in line:
o += "{0}".format(line["progress"])
if "id" in line:
o = "{0} {1}".format(line["id"], o)
if line["status"] == "Download complete" or "Pulling" in line["status"] or "Digest" in line["status"] or "Image" in line["status"] or "image" in line["status"]:
line = " {0}{1}\n".format(o, " " * 80)
else:
line = " {0}\r".format(o)
except:
pass
print(line, end="", flush=True)
@staticmethod
def pull(cli, image):
print("#")
print("# Pulling Docker image: {0}".format(image))
print("#")
for line in cli.pull(image, stream=True):
Docker.__print_line(line)
@staticmethod
def build(cli, dockerfile, tag):
print("#")
print("# Building Docker image from '{0}' with tag '{1}'".format(dockerfile, tag))
print("#")
for line in cli.build(
dockerfile=dockerfile,
pull=True,
path=".",
rm=True,
tag=tag):
Docker.__print_line(line)
print()
@staticmethod
def execute(cli, container_id, cmd):
print("#")
print("# Executing on {1}: {0}".format(cmd, container_id))
print("#")
exec_container = cli.exec_create(
container=container_id,
cmd=cmd
# user='root' if CI else 'app'
)
for line in cli.exec_start(
exec_id=exec_container.get('Id'),
stream=True):
Docker.__print_line(line)
print()
inspect = cli.exec_inspect(exec_container.get('Id'))
exit_code = inspect.get('ExitCode')
if exit_code != 0:
cli.stop(container_id)
cli.remove_container(container_id)
raise Exception("Exit Code: {0}\n{1}".format(exit_code, inspect))
@staticmethod
def clean(cli, objs):
print("#")
print("# Cleaning files & directories: {0}".format(objs))
print("#")
cli.pull("alpine:latest")
container = cli.create_container(
image='alpine:latest',
volumes=[
'{0}:/app'.format(os.getcwd())
],
working_dir='/app',
host_config=cli.create_host_config(binds=[
'{0}:/app'.format(os.getcwd())
]),
command='/bin/sh -c "rm -rf {0}"'.format(" ".join(objs))
)
response = cli.start(container=container.get('Id'))
cli.wait(container=container.get('Id'), timeout=600)
print(response)
cli.remove_container(container.get('Id'))
print()
@staticmethod
def push(cli, tags):
"""
"""
for tag in tags:
print("#")
print("# Pushing {0} to Registry".format(tag))
print("#")
for line in cli.push(tag, stream=True):
Docker.__print_line(line)
print()
@staticmethod
def login(cli):
"""
"""
if os.getenv('DOCKER_EMAIL') and os.getenv('DOCKER_USERNAME') and os.getenv('DOCKER_PASSWORD'):
email = os.getenv('DOCKER_EMAIL')
username = os.getenv('DOCKER_USERNAME')
password = os.getenv('DOCKER_PASSWORD')
else:
email = input('Docker email:')
username = input('Docker username:')
password = getpass.getpass('Docker password:')
cli.login(
username=username,
email=email,
password=password,
registry='https://index.docker.io/v1/'
)
print()
return cli, username
@staticmethod
def run(
cli,
tag,
command,
volumes=None,
working_dir="",
environment=None,
links=None,
detach=False,
privileged=False):
"""
"""
if environment is None:
environment = {}
if volumes is None:
volumes = []
print("#")
print("# Running on {1}: {0}".format(command, tag))
print("#")
params = dict()
params['image'] = tag
params['command'] = command
if len(volumes) > 0:
params['volumes'] = volumes
params['host_config'] = cli.create_host_config(binds=volumes, links=links, privileged=privileged)
if working_dir != "":
params['working_dir'] = working_dir
if environment:
params['environment'] = environment
if links:
params['host_config'] = cli.create_host_config(binds=volumes, links=links, privileged=privileged)
if privileged:
params['host_config'] = cli.create_host_config(binds=volumes, links=links, privileged=privileged)
container = cli.create_container(**params)
cli.start(container.get('Id'))
if detach:
return container
for line in cli.attach(container=container.get('Id'), stream=True, logs=True):
Docker.__print_line(line)
exit_code = cli.wait(container=container.get('Id'))
cli.remove_container(container.get('Id'))
if exit_code != 0:
raise Exception("Exit Code: {0}".format(exit_code))
| mit | 3,646,104,151,735,492,000 | 28.063107 | 176 | 0.496743 | false | 4.258179 | true | false | false |
msoula/cosmicbox | board/cosmicbox/cosmicboxfs/lib/psogen.py | 1 | 4151 | #!/usr/bin/python
# Modified ShoutBox Library
# enables further modifications for the ShoutBox
# Run without arguments to generate the HTML file
# Run the following to enter a new line from command line
# psogen.py input Anonymous default "Text"
import os, re
import messages, broadcast
datafilename = os.environ["SHOUTBOX_CHATFILE"]
htmlfilename = os.environ["SHOUTBOX_GEN_HTMLFILE"]
style=("name { font-weight: bold; font-family:Tahoma } "
"data { font-family: Tahoma } "
"data.def { color: #000000 } "
"data.blue { color: #0000FF } "
"data.green { color: #008000 } "
"data.orange { color: #FF8040 } "
"data.red { color: #FF0000 }")
try:
raw_dest = os.environ["SHOUTBOX_BROADCAST_DESTINATIONS"]
finished_dest = re.sub ( '#' , '"' , raw_dest )
broadcast_destination = eval ( finished_dest )
except KeyError:
broadcast_destination = False
#--------------
# Generates Shoutbox-HTML-Frame ...
# Imports:
# content - String containing preformatted data
#--------------
def generate_html(content):
htmlstring = "<html><head><meta http-equiv='cache-control' content='no-cache'><meta name='GENERATOR' content='PyShoutOut'><title>Shout-Out Data</title>"
htmlstring += "<style type='text/css'>" + style + "</style></head><body>"
htmlstring += content
htmlstring += "</body></html>"
return htmlstring
#--------------
# Generates HTML data based on given content and writes it to the static HTML file
# Imports:
# content - String containing preformatted data
#--------------
def generate_html_into_file(content):
htmlstring = generate_html ( content )
htmlfile = open( htmlfilename , 'w' )
htmlfile.write( htmlstring )
htmlfile.close()
#--------------
# Generates HTML data based on the content of datafilename
#--------------
def generate_html_from_file():
old = read_data_file()
generate_html_into_file( old )
#--------------
# Generates and Displays generated HTML
#--------------
def generate_html_to_display_from_file():
old = read_data_file()
htmlstring = generate_html ( old )
print htmlstring
#--------------
# Reads the data file named by datafilename
#--------------
def read_data_file():
datafile = open(datafilename, 'r')
old = datafile.read()
datafile.close()
return old
#--------------
# Function for saving new Shoubox-Content & Regenerate static HTML file -- usually called by HTML-Form
#--------------
def process_form( name , indata , color ):
content = save_input( name , indata , color )
if broadcast_destination == False:
generate_html_into_file ( content )
#--------------
# Acutally Saves SB-Content to datafile
#--------------
def save_input( name , indata , color ):
content = prepare_line ( name, indata, color )
if broadcast_destination != False:
return writeToNetwork( content , broadcast_destination )
else:
return writeToDisk ( content )
def writeToNetwork ( content , broadcast_destination ):
message = messages.shoutbox_message()
message.set(content)
casting = broadcast.broadcast( )
casting.setDestination(broadcast_destination)
casting.set( message.get_message() )
casting.send()
return None
def writeToDisk ( content ):
old = read_data_file()
finalcontent = content + old
datafile = open(datafilename, 'r+')
datafile.write(finalcontent)
datafile.close()
return finalcontent
def prepare_line ( name, indata, color ):
datapass = re.sub("<", "<", indata)
data = re.sub(">", ">", datapass)
content = "<name>" + name + ":</name> <data class='" + color + "'>" + data + "</data><br>\n"
return content
#--------------
# Testing or Generating static HTML File
#--------------
if __name__ == "__main__":
import sys
if sys.argv.count("input") >= 1 :
save_input( sys.argv[2] , sys.argv[3] , sys.argv[4] )
generate_html_to_display_from_file()
print "Entered Text."
generate_html_from_file ()
print "Generated HTML-Shoutbox File."
| gpl-2.0 | -128,107,883,562,125,100 | 28.65 | 158 | 0.611419 | false | 3.593939 | false | false | false |
geosim/QAD | qad_joindisjoin_cmd.py | 1 | 25015 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
QAD Quantum Aided Design plugin
comando JOIN e DISJOIN per aggregare e disgregare le geometrie
(multipoint, multilinestring, poligon e multipoligon)
-------------------
begin : 2016-04-06
copyright : iiiii
email : hhhhh
developers : bbbbb aaaaa ggggg
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from qad_generic_cmd import QadCommandClass
from qad_snapper import *
from qad_getpoint import *
from qad_ssget_cmd import QadSSGetClass
from qad_msg import QadMsg
from qad_textwindow import *
import qad_utils
import qad_layer
from qad_variables import *
from qad_entsel_cmd import QadEntSelClass
# Class that manages the JOIN command
class QadJOINCommandClass(QadCommandClass):
def instantiateNewCmd(self):
""" istanzia un nuovo comando dello stesso tipo """
return QadJOINCommandClass(self.plugIn)
def getName(self):
return QadMsg.translate("Command_list", "JOIN")
def getEnglishName(self):
return "JOIN"
def connectQAction(self, action):
QObject.connect(action, SIGNAL("triggered()"), self.plugIn.runJOINCommand)
def getIcon(self):
return QIcon(":/plugins/qad/icons/join.png")
def getNote(self):
      # set the explanatory note of the command
return QadMsg.translate("Command_JOIN", "Join existing geometries.")
def __init__(self, plugIn):
QadCommandClass.__init__(self, plugIn)
self.entity = QadEntity()
self.SSGetClass = None
self.entSelClass = None
def __del__(self):
QadCommandClass.__del__(self)
if self.SSGetClass is not None: del self.SSGetClass
def getPointMapTool(self, drawMode = QadGetPointDrawModeEnum.NONE):
      if self.step == 1: # while in the entity selection phase
         return self.entSelClass.getPointMapTool(drawMode)
      elif self.step == 2: # while in the entity group selection phase
         return self.SSGetClass.getPointMapTool()
else:
return QadCommandClass.getPointMapTool(self, drawMode)
def getCurrentContextualMenu(self):
      if self.step == 1: # while in the entity selection phase
         return self.entSelClass.getCurrentContextualMenu()
      elif self.step == 2: # while in the entity group selection phase
         return self.SSGetClass.getCurrentContextualMenu()
else:
return self.contextualMenu
def reinitSSGetClass(self):
if self.SSGetClass is not None: del self.SSGetClass
self.SSGetClass = QadSSGetClass(self.plugIn)
self.SSGetClass.onlyEditableLayers = True
      self.SSGetClass.checkDimLayers = False # discard dimensions
geometryType = self.entity.layer.geometryType()
if geometryType == QGis.Point:
self.SSGetClass.checkPointLayer = True
self.SSGetClass.checkLineLayer = False
self.SSGetClass.checkPolygonLayer = False
elif geometryType == QGis.Line:
self.SSGetClass.checkPointLayer = False
self.SSGetClass.checkLineLayer = True
self.SSGetClass.checkPolygonLayer = True
elif geometryType == QGis.Polygon:
self.SSGetClass.checkPointLayer = False
self.SSGetClass.checkLineLayer = True
self.SSGetClass.checkPolygonLayer = True
#============================================================================
# addEntitySetToPoint
#============================================================================
def addEntitySetToPoint(self, entitySet, removeOriginals = True):
"""
      Adds the entity set to the point being modified
"""
geom = self.entity.getGeometry()
layerList = []
layerList.append(self.entity.layer)
for layerEntitySet in entitySet.layerEntitySetList:
layer = layerEntitySet.layer
if layer.geometryType() != QGis.Point:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
if removeOriginals: layerList.append(layer)
coordTransform = QgsCoordinateTransform(layer.crs(), self.entity.layer.crs())
for featureId in layerEntitySet.featureIds:
            # if the feature is the one of the entity being edited, it is an error
if layer.id() == self.entity.layerId() and featureId == self.entity.featureId:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = layerEntitySet.getFeature(featureId)
            # transform the geometry into the CRS of the layer of the entity being modified
geomToAdd = f.geometry()
geomToAdd.transform(coordTransform)
simplifiedGeoms = qad_utils.asPointOrPolyline(geomToAdd)
for simplifiedGeom in simplifiedGeoms:
point = simplifiedGeom.asPoint()
               # add a part
if geom.addPart([point]) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
layerList = entitySet.getLayerList()
layerList.append(self.entity.layer)
self.plugIn.beginEditCommand("Feature edited", layerList)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
if removeOriginals:
for layerEntitySet in entitySet.layerEntitySetList:
if qad_layer.deleteFeaturesToLayer(self.plugIn, layerEntitySet.layer, layerEntitySet.featureIds, False) == False:
self.plugIn.destroyEditCommand()
return
self.plugIn.endEditCommand()
return True
#============================================================================
# addEntitySetToPolyline
#============================================================================
def addEntitySetToPolyline(self, entitySet, removeOriginals = True):
"""
Aggiunge il set di entità alla polilinea da modificare
"""
geom = self.entity.getGeometry()
layerList = []
layerList.append(self.entity.layer)
for layerEntitySet in entitySet.layerEntitySetList:
layer = layerEntitySet.layer
if layer.geometryType() != QGis.Polygon and layer.geometryType() != QGis.Line:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
if removeOriginals: layerList.append(layer)
coordTransform = QgsCoordinateTransform(layer.crs(), self.entity.layer.crs())
for featureId in layerEntitySet.featureIds:
            # if the feature is the one of the entity being edited, it is an error
if layer.id() == self.entity.layerId() and featureId == self.entity.featureId:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = layerEntitySet.getFeature(featureId)
            # transform the geometry into the CRS of the layer of the entity being modified
geomToAdd = f.geometry()
geomToAdd.transform(coordTransform)
            # Reduce the geometry to points or polylines
simplifiedGeoms = qad_utils.asPointOrPolyline(geomToAdd)
for simplifiedGeom in simplifiedGeoms:
               points = simplifiedGeom.asPolyline() # vector of points
               # add a part
if geom.addPart(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
layerList = entitySet.getLayerList()
layerList.append(self.entity.layer)
self.plugIn.beginEditCommand("Feature edited", layerList)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
if removeOriginals:
for layerEntitySet in entitySet.layerEntitySetList:
if qad_layer.deleteFeaturesToLayer(self.plugIn, layerEntitySet.layer, layerEntitySet.featureIds, False) == False:
self.plugIn.destroyEditCommand()
return
self.plugIn.endEditCommand()
return True
#============================================================================
# addEntitySetToPolygon
#============================================================================
def addEntitySetToPolygon(self, entitySet, removeOriginals = True):
"""
Aggiunge il set di entità al poligono da modificare
"""
geom = self.entity.getGeometry()
layerList = []
layerList.append(self.entity.layer)
for layerEntitySet in entitySet.layerEntitySetList:
layer = layerEntitySet.layer
if layer.geometryType() != QGis.Polygon and layer.geometryType() != QGis.Line:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
if removeOriginals: layerList.append(layer)
coordTransform = QgsCoordinateTransform(layer.crs(), self.entity.layer.crs())
for featureId in layerEntitySet.featureIds:
            # if the feature is the one of the entity being edited, it is an error
if layer.id() == self.entity.layerId() and featureId == self.entity.featureId:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = layerEntitySet.getFeature(featureId)
            # transform the geometry into the CRS of the layer of the polygon being modified
geomToAdd = f.geometry()
geomToAdd.transform(coordTransform)
            # if the polygon is contained in the geometry being added
if geomToAdd.contains(geom):
               # Reduce the geometry to points or polylines
simplifiedGeoms = qad_utils.asPointOrPolyline(geom)
               # it must be a polygon without rings
if len(simplifiedGeoms) != 1 or simplifiedGeoms[0].wkbType() != QGis.WKBLineString:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
               points = simplifiedGeoms[0].asPolyline() # vector of points
               # add an island (inner ring)
if geomToAdd.addRing(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
del geom
geom = QgsGeometry.fromPolygon(geomToAdd.asPolygon())
            else: # if the polygon is not contained in the geometry being added
               # Reduce the geometry to points or polylines
simplifiedGeoms = qad_utils.asPointOrPolyline(geomToAdd)
for simplifiedGeom in simplifiedGeoms:
                  points = simplifiedGeom.asPolyline() # vector of points
                  # if the geometry being added is contained in the polygon
                  if geom.contains(QgsGeometry.fromPolyline(points)):
                     # add an island (inner ring)
                     if geom.addRing(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
else:
                     # add a part
if geom.addPart(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
layerList = entitySet.getLayerList()
layerList.append(self.entity.layer)
self.plugIn.beginEditCommand("Feature edited", layerList)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
if removeOriginals:
for layerEntitySet in entitySet.layerEntitySetList:
if qad_layer.deleteFeaturesToLayer(self.plugIn, layerEntitySet.layer, layerEntitySet.featureIds, False) == False:
self.plugIn.destroyEditCommand()
return
self.plugIn.endEditCommand()
return True
#============================================================================
# waitForEntsel
#============================================================================
def waitForEntsel(self, msgMapTool, msg):
if self.entSelClass is not None:
del self.entSelClass
self.step = 1
self.entSelClass = QadEntSelClass(self.plugIn)
self.entSelClass.msg = QadMsg.translate("Command_JOIN", "Select object to join to: ")
      # discard selection of dimensions
self.entSelClass.checkDimLayers = False
self.entSelClass.onlyEditableLayers = True
self.entSelClass.deselectOnFinish = True
self.entSelClass.run(msgMapTool, msg)
#============================================================================
# waitForSSsel
#============================================================================
def waitForSSsel(self, msgMapTool, msg):
self.reinitSSGetClass()
self.step = 2
self.showMsg(QadMsg.translate("Command_JOIN", "\nSelect objects to join: "))
self.SSGetClass.run(msgMapTool, msg)
def run(self, msgMapTool = False, msg = None):
if self.plugIn.canvas.mapSettings().destinationCrs().geographicFlag():
self.showMsg(QadMsg.translate("QAD", "\nThe coordinate reference system of the project must be a projected coordinate system.\n"))
         return True # end of command
if self.step == 0:
         self.waitForEntsel(msgMapTool, msg) # select the object to join to
         return False # continue
#=========================================================================
      # RESPONSE TO THE SELECTION OF THE ENTITY TO MODIFY
elif self.step == 1:
if self.entSelClass.run(msgMapTool, msg) == True:
if self.entSelClass.entity.isInitialized():
self.entity.set(self.entSelClass.entity)
self.waitForSSsel(msgMapTool, msg)
else:
               if self.entSelClass.canceledByUsr == True: # end of command
return True
self.showMsg(QadMsg.translate("QAD", "No geometries in this position."))
self.waitForEntsel(msgMapTool, msg)
         return False # continue
#=========================================================================
      # RESPONSE TO THE SELECTION SET REQUEST (from step = 1)
elif self.step == 2:
if self.SSGetClass.run(msgMapTool, msg) == True:
if self.SSGetClass.entitySet.count() > 0:
geometryType = self.entity.layer.geometryType()
if geometryType == QGis.Point:
self.addEntitySetToPoint(self.SSGetClass.entitySet)
elif geometryType == QGis.Line:
self.addEntitySetToPolyline(self.SSGetClass.entitySet)
elif geometryType == QGis.Polygon:
self.addEntitySetToPolygon(self.SSGetClass.entitySet)
return True
self.waitForSSsel(msgMapTool, msg)
return False
# Class that manages the DISJOIN command
class QadDISJOINCommandClass(QadCommandClass):
def instantiateNewCmd(self):
""" istanzia un nuovo comando dello stesso tipo """
return QadDISJOINCommandClass(self.plugIn)
def getName(self):
return QadMsg.translate("Command_list", "DISJOIN")
def getEnglishName(self):
return "DISJOIN"
def connectQAction(self, action):
QObject.connect(action, SIGNAL("triggered()"), self.plugIn.runDISJOINCommand)
def getIcon(self):
return QIcon(":/plugins/qad/icons/disjoin.png")
def getNote(self):
# impostare le note esplicative del comando
return QadMsg.translate("Command_DISJOIN", "Disjoin existing geometries.")
def __init__(self, plugIn):
QadCommandClass.__init__(self, plugIn)
self.entity = QadEntity()
self.SSGetClass = QadSSGetClass(plugIn)
self.SSGetClass.onlyEditableLayers = False
      self.SSGetClass.checkDimLayers = False # discard dimensions
self.entSelClass = None
self.currSubGeom = None
self.currAtSubGeom = None
def __del__(self):
QadCommandClass.__del__(self)
del self.SSGetClass
def getPointMapTool(self, drawMode = QadGetPointDrawModeEnum.NONE):
      if self.step == 1: # while in the entity selection phase
return self.entSelClass.getPointMapTool(drawMode)
else:
return QadCommandClass.getPointMapTool(self, drawMode)
def getCurrentContextualMenu(self):
      if self.step == 1: # while in the entity selection phase
return self.entSelClass.getCurrentContextualMenu()
else:
return self.contextualMenu
#============================================================================
# setCurrentSubGeom
#============================================================================
def setCurrentSubGeom(self, entSelClass):
"""
      Sets the current sub-geometry
"""
self.currSubGeom = None
self.currAtSubGeom = None
      # check that an entity has been selected
if entSelClass.entity.isInitialized() == False:
self.showMsg(QadMsg.translate("QAD", "No geometries in this position."))
return False
      # check that it has been selected through a point
      # (to understand which sub-geometry has been selected)
if entSelClass.point is None: return False
self.entity.set(entSelClass.entity)
geom = self.layerToMapCoordinates(entSelClass.entity.layer, entSelClass.entity.getGeometry())
      # returns a tuple (<the squared cartesian distance>,
# <minDistPoint>
# <afterVertex>
# <leftOf>)
dummy = qad_utils.closestSegmentWithContext(entSelClass.point, geom)
if dummy[2] is None:
return False
      # returns the sub-geometry at vertex <atVertex> and its position in the geometry (0-based)
self.currSubGeom, self.currAtSubGeom = qad_utils.getSubGeomAtVertex(geom, dummy[2])
if self.currSubGeom is None or self.currAtSubGeom is None:
self.currSubGeom = None
self.currAtSubGeom = None
return False
return True
#============================================================================
# disjoinCurrentSubGeomToPolygon
#============================================================================
def disjoinCurrentSubGeomToPolygon(self):
"""
      Disjoins the current sub-geometry of the polygon being modified, creating a new entity
"""
layer = self.entity.layer
      # the position is expressed as a list (<main obj. index> [<secondary obj. index>])
part = self.currAtSubGeom[0]
ring = self.currAtSubGeom[1] if len(self.currAtSubGeom) == 2 else None
geom = self.entity.getGeometry()
wkbType = geom.wkbType()
if wkbType == QGis.WKBMultiPoint or wkbType == QGis.WKBMultiLineString:
         if geom.deletePart(part) == False: # detach a part
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
newGeom = self.mapToLayerCoordinates(layer, self.currSubGeom)
elif wkbType == QGis.WKBPolygon or wkbType == QGis.WKBMultiPolygon:
         if ring is not None: # detach an island (inner ring)
            if geom.deleteRing(ring + 1, part) == False: # delete an island (Ring 0 is outer ring and can't be deleted)
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
newGeom = QgsGeometry.fromPolygon([self.mapToLayerCoordinates(layer, self.currSubGeom).asPolyline()])
         else: # detach a part
if wkbType == QGis.WKBPolygon:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
newGeom = QgsGeometry.fromPolygon([self.mapToLayerCoordinates(layer, self.currSubGeom).asPolyline()])
ring = 0
ringGeom = qad_utils.getSubGeomAt(geom, [part, ring])
            # if the part has islands
while ringGeom is not None:
               # add an island (inner ring)
               points = ringGeom.asPolyline() # vector of points
if newGeom.addRing(points) != 0: # 0 in case of success
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
ring = ring + 1
ringGeom = qad_utils.getSubGeomAt(geom, [part, ring])
            if geom.deletePart(part) == False: # delete a part
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
else:
self.showMsg(QadMsg.translate("QAD", "Invalid object."))
return False
f = self.entity.getFeature()
f.setGeometry(geom)
self.plugIn.beginEditCommand("Feature edited", self.entity.layer)
# plugIn, layer, feature, refresh, check_validity
if qad_layer.updateFeatureToLayer(self.plugIn, self.entity.layer, f, False, False) == False:
self.plugIn.destroyEditCommand()
return False
      # Add the new feature
newF = QgsFeature(f)
newF.setGeometry(newGeom)
if qad_layer.addFeatureToLayer(self.plugIn, self.entity.layer, newF, None, False, False) == False:
self.plugIn.destroyEditCommand()
return False
self.plugIn.endEditCommand()
return True
#============================================================================
# waitForEntsel
#============================================================================
def waitForEntsel(self, msgMapTool, msg):
if self.entSelClass is not None:
del self.entSelClass
self.step = 1
self.entSelClass = QadEntSelClass(self.plugIn)
self.entSelClass.msg = QadMsg.translate("Command_DISJOIN", "Select object to disjoin: ")
      # discard selection of dimensions
self.entSelClass.checkDimLayers = False
self.entSelClass.onlyEditableLayers = True
self.entSelClass.deselectOnFinish = True
self.entSelClass.run(msgMapTool, msg)
def run(self, msgMapTool = False, msg = None):
if self.plugIn.canvas.mapSettings().destinationCrs().geographicFlag():
self.showMsg(QadMsg.translate("QAD", "\nThe coordinate reference system of the project must be a projected coordinate system.\n"))
         return True # end of command
if self.step == 0:
         self.waitForEntsel(msgMapTool, msg) # select the object to disjoin
         return False # continue
#=========================================================================
      # RESPONSE TO THE SELECTION OF THE ENTITY TO MODIFY
elif self.step == 1:
if self.entSelClass.run(msgMapTool, msg) == True:
if self.setCurrentSubGeom(self.entSelClass) == True:
if self.disjoinCurrentSubGeomToPolygon() == True:
return True
else:
               if self.entSelClass.canceledByUsr == True: # end of command
return True
self.showMsg(QadMsg.translate("QAD", "No geometries in this position."))
self.waitForEntsel(msgMapTool, msg)
         return False # continue | gpl-3.0 | -2,137,112,879,578,955,000 | 39.693811 | 139 | 0.577587 | false | 4.073199 | false | false | false
SUSE/kiwi | kiwi/utils/sysconfig.py | 1 | 2856 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
class SysConfig:
"""
**Read and Write sysconfig style files**
:param str source_file: source file path
"""
def __init__(self, source_file):
self.source_file = source_file
self.data_dict = {}
self.data_list = []
self._read()
def __setitem__(self, key, value):
if key not in self.data_dict:
self.data_list.append(key)
self.data_dict[key] = value
def __getitem__(self, key):
return self.data_dict[key]
def __contains__(self, key):
return key in self.data_dict
def get(self, key):
return self.data_dict.get(key)
def write(self):
"""
        Write back the source file with changed content, preserving line order
"""
with open(self.source_file, 'w') as source:
for line in self.data_list:
if line in self.data_dict:
key = line
value = self.data_dict[key]
source.write('{0}={1}'.format(key, value))
else:
source.write(line)
source.write(os.linesep)
def _read(self):
"""
Read file into a list and a key/value dictionary
        Only lines which are not considered a comment and
        which contain the structure key=value are parsed into
        the key/value dictionary. In order to keep the order
        of lines, a list is stored too. Lines matching the
        key/value format are stored in the list with their key
        as a placeholder.
"""
if os.path.exists(self.source_file):
with open(self.source_file) as source:
for line in source.readlines():
line = line.strip()
if '#' not in line and '=' in line:
elements = line.split('=')
key = elements.pop(0).strip()
value = '='.join(elements)
self.data_dict[key] = value
self.data_list.append(key)
else:
self.data_list.append(line)
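# A minimal usage sketch (hypothetical file path): read a sysconfig file,
# change one key and write it back, preserving the original line order.
#
#   config = SysConfig('/etc/sysconfig/bootloader')
#   if 'LOADER_TYPE' in config:
#       config['LOADER_TYPE'] = '"grub2"'
#   config.write()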
| gpl-3.0 | -2,067,535,882,005,381,400 | 33 | 70 | 0.572129 | false | 4.243685 | false | false | false |
lucadealfaro/crowdranker | controllers/feedback.py | 1 | 10966 | # -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
"""Produces a list of the feedback obtained for a given venue,
or for all venues."""
venue_id = request.args(0)
if venue_id == 'all':
q = (db.submission.user == get_user_email())
else:
q = ((db.submission.user == get_user_email())
& (db.submission.venue_id == venue_id))
db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
db.submission.id.label = T('Submission')
db.submission.id.readable = True
db.submission.venue_id.readable = True
grid = SQLFORM.grid(q,
fields=[db.submission.id, db.submission.venue_id,
db.submission.date_created, db.submission.date_updated, ],
csv=False, details=False, create=False, editable=False, deletable=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid)
@auth.requires_login()
def view_feedback():
"""Shows detailed feedback for a user in a venue.
This controller accepts various types of arguments:
* 's', submission_id
* 'u', venue_id, username
* 'v', venue_id (in which case, shows own submission to that venue)
"""
if len(request.args) == 0:
redirect(URL('default', 'index'))
if request.args(0) == 's':
# submission_id
n_args = 2
subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
username = subm.user
elif request.args(0) == 'v':
# venue_id
n_args = 2
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = get_user_email()
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
else:
# venue_id, username
n_args = 3
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = request.args(2) or redirect(URL('default', 'index'))
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
# Checks permissions.
props = db(db.user_properties.user == get_user_email()).select().first()
if props == None:
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
is_author = (username == get_user_email())
can_view_feedback = access.can_view_feedback(c, props) or is_author
if (not can_view_feedback):
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
session.flash = T('The ratings are not yet available.')
redirect(URL('feedback', 'index', args=['all']))
# Produces the link to edit the feedback.
edit_feedback_link = None
if subm is not None and access.can_observe(c, props):
edit_feedback_link = A(T('Edit feedback'), _class='btn',
_href=URL('submission', 'edit_feedback', args=[subm.id]))
# Produces the download link.
download_link = None
if subm is not None and c.allow_file_upload and subm.content is not None:
if is_author:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_author', args=[subm.id, subm.content]))
else:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
# Submission link.
subm_link = None
if subm is not None and c.allow_link_submission:
subm_link = A(subm.link, _href=subm.link)
# Submission content and feedback.
subm_comment = None
subm_feedback = None
if subm is not None:
raw_subm_comment = keystore_read(subm.comment)
if raw_subm_comment is not None and len(raw_subm_comment) > 0:
subm_comment = MARKMIN(keystore_read(subm.comment))
raw_feedback = keystore_read(subm.feedback)
if raw_feedback is not None and len(raw_feedback) > 0:
subm_feedback = MARKMIN(raw_feedback)
# Display settings.
db.submission.percentile.readable = True
db.submission.comment.readable = True
db.submission.feedback.readable = True
if access.can_observe(c, props):
db.submission.quality.readable = True
db.submission.error.readable = True
# Reads the grade information.
submission_grade = submission_percentile = None
review_grade = review_percentile = user_reputation = None
final_grade = final_percentile = None
assigned_grade = None
if c.grades_released:
grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
if grade_info is not None:
submission_grade = represent_quality(grade_info.submission_grade, None)
submission_percentile = represent_percentage(grade_info.submission_percentile, None)
review_grade = represent_quality_10(grade_info.accuracy, None)
review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
user_reputation = represent_01_as_percentage(grade_info.reputation, None)
final_grade = represent_quality(grade_info.grade, None)
final_percentile = represent_percentage(grade_info.percentile, None)
assigned_grade = represent_quality(grade_info.assigned_grade, None)
# Makes a grid of comments.
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.rejected.readable = True
db.task.helpfulness.readable = db.task.helpfulness.writable = True
# Prevent editing the comments; the only thing editable should be the "is bogus" field.
db.task.comments.writable = False
db.task.comments.readable = True
ranking_link = None
if access.can_observe(c, props):
db.task.user.readable = True
db.task.completed_date.readable = True
links = [
dict(header=T('Review details'), body= lambda r:
A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
]
details = False
if subm is not None:
ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
args=[v, c.id], user_signature=True))
else:
user_reputation = None
links = [
dict(header=T('Review feedback'), body = lambda r:
A(T('Give feedback'), _class='btn',
_href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
]
details = False
ranking_link = None
reviews_link = None
if subm is not None:
q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
# q = (db.task.submission_id == subm.id)
else:
q = (db.task.id == -1)
grid = SQLFORM.grid(q,
fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
details = details,
csv=False, create=False, editable=False, deletable=False, searchable=False,
links=links,
args=request.args[:n_args],
maxtextlength=24,
)
return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
subm_comment=subm_comment, subm_feedback=subm_feedback,
edit_feedback_link=edit_feedback_link,
is_admin=is_user_admin(),
submission_grade=submission_grade, submission_percentile=submission_percentile,
review_grade=review_grade, review_percentile=review_percentile,
user_reputation=user_reputation,
final_grade=final_grade, final_percentile=final_percentile,
assigned_grade=assigned_grade,
venue_link=venue_link, grid=grid, ranking_link=ranking_link,
reviews_link=reviews_link)
@auth.requires_signature()
def reply_to_review():
t = db.task(request.args(0)) or redirect(URL('default', 'index'))
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.comments.readable = False
db.task.helpfulness.readable = db.task.helpfulness.writable = True
db.task.feedback.readable = db.task.feedback.writable = True
form = SQLFORM(db.task, record=t)
form.vars.feedback = keystore_read(t.feedback)
if form.process(onvalidation=validate_review_feedback(t)).accepted:
session.flash = T('Updated.')
redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
link_to_submission = A(T('View submission'), _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
review_comments = MARKMIN(keystore_read(t.comments))
return dict(form=form, link_to_submission=link_to_submission, review_comments=review_comments)
def validate_review_feedback(t):
def f(form):
if not form.errors:
feedback_id = keystore_update(t.feedback, form.vars.feedback)
form.vars.feedback = feedback_id
return f
@auth.requires_login()
def view_my_reviews():
"""This controller displays the reviews a user has written for a venue, along with
the feedback they received."""
c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
link_to_eval = A(T('My evaluation in this venue'), _class='btn',
_href=URL('feedback', 'view_feedback', args=['v', c.id]))
q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
db.task.rejected.readable = True
db.task.helpfulness.readable = True
db.task.comments.readable = True
db.task.feedback.readable = True
# To prevent chopping
db.task.submission_name.represent = represent_text_field
grid = SQLFORM.grid(q,
fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
details=True,
editable=False, deletable=False, create=False, searchable=False,
csv=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
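# Routing note (assumed standard web2py URL mapping): the controllers above
# are reached with argument layouts such as
#
#   /feedback/index/all
#   /feedback/view_feedback/v/<venue_id>
#   /feedback/view_feedback/u/<venue_id>/<username>
#   /feedback/view_my_reviews/<venue_id>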
| bsd-3-clause | -9,091,172,184,251,331,000 | 45.66383 | 138 | 0.621922 | false | 3.551166 | false | false | false |
gabrielmagno/nano-dlna | nanodlna/dlna.py | 1 | 1582 | #!/usr/bin/env python3
# encoding: UTF-8
import os
import pkgutil
import sys
from xml.sax.saxutils import escape as xmlescape
if sys.version_info.major == 3:
import urllib.request as urllibreq
else:
import urllib2 as urllibreq
def send_dlna_action(device, data, action):
action_data = pkgutil.get_data(
"nanodlna", "templates/action-{0}.xml".format(action)).decode("UTF-8")
action_data = action_data.format(**data).encode("UTF-8")
headers = {
"Content-Type": "text/xml; charset=\"utf-8\"",
"Content-Length": "{0}".format(len(action_data)),
"Connection": "close",
"SOAPACTION": "\"{0}#{1}\"".format(device["st"], action)
}
request = urllibreq.Request(device["action_url"], action_data, headers)
urllibreq.urlopen(request)
def play(files_urls, device):
video_data = {
"uri_video": files_urls["file_video"],
"type_video": os.path.splitext(files_urls["file_video"])[1][1:],
}
if "file_subtitle" in files_urls and files_urls["file_subtitle"]:
video_data.update({
"uri_sub": files_urls["file_subtitle"],
"type_sub": os.path.splitext(files_urls["file_subtitle"])[1][1:]
})
metadata = pkgutil.get_data(
"nanodlna",
"templates/metadata-video_subtitle.xml").decode("UTF-8")
video_data["metadata"] = xmlescape(metadata.format(**video_data))
else:
video_data["metadata"] = ""
send_dlna_action(device, video_data, "SetAVTransportURI")
send_dlna_action(device, video_data, "Play")
| mit | 4,625,877,574,989,528,000 | 27.763636 | 78 | 0.616941 | false | 3.358811 | false | false | false |
morevnaproject/RenderChan | renderchan/contrib/pencil2d.py | 1 | 5111 |
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import is_true_string
from distutils.version import StrictVersion
import subprocess
import tempfile
import os
from xml.etree import ElementTree
class RenderChanPencil2dModule(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
self.conf['binary']=self.findBinary("pencil2d")
self.conf["packetSize"]=0
# Extra params
self.extraParams["transparency"]="0"
self.extraParams["width"]="-1"
self.extraParams["height"]="-1"
self.extraParams["startFrame"]="1"
self.extraParams["endFrame"]="last"
self.version=StrictVersion('0.5.4') #default value
def checkRequirements(self):
RenderChanModule.checkRequirements(self)
if self.active:
# The CLI features depend on the version
with tempfile.TemporaryDirectory() as tmpPath:
# The exporting of a fake file is a workaround for older versions which just start the program when passed only -v
proc = subprocess.Popen([self.conf['binary'], "-v", "--export-sequence", os.path.join(tmpPath,"test")], stdout=subprocess.PIPE)
try:
outs, errs = proc.communicate(timeout=5)
except subprocess.TimeoutExpired:
proc.kill()
outs, errs = proc.communicate()
rc = proc.poll()
if rc == 0:
try:
for line in outs.decode("utf-8").splitlines():
if line.startswith("Pencil2D "):
# Get the version from stdout. An example of the output: "Pencil2D 0.6.0\n"
self.version = line.rstrip().split(" ")[-1]
self.version = ".".join(self.version.split(".")[0:3])
self.version = StrictVersion(self.version)
except:
self.active = False
else:
self.active = False
if self.active == False:
print("WARNING: Failed to initialize Pencil2D module. The possible reasons for that could be: missing X connection, or the version of Pencil2D on your system is unsupported (too old?). In latter case please consider to get latest version at https://www.pencil2d.org/.")
return self.active
def analyze(self, filename):
info={ "dependencies":[] }
if filename.endswith(".pcl"):
with open(filename, 'r') as f:
tree = ElementTree.parse(f)
root = tree.getroot()
info["dependencies"].extend((os.path.join(filename + ".data", element.get("src")) for element in root.findall(".//*[@src]")))
else:
# We don't actually have to do anything here because there are no dependencies and the default values
# automatically update for changes in the internal width, height, camera etc.
# This is how we would open it if we needed to
"""with ZipFile(filename) as zipdir:
with zipdir.open('main.xml') as mainfile:
tree = ElementTree.parse(mainfile)
root = tree.getroot()"""
return info
def getInputFormats(self):
if self.version >= StrictVersion('0.6.0'):
return ["pcl", "pclx"]
else:
return ["pcl"]
def getOutputFormats(self):
if self.version > StrictVersion('0.6.0'):
return ["png", "jpg", "jpeg", "tif", "tiff", "bmp", "mp4", "avi", "gif", "webm"]
elif self.version == StrictVersion('0.6.0'):
return ["png", "jpg", "jpeg", "tif", "tiff", "bmp"]
else:
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
comp = 0.0
updateCompletion(comp)
output = os.path.join(outputPath,"file")
if not os.path.exists(outputPath):
os.mkdir(outputPath)
if self.version > StrictVersion('0.6.0'):
commandline=[self.conf['binary'], filename, "-o", output, "--width", extraParams['width'], "--height", extraParams['height'], "--start", str(startFrame), "--end", str(endFrame)]
if is_true_string(extraParams['transparency']):
commandline.append("--transparency")
if ('camera' in extraParams) and (extraParams['camera']):
commandline.extend(["--camera", extraParams['camera']])
elif self.version == StrictVersion('0.6.0'):
commandline=[self.conf['binary'], filename, "--export-sequence", output, "--width", extraParams['width'], "--height", extraParams['height']]
if is_true_string(extraParams['transparency']):
commandline.append("--transparency")
else:
commandline=[self.conf['binary'], filename, "--export-sequence", output]
print(commandline)
subprocess.check_call(commandline)
updateCompletion(1.0)
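# The command line assembled above looks like this (illustrative values,
# assuming Pencil2D > 0.6.0):
#   pencil2d scene.pclx -o <outputPath>/file --width 1920 --height 1080 \
#       --start 1 --end 100 [--transparency] [--camera <name>]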
| bsd-3-clause | -8,211,885,996,274,041,000 | 43.443478 | 285 | 0.576208 | false | 4.294958 | false | false | false |
eggplantbren/NSwMCMC | python/straightline2.py | 1 | 2303 | import copy
import numpy as np
import numpy.random as rng
import scipy.special
from utils import randh
from numba import jit
# How many parameters are there?
num_params = 4
# Some data
data = np.loadtxt("road.txt")
N = data.shape[0] # Number of data points
# Plot the data
import matplotlib.pyplot as plt
plt.plot(data[:,0], data[:,1], "o")
plt.xlabel("Age of person (years)")
plt.ylabel("Maximum vision distance (feet)")
plt.show()
# Some idea of how big the Metropolis proposals should be
jump_sizes = np.array([1000.0, 1000.0, 20.0, 5.0])
@jit
def from_prior():
"""
A function to generate parameter values from the prior.
Returns a numpy array of parameter values.
"""
m = 1000.0*rng.randn()
b = 1000.0*rng.randn()
log_sigma = -10.0 + 20.0*rng.rand()
log_nu = 5.0*rng.rand()
return np.array([m, b, log_sigma, log_nu])
@jit
def log_prior(params):
"""
Evaluate the (log of the) prior distribution
"""
# Rename the parameters
m, b, log_sigma, log_nu = params
logp = 0.0
# Normal prior for m and b
# Metropolis only needs the ratio, so I've left out the 2pi bits
logp += -0.5*(m/1000.0)**2
logp += -0.5*(b/1000.0)**2
if log_sigma < -10.0 or log_sigma > 10.0:
return -np.Inf
if log_nu < 0.0 or log_nu > 5.0:
return -np.Inf
return logp
@jit
def log_likelihood(params):
"""
Evaluate the (log of the) likelihood function
"""
# Rename the parameters
m, b, log_sigma, log_nu = params
# Get sigma and nu
sigma = np.exp(log_sigma)
nu = np.exp(log_nu)
# First calculate the straight line
line = m*data[:,0] + b
# t distribution (compare with the pdf on wikipedia, under
# Non-standardized Student's t-distribution)
return N*scipy.special.gammaln(0.5*(nu+1.0))\
-N*0.5*np.log(nu*np.pi) - N*scipy.special.gammaln(0.5*nu) - N*np.log(sigma)\
-0.5*(nu+1.0)*np.sum(np.log(1.0 + ((data[:,1] - line)**2/sigma**2)/nu))
@jit
def proposal(params):
"""
Generate new values for the parameters, for the Metropolis algorithm.
"""
# Copy the parameters
new = copy.deepcopy(params)
# Which one should we change?
which = rng.randint(num_params)
new[which] += jump_sizes[which]*randh()
return new
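# Minimal Metropolis driver (a sketch, not part of the original file; it only
# assumes the from_prior/log_prior/log_likelihood/proposal functions above).
def run_metropolis(num_steps=10000):
    params = from_prior()
    logp = log_prior(params) + log_likelihood(params)
    chain = np.empty((num_steps, num_params))
    for i in range(num_steps):
        new = proposal(params)
        logp_new = log_prior(new)
        if logp_new != -np.Inf:
            logp_new += log_likelihood(new)
        # Accept with probability min(1, posterior ratio)
        if rng.rand() < np.exp(logp_new - logp):
            params, logp = new, logp_new
        chain[i, :] = params
    return chain
# e.g. samples = run_metropolis(5000)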
| gpl-2.0 | 5,760,810,357,822,276,000 | 22.989583 | 87 | 0.6231 | false | 3.058433 | false | false | false |
ActiveState/code | recipes/Python/286240_Python_portscanners/recipe-286240.py | 1 | 3523 | # a simple portscanner with multithreading
import socket as sk
import sys
import threading
MAX_THREADS = 50
def usage():
print "\npyScan 0.1"
print "usage: pyScan <host> [start port] [end port]"
class Scanner(threading.Thread):
def __init__(self, host, port):
threading.Thread.__init__(self)
# host and port
self.host = host
self.port = port
# build up the socket obj
self.sd = sk.socket(sk.AF_INET, sk.SOCK_STREAM)
def run(self):
try:
# connect to the given host:port
self.sd.connect((self.host, self.port))
print "%s:%d OPEN" % (self.host, self.port)
self.sd.close()
except: pass
class pyScan:
def __init__(self, args=[]):
# arguments vector
self.args = args
# start port and end port
self.start, self.stop = 1, 1024
# host name
self.host = ""
# check the arguments
if len(self.args) == 4:
self.host = self.args[1]
try:
self.start = int(self.args[2])
self.stop = int(self.args[3])
except ValueError:
usage()
return
if self.start > self.stop:
usage()
return
elif len(self.args) == 2:
self.host = self.args[1]
else:
usage()
return
try:
sk.gethostbyname(self.host)
except:
print "hostname '%s' unknown" % self.host
self.scan(self.host, self.start, self.stop)
def scan(self, host, start, stop):
self.port = start
while self.port <= stop:
while threading.activeCount() < MAX_THREADS and self.port <= stop:
Scanner(host, self.port).start()
self.port += 1
if __name__ == "__main__":
pyScan(sys.argv)
#############################################################
# a simple portscanner with multithreading
# QUEUE BASED VERSION
import socket
import sys
import threading, Queue
MAX_THREADS = 50
class Scanner(threading.Thread):
def __init__(self, inq, outq):
threading.Thread.__init__(self)
self.setDaemon(1)
# queues for (host, port)
self.inq = inq
self.outq = outq
def run(self):
while 1:
host, port = self.inq.get()
sd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# connect to the given host:port
sd.connect((host, port))
except socket.error:
# set the CLOSED flag
self.outq.put((host, port, 'CLOSED'))
else:
self.outq.put((host, port, 'OPEN'))
sd.close()
def scan(host, start, stop, nthreads=MAX_THREADS):
toscan = Queue.Queue()
scanned = Queue.Queue()
scanners = [Scanner(toscan, scanned) for i in range(nthreads)]
for scanner in scanners:
scanner.start()
hostports = [(host, port) for port in xrange(start, stop+1)]
for hostport in hostports:
toscan.put(hostport)
results = {}
for host, port in hostports:
while (host, port) not in results:
nhost, nport, nstatus = scanned.get()
results[(nhost, nport)] = nstatus
status = results[(host, port)]
if status != 'CLOSED':
print '%s:%d %s' % (host, port, status)
if __name__ == '__main__':
scan('localhost', 0, 1024)
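# Note (descriptive, not in the original recipe): unlike the first version,
# which spawns one thread per port, the queue-based version keeps a fixed pool
# of MAX_THREADS daemon workers, so at most MAX_THREADS sockets are in flight
# no matter how large the scanned port range is.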
| mit | 749,350,839,647,121,400 | 26.523438 | 74 | 0.518024 | false | 3.884234 | false | false | false |
bacaldwell/ironic | ironic/tests/unit/common/test_image_service.py | 1 | 16700 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import os
import shutil
import mock
from oslo_config import cfg
import requests
import sendfile
import six
import six.moves.builtins as __builtin__
from six.moves import http_client
from ironic.common import exception
from ironic.common.glance_service.v1 import image_service as glance_v1_service
from ironic.common import image_service
from ironic.common import keystone
from ironic.tests import base
if six.PY3:
import io
file = io.BytesIO
class HttpImageServiceTestCase(base.TestCase):
def setUp(self):
super(HttpImageServiceTestCase, self).setUp()
self.service = image_service.HttpImageService()
self.href = 'http://127.0.0.1:12345/fedora.qcow2'
@mock.patch.object(requests, 'head', autospec=True)
def test_validate_href(self, head_mock):
response = head_mock.return_value
response.status_code = http_client.OK
self.service.validate_href(self.href)
head_mock.assert_called_once_with(self.href)
response.status_code = http_client.NO_CONTENT
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href,
self.href)
response.status_code = http_client.BAD_REQUEST
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href,
self.href)
@mock.patch.object(requests, 'head', autospec=True)
def test_validate_href_error_code(self, head_mock):
head_mock.return_value.status_code = http_client.BAD_REQUEST
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href, self.href)
head_mock.assert_called_once_with(self.href)
@mock.patch.object(requests, 'head', autospec=True)
def test_validate_href_error(self, head_mock):
head_mock.side_effect = requests.ConnectionError()
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href, self.href)
head_mock.assert_called_once_with(self.href)
@mock.patch.object(requests, 'head', autospec=True)
def _test_show(self, head_mock, mtime, mtime_date):
head_mock.return_value.status_code = http_client.OK
head_mock.return_value.headers = {
'Content-Length': 100,
'Last-Modified': mtime
}
result = self.service.show(self.href)
head_mock.assert_called_once_with(self.href)
self.assertEqual({'size': 100, 'updated_at': mtime_date,
'properties': {}}, result)
def test_show_rfc_822(self):
self._test_show(mtime='Tue, 15 Nov 2014 08:12:31 GMT',
mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
def test_show_rfc_850(self):
self._test_show(mtime='Tuesday, 15-Nov-14 08:12:31 GMT',
mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
def test_show_ansi_c(self):
self._test_show(mtime='Tue Nov 15 08:12:31 2014',
mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
@mock.patch.object(requests, 'head', autospec=True)
def test_show_no_content_length(self, head_mock):
head_mock.return_value.status_code = http_client.OK
head_mock.return_value.headers = {}
self.assertRaises(exception.ImageRefValidationFailed,
self.service.show, self.href)
head_mock.assert_called_with(self.href)
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
def test_download_success(self, req_get_mock, shutil_mock):
response_mock = req_get_mock.return_value
response_mock.status_code = http_client.OK
response_mock.raw = mock.MagicMock(spec=file)
file_mock = mock.Mock(spec=file)
self.service.download(self.href, file_mock)
shutil_mock.assert_called_once_with(
response_mock.raw.__enter__(), file_mock,
image_service.IMAGE_CHUNK_SIZE
)
req_get_mock.assert_called_once_with(self.href, stream=True)
@mock.patch.object(requests, 'get', autospec=True)
def test_download_fail_connerror(self, req_get_mock):
req_get_mock.side_effect = requests.ConnectionError()
file_mock = mock.Mock(spec=file)
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
def test_download_fail_ioerror(self, req_get_mock, shutil_mock):
response_mock = req_get_mock.return_value
response_mock.status_code = http_client.OK
response_mock.raw = mock.MagicMock(spec=file)
file_mock = mock.Mock(spec=file)
shutil_mock.side_effect = IOError
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
req_get_mock.assert_called_once_with(self.href, stream=True)
class FileImageServiceTestCase(base.TestCase):
def setUp(self):
super(FileImageServiceTestCase, self).setUp()
self.service = image_service.FileImageService()
self.href = 'file:///home/user/image.qcow2'
self.href_path = '/home/user/image.qcow2'
@mock.patch.object(os.path, 'isfile', return_value=True, autospec=True)
def test_validate_href(self, path_exists_mock):
self.service.validate_href(self.href)
path_exists_mock.assert_called_once_with(self.href_path)
@mock.patch.object(os.path, 'isfile', return_value=False, autospec=True)
def test_validate_href_path_not_found_or_not_file(self, path_exists_mock):
self.assertRaises(exception.ImageRefValidationFailed,
self.service.validate_href, self.href)
path_exists_mock.assert_called_once_with(self.href_path)
@mock.patch.object(os.path, 'getmtime', return_value=1431087909.1641912,
autospec=True)
@mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_show(self, _validate_mock, getsize_mock, getmtime_mock):
_validate_mock.return_value = self.href_path
result = self.service.show(self.href)
getsize_mock.assert_called_once_with(self.href_path)
getmtime_mock.assert_called_once_with(self.href_path)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual({'size': 42,
'updated_at': datetime.datetime(2015, 5, 8,
12, 25, 9, 164191),
'properties': {}}, result)
@mock.patch.object(os, 'link', autospec=True)
@mock.patch.object(os, 'remove', autospec=True)
@mock.patch.object(os, 'access', return_value=True, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_hard_link(self, _validate_mock, stat_mock, access_mock,
remove_mock, link_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.Mock(spec=file)
file_mock.name = 'file'
self.service.download(self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
remove_mock.assert_called_once_with('file')
link_mock.assert_called_once_with(self.href_path, 'file')
@mock.patch.object(sendfile, 'sendfile', autospec=True)
@mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
@mock.patch.object(__builtin__, 'open', autospec=True)
@mock.patch.object(os, 'access', return_value=False, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_copy(self, _validate_mock, stat_mock, access_mock,
open_mock, size_mock, copy_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.MagicMock(spec=file)
file_mock.name = 'file'
input_mock = mock.MagicMock(spec=file)
open_mock.return_value = input_mock
self.service.download(self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
copy_mock.assert_called_once_with(file_mock.fileno(),
input_mock.__enter__().fileno(),
0, 42)
size_mock.assert_called_once_with(self.href_path)
@mock.patch.object(os, 'remove', side_effect=OSError, autospec=True)
@mock.patch.object(os, 'access', return_value=True, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_hard_link_fail(self, _validate_mock, stat_mock,
access_mock, remove_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.MagicMock(spec=file)
file_mock.name = 'file'
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
@mock.patch.object(sendfile, 'sendfile', side_effect=OSError,
autospec=True)
@mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
@mock.patch.object(__builtin__, 'open', autospec=True)
@mock.patch.object(os, 'access', return_value=False, autospec=True)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(image_service.FileImageService, 'validate_href',
autospec=True)
def test_download_copy_fail(self, _validate_mock, stat_mock, access_mock,
open_mock, size_mock, copy_mock):
_validate_mock.return_value = self.href_path
stat_mock.return_value.st_dev = 'dev1'
file_mock = mock.MagicMock(spec=file)
file_mock.name = 'file'
input_mock = mock.MagicMock(spec=file)
open_mock.return_value = input_mock
self.assertRaises(exception.ImageDownloadFailed,
self.service.download, self.href, file_mock)
_validate_mock.assert_called_once_with(mock.ANY, self.href)
self.assertEqual(2, stat_mock.call_count)
access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
size_mock.assert_called_once_with(self.href_path)
class ServiceGetterTestCase(base.TestCase):
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service(self, glance_service_mock, token_mock):
image_href = 'image-uuid'
self.context.auth_token = 'fake'
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
self.assertFalse(token_mock.called)
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service_url(self, glance_service_mock,
token_mock):
image_href = 'glance://image-uuid'
self.context.auth_token = 'fake'
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
self.assertFalse(token_mock.called)
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service_no_token(self, glance_service_mock,
token_mock):
image_href = 'image-uuid'
self.context.auth_token = None
token_mock.return_value = 'admin-token'
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
token_mock.assert_called_once_with()
self.assertEqual('admin-token', self.context.auth_token)
@mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
@mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
return_value=None, autospec=True)
def test_get_glance_image_service_token_not_needed(self,
glance_service_mock,
token_mock):
image_href = 'image-uuid'
self.context.auth_token = None
self.config(auth_strategy='noauth', group='glance')
image_service.get_image_service(image_href, context=self.context)
glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
self.context)
self.assertFalse(token_mock.called)
self.assertIsNone(self.context.auth_token)
@mock.patch.object(image_service.HttpImageService, '__init__',
return_value=None, autospec=True)
def test_get_http_image_service(self, http_service_mock):
image_href = 'http://127.0.0.1/image.qcow2'
image_service.get_image_service(image_href)
http_service_mock.assert_called_once_with()
@mock.patch.object(image_service.HttpImageService, '__init__',
return_value=None, autospec=True)
def test_get_https_image_service(self, http_service_mock):
image_href = 'https://127.0.0.1/image.qcow2'
image_service.get_image_service(image_href)
http_service_mock.assert_called_once_with()
@mock.patch.object(image_service.FileImageService, '__init__',
return_value=None, autospec=True)
def test_get_file_image_service(self, local_service_mock):
image_href = 'file:///home/user/image.qcow2'
image_service.get_image_service(image_href)
local_service_mock.assert_called_once_with()
def test_get_image_service_unknown_protocol(self):
image_href = 'usenet://alt.binaries.dvd/image.qcow2'
self.assertRaises(exception.ImageRefValidationFailed,
image_service.get_image_service, image_href)
def test_out_range_auth_strategy(self):
self.assertRaises(ValueError, cfg.CONF.set_override,
'auth_strategy', 'fake', 'glance',
enforce_type=True)
def test_out_range_glance_protocol(self):
self.assertRaises(ValueError, cfg.CONF.set_override,
'glance_protocol', 'fake', 'glance',
enforce_type=True)
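# What these tests pin down (illustrative recap): get_image_service() picks the
# backend from the href scheme, e.g.
# image_service.get_image_service('http://host/img.qcow2')  -> HttpImageService
# image_service.get_image_service('file:///tmp/img.qcow2')  -> FileImageService
# image_service.get_image_service('image-uuid', context=ctx) -> Glance service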
| apache-2.0 | 4,742,086,651,356,532,000 | 47.688047 | 78 | 0.627365 | false | 3.671137 | true | false | false |
CommonsCloud/Core-API | CommonsCloudAPI/models/user.py | 1 | 8946 | """
For CommonsCloud copyright information please see the LICENSE document
(the "License") included with this software package. This file may not
be used in any manner except in compliance with the License
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import hashlib
"""
Import Flask Dependencies
"""
from flask.ext.security import current_user
from flask.ext.security import UserMixin
from flask.ext.security import RoleMixin
from flask.ext.security import SQLAlchemyUserDatastore
from werkzeug import generate_password_hash
from werkzeug import check_password_hash
"""
Import Commons Cloud Dependencies
"""
from CommonsCloudAPI.extensions import db
from CommonsCloudAPI.extensions import logger
from CommonsCloudAPI.extensions import sanitize
from CommonsCloudAPI.extensions import status as status_
from CommonsCloudAPI.models.base import CommonsModel
from CommonsCloudAPI.models.template import UserTemplates
from CommonsCloudAPI.models.application import UserApplications
user_roles = db.Table('user_roles',
db.Column('user', db.Integer(), db.ForeignKey('user.id')),
db.Column('role', db.Integer(), db.ForeignKey('role.id')),
extend_existing = True
)
"""
This defines our basic Role model; we have to have this because of the
Flask-Security module. If you remove it Flask-Security gets fussy.
"""
class Role(db.Model, RoleMixin):
__tablename__ = 'role'
__table_args__ = {
'extend_existing': True
}
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
"""
This defines our basic User model; we have to have this because of the
Flask-Security module. If you remove it Flask-Security gets fussy.
"""
class User(db.Model, UserMixin, CommonsModel):
__public__ = {'default': ['id', 'name', 'email', 'active', 'confirmed_at']}
__tablename__ = 'user'
__table_args__ = {
'extend_existing': True
}
"""
Define the fields that we will use to create the User table in our
database for use with our SQLAlchemy model
"""
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
email = db.Column(db.String(255))
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=user_roles, backref=db.backref('users'))
applications = db.relationship('UserApplications', backref=db.backref('users'))
templates = db.relationship('UserTemplates', backref=db.backref('users'))
fields = db.relationship('UserFields', backref=db.backref('users'))
def __init__(self, name="", email="", password="", active=True, roles=[], permissions=[]):
self.name = name
self.email = email
self.password = password
self.active = active
self.roles = roles
self.permissions = permissions
"""
Set the user password using the pbkdf2:sha1 method and a salt_length of 64
"""
def set_password(self, password):
self.password = generate_password_hash(password, method='pbkdf2:sha1', salt_length=64)
"""
Check to see if the password entered by the user matches the password saved
in the database associated with the acting user
@param (object) self
@param (string) password
The password to check against the database
@return (bool)
The boolean of whether or not the passwords match
"""
def check_password(self, password):
return check_password_hash(self.password, password)
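# Illustrative usage (assumed, mirrors the two helpers above):
# user = User(name="Ada", email="ada@example.com")
# user.set_password("s3cret")    # stores a pbkdf2:sha1 hash, never plaintext
# user.check_password("s3cret")  # -> True only when the hashes match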
def user_create(self, new_user):
new_user_ = User(**new_user)
db.session.add(new_user_)
db.session.commit()
return new_user_
"""
Get the SQLAlchemy User object for the current_user
@param (object) self
@return (object) user_
The object of the current user, not to be confused with current_user
"""
def user_get(self, user_id):
user_ = User.query.get(user_id)
if not user_:
return status_.status_404('We couldn\'t find the user you were looking for.'), 404
return {
'active': user_.active,
'member_since': user_.confirmed_at.strftime('%b %d, %Y'),
'picture': self.user_picture(user_.email),
'email': user_.email,
'id': user_.id,
'name': user_.name
}
"""
Get the a list of User objects for the entire system
@return (array) users_
The array of objects for all users in system
"""
def user_list(self):
users_ = User.query.all()
return users_
"""
Get the a list of User objects limited to a specific Application
@return (array) users_
The array of objects for all Application Users in system
"""
def application_user_list(self, application_id):
user_list = []
ApplicationUsers = UserApplications.query.filter_by(application_id=application_id).all()
for user in ApplicationUsers:
user_list.append(user.user_id)
return User.query.filter(User.id.in_(user_list)).all()
"""
Get the a list of User objects limited to a specific Template
@return (array) users_
The array of objects for all Template Users in system
"""
def template_user_list(self, template_id):
user_list = []
TemplateUsers = UserTemplates.query.filter_by(template_id=template_id).all()
for user in TemplateUsers:
user_list.append(user.user_id)
return User.query.filter(User.id.in_(user_list)).all()
def user_update(self, user_object_):
"""
Before updating any information we first have to load the User object for the
user we wish to act upon. To make extra sure that one user doesn't update another
by sending an alternative 'id' through with the post request, we only act on the
`current_user` as defined by the security module.
"""
user_ = User.query.get(current_user.id)
"""
Content that needs sanitized
"""
user_.name = sanitize.sanitize_string(user_object_.get('name', current_user.name))
user_.email = sanitize.sanitize_string(user_object_.get('email', current_user.email))
"""
Booleans and Arrays are not sanitized right now ... they probably should be
"""
# user_.active = user_object_.get('active', current_user.active)
# user_.roles = user_object_.get('roles', current_user.roles)
# user_.permissions = user_object_.get('permissions', current_user.permissions)
"""
Save all of our updates to the database
"""
db.session.commit()
return user_
"""
Remove a user entirely from our system
This should be a multiple step process:
1. User arrives at the "Remove account" page
2. Message is displayed warning the user of ramifications of account removal
3. User must type in their current password
"""
def user_remove(self):
pass
def user_picture(self, email):
user_email = email.lower()
user_hash = hashlib.md5(user_email).hexdigest()
picture_url = '//www.gravatar.com/avatar/' + user_hash
return picture_url
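# e.g. user_picture("Ada@Example.COM") lowercases to "ada@example.com", hashes
# it, and returns "//www.gravatar.com/avatar/<md5-hex-digest>" (illustrative).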
"""
Get a list of Users that have access to the Application requested by
the user, but make sure the User requesting this information is logged
in already and has `is_admin` permission to the requested Applciation
"""
def application_users(self, application_id):
allowed_applications = self.allowed_applications('is_admin')
if not application_id in allowed_applications:
logger.warning('User %d with Applications %s tried to access Users for Application %d', \
self.current_user.id, allowed_applications, application_id)
return status_.status_401('You are not allowed to view the Users of this Application because you do not have the permission to do so'), 401
return self.application_user_list(application_id)
"""
Get a list of Users that have access to the Template requested by
the user, but make sure the User requesting this information is logged
in already and has `is_admin` permission to the requested Template
"""
def template_users(self, template_id):
allowed_templates = self.allowed_templates('is_admin')
if not template_id in allowed_templates:
logger.warning('User %d tried to access Users for Template %d', \
self.current_user.id, template_id)
return status_.status_401('You are not allowed to view the Users of this Template because you do not have the permission to do so'), 401
return self.template_user_list(template_id)
"""
The last thing we need to do is actually hook these things up to the
User Datastore provided by SQLAlchemy's Engine's datastore that provides
Flask-Security with User/Role information so we can lock down access
to the system and it's resources.
"""
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
| agpl-3.0 | 2,422,971,609,378,438,700 | 28.331148 | 145 | 0.705455 | false | 3.927129 | false | false | false |
mjschultz/redlock | tests/test_lock.py | 1 | 1801 | from redlock import RedLock
import time
def test_default_connection_details_value():
"""
Test that RedLock instance could be created with
default value of `connection_details` argument.
"""
lock = RedLock("test_simple_lock")
def test_simple_lock():
"""
Test a RedLock can be acquired.
"""
lock = RedLock("test_simple_lock", [{"host": "localhost"}], ttl=1000)
locked = lock.acquire()
lock.release()
assert locked == True
def test_context_manager():
"""
Test that a RedLock is released automatically by the context manager.
"""
with RedLock("test_context_manager", [{"host": "localhost"}], ttl=1000):
lock = RedLock("test_context_manager", [{"host": "localhost"}], ttl=1000)
locked = lock.acquire()
assert locked == False
lock = RedLock("test_context_manager", [{"host": "localhost"}], ttl=1000)
locked = lock.acquire()
assert locked == True
lock.release()
def test_fail_to_lock_acquired():
lock1 = RedLock("test_fail_to_lock_acquired", [{"host": "localhost"}], ttl=1000)
lock2 = RedLock("test_fail_to_lock_acquired", [{"host": "localhost"}], ttl=1000)
lock1_locked = lock1.acquire()
lock2_locked = lock2.acquire()
lock1.release()
assert lock1_locked == True
assert lock2_locked == False
def test_lock_expire():
lock1 = RedLock("test_lock_expire", [{"host": "localhost"}], ttl=500)
lock1.acquire()
time.sleep(1)
# Now lock1 has expired, we can acquire a lock
lock2 = RedLock("test_lock_expire", [{"host": "localhost"}], ttl=1000)
locked = lock2.acquire()
assert locked == True
lock1.release()
lock3 = RedLock("test_lock_expire", [{"host": "localhost"}], ttl=1000)
locked = lock3.acquire()
assert locked == False
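# Usage sketch (an assumption, not part of the test suite): guarding a
# critical section; connection details mirror the tests above.
def update_shared_resource():
    with RedLock("resource_lock", [{"host": "localhost"}], ttl=1000):
        pass  # read-modify-write the shared resource while holding the lock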
| mit | 3,412,506,755,901,172,000 | 26.707692 | 84 | 0.62965 | false | 3.573413 | true | false | false |
chme/plugin.audio.mpdclient2 | mpdclient/Navigation.py | 1 | 19566 | #
# Copyright (c) chme
#
# This file is part of the mpdclient kodi plugin
#
# This plugin is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of
# the License, or (at your option) any later version.
#
# This plugin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
import urllib
import xbmcgui
import xbmcplugin
from lib import mpd
from mpdclient.Env import Env
from mpdclient.Language import Language
class NavUrl:
__KEY_NAVID = "navid"
__KEY_PARAM = "param"
__SEPARATOR = "###"
@staticmethod
def build_url(env, navid, params=[]):
return env.base_url() + "?" + urllib.urlencode({NavUrl.__KEY_NAVID: navid}) + "&" + urllib.urlencode({NavUrl.__KEY_PARAM: NavUrl.__SEPARATOR.join(params)})
@staticmethod
def get_navid(env):
return env.param_string(NavUrl.__KEY_NAVID)
@staticmethod
def get_params(env):
return env.param_string(NavUrl.__KEY_PARAM).split(NavUrl.__SEPARATOR)
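# Illustrative round trip (assuming env.base_url() is the plugin:// prefix):
# build_url(env, Nav.NAV_FIND, ["album", "Abbey Road"]) yields something like
#   plugin://plugin.audio.mpdclient2/?navid=find&param=album%23%23%23Abbey+Road
# which get_navid()/get_params() then decode back to "find" and
# ["album", "Abbey Road"].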
class Nav:
NAV_FILE = "file"
NAV_PL = "playlist"
NAV_LIST = "list"
NAV_FIND = "find"
NAV_QUEUE = "queue"
NAV_PLAYLISTS = "playlists"
ACTION_ADD = "add"
ACTION_LOAD = "load"
ACTION_FINDADD = "findadd"
ACTION_REMOVE = "remove"
ACTION_CLEAR = "clear"
ACTION_PLAY = "play"
ACTION_PAUSE = "pause"
ACTION_PREV = "prev"
ACTION_NEXT = "next"
ACTION_STOP = "stop"
ACTION_OUTPUTS = "outputs"
def __init__(self):
self.__env = Env()
self.__mpc = mpd.MPDClient()
return
def handle(self):
self.__connect_mpd()
params = NavUrl.get_params(self.__env)
navid = NavUrl.get_navid(self.__env)
if navid == Nav.NAV_FILE:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TITLE)
self.__nav_file(self.__env, self.__mpc, params)
elif navid == Nav.NAV_PL:
xbmcplugin.setContent(self.__env.addon_handle(), "songs")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_NONE)
self.__nav_pl(self.__env, self.__mpc, params)
elif navid == Nav.NAV_PLAYLISTS:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TRACKNUM)
self.__nav_playlists(self.__env, self.__mpc, params)
elif navid == Nav.NAV_LIST:
if "albumartist" == params[0]:
xbmcplugin.setContent(self.__env.addon_handle(), "artists")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_ARTIST)
elif "album" == params[0]:
xbmcplugin.setContent(self.__env.addon_handle(), "albums")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_ALBUM)
elif "genre" == params[0]:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_GENRE)
self.__nav_list(self.__env, self.__mpc, params)
elif navid == Nav.NAV_FIND:
xbmcplugin.setContent(self.__env.addon_handle(), "songs")
#xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TITLE)
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_TRACKNUM)
self.__nav_find(self.__env, self.__mpc, params)
elif navid == Nav.NAV_QUEUE:
xbmcplugin.setContent(self.__env.addon_handle(), "songs")
xbmcplugin.addSortMethod(self.__env.addon_handle(), xbmcplugin.SORT_METHOD_NONE)
self.__nav_queue(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_ADD:
self.__action_add(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_LOAD:
self.__action_load(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_FINDADD:
self.__action_findadd(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_REMOVE:
self.__action_remove(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_CLEAR:
self.__action_clear(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_PLAY:
self.__action_play(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_PAUSE:
self.__action_pause(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_PREV:
self.__action_prev(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_NEXT:
self.__action_next(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_STOP:
self.__action_stop(self.__env, self.__mpc, params)
elif navid == Nav.ACTION_OUTPUTS:
self.__action_outputs(self.__env, self.__mpc, params)
else:
xbmcplugin.setContent(self.__env.addon_handle(), "files")
self.__nav_root(self.__env, self.__mpc, params)
self.__deinit()
def __connect_mpd(self):
self.__mpc.connect(
self.__env.setting("host"), self.__env.setting("port"))
return
def __deinit(self):
self.__mpc.disconnect()
xbmcplugin.endOfDirectory(self.__env.addon_handle())
def __nav_root(self, env, mpc, params=[]):
item = ItemRoot()
item.add(env, Nav.NAV_QUEUE, env.localized(Language.QUEUE), [], "DefaultMusicPlaylists.png")
item.add(env, Nav.NAV_FILE, env.localized(Language.FILES), ["/"], "")
item.add(env, Nav.NAV_LIST, env.localized(
Language.ARTISTS), ["albumartist"], "DefaultMusicArtists.png")
item.add(env, Nav.NAV_LIST, env.localized(
Language.ALBUMS), ["album"], "DefaultMusicAlbums.png")
item.add(env, Nav.NAV_LIST, env.localized(
Language.GENRE), ["genre"], "DefaultMusicGenres.png")
item.add(env, Nav.NAV_PLAYLISTS, env.localized(Language.PLAYLISTS), [], "DefaultMusicPlaylists.png")
return
def __nav_file(self, env, mpc, params=[]):
path = params[0]
item = ItemFile()
for metadata in mpc.lsinfo(path):
item.add(env, metadata)
return
def __nav_playlists(self, env, mpc, params=[]):
item = ItemFile()
for metadata in mpc.listplaylists():
item.add(env, metadata)
return
def __nav_pl(self, env, mpc, params=[]):
path = params[0]
item = ItemFile()
for metadata in mpc.listplaylistinfo(path):
item.add(env, metadata)
return
def __nav_list(self, env, mpc, params=[]):
item = ItemTag()
for tag in mpc.list(*params):
item.add(env, mpc, params[0], tag, params[1:])
return
def __nav_find(self, env, mpc, params=[]):
item = ItemFile()
for metadata in mpc.find(*params):
item.add(env, metadata)
return
def __nav_queue(self, env, mpc, params=[]):
item = ItemFile()
for metadata in mpc.playlistinfo():
item.add(env, metadata)
return
def __action_add(self, env, mpc, params=[]):
mpc.add(params[0])
xbmcgui.Dialog().notification(
"MPD", self.__env.localized(Language.SONGS_ADDED), xbmcgui.NOTIFICATION_INFO, 5000)
# mpc.play()
return
def __action_load(self, env, mpc, params=[]):
mpc.load(params[0])
xbmcgui.Dialog().notification(
"MPD", self.__env.localized(Language.SONGS_ADDED), xbmcgui.NOTIFICATION_INFO, 5000)
# mpc.play()
return
def __action_findadd(self, env, mpc, params=[]):
mpc.findadd(*params)
xbmcgui.Dialog().notification(
"MPD", self.__env.localized(Language.SONGS_ADDED), xbmcgui.NOTIFICATION_INFO, 5000)
# mpc.play()
return
def __action_play(self, env, mpc, params=[]):
if int(params[0]) >= 0:
mpc.play(int(params[0]))
else:
mpc.play()
return
def __action_pause(self, env, mpc, params=[]):
mpc.pause()
return
def __action_stop(self, env, mpc, params=[]):
mpc.stop()
return
def __action_prev(self, env, mpc, params=[]):
mpc.previous()
return
def __action_next(self, env, mpc, params=[]):
mpc.next()
return
def __action_remove(self, env, mpc, params=[]):
mpc.delete(params[0])
return
def __action_clear(self, env, mpc, params=[]):
mpc.clear()
return
def __action_outputs(self, env, mpc, params=[]):
outputs = []
outputids = []
for output in mpc.outputs():
if output["outputenabled"] == "1":
enabled = " [enabled]"
else:
enabled = " [disabled]"
outputs.append(output["outputname"] + enabled)
outputids.append(output["outputid"])
ret = xbmcgui.Dialog().select("Toggle outputs", outputs, False)
if ret >= 0:
mpc.toggleoutput(outputids[ret])
# xbmcgui.Dialog().notification("MPD",
# self.__env.localized(Language.SONGS_ADDED),
# xbmcgui.NOTIFICATION_INFO,
# 2000)
return
class Item:
def global_contextmenu(self, env, pospl=-1):
return [(env.localized(Language.PLAY), "RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_PLAY, [str(pospl)]) + ")"),
(env.localized(Language.PAUSE),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_PAUSE) + ")"),
(env.localized(Language.STOP),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_STOP) + ")"),
(env.localized(Language.PREVIOUS),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_PREV) + ")"),
(env.localized(Language.NEXT),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_NEXT) + ")"),
(env.localized(Language.CLEAR),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_CLEAR) + ")"),
(env.localized(Language.OUTPUTS), "RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_OUTPUTS) + ")"), ]
class ItemRoot(Item):
def add(self, env, navid, name, param, icon="DefaultFolder.png"):
li = xbmcgui.ListItem(name, iconImage=icon)
li.addContextMenuItems(self.global_contextmenu(env), True)
url = NavUrl.build_url(env, navid, param)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
class ItemTag(Item):
def add(self, env, mpc, tag, val, what):
#t = [tag, val] + what + ["0:1"]
#print t
#mpc.find(*t)
if "albumartist" == tag:
self.__add_artist(env, val, what)
elif "album" == tag:
self.__add_album(env, val, what)
elif "genre" == tag:
self.__add_genre(env, val, what)
return
def __add_artist(self, env, artist, what):
li = xbmcgui.ListItem(artist, iconImage="DefaultMusicArtists.png")
li.setInfo("music", {#"genre": metadata.get("genre", env.localized(Language.UNKNOWN)),
#"year": metadata.get("date", None),
#"title": metadata.get("title", ""),
#"album": metadata.get("album", env.localized(Language.UNKNOWN)),
"artist": artist,
#"duration": metadata.get("time", 0),
#"tracknumber": metadata.get("track", None),
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" + NavUrl.build_url(env,
Nav.ACTION_FINDADD, ["albumartist", artist] + what) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(
env, Nav.NAV_LIST, ["album", "albumartist", artist] + what)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_album(self, env, album, what):
li = xbmcgui.ListItem(album, iconImage="DefaultMusicAlbums.png")
li.setInfo("music", {#"genre": metadata.get("genre", env.localized(Language.UNKNOWN)),
#"year": metadata.get("date", None),
#"title": metadata.get("title", ""),
"album": album,
#"artist": artist,
#"duration": metadata.get("time", 0),
#"tracknumber": metadata.get("track", None),
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" + NavUrl.build_url(env,
Nav.ACTION_FINDADD, ["album", album] + what) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.NAV_FIND, ["album", album] + what)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_genre(self, env, genre, what):
li = xbmcgui.ListItem(genre, iconImage="DefaultMusicGenres.png")
li.setInfo("music", {"genre": genre,
#"year": metadata.get("date", None),
#"title": metadata.get("title", ""),
#"album": album,
#"artist": artist,
#"duration": metadata.get("time", 0),
#"tracknumber": metadata.get("track", None),
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" + NavUrl.build_url(env,
Nav.ACTION_FINDADD, ["genre", genre] + what) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(
env, Nav.NAV_LIST, ["albumartist", "genre", genre] + what)
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
class ItemFile(Item):
def add(self, env, metadata):
if "directory" in metadata:
self.__add_dir(env, metadata)
elif "playlist" in metadata:
self.__add_playlist(env, metadata)
elif "file" in metadata:
self.__add_song(env, metadata)
return
def __add_dir(self, env, metadata):
path = metadata["directory"]
name = path[path.rfind("/") + 1:]
li = xbmcgui.ListItem(name, iconImage="DefaultFolder.png")
li.addContextMenuItems(
[(env.localized(Language.ADD),
"RunPlugin(" + NavUrl.build_url(env, Nav.ACTION_ADD, [path]) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.NAV_FILE, [path])
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_playlist(self, env, metadata):
path = metadata["playlist"]
name = path[path.rfind("/") + 1:]
li = xbmcgui.ListItem(name, iconImage="DefaultMusicPlaylists.png")
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" +
NavUrl.build_url(env, Nav.ACTION_LOAD, [path]) + ")")]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.NAV_PL, [path])
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(),
url=url,
listitem=li,
isFolder=True)
return
def __add_song(self, env, metadata):
path = metadata["file"]
name = path[path.rfind("/") + 1:]
# If pos is given, this lists the current playlist and tracknumber
# is the position in the playlist instead of the album.
is_queue = "pos" in metadata
if is_queue:
pospl = int(metadata.get("pos", "-1"))
tracknumber = int(metadata.get("pos", "-1")) + 1
else:
pospl = -1
tracknumber = metadata.get("track", None)
li = xbmcgui.ListItem(name, iconImage="DefaultMusicSongs.png")
li.setInfo("music", {"genre": metadata.get("genre", env.localized(Language.UNKNOWN)),
"year": metadata.get("date", None),
"title": metadata.get("title", ""),
"album": metadata.get("album", env.localized(Language.UNKNOWN)),
"artist": metadata.get("artist", env.localized(Language.UNKNOWN)),
"duration": metadata.get("time", 0),
"tracknumber": tracknumber,
# "rating": "0", # TODO
# "playcount": 0, # TODO
# "lastplayed": "", # TODO
# "lyrics": "", # TODO
}
)
if is_queue:
li.addContextMenuItems(
[(env.localized(Language.REMOVE), "RunPlugin(" +
NavUrl.build_url(env, Nav.ACTION_REMOVE, [metadata.get("pos", "-1")]) + ")"), ]
+ self.global_contextmenu(env, pospl), True)
url = NavUrl.build_url(env, Nav.ACTION_PLAY, [str(pospl)])
else:
li.addContextMenuItems(
[(env.localized(Language.ADD), "RunPlugin(" +
NavUrl.build_url(env, Nav.ACTION_ADD, [path]) + ")"), ]
+ self.global_contextmenu(env), True)
url = NavUrl.build_url(env, Nav.ACTION_ADD, [path])
xbmcplugin.addDirectoryItem(
handle=env.addon_handle(), url=url, listitem=li)
return
| gpl-3.0 | -9,181,020,625,052,426,000 | 39.342268 | 163 | 0.525759 | false | 3.828214 | false | false | false |
Psycojoker/hackeragenda | hackeragenda/settings.py | 1 | 5897 | # Django settings for hackeragenda project.
import os
from collections import OrderedDict
PROJECT_PATH = os.path.abspath(os.path.split(__file__)[0])
SUBPROJECT_PATH = os.path.split(PROJECT_PATH)[0]
BASE_DIR = PROJECT_PATH # to avoid stupid warning from django 1.6
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
ALLOWED_HOSTS = ['*']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite',
}
}
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
TIME_ZONE = 'Europe/Brussels'
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = False
MEDIA_ROOT = ''
MEDIA_URL = '/medias/'
STATIC_ROOT = SUBPROJECT_PATH + '/static_deploy/static/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(SUBPROJECT_PATH, "static"),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
SECRET_KEY = 't)^bq6!v8!vj$+t+!4x1+uj100d73_8pt5d1(gh=py=lz7$^vm'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(SUBPROJECT_PATH, "templates"),],
# 'APP_DIRS': True,
'OPTIONS': {
'loaders': [
'hamlpy.template.loaders.HamlPyFilesystemLoader',
'hamlpy.template.loaders.HamlPyAppDirectoriesLoader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
PREDEFINED_FILTERS = OrderedDict()
PREDEFINED_FILTERS["default"] = {
"source": [
'afpyro',
'agenda_du_libre_be',
'belgian_blender_user_group',
'belgium_python_meetup',
'bhackspace',
'blender_brussels',
'brixel',
'bxlug',
'constantvzw',
'F_LAT',
'foam',
'hsbxl',
'incubhacker',
'jeudi_du_libre_mons',
'ko_lab',
'makilab',
'neutrinet',
'npbbxl',
'okfnbe',
'okno',
'opengarage',
'opengarage_meetings',
'openstreetmap_belgium',
'opentechschool',
'owaspbe',
'realize',
'source',
'syn2cat',
'urlab',
'voidwarranties',
'whitespace',
'wolfplex',
],
"exclude_source": [],
"tag": [],
"exclude_tag": ["meeting", "on_reservation"],
}
PREDEFINED_FILTERS["all"] = {
"source": [],
"exclude_source": [],
"tag": [],
"exclude_tag": [],
}
PREDEFINED_FILTERS["hackerspaces"] = {
"source": [
"brixel",
"bhackspace",
"hsbxl",
"incubhacker",
"opengarage",
"syn2cat",
"urlab",
"voidwarranties",
"whitespace",
"wolfplex",
"ko_lab"
],
"exclude_source": [],
"tag": [],
"exclude_tag": [],
}
PREDEFINED_FILTERS["*lab"] = {
"source": [],
"exclude_source": [],
"tag": ["fablab"],
"exclude_tag": [],
}
PREDEFINED_FILTERS["art"] = {
"source": [],
"exclude_source": [],
"tag": ["art"],
"exclude_tag": [],
}
PREDEFINED_FILTERS["code"] = {
"source": [],
"exclude_source": [],
"tag": ["code"],
"exclude_tag": [],
}
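# A new predefined filter only needs the same four keys (illustrative):
# PREDEFINED_FILTERS["workshops"] = {
#     "source": [],
#     "exclude_source": [],
#     "tag": ["workshop"],
#     "exclude_tag": [],
# }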
if DEBUG:
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
# Needed for the admin interface
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
INTERNAL_IPS = ('127.0.0.1',)
ROOT_URLCONF = 'hackeragenda.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'hackeragenda.wsgi.application'
LEAFLET_CONFIG = {
'DEFAULT_CENTER': (50.6407351, 4.66696),
'DEFAULT_ZOOM': 7,
'MIN_ZOOM': 2,
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'authentication',
'administration',
'events',
'taggit',
'gunicorn',
'leaflet',
)
AGENDA = "be"
if DEBUG:
INSTALLED_APPS += (
'debug_toolbar',
'django_pdb',
'django_extensions',
)
FIXTURE_DIRS = (
'fixtures',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
LOGIN_REDIRECT_URL = '/administration/'
try:
from settings_local import *
except ImportError:
pass
| gpl-3.0 | -9,104,221,644,388,092,000 | 21.76834 | 72 | 0.575886 | false | 3.381307 | false | false | false |
lizardsystem/lizard-map | lizard_map/daterange.py | 1 | 2259 | """Handle the date range setting and remembering
"""
import datetime
import logging
from django.conf import settings
import pytz
# NOTE: this module is obsolete as date ranges are entirely handled in
# javascript and should be passed as request parameter
logger = logging.getLogger(__name__)
# Session data postfixed with '_3' as the meaning changed between versions.
SESSION_DT_RANGETYPE = 'dt_rangetype_3'
SESSION_DT_START = 'dt_start_3'
SESSION_DT_END = 'dt_end_3'
default_start_days = getattr(settings, 'DEFAULT_START_DAYS', -2)
default_end_days = getattr(settings, 'DEFAULT_END_DAYS', 0)
def default_start(now):
"""Return default start date when period is PERIOD_OTHER."""
return now + datetime.timedelta(days=default_start_days)
def default_end(now):
"""Return default end date when period is PERIOD_OTHER."""
return now + datetime.timedelta(days=default_end_days)
def current_period(request):
"""
Return the current period, either default or from session.
TODO: mix together with current_start_end_dates (but is has a lot
of impact)
"""
default_period = getattr(settings, 'DEFAULT_RANGE_TYPE', 'week_plus_one')
if request is None:
return default_period
else:
return request.session.get(SESSION_DT_RANGETYPE, default_period)
def current_start_end_dates(request, for_form=False, today=None,
retrieve_period_function=current_period):
"""Return the current start datetime and end datetime.
If for_form is True, this function returns the datetime's as a dictionary
so the client can pass that directly into a form class. If for_form is not
True, this functions returns them as a tuple.
Other parameter:
*today*
datetime to initialize the current datetime (for testing purposes)
*retrieve_period_function*
function to retrieve the period type (for testing purposes)
"""
if today is None:
    today = datetime.datetime.now(tz=pytz.UTC)
session = request.session
dt_start = session.get(SESSION_DT_START, default_start(today))
dt_end = session.get(SESSION_DT_END, default_end(today))
if for_form:
return dict(dt_start=dt_start, dt_end=dt_end)
else:
return (dt_start, dt_end)
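# Illustrative behaviour (assumed request with an empty session, using the
# DEFAULT_START_DAYS/DEFAULT_END_DAYS defaults above):
# current_start_end_dates(request)                -> (now - 2 days, now)
# current_start_end_dates(request, for_form=True) -> {'dt_start': ...,
#                                                     'dt_end': ...}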
| lgpl-3.0 | -1,085,294,113,683,170,700 | 29.527027 | 78 | 0.698539 | false | 3.815878 | false | false | false |
ajaygarg84/sugar | src/jarabe/frame/activitiestray.py | 1 | 30803 | # Copyright (C) 2006-2007 Red Hat, Inc.
# Copyright (C) 2008 One Laptop Per Child
# Copyright (C) 2010 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from gettext import gettext as _
import tempfile
import os
from gi.repository import GObject
from gi.repository import GConf
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
from sugar3.graphics import style
from sugar3.graphics.tray import HTray
from sugar3.graphics.xocolor import XoColor
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.graphics.icon import Icon, get_icon_file_name
from sugar3.graphics.palette import Palette
from sugar3.graphics.menuitem import MenuItem
from sugar3.graphics.palettemenu import PaletteMenuBox
from sugar3.graphics.palettemenu import PaletteMenuItem
from sugar3.graphics.palettemenu import PaletteMenuItemSeparator
from sugar3.datastore import datastore
from sugar3 import mime
from sugar3 import env
from jarabe.model import shell
from jarabe.model import invites
from jarabe.model import bundleregistry
from jarabe.model import filetransfer
from jarabe.view.palettes import JournalPalette, CurrentActivityPalette
from jarabe.view.pulsingicon import PulsingIcon
from jarabe.frame.frameinvoker import FrameWidgetInvoker
from jarabe.frame.notification import NotificationIcon
import jarabe.frame
class ActivityButton(RadioToolButton):
def __init__(self, home_activity, group):
RadioToolButton.__init__(self, group=group)
self.set_palette_invoker(FrameWidgetInvoker(self))
self.palette_invoker.cache_palette = False
self._home_activity = home_activity
self._notify_launch_hid = None
self._icon = PulsingIcon()
self._icon.props.base_color = home_activity.get_icon_color()
self._icon.props.pulse_color = \
XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
style.COLOR_TOOLBAR_GREY.get_svg()))
if home_activity.get_icon_path():
self._icon.props.file = home_activity.get_icon_path()
else:
self._icon.props.icon_name = 'image-missing'
self.set_icon_widget(self._icon)
self._icon.show()
if home_activity.props.launch_status == shell.Activity.LAUNCHING:
self._icon.props.pulsing = True
self._notify_launch_hid = home_activity.connect( \
'notify::launch-status', self.__notify_launch_status_cb)
elif home_activity.props.launch_status == shell.Activity.LAUNCH_FAILED:
self._on_failed_launch()
def create_palette(self):
if self._home_activity.is_journal():
palette = JournalPalette(self._home_activity)
else:
palette = CurrentActivityPalette(self._home_activity)
palette.connect('done', self.__palette_item_selected_cb)
palette.set_group_id('frame')
self.set_palette(palette)
def __palette_item_selected_cb(self, widget):
frame = jarabe.frame.get_view()
frame.hide()
def _on_failed_launch(self):
# TODO http://bugs.sugarlabs.org/ticket/2007
pass
def __notify_launch_status_cb(self, home_activity, pspec):
home_activity.disconnect(self._notify_launch_hid)
self._notify_launch_hid = None
if home_activity.props.launch_status == shell.Activity.LAUNCH_FAILED:
self._on_failed_launch()
else:
self._icon.props.pulsing = False
class InviteButton(ToolButton):
"""Invite to shared activity"""
__gsignals__ = {
'remove-invite': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, invite):
ToolButton.__init__(self)
self._invite = invite
self.connect('clicked', self.__clicked_cb)
self.connect('destroy', self.__destroy_cb)
bundle_registry = bundleregistry.get_registry()
bundle = bundle_registry.get_bundle(invite.get_bundle_id())
self._icon = Icon()
self._icon.props.xo_color = invite.get_color()
if bundle is not None:
self._icon.props.file = bundle.get_icon()
else:
self._icon.props.icon_name = 'image-missing'
self.set_icon_widget(self._icon)
self._icon.show()
palette = InvitePalette(invite)
palette.props.invoker = FrameWidgetInvoker(self)
palette.set_group_id('frame')
palette.connect('remove-invite', self.__remove_invite_cb)
self.set_palette(palette)
self._notif_icon = NotificationIcon()
self._notif_icon.connect('button-release-event',
self.__button_release_event_cb)
self._notif_icon.props.xo_color = invite.get_color()
if bundle is not None:
self._notif_icon.props.icon_filename = bundle.get_icon()
else:
self._notif_icon.props.icon_name = 'image-missing'
frame = jarabe.frame.get_view()
frame.add_notification(self._notif_icon, Gtk.CornerType.TOP_LEFT)
def __button_release_event_cb(self, icon, event):
if self._notif_icon is not None:
frame = jarabe.frame.get_view()
frame.remove_notification(self._notif_icon)
self._notif_icon = None
self._invite.join()
self.emit('remove-invite')
def __clicked_cb(self, button):
self.palette.popup(immediate=True, state=Palette.SECONDARY)
def __remove_invite_cb(self, palette):
self.emit('remove-invite')
def __destroy_cb(self, button):
if self._notif_icon is not None:
frame = jarabe.frame.get_view()
frame.remove_notification(self._notif_icon)
self._notif_icon = None
class InvitePalette(Palette):
"""Palette for frame or notification icon for invites."""
__gsignals__ = {
'remove-invite': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, invite):
Palette.__init__(self, '')
self._invite = invite
menu_item = MenuItem(_('Join'), icon_name='dialog-ok')
menu_item.connect('activate', self.__join_activate_cb)
self.menu.append(menu_item)
menu_item.show()
menu_item = MenuItem(_('Decline'), icon_name='dialog-cancel')
menu_item.connect('activate', self.__decline_activate_cb)
self.menu.append(menu_item)
menu_item.show()
bundle_id = invite.get_bundle_id()
registry = bundleregistry.get_registry()
self._bundle = registry.get_bundle(bundle_id)
if self._bundle:
name = self._bundle.get_name()
else:
name = bundle_id
self.set_primary_text(GLib.markup_escape_text(name))
def __join_activate_cb(self, menu_item):
self._invite.join()
self.emit('remove-invite')
def __decline_activate_cb(self, menu_item):
self.emit('remove-invite')
class ActivitiesTray(HTray):
def __init__(self):
HTray.__init__(self)
self._buttons = {}
self._invite_to_item = {}
self._freeze_button_clicks = False
self._home_model = shell.get_model()
self._home_model.connect('activity-added', self.__activity_added_cb)
self._home_model.connect('activity-removed',
self.__activity_removed_cb)
self._home_model.connect('active-activity-changed',
self.__activity_changed_cb)
self._home_model.connect('tabbing-activity-changed',
self.__tabbing_activity_changed_cb)
self._invites = invites.get_instance()
for invite in self._invites:
self._add_invite(invite)
self._invites.connect('invite-added', self.__invite_added_cb)
self._invites.connect('invite-removed', self.__invite_removed_cb)
filetransfer.new_file_transfer.connect(self.__new_file_transfer_cb)
def __activity_added_cb(self, home_model, home_activity):
logging.debug('__activity_added_cb: %r', home_activity)
if self.get_children():
group = self.get_children()[0]
else:
group = None
button = ActivityButton(home_activity, group)
self.add_item(button)
self._buttons[home_activity] = button
button.connect('clicked', self.__activity_clicked_cb, home_activity)
button.show()
def __activity_removed_cb(self, home_model, home_activity):
logging.debug('__activity_removed_cb: %r', home_activity)
button = self._buttons[home_activity]
self.remove_item(button)
del self._buttons[home_activity]
def _activate_activity(self, home_activity):
button = self._buttons[home_activity]
self._freeze_button_clicks = True
button.props.active = True
self._freeze_button_clicks = False
self.scroll_to_item(button)
# Redraw immediately.
# The widget may not be realized yet, and then there is no window.
x11_window = self.get_window()
if x11_window:
x11_window.process_updates(True)
def __activity_changed_cb(self, home_model, home_activity):
logging.debug('__activity_changed_cb: %r', home_activity)
# Only select the new activity, if there is no tabbing activity.
if home_model.get_tabbing_activity() is None:
self._activate_activity(home_activity)
def __tabbing_activity_changed_cb(self, home_model, home_activity):
logging.debug('__tabbing_activity_changed_cb: %r', home_activity)
# If the tabbing_activity is set to None just do nothing.
# The active activity will be updated a bit later (and it will
# be set to the activity that is currently selected).
if home_activity is None:
return
self._activate_activity(home_activity)
def __activity_clicked_cb(self, button, home_activity):
if not self._freeze_button_clicks and button.props.active:
logging.debug('ActivitiesTray.__activity_clicked_cb')
window = home_activity.get_window()
if window:
window.activate(Gtk.get_current_event_time())
frame = jarabe.frame.get_view()
frame.hide()
def __remove_invite_cb(self, icon, invite):
self._invites.remove_invite(invite)
def __invite_added_cb(self, invites_model, invite):
self._add_invite(invite)
def __invite_removed_cb(self, invites_model, invite):
self._remove_invite(invite)
def _add_invite(self, invite):
"""Add an invite"""
item = InviteButton(invite)
item.connect('remove-invite', self.__remove_invite_cb, invite)
self.add_item(item)
item.show()
self._invite_to_item[invite] = item
def _remove_invite(self, invite):
self.remove_item(self._invite_to_item[invite])
self._invite_to_item[invite].destroy()
del self._invite_to_item[invite]
def __new_file_transfer_cb(self, **kwargs):
file_transfer = kwargs['file_transfer']
logging.debug('__new_file_transfer_cb %r', file_transfer)
if isinstance(file_transfer, filetransfer.IncomingFileTransfer):
button = IncomingTransferButton(file_transfer)
elif isinstance(file_transfer, filetransfer.OutgoingFileTransfer):
button = OutgoingTransferButton(file_transfer)
self.add_item(button)
button.show()
class BaseTransferButton(ToolButton):
"""Button with a notification attached
"""
def __init__(self, file_transfer):
ToolButton.__init__(self)
self.file_transfer = file_transfer
file_transfer.connect('notify::state', self.__notify_state_cb)
icon = Icon()
self.props.icon_widget = icon
icon.show()
self.notif_icon = NotificationIcon()
self.notif_icon.connect('button-release-event',
self.__button_release_event_cb)
self.connect('clicked', self.__button_clicked_cb)
def __button_release_event_cb(self, icon, event):
if self.notif_icon is not None:
frame = jarabe.frame.get_view()
frame.remove_notification(self.notif_icon)
self.notif_icon = None
def __button_clicked_cb(self, button):
self.palette.popup(immediate=True, state=Palette.SECONDARY)
def remove(self):
frame = jarabe.frame.get_view()
frame.remove_notification(self.notif_icon)
self.props.parent.remove(self)
def __notify_state_cb(self, file_transfer, pspec):
logging.debug('_update state: %r %r', file_transfer.props.state,
file_transfer.reason_last_change)
if file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
if file_transfer.reason_last_change == \
filetransfer.FT_REASON_LOCAL_STOPPED:
self.remove()
class IncomingTransferButton(BaseTransferButton):
"""UI element representing an ongoing incoming file transfer
"""
def __init__(self, file_transfer):
BaseTransferButton.__init__(self, file_transfer)
self._ds_object = datastore.create()
file_transfer.connect('notify::state', self.__notify_state_cb)
file_transfer.connect('notify::transferred-bytes',
self.__notify_transferred_bytes_cb)
icons = Gio.content_type_get_icon(file_transfer.mime_type).props.names
icons.append('application-octet-stream')
for icon_name in icons:
icon_name = 'transfer-from-%s' % icon_name
file_name = get_icon_file_name(icon_name)
if file_name is not None:
self.props.icon_widget.props.icon_name = icon_name
self.notif_icon.props.icon_name = icon_name
break
icon_color = file_transfer.buddy.props.color
self.props.icon_widget.props.xo_color = icon_color
self.notif_icon.props.xo_color = icon_color
frame = jarabe.frame.get_view()
frame.add_notification(self.notif_icon,
Gtk.CornerType.TOP_LEFT)
def create_palette(self):
palette = IncomingTransferPalette(self.file_transfer)
palette.connect('dismiss-clicked', self.__dismiss_clicked_cb)
palette.props.invoker = FrameWidgetInvoker(self)
palette.set_group_id('frame')
return palette
def __notify_state_cb(self, file_transfer, pspec):
if file_transfer.props.state == filetransfer.FT_STATE_OPEN:
logging.debug('__notify_state_cb OPEN')
self._ds_object.metadata['title'] = file_transfer.title
self._ds_object.metadata['description'] = file_transfer.description
self._ds_object.metadata['progress'] = '0'
self._ds_object.metadata['keep'] = '0'
self._ds_object.metadata['buddies'] = ''
self._ds_object.metadata['preview'] = ''
self._ds_object.metadata['icon-color'] = \
file_transfer.buddy.props.color.to_string()
self._ds_object.metadata['mime_type'] = file_transfer.mime_type
elif file_transfer.props.state == filetransfer.FT_STATE_COMPLETED:
logging.debug('__notify_state_cb COMPLETED')
self._ds_object.metadata['progress'] = '100'
self._ds_object.file_path = file_transfer.destination_path
datastore.write(self._ds_object, transfer_ownership=True,
reply_handler=self.__reply_handler_cb,
error_handler=self.__error_handler_cb)
elif file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
logging.debug('__notify_state_cb CANCELLED')
object_id = self._ds_object.object_id
if object_id is not None:
self._ds_object.destroy()
datastore.delete(object_id)
self._ds_object = None
def __notify_transferred_bytes_cb(self, file_transfer, pspec):
        progress = file_transfer.props.transferred_bytes / \
            float(file_transfer.file_size)
self._ds_object.metadata['progress'] = str(progress * 100)
datastore.write(self._ds_object, update_mtime=False)
def __reply_handler_cb(self):
logging.debug('__reply_handler_cb %r', self._ds_object.object_id)
def __error_handler_cb(self, error):
logging.debug('__error_handler_cb %r %s', self._ds_object.object_id,
error)
def __dismiss_clicked_cb(self, palette):
self.remove()
class OutgoingTransferButton(BaseTransferButton):
"""UI element representing an ongoing outgoing file transfer
"""
def __init__(self, file_transfer):
BaseTransferButton.__init__(self, file_transfer)
icons = Gio.content_type_get_icon(file_transfer.mime_type).props.names
icons.append('application-octet-stream')
for icon_name in icons:
icon_name = 'transfer-to-%s' % icon_name
file_name = get_icon_file_name(icon_name)
if file_name is not None:
self.props.icon_widget.props.icon_name = icon_name
self.notif_icon.props.icon_name = icon_name
break
client = GConf.Client.get_default()
icon_color = XoColor(client.get_string('/desktop/sugar/user/color'))
self.props.icon_widget.props.xo_color = icon_color
self.notif_icon.props.xo_color = icon_color
frame = jarabe.frame.get_view()
frame.add_notification(self.notif_icon,
Gtk.CornerType.TOP_LEFT)
def create_palette(self):
palette = OutgoingTransferPalette(self.file_transfer)
palette.connect('dismiss-clicked', self.__dismiss_clicked_cb)
palette.props.invoker = FrameWidgetInvoker(self)
palette.set_group_id('frame')
return palette
def __dismiss_clicked_cb(self, palette):
self.remove()
class BaseTransferPalette(Palette):
"""Base palette class for frame or notification icon for file transfers
"""
__gtype_name__ = 'SugarBaseTransferPalette'
__gsignals__ = {
'dismiss-clicked': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self, file_transfer):
Palette.__init__(self, GLib.markup_escape_text(file_transfer.title))
self.file_transfer = file_transfer
self.progress_bar = None
self.progress_label = None
self._notify_transferred_bytes_handler = None
self.connect('popup', self.__popup_cb)
self.connect('popdown', self.__popdown_cb)
def __popup_cb(self, palette):
self.update_progress()
self._notify_transferred_bytes_handler = \
self.file_transfer.connect('notify::transferred_bytes',
self.__notify_transferred_bytes_cb)
def __popdown_cb(self, palette):
if self._notify_transferred_bytes_handler is not None:
self.file_transfer.disconnect(
self._notify_transferred_bytes_handler)
self._notify_transferred_bytes_handler = None
def __notify_transferred_bytes_cb(self, file_transfer, pspec):
self.update_progress()
def _format_size(self, size):
if size < 1024:
return _('%dB') % size
elif size < 1048576:
return _('%dKB') % (size / 1024)
else:
return _('%dMB') % (size / 1048576)
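    # Illustrative outputs (added note): _format_size(512) -> '512B',
    # _format_size(2048) -> '2KB', _format_size(3 * 1048576) -> '3MB'.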
def update_progress(self):
logging.debug('update_progress: %r',
self.file_transfer.props.transferred_bytes)
if self.progress_bar is None:
return
self.progress_bar.props.fraction = \
self.file_transfer.props.transferred_bytes / \
float(self.file_transfer.file_size)
logging.debug('update_progress: %r', self.progress_bar.props.fraction)
transferred = self._format_size(
self.file_transfer.props.transferred_bytes)
total = self._format_size(self.file_transfer.file_size)
# TRANS: file transfer, bytes transferred, e.g. 128 of 1024
self.progress_label.props.label = _('%s of %s') % (transferred, total)
class IncomingTransferPalette(BaseTransferPalette):
"""Palette for frame or notification icon for incoming file transfers
"""
__gtype_name__ = 'SugarIncomingTransferPalette'
def __init__(self, file_transfer):
BaseTransferPalette.__init__(self, file_transfer)
self.file_transfer.connect('notify::state', self.__notify_state_cb)
nick = str(self.file_transfer.buddy.props.nick)
label = GLib.markup_escape_text(_('Transfer from %s') % (nick,))
self.props.secondary_text = label
self._update()
def __notify_state_cb(self, file_transfer, pspec):
self._update()
def _update(self):
box = PaletteMenuBox()
self.set_content(box)
box.show()
logging.debug('_update state: %r', self.file_transfer.props.state)
if self.file_transfer.props.state == filetransfer.FT_STATE_PENDING:
menu_item = PaletteMenuItem(_('Accept'))
icon = Icon(icon_name='dialog-ok', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__accept_activate_cb)
box.append_item(menu_item)
menu_item.show()
menu_item = PaletteMenuItem(_('Decline'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__decline_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
if self.file_transfer.description:
label = Gtk.Label(label=self.file_transfer.description)
inner_box.add(label)
label.show()
mime_type = self.file_transfer.mime_type
type_description = mime.get_mime_description(mime_type)
size = self._format_size(self.file_transfer.file_size)
label = Gtk.Label(label='%s (%s)' % (size, type_description))
inner_box.add(label)
label.show()
elif self.file_transfer.props.state in \
[filetransfer.FT_STATE_ACCEPTED, filetransfer.FT_STATE_OPEN]:
menu_item = PaletteMenuItem(_('Cancel'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__cancel_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
self.progress_bar = Gtk.ProgressBar()
inner_box.add(self.progress_bar)
self.progress_bar.show()
self.progress_label = Gtk.Label(label='')
inner_box.add(self.progress_label)
self.progress_label.show()
self.update_progress()
elif self.file_transfer.props.state == filetransfer.FT_STATE_COMPLETED:
menu_item = PaletteMenuItem(_('Dismiss'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__dismiss_activate_cb)
box.append_item(menu_item)
menu_item.show()
self.update_progress()
elif self.file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
if self.file_transfer.reason_last_change == \
filetransfer.FT_REASON_REMOTE_STOPPED:
menu_item = PaletteMenuItem(_('Dismiss'))
icon = Icon(icon_name='dialog-cancel',
icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__dismiss_activate_cb)
box.append_item(menu_item)
menu_item.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
text = _('The other participant canceled the file transfer')
label = Gtk.Label(label=text)
inner_box.add(label)
label.show()
def __accept_activate_cb(self, menu_item):
        # TODO: figure out the best place to get rid of that temp file
extension = mime.get_primary_extension(self.file_transfer.mime_type)
if extension is None:
extension = '.bin'
fd, file_path = tempfile.mkstemp(suffix=extension,
prefix=self._sanitize(self.file_transfer.title),
dir=os.path.join(env.get_profile_path(), 'data'))
os.close(fd)
os.unlink(file_path)
self.file_transfer.accept(file_path)
def _sanitize(self, file_name):
file_name = file_name.replace('/', '_')
file_name = file_name.replace('.', '_')
file_name = file_name.replace('?', '_')
return file_name
def __decline_activate_cb(self, menu_item):
self.file_transfer.cancel()
def __cancel_activate_cb(self, menu_item):
self.file_transfer.cancel()
def __dismiss_activate_cb(self, menu_item):
self.emit('dismiss-clicked')
class OutgoingTransferPalette(BaseTransferPalette):
"""Palette for frame or notification icon for outgoing file transfers
"""
__gtype_name__ = 'SugarOutgoingTransferPalette'
def __init__(self, file_transfer):
BaseTransferPalette.__init__(self, file_transfer)
self.progress_bar = None
self.progress_label = None
self.file_transfer.connect('notify::state', self.__notify_state_cb)
nick = str(file_transfer.buddy.props.nick)
label = GLib.markup_escape_text(_('Transfer to %s') % (nick,))
self.props.secondary_text = label
self._update()
def __notify_state_cb(self, file_transfer, pspec):
self._update()
def _update(self):
new_state = self.file_transfer.props.state
logging.debug('_update state: %r', new_state)
box = PaletteMenuBox()
self.set_content(box)
box.show()
if new_state == filetransfer.FT_STATE_PENDING:
menu_item = PaletteMenuItem(_('Cancel'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__cancel_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
if self.file_transfer.description:
label = Gtk.Label(label=self.file_transfer.description)
inner_box.add(label)
label.show()
mime_type = self.file_transfer.mime_type
type_description = mime.get_mime_description(mime_type)
size = self._format_size(self.file_transfer.file_size)
label = Gtk.Label(label='%s (%s)' % (size, type_description))
inner_box.add(label)
label.show()
elif new_state in [filetransfer.FT_STATE_ACCEPTED,
filetransfer.FT_STATE_OPEN]:
menu_item = PaletteMenuItem(_('Cancel'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__cancel_activate_cb)
box.append_item(menu_item)
menu_item.show()
separator = PaletteMenuItemSeparator()
box.append_item(separator)
separator.show()
inner_box = Gtk.VBox()
inner_box.set_spacing(style.DEFAULT_PADDING)
box.append_item(inner_box, vertical_padding=0)
inner_box.show()
self.progress_bar = Gtk.ProgressBar()
inner_box.add(self.progress_bar)
self.progress_bar.show()
self.progress_label = Gtk.Label(label='')
inner_box.add(self.progress_label)
self.progress_label.show()
self.update_progress()
elif new_state in [filetransfer.FT_STATE_COMPLETED,
filetransfer.FT_STATE_CANCELLED]:
menu_item = PaletteMenuItem(_('Dismiss'))
icon = Icon(icon_name='dialog-cancel', icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
icon.show()
menu_item.connect('activate', self.__dismiss_activate_cb)
box.append_item(menu_item)
menu_item.show()
self.update_progress()
def __cancel_activate_cb(self, menu_item):
self.file_transfer.cancel()
def __dismiss_activate_cb(self, menu_item):
self.emit('dismiss-clicked')
| gpl-2.0 | -8,059,462,500,647,446,000 | 36.610501 | 79 | 0.610103 | false | 3.875079 | false | false | false |
0xbc/pyvex | setup.py | 1 | 5029 | # pylint: disable=no-name-in-module,import-error
import os
import urllib2
import subprocess
import sys
import shutil
import glob
import tarfile
import multiprocessing
import platform
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
from distutils.util import get_platform
from distutils.errors import LibError
from distutils.command.build import build as _build
if sys.platform in ('win32', 'cygwin'):
LIBRARY_FILE = 'pyvex.dll'
STATIC_LIBRARY_FILE = 'pyvex.lib'
elif sys.platform == 'darwin':
LIBRARY_FILE = "libpyvex.dylib"
STATIC_LIBRARY_FILE = 'libpyvex.a'
else:
LIBRARY_FILE = "libpyvex.so"
STATIC_LIBRARY_FILE = 'libpyvex.a'
VEX_LIB_NAME = "vex" # can also be vex-amd64-linux
VEX_PATH = os.path.join('..', 'vex')
if not os.path.exists(VEX_PATH):
VEX_URL = 'https://github.com/angr/vex/archive/master.tar.gz'
with open('master.tar.gz', 'wb') as v:
v.write(urllib2.urlopen(VEX_URL).read())
with tarfile.open('master.tar.gz') as tar:
tar.extractall()
VEX_PATH='vex-master'
def _build_vex():
e = os.environ.copy()
e['MULTIARCH'] = '1'
e['DEBUG'] = '1'
cmd1 = ['nmake', '/f', 'Makefile-msvc', 'all']
cmd2 = ['make', '-f', 'Makefile-gcc', '-j', str(multiprocessing.cpu_count()), 'all']
for cmd in (cmd1, cmd2):
try:
if subprocess.call(cmd, cwd=VEX_PATH, env=e) == 0:
break
except OSError:
continue
else:
raise LibError("Unable to build libVEX.")
def _build_pyvex():
e = os.environ.copy()
e['VEX_LIB_PATH'] = os.path.join('..', VEX_PATH)
e['VEX_INCLUDE_PATH'] = os.path.join('..', VEX_PATH, 'pub')
e['VEX_LIB_FILE'] = os.path.join('..', VEX_PATH, 'libvex.lib')
cmd1 = ['nmake', '/f', 'Makefile-msvc']
cmd2 = ['make', '-j', str(multiprocessing.cpu_count())]
for cmd in (cmd1, cmd2):
try:
if subprocess.call(cmd, cwd='pyvex_c', env=e) == 0:
break
        except OSError:
continue
else:
raise LibError("Unable to build libpyvex.")
def _shuffle_files():
shutil.rmtree('pyvex/lib', ignore_errors=True)
shutil.rmtree('pyvex/include', ignore_errors=True)
os.mkdir('pyvex/lib')
os.mkdir('pyvex/include')
shutil.copy(os.path.join('pyvex_c', LIBRARY_FILE), 'pyvex/lib')
shutil.copy(os.path.join('pyvex_c', STATIC_LIBRARY_FILE), 'pyvex/lib')
shutil.copy('pyvex_c/pyvex.h', 'pyvex/include')
for f in glob.glob(os.path.join(VEX_PATH, 'pub', '*')):
shutil.copy(f, 'pyvex/include')
def _build_ffi():
import make_ffi
try:
make_ffi.doit(os.path.join(VEX_PATH,'pub'))
except Exception as e:
print repr(e)
raise
class build(_build):
def run(self):
self.execute(_build_vex, (), msg="Building libVEX")
self.execute(_build_pyvex, (), msg="Building libpyvex")
self.execute(_shuffle_files, (), msg="Copying libraries and headers")
self.execute(_build_ffi, (), msg="Creating CFFI defs file")
_build.run(self)
cmdclass = { 'build': build }
try:
from setuptools.command.develop import develop as _develop
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class develop(_develop):
def run(self):
self.execute(_build_vex, (), msg="Building libVEX")
self.execute(_build_pyvex, (), msg="Building libpyvex")
self.execute(_shuffle_files, (), msg="Copying libraries and headers")
self.execute(_build_ffi, (), msg="Creating CFFI defs file")
_develop.run(self)
cmdclass['develop'] = develop
class bdist_egg(_bdist_egg):
def run(self):
self.run_command('build')
_bdist_egg.run(self)
cmdclass['bdist_egg'] = bdist_egg
except ImportError:
print "Proper 'develop' support unavailable."
if 'bdist_wheel' in sys.argv and '--plat-name' not in sys.argv:
sys.argv.append('--plat-name')
name = get_platform()
if 'linux' in name:
# linux_* platform tags are disallowed because the python ecosystem is fubar
# linux builds should be built in the centos 5 vm for maximum compatibility
sys.argv.append('manylinux1_' + platform.machine())
else:
# https://www.python.org/dev/peps/pep-0425/
sys.argv.append(name.replace('.', '_').replace('-', '_'))
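
# Typical invocations (illustrative note, not from the original file):
#   python setup.py build        # builds libVEX and libpyvex, then the package
#   python setup.py develop      # editable install; runs the same native builds
#   python setup.py bdist_wheel  # platform tag is forced as handled above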
setup(
name="pyvex", version='6.7.3.26', description="A Python interface to libVEX and VEX IR.",
packages=['pyvex', 'pyvex.lift', 'pyvex.lift.util'],
cmdclass=cmdclass,
install_requires=[ 'pycparser', 'cffi>=1.0.3', 'archinfo' ],
setup_requires=[ 'pycparser', 'cffi>=1.0.3' ],
include_package_data=True,
package_data={
'pyvex': ['lib/*', 'include/*']
}
)
| bsd-2-clause | 5,826,168,124,604,355,000 | 32.751678 | 140 | 0.616624 | false | 3.302035 | false | false | false |
eonpatapon/contrail-controller | src/config/fabric-ansible/job_manager/job_messages.py | 1 | 9479 | class MsgBundle(object):
JOB_TEMPLATE_MISSING = 1,
JOB_EXECUTION_ID_MISSING = 2,
JOB_SUMMARY_MESSAGE_HDR = 3,
JOB_RESULT_STATUS_NONE = 4,
JOB_MULTI_DEVICE_FAILED_MESSAGE_HDR = 5,
JOB_SINGLE_DEVICE_FAILED_MESSAGE_HDR = 6,
PLAYBOOK_RESULTS_MESSAGE = 7,
PLAYBOOK_EXIT_WITH_ERROR = 8,
PLAYBOOK_RETURN_WITH_ERROR = 9,
NO_PLAYBOOK_INPUT_DATA = 10,
SANDESH_INITIALIZATION_TIMEOUT_ERROR = 11,
INPUT_SCHEMA_INPUT_NOT_FOUND = 12,
DEVICE_JSON_NOT_FOUND = 13,
NO_DEVICE_DATA_FOUND = 14,
NO_CREDENTIALS_FOUND = 15,
INVALID_SCHEMA = 16,
SEND_JOB_LOG_ERROR = 17,
SEND_JOB_EXC_UVE_ERROR = 18,
PLAYBOOK_INPUT_PARSING_ERROR = 19,
PLAYBOOK_EXECUTE_ERROR = 20,
CREATE_JOB_SUMMARY_ERROR = 21,
DEVICE_VENDOR_FAMILY_MISSING = 22,
READ_JOB_TEMPLATE_ERROR = 23,
GET_PLAYBOOK_INFO_ERROR = 24,
PLAYBOOK_NOT_FOUND = 25,
PLAYBOOK_INFO_DEVICE_MISMATCH = 26,
RUN_PLAYBOOK_PROCESS_ERROR = 27,
RUN_PLAYBOOK_ERROR = 28,
SEND_PROUTER_OBJECT_LOG_ERROR = 29,
CLOSE_SANDESH_EXCEPTION = 30,
RUN_PLAYBOOK_PROCESS_TIMEOUT = 31,
PLAYBOOK_EXECUTION_COMPLETE = 32,
START_JOB_MESSAGE = 33,
VNC_INITIALIZATION_ERROR = 34,
JOB_ERROR = 35,
JOB_EXECUTION_COMPLETE = 36,
START_EXE_PB_MSG = 37,
STOP_EXE_PB_MSG = 38,
JOB_EXC_REC_HDR = 39,
EXC_JOB_ERR_HDR = 40,
PLAYBOOK_STATUS_FAILED = 41,
PLAYBOOK_OUTPUT_MISSING = 42,
EMPTY_DEVICE_LIST = 43,
PRODUCT_NAME_MISSING = 44
_msgs = {
'en': {
JOB_TEMPLATE_MISSING: 'job_template_id is missing '
'in the job input',
JOB_EXECUTION_ID_MISSING: 'job_execution_id is missing'
' in the job input',
JOB_SUMMARY_MESSAGE_HDR: 'Job summary: ',
JOB_RESULT_STATUS_NONE: 'Error in getting the '
'job completion '
'status after job execution. \n',
        JOB_MULTI_DEVICE_FAILED_MESSAGE_HDR: 'Job failed '
                                             'for devices: ',
JOB_SINGLE_DEVICE_FAILED_MESSAGE_HDR: 'Job failed. \n',
PLAYBOOK_RESULTS_MESSAGE: 'Detailed job results: \n',
PLAYBOOK_EXIT_WITH_ERROR: 'Playbook "{playbook_uri}" exited'
' with error.',
PLAYBOOK_RETURN_WITH_ERROR: 'Playbook returned '
'with error',
PLAYBOOK_STATUS_FAILED: 'Playbook completed with status Failure.',
        PLAYBOOK_OUTPUT_MISSING: 'Playbook completed without sending the '
                                 'output with status details.',
NO_PLAYBOOK_INPUT_DATA: 'Playbook input data'
' is not passed. '
'Aborting execution.',
SANDESH_INITIALIZATION_TIMEOUT_ERROR: 'Sandesh '
'initialization '
'timeout after 15s',
        INPUT_SCHEMA_INPUT_NOT_FOUND: 'Required: input parameter'
' in execute-job',
DEVICE_JSON_NOT_FOUND: 'No Device details found for'
' any device',
NO_DEVICE_DATA_FOUND: 'Device details for the device '
'"{device_id}" not found',
NO_CREDENTIALS_FOUND: 'Discovered device "{device_id}" '
'does not have credentials',
INVALID_SCHEMA: 'Error while validating input schema'
' for job template "{job_template_id}" '
': {exc_obj.message}',
SEND_JOB_LOG_ERROR: 'Error while creating the job'
' log for job template '
'"{job_template_fqname}" '
'and execution id "{job_execution_id}"'
' : {exc_msg}',
SEND_JOB_EXC_UVE_ERROR: 'Error while sending the job'
' execution UVE for job '
'template "{job_template_fqname}"'
' and execution id '
'"{job_execution_id}" : {exc_msg}',
        PLAYBOOK_INPUT_PARSING_ERROR: 'Exiting due to playbook'
' input parsing error:'
' {exc_msg}',
PLAYBOOK_EXECUTE_ERROR: 'Exception in playbook process'
' for playbook "{playbook_uri}" '
'(exec_id: {execution_id}): {exc_msg} ',
CREATE_JOB_SUMMARY_ERROR: 'Error while generating the'
' job summary message'
' : {exc_msg}',
DEVICE_VENDOR_FAMILY_MISSING: 'device_vendor or '
'device_family not found'
' for "{device_id}"',
        PRODUCT_NAME_MISSING: 'device_product name not found'
' for "{device_id}"',
READ_JOB_TEMPLATE_ERROR: 'Error while reading the '
'job template "{job_template_id}"'
' from database',
GET_PLAYBOOK_INFO_ERROR: 'Error while getting the playbook'
' information from the job'
' template "{job_template_id}"'
' : {exc_msg}',
PLAYBOOK_NOT_FOUND: 'Playbook "{playbook_uri}" '
'does not exist',
PLAYBOOK_INFO_DEVICE_MISMATCH: 'Playbook info not found'
' in the job template'
' for "{device_vendor}"'
' and "{device_family}"',
RUN_PLAYBOOK_PROCESS_ERROR: 'Exception in executing '
'the playbook '
'for "{playbook_uri}"'
' : {exc_msg}',
RUN_PLAYBOOK_ERROR: 'Error while executing the playbook'
' "{playbook_uri}" : {exc_msg}',
SEND_PROUTER_OBJECT_LOG_ERROR: 'Error while creating '
'prouter object log'
' for router '
'"{prouter_fqname}" '
'and execution id '
'"{job_execution_id}"'
' : {exc_msg}',
CLOSE_SANDESH_EXCEPTION: 'Error in confirming the'
' SANDESH message send operation.'
' The Job Logs might '
'not be complete.',
RUN_PLAYBOOK_PROCESS_TIMEOUT: 'Timeout while executing'
' the playbook '
'for "{playbook_uri}" : '
'{exc_msg}. Playbook'
' process is aborted.',
PLAYBOOK_EXECUTION_COMPLETE: 'Completed playbook execution'
' for job template '
'"{job_template_name}" with '
'execution'
' id "{job_execution_id}"',
START_JOB_MESSAGE: 'Starting execution for job '
'template "{job_template_name}"'
' and execution id "{job_execution_id}"',
VNC_INITIALIZATION_ERROR: 'Exiting due to vnc api '
'initialization error: {exc_msg}',
JOB_ERROR: 'Exiting job due to error: {exc_msg} ',
JOB_EXECUTION_COMPLETE: 'Job execution completed '
'successfully.',
START_EXE_PB_MSG: 'Starting to execute the '
'playbook "{playbook_name}"',
STOP_EXE_PB_MSG: 'Finished executing the '
'playbook "{playbook_name}"',
        JOB_EXC_REC_HDR: 'Job Exception received: ',
EXC_JOB_ERR_HDR: 'Error while executing job ',
EMPTY_DEVICE_LIST: 'Need to pass a valid device list '
}
}
@classmethod
def getMessage(cls, msg_id, locale='en', *args, **kwargs):
if locale not in MsgBundle._msgs:
return 'Failed to construct job message due to invalid '\
'locale: %s' % locale
if msg_id not in MsgBundle._msgs[locale]:
return 'Failed to construct job message due to invalid '\
'message id: %s' % msg_id
try:
return MsgBundle._msgs[locale][msg_id].format(*args, **kwargs)
except KeyError as ex:
return 'Failed to construct job message due to missing message '\
'arguments: %s' % ex.message
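

# Illustrative usage (added note; message ids index the bundle above):
#   MsgBundle.getMessage(MsgBundle.PLAYBOOK_NOT_FOUND,
#                        playbook_uri='/opt/pb/add_device.yml')
# -> 'Playbook "/opt/pb/add_device.yml" does not exist'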
| apache-2.0 | -8,469,327,516,854,512,000 | 51.661111 | 78 | 0.450364 | false | 4.648847 | false | false | false |
vitchyr/rlkit | rlkit/torch/sac/sac.py | 1 | 8191 | from collections import OrderedDict, namedtuple
from typing import Tuple
import numpy as np
import torch
import torch.optim as optim
from rlkit.core.loss import LossFunction, LossStatistics
from torch import nn as nn
import rlkit.torch.pytorch_util as ptu
from rlkit.core.eval_util import create_stats_ordered_dict
from rlkit.torch.torch_rl_algorithm import TorchTrainer
from rlkit.core.logging import add_prefix
import gtimer as gt
SACLosses = namedtuple(
'SACLosses',
'policy_loss qf1_loss qf2_loss alpha_loss',
)
class SACTrainer(TorchTrainer, LossFunction):
def __init__(
self,
env,
policy,
qf1,
qf2,
target_qf1,
target_qf2,
discount=0.99,
reward_scale=1.0,
policy_lr=1e-3,
qf_lr=1e-3,
optimizer_class=optim.Adam,
soft_target_tau=1e-2,
target_update_period=1,
plotter=None,
render_eval_paths=False,
use_automatic_entropy_tuning=True,
target_entropy=None,
):
super().__init__()
self.env = env
self.policy = policy
self.qf1 = qf1
self.qf2 = qf2
self.target_qf1 = target_qf1
self.target_qf2 = target_qf2
self.soft_target_tau = soft_target_tau
self.target_update_period = target_update_period
self.use_automatic_entropy_tuning = use_automatic_entropy_tuning
if self.use_automatic_entropy_tuning:
if target_entropy is None:
# Use heuristic value from SAC paper
self.target_entropy = -np.prod(
self.env.action_space.shape).item()
else:
self.target_entropy = target_entropy
self.log_alpha = ptu.zeros(1, requires_grad=True)
self.alpha_optimizer = optimizer_class(
[self.log_alpha],
lr=policy_lr,
)
self.plotter = plotter
self.render_eval_paths = render_eval_paths
self.qf_criterion = nn.MSELoss()
self.vf_criterion = nn.MSELoss()
self.policy_optimizer = optimizer_class(
self.policy.parameters(),
lr=policy_lr,
)
self.qf1_optimizer = optimizer_class(
self.qf1.parameters(),
lr=qf_lr,
)
self.qf2_optimizer = optimizer_class(
self.qf2.parameters(),
lr=qf_lr,
)
self.discount = discount
self.reward_scale = reward_scale
self._n_train_steps_total = 0
self._need_to_update_eval_statistics = True
self.eval_statistics = OrderedDict()
def train_from_torch(self, batch):
gt.blank_stamp()
losses, stats = self.compute_loss(
batch,
skip_statistics=not self._need_to_update_eval_statistics,
)
"""
Update networks
"""
if self.use_automatic_entropy_tuning:
self.alpha_optimizer.zero_grad()
losses.alpha_loss.backward()
self.alpha_optimizer.step()
self.policy_optimizer.zero_grad()
losses.policy_loss.backward()
self.policy_optimizer.step()
self.qf1_optimizer.zero_grad()
losses.qf1_loss.backward()
self.qf1_optimizer.step()
self.qf2_optimizer.zero_grad()
losses.qf2_loss.backward()
self.qf2_optimizer.step()
self._n_train_steps_total += 1
self.try_update_target_networks()
if self._need_to_update_eval_statistics:
self.eval_statistics = stats
# Compute statistics using only one batch per epoch
self._need_to_update_eval_statistics = False
gt.stamp('sac training', unique=False)
def try_update_target_networks(self):
if self._n_train_steps_total % self.target_update_period == 0:
self.update_target_networks()
def update_target_networks(self):
ptu.soft_update_from_to(
self.qf1, self.target_qf1, self.soft_target_tau
)
ptu.soft_update_from_to(
self.qf2, self.target_qf2, self.soft_target_tau
)
def compute_loss(
self,
batch,
skip_statistics=False,
) -> Tuple[SACLosses, LossStatistics]:
rewards = batch['rewards']
terminals = batch['terminals']
obs = batch['observations']
actions = batch['actions']
next_obs = batch['next_observations']
"""
Policy and Alpha Loss
"""
dist = self.policy(obs)
new_obs_actions, log_pi = dist.rsample_and_logprob()
log_pi = log_pi.unsqueeze(-1)
if self.use_automatic_entropy_tuning:
alpha_loss = -(self.log_alpha * (log_pi + self.target_entropy).detach()).mean()
alpha = self.log_alpha.exp()
else:
alpha_loss = 0
alpha = 1
q_new_actions = torch.min(
self.qf1(obs, new_obs_actions),
self.qf2(obs, new_obs_actions),
)
policy_loss = (alpha*log_pi - q_new_actions).mean()
"""
QF Loss
"""
q1_pred = self.qf1(obs, actions)
q2_pred = self.qf2(obs, actions)
next_dist = self.policy(next_obs)
new_next_actions, new_log_pi = next_dist.rsample_and_logprob()
new_log_pi = new_log_pi.unsqueeze(-1)
target_q_values = torch.min(
self.target_qf1(next_obs, new_next_actions),
self.target_qf2(next_obs, new_next_actions),
) - alpha * new_log_pi
q_target = self.reward_scale * rewards + (1. - terminals) * self.discount * target_q_values
qf1_loss = self.qf_criterion(q1_pred, q_target.detach())
qf2_loss = self.qf_criterion(q2_pred, q_target.detach())
"""
Save some statistics for eval
"""
eval_statistics = OrderedDict()
if not skip_statistics:
eval_statistics['QF1 Loss'] = np.mean(ptu.get_numpy(qf1_loss))
eval_statistics['QF2 Loss'] = np.mean(ptu.get_numpy(qf2_loss))
eval_statistics['Policy Loss'] = np.mean(ptu.get_numpy(
policy_loss
))
eval_statistics.update(create_stats_ordered_dict(
'Q1 Predictions',
ptu.get_numpy(q1_pred),
))
eval_statistics.update(create_stats_ordered_dict(
'Q2 Predictions',
ptu.get_numpy(q2_pred),
))
eval_statistics.update(create_stats_ordered_dict(
'Q Targets',
ptu.get_numpy(q_target),
))
eval_statistics.update(create_stats_ordered_dict(
'Log Pis',
ptu.get_numpy(log_pi),
))
policy_statistics = add_prefix(dist.get_diagnostics(), "policy/")
eval_statistics.update(policy_statistics)
if self.use_automatic_entropy_tuning:
eval_statistics['Alpha'] = alpha.item()
eval_statistics['Alpha Loss'] = alpha_loss.item()
loss = SACLosses(
policy_loss=policy_loss,
qf1_loss=qf1_loss,
qf2_loss=qf2_loss,
alpha_loss=alpha_loss,
)
return loss, eval_statistics
def get_diagnostics(self):
stats = super().get_diagnostics()
stats.update(self.eval_statistics)
return stats
def end_epoch(self, epoch):
self._need_to_update_eval_statistics = True
@property
def networks(self):
return [
self.policy,
self.qf1,
self.qf2,
self.target_qf1,
self.target_qf2,
]
@property
def optimizers(self):
return [
self.alpha_optimizer,
self.qf1_optimizer,
self.qf2_optimizer,
self.policy_optimizer,
]
def get_snapshot(self):
return dict(
policy=self.policy,
qf1=self.qf1,
qf2=self.qf2,
target_qf1=self.target_qf1,
target_qf2=self.target_qf2,
)
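

# Illustrative wiring (added sketch; all names below are hypothetical):
#   qf1/qf2 and their targets are Q-networks, ``policy`` is a reparameterizable
#   (e.g. tanh-Gaussian) policy over ``env.action_space``:
#
#   trainer = SACTrainer(env=env, policy=policy, qf1=qf1, qf2=qf2,
#                        target_qf1=target_qf1, target_qf2=target_qf2,
#                        discount=0.99, soft_target_tau=5e-3,
#                        use_automatic_entropy_tuning=True)
#   trainer.train_from_torch(batch)  # batch: dict of torch tensors on device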
| mit | -7,586,187,944,391,447,000 | 30.026515 | 99 | 0.553534 | false | 3.718112 | false | false | false |
medularis/py-star | py_star/astemu.py | 1 | 4712 | from __future__ import absolute_import, print_function, unicode_literals
from os import fork, kill, waitpid
from signal import SIGTERM
import socket
from time import sleep
from . import compat_six as six
class Event(dict):
""" Events are encoded as dicts with a header fieldname to
content-list map. Normally (for all typical asterisk events) the
content-list only has one element. For multiple elements
multiple lines with the same header (but different content) are
sent. This tests cases where asterisk events contain multiple
instances of the same header.
The key 'CONTENT' is special, it denotes text that is appended
to an event (e.g. for testing the output of the command action)
"""
sort_order = dict(
(x, n) for n, x in enumerate((
'Event',
'Response',
'Username',
'Privilege',
'Secret',
'Command',
'Channel',
'ChannelState',
'ChannelStateDesc',
'CallerIDNum',
'CallerIDName',
'AccountCode',
'Context',
'Exten',
'Reason',
'Uniqueid',
'ActionID',
'OldAccountCode',
'Cause',
'Cause-txt',
))
)
    sort_order['CONTENT'] = 100000
def sort(self, x):
return self.sort_order.get(x[0], 10000)
def as_string(self, id):
ret = []
if 'Response' in self:
self ['ActionID'] = [id]
for k,v in sorted(self.items(), key=self.sort):
if k == 'CONTENT':
ret.append(v)
            else:
if isinstance(v, six.string_types):
ret.append (": ".join ((k, v)))
else:
for x in v:
ret.append (": ".join ((k, x)))
ret.append ('')
ret.append ('')
return '\r\n'.join (ret).encode('utf-8')
@property
def name(self):
return self.get('Event','')
@property
def headers(self):
return self
class AsteriskEmu(object):
""" Emulator for asterisk management interface.
Used for unittests of :mod:`py_star.manager`.
Now factored into a standalone module for others to use in
unittests of programs that build on :mod:`py_star.manager`.
By default let the operating system decide the port number to
bind to, resulting port is stored in self.port.
"""
default_events = dict(
Login=(Event(Response=('Success', ),
Message=('Authentication accepted', )),),
Logoff=(Event(Response=('Goodbye', ),
Message=('Thanks for all the fish.', )),)
)
def __init__(self, chatscript, port = 0):
s = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
s.bind(('localhost', port))
s.listen(1)
pid = fork()
if not pid:
# won't return
self.asterisk_emu(s, chatscript)
self.childpid = pid
host, self.port = s.getsockname()
s.close()
def asterisk_emu(self, sock, chatscript):
""" Emulate asterisk management interface on a socket.
Chatscript is a dict of command names to event list mapping.
The event list contains events to send when the given
command is recognized.
"""
while True:
conn, addr = sock.accept()
f = conn.makefile('rwb')
conn.close()
f.write('Asterisk Call Manager/1.1\r\n'.encode('utf-8'))
f.flush()
cmd = lastid = ''
try:
for l in f:
l = l.decode('utf-8')
if l.startswith ('ActionID:'):
lastid = l.split(':', 1)[1].strip()
elif l.startswith ('Action:'):
cmd = l.split(':', 1)[1].strip()
elif not l.strip():
for d in chatscript, self.default_events:
if cmd in d:
for event in d[cmd]:
f.write(event.as_string(id = lastid))
f.flush()
if cmd == 'Logoff':
f.close()
break
except:
pass
sleep(10000) # wait for being killed
def close(self):
if self.childpid:
kill(self.childpid, SIGTERM)
waitpid(self.childpid, 0)
self.childpid = None
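

# Illustrative use in a test (added sketch; the chatscript is hypothetical):
#   emu = AsteriskEmu({'Ping': (Event(Response=('Success',),
#                                     Ping=('Pong',)),)})
#   # ... connect an AMI client to ('localhost', emu.port) and assert replies
#   emu.close()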
| bsd-3-clause | 8,920,116,669,416,906,000 | 32.41844 | 73 | 0.493633 | false | 4.436911 | false | false | false |
cherokee/pyscgi | CTK/ProgressBar.py | 1 | 1975 | # CTK: Cherokee Toolkit
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2010-2011 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import os
from Widget import Widget
from util import props_to_str
HEADERS = [
'<link type="text/css" href="/CTK/css/CTK.css" rel="stylesheet" />',
'<script type="text/javascript" src="/CTK/js/jquery-ui-1.7.2.custom.min.js"></script>'
]
HTML = """
<div id="%(id)s" %(props)s></div>
"""
PERCENT_INIT_JS = """
$('#%(id)s').progressbar({ value: %(value)s });
"""
class ProgressBar (Widget):
def __init__ (self, props={}):
Widget.__init__ (self)
self.id = "progressbar_%d" %(self.uniq_id)
self.value = props.pop ('value', 0)
self.props = props.copy()
if 'class' in props:
self.props['class'] += ' progressbar'
else:
self.props['class'] = 'progressbar'
def Render (self):
render = Widget.Render (self)
props = {'id': self.id,
'value': self.value,
'props': props_to_str (self.props)}
render.html += HTML %(props)
render.js += PERCENT_INIT_JS %(props)
render.headers += HEADERS
return render
def JS_to_set (self, value):
return "$('#%s').progressbar ('option', 'value', %s);" %(self.id, value)
| bsd-3-clause | -5,721,624,957,532,368,000 | 28.924242 | 90 | 0.620759 | false | 3.440767 | false | false | false |
jabaier/iic1103.20152.s5 | strings_listas_ej0.py | 1 | 1320 |
# define a function that, given a list of numbers,
# returns their sum
def sumalista(l):
    # computes l[0] + l[1] + l[2] + ... + l[len(l) - 1]
    # the length of the list l is obtained with len(l)
suma = 0
i = 0
while i < len(l):
suma = suma + l[i]
i = i + 1
return suma
def sumalista_cool(l):
suma = 0
for e in l:
suma = suma + e
return suma
def desafio_google(l):
i = 0
while i < len(l):
if sumalista(l[:i])==sumalista(l[i+1:]):
return i
i = i + 1
return -1
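
# Example (added note): desafio_google([1, 2, 3, 3]) -> 2, since
# l[:2] == [1, 2] and l[3:] == [3] both sum to 3; -1 means no such pivot.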
def mayusculas():
i = ord('A')
limite = ord('Z')
may=''
while i <= limite:
may = may + chr(i)
i = i + 1
return may
def minusculas():
return mayusculas().lower()
def encriptar_rot(mensaje, incremento):
M = mayusculas()
m = minusculas()
respuesta = ''
for c in mensaje:
indiceM = M.find(c)
indicem = m.find(c)
if indiceM > -1:
respuesta = respuesta + M[(indiceM+incremento)%26]
elif indicem > -1:
respuesta = respuesta + m[(indicem+incremento)%26]
else:
respuesta = respuesta + c
return respuesta
men = "Andate al cerro que mas te guste, querido"
enc = encriptar_rot(men,13)
desenc = encriptar_rot(enc,13)
print(enc)
print(desenc)
| unlicense | -7,947,223,740,876,585,000 | 19.292308 | 62 | 0.541319 | false | 2.730849 | false | false | false |
jalavik/plotextractor | plotextractor/converter.py | 1 | 7853 | # -*- coding: utf-8 -*-
#
# This file is part of plotextractor.
# Copyright (C) 2010, 2011, 2015 CERN.
#
# plotextractor is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# plotextractor is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with plotextractor; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
import os
import tarfile
from invenio.utils.shell import run_shell_command, run_process_with_timeout, Timeout
from .output_utils import get_converted_image_name, \
write_message
def untar(original_tarball, sdir):
"""
    Decide whether the file is actually a tarball (sometimes the
    'tarballs' fetched from arXiv aren't; they may 'contain' only the
    TeX file, in which case they are just that file). If it is one,
    untar it and decide which of its constituents are TeX files and
    which are images.
    @param: original_tarball (string): the name of the tar file from arXiv
    @param: sdir (string): the directory where we would like it untarred to
    @return: (file_list, image_list, might_be_tex)
    (([string, ...], [string, ...], [string, ...])):
    all extracted files, the images among them, and the candidate TeX
    files found in the tarball.
"""
if not tarfile.is_tarfile(original_tarball):
        return ([], [], [])
tarball = tarfile.open(original_tarball)
tarball.extractall(sdir)
tex_output_contains = 'TeX'
tex_file_extension = 'tex'
image_output_contains = 'image'
eps_output_contains = '- type eps'
ps_output_contains = 'Postscript'
file_list = []
image_list = []
might_be_tex = []
for extracted_file in tarball.getnames():
if extracted_file == '':
break
if extracted_file.startswith('./'):
extracted_file = extracted_file[2:]
# ensure we are actually looking at the right file
extracted_file = os.path.join(sdir, extracted_file)
# Add to full list of extracted files
file_list.append(extracted_file)
dummy1, cmd_out, dummy2 = run_shell_command('file %s', (extracted_file,))
# is it TeX?
if cmd_out.find(tex_output_contains) > -1:
might_be_tex.append(extracted_file)
# is it an image?
elif cmd_out.lower().find(image_output_contains) > cmd_out.find(':') \
or \
cmd_out.lower().find(eps_output_contains) > cmd_out.find(':')\
or \
cmd_out.find(ps_output_contains) > cmd_out.find(':'):
# we have "image" in the output, and it is not in the filename
# i.e. filename.ext: blah blah image blah blah
image_list.append(extracted_file)
# if neither, maybe it is TeX or an image anyway, otherwise,
# we don't care
else:
if extracted_file.split('.')[-1].lower() == tex_file_extension:
# we might have tex source!
might_be_tex.append(extracted_file)
elif extracted_file.split('.')[-1] in ['eps', 'png', \
'ps', 'jpg', 'pdf']:
# we might have an image!
image_list.append(extracted_file)
if might_be_tex == []:
# well, that's tragic
# could not find TeX file in tar archive
return ([], [], [])
return (file_list, image_list, might_be_tex)
def check_for_gzip(tfile):
"""
Was that tarball also gzipped? Let's find out!
@param: file (string): the name of the object (so we can gunzip, if
that's necessary)
@output: a gunzipped file in the directory of choice, if that's necessary
@return new_file (string): The name of the file after gunzipping or the
original name of the file if that wasn't necessary
"""
gzip_contains = 'gzip compressed data'
dummy1, cmd_out, dummy2 = run_shell_command('file %s', (tfile,))
if cmd_out.find(gzip_contains) > -1:
# we have a gzip!
        # gzip refuses to operate on a file whose name doesn't end
        # with .gz, so copy it to a .tar.gz name first.
run_shell_command('cp %s %s', (tfile, tfile + '.tar.gz'))
new_dest = os.path.join(os.path.split(tfile)[0], 'tmp.tar')
run_shell_command('touch %s', (new_dest,))
dummy1, cmd_out, cmd_err = run_shell_command('gunzip -c %s',
(tfile + '.tar.gz',))
if cmd_err != '':
write_message('Error while gunzipping ' + tfile)
return tfile
        # use a distinct name so the tarfile module isn't shadowed
        tar_out = open(new_dest, 'w')
        tar_out.write(cmd_out)
        tar_out.close()
run_shell_command('rm %s', (tfile + '.tar.gz',))
return new_dest
return tfile
def convert_images(image_list):
"""
Here we figure out the types of the images that were extracted from
the tarball and determine how to convert them into PNG.
@param: image_list ([string, string, ...]): the list of image files
extracted from the tarball in step 1
@return: image_list ([str, str, ...]): The list of image files when all
have been converted to PNG format.
"""
png_output_contains = 'PNG image'
ret_list = []
for image_file in image_list:
if os.path.isdir(image_file):
continue
# FIXME: here and everywhere else in the plot extractor
# library the run shell command statements should be (1)
# called with timeout in order to prevent runaway imagemagick
# conversions; (2) the arguments should be passed properly so
# that they are escaped.
dummy1, cmd_out, dummy2 = run_shell_command('file %s', (image_file,))
if cmd_out.find(png_output_contains) > -1:
ret_list.append(image_file)
else:
# we're just going to assume that ImageMagick can convert all
# the image types that we may be faced with
# for sure it can do EPS->PNG and JPG->PNG and PS->PNG
# and PSTEX->PNG
converted_image_file = get_converted_image_name(image_file)
cmd_list = ['convert', image_file, converted_image_file]
try:
dummy1, cmd_out, cmd_err = run_process_with_timeout(cmd_list)
if cmd_err == '' or os.path.exists(converted_image_file):
ret_list.append(converted_image_file)
else:
write_message('convert failed on ' + image_file)
except Timeout:
write_message('convert timed out on ' + image_file)
return ret_list
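

# Illustrative flow (added sketch; paths are hypothetical):
#   tarball = check_for_gzip('1234.5678.tar.gz')
#   files, images, tex_candidates = untar(tarball, '/tmp/plots')
#   pngs = convert_images(images)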
def extract_text(tarball):
"""
We check to see if there's a file called tarball.pdf, and, if there is,
we run pdftotext on it. Simple as that.
@param: tarball (string): the raw name of the tarball
    @return: None normally; -1 if pdftotext exited with an error
"""
try:
os.stat(tarball + '.pdf')
cmd_list = ['pdftotext', tarball + '.pdf ', tarball + '.txt']
dummy1, dummy2, cmd_err = run_process_with_timeout(cmd_list)
if cmd_err != '':
return - 1
write_message('generated ' + tarball + '.txt from ' + tarball + '.pdf')
except:
write_message('no text from ' + tarball + '.pdf')
| gpl-2.0 | 7,425,665,071,529,268,000 | 36.754808 | 84 | 0.605246 | false | 3.847624 | false | false | false |
jminuscula/dixit-online | server/src/dixit/api/game/views/player.py | 1 | 2333 |
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework import generics, status
from dixit.game.models import Player
from dixit.api.game.serializers.player import PlayerSerializer, PlayerCreateSerializer
from dixit.api.game.views.mixins import GameObjectMixin
class PlayerList(GameObjectMixin, generics.ListCreateAPIView):
"""
Implements Player list actions
- GET list of players for a game
    - POST a new player to a game from a player name
"""
model = Player
serializer_class = PlayerSerializer
permission_classes = (IsAuthenticated, )
def get_queryset(self):
return Player.objects.filter(game=self.get_game())
def get_serializer_class(self):
if self.request.method == 'POST':
return PlayerCreateSerializer
return PlayerSerializer
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
try:
game = self.get_game()
player = game.add_player(request.user, request.data['name'])
except IntegrityError as exc:
if 'user_id' in str(exc):
return Response({"detail": 'You are already playing this game'},
status=status.HTTP_403_FORBIDDEN)
return Response({"detail": "Username already in use"}, status=status.HTTP_403_FORBIDDEN)
data = PlayerSerializer(player).data
return Response(data, status=status.HTTP_201_CREATED)
class PlayerRetrieve(generics.RetrieveDestroyAPIView):
"""
Implements Player retrieve action
- GET player for game
"""
model = Player
serializer_class = PlayerSerializer
permission_classes = (IsAuthenticated, )
def get_object(self):
game_pk = self.kwargs['game_pk']
number = self.kwargs['player_number']
try:
return get_object_or_404(Player, game=game_pk, number=number)
except Player.DoesNotExist:
raise NotFound('player not found')
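

# Illustrative request flow (added note; URL patterns are hypothetical):
#   POST /games/<game_pk>/players/           {"name": "alice"} -> 201 + player
#   GET  /games/<game_pk>/players/<number>/  -> serialized player detail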
| mit | -6,770,100,734,929,883,000 | 32.811594 | 100 | 0.675954 | false | 4.352612 | false | false | false |
mdworks2016/work_development | Python/20_Third_Certification/venv/lib/python3.7/site-packages/celery/backends/database/session.py | 1 | 1896 | # -*- coding: utf-8 -*-
"""SQLAlchemy session."""
from __future__ import absolute_import, unicode_literals
from kombu.utils.compat import register_after_fork
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool
ResultModelBase = declarative_base()
__all__ = ('SessionManager',)
def _after_fork_cleanup_session(session):
session._after_fork()
class SessionManager(object):
"""Manage SQLAlchemy sessions."""
def __init__(self):
self._engines = {}
self._sessions = {}
self.forked = False
self.prepared = False
if register_after_fork is not None:
register_after_fork(self, _after_fork_cleanup_session)
def _after_fork(self):
self.forked = True
def get_engine(self, dburi, **kwargs):
if self.forked:
try:
return self._engines[dburi]
except KeyError:
engine = self._engines[dburi] = create_engine(dburi, **kwargs)
return engine
else:
return create_engine(dburi, poolclass=NullPool)
def create_session(self, dburi, short_lived_sessions=False, **kwargs):
engine = self.get_engine(dburi, **kwargs)
if self.forked:
if short_lived_sessions or dburi not in self._sessions:
self._sessions[dburi] = sessionmaker(bind=engine)
return engine, self._sessions[dburi]
return engine, sessionmaker(bind=engine)
def prepare_models(self, engine):
if not self.prepared:
ResultModelBase.metadata.create_all(engine)
self.prepared = True
def session_factory(self, dburi, **kwargs):
engine, session = self.create_session(dburi, **kwargs)
self.prepare_models(engine)
return session()
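# Illustrative usage (added sketch; the SQLite URI is an assumption -- any
# SQLAlchemy database URI works the same way):
if __name__ == '__main__':  # pragma: no cover
    manager = SessionManager()
    # Creates the engine, prepares the result tables and returns a session.
    session = manager.session_factory('sqlite:///results.sqlite')
    session.close()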
| apache-2.0 | 277,522,112,381,838,560 | 30.6 | 78 | 0.634494 | false | 4.034043 | false | false | false |
tensorflow/compression | tensorflow_compression/python/util/packed_tensors.py | 1 | 3070 | # Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Packed tensors in bit sequences."""
import tensorflow as tf
__all__ = [
"PackedTensors",
]
class PackedTensors:
"""Packed representation of compressed tensors.
This class can pack and unpack several tensor values into a single string. It
can also optionally store a model identifier.
The tensors currently must be rank 1 (vectors) and either have integer or
string type.
"""
def __init__(self, string=None):
self._example = tf.train.Example()
if string:
self.string = string
@property
def model(self):
"""A model identifier."""
buf = self._example.features.feature["MD"].bytes_list.value[0]
return buf.decode("ascii")
@model.setter
def model(self, value):
self._example.features.feature["MD"].bytes_list.value[:] = [
value.encode("ascii")]
@model.deleter
def model(self):
del self._example.features.feature["MD"]
@property
def string(self):
"""The string representation of this object."""
return self._example.SerializeToString()
@string.setter
def string(self, value):
self._example.ParseFromString(value)
def pack(self, tensors):
"""Packs `Tensor` values into this object."""
i = 1
for tensor in tensors:
feature = self._example.features.feature[chr(i)]
feature.Clear()
if tensor.shape.rank != 1:
raise RuntimeError(f"Unexpected tensor rank: {tensor.shape.rank}.")
if tensor.dtype.is_integer:
feature.int64_list.value[:] = tensor.numpy()
elif tensor.dtype == tf.string:
feature.bytes_list.value[:] = tensor.numpy()
else:
raise RuntimeError(f"Unexpected tensor dtype: '{tensor.dtype}'.")
i += 1
# Delete any remaining, previously set arrays.
while chr(i) in self._example.features.feature:
del self._example.features.feature[chr(i)]
i += 1
def unpack(self, dtypes):
"""Unpacks values from this object based on dtypes."""
tensors = []
for i, dtype in enumerate(dtypes):
dtype = tf.as_dtype(dtype)
feature = self._example.features.feature[chr(i + 1)]
if dtype.is_integer:
tensors.append(tf.constant(feature.int64_list.value, dtype=dtype))
elif dtype == tf.string:
tensors.append(tf.constant(feature.bytes_list.value, dtype=dtype))
else:
raise RuntimeError(f"Unexpected dtype: '{dtype}'.")
return tensors
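# Illustrative round trip (added sketch; the model name and tensor values are
# arbitrary placeholders):
if __name__ == "__main__":
    packed = PackedTensors()
    packed.model = "example-model"
    packed.pack([tf.constant([1, 2, 3], dtype=tf.int32),
                 tf.constant([b"abc"], dtype=tf.string)])
    restored = PackedTensors(packed.string)
    print(restored.model, restored.unpack([tf.int32, tf.string]))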
| apache-2.0 | -766,657,654,993,320,700 | 30.979167 | 80 | 0.657003 | false | 4.066225 | false | false | false |
wzin/interactivespaces-python-api | tests/test_master.py | 1 | 13234 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from mock import MagicMock
import json
import urllib
import urllib2
import sys
import os
sys.path.append(os.getcwd())
import interactivespaces
TEST_ACTIVITY_DATA = {
"id":"53",
"bundleContentHash":"hjkl",
"identifyingName":"com.endpoint.lg.browser",
"lastUploadDate":1398288057444,
"description":"Browser Activity to present \"webui\" activties to the user",
"name":"Browser Activity",
"lastStartDate":1401901320867,
"metadata":{},
"version":"1.0.0.dev"
}
TEST_LIVEACTIVITY_DATA = {
"lastDeployDate":"Mon May 05 12:50:36 PDT 2014",
"outOfDate":False,
"id":"110",
"description":"",
"name":"Evdev Demuxer on 42-a",
"active": {
"numberLiveActivityGroupRunning":1,
"runtimeState":"ACTIVE",
"deployState":"UNKNOWN",
"lastStateUpdate":"Wed Jun 04 11:17:21 PDT 2014",
"runtimeStateDescription":"space.activity.state.active",
"directRunning":False,
"directActivated":False,
"numberLiveActivityGroupActivated":1,
"deployStateDescription":"space.activity.state.unknown",
"deployStateDetail":None,
"runtimeStateDetail":"<p>foo</p>"
},
"controller": {
"id":"2",
"name":"ISCtlDispAScreen00",
"uuid":"372f0f95-6b48-487a-a1ac-383ba580fc1c"
},
"uuid":"88816d20-22f6-4f78-95ba-7843696c6bc5",
"activity": {
"id":"61",
"bundleContentHash":"qwerty",
"identifyingName":"com.endpoint.lg.evdev.demuxer",
"lastUploadDate":1398288062862,
"description":"Separates and aggregates different types of input events.",
"name":"Event Device Demuxer",
"lastStartDate":1401905841864,
"metadata":{},
"version":"1.0.0.dev"
},
"metadata":{}
}
TEST_LIVEACTIVITYGROUP_DATA = {
"id":"301",
"description":"",
"name":"Google Earth",
"metadata":{}
}
TEST_SPACE_DATA = {
"id":"401",
"description":"",
"name":"LG Express",
"metadata":{}
}
TEST_CONTROLLER_DATA = {
"state":"RUNNING",
"hostId":"ctldispascreen00",
"mode":"ENABLED",
"id":"2",
"stateDescription":"space.controller.state.running",
"modeDescription":"space.controller.mode.enabled",
"description":"Controller for Screen 00 on Display Node A",
"lastStateUpdateDate":"Wed Jun 04 12:25:57 PDT 2014",
"name":"ISCtlDispAScreen00",
"dataBundleStateDescription":"space.controller.dataBundle.state.none",
"uuid":"372f0f95-6b48-487a-a1ac-383ba580fc1c",
"dataBundleState":"NO_REQUEST",
"lastDataBundleStateUpdateDate":None,
"metadata":{}
}
TEST_NAMEDSCRIPT_DATA = {
"id":"3",
"name":"foo",
"description":"bar"
}
TEST_POST = {"foo":"bar"}
TEST_QUERY = {"zot":"zing"}
TEST_SESSION = 'e2s1'
TEST_HOST = '1.2.3.4'
TEST_PORT = 12345
def test_get_collection(data, method_to_test, expected_type, path_name):
"""Helper for testing collection getters."""
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._api_get_json = MagicMock(return_value=[data])
result = method_to_test(master)
master._api_get_json.assert_called_once_with('{}/all'.format(path_name))
return result
class MasterTests(unittest.TestCase):
def test_constructor(self):
"""Test construction with valid arguments."""
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
self.assertEqual(master.host, TEST_HOST)
self.assertEqual(master.port, TEST_PORT)
def test_api_get_json(self):
"""Test a valid call to Master._api_get_json()."""
class MockResponse(object):
def read():
return '{"result":"success","data":{"foo":"bar"}}'
def getcode():
return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'activity/all'
response = master._api_get_json(command)
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.json'.format(TEST_HOST, TEST_PORT, command)
)
self.assertEqual('bar', response['foo'])
def test_api_get_html(self):
"""Test a valid call to Master._api_get_html()."""
class MockResponse(object):
def read():
return 'asdf'
def getcode():
return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'activity/new'
response = master._api_get_html(command, {"foo":"bar"})
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.html?{}'.format(
TEST_HOST,
TEST_PORT,
command,
urllib.urlencode(TEST_QUERY)
)
)
self.assertEqual('asdf', response.read())
self.assertEqual(200, response.getcode())
def test_api_post_json(self):
"""Test a valid call to Master._api_post_json()."""
class MockResponse(object):
def read():
return '{"result":"success"}'
def getcode():
return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'liveactivity/42/configure'
master._api_post_json(command, TEST_QUERY, TEST_POST)
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.json?{}'.format(
TEST_HOST,
TEST_PORT,
command,
urllib.urlencode(TEST_QUERY)
),
urllib.urlencode(TEST_POST)
)
def test_api_post_html(self):
"""Test a valid call to Master._api_post_html()."""
class MockResponse(object):
def read():
return 'asdf'
def getcode():
return 200
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
master._urlopen = MagicMock(return_value=MockResponse())
command = 'namescript/new'
master._api_post_html(command, TEST_QUERY, TEST_POST)
master._urlopen.assert_called_once_with(
'http://{}:{}/{}.html?{}'.format(
TEST_HOST,
TEST_PORT,
command,
urllib.urlencode(TEST_QUERY)
),
urllib.urlencode(TEST_POST)
)
def test_get_all_activities(self):
"""Test Master.get_activities() with no pattern."""
expected_type = interactivespaces.Activity
result = test_get_collection(
data=TEST_ACTIVITY_DATA,
method_to_test=interactivespaces.Master.get_activities,
expected_type=expected_type,
path_name='activity'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_live_activities(self):
"""Test Master.get_live_activities() with no pattern."""
expected_type = interactivespaces.LiveActivity
result = test_get_collection(
data=TEST_LIVEACTIVITY_DATA,
method_to_test=interactivespaces.Master.get_live_activities,
expected_type=expected_type,
path_name='liveactivity'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_live_activity_groups(self):
"""Test Master.get_live_activity_groups() with no pattern."""
expected_type = interactivespaces.LiveActivityGroup
        result = test_get_collection(
data=TEST_LIVEACTIVITYGROUP_DATA,
method_to_test=interactivespaces.Master.get_live_activity_groups,
expected_type=expected_type,
path_name='liveactivitygroup'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_spaces(self):
"""Test Master.get_spaces() with no pattern."""
expected_type = interactivespaces.Space
        result = test_get_collection(
data=TEST_SPACE_DATA,
method_to_test=interactivespaces.Master.get_spaces,
expected_type=expected_type,
path_name='space'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_controllers(self):
"""Test Master.get_controllers() with no pattern."""
expected_type = interactivespaces.Controller
        result = test_get_collection(
data=TEST_CONTROLLER_DATA,
method_to_test=interactivespaces.Master.get_controllers,
expected_type=expected_type,
            path_name='spacecontroller'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_get_named_scripts(self):
"""Test Master.get_named_scripts() with no pattern."""
expected_type = interactivespaces.NamedScript
        result = test_get_collection(
data=TEST_NAMEDSCRIPT_DATA,
method_to_test=interactivespaces.Master.get_named_scripts,
expected_type=expected_type,
path_name='namedscript'
)
self.assertEqual(1, len(result))
self.assertIsInstance(result[0], expected_type)
def test_new_live_activity(self):
"""Test a valid call to Master.new_live_activity()."""
master = interactivespaces.Master(TEST_HOST, TEST_PORT)
class MockFirstResponse():
def getcode():
return 200
def geturl():
return 'http://{}:{}/liveactivity/new.html?execution={}'.format(
TEST_HOST,
TEST_PORT,
TEST_SESSION
)
class MockSecondResponse():
def getcode():
return 200
master._api_get_html = MagicMock(return_value=MockFirstResponse())
master._api_post_html = MagicMock(return_value=MockSecondResponse())
        class MockActivity():
            id = TEST_LIVEACTIVITY_DATA['activity']['id']
        class MockController():
            id = TEST_LIVEACTIVITY_DATA['controller']['id']
test_live_activity = master.new_live_activity(
TEST_LIVEACTIVITY_DATA['name'],
TEST_LIVEACTIVITY_DATA['description'],
MockActivity(),
MockController()
)
master._api_get_html.assert_called_once_with(
'liveactivity/new',
{"mode": "embedded"}
)
master._api_post_html.assert_called_once_with(
'liveactivity/new',
{"execution": TEST_SESSION},
{
"liveActivity.name": TEST_LIVEACTIVITY_DATA['name'],
"liveActivity.description": TEST_LIVEACTIVITY_DATA['description'],
"activityId": TEST_LIVEACTIVITY_DATA['activity']['id'],
"controllerId": TEST_LIVEACTIVITY_DATA['controller']['id'],
"_eventId_save": "Save"
}
)
self.assertIsInstance(
test_live_activity,
interactivespaces.LiveActivity
)
def main():
unittest.main()
if __name__ == '__main__':
main()
| apache-2.0 | -5,224,145,098,743,237,000 | 36.174157 | 102 | 0.504458 | false | 4.481544 | true | false | false |
yollamttam/WordPrediction | EntropyBenchmarkUnigram.py | 1 | 1302 | import nltk
import glob
import pickle
import numpy as np
from fann2 import libfann
### Unigram perplexity
# obvious
alpha = 0.5
nExamples = 0
fileToEntropy = {}
# load pickle
wordsInOrder = pickle.load( open( "wordsInOrder.p", "rb" ) )
wordProb = pickle.load( open( "wordProbability.p", "rb" ) )
# load neural network
ann = libfann.neural_net()
ann.create_from_file("NN.net")
nFeatures = np.shape(wordProb)[0]-1
files = glob.glob('reuters/training/*')
files = files[:100]
fileNum = 0
for filename in files:
entropy = 0
fileNum += 1
print "%d of %d" % (fileNum,len(files))
openfile = open(filename,'r')
readfile = openfile.read()
tokens = nltk.word_tokenize(readfile)
# loop through tokens
for token in tokens:
token = token.lower()
if (token in wordsInOrder):
tokenIndex = wordsInOrder[token]
else:
tokenIndex = nFeatures
logProb = np.min((50,-1*np.log(wordProb[tokenIndex])))
entropy += logProb
entropy /= len(tokens)
print entropy
fileToEntropy[filename] = entropy
openfile.close()
avgEntropy = 0
for value in fileToEntropy.itervalues():
avgEntropy += value
avgEntropy /= len(fileToEntropy)
print avgEntropy
pickle.dump(fileToEntropy,open("fileToEntropy.p", "wb" ))
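# Note (added): the value printed above is the average per-token negative log
# probability (cross-entropy in nats); perplexity follows as e**avgEntropy.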
| apache-2.0 | 2,327,401,023,340,730,000 | 21.448276 | 62 | 0.659754 | false | 3.296203 | false | false | false |
sfjuocekr/PokeIV | setup.py | 1 | 1764 | #!/usr/bin/env python
#-- Setup file for py2exe
from distutils.core import setup
import py2exe
import sys, os
import Cryptodome
import requests
#find POGOProtos
sys.path.append("pgoapi\protos")
mydata = list()
path = Cryptodome.__path__[0]
root_end = path.find('Cryptodome')
for folder,folder_name,files in os.walk(path):
for file in files:
if os.path.splitext(file)[1] == '.pyd':
mydata.append((folder[root_end:], [os.path.join(folder,file)]))
path = requests.__path__[0]
root_end = path.find('requests')
for folder,folder_name,files in os.walk(path):
for file in files:
if file == 'cacert.pem':
mydata.append((folder[root_end:], [os.path.join(folder,file)]))
path = os.path.join(os.path.dirname(os.path.realpath(__file__)),'pgoapi')
root_end = 'pgoapi'
for folder,folder_name,files in os.walk(path):
for file in files:
if os.path.splitext(file)[1] == '.json':
mydata.append((root_end, [os.path.join(folder,file)]))
mydata.extend(('families.tsv','evolves.tsv','german-names.tsv','config.json'))
setup(data_files=mydata,
windows = [{'script': "pokeIV.py"}],
zipfile = None,
options= {
"py2exe":{
"packages": ['s2sphere',
'six',
'gpsoauth',
'geopy',
'requests',
'Cryptodome',
'POGOProtos',
'POGOProtos.Networking.Requests',
'POGOProtos.Networking.Requests.Messages_pb2',
'POGOProtos.Networking.Responses_pb2']
,'bundle_files': 1
,'compressed': True
,'dll_excludes': [ 'crypt32.dll', 'mpr.dll']
}
})
| mit | 1,747,010,191,501,326,800 | 29.413793 | 78 | 0.560091 | false | 3.458824 | false | false | false |
Matla/Python_Logging | logging/logging.py | 1 | 6683 | __author__ = "Mats Larsen"
__copyright__ = "Mats Larsen 2014"
__credits__ = ["Morten Lind"]
__license__ = "GPL"
__maintainer__ = "Mats Larsen"
__email__ = "matsla@{ntnu.no}"
__status__ = "Development"
#--------------------------------------------------------------------
#File: logging.py
#Module Description
"""
This module is able to log data depending of the modes.
"""
#--------------------------------------------------------------------
#IMPORT
#--------------------------------------------------------------------
import traceback
import threading
import sys
import time
import numpy as np
from timermanager import TimerManager as TM
#--------------------------------------------------------------------
#CONSTANTS
#--------------------------------------------------------------------
LOG_LEVEL = 2 # Information level
ALWAYS_LOG_LEVEL = 2
FILE = 'logging'
#Modes for the logging class
modes = {'ft-sensor' : '_force_torque_logging_mode',
'Joint_Angles' : '_joint_angles',
}
#--------------------------------------------------------------------
#METHODS
#-------------------------------------------------------------------
def log(msg, log_level=LOG_LEVEL):
"""
    Print a message, and track where the log is invoked
    Input:
    -msg: message to be printed, ''
    -log_level: information level """
global LOG_LEVEL
if log_level <= LOG_LEVEL:
print(str(log_level) + ' : ' + FILE +'.py::' + traceback.extract_stack()[-2][2] + ' : ' + msg)
class Logging(threading.Thread):
""" This class create an instance to logging in a custom mode. """
class Error(Exception):
"""Exception class."""
def __init__(self, message):
self.message = message
Exception.__init__(self, self.message)
def __repr__(self):
return self.message
def __init__(self,name='logging_instance',
logging_mode=None,
file_names=[],
ref_class=None,
freq=None,
log_level=3):
# Assignment
self._name=name # name of the instance
self._current_logging_mode=logging_mode # which mode to log
self._ref=ref_class # the class to listen on
self._files = file_names # files name, to decide the name of the file
self._log_level=log_level # information level
#Threading
threading.Thread.__init__(self) # initialize th
self.daemon = True
#Event
self._thread_alive = threading.Event() # status for the thread
self._thread_terminated = threading.Event() # status for the thread terminated
#Reset
self._thread_alive.clear()
self._thread_terminated.clear()
log('Logging instance ' + self._name + ' is initialized : ', self._log_level)
self.start() # start the thread
def get_name(self):
"""Returning the name of the instance."""
return self._name
name = property(get_name,'Name Property')
def _force_torque_logging_mode(self):
"""This mode will lock the data from the ft-sensor"""
info,force,torque = self._ref.get_data_ATI(sync=True,timeout=1,data_type=None) # wait for data
if info != None:
force = np.average(force,axis=0)
torque = np.average(torque,axis=0)
info = info[0]
c_time = time.time() - self._first_time # get time stamp
for i in range(0,3):
self._data_list[i].append(info[i])
self._data_list[3+i].append(force[i])
self._data_list[6+i].append(torque[i])
self._data_list[9].append(c_time)
def _joint_angles(self):
"""This mode will log the joint angels."""
self._ref.robot_facade.wait_for_control()
time.sleep(1)
def stop_joint_angles_listner(self):
self._ref.robot_facade.unsubscribe(self.log_joint_angles_listner)
def log_joint_angles_listner(self, event_time):
self._data_list[0].append(event_time)
self._data_list[1].append(self._ref.robot_facade.act_joint_pos.tolist())
self._data_list[2].append(self._ref.robot_facade.cmd_joint_pos.tolist())
def run(self):
"""The thread is running in this loop."""
log('Logging Instance ' + self._name + ' is RUNNING', ALWAYS_LOG_LEVEL)
self._thread_alive.set() # set the thread to be alive
self._thread_terminated.clear() # clear the terminated event
self._data_list = [] # a list that contain all files
for i in self._files:
self._data_list.append([])
self._first_time = time.time()
if self._current_logging_mode == 'Joint_Angles':
self._ref.robot_facade.subscribe(self.log_joint_angles_listner)
while self._thread_alive.isSet() == True:
try:
method = getattr(self,modes[self._current_logging_mode])
except AttributeError:
                raise self.Error(modes[self._current_logging_mode] + ' not found !!!! : ' + '"{}"'.format(self._name))
else:
method() # call task from the queue
if self._current_logging_mode == 'Joint_Angles':
self._ref.robot_facade.unsubscribe(self.log_joint_angles_listner)
self._ref.robot_facade.wait_for_control()
self._file_list = [] # a list that contain all files
for i in self._files:
self._file_list.append(open(i+'.txt','w'))
for i in range(0,len(self._files)):
for j in self._data_list[i]:
self._file_list[i].write(str(j) + '\n')
for i in self._file_list:
            i.close()
self._thread_terminated.set()
def stop(self):
"""Stop the thread, and also stop the reciver class and
close the socket."""
log('Trying to stop LOGGING', self._log_level)
if self._thread_alive.isSet(): # if the thread is alive
self._thread_alive.clear() # set flag to false
else:
raise Exception('LOGGING: '
+ 'Is already stopped')
def wait_startup(self,timeout=None):
"""Wait to this thread is started up, expect
if a timeout is given.
Inputs:
timeout:float-> timeout given in secs."""
if self._thread_alive.wait(timeout):
return True
else:
return False
def wait_terminated(self,timeout=None):
"""Wait to this thread is terminated, expect
if a timeout is given.
Inputs:
timeout:float-> timeout given in secs."""
if self._thread_terminated.wait(timeout):
return True
else:
return False
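# Illustrative construction (added sketch; `ft_sensor` is a hypothetical
# object exposing get_data_ATI(), as expected by _force_torque_logging_mode,
# and the ten file names match the order data is appended in that mode):
#
#     logger_instance = Logging(name='ft_log',
#                               logging_mode='ft-sensor',
#                               file_names=['info_x', 'info_y', 'info_z',
#                                           'force_x', 'force_y', 'force_z',
#                                           'torque_x', 'torque_y', 'torque_z',
#                                           'time'],
#                               ref_class=ft_sensor)
#     ...
#     logger_instance.stop()
#     logger_instance.wait_terminated(timeout=5)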
| gpl-3.0 | -5,752,381,826,589,844,000 | 38.081871 | 102 | 0.540326 | false | 4.050303 | false | false | false |
emedvedev/st2 | st2actions/st2actions/runners/windows_command_runner.py | 1 | 4081 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from eventlet.green import subprocess
from st2common import log as logging
from st2common.util.green.shell import run_command
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED, LIVEACTION_STATUS_FAILED
from st2common.constants.runners import WINDOWS_RUNNER_DEFAULT_ACTION_TIMEOUT
from st2actions.runners.windows_runner import BaseWindowsRunner
LOG = logging.getLogger(__name__)
# constants to lookup in runner_parameters
RUNNER_HOST = 'host'
RUNNER_USERNAME = 'username'
RUNNER_PASSWORD = 'password'
RUNNER_COMMAND = 'cmd'
RUNNER_TIMEOUT = 'timeout'
def get_runner():
return WindowsCommandRunner(str(uuid.uuid4()))
class WindowsCommandRunner(BaseWindowsRunner):
"""
Runner which executes commands on a remote Windows machine.
"""
def __init__(self, runner_id, timeout=WINDOWS_RUNNER_DEFAULT_ACTION_TIMEOUT):
"""
:param timeout: Action execution timeout in seconds.
:type timeout: ``int``
"""
super(WindowsCommandRunner, self).__init__(runner_id=runner_id)
self._timeout = timeout
def pre_run(self):
super(WindowsCommandRunner, self).pre_run()
# TODO :This is awful, but the way "runner_parameters" and other variables get
# assigned on the runner instance is even worse. Those arguments should
# be passed to the constructor.
self._host = self.runner_parameters.get(RUNNER_HOST, None)
self._username = self.runner_parameters.get(RUNNER_USERNAME, None)
self._password = self.runner_parameters.get(RUNNER_PASSWORD, None)
self._command = self.runner_parameters.get(RUNNER_COMMAND, None)
self._timeout = self.runner_parameters.get(RUNNER_TIMEOUT, self._timeout)
def run(self, action_parameters):
# Make sure the dependencies are available
self._verify_winexe_exists()
args = self._get_winexe_command_args(host=self._host, username=self._username,
password=self._password,
command=self._command)
# Note: We don't send anything over stdin, we just create an unused pipe
# to avoid some obscure failures
exit_code, stdout, stderr, timed_out = run_command(cmd=args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
timeout=self._timeout)
if timed_out:
error = 'Action failed to complete in %s seconds' % (self._timeout)
else:
error = None
if exit_code != 0:
error = self._parse_winexe_error(stdout=stdout, stderr=stderr)
result = stdout
output = {
'stdout': stdout,
'stderr': stderr,
'exit_code': exit_code,
'result': result
}
if error:
output['error'] = error
status = LIVEACTION_STATUS_SUCCEEDED if exit_code == 0 else LIVEACTION_STATUS_FAILED
return (status, output, None)
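# Illustrative runner_parameters (added sketch; the host and credentials are
# hypothetical -- the keys mirror the RUNNER_* constants above):
#
#     {'host': '10.0.0.5', 'username': 'Administrator',
#      'password': 'secret', 'cmd': 'ipconfig /all', 'timeout': 600}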
| apache-2.0 | -9,076,009,354,234,646,000 | 38.621359 | 92 | 0.627542 | false | 4.416667 | false | false | false |
adityahase/frappe | frappe/core/doctype/navbar_settings/navbar_settings.py | 1 | 1160 | # -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class NavbarSettings(Document):
def validate(self):
self.validate_standard_navbar_items()
def validate_standard_navbar_items(self):
doc_before_save = self.get_doc_before_save()
before_save_items = [item for item in \
doc_before_save.help_dropdown + doc_before_save.settings_dropdown if item.is_standard]
after_save_items = [item for item in \
self.help_dropdown + self.settings_dropdown if item.is_standard]
if not frappe.flags.in_patch and (len(before_save_items) > len(after_save_items)):
frappe.throw(_("Please hide the standard navbar items instead of deleting them"))
@frappe.whitelist()
def get_app_logo():
app_logo = frappe.db.get_single_value('Navbar Settings', 'app_logo')
if not app_logo:
app_logo = frappe.get_hooks('app_logo_url')[-1]
return app_logo
def get_navbar_settings():
navbar_settings = frappe.get_single('Navbar Settings')
return navbar_settings
| mit | 5,117,916,987,784,409,000 | 28 | 89 | 0.735345 | false | 3.267606 | false | false | false |
Julian/home-assistant | homeassistant/components/notify/pushbullet.py | 1 | 4409 | """
PushBullet platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.pushbullet/
"""
import logging
from homeassistant.components.notify import (
ATTR_TARGET, ATTR_TITLE, BaseNotificationService)
from homeassistant.const import CONF_API_KEY
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def get_service(hass, config):
"""Get the PushBullet notification service."""
from pushbullet import PushBullet
from pushbullet import InvalidKeyError
if CONF_API_KEY not in config:
_LOGGER.error("Unable to find config key '%s'", CONF_API_KEY)
return None
try:
pushbullet = PushBullet(config[CONF_API_KEY])
except InvalidKeyError:
_LOGGER.error(
"Wrong API key supplied. "
"Get it at https://www.pushbullet.com/account")
return None
return PushBulletNotificationService(pushbullet)
# pylint: disable=too-few-public-methods
class PushBulletNotificationService(BaseNotificationService):
"""Implement the notification service for Pushbullet."""
def __init__(self, pb):
"""Initialize the service."""
self.pushbullet = pb
self.pbtargets = {}
self.refresh()
def refresh(self):
"""Refresh devices, contacts, etc.
pbtargets stores all targets available from this pushbullet instance
into a dict. These are PB objects!. It sacrifices a bit of memory
for faster processing at send_message.
As of sept 2015, contacts were replaced by chats. This is not
implemented in the module yet.
"""
self.pushbullet.refresh()
self.pbtargets = {
'device': {
tgt.nickname.lower(): tgt for tgt in self.pushbullet.devices},
'channel': {
tgt.channel_tag.lower(): tgt for
tgt in self.pushbullet.channels},
}
def send_message(self, message=None, **kwargs):
"""Send a message to a specified target.
If no target specified, a 'normal' push will be sent to all devices
linked to the PB account.
Email is special, these are assumed to always exist. We use a special
call which doesn't require a push object.
"""
targets = kwargs.get(ATTR_TARGET)
title = kwargs.get(ATTR_TITLE)
refreshed = False
if not targets:
            # Backward compatibility: notify all devices in own account
self.pushbullet.push_note(title, message)
_LOGGER.info('Sent notification to self')
return
# Make list if not so
if not isinstance(targets, list):
targets = [targets]
# Main loop, Process all targets specified
for target in targets:
try:
ttype, tname = target.split('/', 1)
except ValueError:
_LOGGER.error('Invalid target syntax: %s', target)
continue
# Target is email, send directly, don't use a target object
            # This also seems to work for sending to all devices in own account
if ttype == 'email':
self.pushbullet.push_note(title, message, email=tname)
_LOGGER.info('Sent notification to email %s', tname)
continue
# Refresh if name not found. While awaiting periodic refresh
            # solution in component, poor man's refresh ;)
if ttype not in self.pbtargets:
_LOGGER.error('Invalid target syntax: %s', target)
continue
tname = tname.lower()
if tname not in self.pbtargets[ttype] and not refreshed:
self.refresh()
refreshed = True
# Attempt push_note on a dict value. Keys are types & target
# name. Dict pbtargets has all *actual* targets.
try:
self.pbtargets[ttype][tname].push_note(title, message)
_LOGGER.info('Sent notification to %s/%s', ttype, tname)
except KeyError:
_LOGGER.error('No such target: %s/%s', ttype, tname)
continue
except self.pushbullet.errors.PushError:
_LOGGER.error('Notify failed to: %s/%s', ttype, tname)
continue
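# Illustrative configuration.yaml snippet (added sketch; the API key is a
# placeholder -- only `platform` and `api_key` are read by this platform):
#
#     notify:
#       - platform: pushbullet
#         api_key: YOUR_PUSHBULLET_API_KEY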
| mit | -4,096,327,190,134,021,000 | 33.992063 | 78 | 0.604445 | false | 4.517418 | false | false | false |
mdworks2016/work_development | Python/20_Third_Certification/venv/lib/python3.7/site-packages/django/contrib/gis/db/models/functions.py | 1 | 16962 | from decimal import Decimal
from django.contrib.gis.db.models.fields import BaseSpatialField, GeometryField
from django.contrib.gis.db.models.sql import AreaField, DistanceField
from django.contrib.gis.geos import GEOSGeometry
from django.core.exceptions import FieldError
from django.db.models import (
BooleanField, FloatField, IntegerField, TextField, Transform,
)
from django.db.models.expressions import Func, Value
from django.db.models.functions import Cast
from django.db.utils import NotSupportedError
from django.utils.functional import cached_property
NUMERIC_TYPES = (int, float, Decimal)
class GeoFuncMixin:
function = None
geom_param_pos = (0,)
def __init__(self, *expressions, **extra):
super().__init__(*expressions, **extra)
# Ensure that value expressions are geometric.
for pos in self.geom_param_pos:
expr = self.source_expressions[pos]
if not isinstance(expr, Value):
continue
try:
output_field = expr.output_field
except FieldError:
output_field = None
geom = expr.value
if not isinstance(geom, GEOSGeometry) or output_field and not isinstance(output_field, GeometryField):
raise TypeError("%s function requires a geometric argument in position %d." % (self.name, pos + 1))
if not geom.srid and not output_field:
raise ValueError("SRID is required for all geometries.")
if not output_field:
self.source_expressions[pos] = Value(geom, output_field=GeometryField(srid=geom.srid))
@property
def name(self):
return self.__class__.__name__
@cached_property
def geo_field(self):
return self.source_expressions[self.geom_param_pos[0]].field
def as_sql(self, compiler, connection, function=None, **extra_context):
if self.function is None and function is None:
function = connection.ops.spatial_function_name(self.name)
return super().as_sql(compiler, connection, function=function, **extra_context)
def resolve_expression(self, *args, **kwargs):
res = super().resolve_expression(*args, **kwargs)
# Ensure that expressions are geometric.
source_fields = res.get_source_fields()
for pos in self.geom_param_pos:
field = source_fields[pos]
if not isinstance(field, GeometryField):
raise TypeError(
"%s function requires a GeometryField in position %s, got %s." % (
self.name, pos + 1, type(field).__name__,
)
)
base_srid = res.geo_field.srid
for pos in self.geom_param_pos[1:]:
expr = res.source_expressions[pos]
expr_srid = expr.output_field.srid
if expr_srid != base_srid:
# Automatic SRID conversion so objects are comparable.
res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs)
return res
def _handle_param(self, value, param_name='', check_types=None):
if not hasattr(value, 'resolve_expression'):
if check_types and not isinstance(value, check_types):
raise TypeError(
"The %s parameter has the wrong type: should be %s." % (
param_name, check_types)
)
return value
class GeoFunc(GeoFuncMixin, Func):
pass
class GeomOutputGeoFunc(GeoFunc):
@cached_property
def output_field(self):
return GeometryField(srid=self.geo_field.srid)
class SQLiteDecimalToFloatMixin:
"""
By default, Decimal values are converted to str by the SQLite backend, which
is not acceptable by the GIS functions expecting numeric values.
"""
def as_sqlite(self, compiler, connection, **extra_context):
for expr in self.get_source_expressions():
if hasattr(expr, 'value') and isinstance(expr.value, Decimal):
expr.value = float(expr.value)
return super().as_sql(compiler, connection, **extra_context)
class OracleToleranceMixin:
tolerance = 0.05
def as_oracle(self, compiler, connection, **extra_context):
tolerance = Value(self._handle_param(
self.extra.get('tolerance', self.tolerance),
'tolerance',
NUMERIC_TYPES,
))
clone = self.copy()
clone.set_source_expressions([*self.get_source_expressions(), tolerance])
return clone.as_sql(compiler, connection, **extra_context)
class Area(OracleToleranceMixin, GeoFunc):
arity = 1
@cached_property
def output_field(self):
return AreaField(self.geo_field)
def as_sql(self, compiler, connection, **extra_context):
if not connection.features.supports_area_geodetic and self.geo_field.geodetic(connection):
raise NotSupportedError('Area on geodetic coordinate systems not supported.')
return super().as_sql(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
extra_context['template'] = '%(function)s(%(expressions)s, %(spheroid)d)'
extra_context['spheroid'] = True
return self.as_sql(compiler, connection, **extra_context)
class Azimuth(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class AsGeoJSON(GeoFunc):
output_field = TextField()
def __init__(self, expression, bbox=False, crs=False, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
if options:
expressions.append(options)
super().__init__(*expressions, **extra)
class AsGML(GeoFunc):
geom_param_pos = (1,)
output_field = TextField()
def __init__(self, expression, version=2, precision=8, **extra):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super().__init__(*expressions, **extra)
def as_oracle(self, compiler, connection, **extra_context):
source_expressions = self.get_source_expressions()
version = source_expressions[0]
clone = self.copy()
clone.set_source_expressions([source_expressions[1]])
extra_context['function'] = 'SDO_UTIL.TO_GML311GEOMETRY' if version.value == 3 else 'SDO_UTIL.TO_GMLGEOMETRY'
return super(AsGML, clone).as_sql(compiler, connection, **extra_context)
class AsKML(AsGML):
def as_sqlite(self, compiler, connection, **extra_context):
# No version parameter
clone = self.copy()
clone.set_source_expressions(self.get_source_expressions()[1:])
return clone.as_sql(compiler, connection, **extra_context)
class AsSVG(GeoFunc):
output_field = TextField()
def __init__(self, expression, relative=False, precision=8, **extra):
relative = relative if hasattr(relative, 'resolve_expression') else int(relative)
expressions = [
expression,
relative,
self._handle_param(precision, 'precision', int),
]
super().__init__(*expressions, **extra)
class BoundingCircle(OracleToleranceMixin, GeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super().__init__(expression, num_seg, **extra)
def as_oracle(self, compiler, connection, **extra_context):
clone = self.copy()
clone.set_source_expressions([self.get_source_expressions()[0]])
return super(BoundingCircle, clone).as_oracle(compiler, connection, **extra_context)
class Centroid(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Difference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class DistanceResultMixin:
@cached_property
def output_field(self):
return DistanceField(self.geo_field)
def source_is_geography(self):
return self.geo_field.geography and self.geo_field.srid == 4326
class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
geom_param_pos = (0, 1)
spheroid = None
def __init__(self, expr1, expr2, spheroid=None, **extra):
expressions = [expr1, expr2]
if spheroid is not None:
self.spheroid = self._handle_param(spheroid, 'spheroid', bool)
super().__init__(*expressions, **extra)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
expr2 = clone.source_expressions[1]
geography = self.source_is_geography()
if expr2.output_field.geography != geography:
if isinstance(expr2, Value):
expr2.output_field.geography = geography
else:
clone.source_expressions[1] = Cast(
expr2,
GeometryField(srid=expr2.output_field.srid, geography=geography),
)
if not geography and self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need special distance functions
if self.spheroid:
# DistanceSpheroid is more accurate and resource intensive than DistanceSphere
function = connection.ops.spatial_function_name('DistanceSpheroid')
# Replace boolean param by the real spheroid of the base field
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
function = connection.ops.spatial_function_name('DistanceSphere')
return super(Distance, clone).as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
# SpatiaLite returns NULL instead of zero on geodetic coordinates
extra_context['template'] = 'COALESCE(%(function)s(%(expressions)s, %(spheroid)s), 0)'
extra_context['spheroid'] = int(bool(self.spheroid))
return super().as_sql(compiler, connection, **extra_context)
class Envelope(GeomOutputGeoFunc):
arity = 1
class ForcePolygonCW(GeomOutputGeoFunc):
arity = 1
class GeoHash(GeoFunc):
output_field = TextField()
def __init__(self, expression, precision=None, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super().__init__(*expressions, **extra)
def as_mysql(self, compiler, connection, **extra_context):
clone = self.copy()
# If no precision is provided, set it to the maximum.
if len(clone.source_expressions) < 2:
clone.source_expressions.append(Value(100))
return clone.as_sql(compiler, connection, **extra_context)
class GeometryDistance(GeoFunc):
output_field = FloatField()
arity = 2
function = ''
arg_joiner = ' <-> '
geom_param_pos = (0, 1)
class Intersection(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
@BaseSpatialField.register_lookup
class IsValid(OracleToleranceMixin, GeoFuncMixin, Transform):
lookup_name = 'isvalid'
output_field = BooleanField()
def as_oracle(self, compiler, connection, **extra_context):
sql, params = super().as_oracle(compiler, connection, **extra_context)
return "CASE %s WHEN 'TRUE' THEN 1 ELSE 0 END" % sql, params
class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
def __init__(self, expr1, spheroid=True, **extra):
self.spheroid = spheroid
super().__init__(expr1, **extra)
def as_sql(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection) and not connection.features.supports_length_geodetic:
raise NotSupportedError("This backend doesn't support Length on geodetic fields")
return super().as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
if self.source_is_geography():
clone.source_expressions.append(Value(self.spheroid))
elif self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
function = connection.ops.spatial_function_name('LengthSpheroid')
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
dim = min(f.dim for f in self.get_source_fields() if f)
if dim > 2:
function = connection.ops.length3d
return super(Length, clone).as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection):
function = 'GeodesicLength' if self.spheroid else 'GreatCircleLength'
return super().as_sql(compiler, connection, function=function, **extra_context)
class LineLocatePoint(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class MakeValid(GeoFunc):
pass
class MemSize(GeoFunc):
output_field = IntegerField()
arity = 1
class NumGeometries(GeoFunc):
output_field = IntegerField()
arity = 1
class NumPoints(GeoFunc):
output_field = IntegerField()
arity = 1
class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
arity = 1
def as_postgresql(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection) and not self.source_is_geography():
raise NotSupportedError("ST_Perimeter cannot use a non-projected non-geography field.")
dim = min(f.dim for f in self.get_source_fields())
if dim > 2:
function = connection.ops.perimeter3d
return super().as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
raise NotSupportedError("Perimeter cannot use a non-projected field.")
return super().as_sql(compiler, connection, **extra_context)
class PointOnSurface(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Reverse(GeoFunc):
arity = 1
class Scale(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, x, y, z=0.0, **extra):
expressions = [
expression,
self._handle_param(x, 'x', NUMERIC_TYPES),
self._handle_param(y, 'y', NUMERIC_TYPES),
]
if z != 0.0:
expressions.append(self._handle_param(z, 'z', NUMERIC_TYPES))
super().__init__(*expressions, **extra)
class SnapToGrid(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, *args, **extra):
nargs = len(args)
expressions = [expression]
if nargs in (1, 2):
expressions.extend(
[self._handle_param(arg, '', NUMERIC_TYPES) for arg in args]
)
elif nargs == 4:
# Reverse origin and size param ordering
expressions += [
*(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[2:]),
*(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[0:2]),
]
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.')
super().__init__(*expressions, **extra)
class SymDifference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class Transform(GeomOutputGeoFunc):
def __init__(self, expression, srid, **extra):
expressions = [
expression,
self._handle_param(srid, 'srid', int),
]
if 'output_field' not in extra:
extra['output_field'] = GeometryField(srid=srid)
super().__init__(*expressions, **extra)
class Translate(Scale):
def as_sqlite(self, compiler, connection, **extra_context):
clone = self.copy()
if len(self.source_expressions) < 4:
# Always provide the z parameter for ST_Translate
clone.source_expressions.append(Value(0))
return super(Translate, clone).as_sqlite(compiler, connection, **extra_context)
class Union(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
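# Illustrative queryset usage (added sketch; `City` is a hypothetical model
# with a PointField called `point`):
#
#     from django.contrib.gis.geos import Point
#     City.objects.annotate(d=Distance('point', Point(0, 0, srid=4326)))
#     City.objects.annotate(geojson=AsGeoJSON('point', precision=6))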
| apache-2.0 | 7,931,150,428,917,694,000 | 35.24359 | 117 | 0.635538 | false | 4.020384 | false | false | false |
himaaaatti/qtile | libqtile/widget/backlight.py | 1 | 3044 | # Copyright (c) 2012 Tim Neumann
# Copyright (c) 2012, 2014 Tycho Andersen
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 Sean Vig
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
from . import base
from libqtile.log_utils import logger
BACKLIGHT_DIR = '/sys/class/backlight'
FORMAT = '{percent: 2.0%}'
class Backlight(base.InLoopPollText):
"""
A simple widget to show the current brightness of a monitor.
"""
filenames = {}
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
('backlight_name', 'acpi_video0', 'ACPI name of a backlight device'),
(
'brightness_file',
'brightness',
'Name of file with the '
'current brightness in /sys/class/backlight/backlight_name'
),
(
'max_brightness_file',
'max_brightness',
'Name of file with the '
'maximum brightness in /sys/class/backlight/backlight_name'
),
('update_interval', .2, 'The delay in seconds between updates'),
]
def __init__(self, **config):
base.InLoopPollText.__init__(self, **config)
self.add_defaults(Backlight.defaults)
def _load_file(self, name):
try:
path = os.path.join(BACKLIGHT_DIR, self.backlight_name, name)
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return False
except Exception:
logger.exception("Failed to get %s" % name)
def _get_info(self):
try:
info = {
'brightness': float(self._load_file(self.brightness_file)),
'max': float(self._load_file(self.max_brightness_file)),
}
except TypeError:
return False
return info
def poll(self):
info = self._get_info()
if info is False:
return 'Error'
percent = info['brightness'] / info['max']
return FORMAT.format(percent=percent)
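# Illustrative qtile config usage (added sketch; the device name under
# /sys/class/backlight varies per machine):
#
#     widget.Backlight(backlight_name='intel_backlight', update_interval=0.5)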
| mit | 7,803,237,369,298,870,000 | 32.822222 | 79 | 0.638962 | false | 4.130258 | false | false | false |
raphaelrubino/nid | nn/mono/run.py | 1 | 1143 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import numpy as np
np.random.seed( 1337 )
import data_utils
from nid import Neural_information_density
import sys
if __name__ == '__main__':
if len( sys.argv ) != 9:
print( "\nUsage: ", sys.argv[ 0 ], "<context> <target> <vocabulary> <embedding size> <dropout> <batch size> <epochs> <output model>\n" )
exit()
context, target, vocab, embedding, dropout, batch, epoch, out_model = sys.argv[ 1: ]
embedding = np.int( embedding )
dropout = np.float( dropout )
batch = np.int( batch )
epoch = np.int( epoch )
print( "Loading vocabulary" )
vocab, max_features = data_utils.load_vocab( vocab )
print( "Loading contexts" )
context = data_utils.load_context( context )
print( "Loading targets" )
target = data_utils.load_target( target ) #, max_features )
max_length = context.shape[ 1 ]
validation_size = 0.25
print( "Data loaded" )
nid = Neural_information_density( context, target, max_features, max_length, batch, validation_size )
print( "Data prepared" )
print( "Training" )
nid.train( embedding, dropout, epoch, out_model )
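# Example invocation (added sketch; the file paths are hypothetical):
#     python3 run.py context.txt target.txt vocab.txt 128 0.5 64 10 out_model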
| mit | 8,807,348,404,547,177,000 | 26.878049 | 138 | 0.677165 | false | 3.05615 | false | true | false |
komuW/sewer | sewer/catalog.py | 1 | 2472 | import codecs, importlib, json, os
from typing import Dict, List, Sequence
from .auth import ProviderBase
class ProviderDescriptor:
def __init__(
self,
*,
name: str,
desc: str,
chals: Sequence[str],
args: Sequence[Dict[str, str]],
deps: Sequence[str],
path: str = None,
cls: str = None,
features: Sequence[str] = None,
memo: str = None,
) -> None:
"initialize a driver descriptor from one item in the catalog"
self.name = name
self.desc = desc
self.chals = chals
self.args = args
self.deps = deps
self.path = path
self.cls = cls
self.features = [] if features is None else features
self.memo = memo
def __str__(self) -> str:
return "Descriptor %s" % self.name
def get_provider(self) -> ProviderBase:
"return the class that implements this driver"
module_name = self.path if self.path else ("sewer.providers." + self.name)
module = importlib.import_module(module_name)
return getattr(module, self.cls if self.cls else "Provider")
class ProviderCatalog:
def __init__(self, filepath: str = "") -> None:
"intialize a catalog from either the default catalog.json or one named by filepath"
if not filepath:
here = os.path.abspath(os.path.dirname(__file__))
filepath = os.path.join(here, "catalog.json")
with codecs.open(filepath, "r", encoding="utf8") as f:
raw_catalog = json.load(f)
items = {} # type: Dict[str, ProviderDescriptor]
for item in raw_catalog:
k = item["name"]
if k in items:
print("WARNING: duplicate name %s skipped in catalog %s" % (k, filepath))
else:
items[k] = ProviderDescriptor(**item)
self.items = items
def get_item_list(self) -> List[ProviderDescriptor]:
"return the list of items in the catalog, sorted by name"
res = [i for i in self.items.values()]
res.sort(key=lambda i: i.name)
return res
def get_descriptor(self, name: str) -> ProviderDescriptor:
"return the ProviderDescriptor that matches name"
return self.items[name]
def get_provider(self, name: str) -> ProviderBase:
"return the class that implements the named driver"
return self.get_descriptor(name).get_provider()
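# Illustrative usage from within the package (added sketch; the available
# provider names depend on the bundled catalog.json):
#
#     from sewer.catalog import ProviderCatalog
#     catalog = ProviderCatalog()
#     for descriptor in catalog.get_item_list():
#         print(descriptor.name, descriptor.chals)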
| mit | -5,405,137,072,739,433,000 | 30.692308 | 91 | 0.59021 | false | 4.059113 | false | false | false |
wienerschnitzel/schnitzelserver | schnitzelserver/session/session.py | 1 | 4940 | import logging
import enum
from sqlalchemy import or_
from sqlalchemy import inspect
from schnitzelserver.moduleindex import ModuleIndex
from schnitzelserver.session.grant_access import SessionGrant
from schnitzelserver.pool import ModelPool, ViewPool
logger = logging.getLogger(__name__)
class AccessTypes(enum.Enum):
read = 'read'
write = 'write'
create = 'create'
delete = 'delete'
class Session():
"""
A Schnitzel Session
"""
def __init__(self, sql_session, user, model_pool: ModelPool, view_pool: ViewPool, module_index: ModuleIndex):
if user is None:
raise ValueError()
self._module_index = module_index or ModuleIndex()
self._model_pool = model_pool
self._user = user
self._sql_session = sql_session
self._granted_access = {}
self._view_pool = view_pool
view_pool = property(lambda self: self._view_pool)
module_index = property(lambda self: self._module_index)
model_pool = property(lambda self: self._model_pool)
user = property(lambda self: self._user)
sql_session = property(lambda self: self._sql_session)
def grant(self, model, field=None, access_type='read'):
return SessionGrant(self, model, field, access_type)
def model_access(self, model_name, field_name=None, model_id=None,
access_type=AccessTypes.read):
"""
Check user rights on a given model
:param model_name:
:param field_name:
:param model_id:
:return: set of access_rights at maximum:
{'read', 'write', 'create', 'delete'}
"""
field_match = lambda field: field == field_name
id_match = lambda _id: _id == model_id
# Query local
grants = self._granted_access.get(model_name, set())
for _id, _field, _type in grants:
if id_match(_id) and field_match(_field) and _type == access_type.value:
return True
# Query database
if self.user.groups:
Access = self.model_pool['schnitzel_model_access']
rules = self._sql_session.query(Access).filter(
Access.model_name == model_name,
Access.group_id.in_([g.id for g in self.user.groups]),
or_(Access.model_id == model_id, Access.model_id.is_(None)),
or_(Access.field_name == field_name, Access.field_name.is_(None)),
Access.access_type == access_type.value
).all()
if rules:
# cache
# for rule in rules:
# grants.add(
# (rule.model_id, rule.field_name, rule.access_type))
# self._granted_access[model_name] = grants
return True
else:
return False
def get_model_instance(self, model_name, model_id):
if not self.model_access(model_name, model_id=model_id):
print([s.id for s in self._sql_session.query(self.model_pool[model_name]).all()])
raise PermissionError("User {} does not have permission to read {} ({})".format(
self.user.username, model_name, model_id
))
instance = self._sql_session.query(self.model_pool[model_name]).get(model_id)
if instance is None:
raise ValueError("There does not exist an instance of {} with id {}".format(model_name, model_id))
return instance
def get_model_instance_as_dict(self, model_name, model_id):
model = self.model_pool[model_name]
instance = self.get_model_instance(model_name, model_id)
return {
name: getattr(instance, name) for name in inspect(model).columns.keys()
}
def create(self, model_name: str, dct: dict):
if not self.model_access(model_name, access_type=AccessTypes.create):
raise PermissionError('No creation rights on {}'.format(model_name))
new_model = self._model_pool[model_name](**dct)
self._sql_session.add(new_model)
def update(self, model_name: str, model_id: int, dct: dict):
missing_field_permissions = [field for field in dct if not self.model_access(model_name, field,
model_id, AccessTypes.write)]
if missing_field_permissions:
raise PermissionError('No write-rights on {} ({}) for fields: {}'.format(
model_name, model_id, missing_field_permissions
))
model = self._model_pool[model_name] # TODO: what about sql sessions and stuff?
model.update().where(model.id == model_id).values(**dct)
def add_module_entries(self, module_name):
db_entries = self.module_index[module_name].entries
for name, entries in db_entries.items():
for entry in entries:
self.create(name, entry) | lgpl-3.0 | 7,753,447,680,131,350,000 | 38.846774 | 114 | 0.591498 | false | 3.917526 | false | false | false |
cprakashagr/PythonClass | src/maths/Haversine.py | 1 | 1450 | from math import radians, cos, sin, asin, sqrt
import time
current_milli_time = lambda: int(round(time.time() * 1000))
def haversine(point1, point2, miles = False):
AVG_EARTH_RADIUS = 6371
lat1, lng1 = point1
lat2, lng2 = point2
# convert all latitudes/longitudes from decimal degrees to radians
lat1, lng1, lat2, lng2 = map(radians, (lat1, lng1, lat2, lng2))
# calculate haversine
lat = lat2 - lat1
lng = lng2 - lng1
d = sin(lat * 0.5) ** 2 + cos(lat1) * cos(lat2) * sin(lng * 0.5) ** 2
h = 2 * AVG_EARTH_RADIUS * asin(sqrt(d))
if miles:
return h * 0.621371 # in miles
else:
return h
pass
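# Quick sanity check (added sketch): Lyon -> Paris is roughly 392 km.
#     haversine((45.7597, 4.8422), (48.8567, 2.3508))              # ~392.2 km
#     haversine((45.7597, 4.8422), (48.8567, 2.3508), miles=True)  # ~243.7 mi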
def main():
# lyon = (45.7597, 4.8422)
# paris = (48.8567, 2.3508)
lyon = (12.9210784, 77.6936946) # Saroj
paris = (12.9132164, 77.6234387) # Snapwiz
    totalDelay = 0
    overallStart = current_milli_time()
    for i in range(0, 300000):
        # print i
        iterStart = current_milli_time()
        dist = haversine(lyon, paris)
        iterEnd = current_milli_time()
        delay = iterEnd - iterStart  # was start-end, which is never positive
        if delay > 0:
            totalDelay += delay
    overallEnd = current_milli_time()
    print overallEnd
    print overallStart
    print "That's All. Total Delay: " + str(overallEnd - overallStart)
# time.sleep(5)
# start = time.time()
# print (haversine(lyon, paris, miles=True))
# end = time.time()
# print "%.20f" % start-end
pass
if __name__ == '__main__':
main()
| mit | -8,202,962,493,890,972,000 | 22.015873 | 73 | 0.570345 | false | 2.832031 | false | false | false |
dedupeio/dedupe-examples | pgsql_big_dedupe_example/pgsql_big_dedupe_example_init_db.py | 1 | 10090 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a setup script for pgsql_big_dedupe_example. It downloads a zip file of
Illinois campaign contributions and loads them into a PostgreSQL database
named 'contributions'.
__Note:__ You will need to run this script first before executing
[mysql_example.py](http://datamade.github.com/dedupe-examples/docs/mysql_example.html).
Tables created:
* raw_table - raw import of entire CSV file
* donors - all distinct donors based on name and address
* recipients - all distinct campaign contribution recipients
* contributions - contribution amounts tied to donor and recipients tables
"""
import csv
import os
import zipfile
import dj_database_url
import psycopg2
import psycopg2.extras
import unidecode
import requests
_file = 'Illinois-campaign-contributions'
contributions_zip_file = _file + '.txt.zip'
contributions_txt_file = _file + '.txt'
contributions_csv_file = _file + '.csv'
if not os.path.exists(contributions_zip_file):
print('downloading', contributions_zip_file, '(~60mb) ...')
u = requests.get(
'https://s3.amazonaws.com/dedupe-data/Illinois-campaign-contributions.txt.zip')
localFile = open(contributions_zip_file, 'wb')
localFile.write(u.content)
localFile.close()
if not os.path.exists(contributions_txt_file):
zip_file = zipfile.ZipFile(contributions_zip_file, 'r')
print('extracting %s' % contributions_zip_file)
zip_file_contents = zip_file.namelist()
for f in zip_file_contents:
if ('.txt' in f):
zip_file.extract(f)
zip_file.close()
# Create a cleaned up CSV version of file with consistent row lengths.
# Postgres COPY doesn't handle "ragged" files very well
if not os.path.exists(contributions_csv_file):
print('converting tab-delimited raw file to csv...')
with open(contributions_txt_file, 'rU') as txt_file, \
open(contributions_csv_file, 'w') as csv_file:
csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
for line in txt_file:
if not all(ord(c) < 128 for c in line):
line = unidecode.unidecode(line)
row = line.rstrip('\t\r\n').split('\t')
if len(row) != 29:
print('skipping bad row (length %s, expected 29):' % len(row))
print(row)
continue
csv_writer.writerow(row)
db_conf = dj_database_url.config()
if not db_conf:
raise Exception(
'set DATABASE_URL environment variable with your connection, e.g. '
'export DATABASE_URL=postgres://user:password@host/mydatabase'
)
conn = psycopg2.connect(database=db_conf['NAME'],
user=db_conf['USER'],
password=db_conf['PASSWORD'],
host=db_conf['HOST'],
port=db_conf['PORT'])
c = conn.cursor()
print('importing raw data from csv...')
c.execute("DROP TABLE IF EXISTS raw_table")
c.execute("DROP TABLE IF EXISTS donors")
c.execute("DROP TABLE IF EXISTS recipients")
c.execute("DROP TABLE IF EXISTS contributions")
c.execute("DROP TABLE IF EXISTS processed_donors")
c.execute("CREATE TABLE raw_table "
"(reciept_id INT, last_name VARCHAR(70), first_name VARCHAR(35), "
" address_1 VARCHAR(35), address_2 VARCHAR(36), city VARCHAR(20), "
" state VARCHAR(15), zip VARCHAR(11), report_type VARCHAR(24), "
" date_recieved VARCHAR(10), loan_amount VARCHAR(12), "
" amount VARCHAR(23), receipt_type VARCHAR(23), "
" employer VARCHAR(70), occupation VARCHAR(40), "
" vendor_last_name VARCHAR(70), vendor_first_name VARCHAR(20), "
" vendor_address_1 VARCHAR(35), vendor_address_2 VARCHAR(31), "
" vendor_city VARCHAR(20), vendor_state VARCHAR(10), "
" vendor_zip VARCHAR(10), description VARCHAR(90), "
" election_type VARCHAR(10), election_year VARCHAR(10), "
" report_period_begin VARCHAR(10), report_period_end VARCHAR(33), "
" committee_name VARCHAR(70), committee_id VARCHAR(37))")
conn.commit()
with open(contributions_csv_file, 'rU') as csv_file:
c.copy_expert("COPY raw_table "
"(reciept_id, last_name, first_name, "
" address_1, address_2, city, state, "
" zip, report_type, date_recieved, "
" loan_amount, amount, receipt_type, "
" employer, occupation, vendor_last_name, "
" vendor_first_name, vendor_address_1, "
" vendor_address_2, vendor_city, vendor_state, "
" vendor_zip, description, election_type, "
" election_year, "
" report_period_begin, report_period_end, "
" committee_name, committee_id) "
"FROM STDIN CSV HEADER", csv_file)
conn.commit()
print('creating donors table...')
c.execute("CREATE TABLE donors "
"(donor_id SERIAL PRIMARY KEY, "
" last_name VARCHAR(70), first_name VARCHAR(35), "
" address_1 VARCHAR(35), address_2 VARCHAR(36), "
" city VARCHAR(20), state VARCHAR(15), "
" zip VARCHAR(11), employer VARCHAR(70), "
" occupation VARCHAR(40))")
c.execute("INSERT INTO donors "
"(first_name, last_name, address_1, "
" address_2, city, state, zip, employer, occupation) "
"SELECT DISTINCT "
"LOWER(TRIM(first_name)), LOWER(TRIM(last_name)), "
"LOWER(TRIM(address_1)), LOWER(TRIM(address_2)), "
"LOWER(TRIM(city)), LOWER(TRIM(state)), LOWER(TRIM(zip)), "
"LOWER(TRIM(employer)), LOWER(TRIM(occupation)) "
"FROM raw_table")
conn.commit()
print('creating indexes on donors table...')
c.execute("CREATE INDEX donors_donor_info ON donors "
"(last_name, first_name, address_1, address_2, city, "
" state, zip)")
conn.commit()
print('creating recipients table...')
c.execute("CREATE TABLE recipients "
"(recipient_id SERIAL PRIMARY KEY, name VARCHAR(70))")
c.execute("INSERT INTO recipients "
"SELECT DISTINCT CAST(committee_id AS INTEGER), "
"committee_name FROM raw_table")
conn.commit()
print('creating contributions table...')
c.execute("CREATE TABLE contributions "
"(contribution_id INT, donor_id INT, recipient_id INT, "
" report_type VARCHAR(24), date_recieved DATE, "
" loan_amount VARCHAR(12), amount VARCHAR(23), "
" receipt_type VARCHAR(23), "
" vendor_last_name VARCHAR(70), "
" vendor_first_name VARCHAR(20), "
" vendor_address_1 VARCHAR(35), vendor_address_2 VARCHAR(31), "
" vendor_city VARCHAR(20), vendor_state VARCHAR(10), "
" vendor_zip VARCHAR(10), description VARCHAR(90), "
" election_type VARCHAR(10), election_year VARCHAR(10), "
" report_period_begin DATE, report_period_end DATE)")
c.execute("INSERT INTO contributions "
"SELECT reciept_id, donors.donor_id, CAST(committee_id AS INTEGER), "
" report_type, TO_DATE(TRIM(date_recieved), 'MM/DD/YYYY'), "
" loan_amount, amount, "
" receipt_type, vendor_last_name , "
" vendor_first_name, vendor_address_1,"
" vendor_address_2, "
" vendor_city, vendor_state, vendor_zip,"
" description, "
" election_type, election_year, "
" TO_DATE(TRIM(report_period_begin), 'MM/DD/YYYY'), "
" TO_DATE(TRIM(report_period_end), 'MM/DD/YYYY') "
"FROM raw_table JOIN donors ON "
"donors.first_name = LOWER(TRIM(raw_table.first_name)) AND "
"donors.last_name = LOWER(TRIM(raw_table.last_name)) AND "
"donors.address_1 = LOWER(TRIM(raw_table.address_1)) AND "
"donors.address_2 = LOWER(TRIM(raw_table.address_2)) AND "
"donors.city = LOWER(TRIM(raw_table.city)) AND "
"donors.state = LOWER(TRIM(raw_table.state)) AND "
"donors.employer = LOWER(TRIM(raw_table.employer)) AND "
"donors.occupation = LOWER(TRIM(raw_table.occupation)) AND "
"donors.zip = LOWER(TRIM(raw_table.zip))")
conn.commit()
print('creating indexes on contributions...')
c.execute("ALTER TABLE contributions ADD PRIMARY KEY(contribution_id)")
c.execute("CREATE INDEX donor_idx ON contributions (donor_id)")
c.execute("CREATE INDEX recipient_idx ON contributions (recipient_id)")
conn.commit()
print('nullifying empty strings in donors...')
c.execute(
"UPDATE donors "
"SET "
"first_name = CASE first_name WHEN '' THEN NULL ELSE first_name END, "
"last_name = CASE last_name WHEN '' THEN NULL ELSE last_name END, "
"address_1 = CASE address_1 WHEN '' THEN NULL ELSE address_1 END, "
"address_2 = CASE address_2 WHEN '' THEN NULL ELSE address_2 END, "
"city = CASE city WHEN '' THEN NULL ELSE city END, "
"state = CASE state WHEN '' THEN NULL ELSE state END, "
"employer = CASE employer WHEN '' THEN NULL ELSE employer END, "
"occupation = CASE occupation WHEN '' THEN NULL ELSE occupation END, "
"zip = CASE zip WHEN '' THEN NULL ELSE zip END"
)
conn.commit()
print('creating processed_donors...')
c.execute("CREATE TABLE processed_donors AS "
"(SELECT donor_id, "
" LOWER(city) AS city, "
" CASE WHEN (first_name IS NULL AND last_name IS NULL) "
" THEN NULL "
" ELSE LOWER(CONCAT_WS(' ', first_name, last_name)) "
" END AS name, "
" LOWER(zip) AS zip, "
" LOWER(state) AS state, "
" CASE WHEN (address_1 IS NULL AND address_2 IS NULL) "
" THEN NULL "
" ELSE LOWER(CONCAT_WS(' ', address_1, address_2)) "
" END AS address, "
" LOWER(occupation) AS occupation, "
" LOWER(employer) AS employer, "
" CAST((first_name IS NULL) AS INTEGER) AS person "
" FROM donors)")
c.execute("CREATE INDEX processed_donor_idx ON processed_donors (donor_id)")
conn.commit()
c.close()
conn.close()
print('done')
| mit | -596,923,599,316,805,100 | 39.522088 | 87 | 0.615461 | false | 3.434309 | false | false | false |
xncbf/authome | log/views.py | 1 | 3426 | from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.urls import reverse
from django.db import connection
from django.shortcuts import render, HttpResponse
from django.utils import timezone
from django.views.generic import View
from dev.models import MacroLog, UserPage
from utils.services import dictfetchall
class Log(LoginRequiredMixin, View):
template_name = "log/log.html"
login_url = '/accounts/login/'
def get(self, request, *args, **kwargs):
context = {}
qs = MacroLog.objects.filter(macro__user=request.user).order_by('macro', 'user', '-created').distinct('macro',
'user')
unsorted_results = qs.all()
context['macroLog'] = sorted(unsorted_results, key=lambda t: t.created, reverse=True)
context['userPage'] = UserPage.objects.filter(macro__user=request.user).distinct('user')
return render(self.request, self.template_name, context)
def post(self, request, *args, **kwargs):
with connection.cursor() as cursor:
if request.is_ajax():
ddl_user = ','.join(request.POST.get('ddlUser').split(','))
if ddl_user:
where_str = 'AND ML.user_id IN ({0})'.format(ddl_user)
else:
where_str = ''
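                # NOTE: ddl_user comes straight from POST data; interpolating
                # it into SQL is injection-prone and would be safer with
                # parameterized queries.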
cursor.execute("""SELECT
ML.macro_id,
ML.created,
ML.ip,
M.title,
U.email
FROM main_macrolog ML
LEFT JOIN main_macro M ON M.id = ML.macro_id
LEFT JOIN auth_user U ON U.id = ML.user_id
WHERE M.user_id = '{0}' {1}
ORDER BY ML.created DESC
LIMIT 20""".format(request.user.pk, where_str))
obj = dictfetchall(cursor)
result = self.set_html(obj)
return HttpResponse(result)
def set_html(self, obj, html=''):
for e in obj:
user = User.objects.get(email=e.get('email'))
local_time = timezone.localtime(e.get('created'))
if user.socialaccount_set.all():
profile_url = user.socialaccount_set.all()[0].get_avatar_url()
else:
profile_url = static('images/Jigglypuff.png')
html += """<li class="collection-item user-list">
<a href="{0}">
<div>{1}</div>
<div class="chip">
<img src="{2}">{3}
</div>
<span class="secondary-content">{4}<br>{5}</span>
</a>
</li>""".format(reverse('user_manage', kwargs={'macro_id': e.get('macro_id')}),
                           e.get('title') or 'Untitled',
profile_url,
e.get('email'),
e.get('ip'),
local_time.strftime('%y-%m-%d %H:%M'))
if len(obj) == 0:
            html = '<li class="collection-item user-list">No usage history!</li>'
return html
| mit | 8,670,323,591,795,827,000 | 43.763158 | 118 | 0.496179 | false | 4.241895 | false | false | false |
LabD/wagtail-personalisation | tests/factories/rule.py | 1 | 1032 | from __future__ import absolute_import, unicode_literals
import datetime
import factory
from wagtail_personalisation import rules
class DayRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.DayRule
class DeviceRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.DeviceRule
class QueryRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.QueryRule
class ReferralRuleFactory(factory.DjangoModelFactory):
regex_string = "test.test"
class Meta:
model = rules.ReferralRule
class TimeRuleFactory(factory.DjangoModelFactory):
start_time = datetime.time(8, 0, 0)
end_time = datetime.time(23, 0, 0)
class Meta:
model = rules.TimeRule
class VisitCountRuleFactory(factory.DjangoModelFactory):
operator = "more_than"
count = 0
class Meta:
model = rules.VisitCountRule
class OriginCountryRuleFactory(factory.DjangoModelFactory):
class Meta:
model = rules.OriginCountryRule
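# usage (illustrative): TimeRuleFactory() creates a rules.TimeRule
# spanning 08:00-23:00, per the defaults declared above.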
| mit | 6,287,791,869,828,322,000 | 18.471698 | 59 | 0.719961 | false | 4 | false | false | false |
jfrfonseca/IndexadorDidaticoPython | fileIO.py | 1 | 11925 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
José F. R. Fonseca
See Attached License file
Controls the access to the disk. Defines the class DiskAccessControl,
an object to control the disk files. Multithread-writes the files.
'''
import ast
import os
import time
from linecache import getline
from threading import Thread
'''
ONLY WRITES TO THE FILE WHEN THE CACHE OF LINES TO WRITE EXCEEDS
THIS MANY BYTES, or if it is the last batch of files to be written.
'''
FILEACCESS_THRESHOLD = 1024*1024/32
'''
CLASSES
'''
class DiskAccessControl(): # @IgnorePep8
'''
Control the access to the disk, being the middleware to read and write the
index files
'''
def __init__(self, invertedIndexFileName, fileLen=None,
onMemInd=None, nameDict=None, mods=None):
'''
Instantiates the class as the only reference to the index files
:param invertedIndexFileName: string, name of the index file
:param fileLen: int, original number of lines of the index file, when
known
:param onMemInd: dictionary, on-memory index that translates terms
into file positions
:param nameDict: dictionary, on-memory index that translates the name
of each file indexed into a hash
into a hash
:param mods: dictionary, side-loaded modifications to be put into the
index manually.
'''
# loads the name of the index file
self.GENERATED_INVERTED_INDEX = invertedIndexFileName
# if there is a parameter fileLen, uses it. if not, counts the number
# of lines in the indexFile
if fileLen is None:
# print "GETTING THE FILE # OF LINES!"
lineNum = 0
# reads every line, and counts the number of lines in the index file @IgnorePep8
with open(self.GENERATED_INVERTED_INDEX, "r") as indFile:
for lineNum, dummy in enumerate(indFile):
pass
self.fileLength = lineNum + 1
else:
self.fileLength = fileLen
# if there is a parameter onMemInd, uses it. if not, loads it from the
# memory dump file metaindex
if onMemInd is None:
print "FILLING MEMORY INDEX WITH LAST SESSION'S!"
# OnMemoryIndex: dictionary that maps WORD to HashWord
# Loads the metaindex file into main memory, into onMemoryIndex attribute @IgnorePep8
with open("metaIndex-"+self.GENERATED_INVERTED_INDEX, "r") as metaindex: # @IgnorePep8
data = metaindex.read()
self.onMemoryIndex = ast.literal_eval(data)
else:
self.onMemoryIndex = onMemInd
# if there is a parameter namesDict, uses it. if not, loads it from the
# memory dump file namesDict, mapping a file name to its hash
if nameDict is None:
print "FILLING NAMES DICTIONARY WITH LAST SESSION'S!"
# Loads the namesDict file into main memory, into namesDict attribute @IgnorePep8
with open("namesDict-"+self.GENERATED_INVERTED_INDEX, "r") as namesDict: # @IgnorePep8
data = namesDict.read()
self.namesDictionary = ast.literal_eval(data)
else:
self.namesDictionary = nameDict
# if there is a parameter mods, uses it. if not, creates a new empty
# python dictionary to retain on-memory changes to the index
if mods is None:
self.modifications = {}
else:
self.modifications = mods
'''
METHODS ###############################################
'''
def getIndexLine(self, word):
'''
GETS a line of the index file, containing the inverted list of the word
provided. If inexistent, returns an empty list
:return a list containing the index data of the word requested.
It may be: the inverted list on the index, the modifications done
to such list in memory, or an empty list for a new term to be indexed
:param word: string to retrieve the index data of it, a term
'''
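        # an index line has the form:
        #   "<termHash>:::[(fileHash, count, [positions]), ...]"
        # (see pushIntoIndexFile and dumpMemory below)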
# if the word is on the onMemoryIndex, and thereby on the file index,
if word in self.onMemoryIndex.keys():
# retrieves the hash of the word in wrd
wrd = self.onMemoryIndex[word]
# if that word has already been modified, its modifications will be
# on main memory, and do not need to be retrieved from the disk.
if wrd not in self.modifications.keys():
try:
# retrieves a list of the data in the line of the index
# file on disk that contains the inverted index of the
                    # word, given its hash. The value of wrd must be
                    # incremented by 1 because linecache numbers lines from 1
return ast.literal_eval(getline(self.GENERATED_INVERTED_INDEX, int(wrd)+1).split(":::")[1]) # @IgnorePep8
# prints-out eventual exceptions, as the hash searched in
# the index file, and the data recovered from it, as a string
# separated by "(|||)" rather than spaces
except:
print wrd, "(|||)", getline(self.GENERATED_INVERTED_INDEX, int(wrd)+1) # @IgnorePep8
else:
# returns the modifications to the index line, already on memory @IgnorePep8
return self.modifications[wrd]
# if the word searched is not in the index,
else:
# opens the index file, generates a new hash for the word to be
# indexed, and writes an empty list to the index file at the
# words's future position. Returns an empty list
with open(self.GENERATED_INVERTED_INDEX, "a") as indFile:
self.onMemoryIndex[word] = str(len(self.onMemoryIndex.keys())) # @IgnorePep8
indFile.write(self.onMemoryIndex[word]+":::"+"[]"+"\n")
self.fileLength += 1
return []
def pushIntoIndexFile(self, fileIndexedName, word, wordIndexTouple):
'''
        Pushes the freshly produced inverted list of a term into the index
        :param fileIndexedName: string, name of the file just indexed
        :param word: string, term to be pushed into the index
        :param wordIndexTouple: tuple, containing the number of elements
        in the positions list, and an (integer) positions list of occurrences of
the term in the file indexed
'''
# gets the line of the index for the term pushed
indexLine = self.getIndexLine(word)
# if the file pushed has already been indexed before, recovers its
# hash name
if fileIndexedName in self.namesDictionary.keys():
hashName = self.namesDictionary[fileIndexedName]
# if not, creates a new hash for the file name, as a number
else:
self.namesDictionary[fileIndexedName] = hashName = str(len(self.namesDictionary.keys())) # @IgnorePep8
try:
# includes the index of the new file pushed into the respective
            # line in the on-memory inverted list of the term, avoiding
            # repetitions. Includes the name of the file, the number of
            # occurrences and the positions at which the indexed term occurs.
indexLine.append((hashName, wordIndexTouple[0], (list(set(wordIndexTouple[1]))), )) # @IgnorePep8
# includes the freshly produced new index for the term in the
# on- memory modifications to be written on disk
self.modifications[self.onMemoryIndex[word]] = indexLine
# reveals an I/O error. bureaucracy
except IndexError:
print "Got an IndexError!"+str((word, self.onMemoryIndex[word], indexLine, )) # @IgnorePep8
def merge(self, outerModifications):
'''
Pushes provided modifications (made by another thread, for example,
into this instance's modifications list
:param outerModifications: dictionary, mapping terms to inverted lists,
are modifications to the index file imported from another instance
'''
# for each key of the outer modifications dictionary,
for outKey in outerModifications.keys():
if outKey in self.modifications.keys():
# if the key is on the current modifications list, joins the
# contents of both lists, and sorts by the hash of the terms
self.modifications[outKey].extend(outerModifications[outKey])
self.modifications[outKey] = sorted(self.modifications[outKey],
key=lambda mod: int(mod[0])) # @IgnorePep8
# if the outer key is not on the current modifications list,
# adds to it
else:
self.modifications[outKey] = outerModifications[outKey]
def dumpMetafiles(self):
'''
Dumps the on-memory metafiles, the dictionaries mapping terms to file
positions (hashes) and file names to hashes, to disk files.
'''
with open("metaIndex-"+self.GENERATED_INVERTED_INDEX, "w") as metaindex: # @IgnorePep8
metaindex.write(str(self.onMemoryIndex))
with open("namesDict-"+self.GENERATED_INVERTED_INDEX, "w") as namesDict: # @IgnorePep8
namesDict.write(str(self.namesDictionary))
def dumpMemory(self):
'''
Dumps the metafiles and writes the modifications to the index. It is,
        by far, the most time-costly operation in the entire program, which was
        to be expected, since it involves heavy file writing and reading.
'''
# Creates a new thread to write the metafiles concurrently
metafileWriter = Thread(target=self.dumpMetafiles)
metafileWriter.start()
        # string writing buffer, to be written to the file
printString = ""
# for each modification on memory, got in order, writes on the string
# buffer, and when it gets full, writes to a temporary disk file the
# results of merging the modification on each line of the index,
# and the unmodified lines, ordered by the hashes of the terms
modKeys = sorted([k for k in self.modifications.keys()])
with open(self.GENERATED_INVERTED_INDEX, "r") as oldIndexFile: # @IgnorePep8
with open("TEMP_"+self.GENERATED_INVERTED_INDEX, "w+") as newIndexFile: # @IgnorePep8
for line in oldIndexFile:
# reads the data in the old index file line
lineNum = line.split(":::")[0]
# if the modifications line is to be written in the string
# writing buffer, because the read line was modified
if lineNum in modKeys: # @IgnorePep8
printString += lineNum+":::"+str(self.modifications[lineNum])+"\n" # @IgnorePep8
else:
# if the original index line is to be written on the
# file writing buffer, saves it
printString += line
# if the buffer is full to the threshold, writes it to
# the disk file
                    if len(printString) >= FILEACCESS_THRESHOLD:
newIndexFile.write(printString)
printString = ""
# renames the old inverted Index to become a backup
os.rename(self.GENERATED_INVERTED_INDEX, "Backup_"+str(time.time())+"_"+self.GENERATED_INVERTED_INDEX) # @IgnorePep8
# rename the new to replace the old one
os.rename("TEMP_"+self.GENERATED_INVERTED_INDEX, self.GENERATED_INVERTED_INDEX) # @IgnorePep8
# assures that the metafile writer thread is done writing
metafileWriter.join()
| gpl-2.0 | -3,419,447,673,354,494,500 | 49.312236 | 126 | 0.617243 | false | 4.301587 | false | false | false |
uberVU/elasticboard | data_processor/api.py | 1 | 7727 | from functools import partial, wraps
import datetime
import queries
from utils import crossdomain
from flask import Flask, jsonify, request
from werkzeug.contrib.cache import MemcachedCache
cache = MemcachedCache(['127.0.0.1:11211'])
CACHE_TIMEOUT = 5 * 60
app = Flask(__name__)
# app.debug = True
CHART_INTERVALS = 6
def index_name(user, repo):
return '&'.join((user, repo))
@app.errorhandler(500)
def internal_error(error):
return "Not found or bad request", 400
# http://flask.pocoo.org/docs/patterns/viewdecorators/#caching-decorator
def cached(timeout=CACHE_TIMEOUT, key='view/%s'):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
cache_key = key % request.full_path # using full path for get params
rv = cache.get(cache_key)
if rv is not None:
return rv
rv = f(*args, **kwargs)
cache.set(cache_key, rv, timeout=timeout)
return rv
return decorated_function
return decorator
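# usage: stack @cached() beneath the route and crossdomain decorators,
# as done for the endpoints below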
# api endpoints that call the queries
@app.route('/<owner>/<repo>/most_active_people')
@crossdomain(origin='*')
@cached()
def most_active_people(owner, repo):
index = index_name(owner, repo)
data = queries.most_active_people(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/total_events')
@crossdomain(origin='*')
@cached()
def total_events(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
data = queries.past_n_weeks(index, queries.total_events, CHART_INTERVALS)
elif mode == 'monthly':
data = queries.past_n_months(index, queries.total_events, CHART_INTERVALS)
else:
data = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=data)
@app.route('/<owner>/<repo>/most_active_issues')
@crossdomain(origin='*')
@cached()
def most_active_issues(owner, repo):
index = index_name(owner, repo)
data = queries.most_active_issues(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/untouched_issues')
@crossdomain(origin='*')
@cached()
def untouched_issues(owner, repo):
index = index_name(owner, repo)
label = request.args.get('label', None)
data = queries.untouched_issues(index, label)
return jsonify(data=data)
@app.route('/<owner>/<repo>/recent_events')
@crossdomain(origin='*')
@cached()
def recent_events(owner, repo):
index = index_name(owner, repo)
count = int(request.args.get('count', 200))
starting_from = int(request.args.get('starting_from', 0))
data = queries.recent_events(index, count, starting_from)
return jsonify(data=data)
@app.route('/available_repos')
@crossdomain(origin='*')
def available_repos():
data = sorted(queries.available_repos())
return jsonify(data=data)
@app.route('/<owner>/<repo>/issues_activity')
@crossdomain(origin='*')
@cached()
def issues_activity(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
opened = queries.past_n_weeks(index, partial(queries.issue_events_count, action='opened'), CHART_INTERVALS)
closed = queries.past_n_weeks(index, partial(queries.issue_events_count, action='closed'), CHART_INTERVALS)
data = {'opened': opened, 'closed': closed}
elif mode == 'monthly':
opened = queries.past_n_months(index, partial(queries.issue_events_count, action='opened'), CHART_INTERVALS)
closed = queries.past_n_months(index, partial(queries.issue_events_count, action='closed'), CHART_INTERVALS)
data = {'opened': opened, 'closed': closed}
else:
data = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=data)
@app.route('/<owner>/<repo>/issues_count')
@crossdomain(origin='*')
@cached()
def issues_count(owner, repo):
index = index_name(owner, repo)
open = queries.issues_count(index, 'open')
closed = queries.issues_count(index, 'closed')
data = {'open': open, 'closed': closed}
return jsonify(data=data)
@app.route('/<owner>/<repo>/pulls_count')
@crossdomain(origin='*')
@cached()
def pulls_count(owner, repo):
index = index_name(owner, repo)
count = queries.pulls_count(index)
data = {'open': count}
return jsonify(data=data)
@app.route('/<owner>/<repo>/inactive_issues')
@crossdomain(origin='*')
@cached()
def inactive_issues(owner, repo):
index = index_name(owner, repo)
label = request.args.get('label', None)
data = queries.inactive_issues(index, label)
return jsonify(data=data)
@app.route('/<owner>/<repo>/avg_issue_time')
@crossdomain(origin='*')
@cached()
def avg_issue_time(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
times = queries.past_n_weeks(index, queries.avg_issue_time, CHART_INTERVALS)
elif mode == 'monthly':
times = queries.past_n_months(index, queries.avg_issue_time, CHART_INTERVALS)
else:
times = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=times)
@app.route('/<owner>/<repo>/issues_involvement')
@crossdomain(origin='*')
@cached()
def issues_involvement(owner, repo):
index = index_name(owner, repo)
now = datetime.datetime.now()
month_start = now - datetime.timedelta(days=now.day)
data = queries.issues_involvement(index, start=month_start, end=now)
return jsonify(data=data)
@app.route('/<owner>/<repo>/milestones')
@crossdomain(origin='*')
@cached()
def milestones(owner, repo):
index = index_name(owner, repo)
milestones = queries.milestones(index)
return jsonify(data=milestones)
@app.route('/<owner>/<repo>/unassigned_issues')
@crossdomain(origin='*')
@cached()
def unassigned_issues(owner, repo):
index = index_name(owner, repo)
label = request.args.get('label', None)
issues = queries.unassigned_issues(index, label)
return jsonify(data=issues)
@app.route('/<owner>/<repo>/labels')
@crossdomain(origin='*')
@cached()
def labels(owner, repo):
index = index_name(owner, repo)
labels = queries.labels(index)
return jsonify(data=labels)
@app.route('/<owner>/<repo>/outstanding_pull_requests')
@crossdomain(origin='*')
@cached()
def outstanding_pull_requests(owner, repo):
index = index_name(owner, repo)
prs = queries.outstanding_pull_requests(index, limit=20)
return jsonify(data=prs)
@app.route('/<owner>/<repo>/popularity_evolution')
@crossdomain(origin='*')
@cached()
def popularity_evolution(owner, repo):
index = index_name(owner, repo)
mode = request.args.get('mode', 'weekly')
if mode == 'weekly':
data = queries.past_n_weeks(index, queries.popularity_events, CHART_INTERVALS)
elif mode == 'monthly':
data = queries.past_n_months(index, queries.popularity_events, CHART_INTERVALS)
else:
data = 'Mode not supported. Use ?mode=weekly or monthly'
return jsonify(data=data)
@app.route('/<owner>/<repo>/collaborators')
@crossdomain(origin='*')
@cached()
def collaborators(owner, repo):
index = index_name(owner, repo)
data = queries.collaborators(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/pull_requests')
@crossdomain(origin='*')
@cached()
def pull_requests(owner, repo):
index = index_name(owner, repo)
data = queries.pull_requests(index)
return jsonify(data=data)
@app.route('/<owner>/<repo>/issue_distribution')
@crossdomain(origin='*')
@cached()
def issue_distribution(owner, repo):
index = index_name(owner, repo)
data = queries.issue_distribution(index)
return jsonify(data=data)
if __name__ == '__main__':
app.run(host='0.0.0.0', threaded=True)
| mit | -6,479,771,487,385,263,000 | 31.330544 | 116 | 0.671541 | false | 3.319158 | false | false | false |
elhe/bread_diary_web | application/views.py | 1 | 1189 | from application import application, utils
from application.urls import DIARY_ADD_URL, DIARY_ALL_URL
from dateutil.parser import parse
from flask import render_template, request, url_for
from werkzeug.utils import redirect
__author__ = 'elhe'
@application.route('/', methods=['GET', ])
def index():
response = utils.send_http_request('get', DIARY_ALL_URL)
data = response.json()
for entry in data['entries']:
        date_time = parse(entry['date_time'])
entry['date_time'] = date_time.strftime('%d.%m %H:%M')
return render_template('index.html', entries=data['entries'])
@application.route('/add_food_entry', methods=['POST', 'GET'])
def add_food_entry():
if request.method == 'POST':
data = dict(name=request.form.get('food'),
weight=int(request.form.get('weight')),
date_time=request.form.get('date_time'),
bread_unit=int(request.form.get('bread_unit')),)
response = utils.send_json_request('post', DIARY_ADD_URL, data)
if response.status_code != 200:
return render_template('index.html', message='FAILS')
return redirect(url_for('index'))
| gpl-2.0 | 5,933,237,861,314,215,000 | 35.030303 | 71 | 0.637511 | false | 3.658462 | false | false | false |