Dataset schema (one row per source file):

    field             dtype      stats
    ----------------  ---------  ---------------------------------------------
    repo_name         string     lengths 5 to 92
    path              string     lengths 4 to 221
    copies            string     19 values
    size              string     lengths 4 to 6
    content           string     lengths 766 to 896k
    license           string     15 values
    hash              int64      -9,223,277,421,539,062,000 to 9,223,102,107B
    line_mean         float64    6.51 to 99.9
    line_max          int64      32 to 997
    alpha_frac        float64    0.25 to 0.96
    autogenerated     bool       1 class
    ratio             float64    1.5 to 13.6
    config_test       bool       2 classes
    has_no_keywords   bool       2 classes
    few_assignments   bool       1 class
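Rows with this schema are easy to filter once exported; a minimal sketch using pandas, assuming a local Parquet export (the file name is hypothetical, since the dump does not say where the data lives):

import pandas as pd

# Hypothetical local export of the table described above.
df = pd.read_parquet("code_rows.parquet")

# The boolean quality flags make it easy to keep plain hand-written files.
keep = ~(df["autogenerated"] | df["config_test"] | df["has_no_keywords"])
subset = df.loc[keep, ["repo_name", "path", "license", "content"]]
print(len(subset), "rows kept out of", len(df))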
repo_name: oblalex/gnuplot.py-py3k
path: gp_macosx.py
copies: 1
size: 4576
content:
# $Id: gp_macosx.py 291 2006-03-03 08:58:48Z mhagger $

# Copyright (C) 1998-2003 Michael Haggerty <[email protected]>
#
# This file is licensed under the GNU Lesser General Public License
# (LGPL). See LICENSE.txt for details.

"""gp_macosx -- an interface to the command line version of gnuplot
used under Mac OS X.

The only difference between this interface and gp_unix is that
default_term is 'aqua'.

This file implements a low-level interface to gnuplot. This file
should be imported through gp.py, which in turn should be imported via
'import Gnuplot' rather than using these low-level interfaces
directly.

"""

# ############ Configuration variables: ################################

class GnuplotOpts:
    """The configuration options for gnuplot on Mac OS X.

    See the gp_unix.py for documentation on all of the parameters.

    """

    gnuplot_command = 'gnuplot'
    recognizes_persist = None  # test automatically on first use
    prefer_persist = 0
    recognizes_binary_splot = 1
    prefer_inline_data = 0

    # os.mkfifo should be supported on Mac OS X. Let me know if I'm
    # wrong.
    support_fifo = 1
    prefer_fifo_data = 1

    default_term = 'aqua'
    default_lpr = '| lpr'
    prefer_enhanced_postscript = 1

# ############ End of configuration options ############################

from os import popen


def test_persist():
    """Determine whether gnuplot recognizes the option '-persist'.

    If the configuration variable 'recognizes_persist' is set (i.e.,
    to something other than 'None'), return that value. Otherwise,
    try to determine whether the installed version of gnuplot
    recognizes the -persist option. (If it doesn't, it should emit an
    error message with '-persist' in the first line.) Then set
    'recognizes_persist' accordingly for future reference.

    """

    if GnuplotOpts.recognizes_persist is None:
        import string
        g = popen('echo | %s -persist 2>&1' % GnuplotOpts.gnuplot_command, 'r')
        response = g.readlines()
        g.close()
        GnuplotOpts.recognizes_persist = (
            (not response) or (string.find(response[0], '-persist') == -1))
    return GnuplotOpts.recognizes_persist


class GnuplotProcess:
    """Unsophisticated interface to a running gnuplot program.

    This represents a running gnuplot program and the means to
    communicate with it at a primitive level (i.e., pass it commands
    or data). When the object is destroyed, the gnuplot program exits
    (unless the 'persist' option was set). The communication is
    one-way; gnuplot's text output just goes to stdout with no attempt
    to check it for error messages.

    Members:

        'gnuplot' -- the pipe to the gnuplot command.

    Methods:

        '__init__' -- start up the program.

        '__call__' -- pass an arbitrary string to the gnuplot program,
            followed by a newline.

        'write' -- pass an arbitrary string to the gnuplot program.

        'flush' -- cause pending output to be written immediately.

        'close' -- close the connection to gnuplot.

    """

    def __init__(self, persist=None):
        """Start a gnuplot process.

        Create a 'GnuplotProcess' object. This starts a gnuplot
        program and prepares to write commands to it.

        Keyword arguments:

          'persist=1' -- start gnuplot with the '-persist' option,
              (which leaves the plot window on the screen even after
              the gnuplot program ends, and creates a new plot window
              each time the terminal type is set to 'x11'). This
              option is not available on older versions of gnuplot.

        """

        if persist is None:
            persist = GnuplotOpts.prefer_persist
        if persist:
            if not test_persist():
                raise Exception('-persist does not seem to be supported '
                                'by your version of gnuplot!')
            self.gnuplot = popen('%s -persist' % GnuplotOpts.gnuplot_command,
                                 'w')
        else:
            self.gnuplot = popen(GnuplotOpts.gnuplot_command, 'w')

        # forward write and flush methods:
        self.write = self.gnuplot.write
        self.flush = self.gnuplot.flush

    def close(self):
        if self.gnuplot is not None:
            self.gnuplot.close()
            self.gnuplot = None

    def __del__(self):
        self.close()

    def __call__(self, s):
        """Send a command string to gnuplot, followed by newline."""

        self.write(s + '\n')
        self.flush()
license: lgpl-2.1
hash: 5,807,611,642,525,777,000
line_mean: 30.342466
line_max: 79
alpha_frac: 0.629808
autogenerated: false
ratio: 4.198165
config_test: false
has_no_keywords: false
few_assignments: false
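A short usage sketch for the record above: per its docstring, GnuplotProcess is normally reached via 'import Gnuplot', but driving it directly (assuming gnuplot is installed and the Python 2-era os.popen used by the file) looks like this:

from gp_macosx import GnuplotProcess

gp = GnuplotProcess(persist=1)   # raises if gnuplot lacks -persist
gp('set title "demo"')           # __call__ appends '\n' and flushes
gp('plot sin(x)')                # renders in the default 'aqua' terminal
gp.close()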
repo_name: grnet/agkyra
path: agkyra/syncer/file_client.py
copies: 1
size: 1872
content:
# Copyright (C) 2015 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import logging
logger = logging.getLogger(__name__)

from agkyra.syncer import common, messaging


class FileClient(object):

    def list_candidate_files(self, archive):
        raise NotImplementedError

    def start_probing_file(self, objname, old_state, ref_state,
                           callback=None):
        raise NotImplementedError

    def stage_file(self, source_state):
        raise NotImplementedError

    def prepare_target(self, state):
        raise NotImplementedError

    def start_pulling_file(self, source_handle, target_state, sync_state,
                           callback=None):
        synced_source_state, synced_target_state = \
            self._start(source_handle, target_state, sync_state)
        if callback is not None:
            callback(synced_source_state, synced_target_state)

    def _start(self, source_handle, target_state, sync_state):
        try:
            target_handle = self.prepare_target(target_state)
            synced_target_state = target_handle.pull(source_handle, sync_state)
            synced_source_state = source_handle.get_synced_state()
            return synced_source_state, synced_target_state
        finally:
            source_handle.unstage_file()
license: gpl-3.0
hash: -1,186,452,690,488,314,600
line_mean: 36.44
line_max: 79
alpha_frac: 0.69391
autogenerated: false
ratio: 4.060738
config_test: false
has_no_keywords: false
few_assignments: false
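In the record above, FileClient is an abstract base: start_pulling_file drives the transfer and its finally clause guarantees unstage_file runs, while subclasses supply the probing, staging, and target-preparation hooks. A minimal stub of a concrete subclass (hypothetical names; not agkyra's real client):

class DummyFileClient(FileClient):
    """Hypothetical sketch showing which hooks a subclass must override."""

    def list_candidate_files(self, archive):
        return []  # would enumerate changed object names for the archive

    def start_probing_file(self, objname, old_state, ref_state, callback=None):
        pass  # would detect changes and invoke callback with a new state

    def stage_file(self, source_state):
        pass  # would lock/snapshot the source and return a source handle

    def prepare_target(self, state):
        raise NotImplementedError  # must return a handle exposing pull(...)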
repo_name: telefonicaid/fiware-puppetwrapper
path: acceptance_tests/component/delete_module/features/steps.py
copies: 1
size: 1740
content:
__author__ = 'arobres'

# -*- coding: utf-8 -*-

from commons.rest_utils import RestUtils
from nose.tools import assert_true, assert_false
import commons.assertions as Assertions
import commons.fabric_utils as Fabutils
from commons.constants import URL, MODULE_NAME, REPOSITORY
from lettuce import step, world, before

api_utils = RestUtils()


@before.each_scenario
def setup(scenario):
    world.software_downloaded = []


@step(u'Given a downloaded module from repository')
def given_a_downloaded_module_from_repository(step):
    for examples in step.hashes:
        url = examples[URL]
        module_name = examples[MODULE_NAME]
        repository = examples[REPOSITORY]
        response = api_utils.download_module(software_name=module_name,
                                             repository=repository, url=url)
        Assertions.assert_response_ok(response)
        assert_true(Fabutils.execute_assert_download(module_name))
        world.software_downloaded.append(module_name)


@step(u'When I delete the module "([^"]*)"')
def when_i_delete_the_module_group1(step, module_name):
    world.module_name = module_name
    world.response = api_utils.delete_module(software_name=module_name)


@step(u'Then the module is deleted from the system')
def then_the_module_is_deleted_from_the_system(step):
    Assertions.assert_response_ok(world.response)
    assert_false(Fabutils.execute_assert_download(world.module_name))


@step(u'Then the module is not deleted from the system')
def then_the_module_is_not_deleted_from_the_system(step):
    Assertions.assert_response_ok(world.response)
    assert_false(Fabutils.execute_assert_download(world.module_name))
    for module in world.software_downloaded:
        assert_true(Fabutils.execute_assert_download(module))
license: apache-2.0
hash: -1,643,073,405,117,868,800
line_mean: 30.636364
line_max: 103
alpha_frac: 0.737931
autogenerated: false
ratio: 3.473054
config_test: false
has_no_keywords: false
few_assignments: false
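Lettuce dispatches each scenario line to the step whose regex matches, binding quoted groups to the step function's parameters; a quick standalone check of the delete-step pattern used in the record above:

import re

pattern = re.compile(u'When I delete the module "([^"]*)"')
line = u'When I delete the module "mongodb"'

match = pattern.search(line)
print(match.group(1))  # -> mongodb, bound to the module_name parameter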
repo_name: bingopodcast/bingos
path: bingo_emulator/fun_spot_63/game.py
copies: 1
size: 47949
content:
#!/usr/bin/python import logging logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") import procgame.game, sys, os import procgame.config import random import procgame.sound sys.path.insert(0,os.path.pardir) import bingo_emulator.common.units as units import bingo_emulator.common.functions as functions from bingo_emulator.graphics import methods as graphics from bingo_emulator.graphics.fun_spot_63 import * class MulticardBingo(procgame.game.Mode): def __init__(self, game): super(MulticardBingo, self).__init__(game=game, priority=5) self.holes = [] self.game.anti_cheat.engage(self.game) self.startup() self.game.sound.register_music('motor', "audio/six_card_motor.wav") self.game.sound.register_music('search', "audio/six_card_search_old.wav") self.game.sound.register_sound('add', "audio/six_card_add_card.wav") self.game.sound.register_sound('tilt', "audio/tilt.wav") self.game.sound.register_sound('step', "audio/step.wav") self.game.sound.register_sound('eb_search', "audio/EB_Search.wav") def sw_coin_active(self, sw): self.game.tilt.disengage() self.regular_play() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) if self.game.replays > 0 and self.game.selector.position < 6: self.delay(name="play_replays", delay=0.2, handler=self.play_replays) def play_replays(self): if self.game.replays > 0 and self.game.selector.position < 6: self.delay(name="play", delay=0, handler=self.regular_play) self.delay(name="display", delay=0.2, handler=graphics.fun_spot_63.display, param=self) self.delay(name="coin", delay=0.2, handler=self.play_replays) def sw_startButton_active(self, sw): if self.game.replays > 0 or self.game.switches.freeplay.is_active(): self.regular_play() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) if self.game.replays > 0 and self.game.selector.position < 6: self.delay(name="play_replays", delay=0.2, handler=self.play_replays) def sw_trough4_active_for_1s(self, sw): if self.game.ball_count.position >= 4: self.timeout_actions() def timeout_actions(self): if (self.game.timer.position < 39): self.game.timer.step() self.delay(name="timeout", delay=5.0, handler=self.timeout_actions) else: self.game.timer.step() self.tilt_actions() def sw_trough8_closed(self, sw): if self.game.start.status == False: self.game.ball_count.position -= 1 self.game.returned = True self.check_lifter_status() else: self.check_lifter_status() def sw_enter_active(self, sw): if self.game.switches.left.is_active() and self.game.switches.right.is_active(): self.game.end_run_loop() os.system("/home/nbaldridge/proc/bingo_emulator/start_game.sh fun_spot_63") def check_shutter(self, start=0): if start == 1: if self.game.switches.smRunout.is_active(): if self.game.switches.shutter.is_active(): self.game.coils.shutter.disable() else: if self.game.switches.shutter.is_inactive(): if self.game.switches.smRunout.is_active(): self.game.coils.shutter.disable() def regular_play(self): self.holes = [] self.game.cu = not self.game.cu self.cancel_delayed(name="search") self.cancel_delayed(name="card1_replay_step_up") self.cancel_delayed(name="card2_replay_step_up") self.cancel_delayed(name="card3_replay_step_up") self.cancel_delayed(name="card4_replay_step_up") self.cancel_delayed(name="card5_replay_step_up") self.cancel_delayed(name="card6_replay_step_up") self.cancel_delayed(name="timeout") self.game.search_index.disengage() self.game.coils.counter.pulse() self.game.returned = False 
self.game.sound.stop('add') self.game.sound.play('add') if self.game.start.status == True: self.game.selector.step() if self.game.cu == 1: self.game.spotting.step() if self.game.switches.shutter.is_inactive(): self.game.coils.shutter.enable() self.replay_step_down() self.game.ball_count.reset() self.check_lifter_status() self.game.start.engage(self.game) else: self.game.card1_replay_counter.reset() self.game.card2_replay_counter.reset() self.game.card3_replay_counter.reset() self.game.card4_replay_counter.reset() self.game.card5_replay_counter.reset() self.game.card6_replay_counter.reset() self.game.timer.reset() self.game.start.engage(self.game) self.game.selector.reset() self.game.ball_count.reset() self.game.sound.play_music('motor', -1) self.regular_play() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) self.game.tilt.disengage() def check_lifter_status(self): if self.game.tilt.status == False: if self.game.switches.trough8.is_closed() and self.game.switches.trough5.is_open() and self.game.switches.trough4.is_open() and self.game.switches.trough3.is_closed() and self.game.switches.trough2.is_closed(): if self.game.switches.shooter.is_open(): self.game.coils.lifter.enable() self.game.returned = False else: if self.game.start.status == False: if self.game.switches.trough4.is_open(): if self.game.switches.shooter.is_open(): if self.game.switches.gate.is_closed(): self.game.coils.lifter.enable() else: if self.game.returned == True and self.game.ball_count.position == 4: if self.game.switches.shooter.is_open(): self.game.coils.lifter.enable() self.game.returned = False def sw_smRunout_active_for_1ms(self, sw): if self.game.start.status == True: self.check_shutter(1) else: self.check_shutter() def sw_trough1_closed(self, sw): if self.game.switches.shooter.is_closed(): self.game.coils.lifter.disable() def sw_ballLift_active_for_500ms(self, sw): if self.game.tilt.status == False: if self.game.switches.shooter.is_open(): if self.game.ball_count.position < 5: self.game.coils.lifter.enable() def sw_gate_inactive_for_1ms(self, sw): self.game.start.disengage() if self.game.switches.shutter.is_active(): self.game.coils.shutter.enable() self.game.ball_count.step() if self.game.ball_count.position == 4: self.game.sound.play('tilt') self.game.sound.play('tilt') if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() if self.game.ball_count.position <= 4: self.check_lifter_status() # This is really nasty, but it is how we render graphics for each individual hole. # numbers are added (or removed from) a list. In this way, I can re-use the same # routine even for games where there are ball return functions like Surf Club. 
def check_spotting(self): if self.game.spotting.position == 0: if 22 not in self.holes: self.holes.append(22) if self.game.spotting.position == 4: if 20 not in self.holes: self.holes.append(20) elif self.game.spotting.position == 6: if 20 not in self.holes: self.holes.append(20) elif self.game.spotting.position == 9: if 15 not in self.holes: self.holes.append(15) elif self.game.spotting.position == 11: if 22 not in self.holes: self.holes.append(22) elif self.game.spotting.position == 13: if 17 not in self.holes: self.holes.append(17) elif self.game.spotting.position == 16: if 20 not in self.holes: self.holes.append(20) elif self.game.spotting.position == 18: if 21 not in self.holes: self.holes.append(21) elif self.game.spotting.position == 23: if 16 not in self.holes: self.holes.append(16) elif self.game.spotting.position == 26: if 21 not in self.holes: self.holes.append(21) elif self.game.spotting.position == 28: if 17 not in self.holes: self.holes.append(17) elif self.game.spotting.position == 30: if 16 not in self.holes: self.holes.append(16) elif self.game.spotting.position == 34: if 15 not in self.holes: self.holes.append(15) elif self.game.spotting.position == 35: if 22 not in self.holes: self.holes.append(22) elif self.game.spotting.position == 39: if 22 not in self.holes: self.holes.append(22) elif self.game.spotting.position == 41: if 21 not in self.holes: self.holes.append(21) elif self.game.spotting.position == 42: if 17 not in self.holes: self.holes.append(17) elif self.game.spotting.position == 46: if 21 not in self.holes: self.holes.append(21) elif self.game.spotting.position == 47: if 15 not in self.holes: self.holes.append(15) elif self.game.spotting.position == 49: if 16 not in self.holes: self.holes.append(16) self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole1_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(1) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole2_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(2) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole3_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(3) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole4_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(4) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole5_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(5) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole6_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: 
self.holes.append(6) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole7_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(7) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole8_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(8) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole9_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(9) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole10_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(10) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole11_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(11) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole12_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(12) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole13_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(13) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole14_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(14) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole15_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(15) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole16_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(16) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole17_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(17) if self.game.ball_count.position >= 4: if 
self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole18_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(18) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole19_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(19) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole20_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(20) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole21_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(21) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole22_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(22) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole23_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(23) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole24_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(24) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_hole25_active_for_40ms(self, sw): if self.game.tilt.status == False and self.game.start.status == False: self.holes.append(25) if self.game.ball_count.position >= 4: if self.game.search_index.status == False: self.search() self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_replayReset_active(self, sw): self.game.anti_cheat.disengage() self.holes = [] graphics.fun_spot_63.display(self) self.tilt_actions() self.replay_step_down(self.game.replays) def tilt_actions(self): self.game.start.disengage() self.cancel_delayed(name="replay_reset") self.cancel_delayed(name="card1_replay_step_up") self.cancel_delayed(name="card2_replay_step_up") self.cancel_delayed(name="card3_replay_step_up") self.cancel_delayed(name="card4_replay_step_up") self.cancel_delayed(name="card5_replay_step_up") self.cancel_delayed(name="card6_replay_step_up") self.cancel_delayed(name="timeout") self.game.search_index.disengage() if self.game.ball_count.position == 0: if self.game.switches.shutter.is_active(): self.game.coils.shutter.enable() self.holes = [] self.game.selector.reset() self.game.ball_count.reset() self.game.anti_cheat.engage(game) 
self.game.tilt.engage(self.game) self.game.sound.stop_music() self.game.sound.play('tilt') # displays "Tilt" on the backglass, you have to recoin. self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) def sw_tilt_active(self, sw): if self.game.tilt.status == False: self.tilt_actions() def replay_step_down(self, number=0): if number > 0: if number > 1: self.game.replays -= 1 self.game.coils.registerDown.pulse() number -= 1 graphics.fun_spot_63.display(self) self.delay(name="replay_reset", delay=0.13, handler=self.replay_step_down, param=number) elif number == 1: self.game.replays -= 1 self.game.coils.registerDown.pulse() number -= 1 graphics.fun_spot_63.display(self) self.cancel_delayed(name="replay_reset") else: if self.game.replays > 0: self.game.replays -= 1 self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self) self.game.coils.registerDown.pulse() def replay_step_up(self): if self.game.replays < 200: self.game.replays += 1 self.game.coils.registerUp.pulse() graphics.fun_spot_63.display(self) def search(self): # The search workflow/logic will determine if you actually have a winner, but it is a bit tricky. # if the ball is in a particular hole, the search relays need to click and/or clack, and # when you have at least three going at once, it should latch on the search index and score. # This scoring is tempered by the selector disc. You have to have the card enabled that you're # winning on. This whole process will have to happen on a rotational basis. The search should really # begin immediately upon the first ball landing in the hole. # I suspect that the best, fastest way to complete the search is actually to reimplement the mechanical # search activity. For each revolution of the search disc (which happens about every 5-7 seconds), the # game will activate() each search relay for each 'hot' rivet on the search disc. This can be on a different # wiper finger for each set of rivets on the search disc. self.game.sound.stop_music() self.game.sound.play_music('search', -1) if self.game.search_index.status == False: for i in range(0, 100): if i <= 50: self.r = self.closed_search_relays(self.game.searchdisc.position) self.game.searchdisc.spin() if i >= 51: self.r = self.closed_search_relays(self.game.searchdisc2.position + 50) self.game.searchdisc2.spin() self.wipers = self.r[0] self.card = self.r[1] self.super_line = self.r[2] # From here, I need to determine based on the value of r, whether to latch the search index and score. For Coney Island, # I need to determine the best winner on each card. To do this, I must compare the position of the replay counter before # determining the winner. Reminder that my replay counters are a 1:1 representation. self.match = [] for key in self.wipers: for number in self.holes: if number == key: self.match.append(self.wipers[key]) relays = sorted(set(self.match)) #TODO Play sound for each relay closure. 
s = functions.count_seq(relays) if self.game.selector.position >= self.card: if s >= 3: self.find_winner(s, self.card, self.super_line) break def find_winner(self, relays, card, super_line): if self.game.search_index.status == False and self.game.replays < 200: #Implemented per S/I card 20-100 if card == 1: if relays == 3: if self.super_line == 1: if self.game.card1_replay_counter.position < 12: self.game.search_index.engage(self.game) self.card1_replay_step_up(12 - self.game.card1_replay_counter.position) else: if self.game.card1_replay_counter.position < 4: self.game.search_index.engage(self.game) self.card1_replay_step_up(4 - self.game.card1_replay_counter.position) if relays == 4: if self.super_line == 1: if self.game.card1_replay_counter.position < 60: self.game.search_index.engage(self.game) self.card1_replay_step_up(60 - self.game.card1_replay_counter.position) else: if self.game.card1_replay_counter.position < 20: self.game.search_index.engage(self.game) self.card1_replay_step_up(20 - self.game.card1_replay_counter.position) if relays == 5: if self.game.card1_replay_counter.position < 100: self.game.search_index.engage(self.game) self.card1_replay_step_up(100 - self.game.card1_replay_counter.position) if card == 2: if relays == 3: if self.super_line == 1: if self.game.card2_replay_counter.position < 12: self.game.search_index.engage(self.game) self.card2_replay_step_up(12 - self.game.card2_replay_counter.position) else: if self.game.card2_replay_counter.position < 4: self.game.search_index.engage(self.game) self.card2_replay_step_up(4 - self.game.card2_replay_counter.position) if relays == 4: if self.super_line == 1: if self.game.card2_replay_counter.position < 60: self.game.search_index.engage(self.game) self.card2_replay_step_up(60 - self.game.card2_replay_counter.position) else: if self.game.card2_replay_counter.position < 20: self.game.search_index.engage(self.game) self.card2_replay_step_up(20 - self.game.card2_replay_counter.position) if relays == 5: if self.game.card2_replay_counter.position < 100: self.game.search_index.engage(self.game) self.card2_replay_step_up(100 - self.game.card2_replay_counter.position) if card == 3: if relays == 3: if self.super_line == 1: if self.game.card3_replay_counter.position < 12: self.game.search_index.engage(self.game) self.card3_replay_step_up(12 - self.game.card3_replay_counter.position) else: if self.game.card3_replay_counter.position < 4: self.game.search_index.engage(self.game) self.card3_replay_step_up(4 - self.game.card3_replay_counter.position) if relays == 4: if self.super_line == 1: if self.game.card3_replay_counter.position < 60: self.game.search_index.engage(self.game) self.card3_replay_step_up(60 - self.game.card3_replay_counter.position) else: if self.game.card3_replay_counter.position < 20: self.game.search_index.engage(self.game) self.card3_replay_step_up(20 - self.game.card3_replay_counter.position) if relays == 5: if self.game.card3_replay_counter.position < 100: self.game.search_index.engage(self.game) self.card3_replay_step_up(100 - self.game.card3_replay_counter.position) if card == 4: if relays == 3: if self.super_line == 1: if self.game.card4_replay_counter.position < 12: self.game.search_index.engage(self.game) self.card4_replay_step_up(12 - self.game.card4_replay_counter.position) else: if self.game.card4_replay_counter.position < 4: self.game.search_index.engage(self.game) self.card4_replay_step_up(4 - self.game.card4_replay_counter.position) if relays == 4: if self.super_line == 1: if 
self.game.card4_replay_counter.position < 60: self.game.search_index.engage(self.game) self.card4_replay_step_up(60 - self.game.card4_replay_counter.position) else: if self.game.card4_replay_counter.position < 20: self.game.search_index.engage(self.game) self.card4_replay_step_up(20 - self.game.card4_replay_counter.position) if relays == 5: if self.game.card4_replay_counter.position < 100: self.game.search_index.engage(self.game) self.card4_replay_step_up(100 - self.game.card4_replay_counter.position) if card == 5: if relays == 3: if self.super_line == 1: if self.game.card5_replay_counter.position < 12: self.game.search_index.engage(self.game) self.card5_replay_step_up(12 - self.game.card5_replay_counter.position) else: if self.game.card5_replay_counter.position < 4: self.game.search_index.engage(self.game) self.card5_replay_step_up(4 - self.game.card5_replay_counter.position) if relays == 4: if self.super_line == 1: if self.game.card5_replay_counter.position < 60: self.game.search_index.engage(self.game) self.card5_replay_step_up(60 - self.game.card5_replay_counter.position) else: if self.game.card5_replay_counter.position < 20: self.game.search_index.engage(self.game) self.card5_replay_step_up(20 - self.game.card5_replay_counter.position) if relays == 5: if self.game.card5_replay_counter.position < 100: self.game.search_index.engage(self.game) self.card5_replay_step_up(100 - self.game.card5_replay_counter.position) if card == 6: if relays == 3: if self.super_line == 1: if self.game.card6_replay_counter.position < 12: self.game.search_index.engage(self.game) self.card6_replay_step_up(12 - self.game.card6_replay_counter.position) else: if self.game.card6_replay_counter.position < 4: self.game.search_index.engage(self.game) self.card6_replay_step_up(4 - self.game.card6_replay_counter.position) if relays == 4: if self.super_line == 1: if self.game.card6_replay_counter.position < 60: self.game.search_index.engage(self.game) self.card6_replay_step_up(60 - self.game.card6_replay_counter.position) else: if self.game.card6_replay_counter.position < 20: self.game.search_index.engage(self.game) self.card6_replay_step_up(20 - self.game.card6_replay_counter.position) if relays == 5: if self.game.card6_replay_counter.position < 100: self.game.search_index.engage(self.game) self.card6_replay_step_up(100 - self.game.card6_replay_counter.position) def card1_replay_step_up(self, number): self.game.sound.stop_music() if number >= 1: self.game.card1_replay_counter.step() number -= 1 self.replay_step_up() if self.game.replays == 200: number = 0 self.delay(name="card1_replay_step_up", delay=0.1, handler=self.card1_replay_step_up, param=number) else: self.game.search_index.disengage() self.cancel_delayed(name="card1_replay_step_up") self.search() def card2_replay_step_up(self, number): self.game.sound.stop_music() if number >= 1: self.game.card2_replay_counter.step() number -= 1 self.replay_step_up() if self.game.replays == 200: number = 0 self.delay(name="card2_replay_step_up", delay=0.1, handler=self.card2_replay_step_up, param=number) else: self.game.search_index.disengage() self.cancel_delayed(name="card2_replay_step_up") self.search() def card3_replay_step_up(self, number): self.game.sound.stop_music() if number >= 1: self.game.card3_replay_counter.step() number -= 1 self.replay_step_up() if self.game.replays == 200: number = 0 self.delay(name="card3_replay_step_up", delay=0.1, handler=self.card3_replay_step_up, param=number) else: self.game.search_index.disengage() 
self.cancel_delayed(name="card3_replay_step_up") self.search() def card4_replay_step_up(self, number): self.game.sound.stop_music() if number >= 1: self.game.card4_replay_counter.step() number -= 1 self.replay_step_up() if self.game.replays == 200: number = 0 self.delay(name="card4_replay_step_up", delay=0.1, handler=self.card4_replay_step_up, param=number) else: self.game.search_index.disengage() self.cancel_delayed(name="card4_replay_step_up") self.search() def card5_replay_step_up(self, number): self.game.sound.stop_music() if number >= 1: self.game.card5_replay_counter.step() number -= 1 self.replay_step_up() if self.game.replays == 200: number = 0 self.delay(name="card5_replay_step_up", delay=0.1, handler=self.card5_replay_step_up, param=number) else: self.game.search_index.disengage() self.cancel_delayed(name="card5_replay_step_up") self.search() def card6_replay_step_up(self, number): self.game.sound.stop_music() if number >= 1: self.game.card6_replay_counter.step() number -= 1 self.replay_step_up() if self.game.replays == 200: number = 0 self.delay(name="card6_replay_step_up", delay=0.1, handler=self.card6_replay_step_up, param=number) else: self.game.search_index.disengage() self.cancel_delayed(name="card6_replay_step_up") self.search() def closed_search_relays(self, rivets): # This function is critical, as it will determine which card is returned, etc. I need to check both the position of the # replay counter for the card, as well as the selector unit to ensure that the card is selected. We will get a row back # that has the numbers on the position which will return the search relay connected. When three out of the five relays # are connected, we get a winner! self.pos = {} # Card 1 self.pos[0] = {} self.pos[1] = {5:1, 1:2, 9:3, 25:4, 3:5} self.pos[2] = {8:1, 22:2, 10:3, 19:4, 7:5} self.pos[3] = {6:1, 18:2, 16:3, 11:4, 17:5} self.pos[4] = {24:1, 21:2, 14:3, 20:4, 13:5} self.pos[5] = {12:1, 23:2, 2:3, 4:4, 15:5} self.pos[6] = {5:1, 8:2, 6:3, 24:4, 12:5} self.pos[7] = {1:1, 22:2, 18:3, 21:4, 23:5} self.pos[8] = {9:1, 10:2, 16:3, 14:4, 2:5} self.pos[9] = {25:1, 19:2, 11:3, 20:4, 4:5} self.pos[10] = {3:1, 7:2, 17:3, 13:4, 15:5} self.pos[11] = {5:1, 22:2, 16:3, 20:4, 15:5} self.pos[12] = {3:1, 19:2, 16:3, 21:4, 12:5} self.pos[13] = {} self.pos[14] = {} self.pos[15] = {} self.pos[16] = {} self.pos[17] = {} # There are five blank positions in between cards. Early games have less to search! # Card 2 self.pos[18] = {9:1, 24:2, 16:3, 4:4, 6:5} self.pos[19] = {13:1, 19:2, 14:3, 20:4, 25:5} self.pos[20] = {2:1, 18:2, 15:3, 12:4, 17:5} self.pos[21] = {1:1, 22:2, 11:3, 21:4, 8:5} self.pos[22] = {10:1, 7:2, 5:3, 23:4, 3:5} self.pos[23] = {9:1, 13:2, 2:3, 1:4, 10:5} self.pos[24] = {24:1, 19:2, 18:3, 22:4, 7:5} self.pos[25] = {16:1, 14:2, 15:3, 11:4, 5:5} self.pos[26] = {4:1, 20:2, 12:3, 21:4, 23:5} self.pos[27] = {6:1, 25:2, 17:3, 8:4, 3:5} self.pos[28] = {9:1, 19:2, 15:3, 21:4, 3:5} self.pos[29] = {6:1, 20:2, 15:3, 22:4, 10:5} self.pos[30] = {} self.pos[31] = {} self.pos[32] = {} self.pos[33] = {} self.pos[34] = {} # Another five blank positions. Can you believe it? 
# Card 3 self.pos[35] = {3:1, 7:2, 10:3, 4:4, 9:5} self.pos[36] = {24:1, 21:2, 18:3, 22:4, 8:5} self.pos[37] = {15:1, 14:2, 17:3, 11:4, 2:5} self.pos[38] = {13:1, 20:2, 12:3, 19:4, 23:5} self.pos[39] = {6:1, 25:2, 16:3, 1:4, 5:5} self.pos[40] = {3:1, 24:2, 15:3, 13:4, 6:5} self.pos[41] = {7:1, 21:2, 14:3, 20:4, 25:5} self.pos[42] = {10:1, 18:2, 17:3, 12:4, 16:5} self.pos[43] = {4:1, 22:2, 11:3, 19:4, 1:5} self.pos[44] = {9:1, 8:2, 2:3, 23:4, 5:5} self.pos[45] = {3:1, 21:2, 17:3, 19:4, 5:5} self.pos[46] = {9:1, 22:2, 17:3, 20:4, 6:5} self.pos[47] = {} self.pos[48] = {} self.pos[49] = {} self.pos[50] = {} # Start of the second search disc modeled as part # of the same array for simplicity. Parent function # calls this subset. # Card #4 self.pos[51] = {6:1, 7:2, 3:3, 24:4, 1:5} self.pos[52] = {23:1, 14:2, 12:3, 18:4, 2:5} self.pos[53] = {5:1, 19:2, 20:3, 16:4, 22:5} self.pos[54] = {11:1, 17:2, 9:3, 15:4, 25:5} self.pos[55] = {10:1, 13:2, 21:3, 4:4, 8:5} self.pos[56] = {6:1, 23:2, 5:3, 11:4, 10:5} self.pos[57] = {7:1, 14:2, 19:3, 17:4, 13:5} self.pos[58] = {3:1, 12:2, 20:3, 9:4, 21:5} self.pos[59] = {24:1, 18:2, 16:3, 15:4, 4:5} self.pos[60] = {1:1, 2:2, 22:3, 25:4, 8:5} self.pos[61] = {6:1, 14:2, 20:3, 15:4, 8:5} self.pos[62] = {1:1, 18:2, 20:3, 17:4, 10:5} self.pos[63] = {} self.pos[64] = {} self.pos[65] = {} self.pos[66] = {} self.pos[67] = {} # Card #5 self.pos[68] = {8:1, 23:2, 10:3, 13:4, 4:5} self.pos[69] = {2:1, 17:2, 16:3, 14:4, 24:5} self.pos[70] = {20:1, 12:2, 22:3, 19:4, 5:5} self.pos[71] = {25:1, 15:2, 9:3, 18:4, 11:5} self.pos[72] = {1:1, 7:2, 21:3, 3:4, 6:5} self.pos[73] = {8:1, 2:2, 20:3, 25:4, 1:5} self.pos[74] = {23:1, 17:2, 12:3, 15:4, 7:5} self.pos[75] = {10:1, 16:2, 22:3, 9:4, 21:5} self.pos[76] = {13:1, 14:2, 19:3, 18:4, 3:5} self.pos[77] = {4:1, 24:2, 5:3, 11:4, 6:5} self.pos[78] = {8:1, 17:2, 22:3, 18:4, 6:5} self.pos[79] = {4:1, 14:2, 22:3, 15:4, 1:5} self.pos[80] = {} self.pos[81] = {} self.pos[82] = {} self.pos[83] = {} self.pos[84] = {} # Card #6 self.pos[85] = {4:1, 6:2, 1:3, 23:4, 5:5} self.pos[86] = {25:1, 15:2, 3:3, 17:4, 13:5} self.pos[87] = {9:1, 19:2, 21:3, 12:4, 20:5} self.pos[88] = {10:1, 18:2, 16:3, 14:4, 8:5} self.pos[89] = {7:1, 24:2, 22:3, 2:4, 11:5} self.pos[90] = {4:1, 25:2, 9:3, 10:4, 7:5} self.pos[91] = {6:1, 15:2, 19:3, 18:4, 24:5} self.pos[92] = {1:1, 3:2, 21:3, 16:4, 22:5} self.pos[93] = {23:1, 17:2, 12:3, 14:4, 2:5} self.pos[94] = {5:1, 13:2, 20:3, 8:4, 11:5} self.pos[95] = {4:1, 15:2, 21:3, 14:4, 11:5} self.pos[96] = {5:1, 17:2, 21:3, 18:4, 7:5} self.pos[97] = {} self.pos[98] = {} self.pos[99] = {} self.pos[100] = {} super_line = 0 if rivets in range(0,18): card = 1 if rivets in range(18,35): card = 2 if rivets in range(35,50): card = 3 if rivets in range(50,68): card = 4 if rivets in range(68,85): card = 5 if rivets in range(85,100): card = 6 if rivets == 4: super_line = 1 elif rivets == 21: super_line = 1 elif rivets == 38: super_line = 1 elif rivets == 54: super_line = 1 elif rivets == 71: super_line = 1 elif rivets == 88: super_line = 1 return (self.pos[rivets], card, super_line) # Define reset as the knock-off, anti-cheat relay disabled, and replay reset enabled. Motors turn while credits are knocked off. # When meter reaches zero and the zero limit switch is hit, turn off motor sound and leave backglass gi on, but with tilt displayed. 
def startup(self): self.tilt_actions() class FunSpot63(procgame.game.BasicGame): """ Fun Spot '63 was an Ohio Dime Game without a replay button """ def __init__(self, machine_type): super(FunSpot63, self).__init__(machine_type) pygame.mixer.pre_init(44100,-16,2,512) self.sound = procgame.sound.SoundController(self) self.sound.set_volume(1.0) # NOTE: trough_count only counts the number of switches present in the trough. It does _not_ count # the number of balls present. In this game, there should be 8 balls. self.trough_count = 6 # Subclass my units unique to this game - modifications must be made to set up mixers and steppers unique to the game # NOTE: 'top' positions are indexed using a 0 index, so the top on a 24 position unit is actually 23. self.searchdisc = units.Search("searchdisc", 49) self.searchdisc2 = units.Search("searchdisc2", 49) #Seach relays self.s1 = units.Relay("s1") self.s2 = units.Relay("s2") self.s3 = units.Relay("s3") self.s4 = units.Relay("s4") self.s5 = units.Relay("s5") self.search_index = units.Relay("search_index") #Spotting disc in Lotta Fun actually keeps track of spotted numbers self.spotting = units.Stepper("spotting", 49, "Lotta Fun", "continuous") self.spotting.position = random.randint(0,49) #Replay Counter self.card1_replay_counter = units.Stepper("card1_replay_counter", 200) self.card2_replay_counter = units.Stepper("card2_replay_counter", 200) self.card3_replay_counter = units.Stepper("card3_replay_counter", 200) self.card4_replay_counter = units.Stepper("card4_replay_counter", 200) self.card5_replay_counter = units.Stepper("card5_replay_counter", 200) self.card6_replay_counter = units.Stepper("card6_replay_counter", 200) #Initialize stepper units used to keep track of features or timing. self.selector = units.Stepper("selector", 6) self.timer = units.Stepper("timer", 40) self.ball_count = units.Stepper("ball_count", 7) #Check for status of the replay register zero switch. If positive #and machine is just powered on, this will zero out the replays. self.replay_reset = units.Relay("replay_reset") # Now, the control unit can be in one of two positions, essentially. # This alternates by coin, and is used to portion the Spotted Numbers. self.cu = 1 #When engage()d, light 6v circuit, and enable game features, scoring, #etc. Disengage()d means that the machine is 'soft' tilted. self.anti_cheat = units.Relay("anti_cheat") #When engage()d, spin. self.start = units.Relay("start") #Tilt is separate from anti-cheat in that the trip will move the shutter #when the game is tilted with 1st ball in the lane. Also prevents you #from picking back up by killing the anti-cheat. Can be engaged by #tilt bob, slam tilt switches, or timer at 39th step. #Immediately kills motors. self.tilt = units.Relay("tilt") self.replays = 0 self.returned = False def reset(self): super(FunSpot63, self).reset() self.logger = logging.getLogger('game') self.load_config('bingo.yaml') main_mode = MulticardBingo(self) self.modes.add(main_mode) game = FunSpot63(machine_type='pdb') game.reset() game.run_loop()
license: gpl-3.0
hash: 1,453,764,986,287,668,500
line_mean: 46.380435
line_max: 222
alpha_frac: 0.550981
autogenerated: false
ratio: 3.422973
config_test: false
has_no_keywords: false
few_assignments: false
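In the game above, a card scores only when the search finds at least three consecutive closed relays among the matched wipers (functions.count_seq applied to the sorted relay set). A standalone sketch of that matching step; count_seq is reimplemented here from context as the longest consecutive run, which is an assumption about the emulator's actual helper:

def count_seq(relays):
    # Assumed behavior of bingo_emulator.common.functions.count_seq:
    # length of the longest run of consecutive integers in a sorted list.
    if not relays:
        return 0
    best = run = 1
    for prev, cur in zip(relays, relays[1:]):
        run = run + 1 if cur == prev + 1 else 1
        best = max(best, run)
    return best

# One row of card 1 from closed_search_relays: hole number -> relay position.
wipers = {5: 1, 1: 2, 9: 3, 25: 4, 3: 5}
holes = [5, 1, 9, 14]          # balls landed (14 is not on this row)

relays = sorted(set(wipers[h] for h in holes if h in wipers))
if count_seq(relays) >= 3:     # same threshold the search logic uses
    print("3-in-a-row on this card:", relays)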
repo_name: mrakgr/futhark
path: examples/life/quadlife_alt.py
copies: 1
size: 55476
content:
import sys import numpy as np import ctypes as ct import pyopencl as cl import pyopencl.array import time import argparse FUT_BLOCK_DIM = "16" cl_group_size = np.int32(512) synchronous = False fut_opencl_src = """typedef char int8_t; typedef short int16_t; typedef int int32_t; typedef long int64_t; typedef uchar uint8_t; typedef ushort uint16_t; typedef uint uint32_t; typedef ulong uint64_t; static inline int8_t add8(int8_t x, int8_t y) { return x + y; } static inline int16_t add16(int16_t x, int16_t y) { return x + y; } static inline int32_t add32(int32_t x, int32_t y) { return x + y; } static inline int64_t add64(int64_t x, int64_t y) { return x + y; } static inline int8_t sub8(int8_t x, int8_t y) { return x - y; } static inline int16_t sub16(int16_t x, int16_t y) { return x - y; } static inline int32_t sub32(int32_t x, int32_t y) { return x - y; } static inline int64_t sub64(int64_t x, int64_t y) { return x - y; } static inline int8_t mul8(int8_t x, int8_t y) { return x * y; } static inline int16_t mul16(int16_t x, int16_t y) { return x * y; } static inline int32_t mul32(int32_t x, int32_t y) { return x * y; } static inline int64_t mul64(int64_t x, int64_t y) { return x * y; } static inline uint8_t udiv8(uint8_t x, uint8_t y) { return x / y; } static inline uint16_t udiv16(uint16_t x, uint16_t y) { return x / y; } static inline uint32_t udiv32(uint32_t x, uint32_t y) { return x / y; } static inline uint64_t udiv64(uint64_t x, uint64_t y) { return x / y; } static inline uint8_t umod8(uint8_t x, uint8_t y) { return x % y; } static inline uint16_t umod16(uint16_t x, uint16_t y) { return x % y; } static inline uint32_t umod32(uint32_t x, uint32_t y) { return x % y; } static inline uint64_t umod64(uint64_t x, uint64_t y) { return x % y; } static inline int8_t sdiv8(int8_t x, int8_t y) { int8_t q = x / y; int8_t r = x % y; return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0); } static inline int16_t sdiv16(int16_t x, int16_t y) { int16_t q = x / y; int16_t r = x % y; return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0); } static inline int32_t sdiv32(int32_t x, int32_t y) { int32_t q = x / y; int32_t r = x % y; return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0); } static inline int64_t sdiv64(int64_t x, int64_t y) { int64_t q = x / y; int64_t r = x % y; return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0); } static inline int8_t smod8(int8_t x, int8_t y) { int8_t r = x % y; return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y); } static inline int16_t smod16(int16_t x, int16_t y) { int16_t r = x % y; return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y); } static inline int32_t smod32(int32_t x, int32_t y) { int32_t r = x % y; return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y); } static inline int64_t smod64(int64_t x, int64_t y) { int64_t r = x % y; return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 
0 : y); } static inline int8_t squot8(int8_t x, int8_t y) { return x / y; } static inline int16_t squot16(int16_t x, int16_t y) { return x / y; } static inline int32_t squot32(int32_t x, int32_t y) { return x / y; } static inline int64_t squot64(int64_t x, int64_t y) { return x / y; } static inline int8_t srem8(int8_t x, int8_t y) { return x % y; } static inline int16_t srem16(int16_t x, int16_t y) { return x % y; } static inline int32_t srem32(int32_t x, int32_t y) { return x % y; } static inline int64_t srem64(int64_t x, int64_t y) { return x % y; } static inline uint8_t shl8(uint8_t x, uint8_t y) { return x << y; } static inline uint16_t shl16(uint16_t x, uint16_t y) { return x << y; } static inline uint32_t shl32(uint32_t x, uint32_t y) { return x << y; } static inline uint64_t shl64(uint64_t x, uint64_t y) { return x << y; } static inline uint8_t lshr8(uint8_t x, uint8_t y) { return x >> y; } static inline uint16_t lshr16(uint16_t x, uint16_t y) { return x >> y; } static inline uint32_t lshr32(uint32_t x, uint32_t y) { return x >> y; } static inline uint64_t lshr64(uint64_t x, uint64_t y) { return x >> y; } static inline int8_t ashr8(int8_t x, int8_t y) { return x >> y; } static inline int16_t ashr16(int16_t x, int16_t y) { return x >> y; } static inline int32_t ashr32(int32_t x, int32_t y) { return x >> y; } static inline int64_t ashr64(int64_t x, int64_t y) { return x >> y; } static inline uint8_t and8(uint8_t x, uint8_t y) { return x & y; } static inline uint16_t and16(uint16_t x, uint16_t y) { return x & y; } static inline uint32_t and32(uint32_t x, uint32_t y) { return x & y; } static inline uint64_t and64(uint64_t x, uint64_t y) { return x & y; } static inline uint8_t or8(uint8_t x, uint8_t y) { return x | y; } static inline uint16_t or16(uint16_t x, uint16_t y) { return x | y; } static inline uint32_t or32(uint32_t x, uint32_t y) { return x | y; } static inline uint64_t or64(uint64_t x, uint64_t y) { return x | y; } static inline uint8_t xor8(uint8_t x, uint8_t y) { return x ^ y; } static inline uint16_t xor16(uint16_t x, uint16_t y) { return x ^ y; } static inline uint32_t xor32(uint32_t x, uint32_t y) { return x ^ y; } static inline uint64_t xor64(uint64_t x, uint64_t y) { return x ^ y; } static inline char ult8(uint8_t x, uint8_t y) { return x < y; } static inline char ult16(uint16_t x, uint16_t y) { return x < y; } static inline char ult32(uint32_t x, uint32_t y) { return x < y; } static inline char ult64(uint64_t x, uint64_t y) { return x < y; } static inline char ule8(uint8_t x, uint8_t y) { return x <= y; } static inline char ule16(uint16_t x, uint16_t y) { return x <= y; } static inline char ule32(uint32_t x, uint32_t y) { return x <= y; } static inline char ule64(uint64_t x, uint64_t y) { return x <= y; } static inline char slt8(int8_t x, int8_t y) { return x < y; } static inline char slt16(int16_t x, int16_t y) { return x < y; } static inline char slt32(int32_t x, int32_t y) { return x < y; } static inline char slt64(int64_t x, int64_t y) { return x < y; } static inline char sle8(int8_t x, int8_t y) { return x <= y; } static inline char sle16(int16_t x, int16_t y) { return x <= y; } static inline char sle32(int32_t x, int32_t y) { return x <= y; } static inline char sle64(int64_t x, int64_t y) { return x <= y; } static inline int8_t pow8(int8_t x, int8_t y) { int8_t res = 1, rem = y; while (rem != 0) { if (rem & 1) res *= x; rem >>= 1; x *= x; } return res; } static inline int16_t pow16(int16_t x, int16_t y) { int16_t res = 1, rem = y; while (rem != 0) { if (rem & 1) 
            res *= x;
        rem >>= 1;
        x *= x;
    }
    return res;
}
static inline int32_t pow32(int32_t x, int32_t y)
{
    int32_t res = 1, rem = y;

    while (rem != 0) {
        if (rem & 1)
            res *= x;
        rem >>= 1;
        x *= x;
    }
    return res;
}
static inline int64_t pow64(int64_t x, int64_t y)
{
    int64_t res = 1, rem = y;

    while (rem != 0) {
        if (rem & 1)
            res *= x;
        rem >>= 1;
        x *= x;
    }
    return res;
}
static inline int8_t sext_i8_i8(int8_t x) { return x; }
static inline int16_t sext_i8_i16(int8_t x) { return x; }
static inline int32_t sext_i8_i32(int8_t x) { return x; }
static inline int64_t sext_i8_i64(int8_t x) { return x; }
static inline int8_t sext_i16_i8(int16_t x) { return x; }
static inline int16_t sext_i16_i16(int16_t x) { return x; }
static inline int32_t sext_i16_i32(int16_t x) { return x; }
static inline int64_t sext_i16_i64(int16_t x) { return x; }
static inline int8_t sext_i32_i8(int32_t x) { return x; }
static inline int16_t sext_i32_i16(int32_t x) { return x; }
static inline int32_t sext_i32_i32(int32_t x) { return x; }
static inline int64_t sext_i32_i64(int32_t x) { return x; }
static inline int8_t sext_i64_i8(int64_t x) { return x; }
static inline int16_t sext_i64_i16(int64_t x) { return x; }
static inline int32_t sext_i64_i32(int64_t x) { return x; }
static inline int64_t sext_i64_i64(int64_t x) { return x; }
static inline uint8_t zext_i8_i8(uint8_t x) { return x; }
static inline uint16_t zext_i8_i16(uint8_t x) { return x; }
static inline uint32_t zext_i8_i32(uint8_t x) { return x; }
static inline uint64_t zext_i8_i64(uint8_t x) { return x; }
static inline uint8_t zext_i16_i8(uint16_t x) { return x; }
static inline uint16_t zext_i16_i16(uint16_t x) { return x; }
static inline uint32_t zext_i16_i32(uint16_t x) { return x; }
static inline uint64_t zext_i16_i64(uint16_t x) { return x; }
static inline uint8_t zext_i32_i8(uint32_t x) { return x; }
static inline uint16_t zext_i32_i16(uint32_t x) { return x; }
static inline uint32_t zext_i32_i32(uint32_t x) { return x; }
static inline uint64_t zext_i32_i64(uint32_t x) { return x; }
static inline uint8_t zext_i64_i8(uint64_t x) { return x; }
static inline uint16_t zext_i64_i16(uint64_t x) { return x; }
static inline uint32_t zext_i64_i32(uint64_t x) { return x; }
static inline uint64_t zext_i64_i64(uint64_t x) { return x; }
static inline float fdiv32(float x, float y) { return x / y; }
static inline float fadd32(float x, float y) { return x + y; }
static inline float fsub32(float x, float y) { return x - y; }
static inline float fmul32(float x, float y) { return x * y; }
static inline float fpow32(float x, float y) { return pow(x, y); }
static inline char cmplt32(float x, float y) { return x < y; }
static inline char cmple32(float x, float y) { return x <= y; }
static inline float sitofp_i8_f32(int8_t x) { return x; }
static inline float sitofp_i16_f32(int16_t x) { return x; }
static inline float sitofp_i32_f32(int32_t x) { return x; }
static inline float sitofp_i64_f32(int64_t x) { return x; }
static inline float uitofp_i8_f32(uint8_t x) { return x; }
static inline float uitofp_i16_f32(uint16_t x) { return x; }
static inline float uitofp_i32_f32(uint32_t x) { return x; }
static inline float uitofp_i64_f32(uint64_t x) { return x; }
static inline int8_t fptosi_f32_i8(float x) { return x; }
static inline int16_t fptosi_f32_i16(float x) { return x; }
static inline int32_t fptosi_f32_i32(float x) { return x; }
static inline int64_t fptosi_f32_i64(float x) { return x; }
static inline uint8_t fptoui_f32_i8(float x) { return x; }
static inline uint16_t fptoui_f32_i16(float x) { return x; }
static inline uint32_t fptoui_f32_i32(float x) { return x; }
static inline uint64_t fptoui_f32_i64(float x) { return x; }
__kernel void map_kernel_1022(int32_t m_880, __global unsigned char *world_mem_1109,
                              int32_t n_879, __global unsigned char *mem_1112)
{
    const uint kernel_thread_index_1022 = get_global_id(0);

    if (kernel_thread_index_1022 >= n_879 * m_880)
        return;

    int32_t i_1023;
    int32_t i_1024;
    char b_1025;

    // compute thread index
    {
        i_1023 = squot32(kernel_thread_index_1022, m_880);
        i_1024 = kernel_thread_index_1022 - squot32(kernel_thread_index_1022, m_880) * m_880;
    }
    // read kernel parameters
    {
        b_1025 = *(__global char *) &world_mem_1109[i_1023 * m_880 + i_1024];
    }

    int8_t res_1026;

    if (b_1025) {
        res_1026 = 1;
    } else {
        res_1026 = 0;
    }
    // write kernel result
    {
        *(__global int8_t *) &mem_1112[i_1023 * m_880 + i_1024] = res_1026;
    }
}
__kernel void map_kernel_1176(int32_t m_880, __global unsigned char *mem_1114)
{
    const uint global_thread_index_1176 = get_global_id(0);

    if (global_thread_index_1176 >= m_880)
        return;

    int32_t i_1177;

    // compute thread index
    {
        i_1177 = global_thread_index_1176;
    }
    // read kernel parameters
    { }
    // write kernel result
    {
        *(__global int32_t *) &mem_1114[i_1177 * 4] = 0;
    }
}
__kernel void map_kernel_1180(int32_t m_880, __global unsigned char *mem_1114,
                              int32_t n_879, __global unsigned char *mem_1117)
{
    const uint global_thread_index_1180 = get_global_id(0);

    if (global_thread_index_1180 >= n_879 * m_880)
        return;

    int32_t i_1181;
    int32_t j_1182;
    int32_t input_1183;

    // compute thread index
    {
        i_1181 = squot32(global_thread_index_1180, m_880);
        j_1182 = global_thread_index_1180 - squot32(global_thread_index_1180, m_880) * m_880;
    }
    // read kernel parameters
    {
        input_1183 = *(__global int32_t *) &mem_1114[j_1182 * 4];
    }
    // write kernel result
    {
        *(__global int32_t *) &mem_1117[(i_1181 * m_880 + j_1182) * 4] = input_1183;
    }
}
__kernel void map_kernel_1048(int32_t n_889, int32_t m_890,
                              __global unsigned char *mem_1130,
                              __global unsigned char *all_history_mem_1119,
                              __global unsigned char *mem_1133,
                              __global unsigned char *mem_1137)
{
    const uint kernel_thread_index_1048 = get_global_id(0);

    if (kernel_thread_index_1048 >= n_889 * m_890)
        return;

    int32_t i_1049;
    int32_t i_1050;
    int32_t not_curried_1051;

    // compute thread index
    {
        i_1049 = squot32(kernel_thread_index_1048, m_890);
        i_1050 = kernel_thread_index_1048 - squot32(kernel_thread_index_1048, m_890) * m_890;
    }
    // read kernel parameters
    {
        not_curried_1051 = *(__global int32_t *) &all_history_mem_1119[(i_1049 * m_890 + i_1050) * 4];
    }

    int32_t res_1052 = not_curried_1051 & 3;
    int32_t arg_1053 = ashr32(not_curried_1051, 2);
    char cond_1054 = slt32(255, arg_1053);
    int32_t res_1055;

    if (cond_1054) {
        res_1055 = 255;
    } else {
        res_1055 = arg_1053;
    }

    int8_t y_1057 = sext_i32_i8(res_1055);

    // write kernel result
    {
        *(__global int8_t *) &mem_1133[i_1049 * m_890 + i_1050] = y_1057;
        for (int i_1188 = 0; i_1188 < 3; i_1188++) {
            *(__global int8_t *) &mem_1137[3 * (m_890 * i_1049) + (m_890 * i_1188 + i_1050)] =
                *(__global int8_t *) &mem_1130[3 * res_1052 + i_1188];
        }
    }
}
__kernel void map_kernel_1037(__global unsigned char *mem_1137, int32_t n_889,
                              __global unsigned char *mem_1133, int32_t m_890,
                              __global unsigned char *mem_1141)
{
    const uint kernel_thread_index_1037 = get_global_id(0);

    if (kernel_thread_index_1037 >= n_889 * m_890 * 3)
        return;

    int32_t i_1038;
    int32_t i_1039;
    int32_t i_1040;
    int8_t y_1041;
    int8_t binop_param_noncurried_1042;

    // compute thread index
    {
        i_1038 = squot32(kernel_thread_index_1037, m_890 * 3);
        i_1039 = squot32(kernel_thread_index_1037 - squot32(kernel_thread_index_1037, m_890 * 3) * (m_890 * 3), 3);
        i_1040 = kernel_thread_index_1037 - squot32(kernel_thread_index_1037, m_890 * 3) * (m_890 * 3) -
            squot32(kernel_thread_index_1037 - squot32(kernel_thread_index_1037, m_890 * 3) * (m_890 * 3), 3) * 3;
    }
    // read kernel parameters
    {
        y_1041 = *(__global int8_t *) &mem_1133[i_1038 * m_890 + i_1039];
        binop_param_noncurried_1042 = *(__global int8_t *) &mem_1137[i_1038 * (3 * m_890) + i_1040 * m_890 + i_1039];
    }

    int8_t res_1043 = binop_param_noncurried_1042 - y_1041;

    // write kernel result
    {
        *(__global int8_t *) &mem_1141[i_1038 * (m_890 * 3) + i_1039 * 3 + i_1040] = res_1043;
    }
}
__kernel void map_kernel_1100(int32_t n_910, __global unsigned char *mem_1149,
                              __global unsigned char *mem_1151)
{
    const uint kernel_thread_index_1100 = get_global_id(0);

    if (kernel_thread_index_1100 >= n_910)
        return;

    int32_t i_1101;

    // compute thread index
    {
        i_1101 = kernel_thread_index_1100;
    }
    // read kernel parameters
    { }

    int32_t x_1103 = i_1101 - 1;
    int32_t res_1104 = smod32(x_1103, n_910);
    int32_t x_1105 = i_1101 + 1;
    int32_t res_1106 = smod32(x_1105, n_910);

    // write kernel result
    {
        *(__global int32_t *) &mem_1149[i_1101 * 4] = res_1106;
        *(__global int32_t *) &mem_1151[i_1101 * 4] = res_1104;
    }
}
__kernel void map_kernel_1064(__global unsigned char *mem_1149,
                              __global unsigned char *world_mem_1153, int32_t n_910,
                              __global unsigned char *mem_1151, int32_t m_911,
                              __global unsigned char *mem_1147,
                              __global unsigned char *history_mem_1155,
                              __global unsigned char *mem_1158,
                              __global unsigned char *mem_1161)
{
    const uint kernel_thread_index_1064 = get_global_id(0);

    if (kernel_thread_index_1064 >= n_910 * m_911)
        return;

    int32_t i_1065;
    int32_t i_1066;
    int32_t res_1068;
    int32_t res_1069;
    int32_t x_1070;

    // compute thread index
    {
        i_1065 = squot32(kernel_thread_index_1064, m_911);
        i_1066 = kernel_thread_index_1064 - squot32(kernel_thread_index_1064, m_911) * m_911;
    }
    // read kernel parameters
    {
        res_1068 = *(__global int32_t *) &mem_1149[i_1065 * 4];
        res_1069 = *(__global int32_t *) &mem_1151[i_1065 * 4];
        x_1070 = *(__global int32_t *) &history_mem_1155[(i_1065 * m_911 + i_1066) * 4];
    }

    int32_t x_1072 = i_1066 + 1;
    int32_t res_1073 = smod32(x_1072, m_911);
    int32_t x_1074 = i_1066 - 1;
    int32_t res_1075 = smod32(x_1074, m_911);
    int8_t x_1076 = *(__global int8_t *) &world_mem_1153[res_1069 * m_911 + i_1066];
    int8_t y_1077 = *(__global int8_t *) &world_mem_1153[i_1065 * m_911 + res_1075];
    int8_t x_1078 = x_1076 + y_1077;
    int8_t y_1079 = *(__global int8_t *) &world_mem_1153[i_1065 * m_911 + i_1066];
    int8_t x_1080 = x_1078 + y_1079;
    int8_t y_1081 = *(__global int8_t *) &world_mem_1153[i_1065 * m_911 + res_1073];
    int8_t x_1082 = x_1080 + y_1081;
    int8_t y_1083 = *(__global int8_t *) &world_mem_1153[res_1068 * m_911 + i_1066];
    int8_t res_1084 = x_1082 + y_1083;
    int32_t i_1085 = sext_i8_i32(res_1084);
    int8_t res_1086 = *(__global int8_t *) &mem_1147[i_1085];
    int32_t res_1087 = x_1070 & 3;
    int32_t arg_1088 = ashr32(x_1070, 2);
    char cond_1089 = slt32(128, arg_1088);
    int32_t res_1090;

    if (cond_1089) {
        res_1090 = 128;
    } else {
        res_1090 = arg_1088;
    }

    int8_t y_1091 = sext_i32_i8(res_1087);
    char cond_1092 = res_1086 == y_1091;
    int32_t x_1093 = res_1090 + 1;
    int32_t x_1094 = x_1093 << 2;
    int32_t y_1095 = sext_i8_i32(res_1086);
    int32_t res_1096 = x_1094 | y_1095;
    int32_t res_1097;

    if (cond_1092) {
        res_1097 = res_1096;
    } else {
        res_1097 = y_1095;
    }
    // write kernel result
    {
        *(__global int32_t *) &mem_1158[(i_1065 * m_911 + i_1066) * 4] = res_1097;
        *(__global int8_t *) &mem_1161[i_1065 * m_911 + i_1066] = res_1086;
    }
}
"""
# Hacky parser/reader for values written in Futhark syntax.  Used for
# reading stdin when compiling standalone programs with the Python
# code generator.

lookahead_buffer = []

def reset_lookahead():
    global lookahead_buffer
    lookahead_buffer = []

def get_char(f):
    global lookahead_buffer
    if len(lookahead_buffer) == 0:
        return f.read(1)
    else:
        c = lookahead_buffer[0]
        lookahead_buffer = lookahead_buffer[1:]
        return c

def unget_char(f, c):
    global lookahead_buffer
    lookahead_buffer = [c] + lookahead_buffer

def peek_char(f):
    c = get_char(f)
    if c:
        unget_char(f, c)
    return c

def skip_spaces(f):
    c = get_char(f)
    while c != None:
        if c.isspace():
            c = get_char(f)
        elif c == '-':
            # May be line comment.
            if peek_char(f) == '-':
                # Yes, line comment. Skip to end of line.
                while (c != '\n' and c != None):
                    c = get_char(f)
            else:
                break
        else:
            break
    if c:
        unget_char(f, c)

def parse_specific_char(f, expected):
    got = get_char(f)
    if got != expected:
        unget_char(f, got)
        raise ValueError
    return True

def parse_specific_string(f, s):
    for c in s:
        parse_specific_char(f, c)
    return True

def optional(p, *args):
    try:
        return p(*args)
    except ValueError:
        return None

def sepBy(p, sep, *args):
    elems = []
    x = optional(p, *args)
    if x != None:
        elems += [x]
        while optional(sep, *args) != None:
            x = p(*args)
            elems += [x]
    return elems

def parse_int(f):
    s = ''
    c = get_char(f)
    while c != None:
        if c.isdigit():
            s += c
            c = get_char(f)
        else:
            unget_char(f, c)
            break
    optional(read_int_trailer, f)
    return s

def parse_int_signed(f):
    s = ''
    c = get_char(f)

    if c == '-' and peek_char(f).isdigit():
        s = c + parse_int(f)
    else:
        unget_char(f, c)
        s = parse_int(f)

    return s

def read_int_trailer(f):
    parse_specific_char(f, 'i')
    while peek_char(f).isdigit():
        get_char(f)

def read_comma(f):
    skip_spaces(f)
    parse_specific_char(f, ',')
    return ','

def read_int(f):
    skip_spaces(f)
    return int(parse_int_signed(f))

def read_char(f):
    skip_spaces(f)
    parse_specific_char(f, '\'')
    c = get_char(f)
    parse_specific_char(f, '\'')
    return c

def read_double(f):
    skip_spaces(f)
    c = get_char(f)
    if (c == '-'):
        sign = '-'
    else:
        unget_char(f, c)
        sign = ''
    bef = optional(parse_int, f)
    if bef == None:
        bef = '0'
        parse_specific_char(f, '.')
        aft = parse_int(f)
    elif optional(parse_specific_char, f, '.'):
        aft = parse_int(f)
    else:
        aft = '0'
    if (optional(parse_specific_char, f, 'E') or
        optional(parse_specific_char, f, 'e')):
        expt = parse_int_signed(f)
    else:
        expt = '0'
    optional(read_float_trailer, f)
    return float(sign + bef + '.' + aft + 'E' + expt)

def read_float(f):
    return read_double(f)

def read_float_trailer(f):
    parse_specific_char(f, 'f')
    while peek_char(f).isdigit():
        get_char(f)

def read_bool(f):
    skip_spaces(f)
    if peek_char(f) == 'T':
        parse_specific_string(f, 'True')
        return True
    elif peek_char(f) == 'F':
        parse_specific_string(f, 'False')
        return False
    else:
        raise ValueError

def read_array_elems(f, elem_reader):
    skip_spaces(f)
    parse_specific_char(f, '[')
    xs = sepBy(elem_reader, read_comma, f)
    skip_spaces(f)
    parse_specific_char(f, ']')
    return xs

def read_array_helper(f, elem_reader, rank):
    def nested_row_reader(_):
        return read_array_helper(f, elem_reader, rank-1)
    if rank == 1:
        row_reader = elem_reader
    else:
        row_reader = nested_row_reader
    return read_array_elems(f, row_reader)

def expected_array_dims(l, rank):
    if rank > 1:
        n = len(l)
        if n == 0:
            elem = []
        else:
            elem = l[0]
        return [n] + expected_array_dims(elem, rank-1)
    else:
        return [len(l)]

def verify_array_dims(l, dims):
    if dims[0] != len(l):
        raise ValueError
    if len(dims) > 1:
        for x in l:
            verify_array_dims(x, dims[1:])

def read_double_signed(f):
    skip_spaces(f)
    c = get_char(f)

    if c == '-' and peek_char(f).isdigit():
        v = -1 * read_double(f)
    else:
        unget_char(f, c)
        v = read_double(f)

    return v

def read_array(f, elem_reader, rank, bt):
    elems = read_array_helper(f, elem_reader, rank)
    dims = expected_array_dims(elems, rank)
    verify_array_dims(elems, dims)
    return np.array(elems, dtype=bt)

# Scalar functions.

import numpy as np

def signed(x):
    if type(x) == np.uint8:
        return np.int8(x)
    elif type(x) == np.uint16:
        return np.int16(x)
    elif type(x) == np.uint32:
        return np.int32(x)
    else:
        return np.int64(x)

def unsigned(x):
    if type(x) == np.int8:
        return np.uint8(x)
    elif type(x) == np.int16:
        return np.uint16(x)
    elif type(x) == np.int32:
        return np.uint32(x)
    else:
        return np.uint64(x)

def shlN(x,y): return x << y
def ashrN(x,y): return x >> y
def sdivN(x,y): return x / y
def smodN(x,y): return x % y
def udivN(x,y): return signed(unsigned(x) / unsigned(y))
def umodN(x,y): return signed(unsigned(x) % unsigned(y))
def squotN(x,y): return np.int32(float(x) / float(y))
def sremN(x,y): return np.fmod(x,y)
def powN(x,y): return x ** y
def fpowN(x,y): return x ** y
def sleN(x,y): return x <= y
def sltN(x,y): return x < y
def uleN(x,y): return unsigned(x) <= unsigned(y)
def ultN(x,y): return unsigned(x) < unsigned(y)
def lshr8(x,y): return np.int8(np.uint8(x) >> np.uint8(y))
def lshr16(x,y): return np.int16(np.uint16(x) >> np.uint16(y))
def lshr32(x,y): return np.int32(np.uint32(x) >> np.uint32(y))
def lshr64(x,y): return np.int64(np.uint64(x) >> np.uint64(y))
def sext_T_i8(x): return np.int8(x)
def sext_T_i16(x): return np.int16(x)
def sext_T_i32(x): return np.int32(x)
def sext_T_i64(x): return np.int64(x)
def zext_i8_i8(x): return np.int8(np.uint8(x))
def zext_i8_i16(x): return np.int16(np.uint8(x))
def zext_i8_i32(x): return np.int32(np.uint8(x))
def zext_i8_i64(x): return np.int64(np.uint8(x))
def zext_i16_i8(x): return np.int8(np.uint16(x))
def zext_i16_i16(x): return np.int16(np.uint16(x))
def zext_i16_i32(x): return np.int32(np.uint16(x))
def zext_i16_i64(x): return np.int64(np.uint16(x))
def zext_i32_i8(x): return np.int8(np.uint32(x))
def zext_i32_i16(x): return np.int16(np.uint32(x))
def zext_i32_i32(x): return np.int32(np.uint32(x))
def zext_i32_i64(x): return np.int64(np.uint32(x))
def zext_i64_i8(x): return np.int8(np.uint64(x))
def zext_i64_i16(x): return np.int16(np.uint64(x))
def zext_i64_i32(x): return np.int32(np.uint64(x))
def zext_i64_i64(x): return np.int64(np.uint64(x))

shl8 = shl16 = shl32 = shl64 = shlN
ashr8 = ashr16 = ashr32 = ashr64 = ashrN
sdiv8 = sdiv16 = sdiv32 = sdiv64 = sdivN
smod8 = smod16 = smod32 = smod64 = smodN
udiv8 = udiv16 = udiv32 = udiv64 = udivN
umod8 = umod16 = umod32 = umod64 = umodN
squot8 = squot16 = squot32 = squot64 = squotN
srem8 = srem16 = srem32 = srem64 = sremN
pow8 = pow16 = pow32 = pow64 = powN
fpow32 = fpow64 = fpowN
sle8 = sle16 = sle32 = sle64 = sleN
slt8 = slt16 = slt32 = slt64 = sltN
ule8 = ule16 = ule32 = ule64 = uleN
ult8 = ult16 = ult32 = ult64 = ultN
sext_i8_i8 = sext_i16_i8 = sext_i32_i8 = sext_i64_i8 = sext_T_i8
sext_i8_i16 = sext_i16_i16 = sext_i32_i16 = sext_i64_i16 = sext_T_i16
sext_i8_i32 = sext_i16_i32 = sext_i32_i32 = sext_i64_i32 = sext_T_i32
sext_i8_i64 = sext_i16_i64 = sext_i32_i64 = sext_i64_i64 = sext_T_i64

def ssignum(x): return np.sign(x)

def usignum(x):
    if x < 0:
        return ssignum(-x)
    else:
        return ssignum(x)

def sitofp_T_f32(x): return np.float32(x)
sitofp_i8_f32 = sitofp_i16_f32 = sitofp_i32_f32 = sitofp_i64_f32 = sitofp_T_f32

def sitofp_T_f64(x): return np.float64(x)
sitofp_i8_f64 = sitofp_i16_f64 = sitofp_i32_f64 = sitofp_i64_f64 = sitofp_T_f64

def uitofp_T_f32(x): return np.float32(unsigned(x))
uitofp_i8_f32 = uitofp_i16_f32 = uitofp_i32_f32 = uitofp_i64_f32 = uitofp_T_f32

def uitofp_T_f64(x): return np.float64(unsigned(x))
uitofp_i8_f64 = uitofp_i16_f64 = uitofp_i32_f64 = uitofp_i64_f64 = uitofp_T_f64

def fptosi_T_i8(x): return np.int8(np.trunc(x))
fptosi_f32_i8 = fptosi_f64_i8 = fptosi_T_i8

def fptosi_T_i16(x): return np.int16(np.trunc(x))
fptosi_f32_i16 = fptosi_f64_i16 = fptosi_T_i16

def fptosi_T_i32(x): return np.int32(np.trunc(x))
fptosi_f32_i32 = fptosi_f64_i32 = fptosi_T_i32

def fptosi_T_i64(x): return np.int64(np.trunc(x))
fptosi_f32_i64 = fptosi_f64_i64 = fptosi_T_i64

def fptoui_T_i8(x): return np.uint8(np.trunc(x))
fptoui_f32_i8 = fptoui_f64_i8 = fptoui_T_i8

def fptoui_T_i16(x): return np.uint16(np.trunc(x))
fptoui_f32_i16 = fptoui_f64_i16 = fptoui_T_i16

def fptoui_T_i32(x): return np.uint32(np.trunc(x))
fptoui_f32_i32 = fptoui_f64_i32 = fptoui_T_i32

def fptoui_T_i64(x): return np.uint64(np.trunc(x))
fptoui_f32_i64 = fptoui_f64_i64 = fptoui_T_i64

def fpconv_f32_f64(x): return np.float64(x)
def fpconv_f64_f32(x): return np.float32(x)
def futhark_log64(x): return np.float64(np.log(x))
def futhark_sqrt64(x): return np.sqrt(x)
def futhark_exp64(x): return np.exp(x)
def futhark_cos64(x): return np.cos(x)
def futhark_sin64(x): return np.sin(x)
def futhark_atan2_64(x, y): return np.arctan2(x, y)
def futhark_isnan64(x): return np.isnan(x)
def futhark_isinf64(x): return np.isinf(x)
def futhark_log32(x): return np.float32(np.log(x))
def futhark_sqrt32(x): return np.float32(np.sqrt(x))
def futhark_exp32(x): return np.exp(x)
def futhark_cos32(x): return np.cos(x)
def futhark_sin32(x): return np.sin(x)
def futhark_atan2_32(x, y): return np.arctan2(x, y)
def futhark_isnan32(x): return np.isnan(x)
def futhark_isinf32(x): return np.isinf(x)

class quadlife_alt:
    def __init__(self):
        self.ctx = cl.create_some_context(interactive=False)
        self.queue = cl.CommandQueue(self.ctx)
        # XXX: Assuming just a single device here.
        platform_name = self.ctx.get_info(cl.context_info.DEVICES)[0].platform.name
        device_type = self.ctx.get_info(cl.context_info.DEVICES)[0].type
        lockstep_width = 1
        if ((platform_name == "NVIDIA CUDA") and (device_type == cl.device_type.GPU)):
            lockstep_width = np.int32(32)
        if ((platform_name == "AMD Accelerated Parallel Processing") and (device_type == cl.device_type.GPU)):
            lockstep_width = np.int32(64)
        if (len(fut_opencl_src) >= 0):
            program = cl.Program(self.ctx, fut_opencl_src).build(["-DFUT_BLOCK_DIM={}".format(FUT_BLOCK_DIM),
                                                                  "-DLOCKSTEP_WIDTH={}".format(lockstep_width)])
        self.map_kernel_1022_var = program.map_kernel_1022
        self.map_kernel_1176_var = program.map_kernel_1176
        self.map_kernel_1180_var = program.map_kernel_1180
        self.map_kernel_1048_var = program.map_kernel_1048
        self.map_kernel_1037_var = program.map_kernel_1037
        self.map_kernel_1100_var = program.map_kernel_1100
        self.map_kernel_1064_var = program.map_kernel_1064

    def futhark_init(self, world_mem_size_1108, world_mem_1109, n_879, m_880):
        nesting_size_1020 = (m_880 * n_879)
        bytes_1110 = (n_879 * m_880)
        mem_1112 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1110) if (bytes_1110 > np.int32(0)) else np.int32(1)))
        group_size_1174 = np.int32(512)
        num_groups_1175 = squot32((((n_879 * m_880) + group_size_1174) - np.int32(1)), group_size_1174)
        if ((np.int32(1) * (num_groups_1175 * group_size_1174)) != np.int32(0)):
            self.map_kernel_1022_var.set_args(np.int32(m_880), world_mem_1109, np.int32(n_879), mem_1112)
            cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1022_var, (long((num_groups_1175 * group_size_1174)),), (long(group_size_1174),))
            if synchronous:
                self.queue.finish()
        bytes_1113 = (np.int32(4) * m_880)
        mem_1114 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1113) if (bytes_1113 > np.int32(0)) else np.int32(1)))
        group_size_1178 = np.int32(512)
        num_groups_1179 = squot32(((m_880 + group_size_1178) - np.int32(1)), group_size_1178)
        if ((np.int32(1) * (num_groups_1179 * group_size_1178)) != np.int32(0)):
            self.map_kernel_1176_var.set_args(np.int32(m_880), mem_1114)
            cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1176_var, (long((num_groups_1179 * group_size_1178)),), (long(group_size_1178),))
            if synchronous:
                self.queue.finish()
        x_1116 = (np.int32(4) * n_879)
        bytes_1115 = (x_1116 * m_880)
        mem_1117 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1115) if (bytes_1115 > np.int32(0)) else np.int32(1)))
        group_size_1184 = np.int32(512)
        num_groups_1185 = squot32((((n_879 * m_880) + group_size_1184) - np.int32(1)), group_size_1184)
        if ((np.int32(1) * (num_groups_1185 * group_size_1184)) != np.int32(0)):
            self.map_kernel_1180_var.set_args(np.int32(m_880), mem_1114, np.int32(n_879), mem_1117)
            cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1180_var, (long((num_groups_1185 * group_size_1184)),), (long(group_size_1184),))
            if synchronous:
                self.queue.finish()
        out_mem_1170 = mem_1112
        out_memsize_1171 = bytes_1110
        out_mem_1172 = mem_1117
        out_memsize_1173 = bytes_1115
        return (out_memsize_1171, out_mem_1170, out_memsize_1173, out_mem_1172)

    def futhark_render_frame(self, all_history_mem_size_1118, all_history_mem_1119, n_889, m_890):
        mem_1121 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
        cl.enqueue_copy(self.queue, mem_1121, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(0)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1121, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(1)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1121, np.array(np.int8(-1), dtype=ct.c_int8), device_offset=long(np.int32(2)), is_blocking=synchronous)
        mem_1123 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
        cl.enqueue_copy(self.queue, mem_1123, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(0)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1123, np.array(np.int8(-1), dtype=ct.c_int8), device_offset=long(np.int32(1)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1123, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(2)), is_blocking=synchronous)
        mem_1125 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
        cl.enqueue_copy(self.queue, mem_1125, np.array(np.int8(-1), dtype=ct.c_int8), device_offset=long(np.int32(0)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1125, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(1)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1125, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(2)), is_blocking=synchronous)
        mem_1127 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
        cl.enqueue_copy(self.queue, mem_1127, np.array(np.int8(-1), dtype=ct.c_int8), device_offset=long(np.int32(0)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1127, np.array(np.int8(-1), dtype=ct.c_int8), device_offset=long(np.int32(1)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1127, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(2)), is_blocking=synchronous)
        mem_1130 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(np.int32(12)) if (np.int32(12) > np.int32(0)) else np.int32(1)))
        if ((np.int32(3) * np.int32(1)) != np.int32(0)):
            cl.enqueue_copy(self.queue, mem_1130, mem_1121, dest_offset=long(np.int32(0)), src_offset=long(np.int32(0)), byte_count=long((np.int32(3) * np.int32(1))))
            if synchronous:
                self.queue.finish()
        if ((np.int32(3) * np.int32(1)) != np.int32(0)):
            cl.enqueue_copy(self.queue, mem_1130, mem_1123, dest_offset=long(np.int32(3)), src_offset=long(np.int32(0)), byte_count=long((np.int32(3) * np.int32(1))))
            if synchronous:
                self.queue.finish()
        if ((np.int32(3) * np.int32(1)) != np.int32(0)):
            cl.enqueue_copy(self.queue, mem_1130, mem_1125, dest_offset=long((np.int32(3) * np.int32(2))), src_offset=long(np.int32(0)), byte_count=long((np.int32(3) * np.int32(1))))
            if synchronous:
                self.queue.finish()
        if ((np.int32(3) * np.int32(1)) != np.int32(0)):
            cl.enqueue_copy(self.queue, mem_1130, mem_1127, dest_offset=long((np.int32(3) * np.int32(3))), src_offset=long(np.int32(0)), byte_count=long((np.int32(3) * np.int32(1))))
            if synchronous:
                self.queue.finish()
        nesting_size_1046 = (m_890 * n_889)
        bytes_1131 = (n_889 * m_890)
        mem_1133 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1131) if (bytes_1131 > np.int32(0)) else np.int32(1)))
        x_1136 = (n_889 * np.int32(3))
        bytes_1134 = (x_1136 * m_890)
        mem_1137 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1134) if (bytes_1134 > np.int32(0)) else np.int32(1)))
        group_size_1189 = np.int32(512)
        num_groups_1190 = squot32((((n_889 * m_890) + group_size_1189) - np.int32(1)), group_size_1189)
        if ((np.int32(1) * (num_groups_1190 * group_size_1189)) != np.int32(0)):
            self.map_kernel_1048_var.set_args(np.int32(n_889), np.int32(m_890), mem_1130, all_history_mem_1119, mem_1133, mem_1137)
            cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1048_var, (long((num_groups_1190 * group_size_1189)),), (long(group_size_1189),))
            if synchronous:
                self.queue.finish()
        nesting_size_1033 = (np.int32(3) * m_890)
        nesting_size_1035 = (nesting_size_1033 * n_889)
        bytes_1138 = (bytes_1131 * np.int32(3))
        mem_1141 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1138) if (bytes_1138 > np.int32(0)) else np.int32(1)))
        group_size_1191 = np.int32(512)
        num_groups_1192 = squot32(((((n_889 * m_890) * np.int32(3)) + group_size_1191) - np.int32(1)), group_size_1191)
        if ((np.int32(1) * (num_groups_1192 * group_size_1191)) != np.int32(0)):
            self.map_kernel_1037_var.set_args(mem_1137, np.int32(n_889), mem_1133, np.int32(m_890), mem_1141)
            cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1037_var, (long((num_groups_1192 * group_size_1191)),), (long(group_size_1191),))
            if synchronous:
                self.queue.finish()
        out_mem_1186 = mem_1141
        out_memsize_1187 = bytes_1138
        return (out_memsize_1187, out_mem_1186)

    def futhark_steps(self, world_mem_size_1142, history_mem_size_1144, world_mem_1143, history_mem_1145, n_910, m_911, steps_914):
        mem_1147 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(np.int32(16)) if (np.int32(16) > np.int32(0)) else np.int32(1)))
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(0)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8), device_offset=long(np.int32(1)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8), device_offset=long(np.int32(2)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(3)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(0), dtype=ct.c_int8), device_offset=long(np.int32(4)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8), device_offset=long(np.int32(5)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8), device_offset=long(np.int32(6)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8), device_offset=long(np.int32(7)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8), device_offset=long(np.int32(8)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8), device_offset=long(np.int32(9)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8), device_offset=long(np.int32(10)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(3), dtype=ct.c_int8), device_offset=long(np.int32(11)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(3), dtype=ct.c_int8), device_offset=long(np.int32(12)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8), device_offset=long(np.int32(13)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8), device_offset=long(np.int32(14)), is_blocking=synchronous)
        cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(3), dtype=ct.c_int8), device_offset=long(np.int32(15)), is_blocking=synchronous)
        bytes_1148 = (np.int32(4) * n_910)
        mem_1149 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1148) if (bytes_1148 > np.int32(0)) else np.int32(1)))
        mem_1151 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1148) if (bytes_1148 > np.int32(0)) else np.int32(1)))
        group_size_1197 = np.int32(512)
        num_groups_1198 = squot32(((n_910 + group_size_1197) - np.int32(1)), group_size_1197)
        if ((np.int32(1) * (num_groups_1198 * group_size_1197)) != np.int32(0)):
            self.map_kernel_1100_var.set_args(np.int32(n_910), mem_1149, mem_1151)
            cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1100_var, (long((num_groups_1198 * group_size_1197)),), (long(group_size_1197),))
            if synchronous:
                self.queue.finish()
        nesting_size_1062 = (m_911 * n_910)
        bytes_1156 = (bytes_1148 * m_911)
        bytes_1159 = (n_910 * m_911)
        double_buffer_mem_1166 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1159) if (bytes_1159 > np.int32(0)) else np.int32(1)))
        double_buffer_mem_1167 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1156) if (bytes_1156 > np.int32(0)) else np.int32(1)))
        mem_1158 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1156) if (bytes_1156 > np.int32(0)) else np.int32(1)))
        mem_1161 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(bytes_1159) if (bytes_1159 > np.int32(0)) else np.int32(1)))
        world_mem_size_1152 = world_mem_size_1142
        history_mem_size_1154 = history_mem_size_1144
        world_mem_1153 = world_mem_1143
        history_mem_1155 = history_mem_1145
        i_920 = np.int32(0)
        one_1208 = np.int32(1)
        for counter_1207 in range(steps_914):
            group_size_1205 = np.int32(512)
            num_groups_1206 = squot32((((n_910 * m_911) + group_size_1205) - np.int32(1)), group_size_1205)
            if ((np.int32(1) * (num_groups_1206 * group_size_1205)) != np.int32(0)):
                self.map_kernel_1064_var.set_args(mem_1149, world_mem_1153, np.int32(n_910), mem_1151, np.int32(m_911), mem_1147, history_mem_1155, mem_1158, mem_1161)
                cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1064_var, (long((num_groups_1206 * group_size_1205)),), (long(group_size_1205),))
                if synchronous:
                    self.queue.finish()
            if (((n_910 * m_911) * np.int32(1)) != np.int32(0)):
                cl.enqueue_copy(self.queue, double_buffer_mem_1166, mem_1161, dest_offset=long(np.int32(0)), src_offset=long(np.int32(0)), byte_count=long(((n_910 * m_911) * np.int32(1))))
                if synchronous:
                    self.queue.finish()
            if (((n_910 * m_911) * np.int32(4)) != np.int32(0)):
                cl.enqueue_copy(self.queue, double_buffer_mem_1167, mem_1158, dest_offset=long(np.int32(0)), src_offset=long(np.int32(0)), byte_count=long(((n_910 * m_911) * np.int32(4))))
                if synchronous:
                    self.queue.finish()
            world_mem_size_tmp_1199 = bytes_1159
            history_mem_size_tmp_1200 = bytes_1156
            world_mem_tmp_1201 = double_buffer_mem_1166
            history_mem_tmp_1202 = double_buffer_mem_1167
            world_mem_size_1152 = world_mem_size_tmp_1199
            history_mem_size_1154 = history_mem_size_tmp_1200
            world_mem_1153 = world_mem_tmp_1201
            history_mem_1155 = history_mem_tmp_1202
            i_920 += one_1208
        world_mem_1163 = world_mem_1153
        world_mem_size_1162 = world_mem_size_1152
        history_mem_1165 = history_mem_1155
        history_mem_size_1164 = history_mem_size_1154
        out_mem_1193 = world_mem_1163
        out_memsize_1194 = world_mem_size_1162
        out_mem_1195 = history_mem_1165
        out_memsize_1196 = history_mem_size_1164
        return (out_memsize_1194, out_mem_1193, out_memsize_1196, out_mem_1195)

    def init(self, world_mem_1109_ext):
        n_879 = np.int32(world_mem_1109_ext.shape[np.int32(0)])
        m_880 = np.int32(world_mem_1109_ext.shape[np.int32(1)])
        world_mem_size_1108 = np.int32(world_mem_1109_ext.nbytes)
        if (type(world_mem_1109_ext) == cl.array.Array):
            world_mem_1109 = world_mem_1109_ext.data
        else:
            world_mem_1109 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(world_mem_size_1108) if (world_mem_size_1108 > np.int32(0)) else np.int32(1)))
            if (world_mem_size_1108 != np.int32(0)):
                cl.enqueue_copy(self.queue, world_mem_1109, world_mem_1109_ext, is_blocking=synchronous)
        (out_memsize_1171, out_mem_1170, out_memsize_1173, out_mem_1172) = self.futhark_init(world_mem_size_1108, world_mem_1109, n_879, m_880)
        return (cl.array.Array(self.queue, (n_879, m_880), ct.c_int8, data=out_mem_1170),
                cl.array.Array(self.queue, (n_879, m_880), ct.c_int32, data=out_mem_1172))

    def render_frame(self, all_history_mem_1119_ext):
        n_889 = np.int32(all_history_mem_1119_ext.shape[np.int32(0)])
        m_890 = np.int32(all_history_mem_1119_ext.shape[np.int32(1)])
        all_history_mem_size_1118 = np.int32(all_history_mem_1119_ext.nbytes)
        if (type(all_history_mem_1119_ext) == cl.array.Array):
            all_history_mem_1119 = all_history_mem_1119_ext.data
        else:
            all_history_mem_1119 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(all_history_mem_size_1118) if (all_history_mem_size_1118 > np.int32(0)) else np.int32(1)))
            if (all_history_mem_size_1118 != np.int32(0)):
                cl.enqueue_copy(self.queue, all_history_mem_1119, all_history_mem_1119_ext, is_blocking=synchronous)
        (out_memsize_1187, out_mem_1186) = self.futhark_render_frame(all_history_mem_size_1118, all_history_mem_1119, n_889, m_890)
        return cl.array.Array(self.queue, (n_889, m_890, np.int32(3)), ct.c_int8, data=out_mem_1186)

    def steps(self, world_mem_1143_ext, history_mem_1145_ext, steps_914_ext):
        n_910 = np.int32(world_mem_1143_ext.shape[np.int32(0)])
        m_911 = np.int32(world_mem_1143_ext.shape[np.int32(1)])
        world_mem_size_1142 = np.int32(world_mem_1143_ext.nbytes)
        if (type(world_mem_1143_ext) == cl.array.Array):
            world_mem_1143 = world_mem_1143_ext.data
        else:
            world_mem_1143 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(world_mem_size_1142) if (world_mem_size_1142 > np.int32(0)) else np.int32(1)))
            if (world_mem_size_1142 != np.int32(0)):
                cl.enqueue_copy(self.queue, world_mem_1143, world_mem_1143_ext, is_blocking=synchronous)
        n_910 = np.int32(history_mem_1145_ext.shape[np.int32(0)])
        m_911 = np.int32(history_mem_1145_ext.shape[np.int32(1)])
        history_mem_size_1144 = np.int32(history_mem_1145_ext.nbytes)
        if (type(history_mem_1145_ext) == cl.array.Array):
            history_mem_1145 = history_mem_1145_ext.data
        else:
            history_mem_1145 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE, long(long(history_mem_size_1144) if (history_mem_size_1144 > np.int32(0)) else np.int32(1)))
            if (history_mem_size_1144 != np.int32(0)):
                cl.enqueue_copy(self.queue, history_mem_1145, history_mem_1145_ext, is_blocking=synchronous)
        steps_914 = np.int32(steps_914_ext)
        (out_memsize_1194, out_mem_1193, out_memsize_1196, out_mem_1195) = self.futhark_steps(world_mem_size_1142, history_mem_size_1144, world_mem_1143, history_mem_1145, n_910, m_911, steps_914)
        return (cl.array.Array(self.queue, (n_910, m_911), ct.c_int8, data=out_mem_1193),
                cl.array.Array(self.queue, (n_910, m_911), ct.c_int32, data=out_mem_1195))
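# --- Editorial usage sketch (not part of the original generated module). It
# assumes the module-level names used above (cl, ct, synchronous,
# FUT_BLOCK_DIM, fut_opencl_src) are in scope, that pyopencl and numpy are
# installed, and that an OpenCL device is available:
#
#   import numpy as np
#   q = quadlife_alt()
#   seed = np.random.rand(64, 64) > 0.5            # boolean starting world
#   world, history = q.init(seed)
#   world, history = q.steps(world, history, np.int32(10))
#   frame = q.render_frame(history)                # (64, 64, 3) int8 color array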
bsd-3-clause
4,486,038,976,242,857,500
29.420159
150
0.537404
false
2.70206
false
false
false
bschollnick/downloader
downloader3.py
1
7816
""" Downloader, mass file downloader. """ import common import logging, logging.handlers import os import os.path import plistlib import stat import sys import time from optparse import OptionParser from yapsy.PluginManager import PluginManager SCRIPT_FILENAME = os.path.abspath(sys.argv[0]) SCRIPT_DIRECTORY = os.sep.join(os.path.split(SCRIPT_FILENAME)[0:-1]) def initialize_parser(): """ Initialize the parser, and set the basic parser options """ parser = OptionParser(usage="usage: %prog [options] filename", version="%prog 1.0") parser.add_option("-u", "--url", action="store", dest="url_to_fetch", default="", help="URL to fetch") parser.add_option("-t", "--target", action="store", dest="download_folder", default="", help="Download Folder to use",) parser.add_option("-l", "--log", action="store", dest="log_folder", default="", help="The Log Folder to use",) parser.add_option("--details", action="store_true", dest="details", default=False, help="Report details on downloads",) parser.add_option("--silent", action="store_true", dest="silent", default=False, help="Absolutely no feedback on downloading",) parser.add_option("--dllimit", action="store", dest="downloadlimit", default=0, type="int", help="Maximum # of Files to download before quitting",) parser.add_option("--skiplimit", action="store", dest="skiplimit", default=0, type="int", help="Maximum # of Files to skip before quitting",) parser.add_option("--start", action="store", dest="startingplace", default=0, type="int", help="The Offset to start at",) parser.add_option("--csv", action="store", dest="csv_file", default="", help="CSV File containing sources") return parser def plugin_parser_adds(parser, plug): """ Call the parser options from the plugin(s), to allow the plugins to install options into the parser. """ if hasattr(plug, "parser_options"): parser = plug.parser_options(parser) def parse_commandline(parser): """ Process the parser and return the options to the main. """ options = parser.parse_args()[0] return options def make_weblocation_file(filename, url): """ Make the weblocation file, to allow easy "one click" access to the gallery, etc, that originated the content. """ if not os.path.exists(filename): try: output_file = open(filename, "w") plist = dict(URL=url) plistlib.writePlist(plist, output_file) output_file.close() except IOError: pass def make_script_file(options): """ Make the shellscript file, to help automate redownload of the content. """ try: script_name = "update_capture.command" if not os.path.exists(options.download_folder + os.sep + script_name): update_script = open(options.download_folder + os.sep + script_name, "w") update_script.write("python %s " % SCRIPT_FILENAME) for x_arg in sys.argv[1:]: update_script.write('"%s"' % x_arg + " ") update_script.close() os.chmod(options.download_folder + os.sep + script_name, 511 | stat.S_IEXEC) except IOError: pass def process_commandline(): """ Process the command line options """ parser = initialize_parser() manager = PluginManager() manager.setPluginPlaces([SCRIPT_DIRECTORY + os.sep + "plugins"]) manager.collectPlugins() plugin_names = {} # Loop round the plugins and print their names. for plug in manager.getAllPlugins(): plugin_names[plug.name.lower().strip()] = plug # plugin name contains pointer to module plugin_parser_adds(parser, plug.plugin_object) options = parse_commandline(parser) if options.silent: print options if options.url_to_fetch == "": print "Please supply an URL to process." 
return None if options.download_folder == "": print "Please supply an download folder." return None if options.log_folder == "": options.log_folder = "~/logs" options.download_folder = os.path.abspath(options.download_folder) options.download_folder = common.clean_filename(\ unicode(options.download_folder)) if not options.download_folder.strip().endswith(os.sep): options.download_folder = options.download_folder + os.sep return (options, plugin_names) def main(): """ The main function. TaDa! """ log = logging.getLogger('Downloader') log.setLevel(logging.INFO) console_h = logging.StreamHandler(sys.stdout) console_formatter = logging.Formatter('%(message)s') console_h.setFormatter(console_formatter) log.addHandler(console_h) s_options, plugin_names = process_commandline() logdir = os.path.abspath(os.path.join(\ os.path.expanduser(s_options.log_folder))) print "Logging to ", logdir if not os.path.exists(logdir): os.makedirs(logdir) logfilename = os.path.abspath(os.path.join(logdir, "downloader.log")) print "Log file name: ", logfilename file_h = logging.handlers.RotatingFileHandler(logfilename, maxBytes=(50000), backupCount=7) file_format = logging.Formatter(\ "%(asctime)s - %(name)s - %(levelname)s - %(message)s") file_h.setFormatter(file_format) log.addHandler(file_h) # # Make Root download folder # if os.path.exists(s_options.download_folder) != True: os.makedirs(s_options.download_folder) for x_key in plugin_names.keys(): if getattr(s_options, x_key): plugin = plugin_names[x_key].plugin_object print "Using Plugin - %s" % x_key start_time = time.time() if not s_options.silent: log.info("Downloading to: %s", s_options.download_folder) make_weblocation_file(s_options.download_folder + os.sep + "downloaded_site.webloc", s_options.url_to_fetch) results = plugin.download(s_options) elapsed = int((time.time() - start_time) * 100)/100 if results != None: (total_downloaded, total_skipped, total_errors) = results print print log.info("Total Downloaded Files - %s", total_downloaded) log.info("Total Skipped Files - %s", total_skipped) log.info("Total Errors - %s", total_errors) log.info("Elapsed Time (Seconds) - %f", elapsed) log.info("Elapsed Time (Min) - %f", (elapsed/60)) if total_downloaded != 0: sys.exit(1) else: sys.exit(0) # Load the plugins from the plugin directory. if __name__ == "__main__": main()
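# --- Editorial note (not part of the original script): a typical invocation.
# Plugins installed under plugins/ register a boolean option matching their
# own name, which main() then dispatches on; the plugin name "gallery" below
# is hypothetical:
#
#   python downloader3.py -u "http://example.com/gallery" \
#       -t ~/Downloads/gallery --gallery --details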
mpl-2.0
2,206,838,923,007,199,500
30.643725
77
0.546827
false
4.347052
false
false
false
alfred82santa/tarrabmeCheckerGtk
src/tarrabme_checker_gtk/dialogs.py
1
3341
from gi.repository import Gtk, Gio

from .actuators import NeoPixelsActuator

__author__ = 'alfred'


class PreferencesDialog(Gtk.Dialog):

    def __init__(self, settings):
        Gtk.Dialog.__init__(self, "Preferences", None,
                            Gtk.DialogFlags.USE_HEADER_BAR | Gtk.DialogFlags.MODAL,
                            [], use_header_bar=True)
        self.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
        button = self.add_button(Gtk.STOCK_OK, Gtk.ResponseType.OK)
        button.get_style_context().add_class('suggested-action')

        self.settings = settings
        self.set_default_size(490, 350)

        self.builder = Gtk.Builder.new_from_resource('/org/me/tarrab/Checker/tarrabme-preferences.ui')

        scrolled_window = Gtk.ScrolledWindow()
        scrolled_window.set_hexpand(True)
        scrolled_window.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
        scrolled_window.add(self.builder.get_object('PerferencesView'))

        entry = self.builder.get_object('basepath_entry')
        self.settings.bind('baseurl', entry, 'text', Gio.SettingsBindFlags.DEFAULT)

        entry = self.builder.get_object('login_endpoint_entry')
        self.settings.bind('login-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
        combo = self.builder.get_object('login_method_combo')
        self.settings.bind('login-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)

        entry = self.builder.get_object('logout_endpoint_entry')
        self.settings.bind('logout-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
        combo = self.builder.get_object('logout_method_combo')
        self.settings.bind('logout-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)

        entry = self.builder.get_object('attempt_endpoint_entry')
        self.settings.bind('attempt-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
        combo = self.builder.get_object('attempt_method_combo')
        self.settings.bind('attempt-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)

        entry = self.builder.get_object('attempt_list_endpoint_entry')
        self.settings.bind('attempt-list-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
        combo = self.builder.get_object('attempt_list_method_combo')
        self.settings.bind('attempt-list-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)

        entry = self.builder.get_object('account_entry')
        self.settings.bind('account-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)

        adjustment = self.builder.get_object('windows_adjustment')
        self.settings.bind('window-count', adjustment, 'value', Gio.SettingsBindFlags.DEFAULT)

        adjustment = self.builder.get_object('columns_adjustment')
        self.settings.bind('column-count', adjustment, 'value', Gio.SettingsBindFlags.DEFAULT)

        adjustment = self.builder.get_object('rows_adjustment')
        self.settings.bind('row-count', adjustment, 'value', Gio.SettingsBindFlags.DEFAULT)

        entry = self.builder.get_object('neopixels_endpoint')
        self.settings.bind(NeoPixelsActuator.ENDPOINT_SETTING_NAME, entry, 'text', Gio.SettingsBindFlags.DEFAULT)

        self.get_content_area().pack_start(scrolled_window, True, True, 0)
        self.show_all()
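# --- Editorial usage sketch (not part of the original module): constructing
# the dialog needs the GSettings schema and GResource bundle that ship with
# the application, so this is illustrative only; the schema id below is a
# guess based on the resource path used above:
#
#   from gi.repository import Gio
#   settings = Gio.Settings.new('org.me.tarrab.Checker')
#   dialog = PreferencesDialog(settings)
#   response = dialog.run()   # Gtk.ResponseType.OK when accepted
#   dialog.destroy()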
gpl-2.0
3,221,447,033,717,781,500
46.056338
102
0.677641
false
3.655361
false
false
false
khosrow/metpx
sundew/unittests/unittest_senderAm.py
1
1082
# -*- coding: iso-8859-1 -*-
#############################################################################################
# Name: unittest_senderAm.py
# Author: Jun Hu
# Date: 2012-04-30
# Description: test cases for senderAm class
#############################################################################################
import sys,os,unittest
sys.path.insert(1, '../sundew/lib/')
os.environ['PXROOT']="."
from Logger import Logger
from Client import Client
from CacheManager import CacheManager
from senderAm import senderAm

class unittest_Template(unittest.TestCase):

    def setUp(self,logFile='log/Template.log'):
        self.logger = Logger(logFile, 'DEBUG', 'Sub')
        self.logger = self.logger.getLogger()

    def test_Template(self):
        self.assertEqual(None, None)

def suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(unittest_Template))
    return suite

if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(unittest_Template)
    unittest.TextTestRunner(verbosity=2).run(suite)
gpl-2.0
7,791,971,487,421,272,000
30.823529
93
0.575786
false
4.259843
true
false
false
praekelt/django-ultracache
ultracache/tests/utils.py
1
1072
from collections import OrderedDict


class DummyProxy(dict):

    def make_key(self, path, headers=None):
        key = path
        if headers is not None:
            key += str(frozenset(sorted(headers.items())))
        return key

    def cache(self, request, value):
        headers = {k[5:].replace("_", "-").lower(): v for \
                k, v in request.META.items() if k.startswith("HTTP_")}
        key = self.make_key(request.get_full_path(), headers)
        self[key] = value

    def is_cached(self, path, headers=None):
        # The test framework sends an empty cookie with each request. Avoid
        # copy pasta in the individual tests and just add that header here.
        if headers is None:
            headers = {u"cookie": u""}
        key = self.make_key(path, headers)
        return key in self

    def purge(self, path, headers=None):
        key = self.make_key(path, headers)
        if key in self:
            del self[key]

dummy_proxy = DummyProxy()


def dummy_purger(path, headers=None):
    dummy_proxy.purge(path, headers=headers)
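# --- Editorial usage sketch (not part of the original module): a minimal
# stand-in request object is enough to exercise the cache/purge round trip,
# since DummyProxy only touches request.META and request.get_full_path().
if __name__ == "__main__":
    class _FakeRequest(object):
        # Mimic the empty cookie the test framework sends with each request.
        META = {u"HTTP_COOKIE": u""}

        def get_full_path(self):
            return u"/page/"

    proxy = DummyProxy()
    proxy.cache(_FakeRequest(), u"<html>cached</html>")
    assert proxy.is_cached(u"/page/")          # default headers add the empty cookie
    proxy.purge(u"/page/", headers={u"cookie": u""})
    assert not proxy.is_cached(u"/page/")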
bsd-3-clause
4,270,799,919,882,227,000
29.628571
75
0.602612
false
3.842294
false
false
false
ContextLab/hypertools
hypertools/tools/format_data.py
1
7323
import warnings
import numpy as np
import six
from .._externals.ppca import PPCA
from .._shared.helpers import get_type

def format_data(x, vectorizer='CountVectorizer',
                semantic='LatentDirichletAllocation', corpus='wiki', ppca=True,
                text_align='hyper'):
    """
    Formats data into a list of numpy arrays

    This function is useful for converting mixed inputs (text, numpy arrays,
    dataframes, or lists of them) into a consistent list of numpy arrays.
    Missing data points can be filled in by interpolation using PPCA (see the
    `ppca` argument).

    Parameters
    ----------
    x : numpy array, dataframe, string or (mixed) list
        The data to convert

    vectorizer : str, dict, class or class instance
        The vectorizer to use. Built-in options are 'CountVectorizer' or
        'TfidfVectorizer'. To change default parameters, set to a dictionary
        e.g. {'model' : 'CountVectorizer', 'params' : {'max_features' : 10}}. See
        http://scikit-learn.org/stable/modules/classes.html#module-sklearn.feature_extraction.text
        for details. You can also specify your own vectorizer model as a
        class, or class instance.  With either option, the class must have a
        fit_transform method (see here:
        http://scikit-learn.org/stable/data_transforms.html). If a class, pass
        any parameters as a dictionary to vectorizer_params. If a class
        instance, no parameters can be passed.

    semantic : str, dict, class or class instance
        Text model to use to transform text data. Built-in options are
        'LatentDirichletAllocation' or 'NMF' (default: LDA). To change default
        parameters, set to a dictionary e.g. {'model' : 'NMF', 'params' :
        {'n_components' : 10}}. See
        http://scikit-learn.org/stable/modules/classes.html#module-sklearn.decomposition
        for details on the two model options. You can also specify your own
        text model as a class, or class instance.  With either option, the
        class must have a fit_transform method (see here:
        http://scikit-learn.org/stable/data_transforms.html). If a class, pass
        any parameters as a dictionary to text_params. If a class instance,
        no parameters can be passed.

    corpus : list (or list of lists) of text samples or 'wiki', 'nips', 'sotus'.
        Text to use to fit the semantic model (optional). If set to 'wiki',
        'nips' or 'sotus' and the default semantic and vectorizer models are
        used, a pretrained model will be loaded which can save a lot of time.

    ppca : bool
        Performs PPCA to fill in missing values (default: True)

    text_align : str
        Alignment algorithm to use when both text and numerical data are
        passed. If numerical arrays have the same shape, and the text data
        contains the same number of samples, the text and numerical data are
        automatically aligned to a common space. Example use case: an array
        of movie frames (frames by pixels) and text descriptions of the
        frame. In this case, the movie and text will be automatically aligned
        to the same space (default: hyperalignment).

    Returns
    ----------
    data : list of numpy arrays
        A list of formatted arrays
    """

    # not sure why i needed to import here, but its the only way I could get it to work
    from .df2mat import df2mat
    from .text2mat import text2mat
    from ..datageometry import DataGeometry

    # if x is not a list, make it one
    if type(x) is not list:
        x = [x]

    if all([isinstance(xi, six.string_types) for xi in x]):
        x = [x]

    # check data type for each element in list
    dtypes = list(map(get_type, x))

    # handle text data:
    if any(map(lambda x: x in ['list_str', 'str', 'arr_str'], dtypes)):

        # default text args
        text_args = {
            'vectorizer' : vectorizer,
            'semantic' : semantic,
            'corpus' : corpus
        }

        # filter text data
        text_data = []
        for i,j in zip(x, dtypes):
            if j in ['list_str', 'str', 'arr_str']:
                text_data.append(np.array(i).reshape(-1, 1))

        # convert text to numerical matrices
        text_data = text2mat(text_data, **text_args)

    # replace the text data with transformed data
    processed_x = []
    textidx = 0
    for i, dtype in enumerate(dtypes):
        if dtype in ['list_str', 'str', 'arr_str']:
            processed_x.append(text_data[textidx])
            textidx += 1
        elif dtype == 'df':
            processed_x.append(df2mat(x[i]))
        elif dtype == 'geo':
            text_args = {
                'vectorizer' : vectorizer,
                'semantic' : semantic,
                'corpus' : corpus
            }
            for j in format_data(x[i].get_data(), **text_args):
                processed_x.append(j)
        else:
            processed_x.append(x[i])

    # reshape anything that is 1d
    if any([i.ndim <= 1 for i in processed_x]):
        processed_x = [np.reshape(i, (i.shape[0], 1)) if i.ndim == 1 else i for i in processed_x]

    contains_text = any([dtype in ['list_str', 'str', 'arr_str'] for dtype in dtypes])
    contains_num = any([dtype in ['list_num', 'array', 'df', 'arr_num'] for dtype in dtypes])

    # if there are any nans in any of the lists, use ppca
    if ppca is True:
        if contains_num:
            num_data = []
            for i,j in zip(processed_x, dtypes):
                if j in ['list_num', 'array', 'df', 'arr_num']:
                    num_data.append(i)
            if np.isnan(np.vstack(num_data)).any():
                warnings.warn('Missing data: Inexact solution computed with PPCA (see https://github.com/allentran/pca-magic for details)')
                num_data = fill_missing(num_data)
                x_temp = []
                for dtype in dtypes:
                    if dtype in ['list_str', 'str', 'arr_str']:
                        x_temp.append(text_data.pop(0))
                    elif dtype in ['list_num', 'array', 'df', 'arr_num']:
                        x_temp.append(num_data.pop(0))
                processed_x = x_temp

    # if input data contains both text and numerical data
    if contains_num and contains_text:

        # and if they have the same number of samples
        if np.unique(np.array([i.shape[0] for i, j in zip(processed_x, dtypes)])).shape[0] == 1:

            from .align import align as aligner

            # align the data
            warnings.warn('Numerical and text data with same number of '
                          'samples detected.  Aligning data to a common space.')
            processed_x = aligner(processed_x, align=text_align, format_data=False)

    return processed_x


def fill_missing(x):

    # ppca if missing data
    m = PPCA()
    m.fit(data=np.vstack(x))
    x_pca = m.transform()

    # if the whole row is missing, return nans
    # (fixed: the original tested `type(b)==np.nan`, which is always False)
    all_missing = [idx for idx, a in enumerate(np.vstack(x)) if np.all(np.isnan(a))]
    if len(all_missing) > 0:
        for i in all_missing:
            x_pca[i, :] = np.nan

    # get the original lists back
    if len(x) > 1:
        x_split = np.cumsum([i.shape[0] for i in x][:-1])
        return list(np.split(x_pca, x_split, axis=0))
    else:
        return [x_pca]
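# --- Editorial usage sketch (not part of the original module). format_data
# accepts arrays, dataframes, text or a mix; a minimal numerical example:
#
#   import numpy as np
#   from hypertools.tools.format_data import format_data
#
#   x = np.random.rand(10)                  # 1d input...
#   formatted = format_data(x)
#   assert formatted[0].shape == (10, 1)    # ...comes back as a 2d column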
mit
-6,226,596,756,234,407,000
38.583784
139
0.604534
false
3.860306
false
false
false
RetroMelon/PatchWords
patchwords_project/patchwords/queries.py
1
3242
# The queries.py file contains a bunch of relatively complex database
# queries that probably shouldn't take place inside the views.

from patchwords.models import Category, Story, Paragraph, User, Favourite


#gets a list of the top stories, optionally restricted to one category
def getTopStories(start=0, end=5, category=None):
    #getting the stories and zipping them with their favourites
    if not category:
        stories = Story.objects.all()
    else:
        stories = Story.objects.filter(category=category)
    stories_ordered = zip(map(lambda x : x.favourites, stories), stories)
    if not stories_ordered:
        return []

    #sorting the stories
    stories_ordered.sort()
    stories_ordered.reverse()

    #unzipping the stories
    stories_ordered = zip(*(stories_ordered))[1]
    return stories_ordered[start:end]


def getTopCategories(quantity=20):
    #getting the categories and mapping them with their favourites.
    cats = Category.objects.all()
    cats_with_story_count = map(lambda x: (x.total_stories, x), cats)
    cats_with_story_count.sort()
    cats_with_story_count.reverse()

    #unzipping the categories
    cats_with_story_count = zip(*(cats_with_story_count))[1]

    #returning the top 20
    return cats_with_story_count[:quantity]


def _sortParagraphs(paragraphs):
    if not paragraphs:
        return []

    #zipping paragraphs with likes
    #zipped = map(lambda x: (x.likes, x), paragraphs)
    #zipped.sort()
    #zipped.reverse()
    print paragraphs

    #sorting by likes then date
    def comparator(x, y):
        #if we have the same likes we should compare dates instead
        if x.likes == y.likes:
            print x.likes, "and", y.likes, "are equal."
            dt_difference = (x.created_datetime < y.created_datetime)
            return int(dt_difference)
        else:
            return y.likes - x.likes

    #unzipped = zip(*zipped)[1]
    #return unzipped
    print "WAAAAAT"
    paragraphs = sorted(paragraphs, cmp=comparator)
    print "HELLOOOOOO"
    print "paragraphs", paragraphs
    return paragraphs


#a wrapper around _getMostPopularSubtree
def getMostPopularSubtree(paragraph):
    return _getMostPopularSubtree([paragraph,])


#given a paragraph list, this returns a list of lists of paragraphs.
#it assumes that the first paragraph in the list is the most popular
def _getMostPopularSubtree(paragraphs):
    #getting the most popular paragraph's children
    child_paragraphs = Paragraph.objects.filter(parent=paragraphs[0])

    #sorting all of the children
    child_paragraphs = _sortParagraphs(child_paragraphs)
    #print "child paragraphs: \n\n", child_paragraphs

    #adding the children to the list of things to return
    return_list = [child_paragraphs,]

    #if the children list is not empty, then we extend the list with the most popular subtree
    if child_paragraphs:
        most_pop = _getMostPopularSubtree(child_paragraphs)
        if most_pop and most_pop[0]:
            return_list.extend(most_pop)

    return return_list


def get_favourited_stories(request, username):
    user = User.objects.get(username=username)
    favourites = list(Favourite.objects.filter(user=user))
    stories = []
    for favourite in favourites:
        stories += [favourite.story]
    return stories
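# --- Editorial note (not part of the original module): the `cmp=` keyword
# used in _sortParagraphs exists only in Python 2. The Python 3 equivalent
# would wrap the same comparator:
#
#   from functools import cmp_to_key
#   paragraphs = sorted(paragraphs, key=cmp_to_key(comparator))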
mit
3,769,124,615,543,503,400
30.784314
92
0.692474
false
3.606229
false
false
false
Collisionc/sickbeard_mp4_automator
delugePostProcess.py
1
5295
#!/usr/bin/env python

import os
import sys
import shutil
from autoprocess import autoProcessTV, autoProcessMovie, autoProcessTVSR, sonarr, radarr
from readSettings import ReadSettings
from mkvtomp4 import MkvtoMp4
from deluge_client import DelugeRPCClient
import logging
from logging.config import fileConfig

logpath = '/var/log/sickbeard_mp4_automator'

if os.name == 'nt':
    logpath = os.path.dirname(sys.argv[0])
elif not os.path.isdir(logpath):
    try:
        os.mkdir(logpath)
    except:
        logpath = os.path.dirname(sys.argv[0])
configPath = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), 'logging.ini')).replace("\\", "\\\\")
logPath = os.path.abspath(os.path.join(logpath, 'index.log')).replace("\\", "\\\\")
fileConfig(configPath, defaults={'logfilename': logPath})
log = logging.getLogger("delugePostProcess")

log.info("Deluge post processing started.")

settings = ReadSettings(os.path.dirname(sys.argv[0]), "autoProcess.ini")
categories = [settings.deluge['sb'], settings.deluge['cp'], settings.deluge['sonarr'], settings.deluge['radarr'], settings.deluge['sr'], settings.deluge['bypass']]
remove = settings.deluge['remove']

if len(sys.argv) < 4:
    log.error("Not enough command line parameters present, are you launching this from deluge?")
    sys.exit()

path = str(sys.argv[3])
torrent_name = str(sys.argv[2])
torrent_id = str(sys.argv[1])
delete_dir = None

log.debug("Path: %s." % path)
log.debug("Torrent: %s." % torrent_name)
log.debug("Hash: %s." % torrent_id)

client = DelugeRPCClient(host=settings.deluge['host'], port=int(settings.deluge['port']), username=settings.deluge['user'], password=settings.deluge['pass'])
client.connect()

if client.connected:
    log.info("Successfully connected to Deluge")
else:
    log.error("Failed to connect to Deluge")
    sys.exit()

torrent_data = client.call('core.get_torrent_status', torrent_id, ['files', 'label'])
try:
    torrent_files = torrent_data[b'files']
    category = torrent_data[b'label'].lower().decode()
except:
    torrent_files = torrent_data['files']
    category = torrent_data['label'].lower()

files = []
log.info("List of files in torrent:")
for contents in torrent_files:
    try:
        files.append(contents[b'path'].decode())
        log.debug(contents[b'path'].decode())
    except:
        files.append(contents['path'])
        log.info(contents['path'])

if category.lower() not in categories:
    log.error("No valid category detected.")
    sys.exit()

if len(categories) != len(set(categories)):
    log.error("Duplicate category detected. Category names must be unique.")
    sys.exit()

if settings.deluge['convert']:
    # Check for custom Deluge output_dir
    if settings.deluge['output_dir']:
        settings.output_dir = settings.deluge['output_dir']
        log.info("Overriding output_dir to %s." % settings.deluge['output_dir'])

    # Perform conversion.
    settings.delete = False
    if not settings.output_dir:
        suffix = "convert"
        settings.output_dir = os.path.join(path, ("%s-%s" % (torrent_name, suffix)))
        if not os.path.exists(settings.output_dir):
            os.mkdir(settings.output_dir)
        delete_dir = settings.output_dir

    converter = MkvtoMp4(settings)

    for filename in files:
        inputfile = os.path.join(path, filename)
        if MkvtoMp4(settings).validSource(inputfile):
            log.info("Converting file %s at location %s." % (inputfile, settings.output_dir))
            try:
                output = converter.process(inputfile)
            except:
                log.info("Error converting file %s." % inputfile)

    path = converter.output_dir
else:
    suffix = "copy"
    newpath = os.path.join(path, ("%s-%s" % (torrent_name, suffix)))
    if not os.path.exists(newpath):
        os.mkdir(newpath)
    for filename in files:
        inputfile = os.path.join(path, filename)
        log.info("Copying file %s to %s." % (inputfile, newpath))
        shutil.copy(inputfile, newpath)
    path = newpath
    delete_dir = newpath

# Send to Sickbeard
if (category == categories[0]):
    log.info("Passing %s directory to Sickbeard." % path)
    autoProcessTV.processEpisode(path, settings)
# Send to CouchPotato
elif (category == categories[1]):
    log.info("Passing %s directory to Couch Potato." % path)
    autoProcessMovie.process(path, settings, torrent_name)
# Send to Sonarr
elif (category == categories[2]):
    log.info("Passing %s directory to Sonarr." % path)
    sonarr.processEpisode(path, settings)
elif (category == categories[3]):
    log.info("Passing %s directory to Radarr." % path)
    radarr.processMovie(path, settings)
elif (category == categories[4]):
    log.info("Passing %s directory to Sickrage." % path)
    autoProcessTVSR.processEpisode(path, settings)
elif (category == categories[5]):
    log.info("Bypassing any further processing as per category.")

if delete_dir:
    if os.path.exists(delete_dir):
        try:
            os.rmdir(delete_dir)
            log.debug("Successfully removed temporary directory %s." % delete_dir)
        except:
            log.exception("Unable to delete temporary directory.")

if remove:
    try:
        client.call('core.remove_torrent', torrent_id, True)
    except:
        log.exception("Unable to remove torrent from deluge.")
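# --- Editorial note (not part of the original script): Deluge's Execute
# plugin invokes post-processing scripts with three arguments, roughly:
#
#   delugePostProcess.py <torrent_id> <torrent_name> <save_path>
#
# which matches the sys.argv[1], sys.argv[2] and sys.argv[3] reads near the
# top of this script.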
mit
6,938,189,214,140,944,000
34.066225
163
0.671388
false
3.449511
false
false
false
FreeOpcUa/python-opcua
opcua/common/events.py
1
8172
import copy

from opcua import ua
import opcua
from opcua.ua.uaerrors import UaError
from opcua.common import ua_utils


class Event(object):
    """
    OPC UA Event object.
    This class is inherited by the common event objects such as BaseEvent,
    the other auto-generated standard events and custom events.
    Events are used to trigger event notifications on the server side and
    are sent to clients for every event the server raises.

    Developer Warning:
    On the server side the data type of each attribute must be known, so
    add properties using the add_property method!
    """

    def __init__(self, emitting_node=ua.ObjectIds.Server):
        self.server_handle = None
        self.select_clauses = None
        self.event_fields = None
        self.data_types = {}
        if isinstance(emitting_node, ua.NodeId):
            self.emitting_node = emitting_node
        else:
            self.emitting_node = ua.NodeId(emitting_node)
        # save current attributes
        self.internal_properties = list(self.__dict__.keys())[:] + ["internal_properties"]

    def __str__(self):
        return "{0}({1})".format(
            self.__class__.__name__,
            [str(k) + ":" + str(v) for k, v in self.__dict__.items() if k not in self.internal_properties])
    __repr__ = __str__

    def add_property(self, name, val, datatype):
        """
        Add a property to the event and store its data type.
        """
        setattr(self, name, val)
        self.data_types[name] = datatype

    def get_event_props_as_fields_dict(self):
        """
        Convert all properties of the Event class to a dict of variants.
        """
        field_vars = {}
        for key, value in vars(self).items():
            if not key.startswith("__") and key not in self.internal_properties:
                field_vars[key] = ua.Variant(value, self.data_types[key])
        return field_vars

    @staticmethod
    def from_field_dict(fields):
        """
        Create an Event object from a dict of names and variants.
        """
        ev = Event()
        for k, v in fields.items():
            ev.add_property(k, v.Value, v.VariantType)
        return ev

    def to_event_fields_using_subscription_fields(self, select_clauses):
        """
        Using a new select_clauses and the original select_clauses used
        during subscription, return a field list.
        """
        fields = []
        for sattr in select_clauses:
            for idx, o_sattr in enumerate(self.select_clauses):
                if sattr.BrowsePath == o_sattr.BrowsePath and sattr.AttributeId == o_sattr.AttributeId:
                    fields.append(self.event_fields[idx])
                    break
        return fields

    def to_event_fields(self, select_clauses):
        """
        Return a field list using a select clause and the object properties.
        """
        none_field = ua.Variant(None, ua.VariantType.Null)
        fields = []
        for sattr in select_clauses:
            if not sattr.BrowsePath:
                name = ua.AttributeIds(sattr.AttributeId).name
            else:
                name = sattr.BrowsePath[0].Name
            try:
                val = getattr(self, name)
            except AttributeError:
                field = none_field
            else:
                field = ua.Variant(copy.deepcopy(val), self.data_types[name])
            fields.append(field)
        return fields

    @staticmethod
    def from_event_fields(select_clauses, fields):
        """
        Instantiate an Event object from select_clauses and fields.
        """
        ev = Event()
        ev.select_clauses = select_clauses
        ev.event_fields = fields
        for idx, sattr in enumerate(select_clauses):
            if len(sattr.BrowsePath) == 0:
                name = sattr.AttributeId.name
            else:
                name = sattr.BrowsePath[0].Name
            ev.add_property(name, fields[idx].Value, fields[idx].VariantType)
        return ev


def get_filter_from_event_type(eventtypes):
    evfilter = ua.EventFilter()
    evfilter.SelectClauses = select_clauses_from_evtype(eventtypes)
    evfilter.WhereClause = where_clause_from_evtype(eventtypes)
    return evfilter


def select_clauses_from_evtype(evtypes):
    clauses = []
    selected_paths = []
    for evtype in evtypes:
        for prop in get_event_properties_from_type_node(evtype):
            if prop.get_browse_name() not in selected_paths:
                op = ua.SimpleAttributeOperand()
                op.AttributeId = ua.AttributeIds.Value
                op.BrowsePath = [prop.get_browse_name()]
                clauses.append(op)
                selected_paths.append(prop.get_browse_name())
    return clauses


def where_clause_from_evtype(evtypes):
    cf = ua.ContentFilter()
    el = ua.ContentFilterElement()

    # operands can be ElementOperand, LiteralOperand, AttributeOperand, SimpleAttribute.
    # Create a clause where the generated event type property EventType
    # must be a subtype of the events in the evtypes argument.

    # the first operand is the attribute event type
    op = ua.SimpleAttributeOperand()
    # op.TypeDefinitionId = evtype.nodeid
    op.BrowsePath.append(ua.QualifiedName("EventType", 0))
    op.AttributeId = ua.AttributeIds.Value
    el.FilterOperands.append(op)

    # now create a list of all subtypes we want to accept
    subtypes = []
    for evtype in evtypes:
        subtypes += [st.nodeid for st in ua_utils.get_node_subtypes(evtype)]
    subtypes = list(set(subtypes))  # remove duplicates
    for subtypeid in subtypes:
        op = ua.LiteralOperand()
        op.Value = ua.Variant(subtypeid)
        el.FilterOperands.append(op)

    el.FilterOperator = ua.FilterOperator.InList
    cf.Elements.append(el)
    return cf


def get_event_properties_from_type_node(node):
    properties = []
    curr_node = node
    while True:
        properties.extend(curr_node.get_properties())
        if curr_node.nodeid.Identifier == ua.ObjectIds.BaseEventType:
            break
        parents = curr_node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype, direction=ua.BrowseDirection.Inverse, includesubtypes=True)
        if len(parents) != 1:  # Something went wrong
            return None
        curr_node = parents[0]
    return properties


def get_event_obj_from_type_node(node):
    """
    Return an Event object from an event type node.
    """
    if node.nodeid.Identifier in opcua.common.event_objects.IMPLEMENTED_EVENTS.keys():
        return opcua.common.event_objects.IMPLEMENTED_EVENTS[node.nodeid.Identifier]()
    else:
        parent_identifier, parent_eventtype = _find_parent_eventtype(node)

        class CustomEvent(parent_eventtype):

            def __init__(self):
                parent_eventtype.__init__(self)
                self.EventType = node.nodeid
                curr_node = node
                while curr_node.nodeid.Identifier != parent_identifier:
                    for prop in curr_node.get_properties():
                        name = prop.get_browse_name().Name
                        val = prop.get_data_value()
                        self.add_property(name, val.Value.Value, val.Value.VariantType)
                    parents = curr_node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype, direction=ua.BrowseDirection.Inverse, includesubtypes=True)
                    if len(parents) != 1:  # Something went wrong
                        raise UaError("Parent of event type could not be found")
                    curr_node = parents[0]
                self._freeze = True

        return CustomEvent()


def _find_parent_eventtype(node):
    """
    """
    parents = node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype, direction=ua.BrowseDirection.Inverse, includesubtypes=True)
    if len(parents) != 1:  # Something went wrong
        raise UaError("Parent of event type could not be found")
    if parents[0].nodeid.Identifier in opcua.common.event_objects.IMPLEMENTED_EVENTS.keys():
        return parents[0].nodeid.Identifier, opcua.common.event_objects.IMPLEMENTED_EVENTS[parents[0].nodeid.Identifier]
    else:
        return _find_parent_eventtype(parents[0])
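# A hedged sketch of how the helpers above are typically driven from the
# client side: subscribe_events() builds its EventFilter through
# get_filter_from_event_type(), and incoming notifications are decoded back
# into Event objects via Event.from_event_fields(). The endpoint URL below is
# hypothetical; the Client/Subscription calls follow this library's API.
if __name__ == "__main__":
    from opcua import Client

    class SubHandler(object):
        # callback convention used by this library's subscription machinery
        def event_notification(self, event):
            print("New event received:", event)

    client = Client("opc.tcp://localhost:4840/freeopcua/server/")  # hypothetical endpoint
    client.connect()
    try:
        sub = client.create_subscription(500, SubHandler())
        # defaults: emitting node is the Server object, event type is BaseEventType
        sub.subscribe_events()
    finally:
        client.disconnect()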
lgpl-3.0
-8,488,097,905,301,647,000
34.376623
150
0.618453
false
4.043543
false
false
false
birdland/dlkit-doc
dlkit/services/grading.py
1
224535
# -*- coding: utf-8 -*-
"""Grading Open Service Interface Definitions
grading version 3.0.0

The Grading OSID defines a service to apply grades or ratings.

Grade Systems

The grade system sessions provide the means to retrieve and manage
``GradeSystem`` definitions. A ``GradeSystem`` is a fixed set of
``Grades``. ``GradeSystems`` may also take the form of a numerical score
as well as a rating based on some system. ``GradeEntries`` belong to a
single ``GradebookColumn``.

Gradebook Columns

A ``Gradebook`` is represented by a series of ``GradebookColumns``. A
``GradeBookColumn`` represents something to be graded and is joined to a
single ``GradeSystem``. A ``GradebookColumn`` may be constrained to a
single grader.

Grade Entries

A ``GradebookColumn`` is comprised of a series of ``GradeEntry``
elements. A ``GradebookColumn`` may represent "Assignment 3" while a
``GradeEntry`` may represent the assignment turned in by a particular
student.

A ``Grade`` can be applied to a ``GradeEntry`` that relates the entry to
a grader and a key ``Resource``. In the case of a class gradebook, the
key resource represents the student. If there are multiple graders for
the same key resource, each grader gets their own ``GradebookColumn``.

Gradebooks may also be used to capture ratings about other objects. In
the case where people vote for their favorite assets, the key resource
represents the ``Asset``.

``GradebookColumns`` may have a ``GradebookColumnSummary`` entry for
summary results and statistics across all ``GradeEntries`` in the
column.

Gradebook Cataloging

``GradebookColumns`` are organized into ``Gradebooks``. ``Gradebooks``
also provide for a federated hierarchy of ``GradebookColumns``. Simple
reordering of ``GradebookColumns`` can be performed by moving the
``GradebookColumn`` relative to another. The relative positioning may
reference two ``GradebookColumns`` through the federation.

Sub Packages

The Grading OSID includes several subpackages. The Grading Transform
OSID provides a means of translating one ``GradeSystem`` to another. The
Grading Calculation OSID defines derived ``GradebookColumns``. The
Grading Batch OSID manages ``GradeSystems,`` ``GradeEntries,``
``Gradebooks,`` and ``GradebookColumns`` in bulk.

"""
from ..osid import managers as osid_managers
from ..osid import sessions as osid_sessions
from ..osid import objects as osid_objects
from ..osid import markers as osid_markers
from ..osid import records as osid_records
from ..osid import queries as osid_queries
from ..osid import searches as osid_searches


class GradingProfile(osid_managers.OsidProfile):
    """The ``GradingProfile`` describes the interoperability among grading services."""

    def __init__(self):
        self._provider_manager = None

    def supports_grade_system_lookup(self):
        """Tests if a grade system lookup service is supported.

        :return: ``true`` if grade system lookup is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_grade_system_query(self):
        """Tests if a grade system query service is supported.

        :return: ``true`` if grade system query is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_grade_system_admin(self):
        """Tests if a grade system administrative service is supported.

        :return: ``true`` if grade system admin is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_grade_entry_lookup(self):
        """Tests if a grade entry lookup service is supported.

        :return: ``true`` if grade entry lookup is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_grade_entry_query(self):
        """Tests if a grade entry query service is supported.

        :return: ``true`` if grade entry query is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_grade_entry_admin(self):
        """Tests if a grade entry administrative service is supported.

        :return: ``true`` if grade entry admin is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_gradebook_column_lookup(self):
        """Tests if a gradebook column lookup service is supported.

        :return: ``true`` if gradebook column lookup is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_gradebook_column_query(self):
        """Tests if a gradebook column query service is supported.

        :return: ``true`` if gradebook column query is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_gradebook_column_admin(self):
        """Tests if a gradebook column administrative service is supported.

        :return: ``true`` if gradebook column admin is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_gradebook_lookup(self):
        """Tests if a gradebook lookup service is supported.

        :return: ``true`` if gradebook lookup is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def supports_gradebook_admin(self):
        """Tests if a gradebook administrative service is supported.

        :return: ``true`` if gradebook admin is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_grade_record_types(self):
        """Gets the supported ``Grade`` record types.

        :return: a list containing the supported ``Grade`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    grade_record_types = property(fget=get_grade_record_types)

    def get_grade_system_record_types(self):
        """Gets the supported ``GradeSystem`` record types.

        :return: a list containing the supported ``GradeSystem`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    grade_system_record_types = property(fget=get_grade_system_record_types)

    def get_grade_system_search_record_types(self):
        """Gets the supported ``GradeSystem`` search record types.

        :return: a list containing the supported ``GradeSystem`` search record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    grade_system_search_record_types = property(fget=get_grade_system_search_record_types)

    def get_grade_entry_record_types(self):
        """Gets the supported ``GradeEntry`` record types.

        :return: a list containing the supported ``GradeEntry`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    grade_entry_record_types = property(fget=get_grade_entry_record_types)

    def get_grade_entry_search_record_types(self):
        """Gets the supported ``GradeEntry`` search record types.

        :return: a list containing the supported ``GradeEntry`` search record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    grade_entry_search_record_types = property(fget=get_grade_entry_search_record_types)

    def get_gradebook_column_record_types(self):
        """Gets the supported ``GradebookColumn`` record types.

        :return: a list containing the supported ``GradebookColumn`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    gradebook_column_record_types = property(fget=get_gradebook_column_record_types)

    def get_gradebook_column_search_record_types(self):
        """Gets the supported gradebook column search record types.

        :return: a list containing the supported ``GradebookColumn`` search record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    gradebook_column_search_record_types = property(fget=get_gradebook_column_search_record_types)

    def get_gradebook_column_summary_record_types(self):
        """Gets the supported ``GradebookColumnSummary`` record types.

        :return: a list containing the supported ``GradebookColumnSummary`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    gradebook_column_summary_record_types = property(fget=get_gradebook_column_summary_record_types)

    def get_gradebook_record_types(self):
        """Gets the supported ``Gradebook`` record types.

        :return: a list containing the supported ``Gradebook`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    gradebook_record_types = property(fget=get_gradebook_record_types)

    def get_gradebook_search_record_types(self):
        """Gets the supported gradebook search record types.

        :return: a list containing the supported ``Gradebook`` search record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    gradebook_search_record_types = property(fget=get_gradebook_search_record_types)
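    # Illustration (hedged): a consumer would normally gate its calls on the
    # support tests above before using a capability, and can introspect the
    # record types a provider understands. 'manager' is assumed to be a
    # GradingManager obtained from a dlkit runtime; the bootstrap is omitted.
    #
    #     if manager.supports_grade_system_lookup():
    #         for record_type in manager.get_grade_system_record_types():
    #             print(record_type)
    #     else:
    #         print("grade system lookup not available from this provider")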
    ##
    # The following methods are from osid.grading.GradebookColumnLookupSession

    def get_gradebook_id(self):
        """Gets the ``Gradebook``  ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_lookup_gradebook_columns(self):
        """Tests if this user can perform ``GradebookColumn`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_comparative_gradebook_column_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_gradebook_column_view(self):
        """A complete view of the ``GradebookColumn`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include gradebook columns in gradebooks
        which are children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook_column(self, gradebook_column_id):
        """Gets the ``GradebookColumn`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``GradebookColumn`` may have a
        different ``Id`` than requested, such as the case where a
        duplicate ``Id`` was assigned to a ``GradebookColumn`` and
        retained for compatibility.

        :param gradebook_column_id: ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column
        :rtype: ``osid.grading.GradebookColumn``
        :raise: ``NotFound`` -- ``gradebook_column_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumn

    def get_gradebook_columns_by_ids(self, gradebook_column_ids):
        """Gets a ``GradebookColumnList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the gradebook
        columns specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible gradebook columns may be omitted from the list.

        :param gradebook_column_ids: the list of ``Ids`` to retrieve
        :type gradebook_column_ids: ``osid.id.IdList``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_genus_type(self, gradebook_column_genus_type):
        """Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which does not include gradebook columns of genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_genus_type: a gradebook column genus type
        :type gradebook_column_genus_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_parent_genus_type(self, gradebook_column_genus_type):
        """Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which includes any additional columns with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_genus_type: a gradebook column genus type
        :type gradebook_column_genus_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_record_type(self, gradebook_column_record_type):
        """Gets a ``GradebookColumnList`` containing the given gradebook column record ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_record_type: a gradebook column record type
        :type gradebook_column_record_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns(self):
        """Gets all gradebook columns.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :return: a ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    gradebook_columns = property(fget=get_gradebook_columns)

    def supports_summary(self):
        """Tests if a summary entry is available.

        :return: ``true`` if a summary entry is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_column_summary(self, gradebook_column_id):
        """Gets the ``GradebookColumnSummary`` for summary results.

        :param gradebook_column_id: ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column summary
        :rtype: ``osid.grading.GradebookColumnSummary``
        :raise: ``NotFound`` -- ``gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``has_summary()`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnSummary

    ##
    # The following methods are from osid.grading.GradebookColumnQuerySession

    def get_gradebook_id(self):
        """Gets the ``Gradebook``  ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_search_gradebook_columns(self):
        """Tests if this user can perform ``GradebookColumn`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include gradebook columns in gradebooks
        which are children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook_column_query(self):
        """Gets a gradebook column query.

        :return: the gradebook column query
        :rtype: ``osid.grading.GradebookColumnQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnQuery

    gradebook_column_query = property(fget=get_gradebook_column_query)

    def get_gradebook_columns_by_query(self, gradebook_column_query):
        """Gets a list of gradebook columns matching the given query.

        :param gradebook_column_query: the gradebook column query
        :type gradebook_column_query: ``osid.grading.GradebookColumnQuery``
        :return: the returned ``GradebookColumnList``
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_column_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList
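    # Illustration (hedged): the intended query round trip. The matcher shown,
    # match_display_name(), follows the OsidObjectQuery convention; its exact
    # signature and the Type used for string matching depend on the provider,
    # so treat this as a sketch only.
    #
    #     query = session.get_gradebook_column_query()
    #     query.match_display_name("Assignment 3", wildcard_string_match_type, True)
    #     for column in session.get_gradebook_columns_by_query(query):
    #         print(column.get_display_name().get_text())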
    ##
    # The following methods are from osid.grading.GradebookColumnAdminSession

    def get_gradebook_id(self):
        """Gets the ``Gradebook``  ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_create_gradebook_columns(self):
        """Tests if this user can create gradebook columns.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a gradebook
        column will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer create
        operations to an unauthorized user.

        :return: ``false`` if ``GradebookColumn`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def can_create_gradebook_column_with_record_types(self, gradebook_column_record_types):
        """Tests if this user can create a single ``GradebookColumn`` using the desired record types.

        While ``GradingManager.getGradebookColumnRecordTypes()`` can be
        used to examine which records are supported, this method tests
        which record(s) are required for creating a specific
        ``GradebookColumn``. Providing an empty array tests if a
        ``GradebookColumn`` can be created with no records.

        :param gradebook_column_record_types: array of gradebook column record types
        :type gradebook_column_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``GradebookColumn`` creation using the specified record ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_column_form_for_create(self, gradebook_column_record_types):
        """Gets the gradebook column form for creating new gradebook columns.

        A new form should be requested for each create transaction.

        :param gradebook_column_record_types: array of gradebook column record types
        :type gradebook_column_record_types: ``osid.type.Type[]``
        :return: the gradebook column form
        :rtype: ``osid.grading.GradebookColumnForm``
        :raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnForm

    def create_gradebook_column(self, gradebook_column_form):
        """Creates a new ``GradebookColumn``.

        :param gradebook_column_form: the form for this ``GradebookColumn``
        :type gradebook_column_form: ``osid.grading.GradebookColumnForm``
        :return: the new ``GradebookColumn``
        :rtype: ``osid.grading.GradebookColumn``
        :raise: ``IllegalState`` -- ``gradebook_column_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumn

    def can_update_gradebook_columns(self):
        """Tests if this user can update gradebook columns.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a
        ``GradebookColumn`` will result in a ``PermissionDenied``. This
        is intended as a hint to an application that may opt not to
        offer update operations to an unauthorized user.

        :return: ``false`` if gradebook column modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_column_form_for_update(self, gradebook_column_id):
        """Gets the gradebook column form for updating an existing gradebook column.

        A new gradebook column form should be requested for each update
        transaction.

        :param gradebook_column_id: the ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column form
        :rtype: ``osid.grading.GradebookColumnForm``
        :raise: ``NotFound`` -- ``gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnForm

    def update_gradebook_column(self, gradebook_column_form):
        """Updates an existing gradebook column.

        :param gradebook_column_form: the form containing the elements to be updated
        :type gradebook_column_form: ``osid.grading.GradebookColumnForm``
        :raise: ``IllegalState`` -- ``gradebook_column_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def sequence_gradebook_columns(self, gradebook_column_ids):
        """Resequences the gradebook columns.

        :param gradebook_column_ids: the ``Ids`` of the ``GradebookColumns``
        :type gradebook_column_ids: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``gradebook_column_id_list`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def move_gradebook_column(self, front_gradebook_column_id, back_gradebook_column_id):
        """Moves a gradebook column in front of another.

        :param front_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type front_gradebook_column_id: ``osid.id.Id``
        :param back_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type back_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def copy_gradebook_column_entries(self, source_gradebook_column_id, target_gradebook_column_id):
        """Copies gradebook column entries from one column to another.

        If the target gradebook column grade system differs from the
        source, the grades in the entries are transformed to the new
        grade system.

        :param source_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type source_gradebook_column_id: ``osid.id.Id``
        :param target_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type target_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_gradebook_columns(self):
        """Tests if this user can delete gradebook columns.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``GradebookColumn`` will result in a ``PermissionDenied``. This
        is intended as a hint to an application that may opt not to
        offer delete operations to an unauthorized user.

        :return: ``false`` if ``GradebookColumn`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def delete_gradebook_column(self, gradebook_column_id):
        """Deletes the ``GradebookColumn`` identified by the given ``Id``.

        :param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` to delete
        :type gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- a ``GradebookColumn`` was not found identified by the given ``Id``
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_gradebook_column_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``GradebookColumns``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``GradebookColumn`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def alias_gradebook_column(self, gradebook_column_id, alias_id):
        """Adds an ``Id`` to a ``GradebookColumn`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``GradebookColumn`` is determined by
        the provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another gradebook column,
        it is reassigned to the given gradebook column ``Id``.

        :param gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``gradebook_column_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    ##
    # The following methods are from osid.grading.GradebookLookupSession

    def can_lookup_gradebooks(self):
        """Tests if this user can perform ``Gradebook`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_comparative_gradebook_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_gradebook_view(self):
        """A complete view of the ``Gradebook`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook(self, gradebook_id):
        """Gets the ``Gradebook`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``Gradebook`` may have a
        different ``Id`` than requested, such as the case where a
        duplicate ``Id`` was assigned to a ``Gradebook`` and retained
        for compatibility.

        :param gradebook_id: ``Id`` of the ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :return: the gradebook
        :rtype: ``osid.grading.Gradebook``
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    def get_gradebooks_by_ids(self, gradebook_ids):
        """Gets a ``GradebookList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the
        gradebooks specified in the ``Id`` list, in the order of the
        list, including duplicates, or an error results if an ``Id`` in
        the supplied list is not found or inaccessible. Otherwise,
        inaccessible ``Gradebook`` objects may be omitted from the list
        and may present the elements in any order including returning a
        unique set.

        :param gradebook_ids: the list of ``Ids`` to retrieve
        :type gradebook_ids: ``osid.id.IdList``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_genus_type(self, gradebook_genus_type):
        """Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which does not include gradebooks of types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param gradebook_genus_type: a gradebook genus type
        :type gradebook_genus_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_parent_genus_type(self, gradebook_genus_type):
        """Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which includes any additional gradebooks with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param gradebook_genus_type: a gradebook genus type
        :type gradebook_genus_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_record_type(self, gradebook_record_type):
        """Gets a ``GradebookList`` containing the given gradebook record ``Type``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param gradebook_record_type: a gradebook record type
        :type gradebook_record_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_provider(self, resource_id):
        """Gets a ``GradebookList`` for the given provider.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param resource_id: a resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks(self):
        """Gets all ``Gradebooks``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :return: a ``GradebookList``
        :rtype: ``osid.grading.GradebookList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    gradebooks = property(fget=get_gradebooks)

    ##
    # The following methods are from osid.grading.GradebookAdminSession

    def can_create_gradebooks(self):
        """Tests if this user can create ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a
        ``Gradebook`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may not wish to offer
        create operations to unauthorized users.

        :return: ``false`` if ``Gradebook`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def can_create_gradebook_with_record_types(self, gradebook_record_types):
        """Tests if this user can create a single ``Gradebook`` using the desired record types.

        While ``GradingManager.getGradebookRecordTypes()`` can be used
        to examine which records are supported, this method tests which
        record(s) are required for creating a specific ``Gradebook``.
        Providing an empty array tests if a ``Gradebook`` can be created
        with no records.

        :param gradebook_record_types: array of gradebook record types
        :type gradebook_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Gradebook`` creation using the specified ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_form_for_create(self, gradebook_record_types):
        """Gets the gradebook form for creating new gradebooks.

        A new form should be requested for each create transaction.

        :param gradebook_record_types: array of gradebook record types
        :type gradebook_record_types: ``osid.type.Type[]``
        :return: the gradebook form
        :rtype: ``osid.grading.GradebookForm``
        :raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookForm

    def create_gradebook(self, gradebook_form):
        """Creates a new ``Gradebook``.

        :param gradebook_form: the form for this ``Gradebook``
        :type gradebook_form: ``osid.grading.GradebookForm``
        :return: the new ``Gradebook``
        :rtype: ``osid.grading.Gradebook``
        :raise: ``IllegalState`` -- ``gradebook_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    def can_update_gradebooks(self):
        """Tests if this user can update ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a
        ``Gradebook`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may not wish to offer
        update operations to unauthorized users.

        :return: ``false`` if ``Gradebook`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_form_for_update(self, gradebook_id):
        """Gets the gradebook form for updating an existing gradebook.

        A new gradebook form should be requested for each update
        transaction.

        :param gradebook_id: the ``Id`` of the ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :return: the gradebook form
        :rtype: ``osid.grading.GradebookForm``
        :raise: ``NotFound`` -- ``gradebook_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookForm

    def update_gradebook(self, gradebook_form):
        """Updates an existing gradebook.

        :param gradebook_form: the form containing the elements to be updated
        :type gradebook_form: ``osid.grading.GradebookForm``
        :raise: ``IllegalState`` -- ``gradebook_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_gradebooks(self):
        """Tests if this user can delete gradebooks.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``Gradebook`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may not wish to offer
        delete operations to unauthorized users.

        :return: ``false`` if ``Gradebook`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def delete_gradebook(self, gradebook_id):
        """Deletes a ``Gradebook``.

        :param gradebook_id: the ``Id`` of the ``Gradebook`` to remove
        :type gradebook_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_gradebook_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``Gradebook`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def alias_gradebook(self, gradebook_id, alias_id):
        """Adds an ``Id`` to a ``Gradebook`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Gradebook`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another gradebook, it is
        reassigned to the given gradebook ``Id``.

        :param gradebook_id: the ``Id`` of a ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
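# Illustration (hedged): the admin sessions above are form-based, so creates
# and updates are two-step transactions. set_display_name()/set_description()
# follow the OsidObjectForm convention and are assumed here for illustration.
#
#     form = session.get_gradebook_form_for_create([])  # no record types
#     form.set_display_name("Fall Term")
#     form.set_description("Gradebook for the fall term")
#     gradebook = session.create_gradebook(form)
#
#     # a fresh form is required for each update transaction
#     update_form = session.get_gradebook_form_for_update(gradebook.get_id())
#     update_form.set_description("Fall term, section 2")
#     session.update_gradebook(update_form)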
class GradingManager(osid_managers.OsidManager, osid_sessions.OsidSession, GradingProfile):
    """The grading manager provides access to grading sessions and provides interoperability tests for various aspects of this service.

    The sessions included in this manager are:

      * ``GradeSystemLookupSession:`` a session to look up grades and grade systems
      * ``GradeSystemQuerySession:`` a session to query grade systems
      * ``GradeSystemSearchSession:`` a session to search grade systems
      * ``GradeSystemAdminSession:`` a session to manage grade systems
      * ``GradeSystemNotificationSession:`` a session for subscribing to new or deleted grades or grade systems
      * ``GradeSystemGradebookSession:`` a session for retrieving grade system to gradebook mappings
      * ``GradeSystemGradebookAssignmentSession:`` a session for managing grade system to gradebook mappings
      * ``GradeSystemSmartGradebookSession:`` a session for managing smart gradebooks of grade systems
      * ``GradeEntryLookupSession:`` a session to look up grade entries
      * ``GradeEntryQuerySession:`` a session to query grade entries
      * ``GradeEntrySearchSession:`` a session to search grade entries
      * ``GradeEntryAdminSession:`` a session to create, modify and delete grade entries
      * ``GradeEntryNotificationSession:`` a session to receive messages pertaining to grade entry changes
      * ``GradebookColumnLookupSession:`` a session to look up gradebook columns
      * ``GradebookColumnQuerySession:`` a session to query gradebook columns
      * ``GradebookColumnSearchSession:`` a session to search gradebook columns
      * ``GradebookColumnAdminSession:`` a session to manage gradebook columns
      * ``GradebookColumnNotificationSession:`` a session for subscribing to new or deleted gradebook columns
      * ``GradebookColumnGradebookSession:`` a session for retrieving gradebook column to gradebook mappings
      * ``GradebookColumnGradebookAssignmentSession:`` a session for managing gradebook column to gradebook mappings
      * ``GradebookColumnSmartGradebookSession:`` a session for managing smart gradebooks of gradebook columns
      * ``GradebookLookupSession:`` a session to look up gradebooks
      * ``GradebookQuerySession:`` a session to query gradebooks
      * ``GradebookSearchSession:`` a session to search gradebooks
      * ``GradebookAdminSession:`` a session to create, modify and delete gradebooks
      * ``GradebookNotificationSession:`` a session to receive messages pertaining to gradebook changes
      * ``GradebookHierarchySession:`` a session to traverse the gradebook hierarchy
      * ``GradebookHierarchyDesignSession:`` a session to manage the gradebook hierarchy

    """

    def __init__(self, proxy=None):
        self._runtime = None
        self._provider_manager = None
        self._provider_sessions = dict()
        self._session_management = AUTOMATIC
        self._gradebook_view = DEFAULT
        # This is to initialize self._proxy
        osid_sessions.OsidSession.__init__(self, proxy)
        self._sub_package_provider_managers = dict()

    def _set_gradebook_view(self, session):
        """Sets the underlying gradebook view to match current view"""
        if self._gradebook_view == COMPARATIVE:
            try:
                session.use_comparative_gradebook_view()
            except AttributeError:
                pass
        else:
            try:
                session.use_plenary_gradebook_view()
            except AttributeError:
                pass

    def _get_provider_session(self, session_name, proxy=None):
        """Gets the session for the provider"""
        agent_key = self._get_agent_key(proxy)
        if session_name in self._provider_sessions[agent_key]:
            return self._provider_sessions[agent_key][session_name]
        else:
            session = self._instantiate_session('get_' + session_name, self._proxy)
            self._set_gradebook_view(session)
            if self._session_management != DISABLED:
                self._provider_sessions[agent_key][session_name] = session
            return session

    def _get_sub_package_provider_manager(self, sub_package_name):
        if sub_package_name in self._sub_package_provider_managers:
            return self._sub_package_provider_managers[sub_package_name]
        config = self._runtime.get_configuration()
        parameter_id = Id('parameter:{0}ProviderImpl@dlkit_service'.format(sub_package_name))
        provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
        if self._proxy is None:
            # need to add version argument
            sub_package = self._runtime.get_manager(sub_package_name.upper(), provider_impl)
        else:
            # need to add version argument
            sub_package = self._runtime.get_proxy_manager(sub_package_name.upper(), provider_impl)
        self._sub_package_provider_managers[sub_package_name] = sub_package
        return sub_package

    def _get_sub_package_provider_session(self, sub_package, session_name, proxy=None):
        """Gets the session from a sub-package"""
        agent_key = self._get_agent_key(proxy)
        if session_name in self._provider_sessions[agent_key]:
            return self._provider_sessions[agent_key][session_name]
        else:
            manager = self._get_sub_package_provider_manager(sub_package)
            session = self._instantiate_session('get_' + session_name + '_for_gradebook',
                                                proxy=self._proxy,
                                                manager=manager)
            self._set_gradebook_view(session)
            if self._session_management != DISABLED:
                self._provider_sessions[agent_key][session_name] = session
            return session

    def _instantiate_session(self, method_name, proxy=None, *args, **kwargs):
        """Instantiates a provider session"""
        session_class = getattr(self._provider_manager, method_name)
        if proxy is None:
            try:
                return session_class(gradebook_id=self._catalog_id, *args, **kwargs)
            except AttributeError:
                return session_class(*args, **kwargs)
        else:
            try:
                return session_class(gradebook_id=self._catalog_id, proxy=proxy, *args, **kwargs)
            except AttributeError:
                return session_class(proxy=proxy, *args, **kwargs)

    def initialize(self, runtime):
        """OSID Manager initialize"""
        from .primitives import Id
        if self._runtime is not None:
            raise IllegalState('Manager has already been initialized')
        self._runtime = runtime
        config = runtime.get_configuration()
        parameter_id = Id('parameter:gradingProviderImpl@dlkit_service')
        provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
        if self._proxy is None:
            # need to add version argument
            self._provider_manager = runtime.get_manager('GRADING', provider_impl)
        else:
            # need to add version argument
            self._provider_manager = runtime.get_proxy_manager('GRADING', provider_impl)

    def close_sessions(self):
        """Close all sessions, unless session management is set to MANDATORY"""
        if self._session_management != MANDATORY:
            self._provider_sessions = dict()

    def use_automatic_session_management(self):
        """Session state will be saved unless closed by consumers"""
        self._session_management = AUTOMATIC

    def use_mandatory_session_management(self):
        """Session state will be saved and can not be closed by consumers"""
        self._session_management = MANDATORY

    def disable_session_management(self):
        """Session state will never be saved"""
        self._session_management = DISABLED
        self.close_sessions()
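    # Illustration (hedged): how a consumer is expected to drive the session
    # management switches above on an initialized GradingManager.
    #
    #     manager.use_automatic_session_management()  # cache provider sessions
    #     # ... issue lookup/query calls through the manager ...
    #     manager.close_sessions()                    # tear the cache down
    #
    #     manager.disable_session_management()        # or never cache at all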
:return: a ``GradingCalculationManager`` :rtype: ``osid.grading.calculation.GradingCalculationManager`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``Unimplemented`` -- ``supports_grading_calculation() is false`` *compliance: optional -- This method must be implemented if ``supports_grading_calculation()`` is true.* """ return # osid.grading.calculation.GradingCalculationManager grading_calculation_manager = property(fget=get_grading_calculation_manager) def get_grading_transform_manager(self): """Gets the ``GradingTransformManager``. :return: a ``GradingTransformManager`` :rtype: ``osid.grading.transform.GradingTransformManager`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``Unimplemented`` -- ``supports_grading_transform() is false`` *compliance: optional -- This method must be implemented if ``supports_grading_transform()`` is true.* """ return # osid.grading.transform.GradingTransformManager grading_transform_manager = property(fget=get_grading_transform_manager) ## # The following methods are from osid.grading.GradebookColumnLookupSession def get_gradebook_id(self): """Gets the ``Gradebook`` ``Id`` associated with this session. :return: the ``Gradebook Id`` associated with this session :rtype: ``osid.id.Id`` *compliance: mandatory -- This method must be implemented.* """ return # osid.id.Id gradebook_id = property(fget=get_gradebook_id) def get_gradebook(self): """Gets the ``Gradebook`` associated with this session. :return: the ``Gradebook`` associated with this session :rtype: ``osid.grading.Gradebook`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.Gradebook gradebook = property(fget=get_gradebook) def can_lookup_gradebook_columns(self): """Tests if this user can perform ``GradebookColumn`` lookups. A return of true does not guarantee successful authorization. A return of false indicates that it is known all methods in this session will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer lookup operations to unauthorized users. :return: ``false`` if lookup methods are not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def use_comparative_gradebook_column_view(self): """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error. This view is used when greater interoperability is desired at the expense of precision. *compliance: mandatory -- This method is must be implemented.* """ pass def use_plenary_gradebook_column_view(self): """A complete view of the ``GradebookColumn`` returns is desired. Methods will return what is requested or result in an error. This view is used when greater precision is desired at the expense of interoperability. *compliance: mandatory -- This method is must be implemented.* """ pass def use_federated_gradebook_view(self): """Federates the view for methods in this session. A federated view will include gradebook columns in gradebooks which are children of this gradebook in the gradebook hierarchy. *compliance: mandatory -- This method is must be implemented.* """ pass def use_isolated_gradebook_view(self): """Isolates the view for methods in this session. An isolated view restricts lookups to this gradebook only. 
        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook_column(self, gradebook_column_id):
        """Gets the ``GradebookColumn`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``GradebookColumn`` may have a
        different ``Id`` than requested, such as the case where a
        duplicate ``Id`` was assigned to a ``GradebookColumn`` and
        retained for compatibility.

        :param gradebook_column_id: ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column
        :rtype: ``osid.grading.GradebookColumn``
        :raise: ``NotFound`` -- ``gradebook_column_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumn

    def get_gradebook_columns_by_ids(self, gradebook_column_ids):
        """Gets a ``GradebookColumnList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the gradebook
        columns specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible gradebook columns may be omitted from the list.

        :param gradebook_column_ids: the list of ``Ids`` to retrieve
        :type gradebook_column_ids: ``osid.id.IdList``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_genus_type(self, gradebook_column_genus_type):
        """Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which does not include gradebook columns of genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_genus_type: a gradebook column genus type
        :type gradebook_column_genus_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_parent_genus_type(self, gradebook_column_genus_type):
        """Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` and include any additional columns with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.
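
        Example (an illustrative sketch, not part of the OSID
        specification; ``derived_type`` is a hypothetical
        ``osid.type.Type``)::

            columns = session.get_gradebook_columns_by_parent_genus_type(derived_type)
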
        :param gradebook_column_genus_type: a gradebook column genus type
        :type gradebook_column_genus_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_record_type(self, gradebook_column_record_type):
        """Gets a ``GradebookColumnList`` containing the given gradebook column record ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_record_type: a gradebook column record type
        :type gradebook_column_record_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns(self):
        """Gets all gradebook columns.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :return: a ``GradebookColumnList``
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    gradebook_columns = property(fget=get_gradebook_columns)

    def supports_summary(self):
        """Tests if a summary entry is available.

        :return: ``true`` if a summary entry is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_column_summary(self, gradebook_column_id):
        """Gets the ``GradebookColumnSummary`` for summary results.

        :param gradebook_column_id: ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column summary
        :rtype: ``osid.grading.GradebookColumnSummary``
        :raise: ``NotFound`` -- ``gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``has_summary()`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnSummary

    ##
    # The following methods are from osid.grading.GradebookColumnQuerySession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.
        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_search_gradebook_columns(self):
        """Tests if this user can perform ``GradebookColumn`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include gradebook columns in gradebooks
        which are children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook_column_query(self):
        """Gets a gradebook column query.

        :return: the gradebook column query
        :rtype: ``osid.grading.GradebookColumnQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnQuery

    gradebook_column_query = property(fget=get_gradebook_column_query)

    def get_gradebook_columns_by_query(self, gradebook_column_query):
        """Gets a list of gradebook columns matching the given query.

        :param gradebook_column_query: the gradebook column query
        :type gradebook_column_query: ``osid.grading.GradebookColumnQuery``
        :return: the returned ``GradebookColumnList``
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_column_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    ##
    # The following methods are from osid.grading.GradebookColumnAdminSession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_create_gradebook_columns(self):
        """Tests if this user can create gradebook columns.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a gradebook
        column will result in a ``PermissionDenied``.
This is intended as a hint to an application that may opt not to offer create operations to an unauthorized user. :return: ``false`` if ``GradebookColumn`` creation is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def can_create_gradebook_column_with_record_types(self, gradebook_column_record_types): """Tests if this user can create a single ``GradebookColumn`` using the desired record types. While ``GradingManager.getGradebookColumnRecordTypes()`` can be used to examine which records are supported, this method tests which record(s) are required for creating a specific ``GradebookColumn``. Providing an empty array tests if a ``GradebookColumn`` can be created with no records. :param gradebook_column_record_types: array of gradebook column record types :type gradebook_column_record_types: ``osid.type.Type[]`` :return: ``true`` if ``GradebookColumn`` creation using the specified record ``Types`` is supported, ``false`` otherwise :rtype: ``boolean`` :raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_gradebook_column_form_for_create(self, gradebook_column_record_types): """Gets the gradebook column form for creating new gradebook columns. A new form should be requested for each create transaction. :param gradebook_column_record_types: array of gradebook column record types :type gradebook_column_record_types: ``osid.type.Type[]`` :return: the gradebook column form :rtype: ``osid.grading.GradebookColumnForm`` :raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- unable to get form for requested record types *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookColumnForm def create_gradebook_column(self, gradebook_column_form): """Creates a new ``GradebookColumn``. :param gradebook_column_form: the form for this ``GradebookColumn`` :type gradebook_column_form: ``osid.grading.GradebookColumnForm`` :return: the new ``GradebookColumn`` :rtype: ``osid.grading.GradebookColumn`` :raise: ``IllegalState`` -- ``gradebook_column_form`` already used in a create transaction :raise: ``InvalidArgument`` -- one or more of the form elements is invalid :raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookColumn def can_update_gradebook_columns(self): """Tests if this user can update gradebook columns. A return of true does not guarantee successful authorization. A return of false indicates that it is known updating a ``GradebookColumn`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer update operations to an unauthorized user. 
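
        Example of the intended hint pattern (an illustrative sketch,
        not part of the OSID specification; ``show_edit_controls`` is a
        hypothetical application hook)::

            if session.can_update_gradebook_columns():
                show_edit_controls()
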
        :return: ``false`` if gradebook column modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_column_form_for_update(self, gradebook_column_id):
        """Gets the gradebook column form for updating an existing gradebook column.

        A new gradebook column form should be requested for each update
        transaction.

        :param gradebook_column_id: the ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column form
        :rtype: ``osid.grading.GradebookColumnForm``
        :raise: ``NotFound`` -- ``gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnForm

    def update_gradebook_column(self, gradebook_column_form):
        """Updates an existing gradebook column.

        :param gradebook_column_form: the form containing the elements to be updated
        :type gradebook_column_form: ``osid.grading.GradebookColumnForm``
        :raise: ``IllegalState`` -- ``gradebook_column_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def sequence_gradebook_columns(self, gradebook_column_ids):
        """Resequences the gradebook columns.

        :param gradebook_column_ids: the ``Ids`` of the ``GradebookColumns``
        :type gradebook_column_ids: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def move_gradebook_column(self, front_gradebook_column_id, back_gradebook_column_id):
        """Moves a gradebook column in front of another.

        :param front_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type front_gradebook_column_id: ``osid.id.Id``
        :param back_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type back_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def copy_gradebook_column_entries(self, source_gradebook_column_id, target_gradebook_column_id):
        """Copies gradebook column entries from one column to another.

        If the target grade column grade system differs from the source,
        the grades in the entries are transformed to the new grade
        system.
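
        Example (an illustrative sketch, not part of the OSID
        specification; the two ``Id`` values are hypothetical)::

            session.copy_gradebook_column_entries(midterm_column_id,
                                                  final_column_id)
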
        :param source_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type source_gradebook_column_id: ``osid.id.Id``
        :param target_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type target_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_gradebook_columns(self):
        """Tests if this user can delete gradebook columns.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``GradebookColumn`` will result in a ``PermissionDenied``. This
        is intended as a hint to an application that may opt not to
        offer delete operations to an unauthorized user.

        :return: ``false`` if ``GradebookColumn`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def delete_gradebook_column(self, gradebook_column_id):
        """Deletes the ``GradebookColumn`` identified by the given ``Id``.

        :param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` to delete
        :type gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- a ``GradebookColumn`` was not found identified by the given ``Id``
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_gradebook_column_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``GradebookColumns``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``GradebookColumn`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def alias_gradebook_column(self, gradebook_column_id, alias_id):
        """Adds an ``Id`` to a ``GradebookColumn`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``GradebookColumn`` is determined by
        the provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another gradebook column,
        it is reassigned to the given gradebook column ``Id``.

        :param gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``gradebook_column_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    ##
    # The following methods are from osid.grading.GradebookLookupSession

    def can_lookup_gradebooks(self):
        """Tests if this user can perform ``Gradebook`` lookups.

        A return of true does not guarantee successful authorization.
        A return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_comparative_gradebook_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_gradebook_view(self):
        """A complete view of the ``Gradebook`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook(self, gradebook_id):
        """Gets the ``Gradebook`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``Gradebook`` may have a
        different ``Id`` than requested, such as the case where a
        duplicate ``Id`` was assigned to a ``Gradebook`` and retained
        for compatibility.

        :param gradebook_id: ``Id`` of the ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :return: the gradebook
        :rtype: ``osid.grading.Gradebook``
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    def get_gradebooks_by_ids(self, gradebook_ids):
        """Gets a ``GradebookList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the
        gradebooks specified in the ``Id`` list, in the order of the
        list, including duplicates, or an error results if an ``Id`` in
        the supplied list is not found or inaccessible. Otherwise,
        inaccessible ``Gradebook`` objects may be omitted from the list
        and may present the elements in any order including returning a
        unique set.

        :param gradebook_ids: the list of ``Ids`` to retrieve
        :type gradebook_ids: ``osid.id.IdList``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_genus_type(self, gradebook_genus_type):
        """Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which does not include gradebooks of types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.
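
        Example (an illustrative sketch, not part of the OSID
        specification; ``course_genus_type`` is a hypothetical
        ``osid.type.Type``)::

            gradebooks = session.get_gradebooks_by_genus_type(course_genus_type)
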
        :param gradebook_genus_type: a gradebook genus type
        :type gradebook_genus_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_parent_genus_type(self, gradebook_genus_type):
        """Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` and include any additional gradebooks with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param gradebook_genus_type: a gradebook genus type
        :type gradebook_genus_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_record_type(self, gradebook_record_type):
        """Gets a ``GradebookList`` containing the given gradebook record ``Type``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param gradebook_record_type: a gradebook record type
        :type gradebook_record_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks_by_provider(self, resource_id):
        """Gets a ``GradebookList`` for the given provider.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.

        :param resource_id: a resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookList

    def get_gradebooks(self):
        """Gets all ``Gradebooks``.

        In plenary mode, the returned list contains all known gradebooks
        or an error results. Otherwise, the returned list may contain
        only those gradebooks that are accessible through this session.
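
        Example (an illustrative sketch, not part of the OSID
        specification; ``get_next_gradebook()`` follows the usual
        ``OsidList`` iteration pattern)::

            gradebooks = session.get_gradebooks()
            while gradebooks.has_next():
                print(gradebooks.get_next_gradebook().get_display_name())
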
:return: a ``GradebookList`` :rtype: ``osid.grading.GradebookList`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookList gradebooks = property(fget=get_gradebooks) ## # The following methods are from osid.grading.GradebookAdminSession def can_create_gradebooks(self): """Tests if this user can create ``Gradebooks``. A return of true does not guarantee successful authorization. A return of false indicates that it is known creating a ``Gradebook`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may not wish to offer create operations to unauthorized users. :return: ``false`` if ``Gradebook`` creation is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def can_create_gradebook_with_record_types(self, gradebook_record_types): """Tests if this user can create a single ``Gradebook`` using the desired record types. While ``GradingManager.getGradebookRecordTypes()`` can be used to examine which records are supported, this method tests which record(s) are required for creating a specific ``Gradebook``. Providing an empty array tests if a ``Gradebook`` can be created with no records. :param gradebook_record_types: array of gradebook record types :type gradebook_record_types: ``osid.type.Type[]`` :return: ``true`` if ``Gradebook`` creation using the specified ``Types`` is supported, ``false`` otherwise :rtype: ``boolean`` :raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_gradebook_form_for_create(self, gradebook_record_types): """Gets the gradebook form for creating new gradebooks. A new form should be requested for each create transaction. :param gradebook_record_types: array of gradebook record types :type gradebook_record_types: ``osid.type.Type[]`` :return: the gradebook form :rtype: ``osid.grading.GradebookForm`` :raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- unable to get form for requested record types *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookForm def create_gradebook(self, gradebook_form): """Creates a new ``Gradebook``. :param gradebook_form: the form for this ``Gradebook`` :type gradebook_form: ``osid.grading.GradebookForm`` :return: the new ``Gradebook`` :rtype: ``osid.grading.Gradebook`` :raise: ``IllegalState`` -- ``gradebook_form`` already used in a create transaction :raise: ``InvalidArgument`` -- one or more of the form elements is invalid :raise: ``NullArgument`` -- ``gradebook_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.Gradebook def can_update_gradebooks(self): """Tests if this user can update ``Gradebooks``. A return of true does not guarantee successful authorization. A return of false indicates that it is known updating a ``Gradebook`` will result in a ``PermissionDenied``. 
        This is intended as a hint to an application that may not wish
        to offer update operations to unauthorized users.

        :return: ``false`` if ``Gradebook`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_form_for_update(self, gradebook_id):
        """Gets the gradebook form for updating an existing gradebook.

        A new gradebook form should be requested for each update
        transaction.

        :param gradebook_id: the ``Id`` of the ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :return: the gradebook form
        :rtype: ``osid.grading.GradebookForm``
        :raise: ``NotFound`` -- ``gradebook_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookForm

    def update_gradebook(self, gradebook_form):
        """Updates an existing gradebook.

        :param gradebook_form: the form containing the elements to be updated
        :type gradebook_form: ``osid.grading.GradebookForm``
        :raise: ``IllegalState`` -- ``gradebook_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_gradebooks(self):
        """Tests if this user can delete gradebooks.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``Gradebook`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may not wish to offer
        delete operations to unauthorized users.

        :return: ``false`` if ``Gradebook`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def delete_gradebook(self, gradebook_id):
        """Deletes a ``Gradebook``.

        :param gradebook_id: the ``Id`` of the ``Gradebook`` to remove
        :type gradebook_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_gradebook_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``Gradebook`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def alias_gradebook(self, gradebook_id, alias_id):
        """Adds an ``Id`` to a ``Gradebook`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Gradebook`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``.
        If the alias is a pointer to another gradebook, it is reassigned
        to the given gradebook ``Id``.

        :param gradebook_id: the ``Id`` of a ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass


class GradingProxyManager(osid_managers.OsidProxyManager, GradingProfile):
    """The grading manager provides access to grading sessions and provides interoperability tests for various aspects of this service.

    Methods in this manager accept a ``Proxy`` for passing information
    from server environments. The sessions included in this manager are:

    * ``GradeSystemLookupSession:`` a session to look up grades and grade systems
    * ``GradeSystemQuerySession:`` a session to query grade systems
    * ``GradeSystemSearchSession:`` a session to search grade systems
    * ``GradeSystemAdminSession:`` a session to manage grade systems
    * ``GradeSystemNotificationSession:`` a session for subscribing to new or deleted grades or grade systems
    * ``GradeSystemGradebookSession:`` a session for retrieving grade system to gradebook mappings
    * ``GradeSystemGradebookAssignmentSession:`` a session for managing grade system to gradebook mappings
    * ``GradeSystemSmartGradebookSession:`` a session for managing smart gradebooks of grade systems
    * ``GradeEntryLookupSession:`` a session to look up grade entries
    * ``GradeEntryQuerySession:`` a session to query grade entries
    * ``GradeEntrySearchSession:`` a session to search grade entries
    * ``GradeEntryAdminSession:`` a session to create, modify and delete grade entries
    * ``GradeEntryNotificationSession:`` a session to receive messages pertaining to grade entry changes
    * ``GradebookColumnLookupSession:`` a session to look up gradebook columns
    * ``GradebookColumnQuerySession:`` a session to query gradebook columns
    * ``GradebookColumnSearchSession:`` a session to search gradebook columns
    * ``GradebookColumnAdminSession:`` a session to manage gradebook columns
    * ``GradebookColumnDerivationSession:`` a session to manage derived gradebook columns
    * ``GradebookColumnNotificationSession:`` a session for subscribing to new or deleted gradebook columns
    * ``GradebookColumnGradebookSession:`` a session for retrieving gradebook column to gradebook mappings
    * ``GradebookColumnGradebookAssignmentSession:`` a session for managing gradebook column to gradebook mappings
    * ``GradebookColumnSmartGradebookSession:`` a session for managing smart gradebooks of gradebook columns
    * ``GradebookLookupSession:`` a session to look up gradebooks
    * ``GradebookQuerySession:`` a session to query gradebooks
    * ``GradebookSearchSession:`` a session to search gradebooks
    * ``GradebookAdminSession:`` a session to create, modify and delete gradebooks
    * ``GradebookNotificationSession:`` a session to receive messages pertaining to gradebook changes
    * ``GradebookHierarchySession:`` a session to traverse the gradebook hierarchy
    * ``GradebookHierarchyDesignSession:`` a session to manage the gradebook hierarchy

    """

    def get_grading_batch_proxy_manager(self):
        """Gets the ``GradingBatchProxyManager``.
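
        Example (an illustrative sketch, not part of the OSID
        specification; it assumes ``supports_grading_batch()`` returns
        ``true``)::

            batch_mgr = grading_proxy_manager.get_grading_batch_proxy_manager()
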
        :return: a ``GradingBatchProxyManager``
        :rtype: ``osid.grading.batch.GradingBatchProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_grading_batch() is false``

        *compliance: optional -- This method must be implemented if ``supports_grading_batch()`` is true.*

        """
        return  # osid.grading.batch.GradingBatchProxyManager

    grading_batch_proxy_manager = property(fget=get_grading_batch_proxy_manager)

    def get_grading_calculation_proxy_manager(self):
        """Gets the ``GradingCalculationProxyManager``.

        :return: a ``GradingCalculationProxyManager``
        :rtype: ``osid.grading.calculation.GradingCalculationProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_grading_calculation() is false``

        *compliance: optional -- This method must be implemented if ``supports_grading_calculation()`` is true.*

        """
        return  # osid.grading.calculation.GradingCalculationProxyManager

    grading_calculation_proxy_manager = property(fget=get_grading_calculation_proxy_manager)

    def get_grading_transform_proxy_manager(self):
        """Gets the ``GradingTransformProxyManager``.

        :return: a ``GradingTransformProxyManager``
        :rtype: ``osid.grading.transform.GradingTransformProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_grading_transform() is false``

        *compliance: optional -- This method must be implemented if ``supports_grading_transform()`` is true.*

        """
        return  # osid.grading.transform.GradingTransformProxyManager

    grading_transform_proxy_manager = property(fget=get_grading_transform_proxy_manager)
    ##
    # The following methods are from osid.grading.GradebookColumnLookupSession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_lookup_gradebook_columns(self):
        """Tests if this user can perform ``GradebookColumn`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_comparative_gradebook_column_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_gradebook_column_view(self):
        """A complete view of the ``GradebookColumn`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include gradebook columns in gradebooks
        which are children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook_column(self, gradebook_column_id):
        """Gets the ``GradebookColumn`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``GradebookColumn`` may have a
        different ``Id`` than requested, such as the case where a
        duplicate ``Id`` was assigned to a ``GradebookColumn`` and
        retained for compatibility.

        :param gradebook_column_id: ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column
        :rtype: ``osid.grading.GradebookColumn``
        :raise: ``NotFound`` -- ``gradebook_column_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumn

    def get_gradebook_columns_by_ids(self, gradebook_column_ids):
        """Gets a ``GradebookColumnList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the gradebook
        columns specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible gradebook columns may be omitted from the list.

        :param gradebook_column_ids: the list of ``Ids`` to retrieve
        :type gradebook_column_ids: ``osid.id.IdList``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_genus_type(self, gradebook_column_genus_type):
        """Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which does not include gradebook columns of genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.
        :param gradebook_column_genus_type: a gradebook column genus type
        :type gradebook_column_genus_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_parent_genus_type(self, gradebook_column_genus_type):
        """Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` and include any additional columns with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_genus_type: a gradebook column genus type
        :type gradebook_column_genus_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns_by_record_type(self, gradebook_column_record_type):
        """Gets a ``GradebookColumnList`` containing the given gradebook column record ``Type``.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :param gradebook_column_record_type: a gradebook column record type
        :type gradebook_column_record_type: ``osid.type.Type``
        :return: the returned ``GradebookColumn`` list
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NullArgument`` -- ``gradebook_column_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    def get_gradebook_columns(self):
        """Gets all gradebook columns.

        In plenary mode, the returned list contains all known gradebook
        columns or an error results. Otherwise, the returned list may
        contain only those gradebook columns that are accessible through
        this session.

        :return: a ``GradebookColumnList``
        :rtype: ``osid.grading.GradebookColumnList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnList

    gradebook_columns = property(fget=get_gradebook_columns)

    def supports_summary(self):
        """Tests if a summary entry is available.

        :return: ``true`` if a summary entry is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def get_gradebook_column_summary(self, gradebook_column_id):
        """Gets the ``GradebookColumnSummary`` for summary results.
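
        Example (an illustrative sketch, not part of the OSID
        specification; ``get_mean()`` is an assumed
        ``GradebookColumnSummary`` accessor)::

            summary = session.get_gradebook_column_summary(column_id)
            mean = summary.get_mean()
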
        :param gradebook_column_id: ``Id`` of the ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the gradebook column summary
        :rtype: ``osid.grading.GradebookColumnSummary``
        :raise: ``NotFound`` -- ``gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``has_summary()`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnSummary

    ##
    # The following methods are from osid.grading.GradebookColumnQuerySession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_search_gradebook_columns(self):
        """Tests if this user can perform ``GradebookColumn`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include gradebook columns in gradebooks
        which are children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook_column_query(self):
        """Gets a gradebook column query.

        :return: the gradebook column query
        :rtype: ``osid.grading.GradebookColumnQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.grading.GradebookColumnQuery

    gradebook_column_query = property(fget=get_gradebook_column_query)

    def get_gradebook_columns_by_query(self, gradebook_column_query):
        """Gets a list of gradebook columns matching the given query.
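
        Example (an illustrative sketch, not part of the OSID
        specification; the display-name matcher and match type shown
        are assumptions)::

            query = session.get_gradebook_column_query()
            query.match_display_name('Midterm', exact_string_match_type, True)
            columns = session.get_gradebook_columns_by_query(query)
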
:param gradebook_column_query: the gradebook column query :type gradebook_column_query: ``osid.grading.GradebookColumnQuery`` :return: the returned ``GradebookColumnList`` :rtype: ``osid.grading.GradebookColumnList`` :raise: ``NullArgument`` -- ``gradebook_column_query`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``gradebook_column_query`` is not of this service *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookColumnList ## # The following methods are from osid.grading.GradebookColumnAdminSession def get_gradebook_id(self): """Gets the ``Gradebook`` ``Id`` associated with this session. :return: the ``Gradebook Id`` associated with this session :rtype: ``osid.id.Id`` *compliance: mandatory -- This method must be implemented.* """ return # osid.id.Id gradebook_id = property(fget=get_gradebook_id) def get_gradebook(self): """Gets the ``Gradebook`` associated with this session. :return: the ``Gradebook`` associated with this session :rtype: ``osid.grading.Gradebook`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.Gradebook gradebook = property(fget=get_gradebook) def can_create_gradebook_columns(self): """Tests if this user can create gradebook columns. A return of true does not guarantee successful authorization. A return of false indicates that it is known creating a gradebook column will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer create operations to an unauthorized user. :return: ``false`` if ``GradebookColumn`` creation is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def can_create_gradebook_column_with_record_types(self, gradebook_column_record_types): """Tests if this user can create a single ``GradebookColumn`` using the desired record types. While ``GradingManager.getGradebookColumnRecordTypes()`` can be used to examine which records are supported, this method tests which record(s) are required for creating a specific ``GradebookColumn``. Providing an empty array tests if a ``GradebookColumn`` can be created with no records. :param gradebook_column_record_types: array of gradebook column record types :type gradebook_column_record_types: ``osid.type.Type[]`` :return: ``true`` if ``GradebookColumn`` creation using the specified record ``Types`` is supported, ``false`` otherwise :rtype: ``boolean`` :raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_gradebook_column_form_for_create(self, gradebook_column_record_types): """Gets the gradebook column form for creating new gradebook columns. A new form should be requested for each create transaction. 
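
        Example of the create flow (an illustrative sketch, not part of
        the OSID specification; setting ``display_name`` on the form is
        an assumption)::

            form = session.get_gradebook_column_form_for_create([])
            form.display_name = 'Final Exam'
            column = session.create_gradebook_column(form)
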
:param gradebook_column_record_types: array of gradebook column record types :type gradebook_column_record_types: ``osid.type.Type[]`` :return: the gradebook column form :rtype: ``osid.grading.GradebookColumnForm`` :raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- unable to get form for requested record types *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookColumnForm def create_gradebook_column(self, gradebook_column_form): """Creates a new ``GradebookColumn``. :param gradebook_column_form: the form for this ``GradebookColumn`` :type gradebook_column_form: ``osid.grading.GradebookColumnForm`` :return: the new ``GradebookColumn`` :rtype: ``osid.grading.GradebookColumn`` :raise: ``IllegalState`` -- ``gradebook_column_form`` already used in a create transaction :raise: ``InvalidArgument`` -- one or more of the form elements is invalid :raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookColumn def can_update_gradebook_columns(self): """Tests if this user can update gradebook columns. A return of true does not guarantee successful authorization. A return of false indicates that it is known updating a ``GradebookColumn`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer update operations to an unauthorized user. :return: ``false`` if gradebook column modification is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_gradebook_column_form_for_update(self, gradebook_column_id): """Gets the gradebook column form for updating an existing gradebook column. A new gradebook column form should be requested for each update transaction. :param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` :type gradebook_column_id: ``osid.id.Id`` :return: the gradebook column form :rtype: ``osid.grading.GradebookColumnForm`` :raise: ``NotFound`` -- ``gradebook_column_id`` is not found :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradebookColumnForm def update_gradebook_column(self, gradebook_column_form): """Updates an existing gradebook column. 
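
        Example of the update flow (an illustrative sketch, not part of
        the OSID specification; setting ``display_name`` on the form is
        an assumption)::

            form = session.get_gradebook_column_form_for_update(column_id)
            form.display_name = 'Final Exam (curved)'
            session.update_gradebook_column(form)
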
        :param gradebook_column_form: the form containing the elements to be updated
        :type gradebook_column_form: ``osid.grading.GradebookColumnForm``
        :raise: ``IllegalState`` -- ``gradebook_column_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def sequence_gradebook_columns(self, gradebook_column_ids):
        """Resequences the gradebook columns.

        :param gradebook_column_ids: the ``Ids`` of the ``GradebookColumns``
        :type gradebook_column_ids: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def move_gradebook_column(self, front_gradebook_column_id, back_gradebook_column_id):
        """Moves a gradebook column in front of another.

        :param front_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type front_gradebook_column_id: ``osid.id.Id``
        :param back_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type back_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def copy_gradebook_column_entries(self, source_gradebook_column_id, target_gradebook_column_id):
        """Copies gradebook column entries from one column to another.

        If the target grade column grade system differs from the source,
        the grades in the entries are transformed to the new grade
        system.

        :param source_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type source_gradebook_column_id: ``osid.id.Id``
        :param target_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type target_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_gradebook_columns(self):
        """Tests if this user can delete gradebook columns.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``GradebookColumn`` will result in a ``PermissionDenied``. This
        is intended as a hint to an application that may opt not to
        offer delete operations to an unauthorized user.

        :return: ``false`` if ``GradebookColumn`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    def delete_gradebook_column(self, gradebook_column_id):
        """Deletes the ``GradebookColumn`` identified by the given ``Id``.
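
        Example (an illustrative sketch, not part of the OSID
        specification; guarded by the authorization hint)::

            if session.can_delete_gradebook_columns():
                session.delete_gradebook_column(column_id)
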
        :param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` to delete
        :type gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- a ``GradebookColumn`` was not found identified by the given ``Id``
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_gradebook_column_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``GradebookColumns``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias will
        result in a ``PermissionDenied``. This is intended as a hint to an
        application that may opt not to offer alias operations to an
        unauthorized user.

        :return: ``false`` if ``GradebookColumn`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def alias_gradebook_column(self, gradebook_column_id, alias_id):
        """Adds an ``Id`` to a ``GradebookColumn`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``GradebookColumn`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another gradebook column, it
        is reassigned to the given gradebook column ``Id``.

        :param gradebook_column_id: the ``Id`` of a ``GradebookColumn``
        :type gradebook_column_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``gradebook_column_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    ##
    # The following methods are from osid.grading.GradebookLookupSession

    def can_lookup_gradebooks(self):
        """Tests if this user can perform ``Gradebook`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended as
        a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def use_comparative_gradebook_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at the
        expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_gradebook_view(self):
        """A complete view of the ``Gradebook`` returns is desired.

        Methods will return what is requested or result in an error. This
        view is used when greater precision is desired at the expense of
        interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_gradebook(self, gradebook_id):
        """Gets the ``Gradebook`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results.
        Otherwise, the returned ``Gradebook`` may have a different ``Id``
        than requested, such as the case where a duplicate ``Id`` was
        assigned to a ``Gradebook`` and retained for compatibility.

        :param gradebook_id: ``Id`` of the ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :return: the gradebook
        :rtype: ``osid.grading.Gradebook``
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    def get_gradebooks_by_ids(self, gradebook_ids):
        """Gets a ``GradebookList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the gradebooks
        specified in the ``Id`` list, in the order of the list, including
        duplicates, or an error results if an ``Id`` in the supplied list
        is not found or inaccessible. Otherwise, inaccessible ``Gradebook``
        objects may be omitted from the list and may present the elements
        in any order including returning a unique set.

        :param gradebook_ids: the list of ``Ids`` to retrieve
        :type gradebook_ids: ``osid.id.IdList``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookList

    def get_gradebooks_by_genus_type(self, gradebook_genus_type):
        """Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which does not include gradebooks of types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebooks or
        an error results. Otherwise, the returned list may contain only
        those gradebooks that are accessible through this session.

        :param gradebook_genus_type: a gradebook genus type
        :type gradebook_genus_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookList

    def get_gradebooks_by_parent_genus_type(self, gradebook_genus_type):
        """Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` and includes any additional gradebooks with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known gradebooks or
        an error results. Otherwise, the returned list may contain only
        those gradebooks that are accessible through this session.

        :param gradebook_genus_type: a gradebook genus type
        :type gradebook_genus_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookList

    def get_gradebooks_by_record_type(self, gradebook_record_type):
        """Gets a ``GradebookList`` containing the given gradebook record ``Type``.
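
        Example (illustrative only): iterating the returned list with the
        standard ``OsidList`` protocol, assuming ``lookup_session``
        supports this session's methods and ``a_record_type`` is an
        ``osid.type.Type``::

            gradebooks = lookup_session.get_gradebooks_by_record_type(a_record_type)
            while gradebooks.has_next():
                gradebook = gradebooks.get_next_gradebook()
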
        In plenary mode, the returned list contains all known gradebooks or
        an error results. Otherwise, the returned list may contain only
        those gradebooks that are accessible through this session.

        :param gradebook_record_type: a gradebook record type
        :type gradebook_record_type: ``osid.type.Type``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookList

    def get_gradebooks_by_provider(self, resource_id):
        """Gets a ``GradebookList`` for the given provider.

        In plenary mode, the returned list contains all known gradebooks or
        an error results. Otherwise, the returned list may contain only
        those gradebooks that are accessible through this session.

        :param resource_id: a resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``Gradebook`` list
        :rtype: ``osid.grading.GradebookList``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookList

    def get_gradebooks(self):
        """Gets all ``Gradebooks``.

        In plenary mode, the returned list contains all known gradebooks or
        an error results. Otherwise, the returned list may contain only
        those gradebooks that are accessible through this session.

        :return: a ``GradebookList``
        :rtype: ``osid.grading.GradebookList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookList

    gradebooks = property(fget=get_gradebooks)

    ##
    # The following methods are from osid.grading.GradebookAdminSession

    def can_create_gradebooks(self):
        """Tests if this user can create ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a ``Gradebook``
        will result in a ``PermissionDenied``. This is intended as a hint
        to an application that may not wish to offer create operations to
        unauthorized users.

        :return: ``false`` if ``Gradebook`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def can_create_gradebook_with_record_types(self, gradebook_record_types):
        """Tests if this user can create a single ``Gradebook`` using the desired record types.

        While ``GradingManager.getGradebookRecordTypes()`` can be used to
        examine which records are supported, this method tests which
        record(s) are required for creating a specific ``Gradebook``.
        Providing an empty array tests if a ``Gradebook`` can be created
        with no records.

        :param gradebook_record_types: array of gradebook record types
        :type gradebook_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Gradebook`` creation using the specified ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def get_gradebook_form_for_create(self, gradebook_record_types):
        """Gets the gradebook form for creating new gradebooks.
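
        Example (illustrative only): the two-step create pattern, assuming
        ``admin_session`` supports this session's methods and the form
        exposes the usual ``OsidObjectForm`` setters::

            form = admin_session.get_gradebook_form_for_create([])
            form.set_display_name('Fall Term')  # assumed OsidObjectForm setter
            gradebook = admin_session.create_gradebook(form)
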
        A new form should be requested for each create transaction.

        :param gradebook_record_types: array of gradebook record types
        :type gradebook_record_types: ``osid.type.Type[]``
        :return: the gradebook form
        :rtype: ``osid.grading.GradebookForm``
        :raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookForm

    def create_gradebook(self, gradebook_form):
        """Creates a new ``Gradebook``.

        :param gradebook_form: the form for this ``Gradebook``
        :type gradebook_form: ``osid.grading.GradebookForm``
        :return: the new ``Gradebook``
        :rtype: ``osid.grading.Gradebook``
        :raise: ``IllegalState`` -- ``gradebook_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    def can_update_gradebooks(self):
        """Tests if this user can update ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a ``Gradebook``
        will result in a ``PermissionDenied``. This is intended as a hint
        to an application that may not wish to offer update operations to
        unauthorized users.

        :return: ``false`` if ``Gradebook`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def get_gradebook_form_for_update(self, gradebook_id):
        """Gets the gradebook form for updating an existing gradebook.

        A new gradebook form should be requested for each update
        transaction.

        :param gradebook_id: the ``Id`` of the ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :return: the gradebook form
        :rtype: ``osid.grading.GradebookForm``
        :raise: ``NotFound`` -- ``gradebook_id`` is not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradebookForm

    def update_gradebook(self, gradebook_form):
        """Updates an existing gradebook.

        :param gradebook_form: the form containing the elements to be updated
        :type gradebook_form: ``osid.grading.GradebookForm``
        :raise: ``IllegalState`` -- ``gradebook_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_gradebooks(self):
        """Tests if this user can delete gradebooks.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a ``Gradebook``
        will result in a ``PermissionDenied``.
        This is intended as a hint to an application that may not wish to
        offer delete operations to unauthorized users.

        :return: ``false`` if ``Gradebook`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def delete_gradebook(self, gradebook_id):
        """Deletes a ``Gradebook``.

        :param gradebook_id: the ``Id`` of the ``Gradebook`` to remove
        :type gradebook_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_gradebook_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Gradebooks``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias will
        result in a ``PermissionDenied``. This is intended as a hint to an
        application that may opt not to offer alias operations to an
        unauthorized user.

        :return: ``false`` if ``Gradebook`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def alias_gradebook(self, gradebook_id, alias_id):
        """Adds an ``Id`` to a ``Gradebook`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Gradebook`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another gradebook, it is
        reassigned to the given gradebook ``Id``.

        :param gradebook_id: the ``Id`` of a ``Gradebook``
        :type gradebook_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``gradebook_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass


class Gradebook(osid_objects.OsidCatalog, osid_sessions.OsidSession):
    """A gradebook defines a collection of grade entries."""

    # WILL THIS EVER BE CALLED DIRECTLY - OUTSIDE OF A MANAGER?
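    # Implementation note: this class aggregates the gradebook-scoped
    # provider sessions (lookup, query, admin, and so on) behind a single
    # catalog object. Sessions are created lazily in _get_provider_session()
    # and cached per agent key unless session management is DISABLED.
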
def __init__(self, provider_manager, catalog, runtime, proxy, **kwargs): self._provider_manager = provider_manager self._catalog = catalog self._runtime = runtime osid.OsidObject.__init__(self, self._catalog) # This is to initialize self._object osid.OsidSession.__init__(self, proxy) # This is to initialize self._proxy self._catalog_id = catalog.get_id() self._provider_sessions = kwargs self._session_management = AUTOMATIC self._gradebook_view = DEFAULT self._object_views = dict() self._operable_views = dict() self._containable_views = dict() def _set_gradebook_view(self, session): """Sets the underlying gradebook view to match current view""" if self._gradebook_view == FEDERATED: try: session.use_federated_gradebook_view() except AttributeError: pass else: try: session.use_isolated_gradebook_view() except AttributeError: pass def _set_object_view(self, session): """Sets the underlying object views to match current view""" for obj_name in self._object_views: if self._object_views[obj_name] == PLENARY: try: getattr(session, 'use_plenary_' + obj_name + '_view')() except AttributeError: pass else: try: getattr(session, 'use_comparative_' + obj_name + '_view')() except AttributeError: pass def _set_operable_view(self, session): """Sets the underlying operable views to match current view""" for obj_name in self._operable_views: if self._operable_views[obj_name] == ACTIVE: try: getattr(session, 'use_active_' + obj_name + '_view')() except AttributeError: pass else: try: getattr(session, 'use_any_status_' + obj_name + '_view')() except AttributeError: pass def _set_containable_view(self, session): """Sets the underlying containable views to match current view""" for obj_name in self._containable_views: if self._containable_views[obj_name] == SEQUESTERED: try: getattr(session, 'use_sequestered_' + obj_name + '_view')() except AttributeError: pass else: try: getattr(session, 'use_unsequestered_' + obj_name + '_view')() except AttributeError: pass def _get_provider_session(self, session_name): """Returns the requested provider session. Instantiates a new one if the named session is not already known. 
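
        For example, a consumer-facing method on this catalog might be
        implemented as follows (illustrative sketch only; the exact session
        name string and the lookup method depend on the provider manager's
        getter methods)::

            session = self._get_provider_session('gradebook_column_lookup_session')
            return session.get_gradebook_columns()
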
""" agent_key = self._get_agent_key() if session_name in self._provider_sessions[agent_key]: return self._provider_sessions[agent_key][session_name] else: session_class = getattr(self._provider_manager, 'get_' + session_name + '_for_gradebook') if self._proxy is None: session = session_class(self._catalog.get_id()) else: session = session_class(self._catalog.get_id(), self._proxy) self._set_gradebook_view(session) self._set_object_view(session) self._set_operable_view(session) self._set_containable_view(session) if self._session_management != DISABLED: self._provider_sessions[agent_key][session_name] = session return session def get_gradebook_id(self): """Gets the Id of this gradebook.""" return self._catalog_id def get_gradebook(self): """Strange little method to assure conformance for inherited Sessions.""" return self def get_objective_hierarchy_id(self): """WHAT am I doing here?""" return self._catalog_id def get_objective_hierarchy(self): """WHAT am I doing here?""" return self def __getattr__(self, name): if '_catalog' in self.__dict__: try: return self._catalog[name] except AttributeError: pass raise AttributeError def close_sessions(self): """Close all sessions currently being managed by this Manager to save memory.""" if self._session_management != MANDATORY: self._provider_sessions = dict() else: raise IllegalState() def use_automatic_session_management(self): """Session state will be saved until closed by consumers.""" self._session_management = AUTOMATIC def use_mandatory_session_management(self): """Session state will always be saved and can not be closed by consumers.""" # Session state will be saved and can not be closed by consumers self._session_management = MANDATORY def disable_session_management(self): """Session state will never be saved.""" self._session_management = DISABLED self.close_sessions() def get_gradebook_record(self, gradebook_record_type): """Gets the gradebook record corresponding to the given ``Gradebook`` record ``Type``. This method is used to retrieve an object implementing the requested record. The ``gradebook_record_type`` may be the ``Type`` returned in ``get_record_types()`` or any of its parents in a ``Type`` hierarchy where ``has_record_type(gradebook_record_type)`` is ``true`` . :param gradebook_record_type: a gradebook record type :type gradebook_record_type: ``osid.type.Type`` :return: the gradebook record :rtype: ``osid.grading.records.GradebookRecord`` :raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``Unsupported`` -- ``has_record_type(gradebook_record_type)`` is ``false`` *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.records.GradebookRecord ## # The following methods are from osid.grading.GradeSystemLookupSession def get_gradebook_id(self): """Gets the ``GradeSystem`` ``Id`` associated with this session. :return: the ``GradeSystem Id`` associated with this session :rtype: ``osid.id.Id`` *compliance: mandatory -- This method must be implemented.* """ return # osid.id.Id gradebook_id = property(fget=get_gradebook_id) def get_gradebook(self): """Gets the ``Gradebook`` associated with this session. 

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_lookup_grade_systems(self):
        """Tests if this user can perform ``GradeSystem`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended as
        a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def use_comparative_grade_system_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at the
        expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_grade_system_view(self):
        """A complete view of the ``GradeSystem`` returns is desired.

        Methods will return what is requested or result in an error. This
        view is used when greater precision is desired at the expense of
        interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include grade systems in gradebooks which are
        children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_grade_system(self, grade_system_id):
        """Gets the ``GradeSystem`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``GradeSystem`` may have a
        different ``Id`` than requested, such as the case where a duplicate
        ``Id`` was assigned to a ``GradeSystem`` and retained for
        compatibility.

        :param grade_system_id: ``Id`` of the ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :return: the grade system
        :rtype: ``osid.grading.GradeSystem``
        :raise: ``NotFound`` -- ``grade_system_id`` not found
        :raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystem

    def get_grade_system_by_grade(self, grade_id):
        """Gets the ``GradeSystem`` by a ``Grade`` ``Id``.

        :param grade_id: ``Id`` of a ``Grade``
        :type grade_id: ``osid.id.Id``
        :return: the grade system
        :rtype: ``osid.grading.GradeSystem``
        :raise: ``NotFound`` -- ``grade_id`` not found
        :raise: ``NullArgument`` -- ``grade_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystem

    def get_grade_systems_by_ids(self, grade_system_ids):
        """Gets a ``GradeSystemList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the systems
        specified in the ``Id`` list, in the order of the list, including
        duplicates, or an error results if an ``Id`` in the supplied list
        is not found or inaccessible. Otherwise, inaccessible
        ``GradeSystems`` may be omitted from the list and may present the
        elements in any order including returning a unique set.

        :param grade_system_ids: the list of ``Ids`` to retrieve
        :type grade_system_ids: ``osid.id.IdList``
        :return: the returned ``GradeSystem`` list
        :rtype: ``osid.grading.GradeSystemList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``grade_system_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystemList

    def get_grade_systems_by_genus_type(self, grade_system_genus_type):
        """Gets a ``GradeSystemList`` corresponding to the given grade system genus ``Type`` which does not include systems of genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known systems or an
        error results. Otherwise, the returned list may contain only those
        systems that are accessible through this session.

        :param grade_system_genus_type: a grade system genus type
        :type grade_system_genus_type: ``osid.type.Type``
        :return: the returned ``GradeSystem`` list
        :rtype: ``osid.grading.GradeSystemList``
        :raise: ``NullArgument`` -- ``grade_system_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystemList

    def get_grade_systems_by_parent_genus_type(self, grade_system_genus_type):
        """Gets a ``GradeSystemList`` corresponding to the given grade system genus ``Type`` and includes any additional systems with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known systems or an
        error results. Otherwise, the returned list may contain only those
        systems that are accessible through this session.

        :param grade_system_genus_type: a grade system genus type
        :type grade_system_genus_type: ``osid.type.Type``
        :return: the returned ``GradeSystem`` list
        :rtype: ``osid.grading.GradeSystemList``
        :raise: ``NullArgument`` -- ``grade_system_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystemList

    def get_grade_systems_by_record_type(self, grade_system_record_type):
        """Gets a ``GradeSystemList`` containing the given grade record ``Type``.

        In plenary mode, the returned list contains all known systems or an
        error results.
        Otherwise, the returned list may contain only those systems that
        are accessible through this session.

        :param grade_system_record_type: a grade system record type
        :type grade_system_record_type: ``osid.type.Type``
        :return: the returned ``GradeSystem`` list
        :rtype: ``osid.grading.GradeSystemList``
        :raise: ``NullArgument`` -- ``grade_system_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystemList

    def get_grade_systems(self):
        """Gets all ``GradeSystems``.

        In plenary mode, the returned list contains all known grade systems
        or an error results. Otherwise, the returned list may contain only
        those grade systems that are accessible through this session.

        :return: a ``GradeSystemList``
        :rtype: ``osid.grading.GradeSystemList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystemList

    grade_systems = property(fget=get_grade_systems)

    ##
    # The following methods are from osid.grading.GradeSystemQuerySession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_search_grade_systems(self):
        """Tests if this user can perform ``GradeSystem`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended as
        a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include grade systems in gradebooks which are
        children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_grade_system_query(self):
        """Gets a grade system query.

        :return: a grade system query
        :rtype: ``osid.grading.GradeSystemQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeSystemQuery

    grade_system_query = property(fget=get_grade_system_query)

    def get_grade_systems_by_query(self, grade_system_query):
        """Gets a list of ``GradeSystem`` objects matching the given grade system query.
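
        Example (illustrative only): building and running a query, assuming
        ``query_session`` supports this session's methods and
        ``a_string_match_type`` is an ``osid.type.Type`` for string
        matching (``match_display_name`` is the usual ``OsidObjectQuery``
        matcher)::

            query = query_session.get_grade_system_query()
            query.match_display_name('Letter*', a_string_match_type, True)
            systems = query_session.get_grade_systems_by_query(query)
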
:param grade_system_query: the grade system query :type grade_system_query: ``osid.grading.GradeSystemQuery`` :return: the returned ``GradeSystemList`` :rtype: ``osid.grading.GradeSystemList`` :raise: ``NullArgument`` -- ``grade_system_query`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``grade_system_query`` is not of this service *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeSystemList ## # The following methods are from osid.grading.GradeSystemAdminSession def get_gradebook_id(self): """Gets the ``Gradebook`` ``Id`` associated with this session. :return: the ``Gradebook Id`` associated with this session :rtype: ``osid.id.Id`` *compliance: mandatory -- This method must be implemented.* """ return # osid.id.Id gradebook_id = property(fget=get_gradebook_id) def get_gradebook(self): """Gets the ``Gradebook`` associated with this session. :return: the ``Gradebook`` associated with this session :rtype: ``osid.grading.Gradebook`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.Gradebook gradebook = property(fget=get_gradebook) def can_create_grade_systems(self): """Tests if this user can create ``GradeSystems``. A return of true does not guarantee successful authorization. A return of false indicates that it is known creating a ``GradeSystem`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may not wish to offer create operations to unauthorized users. :return: ``false`` if ``GradeSystem`` creation is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def can_create_grade_system_with_record_types(self, grade_system_record_types): """Tests if this user can create a single ``GradeSystem`` using the desired record types. While ``GradingManager.getGradeSystemRecordTypes()`` can be used to examine which records are supported, this method tests which record(s) are required for creating a specific ``GradeSystem``. Providing an empty array tests if a ``GradeSystem`` can be created with no records. :param grade_system_record_types: array of grade system types :type grade_system_record_types: ``osid.type.Type[]`` :return: ``true`` if ``GradeSystem`` creation using the specified ``Types`` is supported, ``false`` otherwise :rtype: ``boolean`` :raise: ``NullArgument`` -- ``grade_system_record_types`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_grade_system_form_for_create(self, grade_system_record_types): """Gets the grade system form for creating new grade systems. A new form should be requested for each create transaction. 
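
        Example (illustrative only): creating a grade system with no
        records, assuming ``admin_session`` supports this session's methods
        and the form exposes the usual ``OsidObjectForm`` setters::

            form = admin_session.get_grade_system_form_for_create([])
            form.set_display_name('Pass/Fail')  # assumed OsidObjectForm setter
            grade_system = admin_session.create_grade_system(form)
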
:param grade_system_record_types: array of grade system types :type grade_system_record_types: ``osid.type.Type[]`` :return: the grade system form :rtype: ``osid.grading.GradeSystemForm`` :raise: ``NullArgument`` -- ``grade_system_record_types`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- unable to get form for requested record types *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeSystemForm def create_grade_system(self, grade_system_form): """Creates a new ``GradeSystem``. :param grade_system_form: the form for this ``GradeSystem`` :type grade_system_form: ``osid.grading.GradeSystemForm`` :return: the new ``GradeSystem`` :rtype: ``osid.grading.GradeSystem`` :raise: ``IllegalState`` -- ``grade_system_form`` already used in a create transaction :raise: ``InvalidArgument`` -- one or more of the form elements is invalid :raise: ``NullArgument`` -- ``grade_system_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``grade_system_form`` did not originate from ``get_grade_system_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeSystem def can_update_grade_systems(self): """Tests if this user can update ``GradeSystems``. A return of true does not guarantee successful authorization. A return of false indicates that it is known updating a ``GradeSystem`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may not wish to offer update operations to unauthorized users. :return: ``false`` if ``GradeSystem`` modification is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_grade_system_form_for_update(self, grade_system_id): """Gets the grade system form for updating an existing grade system. A new grade system form should be requested for each update transaction. :param grade_system_id: the ``Id`` of the ``GradeSystem`` :type grade_system_id: ``osid.id.Id`` :return: the grade system form :rtype: ``osid.grading.GradeSystemForm`` :raise: ``NotFound`` -- ``grade_system_id`` is not found :raise: ``NullArgument`` -- ``grade_system_id`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeSystemForm def update_grade_system(self, grade_system_form): """Updates an existing grade system. :param grade_system_form: the form containing the elements to be updated :type grade_system_form: ``osid.grading.GradeSystemForm`` :raise: ``IllegalState`` -- ``grade_system_form`` already used in an update transaction :raise: ``InvalidArgument`` -- the form contains an invalid value :raise: ``NullArgument`` -- ``grade_system_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``grade_system_form`` did not originate from ``get_grade_system_form_for_update()`` *compliance: mandatory -- This method must be implemented.* """ pass def can_delete_grade_systems(self): """Tests if this user can delete grade systems. A return of true does not guarantee successful authorization. 
        A return of false indicates that it is known deleting a
        ``GradeSystem`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may not wish to offer
        delete operations to unauthorized users.

        :return: ``false`` if ``GradeSystem`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def delete_grade_system(self, grade_system_id):
        """Deletes a ``GradeSystem``.

        :param grade_system_id: the ``Id`` of the ``GradeSystem`` to remove
        :type grade_system_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``grade_system_id`` not found
        :raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_grade_system_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``GradeSystems``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias will
        result in a ``PermissionDenied``. This is intended as a hint to an
        application that may opt not to offer alias operations to an
        unauthorized user.

        :return: ``false`` if ``GradeSystem`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def alias_grade_system(self, grade_system_id, alias_id):
        """Adds an ``Id`` to a ``GradeSystem`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``GradeSystem`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another grade system, it is
        reassigned to the given grade system ``Id``.

        :param grade_system_id: the ``Id`` of a ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``grade_system_id`` not found
        :raise: ``NullArgument`` -- ``grade_system_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_create_grades(self, grade_system_id):
        """Tests if this user can create ``Grades`` for a ``GradeSystem``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a ``Grade``
        will result in a ``PermissionDenied``. This is intended as a hint
        to an application that may not wish to offer create operations to
        unauthorized users.

        :param grade_system_id: the ``Id`` of a ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :return: ``false`` if ``Grade`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``grade_system_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def can_create_grade_with_record_types(self, grade_system_id, grade_record_types):
        """Tests if this user can create a single ``Grade`` using the desired record types.

        While ``GradingManager.getGradeRecordTypes()`` can be used to
        examine which records are supported, this method tests which
        record(s) are required for creating a specific ``Grade``. Providing
        an empty array tests if a ``Grade`` can be created with no records.
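
        Example (illustrative only): testing for record-free creation
        before requesting a form, assuming ``admin_session`` supports this
        session's methods and ``grade_system`` is an existing
        ``GradeSystem``::

            if admin_session.can_create_grade_with_record_types(grade_system.get_id(), []):
                form = admin_session.get_grade_form_for_create(grade_system.get_id(), [])
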
        :param grade_system_id: the ``Id`` of a ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :param grade_record_types: array of grade record types
        :type grade_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Grade`` creation using the specified ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``grade_system_id`` or ``grade_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def get_grade_form_for_create(self, grade_system_id, grade_record_types):
        """Gets the grade form for creating new grades.

        A new form should be requested for each create transaction.

        :param grade_system_id: the ``Id`` of a ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :param grade_record_types: array of grade record types
        :type grade_record_types: ``osid.type.Type[]``
        :return: the grade form
        :rtype: ``osid.grading.GradeForm``
        :raise: ``NotFound`` -- ``grade_system_id`` is not found
        :raise: ``NullArgument`` -- ``grade_system_id`` or ``grade_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeForm

    def create_grade(self, grade_form):
        """Creates a new ``Grade``.

        :param grade_form: the form for this ``Grade``
        :type grade_form: ``osid.grading.GradeForm``
        :return: the new ``Grade``
        :rtype: ``osid.grading.Grade``
        :raise: ``IllegalState`` -- ``grade_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``grade_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``grade_form`` did not originate from ``get_grade_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Grade

    def can_update_grades(self, grade_system_id):
        """Tests if this user can update ``Grades``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a ``Grade``
        will result in a ``PermissionDenied``. This is intended as a hint
        to an application that may not wish to offer update operations to
        unauthorized users.

        :param grade_system_id: the ``Id`` of a ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :return: ``false`` if ``Grade`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``grade_system_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def get_grade_form_for_update(self, grade_id):
        """Gets the grade form for updating an existing grade.

        A new grade form should be requested for each update transaction.

        :param grade_id: the ``Id`` of the ``Grade``
        :type grade_id: ``osid.id.Id``
        :return: the grade form
        :rtype: ``osid.grading.GradeForm``
        :raise: ``NotFound`` -- ``grade_id`` is not found
        :raise: ``NullArgument`` -- ``grade_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeForm

    def update_grade(self, grade_form):
        """Updates an existing grade.
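
        Example (illustrative only): the form-based update pattern,
        assuming ``admin_session`` supports this session's methods,
        ``grade`` is an existing ``Grade``, and the form exposes the usual
        ``OsidObjectForm`` setters::

            form = admin_session.get_grade_form_for_update(grade.get_id())
            form.set_display_name('A-')  # assumed OsidObjectForm setter
            admin_session.update_grade(form)
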
        :param grade_form: the form containing the elements to be updated
        :type grade_form: ``osid.grading.GradeForm``
        :raise: ``IllegalState`` -- ``grade_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``grade_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``grade_form`` did not originate from ``get_grade_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_delete_grades(self, grade_system_id):
        """Tests if this user can delete grades.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a ``Grade``
        will result in a ``PermissionDenied``. This is intended as a hint
        to an application that may not wish to offer delete operations to
        unauthorized users.

        :param grade_system_id: the ``Id`` of a ``GradeSystem``
        :type grade_system_id: ``osid.id.Id``
        :return: ``false`` if ``Grade`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``grade_system_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def delete_grade(self, grade_id):
        """Deletes a ``Grade``.

        :param grade_id: the ``Id`` of the ``Grade`` to remove
        :type grade_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``grade_id`` not found
        :raise: ``NullArgument`` -- ``grade_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def can_manage_grade_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Grades``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias will
        result in a ``PermissionDenied``. This is intended as a hint to an
        application that may opt not to offer alias operations to an
        unauthorized user.

        :return: ``false`` if ``Grade`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def alias_grade(self, grade_id, alias_id):
        """Adds an ``Id`` to a ``Grade`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Grade`` is determined by the provider.
        The new ``Id`` performs as an alias to the primary ``Id``. If the
        alias is a pointer to another grade, it is reassigned to the given
        grade ``Id``.

        :param grade_id: the ``Id`` of a ``Grade``
        :type grade_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``grade_id`` not found
        :raise: ``NullArgument`` -- ``grade_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    ##
    # The following methods are from osid.grading.GradeEntryLookupSession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_lookup_grade_entries(self):
        """Tests if this user can perform ``GradeEntry`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended as
        a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def use_comparative_grade_entry_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at the
        expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_plenary_grade_entry_view(self):
        """A complete view of the ``GradeEntry`` returns is desired.

        Methods will return what is requested or result in an error. This
        view is used when greater precision is desired at the expense of
        interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include grade entries in gradebooks which are
        children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_effective_grade_entry_view(self):
        """Only grade entries whose effective dates are current are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_any_effective_grade_entry_view(self):
        """All grade entries of any effective dates are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_grade_entry(self, grade_entry_id):
        """Gets the ``GradeEntry`` specified by its ``Id``.

        :param grade_entry_id: ``Id`` of the ``GradeEntry``
        :type grade_entry_id: ``osid.id.Id``
        :return: the grade entry
        :rtype: ``osid.grading.GradeEntry``
        :raise: ``NotFound`` -- ``grade_entry_id`` not found
        :raise: ``NullArgument`` -- ``grade_entry_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntry

    def get_grade_entries_by_ids(self, grade_entry_ids):
        """Gets a ``GradeEntryList`` corresponding to the given ``IdList``.

        :param grade_entry_ids: the list of ``Ids`` to retrieve
        :type grade_entry_ids: ``osid.id.IdList``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``grade_entry_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_by_genus_type(self, grade_entry_genus_type):
        """Gets a ``GradeEntryList`` corresponding to the given grade entry genus ``Type`` which does not include grade entries of genus types derived from the specified ``Type``.

        :param grade_entry_genus_type: a grade entry genus type
        :type grade_entry_genus_type: ``osid.type.Type``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``grade_entry_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_by_parent_genus_type(self, grade_entry_genus_type):
        """Gets a ``GradeEntryList`` corresponding to the given grade entry genus ``Type`` and includes any additional grade entries with genus types derived from the specified ``Type``.

        :param grade_entry_genus_type: a grade entry genus type
        :type grade_entry_genus_type: ``osid.type.Type``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``grade_entry_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_by_record_type(self, grade_entry_record_type):
        """Gets a ``GradeEntryList`` containing the given grade entry record ``Type``.

        :param grade_entry_record_type: a grade entry record type
        :type grade_entry_record_type: ``osid.type.Type``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``grade_entry_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_on_date(self, from_, to):
        """Gets a ``GradeEntryList`` effective during the entire given date range inclusive but not confined to the date range.

        :param from: start of date range
        :type from: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
        :raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_for_gradebook_column(self, gradebook_column_id):
        """Gets a ``GradeEntryList`` for the gradebook column.
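
        Example (illustrative only): fetching and iterating a column's
        entries with the standard ``OsidList`` protocol, assuming
        ``lookup_session`` supports this session's methods and ``column``
        is an existing ``GradebookColumn``::

            entries = lookup_session.get_grade_entries_for_gradebook_column(column.get_id())
            while entries.has_next():
                entry = entries.get_next_grade_entry()
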
        :param gradebook_column_id: a gradebook column ``Id``
        :type gradebook_column_id: ``osid.id.Id``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_for_gradebook_column_on_date(self, gradebook_column_id, from_, to):
        """Gets a ``GradeEntryList`` for the given gradebook column and effective during the entire given date range inclusive but not confined to the date range.

        :param gradebook_column_id: a gradebook column ``Id``
        :type gradebook_column_id: ``osid.id.Id``
        :param from: start of date range
        :type from: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
        :raise: ``NullArgument`` -- ``gradebook_column_id``, ``from``, or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_for_resource(self, resource_id):
        """Gets a ``GradeEntryList`` for the given key resource.

        :param resource_id: a key resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_for_resource_on_date(self, resource_id, from_, to):
        """Gets a ``GradeEntryList`` for the given key resource and effective during the entire given date range inclusive but not confined to the date range.

        :param resource_id: a resource ``Id``
        :type resource_id: ``osid.id.Id``
        :param from: start of date range
        :type from: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
        :raise: ``NullArgument`` -- ``resource_id``, ``from``, or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_for_gradebook_column_and_resource(self, gradebook_column_id, resource_id):
        """Gets a ``GradeEntryList`` for the gradebook column and key resource.
        :param gradebook_column_id: a gradebook column ``Id``
        :type gradebook_column_id: ``osid.id.Id``
        :param resource_id: a key resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``gradebook_column_id`` or ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_for_gradebook_column_and_resource_on_date(self, gradebook_column_id, resource_id, from_, to):
        """Gets a ``GradeEntryList`` for the given gradebook column and key resource, effective during the entire given date range inclusive but not confined to the date range.

        :param gradebook_column_id: a gradebook column ``Id``
        :type gradebook_column_id: ``osid.id.Id``
        :param resource_id: a key resource ``Id``
        :type resource_id: ``osid.id.Id``
        :param from: start of date range
        :type from: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
        :raise: ``NullArgument`` -- ``gradebook_column_id, resource_id, from, or to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries_by_grader(self, resource_id):
        """Gets a ``GradeEntryList`` for the given grader.

        :param resource_id: a resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``GradeEntry`` list
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    def get_grade_entries(self):
        """Gets all grade entries.

        :return: a ``GradeEntryList``
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    grade_entries = property(fget=get_grade_entries)

    ##
    # The following methods are from osid.grading.GradeEntryQuerySession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_search_grade_entries(self):
        """Tests if this user can perform ``GradeEntry`` searches.

        A return of true does not guarantee successful authorization.
        A return of false indicates that it is known all methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        search operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def use_federated_gradebook_view(self):
        """Federates the view for methods in this session.

        A federated view will include grade entries in gradebooks which
        are children of this gradebook in the gradebook hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def use_isolated_gradebook_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this gradebook only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    def get_grade_entry_query(self):
        """Gets a grade entry query.

        :return: the grade entry query
        :rtype: ``osid.grading.GradeEntryQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryQuery

    grade_entry_query = property(fget=get_grade_entry_query)

    def get_grade_entries_by_query(self, grade_entry_query):
        """Gets a list of entries matching the given grade entry query.

        :param grade_entry_query: the grade entry query
        :type grade_entry_query: ``osid.grading.GradeEntryQuery``
        :return: the returned ``GradeEntryList``
        :rtype: ``osid.grading.GradeEntryList``
        :raise: ``NullArgument`` -- ``grade_entry_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``grade_entry_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryList

    ##
    # The following methods are from osid.grading.GradeEntryAdminSession

    def get_gradebook_id(self):
        """Gets the ``Gradebook`` ``Id`` associated with this session.

        :return: the ``Gradebook Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.id.Id

    gradebook_id = property(fget=get_gradebook_id)

    def get_gradebook(self):
        """Gets the ``Gradebook`` associated with this session.

        :return: the ``Gradebook`` associated with this session
        :rtype: ``osid.grading.Gradebook``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    gradebook = property(fget=get_gradebook)

    def can_create_grade_entries(self):
        """Tests if this user can create grade entries.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a grade
        entry will result in a ``PermissionDenied``. This is intended as
        a hint to an application that may opt not to offer create
        operations to an unauthorized user.

        :return: ``false`` if ``GradeEntry`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def can_create_grade_entry_with_record_types(self, grade_entry_record_types):
        """Tests if this user can create a single ``GradeEntry`` using the desired record types.
        While ``GradingManager.getGradeEntryRecordTypes()`` can be used
        to examine which records are supported, this method tests which
        record(s) are required for creating a specific ``GradeEntry``.
        Providing an empty array tests if a ``GradeEntry`` can be
        created with no records.

        :param grade_entry_record_types: array of grade entry record types
        :type grade_entry_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``GradeEntry`` creation using the specified record ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``grade_entry_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def get_grade_entry_form_for_create(self, gradebook_column_id, resource_id, grade_entry_record_types):
        """Gets the grade entry form for creating new grade entries.

        A new form should be requested for each create transaction.

        :param gradebook_column_id: the gradebook column
        :type gradebook_column_id: ``osid.id.Id``
        :param resource_id: the key resource
        :type resource_id: ``osid.id.Id``
        :param grade_entry_record_types: array of grade entry record types
        :type grade_entry_record_types: ``osid.type.Type[]``
        :return: the grade entry form
        :rtype: ``osid.grading.GradeEntryForm``
        :raise: ``NotFound`` -- ``gradebook_column_id or resource_id`` not found
        :raise: ``NullArgument`` -- ``gradebook_column_id, resource_id,`` or ``grade_entry_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntryForm

    def create_grade_entry(self, grade_entry_form):
        """Creates a new ``GradeEntry``.

        :param grade_entry_form: the form for this ``GradeEntry``
        :type grade_entry_form: ``osid.grading.GradeEntryForm``
        :return: the new ``GradeEntry``
        :rtype: ``osid.grading.GradeEntry``
        :raise: ``IllegalState`` -- ``grade_entry_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``grade_entry_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``grade_entry_form`` did not originate from ``get_grade_entry_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.GradeEntry

    def can_override_calculated_grade_entries(self):
        """Tests if this user can override grade entries calculated from another.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known overriding a grade
        entry will result in a ``PermissionDenied``. This is intended as
        a hint to an application that may opt not to offer override
        operations to an unauthorized user.

        :return: ``false`` if ``GradeEntry`` override is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    def get_grade_entry_form_for_override(self, grade_entry_id, grade_entry_record_types):
        """Gets the grade entry form for overriding calculated grade entries.

        A new form should be requested for each create transaction.
:param grade_entry_id: the ``Id`` of the grade entry to be overridden :type grade_entry_id: ``osid.id.Id`` :param grade_entry_record_types: array of grade entry record types :type grade_entry_record_types: ``osid.type.Type[]`` :return: the grade entry form :rtype: ``osid.grading.GradeEntryForm`` :raise: ``AlreadyExists`` -- ``grade_entry_id`` is already overridden :raise: ``NotFound`` -- ``grade_entry_id`` not found or ``grade_entry_id`` is not a calculated entry :raise: ``NullArgument`` -- ``grade_entry_id`` or ``grade_entry_record_types`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- unable to get form for requested record types *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeEntryForm def override_calculated_grade_entry(self, grade_entry_form): """Creates a new overriding ``GradeEntry``. :param grade_entry_form: the form for this ``GradeEntry`` :type grade_entry_form: ``osid.grading.GradeEntryForm`` :return: the new ``GradeEntry`` :rtype: ``osid.grading.GradeEntry`` :raise: ``IllegalState`` -- ``grade_entry_form`` already used in a create transaction :raise: ``InvalidArgument`` -- one or more of the form elements is invalid :raise: ``NullArgument`` -- ``grade_entry_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``grade_entry_form`` did not originate from ``get_grade_entry_form_for_override()`` *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeEntry def can_update_grade_entries(self): """Tests if this user can update grade entries. A return of true does not guarantee successful authorization. A return of false indicates that it is known updating a ``GradeEntry`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer update operations to an unauthorized user. :return: ``false`` if grade entry modification is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def get_grade_entry_form_for_update(self, grade_entry_id): """Gets the grade entry form for updating an existing entry. A new grade entry form should be requested for each update transaction. :param grade_entry_id: the ``Id`` of the ``GradeEntry`` :type grade_entry_id: ``osid.id.Id`` :return: the grade entry form :rtype: ``osid.grading.GradeEntryForm`` :raise: ``NotFound`` -- ``grade_entry_id`` is not found :raise: ``NullArgument`` -- ``grade_entry_id`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ return # osid.grading.GradeEntryForm def update_grade_entry(self, grade_entry_form): """Updates an existing grade entry. 
:param grade_entry_form: the form containing the elements to be updated :type grade_entry_form: ``osid.grading.GradeEntryForm`` :raise: ``IllegalState`` -- ``grade_entry_form`` already used in an update transaction :raise: ``InvalidArgument`` -- the form contains an invalid value :raise: ``NullArgument`` -- ``grade_entry_form`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure :raise: ``Unsupported`` -- ``grade_entry_form`` did not originate from ``get_grade_entry_form_for_update()`` *compliance: mandatory -- This method must be implemented.* """ pass def can_delete_grade_entries(self): """Tests if this user can delete grade entries. A return of true does not guarantee successful authorization. A return of false indicates that it is known deleting a ``GradeEntry`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer delete operations to an unauthorized user. :return: ``false`` if ``GradeEntry`` deletion is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def delete_grade_entry(self, grade_entry_id): """Deletes the ``GradeEntry`` identified by the given ``Id``. :param grade_entry_id: the ``Id`` of the ``GradeEntry`` to delete :type grade_entry_id: ``osid.id.Id`` :raise: ``NotFound`` -- a ``GradeEntry`` was not found identified by the given ``Id`` :raise: ``NullArgument`` -- ``grade_entry_id`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ pass def can_manage_grade_entry_aliases(self): """Tests if this user can manage ``Id`` aliases for ``GradeEntries``. A return of true does not guarantee successful authorization. A return of false indicates that it is known changing an alias will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer alias operations to an unauthorized user. :return: ``false`` if ``GradeEntry`` aliasing is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.* """ return # boolean def alias_grade_entry(self, grade_entry_id, alias_id): """Adds an ``Id`` to a ``GradeEntry`` for the purpose of creating compatibility. The primary ``Id`` of the ``GradeEntry`` is determined by the provider. The new ``Id`` performs as an alias to the primary ``Id``. If the alias is a pointer to another grade entry, it is reassigned to the given grade entry ``Id``. :param grade_entry_id: the ``Id`` of a ``GradeEntry`` :type grade_entry_id: ``osid.id.Id`` :param alias_id: the alias ``Id`` :type alias_id: ``osid.id.Id`` :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned :raise: ``NotFound`` -- ``grade_entry_id`` not found :raise: ``NullArgument`` -- ``grade_entry_id`` or ``alias_id`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.* """ pass class GradebookList(osid_objects.OsidList): """Like all ``OsidLists,`` ``GradebookList`` provides a means for accessing ``Gradebook`` elements sequentially either one at a time or many at a time. 
    Examples: while (gl.hasNext()) {
         Gradebook gradebook = gl.getNextGradebook();
    }

    or
         while (gl.hasNext()) {
              Gradebook[] gradebooks = gl.getNextGradebooks(gl.available());
         }

    """

    def get_next_gradebook(self):
        """Gets the next ``Gradebook`` in this list.

        :return: the next ``Gradebook`` in this list. The ``has_next()`` method should be used to test that a next ``Gradebook`` is available before calling this method.
        :rtype: ``osid.grading.Gradebook``
        :raise: ``IllegalState`` -- no more elements available in this list
        :raise: ``OperationFailed`` -- unable to complete request

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook

    next_gradebook = property(fget=get_next_gradebook)

    def get_next_gradebooks(self, n):
        """Gets the next set of ``Gradebook`` elements in this list which must be less than or equal to the return from ``available()``.

        :param n: the number of ``Gradebook`` elements requested which must be less than or equal to ``available()``
        :type n: ``cardinal``
        :return: an array of ``Gradebook`` elements. The length of the array is less than or equal to the number specified.
        :rtype: ``osid.grading.Gradebook``
        :raise: ``IllegalState`` -- no more elements available in this list
        :raise: ``OperationFailed`` -- unable to complete request

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.grading.Gradebook
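
# ---------------------------------------------------------------------------
# Editorial usage sketch -- not part of the OSID stubs above. A minimal
# illustration of how a consumer might drive these sessions, assuming some
# concrete provider supplies the ``session`` object; every name bound below
# is hypothetical.

def print_column_entries(session, gradebook_column_id):
    """Drain the ``GradeEntryList`` for one column, per the OsidList contract:
    ``has_next()`` guards every ``get_next_grade_entry()`` call."""
    entries = session.get_grade_entries_for_gradebook_column(gradebook_column_id)
    while entries.has_next():
        entry = entries.get_next_grade_entry()
        print(entry.get_display_name().get_text())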
mit
3,892,039,066,676,046,000
38.593546
193
0.644349
false
4.286328
true
false
false
eckardm/archivematica
src/MCPClient/lib/clientScripts/archivematicaCreateMETSMetadataCSV.py
1
4618
#!/usr/bin/env python2 # # This file is part of Archivematica. # # Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> # # Archivematica is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Archivematica is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Archivematica. If not, see <http://www.gnu.org/licenses/>. # @package Archivematica # @subpackage archivematicaClientScript # @author Joseph Perry <[email protected]> # @version svn: $Id$ #/src/dashboard/src/main/models.py import collections import csv import os import sys import traceback # archivematicaCommon import archivematicaFunctions from custom_handlers import get_script_logger from sharedVariablesAcrossModules import sharedVariablesAcrossModules def parseMetadata(SIPPath): """ Parse all metadata.csv files in SIPPath. Looking for metadata.csvs in metadata/ and objects/metadata/transfers/<transfer name>/metadata/ See parseMetadataCSV for details on parsing. :param SIPPath: Path to the SIP :return: {<filename>: OrderedDict(key: [values]) } """ all_metadata = {} metadata_csvs = archivematicaFunctions.find_metadata_files(SIPPath, 'metadata.csv') for metadataCSVFilePath in metadata_csvs: try: csv_metadata = parseMetadataCSV(metadataCSVFilePath) except Exception: print >>sys.stderr, "error parsing: ", metadataCSVFilePath traceback.print_exc(file=sys.stderr) sharedVariablesAcrossModules.globalErrorCount += 1 continue # Provide warning if this file already has differing metadata # Not using all_metadata.update(csv_metadata) because of that for entry, values in csv_metadata.iteritems(): if entry in all_metadata and all_metadata[entry] != values: print >> sys.stderr, 'Metadata for', entry, 'being updated. Old:', all_metadata[entry], 'New:', values existing = all_metadata.get(entry, collections.OrderedDict()) existing.update(values) all_metadata[entry] = existing return all_metadata def parseMetadataCSV(metadataCSVFilePath): """ Parses the metadata.csv into a dict with entries for each file. Each file's entry is an OrderedDict containing the column header and a list of values for each column. 
    Example CSV:
    Filename,dc.title,dc.type,dc.type,Other metadata
    objects/foo.jpg,Foo,Photograph,Still Image,Taken on a sunny day
    objects/bar/,Bar,Photograph,Still Image,All taken on a rainy day

    Produces:
    {
        'objects/foo.jpg': OrderedDict(dc.title=[Foo], dc.type=[Photograph, Still Image], Other metadata=[Taken on a sunny day]),
        'objects/bar': OrderedDict(dc.title=[Bar], dc.type=[Photograph, Still Image], Other metadata=[All taken on a rainy day])
    }

    :param metadataCSVFilePath: Path to the metadata CSV to parse
    :return: {<filename>: OrderedDict(<metadata name>: [<metadata value>]) }
    """
    metadata = {}
    # use universal newline mode to support unusual newlines, like \r
    with open(metadataCSVFilePath, 'rbU') as f:
        reader = csv.reader(f)
        # Parse first row as header
        header = reader.next()
        # Strip filename column, strip whitespace from header values
        header = [h.strip() for h in header[1:]]
        # Parse data
        for row in reader:
            if not row:
                continue
            entry_name = row[0]
            if entry_name.endswith("/"):
                entry_name = entry_name[:-1]
            # Strip file/dir name from values
            row = row[1:]
            values = archivematicaFunctions.OrderedListsDict(zip(header, row))
            if entry_name in metadata and metadata[entry_name] != values:
                print >> sys.stderr, 'Metadata for', entry_name, 'being overwritten. Old:', metadata[entry_name], 'New:', values
            metadata[entry_name] = values

    return collections.OrderedDict(metadata)  # Return a normal OrderedDict


if __name__ == '__main__':
    logger = get_script_logger("archivematica.mcp.client.createMETSMetadataCSV")

    parseMetadata(sys.argv[1])
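
# ---------------------------------------------------------------------------
# Editorial sketch -- not part of the original script. Using only the
# standard library, it illustrates the repeated-column behaviour that
# archivematicaFunctions.OrderedListsDict is assumed to give parseMetadataCSV:
# values under duplicate headers (e.g. two dc.type columns) accumulate into
# one list per key.

def _demo_repeated_columns():
    import collections
    header = ['dc.title', 'dc.type', 'dc.type']
    row = ['Foo', 'Photograph', 'Still Image']
    values = collections.OrderedDict()
    for key, value in zip(header, row):
        # setdefault keeps first-seen key order while appending repeats
        values.setdefault(key, []).append(value)
    assert values == collections.OrderedDict(
        [('dc.title', ['Foo']), ('dc.type', ['Photograph', 'Still Image'])])
    return values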
agpl-3.0
1,642,396,789,617,826,600
37.165289
128
0.683629
false
4.008681
false
false
false
cyphactor/lifecyclemanager
extra/plugins/userlog/userlog/userlog.py
1
7748
import posixpath

from trac.core import *
from trac.config import *
from trac.config import BoolOption
from trac.web.chrome import ITemplateProvider, \
                            add_stylesheet
from trac.web.main import IRequestHandler
from trac.wiki import wiki_to_html, wiki_to_oneliner
from trac.mimeview import Mimeview, is_binary
from trac.util import escape, Markup
# Added: the plain-text fallback below calls html.PRE(), which otherwise
# raises a NameError because ``html`` was never imported.
from trac.util.html import html
from trac.util.datefmt import format_datetime, pretty_timedelta
from trac.util.text import unicode_urlencode, shorten_line, CRLF
from trac.versioncontrol.diff import get_diff_options, unified_diff
from trac.versioncontrol import Node, Changeset

import re


class UserLogModule(Component):

    implements(IRequestHandler, ITemplateProvider)

    wiki_format_messages = BoolOption('changeset', 'wiki_format_messages',
                                      'true',
        """Whether wiki formatting should be applied to changeset messages.

        If this option is disabled, changeset messages will be rendered as
        pre-formatted text.""")

    # IRequestHandler methods

    def match_request(self, req):
        match = re.match(r'/userlog(?:/(\w+).*|$)', req.path_info)
        if match:
            req.args['user'] = match.group(1) or '/'
            return True

    def process_request(self, req):
        user = req.args.get('user')
        sort = req.args.get('sort', 'ASC')

        db = self.env.get_db_cnx()

        changesets = self._get_userlog(req, db, user, sort)

        toc_links = []
        for rev, _, _, _ in changesets:
            toc_links.append({'anchor': rev,
                              'title': 'Revision %s' % rev})

        changeset_ranges = self._get_changeset_ranges(changesets)
        changeset_links = []
        for start, end in changeset_ranges:
            if start != end:
                title = 'Changeset [%s:%s]' % (start, end)
            else:
                title = 'Changeset [%s]' % start
            link = req.href.changeset(old=start, old_path='/',
                                      new=end, new_path='/')
            changeset_links.append({'href': link,
                                    'title': title})

        req.hdf['user'] = user
        req.hdf['changesets'] = changesets
        req.hdf['toc_links'] = toc_links
        req.hdf['changeset_links'] = changeset_links

        add_stylesheet(req, 'common/css/wiki.css')
        add_stylesheet(req, 'userlog/css/userlog.css')
        return 'userlog.cs', None

    def _get_userlog(self, req, db, user, sort):
        mimeview = Mimeview(self.env)
        repos = self.env.get_repository()
        diff_options = get_diff_options(req)
        cursor = db.cursor()
        # Only two sort directions are valid; whitelisting them -- and binding
        # the author as a query parameter -- avoids the SQL injection that the
        # previous string interpolation allowed.
        if sort.upper() == 'DESC':
            sort = 'DESC'
        else:
            sort = 'ASC'
        cursor.execute("SELECT rev, time, message FROM revision "
                       "WHERE author=%s ORDER BY time " + sort, (user,))
        # Have to sort by time because rev is a text field
        # and sorts lexicographically rather than numerically
        changesets = []
        for rev, time, message in cursor:
            if self.wiki_format_messages:
                message = wiki_to_html(message, self.env, req,
                                       escape_newlines=True)
            else:
                message = html.PRE(message)
            prev = repos.get_node('/', rev).get_previous()
            if prev:
                prev_rev = prev[1]
            else:
                prev_rev = rev
            diffs = []
            changes = repos.get_changes(old_path='/', old_rev=prev_rev,
                                        new_path='/', new_rev=rev)
            for old_node, new_node, kind, change in changes:
                if kind == Node.DIRECTORY:
                    if change == Changeset.ADD:
                        diffs.append(('%s added' % new_node.path, ''))
                    elif change == Changeset.DELETE:
                        diffs.append(('%s deleted' % old_node.path, ''))
                    continue
                new_content = old_content = ''
                new_node_info = old_node_info = ('', '')
                if old_node:
                    old_content = old_node.get_content().read()
                    if is_binary(old_content):
                        continue
                    old_node_info = (old_node.path, old_node.rev)
                    old_content = mimeview.to_unicode(old_content,
                                                      old_node.content_type)
                if new_node:
                    new_content = new_node.get_content().read()
                    if is_binary(new_content):
                        continue
                    new_node_info = (new_node.path, new_node.rev)
                    new_path = new_node.path
                    new_content = mimeview.to_unicode(new_content,
                                                      new_node.content_type)
                else:
                    old_node_path = repos.normalize_path(old_node.path)
diff_old_path = repos.normalize_path('/') new_path = posixpath.join('/', old_node_path[len(diff_old_path)+1:]) if old_content != new_content: context = 3 options = diff_options[1] for option in options: if option.startswith('-U'): context = int(option[2:]) break if not old_node_info[0]: old_node_info = new_node_info # support for 'A'dd changes diff = 'Index: ' + new_path + CRLF diff += '=' * 67 + CRLF diff += '--- %s (revision %s)' % old_node_info + CRLF diff += '+++ %s (revision %s)' % new_node_info + CRLF for line in unified_diff(old_content.splitlines(), new_content.splitlines(), context, ignore_blank_lines='-B' in options, ignore_case='-i' in options, ignore_space_changes='-b' in options): diff += line + CRLF if change == Changeset.ADD: diffs.append(('%s added' % (new_node.path,), diff)) elif change == Changeset.DELETE: diffs.append(('%s deleted' % (old_node.path,), diff)) else: diffs.append(('%s edited' % (new_node.path,), diff)) changesets.append((int(rev), format_datetime(time), message, diffs)) return changesets def _get_changeset_ranges(self, changesets): ranges = [] # will be a list of pairs: (start, end) for rev, _, _, _ in changesets: # if rev is more than two greater than last max # or list is empty if ranges == [] or rev > (ranges[-1][1] + 1): # create a new tuple ranges.append((rev, rev)) # else if rev is greater (by one) than last max elif rev == (ranges[-1][1] + 1): ranges[-1] = (ranges[-1][0], rev) return ranges # ITemplateProvider methods def get_templates_dirs(self): """Return a list of directories containing the provided ClearSilver templates. """ from pkg_resources import resource_filename return [resource_filename(__name__, 'templates')] def get_htdocs_dirs(self): from pkg_resources import resource_filename return [('userlog', resource_filename(__name__, 'htdocs'))]
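
# ---------------------------------------------------------------------------
# Editorial sketch -- not part of the original plugin. It mirrors the
# coalescing logic of UserLogModule._get_changeset_ranges to show how
# consecutive revision numbers collapse into (start, end) pairs while gaps
# open a new pair.

def _demo_changeset_ranges():
    revs = [1, 2, 3, 7, 8, 10]
    ranges = []
    for rev in revs:
        if ranges == [] or rev > ranges[-1][1] + 1:
            ranges.append((rev, rev))           # gap: open a new range
        elif rev == ranges[-1][1] + 1:
            ranges[-1] = (ranges[-1][0], rev)   # consecutive: extend it
    assert ranges == [(1, 3), (7, 8), (10, 10)]
    return ranges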
gpl-3.0
-8,103,132,112,731,082,000
43.786127
88
0.495225
false
4.457998
false
false
false
pcm17/tensorflow
tensorflow/contrib/distributions/python/ops/poisson.py
1
5426
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """The Poisson distribution class.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.distributions.python.ops import distribution from tensorflow.contrib.distributions.python.ops import distribution_util from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import array_ops from tensorflow.python.ops import check_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops __all__ = [ "Poisson", ] _poisson_sample_note = """ Note that the input value must be a non-negative floating point tensor with dtype `dtype` and whose shape can be broadcast with `self.rate`. `x` is only legal if it is non-negative and its components are equal to integer values. """ class Poisson(distribution.Distribution): """Poisson distribution. The Poisson distribution is parameterized by an event `rate` parameter. #### Mathematical Details The probability mass function (pmf) is, ```none pmf(k; lambda, k >= 0) = (lambda^k / k!) / Z Z = exp(lambda). ``` where `rate = lambda` and `Z` is the normalizing constant. """ def __init__(self, rate, validate_args=False, allow_nan_stats=True, name="Poisson"): """Initialize a batch of Poisson distributions. Args: rate: Floating point tensor, the rate parameter of the distribution(s). `rate` must be positive. validate_args: Python `bool`, default `False`. When `True` distribution parameters are checked for validity despite possibly degrading runtime performance. When `False` invalid inputs may silently render incorrect outputs. allow_nan_stats: Python `bool`, default `True`. When `True`, statistics (e.g., mean, mode, variance) use the value "`NaN`" to indicate the result is undefined. When `False`, an exception is raised if one or more of the statistic's batch members are undefined. name: Python `str` name prefixed to Ops created by this class. 
""" parameters = locals() with ops.name_scope(name, values=[rate]) as ns: with ops.control_dependencies([check_ops.assert_positive(rate)] if validate_args else []): self._rate = array_ops.identity(rate, name="rate") super(Poisson, self).__init__( dtype=self._rate.dtype, reparameterization_type=distribution.NOT_REPARAMETERIZED, validate_args=validate_args, allow_nan_stats=allow_nan_stats, parameters=parameters, graph_parents=[self._rate], name=ns) @property def rate(self): """Rate parameter.""" return self._rate def _batch_shape_tensor(self): return array_ops.shape(self.rate) def _batch_shape(self): return self.rate.get_shape() def _event_shape_tensor(self): return constant_op.constant([], dtype=dtypes.int32) def _event_shape(self): return tensor_shape.scalar() @distribution_util.AppendDocstring(_poisson_sample_note) def _log_prob(self, x): return self._log_unnormalized_prob(x) - self._log_normalization() @distribution_util.AppendDocstring(_poisson_sample_note) def _prob(self, x): return math_ops.exp(self._log_prob(x)) @distribution_util.AppendDocstring(_poisson_sample_note) def _log_cdf(self, x): return math_ops.log(self.cdf(x)) @distribution_util.AppendDocstring(_poisson_sample_note) def _cdf(self, x): if self.validate_args: # We set `check_integer=False` since the CDF is defined on whole real # line. x = distribution_util.embed_check_nonnegative_discrete( x, check_integer=False) return math_ops.igammac(math_ops.floor(x + 1), self.rate) def _log_normalization(self): return self.rate def _log_unnormalized_prob(self, x): if self.validate_args: x = distribution_util.embed_check_nonnegative_discrete( x, check_integer=True) return x * math_ops.log(self.rate) - math_ops.lgamma(x + 1) def _mean(self): return array_ops.identity(self.rate) def _variance(self): return array_ops.identity(self.rate) @distribution_util.AppendDocstring( """Note: when `rate` is an integer, there are actually two modes: `rate` and `rate - 1`. In this case we return the larger, i.e., `rate`.""") def _mode(self): return math_ops.floor(self.rate) def _sample_n(self, n, seed=None): return random_ops.random_poisson( self.rate, [n], dtype=self.dtype, seed=seed)
apache-2.0
8,151,053,767,586,295,000
33.56051
80
0.679506
false
3.884037
false
false
false
harlequin/sickbeard
sickbeard/tv.py
1
60124
# Author: Nic Wolfe <[email protected]> # URL: http://code.google.com/p/sickbeard/ # # This file is part of Sick Beard. # # Sick Beard is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Sick Beard is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. from __future__ import with_statement import os.path import datetime import threading import re import glob import sickbeard import xml.etree.cElementTree as etree from name_parser.parser import NameParser, InvalidNameException from lib.tvdb_api import tvdb_api, tvdb_exceptions from sickbeard import db from sickbeard import helpers, exceptions, logger from sickbeard.exceptions import ex from sickbeard import tvrage from sickbeard import config from sickbeard import image_cache from sickbeard import postProcessor from sickbeard import encodingKludge as ek from common import Quality, Overview from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN class TVShow(object): def __init__ (self, tvdbid, lang=""): self.tvdbid = tvdbid self._location = "" self.name = "" self.tvrid = 0 self.tvrname = "" self.network = "" self.genre = "" self.runtime = 0 self.quality = int(sickbeard.QUALITY_DEFAULT) self.seasonfolders = int(sickbeard.SEASON_FOLDERS_DEFAULT) self.status = "" self.airs = "" self.startyear = 0 self.paused = 0 self.air_by_date = 0 self.lang = lang self.lock = threading.Lock() self._isDirGood = False self.episodes = {} otherShow = helpers.findCertainShow(sickbeard.showList, self.tvdbid) if otherShow != None: raise exceptions.MultipleShowObjectsException("Can't create a show if it already exists") self.loadFromDB() self.saveToDB() def _getLocation(self): if ek.ek(os.path.isdir, self._location): return self._location else: raise exceptions.ShowDirNotFoundException("Show folder doesn't exist, you shouldn't be using it") if self._isDirGood: return self._location else: raise exceptions.NoNFOException("Show folder doesn't exist, you shouldn't be using it") def _setLocation(self, newLocation): logger.log(u"Setter sets location to " + newLocation, logger.DEBUG) if ek.ek(os.path.isdir, newLocation): self._location = newLocation self._isDirGood = True else: raise exceptions.NoNFOException("Invalid folder for the show!") location = property(_getLocation, _setLocation) # delete references to anything that's not in the internal lists def flushEpisodes(self): for curSeason in self.episodes: for curEp in self.episodes[curSeason]: myEp = self.episodes[curSeason][curEp] self.episodes[curSeason][curEp] = None del myEp def getEpisode(self, season, episode, file=None, noCreate=False): #return TVEpisode(self, season, episode) if not season in self.episodes: self.episodes[season] = {} ep = None if not episode in self.episodes[season] or self.episodes[season][episode] == None: if noCreate: return None logger.log(str(self.tvdbid) + ": An object for episode " + str(season) + "x" + str(episode) + " didn't exist in the cache, trying to create it", logger.DEBUG) if file != None: ep = TVEpisode(self, season, episode, file) else: ep 
= TVEpisode(self, season, episode) if ep != None: self.episodes[season][episode] = ep return self.episodes[season][episode] def writeShowNFO(self): result = False if not ek.ek(os.path.isdir, self._location): logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation") return False for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_show_metadata(self) or result return result def writeMetadata(self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation") return self.getImages() self.writeShowNFO() self.writeEpisodeNFOs() def writeEpisodeNFOs (self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.tvdbid) + ": Show dir doesn't exist, skipping NFO generation") return logger.log(str(self.tvdbid) + ": Writing NFOs for all episodes") myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid]) for epResult in sqlResults: logger.log(str(self.tvdbid) + ": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(epResult["episode"]), logger.DEBUG) curEp = self.getEpisode(epResult["season"], epResult["episode"]) curEp.createMetaFiles() # find all media files in the show folder and create episodes for as many as possible def loadEpisodesFromDir (self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.tvdbid) + ": Show dir doesn't exist, not loading episodes from disk") return logger.log(str(self.tvdbid) + ": Loading all episodes from the show directory " + self._location) # get file list mediaFiles = helpers.listMediaFiles(self._location) # create TVEpisodes from each media file (if possible) for mediaFile in mediaFiles: curEpisode = None logger.log(str(self.tvdbid) + ": Creating episode from " + mediaFile, logger.DEBUG) try: curEpisode = self.makeEpFromFile(os.path.join(self._location, mediaFile)) except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e: logger.log(u"Episode "+mediaFile+" returned an exception: "+ex(e), logger.ERROR) except exceptions.EpisodeDeletedException: logger.log(u"The episode deleted itself when I tried making an object for it", logger.DEBUG) # store the reference in the show if curEpisode != None: curEpisode.saveToDB() def loadEpisodesFromDB(self): logger.log(u"Loading all episodes from the DB") myDB = db.DBConnection() sql = "SELECT * FROM tv_episodes WHERE showid = ?" 
sqlResults = myDB.select(sql, [self.tvdbid]) scannedEps = {} ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if self.lang: ltvdb_api_parms['language'] = self.lang t = tvdb_api.Tvdb(**ltvdb_api_parms) cachedShow = t[self.tvdbid] cachedSeasons = {} for curResult in sqlResults: deleteEp = False curSeason = int(curResult["season"]) curEpisode = int(curResult["episode"]) if curSeason not in cachedSeasons: try: cachedSeasons[curSeason] = cachedShow[curSeason] except tvdb_exceptions.tvdb_seasonnotfound, e: logger.log(u"Error when trying to load the episode from TVDB: "+e.message, logger.WARNING) deleteEp = True if not curSeason in scannedEps: scannedEps[curSeason] = {} logger.log(u"Loading episode "+str(curSeason)+"x"+str(curEpisode)+" from the DB", logger.DEBUG) try: curEp = self.getEpisode(curSeason, curEpisode) # if we found out that the ep is no longer on TVDB then delete it from our database too if deleteEp: curEp.deleteEpisode() curEp.loadFromDB(curSeason, curEpisode) curEp.loadFromTVDB(tvapi=t, cachedSeason=cachedSeasons[curSeason]) scannedEps[curSeason][curEpisode] = True except exceptions.EpisodeDeletedException: logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", logger.DEBUG) continue return scannedEps def loadEpisodesFromTVDB(self, cache=True): # There's gotta be a better way of doing this but we don't wanna # change the cache value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if not cache: ltvdb_api_parms['cache'] = 'recache' if self.lang: ltvdb_api_parms['language'] = self.lang try: t = tvdb_api.Tvdb(**ltvdb_api_parms) showObj = t[self.tvdbid] except tvdb_exceptions.tvdb_error: logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR) return None logger.log(str(self.tvdbid) + ": Loading all episodes from theTVDB...") scannedEps = {} for season in showObj: scannedEps[season] = {} for episode in showObj[season]: # need some examples of wtf episode 0 means to decide if we want it or not if episode == 0: continue try: #ep = TVEpisode(self, season, episode) ep = self.getEpisode(season, episode) except exceptions.EpisodeNotFoundException: logger.log(str(self.tvdbid) + ": TVDB object for " + str(season) + "x" + str(episode) + " is incomplete, skipping this episode") continue else: try: ep.loadFromTVDB(tvapi=t) except exceptions.EpisodeDeletedException: logger.log(u"The episode was deleted, skipping the rest of the load") continue with ep.lock: logger.log(str(self.tvdbid) + ": Loading info from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG) ep.loadFromTVDB(season, episode, tvapi=t) if ep.dirty: ep.saveToDB() scannedEps[season][episode] = True return scannedEps def setTVRID(self, force=False): if self.tvrid != 0 and not force: logger.log(u"No need to get the TVRage ID, it's already populated", logger.DEBUG) return logger.log(u"Attempting to retrieve the TVRage ID", logger.DEBUG) try: # load the tvrage object, it will set the ID in its constructor if possible tvrage.TVRage(self) self.saveToDB() except exceptions.TVRageException, e: logger.log(u"Couldn't get TVRage ID because we're unable to sync TVDB and TVRage: "+ex(e), logger.DEBUG) return def getImages(self, fanart=None, poster=None): poster_result = fanart_result = season_thumb_result = False for cur_provider in sickbeard.metadata_provider_dict.values(): logger.log("Running season folders for "+cur_provider.name, logger.DEBUG) poster_result = cur_provider.create_poster(self) or poster_result fanart_result = 
cur_provider.create_fanart(self) or fanart_result season_thumb_result = cur_provider.create_season_thumbs(self) or season_thumb_result return poster_result or fanart_result or season_thumb_result def loadLatestFromTVRage(self): try: # load the tvrage object tvr = tvrage.TVRage(self) newEp = tvr.findLatestEp() if newEp != None: logger.log(u"TVRage gave us an episode object - saving it for now", logger.DEBUG) newEp.saveToDB() # make an episode out of it except exceptions.TVRageException, e: logger.log(u"Unable to add TVRage info: " + ex(e), logger.WARNING) # make a TVEpisode object from a media file def makeEpFromFile(self, file): if not ek.ek(os.path.isfile, file): logger.log(str(self.tvdbid) + ": That isn't even a real file dude... " + file) return None logger.log(str(self.tvdbid) + ": Creating episode object from " + file, logger.DEBUG) try: myParser = NameParser() parse_result = myParser.parse(file) except InvalidNameException: logger.log(u"Unable to parse the filename "+file+" into a valid episode", logger.ERROR) return None if len(parse_result.episode_numbers) == 0 and not parse_result.air_by_date: logger.log("parse_result: "+str(parse_result)) logger.log(u"No episode number found in "+file+", ignoring it", logger.ERROR) return None # for now lets assume that any episode in the show dir belongs to that show season = parse_result.season_number if parse_result.season_number != None else 1 episodes = parse_result.episode_numbers rootEp = None # if we have an air-by-date show then get the real season/episode numbers if parse_result.air_by_date: try: # There's gotta be a better way of doing this but we don't wanna # change the cache value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if self.lang: ltvdb_api_parms['language'] = self.lang t = tvdb_api.Tvdb(**ltvdb_api_parms) epObj = t[self.tvdbid].airedOn(parse_result.air_date)[0] season = int(epObj["seasonnumber"]) episodes = [int(epObj["episodenumber"])] except tvdb_exceptions.tvdb_episodenotfound: logger.log(u"Unable to find episode with date "+str(episodes[0])+" for show "+self.name+", skipping", logger.WARNING) return None except tvdb_exceptions.tvdb_error, e: logger.log(u"Unable to contact TVDB: "+ex(e), logger.WARNING) return None for curEpNum in episodes: episode = int(curEpNum) logger.log(str(self.tvdbid) + ": " + file + " parsed to " + self.name + " " + str(season) + "x" + str(episode), logger.DEBUG) checkQualityAgain = False curEp = self.getEpisode(season, episode) if curEp == None: try: curEp = self.getEpisode(season, episode, file) except exceptions.EpisodeNotFoundException: logger.log(str(self.tvdbid) + ": Unable to figure out what this file is, skipping", logger.ERROR) continue else: # if there is a new file associated with this ep then re-check the quality if curEp.location and ek.ek(os.path.normpath, curEp.location) != ek.ek(os.path.normpath, file): logger.log(u"The old episode had a different file associated with it, I will re-check the quality based on the new filename "+file, logger.DEBUG) checkQualityAgain = True with curEp.lock: curEp.location = file curEp.checkForMetaFiles() if rootEp == None: rootEp = curEp else: rootEp.relatedEps.append(curEp) # if they replace a file on me I'll make some attempt at re-checking the quality if checkQualityAgain: newQuality = Quality.nameQuality(file) logger.log(u"Since this file has been renamed, I checked "+file+" and found quality "+Quality.qualityStrings[newQuality], logger.DEBUG) if newQuality != Quality.UNKNOWN: curEp.status = Quality.compositeStatus(DOWNLOADED, 
newQuality) elif sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ARCHIVED, IGNORED]: oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status) newQuality = Quality.nameQuality(file) if newQuality == Quality.UNKNOWN: newQuality = Quality.assumeQuality(file) newStatus = None # if it was snatched and now exists then set the status correctly if oldStatus == SNATCHED and oldQuality <= newQuality: logger.log(u"STATUS: this ep used to be snatched with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG) newStatus = DOWNLOADED # if it was snatched proper and we found a higher quality one then allow the status change elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality: logger.log(u"STATUS: this ep used to be snatched proper with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG) newStatus = DOWNLOADED elif oldStatus not in (SNATCHED, SNATCHED_PROPER): newStatus = DOWNLOADED if newStatus != None: with curEp.lock: logger.log(u"STATUS: we have an associated file, so setting the status from "+str(curEp.status)+" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG) curEp.status = Quality.compositeStatus(newStatus, newQuality) with curEp.lock: curEp.saveToDB() # creating metafiles on the root should be good enough if rootEp != None: with rootEp.lock: rootEp.createMetaFiles() return rootEp def loadFromDB(self, skipNFO=False): logger.log(str(self.tvdbid) + ": Loading show info from database") myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid]) if len(sqlResults) > 1: raise exceptions.MultipleDBShowsException() elif len(sqlResults) == 0: logger.log(str(self.tvdbid) + ": Unable to find the show in the database") return else: if self.name == "": self.name = sqlResults[0]["show_name"] self.tvrname = sqlResults[0]["tvr_name"] if self.network == "": self.network = sqlResults[0]["network"] if self.genre == "": self.genre = sqlResults[0]["genre"] self.runtime = sqlResults[0]["runtime"] self.status = sqlResults[0]["status"] if self.status == None: self.status = "" self.airs = sqlResults[0]["airs"] if self.airs == None: self.airs = "" self.startyear = sqlResults[0]["startyear"] if self.startyear == None: self.startyear = 0 self.air_by_date = sqlResults[0]["air_by_date"] if self.air_by_date == None: self.air_by_date = 0 self.quality = int(sqlResults[0]["quality"]) self.seasonfolders = int(sqlResults[0]["seasonfolders"]) self.paused = int(sqlResults[0]["paused"]) self._location = sqlResults[0]["location"] if self.tvrid == 0: self.tvrid = int(sqlResults[0]["tvr_id"]) if self.lang == "": self.lang = sqlResults[0]["lang"] def loadFromTVDB(self, cache=True, tvapi=None, cachedSeason=None): logger.log(str(self.tvdbid) + ": Loading show info from theTVDB") # There's gotta be a better way of doing this but we don't wanna # change the cache value elsewhere if tvapi is None: ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if not cache: ltvdb_api_parms['cache'] = 'recache' if self.lang: ltvdb_api_parms['language'] = self.lang t = tvdb_api.Tvdb(**ltvdb_api_parms) else: t = tvapi myEp = t[self.tvdbid] self.name = myEp["seriesname"] self.genre = myEp['genre'] self.network = myEp['network'] if myEp["airs_dayofweek"] != None and myEp["airs_time"] != None: self.airs = 
myEp["airs_dayofweek"] + " " + myEp["airs_time"] if myEp["firstaired"] != None and myEp["firstaired"]: self.startyear = int(myEp["firstaired"].split('-')[0]) if self.airs == None: self.airs = "" if myEp["status"] != None: self.status = myEp["status"] if self.status == None: self.status = "" self.saveToDB() def loadNFO (self): if not os.path.isdir(self._location): logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't load NFO") raise exceptions.NoNFOException("The show dir doesn't exist, no NFO could be loaded") logger.log(str(self.tvdbid) + ": Loading show info from NFO") xmlFile = os.path.join(self._location, "tvshow.nfo") try: xmlFileObj = open(xmlFile, 'r') showXML = etree.ElementTree(file = xmlFileObj) if showXML.findtext('title') == None or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None): raise exceptions.NoNFOException("Invalid info in tvshow.nfo (missing name or id):" \ + str(showXML.findtext('title')) + " " \ + str(showXML.findtext('tvdbid')) + " " \ + str(showXML.findtext('id'))) self.name = showXML.findtext('title') if showXML.findtext('tvdbid') != None: self.tvdbid = int(showXML.findtext('tvdbid')) elif showXML.findtext('id'): self.tvdbid = int(showXML.findtext('id')) else: raise exceptions.NoNFOException("Empty <id> or <tvdbid> field in NFO") except (exceptions.NoNFOException, SyntaxError, ValueError), e: logger.log(u"There was an error parsing your existing tvshow.nfo file: " + ex(e), logger.ERROR) logger.log(u"Attempting to rename it to tvshow.nfo.old", logger.DEBUG) try: xmlFileObj.close() ek.ek(os.rename, xmlFile, xmlFile + ".old") except Exception, e: logger.log(u"Failed to rename your tvshow.nfo file - you need to delete it or fix it: " + ex(e), logger.ERROR) raise exceptions.NoNFOException("Invalid info in tvshow.nfo") if showXML.findtext('studio') != None: self.network = showXML.findtext('studio') if self.network == None and showXML.findtext('network') != None: self.network = "" if showXML.findtext('genre') != None: self.genre = showXML.findtext('genre') else: self.genre = "" # TODO: need to validate the input, I'm assuming it's good until then def nextEpisode(self): logger.log(str(self.tvdbid) + ": Finding the episode which airs next", logger.DEBUG) myDB = db.DBConnection() innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status = ? ORDER BY airdate ASC LIMIT 1" innerParams = [self.tvdbid, datetime.date.today().toordinal(), UNAIRED] query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status = ?" params = [self.tvdbid, datetime.date.today().toordinal()] + innerParams + [UNAIRED] sqlResults = myDB.select(query, params) if sqlResults == None or len(sqlResults) == 0: logger.log(str(self.tvdbid) + ": No episode found... 
need to implement tvrage and also show status", logger.DEBUG) return [] else: logger.log(str(self.tvdbid) + ": Found episode " + str(sqlResults[0]["season"]) + "x" + str(sqlResults[0]["episode"]), logger.DEBUG) foundEps = [] for sqlEp in sqlResults: curEp = self.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"])) foundEps.append(curEp) return foundEps # if we didn't get an episode then try getting one from tvrage # load tvrage info # extract NextEpisode info # verify that we don't have it in the DB somehow (ep mismatch) def deleteShow(self): myDB = db.DBConnection() myDB.action("DELETE FROM tv_episodes WHERE showid = ?", [self.tvdbid]) myDB.action("DELETE FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid]) # remove self from show list sickbeard.showList = [x for x in sickbeard.showList if x.tvdbid != self.tvdbid] # clear the cache image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images') for cache_file in ek.ek(glob.glob, ek.ek(os.path.join, image_cache_dir, str(self.tvdbid)+'.*')): logger.log(u"Deleting cache file "+cache_file) os.remove(cache_file) def populateCache(self): cache_inst = image_cache.ImageCache() logger.log(u"Checking & filling cache for show "+self.name) cache_inst.fill_cache(self) def refreshDir(self): # make sure the show dir is where we think it is if not ek.ek(os.path.isdir, self._location): return False # load from dir self.loadEpisodesFromDir() # run through all locations from DB, check that they exist logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database") myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid]) for ep in sqlResults: curLoc = os.path.normpath(ep["location"]) season = int(ep["season"]) episode = int(ep["episode"]) try: curEp = self.getEpisode(season, episode) except exceptions.EpisodeDeletedException: logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", logger.DEBUG) continue # if the path doesn't exist or if it's not in our show dir if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(os.path.normpath(self.location)): with curEp.lock: # if it used to have a file associated with it and it doesn't anymore then set it to IGNORED if curEp.location and curEp.status in Quality.DOWNLOADED: logger.log(str(self.tvdbid) + ": Location for " + str(season) + "x" + str(episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG) curEp.status = IGNORED curEp.location = '' curEp.hasnfo = False curEp.hastbn = False curEp.saveToDB() def fixEpisodeNames(self): if not os.path.isdir(self._location): logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't rename episodes") return # load episodes from my folder self.loadEpisodesFromDir() logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database") myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? 
AND location != ''", [self.tvdbid]) # build list of locations fileLocations = {} for epResult in sqlResults: goodLoc = os.path.normpath(epResult["location"]) goodSeason = int(epResult["season"]) goodEpisode = int(epResult["episode"]) if fileLocations.has_key(goodLoc): fileLocations[goodLoc].append((goodSeason, goodEpisode)) else: fileLocations[goodLoc] = [(goodSeason, goodEpisode)] logger.log(u"File results: " + str(fileLocations), logger.DEBUG) for curLocation in fileLocations: epList = fileLocations[curLocation] # get the root episode and add all related episodes to it rootEp = None for myEp in epList: curEp = self.getEpisode(myEp[0], myEp[1]) if rootEp == None: rootEp = curEp rootEp.relatedEps = [] else: rootEp.relatedEps.append(curEp) goodName = rootEp.prettyName() actualName = os.path.splitext(os.path.basename(curLocation)) if goodName == actualName[0]: logger.log(str(self.tvdbid) + ": File " + rootEp.location + " is already named correctly, skipping", logger.DEBUG) continue with rootEp.lock: result = helpers.rename_file(rootEp.location, rootEp.prettyName()) if result != False: rootEp.location = result for relEp in rootEp.relatedEps: relEp.location = result fileList = postProcessor.PostProcessor(curLocation)._list_associated_files(curLocation) logger.log(u"Files associated to "+curLocation+": "+str(fileList), logger.DEBUG) for file in fileList: result = helpers.rename_file(file, rootEp.prettyName()) if result == False: logger.log(str(self.tvdbid) + ": Unable to rename file "+file, logger.ERROR) for curEp in [rootEp]+rootEp.relatedEps: curEp.checkForMetaFiles() with rootEp.lock: rootEp.saveToDB() for relEp in rootEp.relatedEps: relEp.saveToDB() def saveToDB(self): logger.log(str(self.tvdbid) + ": Saving show info to database", logger.DEBUG) myDB = db.DBConnection() controlValueDict = {"tvdb_id": self.tvdbid} newValueDict = {"show_name": self.name, "tvr_id": self.tvrid, "location": self._location, "network": self.network, "genre": self.genre, "runtime": self.runtime, "quality": self.quality, "airs": self.airs, "status": self.status, "seasonfolders": self.seasonfolders, "paused": self.paused, "air_by_date": self.air_by_date, "startyear": self.startyear, "tvr_name": self.tvrname, "lang": self.lang } myDB.upsert("tv_shows", newValueDict, controlValueDict) def __str__(self): toReturn = "" toReturn += "name: " + self.name + "\n" toReturn += "location: " + self._location + "\n" toReturn += "tvdbid: " + str(self.tvdbid) + "\n" if self.network != None: toReturn += "network: " + self.network + "\n" if self.airs != None: toReturn += "airs: " + self.airs + "\n" if self.status != None: toReturn += "status: " + self.status + "\n" toReturn += "startyear: " + str(self.startyear) + "\n" toReturn += "genre: " + self.genre + "\n" toReturn += "runtime: " + str(self.runtime) + "\n" toReturn += "quality: " + str(self.quality) + "\n" return toReturn def wantEpisode(self, season, episode, quality, manualSearch=False): logger.log(u"Checking if we want episode "+str(season)+"x"+str(episode)+" at quality "+Quality.qualityStrings[quality], logger.DEBUG) # if the quality isn't one we want under any circumstances then just say no anyQualities, bestQualities = Quality.splitQuality(self.quality) logger.log(u"any,best = "+str(anyQualities)+" "+str(bestQualities)+" and we are "+str(quality), logger.DEBUG) if quality not in anyQualities + bestQualities: logger.log(u"I know for sure I don't want this episode, saying no", logger.DEBUG) return False myDB = db.DBConnection() sqlResults = myDB.select("SELECT status FROM 
tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.tvdbid, season, episode]) if not sqlResults or not len(sqlResults): logger.log(u"Unable to find the episode", logger.DEBUG) return False epStatus = int(sqlResults[0]["status"]) logger.log(u"current episode status: "+str(epStatus), logger.DEBUG) # if we know we don't want it then just say no if epStatus in (SKIPPED, IGNORED, ARCHIVED) and not manualSearch: logger.log(u"Ep is skipped, not bothering", logger.DEBUG) return False # if it's one of these then we want it as long as it's in our allowed initial qualities if quality in anyQualities + bestQualities: if epStatus in (WANTED, UNAIRED, SKIPPED): logger.log(u"Ep is wanted/unaired/skipped, definitely get it", logger.DEBUG) return True elif manualSearch: logger.log(u"Usually I would ignore this ep but because you forced the search I'm overriding the default and allowing the quality", logger.DEBUG) return True else: logger.log(u"This quality looks like something we might want but I don't know for sure yet", logger.DEBUG) curStatus, curQuality = Quality.splitCompositeStatus(epStatus) # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have if curStatus in Quality.SNATCHED + Quality.DOWNLOADED and quality in bestQualities and quality > curQuality: logger.log(u"We already have this ep but the new one is better quality, saying yes", logger.DEBUG) return True logger.log(u"None of the conditions were met so I'm just saying no", logger.DEBUG) return False def getOverview(self, epStatus): if epStatus == WANTED: return Overview.WANTED elif epStatus in (UNAIRED, UNKNOWN): return Overview.UNAIRED elif epStatus in (SKIPPED, IGNORED): return Overview.SKIPPED elif epStatus == ARCHIVED: return Overview.GOOD elif epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER: anyQualities, bestQualities = Quality.splitQuality(self.quality) #@UnusedVariable if bestQualities: maxBestQuality = max(bestQualities) else: maxBestQuality = None epStatus, curQuality = Quality.splitCompositeStatus(epStatus) # if they don't want re-downloads then we call it good if they have anything if maxBestQuality == None: return Overview.GOOD # if they have one but it's not the best they want then mark it as qual elif curQuality < maxBestQuality: return Overview.QUAL # if it's >= maxBestQuality then it's good else: return Overview.GOOD def dirty_setter(attr_name): def wrapper(self, val): if getattr(self, attr_name) != val: setattr(self, attr_name, val) self.dirty = True return wrapper class TVEpisode(object): def __init__(self, show, season, episode, file=""): self._name = "" self._season = season self._episode = episode self._description = "" self._airdate = datetime.date.fromordinal(1) self._hasnfo = False self._hastbn = False self._status = UNKNOWN self._tvdbid = 0 self._language = "en" # setting any of the above sets the dirty flag self.dirty = True self.show = show self._location = file self.lock = threading.Lock() self.specifyEpisode(self.season, self.episode) self.relatedEps = [] self.checkForMetaFiles() name = property(lambda self: self._name, dirty_setter("_name")) season = property(lambda self: self._season, dirty_setter("_season")) episode = property(lambda self: self._episode, dirty_setter("_episode")) description = property(lambda self: self._description, dirty_setter("_description")) airdate = property(lambda self: self._airdate, dirty_setter("_airdate")) hasnfo = property(lambda self: self._hasnfo, dirty_setter("_hasnfo")) hastbn = 
property(lambda self: self._hastbn, dirty_setter("_hastbn")) status = property(lambda self: self._status, dirty_setter("_status")) tvdbid = property(lambda self: self._tvdbid, dirty_setter("_tvdbid")) location = property(lambda self: self._location, dirty_setter("_location")) language = property(lambda self: self._language, dirty_setter("_language")) def checkForMetaFiles(self): oldhasnfo = self.hasnfo oldhastbn = self.hastbn cur_nfo = False cur_tbn = False # check for nfo and tbn if ek.ek(os.path.isfile, self.location): for cur_provider in sickbeard.metadata_provider_dict.values(): if cur_provider.episode_metadata: new_result = cur_provider._has_episode_metadata(self) else: new_result = False cur_nfo = new_result or cur_nfo if cur_provider.episode_thumbnails: new_result = cur_provider._has_episode_thumb(self) else: new_result = False cur_tbn = new_result or cur_tbn self.hasnfo = cur_nfo self.hastbn = cur_tbn # if either setting has changed return true, if not return false return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn def specifyEpisode(self, season, episode): sqlResult = self.loadFromDB(season, episode) # only load from NFO if we didn't load from DB if ek.ek(os.path.isfile, self.location) and self.name == "": try: self.loadFromNFO(self.location) except exceptions.NoNFOException: logger.log(str(self.show.tvdbid) + ": There was an error loading the NFO for episode " + str(season) + "x" + str(episode), logger.ERROR) pass # if we tried loading it from NFO and didn't find the NFO, use TVDB if self.hasnfo == False: try: result = self.loadFromTVDB(season, episode) except exceptions.EpisodeDeletedException: result = False # if we failed TVDB, NFO *and* SQL then fail if result == False and not sqlResult: raise exceptions.EpisodeNotFoundException("Couldn't find episode " + str(season) + "x" + str(episode)) # don't update if not needed if self.dirty: self.saveToDB() def loadFromDB(self, season, episode): logger.log(str(self.show.tvdbid) + ": Loading episode details from DB for episode " + str(season) + "x" + str(episode), logger.DEBUG) myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", [self.show.tvdbid, season, episode]) if len(sqlResults) > 1: raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.") elif len(sqlResults) == 0: logger.log(str(self.show.tvdbid) + ": Episode " + str(self.season) + "x" + str(self.episode) + " not found in the database", logger.DEBUG) return False else: #NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"])) if sqlResults[0]["name"] != None: self.name = sqlResults[0]["name"] self.season = season self.episode = episode self.description = sqlResults[0]["description"] if self.description == None: self.description = "" self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"])) #logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG) self.status = int(sqlResults[0]["status"]) # don't overwrite my location if sqlResults[0]["location"] != "" and sqlResults[0]["location"] != None: self.location = os.path.normpath(sqlResults[0]["location"]) self.tvdbid = int(sqlResults[0]["tvdbid"]) self.language = sqlResults[0]["lang"] self.dirty = False return True def loadFromTVDB(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None): if season == None: season = self.season if episode == None: episode = self.episode logger.log(str(self.show.tvdbid) + ": Loading episode details from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG) tvdb_lang = self.show.lang try: if cachedSeason is None: if tvapi is None: # There's gotta be a better way of doing this but we don't wanna # change the cache value elsewhere ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if not cache: ltvdb_api_parms['cache'] = 'recache' if tvdb_lang: ltvdb_api_parms['language'] = tvdb_lang t = tvdb_api.Tvdb(**ltvdb_api_parms) else: t = tvapi myEp = t[self.show.tvdbid][season][episode] else: myEp = cachedSeason[episode] except (tvdb_exceptions.tvdb_error, IOError), e: logger.log(u"TVDB threw up an error: "+ex(e), logger.DEBUG) # if the episode is already valid just log it, if not throw it up if self.name: logger.log(u"TVDB timed out but we have enough info from other sources, allowing the error", logger.DEBUG) return else: logger.log(u"TVDB timed out, unable to create the episode", logger.ERROR) return False except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound): logger.log(u"Unable to find the episode on tvdb... has it been removed? 
Should I delete from db?", logger.DEBUG) # if I'm no longer on TVDB but I once was then delete myself from the DB if self.tvdbid != -1: self.deleteEpisode() return if not myEp["firstaired"]: myEp["firstaired"] = str(datetime.date.fromordinal(1)) if myEp["episodename"] == None or myEp["episodename"] == "": logger.log(u"This episode ("+self.show.name+" - "+str(season)+"x"+str(episode)+") has no name on TVDB") # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now if self.tvdbid != -1: self.deleteEpisode() return False #NAMEIT logger.log(u"BBBBBBBB from " + str(self.season)+"x"+str(self.episode) + " -" +self.name+" to "+myEp["episodename"]) self.name = myEp["episodename"] self.season = season self.episode = episode tmp_description = myEp["overview"] if tmp_description == None: self.description = "" else: self.description = tmp_description rawAirdate = [int(x) for x in myEp["firstaired"].split("-")] try: self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2]) except ValueError: logger.log(u"Malformed air date retrieved from TVDB ("+self.show.name+" - "+str(season)+"x"+str(episode)+")", logger.ERROR) # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now if self.tvdbid != -1: self.deleteEpisode() return False #early conversion to int so that episode doesn't get marked dirty self.tvdbid = int(myEp["id"]) if not ek.ek(os.path.isdir, self.show._location): logger.log(u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid") return logger.log(str(self.show.tvdbid) + ": Setting status for " + str(season) + "x" + str(episode) + " based on status " + str(self.status) + " and existence of " + self.location, logger.DEBUG) if not ek.ek(os.path.isfile, self.location): # if we don't have the file if self.airdate >= datetime.date.today() and self.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER: # and it hasn't aired yet set the status to UNAIRED logger.log(u"Episode airs in the future, changing status from " + str(self.status) + " to " + str(UNAIRED), logger.DEBUG) self.status = UNAIRED # if there's no airdate then set it to skipped (and respect ignored) elif self.airdate == datetime.date.fromordinal(1): if self.status == IGNORED: logger.log(u"Episode has no air date, but it's already marked as ignored", logger.DEBUG) else: logger.log(u"Episode has no air date, automatically marking it skipped", logger.DEBUG) self.status = SKIPPED # if we don't have the file and the airdate is in the past else: if self.status == UNAIRED: self.status = WANTED # if we somehow are still UNKNOWN then just skip it elif self.status == UNKNOWN: self.status = SKIPPED else: logger.log(u"Not touching status because we have no ep file, the airdate is in the past, and the status is "+str(self.status), logger.DEBUG) # if we have a media file then it's downloaded elif sickbeard.helpers.isMediaFile(self.location): # leave propers alone, you have to either post-process them or manually change them back if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]: logger.log(u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG) self.status = Quality.statusFromName(self.location) # shouldn't get here probably else: logger.log(u"6 Status changes from " + str(self.status) + " to " + str(UNKNOWN), logger.DEBUG) self.status = UNKNOWN # hasnfo, hastbn, status? 
def loadFromNFO(self, location): if not os.path.isdir(self.show._location): logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try loading the episode NFO") return logger.log(str(self.show.tvdbid) + ": Loading episode details from the NFO file associated with " + location, logger.DEBUG) self.location = location if self.location != "": if self.status == UNKNOWN: if sickbeard.helpers.isMediaFile(self.location): logger.log(u"7 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG) self.status = Quality.statusFromName(self.location) nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo") logger.log(str(self.show.tvdbid) + ": Using NFO name " + nfoFile, logger.DEBUG) if ek.ek(os.path.isfile, nfoFile): try: showXML = etree.ElementTree(file = nfoFile) except (SyntaxError, ValueError), e: logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), logger.ERROR) #TODO: figure out what's wrong and fix it try: ek.ek(os.rename, nfoFile, nfoFile + ".old") except Exception, e: logger.log(u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), logger.ERROR) raise exceptions.NoNFOException("Error in NFO format") for epDetails in showXML.getiterator('episodedetails'): if epDetails.findtext('season') == None or int(epDetails.findtext('season')) != self.season or \ epDetails.findtext('episode') == None or int(epDetails.findtext('episode')) != self.episode: logger.log(str(self.show.tvdbid) + ": NFO has an <episodedetails> block for a different episode - wanted " + str(self.season) + "x" + str(self.episode) + " but got " + str(epDetails.findtext('season')) + "x" + str(epDetails.findtext('episode')), logger.DEBUG) continue if epDetails.findtext('title') == None or epDetails.findtext('aired') == None: raise exceptions.NoNFOException("Error in NFO format (missing episode title or airdate)") self.name = epDetails.findtext('title') self.episode = int(epDetails.findtext('episode')) self.season = int(epDetails.findtext('season')) self.description = epDetails.findtext('plot') if self.description == None: self.description = "" if epDetails.findtext('aired'): rawAirdate = [int(x) for x in epDetails.findtext('aired').split("-")] self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2]) else: self.airdate = datetime.date.fromordinal(1) self.hasnfo = True else: self.hasnfo = False if ek.ek(os.path.isfile, sickbeard.helpers.replaceExtension(nfoFile, "tbn")): self.hastbn = True else: self.hastbn = False def __str__ (self): toReturn = "" toReturn += str(self.show.name) + " - " + str(self.season) + "x" + str(self.episode) + " - " + str(self.name) + "\n" toReturn += "location: " + str(self.location) + "\n" toReturn += "description: " + str(self.description) + "\n" toReturn += "airdate: " + str(self.airdate.toordinal()) + " (" + str(self.airdate) + ")\n" toReturn += "hasnfo: " + str(self.hasnfo) + "\n" toReturn += "hastbn: " + str(self.hastbn) + "\n" toReturn += "status: " + str(self.status) + "\n" toReturn += "language: " + str(self.language) + "\n" return toReturn def createMetaFiles(self, force=False): if not os.path.isdir(self.show._location): logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try to create metadata") return self.createNFO(force) self.createThumbnail(force) if self.checkForMetaFiles(): self.saveToDB() def createNFO(self, force=False): result = False for cur_provider in sickbeard.metadata_provider_dict.values(): 
result = cur_provider.create_episode_metadata(self) or result return result def createThumbnail(self, force=False): result = False for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_episode_thumb(self) or result return result def deleteEpisode(self): logger.log(u"Deleting "+self.show.name+" "+str(self.season)+"x"+str(self.episode)+" from the DB", logger.DEBUG) # remove myself from the show dictionary if self.show.getEpisode(self.season, self.episode, noCreate=True) == self: logger.log(u"Removing myself from my show's list", logger.DEBUG) del self.show.episodes[self.season][self.episode] # delete myself from the DB logger.log(u"Deleting myself from the database", logger.DEBUG) myDB = db.DBConnection() sql = "DELETE FROM tv_episodes WHERE showid="+str(self.show.tvdbid)+" AND season="+str(self.season)+" AND episode="+str(self.episode) myDB.action(sql) raise exceptions.EpisodeDeletedException() def saveToDB(self, forceSave=False): if not self.dirty and not forceSave: logger.log(str(self.show.tvdbid) + ": Not saving episode to db - record is not dirty", logger.DEBUG) return logger.log(str(self.show.tvdbid) + ": Saving episode details to database", logger.DEBUG) logger.log(u"STATUS IS " + str(self.status), logger.DEBUG) myDB = db.DBConnection() newValueDict = {"tvdbid": self.tvdbid, "name": self.name, "description": self.description, "airdate": self.airdate.toordinal(), "hasnfo": self.hasnfo, "hastbn": self.hastbn, "status": self.status, "location": self.location, "lang": self.language} controlValueDict = {"showid": self.show.tvdbid, "season": self.season, "episode": self.episode} # use a custom update/insert method to get the data into the DB myDB.upsert("tv_episodes", newValueDict, controlValueDict) def fullPath (self): if self.location == None or self.location == "": return None else: return os.path.join(self.show.location, self.location) def getOverview(self): return self.show.getOverview(self.status) def prettyName (self, naming_show_name=None, naming_ep_type=None, naming_multi_ep_type=None, naming_ep_name=None, naming_sep_type=None, naming_use_periods=None, naming_quality=None): regex = "(.*) \(\d\)" goodEpString = '' self.relatedEps = sorted(self.relatedEps, key=lambda x: x.episode) if len(self.relatedEps) == 0: goodName = self.name elif len(self.relatedEps) > 1: goodName = '' else: singleName = True curGoodName = None for curName in [self.name]+[x.name for x in self.relatedEps]: match = re.match(regex, curName) if not match: singleName = False break if curGoodName == None: curGoodName = match.group(1) elif curGoodName != match.group(1): singleName = False break if singleName: goodName = curGoodName else: goodName = self.name for relEp in self.relatedEps: goodName += " & " + relEp.name if naming_show_name == None: naming_show_name = sickbeard.NAMING_SHOW_NAME if naming_ep_name == None: naming_ep_name = sickbeard.NAMING_EP_NAME if naming_ep_type == None: naming_ep_type = sickbeard.NAMING_EP_TYPE if naming_multi_ep_type == None: naming_multi_ep_type = sickbeard.NAMING_MULTI_EP_TYPE if naming_sep_type == None: naming_sep_type = sickbeard.NAMING_SEP_TYPE if naming_use_periods == None: naming_use_periods = sickbeard.NAMING_USE_PERIODS if naming_quality == None: naming_quality = sickbeard.NAMING_QUALITY if self.show.air_by_date and sickbeard.NAMING_DATES: try: goodEpString = self.airdate.strftime("%Y.%m.%d") except ValueError: pass # if we didn't set it to the air-by-date value use the season/ep if not goodEpString: goodEpString = 
config.naming_ep_type[naming_ep_type] % {'seasonnumber': self.season, 'episodenumber': self.episode} for relEp in self.relatedEps: goodEpString += config.naming_multi_ep_type[naming_multi_ep_type][naming_ep_type] % {'seasonnumber': relEp.season, 'episodenumber': relEp.episode} if goodName != '': goodName = config.naming_sep_type[naming_sep_type] + goodName finalName = "" if naming_show_name: finalName += self.show.name + config.naming_sep_type[naming_sep_type] finalName += goodEpString if naming_ep_name: finalName += goodName if naming_quality: epStatus, epQual = Quality.splitCompositeStatus(self.status) #@UnusedVariable if epQual != Quality.NONE: finalName += config.naming_sep_type[naming_sep_type] + Quality.qualityStrings[epQual] if naming_use_periods: finalName = re.sub("\s+", ".", finalName) return finalName
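The prettyName() composition above reduces to Python %-style dict substitution against pattern strings looked up in sickbeard's config module. A minimal runnable sketch of that step follows; the two pattern strings are hypothetical stand-ins for config.naming_ep_type[...] and config.naming_multi_ep_type[...][...], whose real values live in the config module and are not shown in this file:

# Minimal sketch of the pattern substitution used by prettyName().
# EP_PATTERN and MULTI_EP_PATTERN are hypothetical stand-ins for
# config.naming_ep_type[...] and config.naming_multi_ep_type[...][...].
EP_PATTERN = '%(seasonnumber)dx%(episodenumber)02d'   # e.g. '3x07'
MULTI_EP_PATTERN = '-%(episodenumber)02d'             # one per related episode

def build_ep_string(season, episodes):
    # Compose a '3x07-08' style string the way prettyName() does for a
    # root episode plus its relatedEps.
    out = EP_PATTERN % {'seasonnumber': season, 'episodenumber': episodes[0]}
    for ep in episodes[1:]:
        out += MULTI_EP_PATTERN % {'seasonnumber': season,
                                   'episodenumber': ep}
    return out

print(build_ep_string(3, [7, 8]))  # -> 3x07-08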
gpl-3.0
129,994,857,721,770,370
39.379215
283
0.566795
false
4.147627
false
false
false
hufsm/tu_gen2_libsigrokdecode
decoders/usb_power_delivery/pd.py
1
20599
## ## This file is part of the libsigrokdecode project. ## ## Copyright (C) 2015 Google, Inc ## Copyright (C) 2018 davidanger <[email protected]> ## Copyright (C) 2018 Peter Hazenberg <[email protected]> ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 2 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, see <http://www.gnu.org/licenses/>. ## import sigrokdecode as srd import struct import zlib # for crc32 # BMC encoding with a 600kHz datarate UI_US = 1000000/600000.0 # Threshold to discriminate half-1 from 0 in Binary Mark Conding THRESHOLD_US = (UI_US + 2 * UI_US) / 2 # Control Message type CTRL_TYPES = { 0: 'reserved', 1: 'GOOD CRC', 2: 'GOTO MIN', 3: 'ACCEPT', 4: 'REJECT', 5: 'PING', 6: 'PS RDY', 7: 'GET SOURCE CAP', 8: 'GET SINK CAP', 9: 'DR SWAP', 10: 'PR SWAP', 11: 'VCONN SWAP', 12: 'WAIT', 13: 'SOFT RESET', 14: 'reserved', 15: 'reserved', 16: 'Not Supported', 17: 'Get_Source_Cap_Extended', 18: 'Get_Status', 19: 'FR_Swap', 20: 'Get_PPS_Status', 21: 'Get_Country_Codes', } # Data message type DATA_TYPES = { 1: 'SOURCE CAP', 2: 'REQUEST', 3: 'BIST', 4: 'SINK CAP', 5: 'Battery_Status', 6: 'Alert', 7: 'Get_Country_Info', 15: 'VDM' } # 4b5b encoding of the symbols DEC4B5B = [ 0x10, # Error 00000 0x10, # Error 00001 0x10, # Error 00010 0x10, # Error 00011 0x10, # Error 00100 0x10, # Error 00101 0x13, # Sync-3 00110 0x14, # RST-1 00111 0x10, # Error 01000 0x01, # 1 = 0001 01001 0x04, # 4 = 0100 01010 0x05, # 5 = 0101 01011 0x10, # Error 01100 0x16, # EOP 01101 0x06, # 6 = 0110 01110 0x07, # 7 = 0111 01111 0x10, # Error 10000 0x12, # Sync-2 10001 0x08, # 8 = 1000 10010 0x09, # 9 = 1001 10011 0x02, # 2 = 0010 10100 0x03, # 3 = 0011 10101 0x0A, # A = 1010 10110 0x0B, # B = 1011 10111 0x11, # Sync-1 11000 0x15, # RST-2 11001 0x0C, # C = 1100 11010 0x0D, # D = 1101 11011 0x0E, # E = 1110 11100 0x0F, # F = 1111 11101 0x00, # 0 = 0000 11110 0x10, # Error 11111 ] SYM_ERR = 0x10 SYNC1 = 0x11 SYNC2 = 0x12 SYNC3 = 0x13 RST1 = 0x14 RST2 = 0x15 EOP = 0x16 SYNC_CODES = [SYNC1, SYNC2, SYNC3] HRST_CODES = [RST1, RST1, RST1, RST2] SOP_SEQUENCES = [ (SYNC1, SYNC1, SYNC1, SYNC2), (SYNC1, SYNC1, SYNC3, SYNC3), (SYNC1, SYNC3, SYNC1, SYNC3), (SYNC1, RST2, RST2, SYNC3), (SYNC1, RST2, SYNC3, SYNC2), (RST1, SYNC1, RST1, SYNC3), (RST1, RST1, RST1, RST2), ] START_OF_PACKETS = { SOP_SEQUENCES[0]: 'SOP', SOP_SEQUENCES[1]: "SOP'", SOP_SEQUENCES[2]: 'SOP"', SOP_SEQUENCES[3]: "SOP' Debug", SOP_SEQUENCES[4]: 'SOP" Debug', SOP_SEQUENCES[5]: 'Cable Reset', SOP_SEQUENCES[6]: 'Hard Reset', } SYM_NAME = [ ['0x0', '0'], ['0x1', '1'], ['0x2', '2'], ['0x3', '3'], ['0x4', '4'], ['0x5', '5'], ['0x6', '6'], ['0x7', '7'], ['0x8', '8'], ['0x9', '9'], ['0xA', 'A'], ['0xB', 'B'], ['0xC', 'C'], ['0xD', 'D'], ['0xE', 'E'], ['0xF', 'F'], ['ERROR', 'X'], ['SYNC-1', 'S1'], ['SYNC-2', 'S2'], ['SYNC-3', 'S3'], ['RST-1', 'R1'], ['RST-2', 'R2'], ['EOP', '#'], ] RDO_FLAGS = { (1 << 23): 'unchunked', (1 << 24): 'no_suspend', (1 << 25): 'comm_cap', (1 << 26): 'cap_mismatch', (1 << 27): 'give_back' } BIST_MODES = { 0: 'Receiver', 1: 'Transmit', 2: 
'Counters', 3: 'Carrier 0', 4: 'Carrier 1', 5: 'Carrier 2', 6: 'Carrier 3', 7: 'Eye', } VDM_CMDS = { 1: 'Disc Ident', 2: 'Disc SVID', 3: 'Disc Mode', 4: 'Enter Mode', 5: 'Exit Mode', 6: 'Attention', # 16..31: SVID Specific Commands # DisplayPort Commands 16: 'DP Status', 17: 'DP Configure', } VDM_ACK = ['REQ', 'ACK', 'NAK', 'BSY'] class SamplerateError(Exception): pass class Decoder(srd.Decoder): api_version = 3 id = 'usb_power_delivery' name = 'USB PD' longname = 'USB Power Delivery' desc = 'USB Power Delivery protocol.' license = 'gplv2+' inputs = ['logic'] outputs = ['usb_pd'] channels = ( {'id': 'cc1', 'name': 'CC1', 'desc': 'Configuration Channel 1'}, ) optional_channels = ( {'id': 'cc2', 'name': 'CC2', 'desc': 'Configuration Channel 2'}, ) options = ( {'id': 'fulltext', 'desc': 'Full text decoding of packets', 'default': 'no', 'values': ('yes', 'no')}, ) annotations = ( ('type', 'Packet Type'), ('preamble', 'Preamble'), ('sop', 'Start of Packet'), ('header', 'Header'), ('data', 'Data'), ('crc', 'Checksum'), ('eop', 'End Of Packet'), ('sym', '4b5b symbols'), ('warnings', 'Warnings'), ('src', 'Source Message'), ('snk', 'Sink Message'), ('payload', 'Payload'), ('text', 'Plain text'), ) annotation_rows = ( ('4b5b', 'Symbols', (7,)), ('phase', 'Parts', (1, 2, 3, 4, 5, 6)), ('payload', 'Payload', (11,)), ('type', 'Type', (0, 9, 10)), ('warnings', 'Warnings', (8,)), ('text', 'Full text', (12,)), ) binary = ( ('raw-data', 'RAW binary data'), ) stored_pdos = {} def get_request(self, rdo): pos = (rdo >> 28) & 7 op_ma = ((rdo >> 10) & 0x3ff) * 0.01 max_ma = (rdo & 0x3ff) * 0.01 mark = self.cap_mark[pos] if mark == 3: op_v = ((rdo >> 9) & 0x7ff) * 0.02 op_a = (rdo & 0x3f) * 0.05 t_settings = '%gV %gA' % (op_v, op_a) elif mark == 2: op_w = ((rdo >> 10) & 0x3ff) * 0.25 mp_w = (rdo & 0x3ff) * 0.25 t_settings = '%gW (operating)' % op_w else: op_a = ((rdo >> 10) & 0x3ff) * 0.01 max_a = (rdo & 0x3ff) * 0.01 t_settings = '%gA (operating) / %gA (max)' % (op_a, max_a) t_flags = '' for f in sorted(RDO_FLAGS.keys(), reverse = True): if rdo & f: t_flags += ' [' + RDO_FLAGS[f] + ']' if pos in self.stored_pdos.keys(): t_pdo = '#%d: %s' % (pos, self.stored_pdos[pos]) else: t_pdo = '#%d' % (pos) return '(PDO %s) %s%s' % (t_pdo, t_settings, t_flags) def get_source_sink_cap(self, pdo, idx, source): t1 = (pdo >> 30) & 3 self.cap_mark[idx] = t1 flags = {} if t1 == 0: t_name = 'Fixed' if source: flags = { (1 << 29): 'dual_role_power', (1 << 28): 'suspend', (1 << 27): 'unconstrained', (1 << 26): 'comm_cap', (1 << 25): 'dual_role_data', (1 << 24): 'unchunked', } else: # Sink flags = { (1 << 29): 'dual_role_power', (1 << 28): 'high_capability', (1 << 27): 'unconstrained', (1 << 26): 'comm_cap', (1 << 25): 'dual_role_data', (0b01 << 23): 'fr_swap default power', (0b10 << 23): 'fr_swap 1.5 A', (0b11 << 23): 'fr_swap 3.0 A', } mv = ((pdo >> 10) & 0x3ff) * 0.05 ma = ((pdo >> 0) & 0x3ff) * 0.01 p = '%gV %gA (%gW)' % (mv, ma, mv*ma) self.stored_pdos[idx] = '%s %gV' % (t_name, mv) elif t1 == 1: t_name = 'Battery' flags = {} # No flags defined for Battery PDO in PD 3.0 spec minv = ((pdo >> 10) & 0x3ff) * 0.05 maxv = ((pdo >> 20) & 0x3ff) * 0.05 mw = ((pdo >> 0) & 0x3ff) * 0.25 p = '%g/%gV %gW' % (minv, maxv, mw) self.stored_pdos[idx] = '%s %g/%gV' % (t_name, minv, maxv) elif t1 == 2: t_name = 'Variable' flags = {} # No flags defined for Variable PDO in PD 3.0 spec minv = ((pdo >> 10) & 0x3ff) * 0.05 maxv = ((pdo >> 20) & 0x3ff) * 0.05 ma = ((pdo >> 0) & 0x3ff) * 0.01 p = '%g/%gV %gA' % (minv, maxv, ma) self.stored_pdos[idx] =
'%s %g/%gV' % (t_name, minv, maxv) elif t1 == 3: t2 = (pdo >> 28) & 3 if t2 == 0: t_name = 'Programmable|PPS' flags = { (1 << 29): 'power_limited', } minv = ((pdo >> 8) & 0xff) * 0.1 maxv = ((pdo >> 17) & 0xff) * 0.1 ma = ((pdo >> 0) & 0xff) * 0.05 p = '%g/%gV %gA' % (minv, maxv, ma) if (pdo >> 27) & 0x1: p += ' [limited]' self.stored_pdos[idx] = '%s %g/%gV' % (t_name, minv, maxv) else: t_name = 'Reserved APDO: '+bin(t2) p = '[raw: %s]' % (bin(pdo)) self.stored_pdos[idx] = '%s %s' % (t_name, p) t_flags = '' for f in sorted(flags.keys(), reverse = True): if pdo & f: t_flags += ' [' + flags[f] + ']' return '[%s] %s%s' % (t_name, p, t_flags) def get_vdm(self, idx, data): if idx == 0: # VDM header vid = data >> 16 struct = data & (1 << 15) txt = 'VDM' if struct: # Structured VDM cmd = data & 0x1f src = data & (1 << 5) ack = (data >> 6) & 3 pos = (data >> 8) & 7 ver = (data >> 13) & 3 txt = VDM_ACK[ack] + ' ' txt += VDM_CMDS[cmd] if cmd in VDM_CMDS else 'cmd?' txt += ' pos %d' % (pos) if pos else ' ' else: # Unstructured VDM txt = 'unstruct [%04x]' % (data & 0x7fff) txt += ' SVID:%04x' % (vid) else: # VDM payload txt = 'VDO:%08x' % (data) return txt def get_bist(self, idx, data): mode = data >> 28 counter = data & 0xffff mode_name = BIST_MODES[mode] if mode in BIST_MODES else 'INVALID' if mode == 2: mode_name = 'Counter[= %d]' % (counter) # TODO: Check all 0 bits are 0 / emit warnings. return 'mode %s' % (mode_name) if idx == 0 else 'invalid BRO' def putpayload(self, s0, s1, idx): t = self.head_type() txt = '['+str(idx+1)+'] ' if t == 2: txt += self.get_request(self.data[idx]) elif t == 1 or t == 4: txt += self.get_source_sink_cap(self.data[idx], idx+1, t==1) elif t == 15: txt += self.get_vdm(idx, self.data[idx]) elif t == 3: txt += self.get_bist(idx, self.data[idx]) self.putx(s0, s1, [11, [txt, txt]]) self.text += ' - ' + txt def puthead(self): ann_type = 9 if self.head_power_role() else 10 role = 'SRC' if self.head_power_role() else 'SNK' if self.head_data_role() != self.head_power_role(): role += '/DFP' if self.head_data_role() else '/UFP' t = self.head_type() if self.head_count() == 0: shortm = CTRL_TYPES[t] else: shortm = DATA_TYPES[t] if t in DATA_TYPES else 'DAT???' longm = '(r{:d}) {:s}[{:d}]: {:s}'.format(self.head_rev(), role, self.head_id(), shortm) self.putx(0, -1, [ann_type, [longm, shortm]]) self.text += longm def head_id(self): return (self.head >> 9) & 7 def head_power_role(self): return (self.head >> 8) & 1 def head_data_role(self): return (self.head >> 5) & 1 def head_rev(self): return ((self.head >> 6) & 3) + 1 def head_type(self): return self.head & 0xF def head_count(self): return (self.head >> 12) & 7 def putx(self, s0, s1, data): self.put(self.edges[s0], self.edges[s1], self.out_ann, data) def putwarn(self, longm, shortm): self.putx(0, -1, [8, [longm, shortm]]) def compute_crc32(self): bdata = struct.pack('<H'+'I'*len(self.data), self.head & 0xffff, *tuple([d & 0xffffffff for d in self.data])) return zlib.crc32(bdata) def rec_sym(self, i, sym): self.putx(i, i+5, [7, SYM_NAME[sym]]) def get_sym(self, i, rec=True): v = (self.bits[i] | (self.bits[i+1] << 1) | (self.bits[i+2] << 2) | (self.bits[i+3] << 3) | (self.bits[i+4] << 4)) sym = DEC4B5B[v] if rec: self.rec_sym(i, sym) return sym def get_short(self): i = self.idx # Check it's not a truncated packet. if len(self.bits) - i <= 20: self.putwarn('Truncated', '!') return 0x0BAD k = [self.get_sym(i), self.get_sym(i+5), self.get_sym(i+10), self.get_sym(i+15)] # TODO: Check bad symbols. 
val = k[0] | (k[1] << 4) | (k[2] << 8) | (k[3] << 12) self.idx += 20 return val def get_word(self): lo = self.get_short() hi = self.get_short() return lo | (hi << 16) def find_corrupted_sop(self, k): # Start of packet are valid even if they have only 3 correct symbols # out of 4. for seq in SOP_SEQUENCES: if [k[i] == seq[i] for i in range(len(k))].count(True) >= 3: return START_OF_PACKETS[seq] return None def scan_eop(self): for i in range(len(self.bits) - 19): k = (self.get_sym(i, rec=False), self.get_sym(i+5, rec=False), self.get_sym(i+10, rec=False), self.get_sym(i+15, rec=False)) sym = START_OF_PACKETS.get(k, None) if not sym: sym = self.find_corrupted_sop(k) # We have an interesting symbol sequence. if sym: # Annotate the preamble. self.putx(0, i, [1, ['Preamble', '...']]) # Annotate each symbol. self.rec_sym(i, k[0]) self.rec_sym(i+5, k[1]) self.rec_sym(i+10, k[2]) self.rec_sym(i+15, k[3]) if sym == 'Hard Reset': self.text += 'HRST' return -1 # Hard reset elif sym == 'Cable Reset': self.text += 'CRST' return -1 # Cable reset else: self.putx(i, i+20, [2, [sym, 'S']]) return i+20 self.putx(0, len(self.bits), [1, ['Junk???', 'XXX']]) self.text += 'Junk???' self.putwarn('No start of packet found', 'XXX') return -1 # No Start Of Packet def __init__(self): self.reset() def reset(self): self.samplerate = None self.idx = 0 self.packet_seq = 0 self.previous = 0 self.startsample = None self.bits = [] self.edges = [] self.bad = [] self.half_one = False self.start_one = 0 self.stored_pdos = {} self.cap_mark = [0, 0, 0, 0, 0, 0, 0, 0] def metadata(self, key, value): if key == srd.SRD_CONF_SAMPLERATE: self.samplerate = value # 0 is 2 UI, space larger than 1.5x 0 is definitely wrong. self.maxbit = self.us2samples(3 * UI_US) # Duration threshold between half 1 and 0. self.threshold = self.us2samples(THRESHOLD_US) def start(self): self.out_ann = self.register(srd.OUTPUT_ANN) self.out_binary = self.register(srd.OUTPUT_BINARY) self.out_bitrate = self.register( srd.OUTPUT_META, meta=(int, 'Bitrate', 'Bitrate during the packet') ) def us2samples(self, us): return int(us * self.samplerate / 1000000) def decode_packet(self): self.data = [] self.idx = 0 self.text = '' if len(self.edges) < 50: return # Not a real PD packet self.packet_seq += 1 tstamp = float(self.startsample) / self.samplerate self.text += '#%-4d (%8.6fms): ' % (self.packet_seq, tstamp*1000) self.idx = self.scan_eop() if self.idx < 0: # Full text trace of the issue. self.putx(0, self.idx, [12, [self.text, '...']]) return # No real packet: ABORT. 
# Packet header self.head = self.get_short() self.putx(self.idx-20, self.idx, [3, ['H:%04x' % (self.head), 'HD']]) self.puthead() # Decode data payload for i in range(self.head_count()): self.data.append(self.get_word()) self.putx(self.idx-40, self.idx, [4, ['[%d]%08x' % (i, self.data[i]), 'D%d' % (i)]]) self.putpayload(self.idx-40, self.idx, i) # CRC check self.crc = self.get_word() ccrc = self.compute_crc32() if self.crc != ccrc: self.putwarn('Bad CRC %08x != %08x' % (self.crc, ccrc), 'CRC!') self.putx(self.idx-40, self.idx, [5, ['CRC:%08x' % (self.crc), 'CRC']]) # End of Packet if len(self.bits) >= self.idx + 5 and self.get_sym(self.idx) == EOP: self.putx(self.idx, self.idx + 5, [6, ['EOP', 'E']]) self.idx += 5 else: self.putwarn('No EOP', 'EOP!') # Full text trace if self.options['fulltext'] == 'yes': self.putx(0, self.idx, [12, [self.text, '...']]) # Meta data for bitrate ss, es = self.edges[0], self.edges[-1] bitrate = self.samplerate*len(self.bits) / float(es - ss) self.put(es, ss, self.out_bitrate, int(bitrate)) # Raw binary data (BMC decoded) self.put(es, ss, self.out_binary, [0, bytes(self.bits)]) def decode(self): if not self.samplerate: raise SamplerateError('Cannot decode without samplerate.') while True: pins = self.wait([{0: 'e'}, {1: 'e'}, {'skip': int(self.samplerate/1e3)}]) # First sample of the packet, just record the start date. if not self.startsample: self.startsample = self.samplenum self.previous = self.samplenum continue diff = self.samplenum - self.previous # Large idle: use it as the end of packet. if diff > self.maxbit: # The last edge of the packet. self.edges.append(self.previous) # Export the packet. self.decode_packet() # Reset for next packet. self.startsample = self.samplenum self.bits = [] self.edges = [] self.bad = [] self.half_one = False self.start_one = 0 else: # Add the bit to the packet. is_zero = diff > self.threshold if is_zero and not self.half_one: self.bits.append(0) self.edges.append(self.previous) elif not is_zero and self.half_one: self.bits.append(1) self.edges.append(self.start_one) self.half_one = False elif not is_zero and not self.half_one: self.half_one = True self.start_one = self.previous else: # Invalid BMC sequence self.bad.append((self.start_one, self.previous)) # TODO: Try to recover. self.bits.append(0) self.edges.append(self.previous) self.half_one = False self.previous = self.samplenum
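Two numbers in the decoder above are worth making concrete: at the 600 kHz BMC rate, UI_US = 1000000/600000.0 is roughly 1.67 us, so THRESHOLD_US = (UI_US + 2 * UI_US) / 2 = 1.5 UI is roughly 2.5 us, the cutoff between a half-1 pulse and a 0. Symbol recovery itself is the LSB-first 4b5b lookup in get_sym(); a standalone sketch of that step, assuming the DEC4B5B table defined earlier in this file is in scope:

# Standalone sketch of the LSB-first 4b5b lookup done by get_sym(),
# assuming the DEC4B5B table from the top of this file is in scope.
def decode_4b5b(bits, i):
    # Pack bits[i..i+4] LSB-first into a 5-bit table index.
    v = (bits[i] | (bits[i+1] << 1) | (bits[i+2] << 2)
         | (bits[i+3] << 3) | (bits[i+4] << 4))
    return DEC4B5B[v]

# [0, 1, 1, 1, 1] packs to 0b11110, which the table maps to nibble 0x0.
assert decode_4b5b([0, 1, 1, 1, 1], 0) == 0x00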
gpl-3.0
-1,047,846,842,259,668,100
31.286834
96
0.468032
false
3.088769
false
false
false
chromium/chromium
tools/grit/grit/format/rc.py
7
18109
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. '''Support for formatting an RC file for compilation. ''' from __future__ import print_function import os import re from functools import partial import six from grit import util from grit.node import misc def Format(root, lang='en', output_dir='.'): from grit.node import empty, include, message, structure yield _FormatHeader(root, lang, output_dir) for item in root.ActiveDescendants(): if isinstance(item, empty.MessagesNode): # Write one STRINGTABLE per <messages> container. # This is hacky: it iterates over the children twice. yield 'STRINGTABLE\nBEGIN\n' for subitem in item.ActiveDescendants(): if isinstance(subitem, message.MessageNode): with subitem: yield FormatMessage(subitem, lang) yield 'END\n\n' elif isinstance(item, include.IncludeNode): with item: yield FormatInclude(item, lang, output_dir) elif isinstance(item, structure.StructureNode): with item: yield FormatStructure(item, lang, output_dir) ''' This dictionary defines the language charset pair lookup table, which is used for replacing the GRIT expand variables for language info in Product Version resource. The key is the language ISO country code, and the value is the language and character-set pair, which is a hexadecimal string consisting of the concatenation of the language and character-set identifiers. The first 4 digit of the value is the hex value of LCID, the remaining 4 digits is the hex value of character-set id(code page)of the language. LCID resource: http://msdn.microsoft.com/en-us/library/ms776294.aspx Codepage resource: http://www.science.co.il/language/locale-codes.asp We have defined three GRIT expand_variables to be used in the version resource file to set the language info. Here is an example how they should be used in the VS_VERSION_INFO section of the resource file to allow GRIT to localize the language info correctly according to product locale. VS_VERSION_INFO VERSIONINFO ... BEGIN BLOCK "StringFileInfo" BEGIN BLOCK "[GRITVERLANGCHARSETHEX]" BEGIN ... END END BLOCK "VarFileInfo" BEGIN VALUE "Translation", [GRITVERLANGID], [GRITVERCHARSETID] END END ''' _LANGUAGE_CHARSET_PAIR = { # Language neutral LCID, unicode(1200) code page. 'neutral' : '000004b0', # LANG_USER_DEFAULT LCID, unicode(1200) code page. 'userdefault' : '040004b0', 'ar' : '040104e8', 'fi' : '040b04e4', 'ko' : '041203b5', 'es' : '0c0a04e4', 'bg' : '040204e3', # No codepage for filipino, use unicode(1200). 'fil' : '046404e4', 'fr' : '040c04e4', 'lv' : '042604e9', 'sv' : '041d04e4', 'ca' : '040304e4', 'de' : '040704e4', 'lt' : '042704e9', # Do not use! This is only around for backwards # compatibility and will be removed - use fil instead 'tl' : '0c0004b0', 'zh-CN' : '080403a8', 'zh-TW' : '040403b6', 'zh-HK' : '0c0403b6', 'el' : '040804e5', 'no' : '001404e4', 'nb' : '041404e4', 'nn' : '081404e4', 'th' : '041e036a', 'he' : '040d04e7', 'iw' : '040d04e7', 'pl' : '041504e2', 'tr' : '041f04e6', 'hr' : '041a04e4', # No codepage for Hindi, use unicode(1200). 'hi' : '043904b0', 'pt-PT' : '081604e4', 'pt-BR' : '041604e4', 'uk' : '042204e3', 'cs' : '040504e2', 'hu' : '040e04e2', 'ro' : '041804e2', # No codepage for Urdu, use unicode(1200). 
'ur' : '042004b0', 'da' : '040604e4', 'is' : '040f04e4', 'ru' : '041904e3', 'vi' : '042a04ea', 'nl' : '041304e4', 'id' : '042104e4', 'sr' : '081a04e2', 'en-GB' : '0809040e', 'it' : '041004e4', 'sk' : '041b04e2', 'et' : '042504e9', 'ja' : '041103a4', 'sl' : '042404e2', 'en' : '040904b0', # LCID for Mexico; Windows does not support L.A. LCID. 'es-419' : '080a04e4', # No codepage for Bengali, use unicode(1200). 'bn' : '044504b0', 'fa' : '042904e8', # No codepage for Gujarati, use unicode(1200). 'gu' : '044704b0', # No codepage for Kannada, use unicode(1200). 'kn' : '044b04b0', # Malay (Malaysia) [ms-MY] 'ms' : '043e04e4', # No codepage for Malayalam, use unicode(1200). 'ml' : '044c04b0', # No codepage for Marathi, use unicode(1200). 'mr' : '044e04b0', # No codepage for Oriya , use unicode(1200). 'or' : '044804b0', # No codepage for Tamil, use unicode(1200). 'ta' : '044904b0', # No codepage for Telugu, use unicode(1200). 'te' : '044a04b0', # No codepage for Amharic, use unicode(1200). >= Vista. 'am' : '045e04b0', 'sw' : '044104e4', 'af' : '043604e4', 'eu' : '042d04e4', 'fr-CA' : '0c0c04e4', 'gl' : '045604e4', # No codepage for Zulu, use unicode(1200). 'zu' : '043504b0', # Pseudolocales 'ar-XB' : '040d04e7', 'en-XA' : '040904b0', } # Language ID resource: http://msdn.microsoft.com/en-us/library/ms776294.aspx # # There is no appropriate sublang for Spanish (Latin America) [es-419], so we # use Mexico. SUBLANG_DEFAULT would incorrectly map to Spain. Unlike other # Latin American countries, Mexican Spanish is supported by VERSIONINFO: # http://msdn.microsoft.com/en-us/library/aa381058.aspx _LANGUAGE_DIRECTIVE_PAIR = { 'neutral' : 'LANG_NEUTRAL, SUBLANG_NEUTRAL', 'userdefault' : 'LANG_NEUTRAL, SUBLANG_DEFAULT', 'ar' : 'LANG_ARABIC, SUBLANG_DEFAULT', 'fi' : 'LANG_FINNISH, SUBLANG_DEFAULT', 'ko' : 'LANG_KOREAN, SUBLANG_KOREAN', 'es' : 'LANG_SPANISH, SUBLANG_SPANISH_MODERN', 'bg' : 'LANG_BULGARIAN, SUBLANG_DEFAULT', # LANG_FILIPINO (100) not in VC 7 winnt.h. 'fil' : '100, SUBLANG_DEFAULT', 'fr' : 'LANG_FRENCH, SUBLANG_FRENCH', 'lv' : 'LANG_LATVIAN, SUBLANG_DEFAULT', 'sv' : 'LANG_SWEDISH, SUBLANG_SWEDISH', 'ca' : 'LANG_CATALAN, SUBLANG_DEFAULT', 'de' : 'LANG_GERMAN, SUBLANG_GERMAN', 'lt' : 'LANG_LITHUANIAN, SUBLANG_LITHUANIAN', # Do not use! See above. 
'tl' : 'LANG_NEUTRAL, SUBLANG_DEFAULT', 'zh-CN' : 'LANG_CHINESE, SUBLANG_CHINESE_SIMPLIFIED', 'zh-TW' : 'LANG_CHINESE, SUBLANG_CHINESE_TRADITIONAL', 'zh-HK' : 'LANG_CHINESE, SUBLANG_CHINESE_HONGKONG', 'el' : 'LANG_GREEK, SUBLANG_DEFAULT', 'no' : 'LANG_NORWEGIAN, SUBLANG_DEFAULT', 'nb' : 'LANG_NORWEGIAN, SUBLANG_NORWEGIAN_BOKMAL', 'nn' : 'LANG_NORWEGIAN, SUBLANG_NORWEGIAN_NYNORSK', 'th' : 'LANG_THAI, SUBLANG_DEFAULT', 'he' : 'LANG_HEBREW, SUBLANG_DEFAULT', 'iw' : 'LANG_HEBREW, SUBLANG_DEFAULT', 'pl' : 'LANG_POLISH, SUBLANG_DEFAULT', 'tr' : 'LANG_TURKISH, SUBLANG_DEFAULT', 'hr' : 'LANG_CROATIAN, SUBLANG_DEFAULT', 'hi' : 'LANG_HINDI, SUBLANG_DEFAULT', 'pt-PT' : 'LANG_PORTUGUESE, SUBLANG_PORTUGUESE', 'pt-BR' : 'LANG_PORTUGUESE, SUBLANG_DEFAULT', 'uk' : 'LANG_UKRAINIAN, SUBLANG_DEFAULT', 'cs' : 'LANG_CZECH, SUBLANG_DEFAULT', 'hu' : 'LANG_HUNGARIAN, SUBLANG_DEFAULT', 'ro' : 'LANG_ROMANIAN, SUBLANG_DEFAULT', 'ur' : 'LANG_URDU, SUBLANG_DEFAULT', 'da' : 'LANG_DANISH, SUBLANG_DEFAULT', 'is' : 'LANG_ICELANDIC, SUBLANG_DEFAULT', 'ru' : 'LANG_RUSSIAN, SUBLANG_DEFAULT', 'vi' : 'LANG_VIETNAMESE, SUBLANG_DEFAULT', 'nl' : 'LANG_DUTCH, SUBLANG_DEFAULT', 'id' : 'LANG_INDONESIAN, SUBLANG_DEFAULT', 'sr' : 'LANG_SERBIAN, SUBLANG_SERBIAN_LATIN', 'en-GB' : 'LANG_ENGLISH, SUBLANG_ENGLISH_UK', 'it' : 'LANG_ITALIAN, SUBLANG_DEFAULT', 'sk' : 'LANG_SLOVAK, SUBLANG_DEFAULT', 'et' : 'LANG_ESTONIAN, SUBLANG_DEFAULT', 'ja' : 'LANG_JAPANESE, SUBLANG_DEFAULT', 'sl' : 'LANG_SLOVENIAN, SUBLANG_DEFAULT', 'en' : 'LANG_ENGLISH, SUBLANG_ENGLISH_US', # No L.A. sublang exists. 'es-419' : 'LANG_SPANISH, SUBLANG_SPANISH_MEXICAN', 'bn' : 'LANG_BENGALI, SUBLANG_DEFAULT', 'fa' : 'LANG_PERSIAN, SUBLANG_DEFAULT', 'gu' : 'LANG_GUJARATI, SUBLANG_DEFAULT', 'kn' : 'LANG_KANNADA, SUBLANG_DEFAULT', 'ms' : 'LANG_MALAY, SUBLANG_DEFAULT', 'ml' : 'LANG_MALAYALAM, SUBLANG_DEFAULT', 'mr' : 'LANG_MARATHI, SUBLANG_DEFAULT', 'or' : 'LANG_ORIYA, SUBLANG_DEFAULT', 'ta' : 'LANG_TAMIL, SUBLANG_DEFAULT', 'te' : 'LANG_TELUGU, SUBLANG_DEFAULT', 'am' : 'LANG_AMHARIC, SUBLANG_DEFAULT', 'sw' : 'LANG_SWAHILI, SUBLANG_DEFAULT', 'af' : 'LANG_AFRIKAANS, SUBLANG_DEFAULT', 'eu' : 'LANG_BASQUE, SUBLANG_DEFAULT', 'fr-CA' : 'LANG_FRENCH, SUBLANG_FRENCH_CANADIAN', 'gl' : 'LANG_GALICIAN, SUBLANG_DEFAULT', 'zu' : 'LANG_ZULU, SUBLANG_DEFAULT', 'pa' : 'LANG_PUNJABI, SUBLANG_PUNJABI_INDIA', 'sa' : 'LANG_SANSKRIT, SUBLANG_SANSKRIT_INDIA', 'si' : 'LANG_SINHALESE, SUBLANG_SINHALESE_SRI_LANKA', 'ne' : 'LANG_NEPALI, SUBLANG_NEPALI_NEPAL', 'ti' : 'LANG_TIGRIGNA, SUBLANG_TIGRIGNA_ERITREA', # Pseudolocales 'ar-XB' : 'LANG_HEBREW, SUBLANG_DEFAULT', 'en-XA' : 'LANG_ENGLISH, SUBLANG_ENGLISH_US', } # A note on 'no-specific-language' in the following few functions: # Some build systems may wish to call GRIT to scan for dependencies in # a language-agnostic way, and can then specify this fake language as # the output context. It should never be used when output is actually # being generated. def GetLangCharsetPair(language): if language in _LANGUAGE_CHARSET_PAIR: return _LANGUAGE_CHARSET_PAIR[language] if language != 'no-specific-language': print('Warning:GetLangCharsetPair() found undefined language %s' % language) return '' def GetLangDirectivePair(language): if language in _LANGUAGE_DIRECTIVE_PAIR: return _LANGUAGE_DIRECTIVE_PAIR[language] # We don't check for 'no-specific-language' here because this # function should only get called when output is being formatted, # and at that point we would not want to get # 'no-specific-language' passed as the language. 
print('Warning:GetLangDirectivePair() found undefined language %s' % language) return 'unknown language: see tools/grit/format/rc.py' def GetLangIdHex(language): if language in _LANGUAGE_CHARSET_PAIR: langcharset = _LANGUAGE_CHARSET_PAIR[language] lang_id = '0x' + langcharset[0:4] return lang_id if language != 'no-specific-language': print('Warning:GetLangIdHex() found undefined language %s' % language) return '' def GetCharsetIdDecimal(language): if language in _LANGUAGE_CHARSET_PAIR: langcharset = _LANGUAGE_CHARSET_PAIR[language] charset_decimal = int(langcharset[4:], 16) return str(charset_decimal) if language != 'no-specific-language': print('Warning:GetCharsetIdDecimal() found undefined language %s' % language) return '' def GetUnifiedLangCode(language) : r = re.compile('([a-z]{1,2})_([a-z]{1,2})') if r.match(language) : underscore = language.find('_') return language[0:underscore] + '-' + language[underscore + 1:].upper() return language def RcSubstitutions(substituter, lang): '''Add language-based substitutions for Rc files to the substitutor.''' unified_lang_code = GetUnifiedLangCode(lang) substituter.AddSubstitutions({ 'GRITVERLANGCHARSETHEX': GetLangCharsetPair(unified_lang_code), 'GRITVERLANGID': GetLangIdHex(unified_lang_code), 'GRITVERCHARSETID': GetCharsetIdDecimal(unified_lang_code)}) def _FormatHeader(root, lang, output_dir): '''Returns the required preamble for RC files.''' assert isinstance(lang, six.string_types) assert isinstance(root, misc.GritNode) # Find the location of the resource header file, so that we can include # it. resource_header = 'resource.h' # fall back to this language_directive = '' for output in root.GetOutputFiles(): if output.attrs['type'] == 'rc_header': resource_header = os.path.abspath(output.GetOutputFilename()) resource_header = util.MakeRelativePath(output_dir, resource_header) if output.attrs['lang'] != lang: continue if output.attrs['language_section'] == '': # If no language_section is requested, no directive is added # (Used when the generated rc will be included from another rc # file that will have the appropriate language directive) language_directive = '' elif output.attrs['language_section'] == 'neutral': # If a neutral language section is requested (default), add a # neutral language directive language_directive = 'LANGUAGE LANG_NEUTRAL, SUBLANG_NEUTRAL' elif output.attrs['language_section'] == 'lang': language_directive = 'LANGUAGE %s' % GetLangDirectivePair(lang) resource_header = resource_header.replace('\\', '\\\\') return '''// This file is automatically generated by GRIT. Do not edit. 
#include "%s" #include <winresrc.h> #ifdef IDC_STATIC #undef IDC_STATIC #endif #define IDC_STATIC (-1) %s ''' % (resource_header, language_directive) # end _FormatHeader() function def FormatMessage(item, lang): '''Returns a single message of a string table.''' message = item.ws_at_start + item.Translate(lang) + item.ws_at_end # Escape quotation marks (RC format uses doubling-up message = message.replace('"', '""') # Replace linebreaks with a \n escape message = util.LINEBREAKS.sub(r'\\n', message) if hasattr(item.GetRoot(), 'GetSubstituter'): substituter = item.GetRoot().GetSubstituter() message = substituter.Substitute(message) name_attr = item.GetTextualIds()[0] return ' %-15s "%s"\n' % (name_attr, message) def _FormatSection(item, lang, output_dir): '''Writes out an .rc file section.''' assert isinstance(lang, six.string_types) from grit.node import structure assert isinstance(item, structure.StructureNode) if item.IsExcludedFromRc(): return '' text = item.gatherer.Translate( lang, skeleton_gatherer=item.GetSkeletonGatherer(), pseudo_if_not_available=item.PseudoIsAllowed(), fallback_to_english=item.ShouldFallbackToEnglish()) + '\n\n' # Replace the language expand_variables in version rc info. if item.ExpandVariables() and hasattr(item.GetRoot(), 'GetSubstituter'): substituter = item.GetRoot().GetSubstituter() text = substituter.Substitute(text) return text def FormatInclude(item, lang, output_dir, type=None, process_html=False): '''Formats an item that is included in an .rc file (e.g. an ICON). Args: item: an IncludeNode or StructureNode lang, output_dir: standard formatter parameters type: .rc file resource type, e.g. 'ICON' (ignored unless item is a StructureNode) process_html: False/True (ignored unless item is a StructureNode) ''' assert isinstance(lang, six.string_types) from grit.node import structure from grit.node import include assert isinstance(item, (structure.StructureNode, include.IncludeNode)) if isinstance(item, include.IncludeNode): type = item.attrs['type'].upper() process_html = item.attrs['flattenhtml'] == 'true' filename_only = item.attrs['filenameonly'] == 'true' relative_path = item.attrs['relativepath'] == 'true' else: assert (isinstance(item, structure.StructureNode) and item.attrs['type'] in ['admin_template', 'chrome_html', 'chrome_scaled_image', 'tr_html', 'txt']) filename_only = False relative_path = False # By default, we use relative pathnames to included resources so that # sharing the resulting .rc files is possible. # # The FileForLanguage() Function has the side effect of generating the file # if needed (e.g. if it is an HTML file include). file_for_lang = item.FileForLanguage(lang, output_dir) if file_for_lang is None: return '' filename = os.path.abspath(file_for_lang) if process_html: filename = item.Process(output_dir) elif filename_only: filename = os.path.basename(filename) elif relative_path: filename = util.MakeRelativePath(output_dir, filename) filename = filename.replace('\\', '\\\\') # escape for the RC format if isinstance(item, structure.StructureNode) and item.IsExcludedFromRc(): return '' name = item.attrs['name'] item_id = item.GetRoot().GetIdMap()[name] return '// ID: %d\n%-18s %-18s "%s"\n' % (item_id, name, type, filename) def _DoNotFormat(item, lang, output_dir): return '' # Formatter instance to use for each type attribute # when formatting Structure nodes. 
_STRUCTURE_FORMATTERS = { 'accelerators' : _FormatSection, 'dialog' : _FormatSection, 'menu' : _FormatSection, 'rcdata' : _FormatSection, 'version' : _FormatSection, 'admin_template' : partial(FormatInclude, type='ADM'), 'chrome_html' : partial(FormatInclude, type='BINDATA', process_html=True), 'chrome_scaled_image' : partial(FormatInclude, type='BINDATA'), 'tr_html' : partial(FormatInclude, type='HTML'), 'txt' : partial(FormatInclude, type='TXT'), 'policy_template_metafile': _DoNotFormat, } def FormatStructure(item, lang, output_dir): formatter = _STRUCTURE_FORMATTERS[item.attrs['type']] return formatter(item, lang, output_dir)
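Each _LANGUAGE_CHARSET_PAIR value above is simply a 4-hex-digit LCID concatenated with a 4-hex-digit code page, which GetLangIdHex() and GetCharsetIdDecimal() slice apart. A worked example for 'en' ('040904b0'):

# Worked example of splitting an 8-hex-digit language/charset pair,
# mirroring GetLangIdHex() and GetCharsetIdDecimal() above.
pair = '040904b0'                   # 'en': LCID 0x0409, Unicode code page
lang_id = '0x' + pair[0:4]          # -> '0x0409'
charset = str(int(pair[4:], 16))    # -> '1200'
print(lang_id, charset)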
bsd-3-clause
2,850,999,913,179,659,000
36.727083
81
0.628859
false
3.140108
false
false
false
rainier-m/python-soccer
EPL-teamNews.py
1
5001
# -*- coding: utf-8 -*- ''' Created on Jan 30, 2015 Modified on Jan 30, 2015 @author: [email protected] A simple Python Program to scrape the ESPN FC website for content. ''' ''' Version Number of Script ''' version = '0.01.a' # Import Libraries needed for Scraping the various web pages from bs4 import BeautifulSoup import urllib2 import datetime import requests import os import platform import sys reload(sys) sys.setdefaultencoding('utf-8') # Establish the process Date & Time Stamp ts = datetime.datetime.now().strftime("%H:%M:%S") ds = datetime.datetime.now().strftime("%Y-%m-%d") date = datetime.datetime.now().strftime("%Y%m%d") # Updates the Time Stamp def updateTS(): update = datetime.datetime.now().strftime("%H:%M:%S") return update # Download Image def downloadImage(imageURL, localFileName): response = requests.get(imageURL) if response.status_code == 200: print 'Downloading %s...' % (localFileName) with open(localFileName, 'wb') as fo: for chunk in response.iter_content(4096): fo.write(chunk) return True # Program Version & System Variables parseVersion = 'ESPN Premier League Team News ' + version print ds + ' :: ' + ts + ' :: ' + parseVersion # Set Output Path for Windows or Mac environments os_System = platform.system() win_BasePath = "C:/Users/Rainier/Documents/GitHub/python-soccer" if os_System == "Windows": outputPath = win_BasePath + "/PL-Data/" outputImgPath = win_BasePath + "/PL-Data/imgs/" outputTeamPath = win_BasePath + "/PL-Data/teams/" outputMatchPath = win_BasePath + "/PL-Data/match/" else: outputPath = 'PL-Data/' outputImgPath = 'PL-Data/imgs/' outputTeamPath = 'PL-Data/teams/' outputMatchPath = 'PL-Data/match/' hr = " >>> *** ====================================================== *** <<<" shr = " >>> *** ==================== *** <<<" prefixBBC = "http://www.bbc.com" prefixESPN = "http://www.espnfc.us" teamURLs = ['/club/arsenal/359/index','/club/aston-villa/362/index','/club/burnley/379/index','/club/chelsea/363/index','/club/crystal-palace/384/index','/club/everton/368/index','/club/hull-city/306/index','/club/leicester-city/375/index','/club/liverpool/364/index','/club/manchester-city/382/index','/club/manchester-united/360/index','/club/newcastle-united/361/index','/club/queens-park-rangers/334/index','/club/southampton/376/index','/club/stoke-city/336/index','/club/sunderland/366/index','/club/swansea-city/318/index','/club/tottenham-hotspur/367/index','/club/west-bromwich-albion/383/index','/club/west-ham-united/371/index'] def teamNews(x): teamURL = x teamName = x teamName = teamName[6:len(teamName)-10] teamURL = prefixESPN + teamURL teamHTML = urllib2.urlopen(teamURL) teamSoup = BeautifulSoup(teamHTML) recentNews = teamSoup.find("div", {"id":"feed"}) recentNewsItems = recentNews.find_all("div", {"class":"feed-item-content"}) recapOutput = [] print "Team News Parsed :: " + teamName for i in recentNewsItems: recapPhotoItem = i.find("div", {"class":"thumbnail picture"}) if len(i) > 3: # recapPhotoItem = recapPhotoItem.find("img") # print recapPhotoItem["src"] # with open(outputTxt, "a") as f: # f.write('\n' + shr + '\n') # f.write(i.prettify()) # f.write('\n' + shr + '\n') # f.close() # print shr recapHeadline = i.find("h2") recapHeadlineDetails = recapHeadline.find("a") recapHeadlineDetails = recapHeadlineDetails["href"] recapHeadline = recapHeadline.get_text(strip=True) recapAge = i.find("span", {"class":"age"}) recapAge = recapAge.get_text(strip=True) recapOutput.append(date + "|" + teamName + "|" + recapHeadline + "|" + recapHeadlineDetails + "|" + recapAge) 
#print shr # print i.prettify() #print recapHeadlineDetails #print shr #recapDetails = recapHeadline.find("a") #recapDetails = recapDetails["href"] #print recapDetails # print recapAge.get_text(strip=True) #print updateTS() #print shr # print i else: #print i #print shr recapGameOpponents = i.find_all("div", {"class":"team-name"}) recapGameScore = i.find_all("div", {"class":"team-score"}) recapGameStatus = i.find("div", {"class":"game-info"}) recapGameHome = recapGameOpponents[0].get_text(strip=True) recapGameAway = recapGameOpponents[1].get_text(strip=True) recapHomeScore = recapGameScore[0].get_text(strip=True) recapAwayScore = recapGameScore[1].get_text(strip=True) #recapGameInfo = i.find("div", {"clas=":"game-info"}) recapOutput.append(date + "|" + teamName + "|" + recapGameHome + " " + recapHomeScore + " v. " + recapAwayScore + " "+ recapGameAway + "||") # print i for i in recapOutput: print i print hr return recapOutput teamNewstxt = 'teamNews.txt' with open(teamNewstxt, "w") as f: f.write(ds + " :: " + updateTS() + " :: " + parseVersion + '\n' ) f.close() for i in teamURLs: for x in teamNews(i): with open(teamNewstxt, "a") as f: f.write(x + '\n') f.close()
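Each record the scraper appends to teamNews.txt is pipe-delimited: date|team|headline|link|age for news items, and date|team|scoreline|| for match results (the last two fields left empty). A small sketch of reading those records back, assuming that layout:

# Sketch of parsing the pipe-delimited records written above, assuming
# the date|team|headline|link|age layout (match results leave the last
# two fields empty).
with open('teamNews.txt') as f:
    next(f)  # skip the timestamp header line written first
    for line in f:
        fields = line.rstrip('\n').split('|')
        if len(fields) == 5:
            day, team, headline, link, age = fields
            print('%s: %s' % (team, headline))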
gpl-2.0
-1,380,015,014,240,211,200
34.721429
639
0.660868
false
2.849573
false
false
false
plq/spyne
spyne/protocol/_inbase.py
1
24684
# # spyne - Copyright (C) Spyne contributors. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 # from __future__ import print_function import logging logger = logging.getLogger(__name__) import re import pytz import uuid from math import modf from time import strptime, mktime from datetime import timedelta, time, datetime, date from decimal import Decimal as D, InvalidOperation from pytz import FixedOffset try: from lxml import etree from lxml import html except ImportError: etree = None html = None from spyne.protocol._base import ProtocolMixin from spyne.model import ModelBase, XmlAttribute, Array, Null, \ ByteArray, File, ComplexModelBase, AnyXml, AnyHtml, Unicode, String, \ Decimal, Double, Integer, Time, DateTime, Uuid, Date, Duration, Boolean, Any from spyne.error import ValidationError from spyne.model.binary import binary_decoding_handlers, BINARY_ENCODING_USE_DEFAULT from spyne.util import six from spyne.model.enum import EnumBase from spyne.model.primitive.datetime import TIME_PATTERN, DATE_PATTERN from spyne.util.cdict import cdict _date_re = re.compile(DATE_PATTERN) _time_re = re.compile(TIME_PATTERN) _duration_re = re.compile( r'(?P<sign>-?)' r'P' r'(?:(?P<years>\d+)Y)?' r'(?:(?P<months>\d+)M)?' r'(?:(?P<days>\d+)D)?' r'(?:T(?:(?P<hours>\d+)H)?' r'(?:(?P<minutes>\d+)M)?' r'(?:(?P<seconds>\d+(.\d+)?)S)?)?' ) class InProtocolBase(ProtocolMixin): """This is the abstract base class for all input protocol implementations. Child classes can implement only the required subset of the public methods. An output protocol must implement :func:`serialize` and :func:`create_out_string`. An input protocol must implement :func:`create_in_document`, :func:`decompose_incoming_envelope` and :func:`deserialize`. The ProtocolBase class supports the following events: * ``before_deserialize``: Called before the deserialization operation is attempted. * ``after_deserialize``: Called after the deserialization operation is finished. The arguments the constructor takes are as follows: :param app: The application this protocol belongs to. :param mime_type: The mime_type this protocol should set for transports that support this. This is a quick way to override the mime_type by default instead of subclassing the relevant protocol implementation.
""" def __init__(self, app=None, validator=None, mime_type=None, ignore_wrappers=False, binary_encoding=None, string_encoding=None): self.validator = None super(InProtocolBase, self).__init__(app=app, mime_type=mime_type, ignore_wrappers=ignore_wrappers, binary_encoding=binary_encoding, string_encoding=string_encoding) self.message = None self.validator = None self.set_validator(validator) if mime_type is not None: self.mime_type = mime_type fsh = { Any: self.any_from_bytes, Null: self.null_from_bytes, File: self.file_from_bytes, Array: self.array_from_bytes, Double: self.double_from_bytes, String: self.string_from_bytes, AnyXml: self.any_xml_from_bytes, Boolean: self.boolean_from_bytes, Integer: self.integer_from_bytes, Unicode: self.unicode_from_bytes, AnyHtml: self.any_html_from_bytes, ByteArray: self.byte_array_from_bytes, EnumBase: self.enum_base_from_bytes, ModelBase: self.model_base_from_bytes, XmlAttribute: self.xmlattribute_from_bytes, ComplexModelBase: self.complex_model_base_from_bytes } self._from_bytes_handlers = cdict(fsh) self._from_unicode_handlers = cdict(fsh) self._from_bytes_handlers[Date] = self.date_from_bytes self._from_bytes_handlers[Time] = self.time_from_bytes self._from_bytes_handlers[Uuid] = self.uuid_from_bytes self._from_bytes_handlers[Decimal] = self.decimal_from_bytes self._from_bytes_handlers[DateTime] = self.datetime_from_bytes self._from_bytes_handlers[Duration] = self.duration_from_bytes self._from_unicode_handlers[Date] = self.date_from_unicode self._from_unicode_handlers[Uuid] = self.uuid_from_unicode self._from_unicode_handlers[Time] = self.time_from_unicode self._from_unicode_handlers[Decimal] = self.decimal_from_unicode self._from_unicode_handlers[DateTime] = self.datetime_from_unicode self._from_unicode_handlers[Duration] = self.duration_from_unicode self._datetime_dsmap = { None: self._datetime_from_unicode, 'sec': self._datetime_from_sec, 'sec_float': self._datetime_from_sec_float, 'msec': self._datetime_from_msec, 'msec_float': self._datetime_from_msec_float, 'usec': self._datetime_from_usec, } def _datetime_from_sec(self, cls, value): try: return datetime.fromtimestamp(value) except TypeError: logger.error("Invalid value %r", value) raise def _datetime_from_sec_float(self, cls, value): try: return datetime.fromtimestamp(value) except TypeError: logger.error("Invalid value %r", value) raise def _datetime_from_msec(self, cls, value): try: return datetime.fromtimestamp(value // 1000) except TypeError: logger.error("Invalid value %r", value) raise def _datetime_from_msec_float(self, cls, value): try: return datetime.fromtimestamp(value / 1000) except TypeError: logger.error("Invalid value %r", value) raise def _datetime_from_usec(self, cls, value): try: return datetime.fromtimestamp(value / 1e6) except TypeError: logger.error("Invalid value %r", value) raise def create_in_document(self, ctx, in_string_encoding=None): """Uses ``ctx.in_string`` to set ``ctx.in_document``.""" def decompose_incoming_envelope(self, ctx, message): """Sets the ``ctx.method_request_string``, ``ctx.in_body_doc``, ``ctx.in_header_doc`` and ``ctx.service`` properties of the ctx object, if applicable. """ def deserialize(self, ctx, message): """Takes a MethodContext instance and a string containing ONE document instance in the ``ctx.in_string`` attribute. Returns the corresponding native python object in the ctx.in_object attribute. """ def validate_document(self, payload): """Method to be overriden to perform any sort of custom input validation on the parsed input document. 
""" def set_validator(self, validator): """You must override this function if you want your protocol to support validation.""" assert validator is None self.validator = None def from_bytes(self, class_, string, *args, **kwargs): if string is None: return None if isinstance(string, six.string_types) and \ len(string) == 0 and class_.Attributes.empty_is_none: return None handler = self._from_bytes_handlers[class_] return handler(class_, string, *args, **kwargs) def from_unicode(self, class_, string, *args, **kwargs): if string is None: return None #if not six.PY2: # assert isinstance(string, str), \ # "Invalid type passed to `from_unicode`: {}".format( # (class_, type(string), string)) cls_attrs = self.get_cls_attrs(class_) if isinstance(string, six.string_types) and len(string) == 0 and \ cls_attrs.empty_is_none: return None handler = self._from_unicode_handlers[class_] return handler(class_, string, *args, **kwargs) def null_from_bytes(self, cls, value): return None def any_from_bytes(self, cls, value): return value def any_xml_from_bytes(self, cls, string): try: return etree.fromstring(string) except etree.XMLSyntaxError as e: raise ValidationError(string, "%%r: %r" % e) def any_html_from_bytes(self, cls, string): try: return html.fromstring(string) except etree.ParserError as e: if e.args[0] == "Document is empty": pass else: raise def uuid_from_unicode(self, cls, string, suggested_encoding=None): attr = self.get_cls_attrs(cls) ser_as = attr.serialize_as encoding = attr.encoding if encoding is None: encoding = suggested_encoding retval = string if ser_as in ('bytes', 'bytes_le'): retval, = binary_decoding_handlers[encoding](string) try: retval = _uuid_deserialize[ser_as](retval) except (ValueError, TypeError, UnicodeDecodeError) as e: raise ValidationError(e) return retval def uuid_from_bytes(self, cls, string, suggested_encoding=None, **_): attr = self.get_cls_attrs(cls) ser_as = attr.serialize_as encoding = attr.encoding if encoding is None: encoding = suggested_encoding retval = string if ser_as in ('bytes', 'bytes_le'): retval, = binary_decoding_handlers[encoding](string) elif isinstance(string, six.binary_type): retval = string.decode('ascii') try: retval = _uuid_deserialize[ser_as](retval) except ValueError as e: raise ValidationError(e) return retval def unicode_from_bytes(self, cls, value): retval = value if isinstance(value, six.binary_type): cls_attrs = self.get_cls_attrs(cls) if cls_attrs.encoding is not None: retval = six.text_type(value, cls_attrs.encoding, errors=cls_attrs.unicode_errors) elif self.string_encoding is not None: retval = six.text_type(value, self.string_encoding, errors=cls_attrs.unicode_errors) else: retval = six.text_type(value, errors=cls_attrs.unicode_errors) return retval def string_from_bytes(self, cls, value): retval = value cls_attrs = self.get_cls_attrs(cls) if isinstance(value, six.text_type): if cls_attrs.encoding is None: raise Exception("You need to define a source encoding for " "decoding incoming unicode values.") else: retval = value.encode(cls_attrs.encoding) return retval def decimal_from_unicode(self, cls, string): cls_attrs = self.get_cls_attrs(cls) if cls_attrs.max_str_len is not None and len(string) > \ cls_attrs.max_str_len: raise ValidationError(string, "Decimal %%r longer than %d " "characters" % cls_attrs.max_str_len) try: return D(string) except InvalidOperation as e: raise ValidationError(string, "%%r: %r" % e) def decimal_from_bytes(self, cls, string): return self.decimal_from_unicode(cls, 
string.decode(self.default_string_encoding)) def double_from_bytes(self, cls, string): try: return float(string) except (TypeError, ValueError) as e: raise ValidationError(string, "%%r: %r" % e) def integer_from_bytes(self, cls, string): cls_attrs = self.get_cls_attrs(cls) if isinstance(string, (six.text_type, six.binary_type)) and \ cls_attrs.max_str_len is not None and \ len(string) > cls_attrs.max_str_len: raise ValidationError(string, "Integer %%r longer than %d characters" % cls_attrs.max_str_len) try: return int(string) except ValueError: raise ValidationError(string, "Could not cast %r to integer") def time_from_unicode(self, cls, string): """Expects ISO formatted times.""" match = _time_re.match(string) if match is None: raise ValidationError(string, "%%r does not match regex %r " % _time_re.pattern) fields = match.groupdict(0) microsec = fields.get('sec_frac') if microsec is None or microsec == 0: microsec = 0 else: microsec = min(999999, int(round(float(microsec) * 1e6))) return time(int(fields['hr']), int(fields['min']), int(fields['sec']), microsec) def time_from_bytes(self, cls, string): if isinstance(string, six.binary_type): string = string.decode(self.default_string_encoding) return self.time_from_unicode(cls, string) def date_from_unicode_iso(self, cls, string): """This is used by protocols like SOAP who need ISO8601-formatted dates no matter what. """ try: return date(*(strptime(string, u'%Y-%m-%d')[0:3])) except ValueError: match = cls._offset_re.match(string) if match: year = int(match.group('year')) month = int(match.group('month')) day = int(match.group('day')) return date(year, month, day) raise ValidationError(string) def enum_base_from_bytes(self, cls, value): if self.validator is self.SOFT_VALIDATION and not ( cls.validate_string(cls, value)): raise ValidationError(value) return getattr(cls, value) def model_base_from_bytes(self, cls, value): return cls.from_bytes(value) def datetime_from_unicode_iso(self, cls, string): astz = self.get_cls_attrs(cls).as_timezone match = cls._utc_re.match(string) if match: tz = pytz.utc retval = _parse_datetime_iso_match(match, tz=tz) if astz is not None: retval = retval.astimezone(astz) return retval if match is None: match = cls._offset_re.match(string) if match: tz_hr, tz_min = [int(match.group(x)) for x in ("tz_hr", "tz_min")] tz = FixedOffset(tz_hr * 60 + tz_min, {}) retval = _parse_datetime_iso_match(match, tz=tz) if astz is not None: retval = retval.astimezone(astz) return retval if match is None: match = cls._local_re.match(string) if match: retval = _parse_datetime_iso_match(match) if astz: retval = retval.replace(tzinfo=astz) return retval raise ValidationError(string) def datetime_from_unicode(self, cls, string): serialize_as = self.get_cls_attrs(cls).serialize_as return self._datetime_dsmap[serialize_as](cls, string) def datetime_from_bytes(self, cls, string): if isinstance(string, six.binary_type): string = string.decode(self.default_string_encoding) serialize_as = self.get_cls_attrs(cls).serialize_as return self._datetime_dsmap[serialize_as](cls, string) def date_from_bytes(self, cls, string): if isinstance(string, six.binary_type): string = string.decode(self.default_string_encoding) date_format = self._get_date_format(self.get_cls_attrs(cls)) try: if date_format is not None: dt = datetime.strptime(string, date_format) return date(dt.year, dt.month, dt.day) return self.date_from_unicode_iso(cls, string) except ValueError as e: match = cls._offset_re.match(string) if match: return date(int(match.group('year')), 
int(match.group('month')), int(match.group('day'))) else: raise ValidationError(string, "%%r: %s" % repr(e).replace("%", "%%")) def date_from_unicode(self, cls, string): date_format = self._get_date_format(self.get_cls_attrs(cls)) try: if date_format is not None: dt = datetime.strptime(string, date_format) return date(dt.year, dt.month, dt.day) return self.date_from_unicode_iso(cls, string) except ValueError as e: match = cls._offset_re.match(string) if match: return date(int(match.group('year')), int(match.group('month')), int(match.group('day'))) else: # the message from ValueError is quite nice already raise ValidationError(e.message, "%s") def duration_from_unicode(self, cls, string): duration = _duration_re.match(string).groupdict(0) if duration is None: raise ValidationError(string, "Time data '%%s' does not match regex '%s'" % (_duration_re.pattern,)) days = int(duration['days']) days += int(duration['months']) * 30 days += int(duration['years']) * 365 hours = int(duration['hours']) minutes = int(duration['minutes']) seconds = float(duration['seconds']) f, i = modf(seconds) seconds = i microseconds = int(1e6 * f) delta = timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds, microseconds=microseconds) if duration['sign'] == "-": delta *= -1 return delta def duration_from_bytes(self, cls, string): if isinstance(string, six.binary_type): string = string.decode(self.default_string_encoding) return self.duration_from_unicode(cls, string) def boolean_from_bytes(self, cls, string): return string.lower() in ('true', '1') def byte_array_from_bytes(self, cls, value, suggested_encoding=None): encoding = self.get_cls_attrs(cls).encoding if encoding is BINARY_ENCODING_USE_DEFAULT: encoding = suggested_encoding return binary_decoding_handlers[encoding](value) def file_from_bytes(self, cls, value, suggested_encoding=None): encoding = self.get_cls_attrs(cls).encoding if encoding is BINARY_ENCODING_USE_DEFAULT: encoding = suggested_encoding return File.Value(data=binary_decoding_handlers[encoding](value)) def complex_model_base_from_bytes(self, cls, string, **_): raise TypeError("Only primitives can be deserialized from string.") def array_from_bytes(self, cls, string, **_): if self.get_cls_attrs(cls).serialize_as != 'sd-list': raise TypeError("Only primitives can be deserialized from string.") # sd-list being space-delimited list. 
retval = [] inner_type, = cls._type_info.values() for s in string.split(): retval.append(self.from_bytes(inner_type, s)) return retval def xmlattribute_from_bytes(self, cls, value): return self.from_bytes(cls.type, value) def _datetime_from_unicode(self, cls, string): cls_attrs = self.get_cls_attrs(cls) # get parser parser = cls_attrs.parser # get date_format dt_format = cls_attrs.dt_format if dt_format is None: dt_format = cls_attrs.date_format if dt_format is None: dt_format = cls_attrs.out_format if dt_format is None: dt_format = cls_attrs.format # parse the string if parser is not None: retval = parser(self, cls, string) elif dt_format is not None: if six.PY2: # FIXME: perhaps it should encode to string's encoding instead # of utf8 all the time if isinstance(dt_format, six.text_type): dt_format = dt_format.encode('utf8') if isinstance(string, six.text_type): string = string.encode('utf8') retval = datetime.strptime(string, dt_format) astz = cls_attrs.as_timezone if astz: retval = retval.astimezone(cls_attrs.as_time_zone) else: retval = self.datetime_from_unicode_iso(cls, string) return retval _uuid_deserialize = { None: lambda s: uuid.UUID(s.decode('ascii') if isinstance(s, bytes) else s), 'hex': lambda s: uuid.UUID(hex=s), 'urn': lambda s: uuid.UUID(hex=s), 'bytes': lambda s: uuid.UUID(bytes=s), 'bytes_le': lambda s: uuid.UUID(bytes_le=s), 'fields': lambda s: uuid.UUID(fields=s), 'int': lambda s: uuid.UUID(int=s), ('int', int): lambda s: uuid.UUID(int=s), ('int', str): lambda s: uuid.UUID(int=int(s)), } if six.PY2: _uuid_deserialize[None] = lambda s: uuid.UUID(s) _uuid_deserialize[('int', long)] = _uuid_deserialize[('int', int)] def _parse_datetime_iso_match(date_match, tz=None): fields = date_match.groupdict() year = int(fields.get('year')) month = int(fields.get('month')) day = int(fields.get('day')) hour = int(fields.get('hr')) minute = int(fields.get('min')) second = int(fields.get('sec')) usecond = fields.get("sec_frac") if usecond is None: usecond = 0 else: # we only get the most significant 6 digits because that's what # datetime can handle. usecond = min(999999, int(round(float(usecond) * 1e6))) return datetime(year, month, day, hour, minute, second, usecond, tz) _dt_sec = lambda cls, val: \ int(mktime(val.timetuple())) _dt_sec_float = lambda cls, val: \ mktime(val.timetuple()) + (val.microsecond / 1e6) _dt_msec = lambda cls, val: \ int(mktime(val.timetuple())) * 1000 + (val.microsecond // 1000) _dt_msec_float = lambda cls, val: \ mktime(val.timetuple()) * 1000 + (val.microsecond / 1000.0) _dt_usec = lambda cls, val: \ int(mktime(val.timetuple())) * 1000000 + val.microsecond _datetime_smap = { 'sec': _dt_sec, 'secs': _dt_sec, 'second': _dt_sec, 'seconds': _dt_sec, 'sec_float': _dt_sec_float, 'secs_float': _dt_sec_float, 'second_float': _dt_sec_float, 'seconds_float': _dt_sec_float, 'msec': _dt_msec, 'msecs': _dt_msec, 'msecond': _dt_msec, 'mseconds': _dt_msec, 'millisecond': _dt_msec, 'milliseconds': _dt_msec, 'msec_float': _dt_msec_float, 'msecs_float': _dt_msec_float, 'msecond_float': _dt_msec_float, 'mseconds_float': _dt_msec_float, 'millisecond_float': _dt_msec_float, 'milliseconds_float': _dt_msec_float, 'usec': _dt_usec, 'usecs': _dt_usec, 'usecond': _dt_usec, 'useconds': _dt_usec, 'microsecond': _dt_usec, 'microseconds': _dt_usec, } def _file_to_iter(f): try: data = f.read(65536) while len(data) > 0: yield data data = f.read(65536) finally: f.close()
lgpl-2.1
6,236,613,823,759,580,000
33.37883
84
0.581672
false
3.993529
false
false
false
JerryYanWan/BigDL
spark/dl/src/test/resources/tf/models/vgg19.py
1
1391
# # Copyright 2016 The BigDL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import tensorflow as tf from nets import vgg from sys import argv from util import run_model def main(): """ You can also run these commands manually to generate the pb file 1. git clone https://github.com/tensorflow/models.git 2. export PYTHONPATH=Path_to_your_model_folder 3. python alexnet.py """ height, width = 224, 224 inputs = tf.Variable(tf.random_uniform((1, height, width, 3)), name='input') net, end_points = vgg.vgg_19(inputs, is_training = False) print("nodes in the graph") for n in end_points: print(n + " => " + str(end_points[n])) net_outputs = map(lambda x: tf.get_default_graph().get_tensor_by_name(x), argv[2].split(',')) run_model(net_outputs, argv[1], 'vgg_19', argv[3] == 'True') if __name__ == "__main__": main()
apache-2.0
155,971,322,021,515,800
34.666667
97
0.686556
false
3.530457
false
false
false
swarmer/autoscaler
autoscaler/server/scaling/algorithms/spline.py
1
1065
import math import scipy.interpolate from autoscaler.server.request_history import RequestHistory from autoscaler.server.scaling.utils import parse_interval class SplineScalingAlgorithm: def __init__(self, algorithm_config): self.interval_seconds = parse_interval( algorithm_config['interval'] ) self.requests_per_instance_interval = ( algorithm_config['requests_per_instance_interval'] ) def get_instance_count(self, request_history: RequestHistory): (interval1, interval2, interval3) = request_history.get_last_intervals( self.interval_seconds, 3 ) x_values = [1, 2, 3] y_values = [len(interval1), len(interval2), len(interval3)] interpolated_function = scipy.interpolate.InterpolatedUnivariateSpline( x_values, y_values, k=2, ) expected_request_count = interpolated_function(len(x_values) + 1) return max(1, math.ceil( expected_request_count / self.requests_per_instance_interval) )
mit
2,385,278,985,483,348,000
32.28125
79
0.656338
false
4.003759
false
false
false
SunyataZero/buddhist-well-being-prototype-4
bwb/practice_details.py
1
3624
import sched import threading import time from PyQt5 import QtCore from PyQt5 import QtWidgets import bwb.model ID_NOT_SET = -1 BUTTON_WIDTH_IT = 28 class PracticeCompositeWidget(QtWidgets.QWidget): time_of_day_state_changed_signal = QtCore.pyqtSignal() def __init__(self): super().__init__() self.id_it = ID_NOT_SET self.scheduler = sched.scheduler(time.time, time.sleep) vbox = QtWidgets.QVBoxLayout() self.setLayout(vbox) vbox.setAlignment(QtCore.Qt.AlignTop) # ..for details ### self.details_ll = QtWidgets.QLabel("-----") ### self.details_ll.setWordWrap(True) self.question_ll = QtWidgets.QLabel("<h4>Question</h4>") vbox.addWidget(self.question_ll) self.question_le = QtWidgets.QLineEdit() self.question_le.textChanged.connect(self.on_question_text_changed) vbox.addWidget(self.question_le) def on_time_of_day_statechanged(self, i_new_checked_state): self.update_db_time() def on_time_of_day_changed(self, i_qtime): self.update_db_time() def update_db_time(self): if self.id_it == ID_NOT_SET: return qtime = self.time_of_day_timeedit.time() if self.time_of_day_active_qcb.checkState() == QtCore.Qt.Checked: bwb.model.ReminderM.update_time_of_day(self.id_it, qtime.hour()) # Set a scheduled task practice = bwb.model.ReminderM.get(self.id_it) self.set_reminder(qtime.hour(), practice.title) else: model.ReminderM.update_time_of_day(self.id_it, model.TIME_NOT_SET) self.time_of_day_state_changed_signal.emit() def set_reminder(self, i_hour_it, i_practice_title_sg): self.schedule_thread = threading.Thread(target=self.background_function, args=(i_hour_it, i_practice_title_sg), daemon=True) self.schedule_thread.start() def background_function(self, i_hour_it, i_practice_title_sg): now = time.time() reminder_time_qdatetime = QtCore.QDateTime.currentDateTime() reminder_time_qdatetime.setTime(QtCore.QTime(i_hour_it, 50)) reminder_time_in_seconds_it = reminder_time_qdatetime.toMSecsSinceEpoch() / 1000 logging.debug("reminder_time_in_seconds_it = " + str(reminder_time_in_seconds_it)) self.scheduler.enterabs(reminder_time_in_seconds_it + 10, 1, self.popup_function, (i_practice_title_sg,)) # -Several events can be scheduled, (the enterabs function adds an event rather than replacing) self.scheduler.run() # blocking=False def popup_function(self, i_string): message_box = QtWidgets.QMessageBox.information(None, i_string, (i_string)) def on_question_text_changed(self): if self.id_it == ID_NOT_SET: return model.ReminderM.update_question_text( self.id_it, self.question_le.text().strip() ) def change_practice(self, i_practice_id_it): self.id_it = i_practice_id_it # storing the id locally self.update_gui() def update_gui(self): ###time_of_day_cb_was_blocked_bl = self.time_of_day_active_qcb.blockSignals(True) ###time_of_day_timeedit_was_blocked_bl = self.time_of_day_timeedit.blockSignals(True) practice = model.ReminderM.get(self.id_it) ##self.details_ll.setText(practice.description) self.question_le.setText(practice.question) ###self.time_of_day_active_qcb.blockSignals(time_of_day_cb_was_blocked_bl) ###self.time_of_day_timeedit.blockSignals(time_of_day_timeedit_was_blocked_bl)
gpl-3.0
638,437,109,849,312,600
37.147368
132
0.651214
false
3.238606
false
false
false
uw-it-aca/canvas-sis-provisioner
sis_provisioner/builders/__init__.py
1
4786
# Copyright 2021 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0 from sis_provisioner.models.course import Course from sis_provisioner.models.user import User from sis_provisioner.csv.data import Collector from sis_provisioner.csv.format import UserCSV, EnrollmentCSV from sis_provisioner.dao.user import ( valid_net_id, get_person_by_netid, get_person_by_gmail_id) from sis_provisioner.dao.course import ( get_section_by_id, get_registrations_by_section) from sis_provisioner.dao.canvas import ENROLLMENT_ACTIVE from sis_provisioner.exceptions import ( UserPolicyException, CoursePolicyException, InvalidLoginIdException) from restclients_core.exceptions import DataFailureException from logging import getLogger class Builder(object): def __init__(self, items=[]): self.data = Collector() self.queue_id = None self.invalid_users = {} self.items = items self.logger = getLogger(__name__) def _init_build(self, **kwargs): return def _process(self, item): raise NotImplementedError def _write(self): return self.data.write_files() def build(self, **kwargs): self._init_build(**kwargs) for item in self.items: self._process(item) return self._write() def add_user_data_for_person(self, person, force=False): """ Creates a line of user data for the passed person. If force is not true, the data will only be created if the person has not been provisioned. Returns True for valid users, False otherwise. """ if person.uwregid in self.invalid_users: return False try: valid_net_id(person.uwnetid) except UserPolicyException as err: self.invalid_users[person.uwregid] = True self.logger.info("Skip user {}: {}".format(person.uwregid, err)) return False if force is True: self.data.add(UserCSV(person)) else: user = User.objects.get_user(person) if user.provisioned_date is None: if (self.data.add(UserCSV(person)) and user.queue_id is None): user.queue_id = self.queue_id user.save() return True def add_teacher_enrollment_data(self, section, person, status='active'): """ Generates one teacher enrollment for the passed section and person. """ if self.add_user_data_for_person(person): self.data.add(EnrollmentCSV( section=section, instructor=person, status=status)) def add_student_enrollment_data(self, registration): """ Generates one student enrollment for the passed registration. """ if self.add_user_data_for_person(registration.person): self.data.add(EnrollmentCSV(registration=registration)) def add_group_enrollment_data(self, login_id, section_id, role, status): """ Generates one enrollment for the passed group member. """ try: person = get_person_by_netid(login_id) if self.add_user_data_for_person(person): self.data.add(EnrollmentCSV( section_id=section_id, person=person, role=role, status=status)) except InvalidLoginIdException: try: person = get_person_by_gmail_id(login_id) if status == ENROLLMENT_ACTIVE: self.data.add(UserCSV(person)) self.data.add(EnrollmentCSV( section_id=section_id, person=person, role=role, status=status)) except InvalidLoginIdException as ex: self.logger.info("Skip group member {}: {}".format( login_id, ex)) def add_registrations_by_section(self, section): try: for registration in get_registrations_by_section(section): self.add_student_enrollment_data(registration) except DataFailureException as ex: self.logger.info("Skip enrollments for section {}: {}".format( section.section_label(), ex)) def get_section_resource_by_id(self, section_id): """ Fetch the section resource for the passed section ID, and add to queue. 
""" try: section = get_section_by_id(section_id) Course.objects.add_to_queue(section, self.queue_id) return section except (ValueError, CoursePolicyException, DataFailureException) as ex: Course.objects.remove_from_queue(section_id, ex) self.logger.info("Skip section {}: {}".format(section_id, ex)) raise
apache-2.0
5,952,493,172,330,509,000
36.390625
79
0.614919
false
4.154514
false
false
false
pytorch/fairseq
fairseq/model_parallel/models/pipeline_parallel_transformer/model.py
1
33560
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import torch import torch.nn as nn import torch.nn.functional as F from fairseq import utils from fairseq.model_parallel.models.pipeline_parallel_transformer.layers import ( Embedding, TransformerDecoderEmbedding, TransformerDecoderLayer, TransformerDecoderOutputLayer, TransformerEncoderEmbedding, TransformerEncoderLayer, TransformerEncoderLayerNorm, ) from fairseq.models import ( BaseFairseqModel, FairseqDecoder, FairseqEncoder, register_model, register_model_architecture, ) from fairseq.models.fairseq_encoder import EncoderOut from fairseq.models.transformer import ( base_architecture, transformer_iwslt_de_en, transformer_wmt_en_de_big, ) from fairseq.modules import SinusoidalPositionalEmbedding logger = logging.getLogger(__name__) DEFAULT_MAX_SOURCE_POSITIONS = 1024 DEFAULT_MAX_TARGET_POSITIONS = 1024 TORCH_PIPE = False RPC_INIT = False def import_pipe(): global TORCH_PIPE global RPC_INIT try: from torch.distributed.pipeline.sync import Pipe # noqa global Pipe from torch.distributed.pipeline.sync.utils import partition_model global partition_model from torch.distributed import rpc import tempfile TORCH_PIPE = True # Initialize single process RPC agent since TORCH_PIPE requires # RRef. RRef depends on RPC being initialized and as a result we initialize # RPC with a single node. tmpfile = tempfile.NamedTemporaryFile() if not RPC_INIT: rpc.init_rpc( name="worker", rank=0, world_size=1, rpc_backend_options=rpc.TensorPipeRpcBackendOptions( init_method="file://{}".format(tmpfile.name), ) ) RPC_INIT = True logger.info('Using torch pipe') except ImportError: try: from fairscale.nn import Pipe # noqa logger.info('Using fairscale pipe') except ImportError: raise ImportError("Please install fairscale with: pip install fairscale") @register_model("pipeline_parallel_transformer") class PipelineParallelTransformerModel(BaseFairseqModel): def __init__(self, encoder, decoder, balance, devices, chunks, checkpoint): import_pipe() super().__init__() assert isinstance(encoder, FairseqEncoder) assert isinstance(decoder, FairseqDecoder) encoder_module_list = ( [encoder.embedding_layer] + list(encoder.encoder_layers) + [encoder.final_layer_norm] ) self.num_encoder_modules = len(encoder_module_list) decoder_module_list = ( [decoder.embedding_layer] + list(decoder.decoder_layers) + [decoder.decoder_output_layer] ) self.num_decoder_modules = len(decoder_module_list) module_list = encoder_module_list + decoder_module_list self.devices = devices if TORCH_PIPE: self.model = Pipe( partition_model(nn.Sequential(*module_list), balance, devices), chunks=chunks, checkpoint=checkpoint, ) else: self.model = Pipe( nn.Sequential(*module_list), balance=balance, devices=devices, chunks=chunks, checkpoint=checkpoint, ) self.encoder_max_positions = self.max_positions_helper( encoder.embedding_layer, "max_source_positions" ) self.decoder_max_positions = self.max_positions_helper( decoder.embedding_layer, "max_target_positions" ) self.adaptive_softmax = getattr(decoder, "adaptive_softmax", None) # Note: To be populated during inference self.encoder = None self.decoder = None def forward(self, src_tokens, src_lengths, prev_output_tokens): if self.training: input_lst = [src_tokens, src_lengths, prev_output_tokens] input = tuple(i.to(self.devices[0], non_blocking=True) for i in input_lst) if TORCH_PIPE: return self.model(input).local_value() 
else: return self.model(input) else: assert self.encoder is not None and self.decoder is not None, ( "encoder and decoder need to be initialized by " + "calling the `prepare_for_inference_()` method" ) encoder_output_tuple = self.encoder(input) return self.decoder(encoder_output_tuple) def prepare_for_inference_(self, cfg): if self.encoder is not None and self.decoder is not None: logger.info("Encoder and Decoder already initialized") return encoder_module_list = [] decoder_module_list = [] module_count = 0 for partition in self.model.partitions: for module in partition: if module_count < self.num_encoder_modules: encoder_module_list.append(module) else: decoder_module_list.append(module) module_count += 1 self.model = None self.encoder = TransformerEncoder(cfg.distributed_training, None, None, encoder_module_list) self.decoder = TransformerDecoder( cfg.distributed_training, None, None, decoder_module_list=decoder_module_list ) @staticmethod def add_args(parser): """Add model-specific arguments to the parser.""" # fmt: off parser.add_argument('--activation-fn', choices=utils.get_available_activation_fns(), help='activation function to use') parser.add_argument('--dropout', type=float, metavar='D', help='dropout probability') parser.add_argument('--attention-dropout', type=float, metavar='D', help='dropout probability for attention weights') parser.add_argument('--activation-dropout', '--relu-dropout', type=float, metavar='D', help='dropout probability after activation in FFN.') parser.add_argument('--encoder-embed-path', type=str, metavar='STR', help='path to pre-trained encoder embedding') parser.add_argument('--encoder-embed-dim', type=int, metavar='N', help='encoder embedding dimension') parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N', help='encoder embedding dimension for FFN') parser.add_argument('--encoder-layers', type=int, metavar='N', help='num encoder layers') parser.add_argument('--encoder-attention-heads', type=int, metavar='N', help='num encoder attention heads') parser.add_argument('--encoder-normalize-before', action='store_true', help='apply layernorm before each encoder block') parser.add_argument('--encoder-learned-pos', action='store_true', help='use learned positional embeddings in the encoder') parser.add_argument('--decoder-embed-path', type=str, metavar='STR', help='path to pre-trained decoder embedding') parser.add_argument('--decoder-embed-dim', type=int, metavar='N', help='decoder embedding dimension') parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N', help='decoder embedding dimension for FFN') parser.add_argument('--decoder-layers', type=int, metavar='N', help='num decoder layers') parser.add_argument('--decoder-attention-heads', type=int, metavar='N', help='num decoder attention heads') parser.add_argument('--decoder-learned-pos', action='store_true', help='use learned positional embeddings in the decoder') parser.add_argument('--decoder-normalize-before', action='store_true', help='apply layernorm before each decoder block') parser.add_argument('--share-decoder-input-output-embed', action='store_true', help='share decoder input and output embeddings') parser.add_argument('--share-all-embeddings', action='store_true', help='share encoder, decoder and output embeddings' ' (requires shared dictionary and embed dim)') parser.add_argument('--no-token-positional-embeddings', default=False, action='store_true', help='if set, disables positional embeddings (outside self attention)') 
parser.add_argument('--adaptive-softmax-cutoff', metavar='EXPR', help='comma separated list of adaptive softmax cutoff points. ' 'Must be used with adaptive_loss criterion'), parser.add_argument('--adaptive-softmax-dropout', type=float, metavar='D', help='sets adaptive softmax dropout for the tail projections') parser.add_argument('--num-embedding-chunks', type=int, metavar='N', default=1, help='Number of embedding layer chunks (enables more even distribution' 'of optimizer states across data parallel nodes' 'when using optimizer state sharding and' 'a big embedding vocabulary)') # fmt: on @classmethod def build_model_base(cls, args, task): """Build a new model instance.""" # make sure all arguments are present in older models base_architecture(args) if not hasattr(args, "max_source_positions"): args.max_source_positions = DEFAULT_MAX_SOURCE_POSITIONS if not hasattr(args, "max_target_positions"): args.max_target_positions = DEFAULT_MAX_TARGET_POSITIONS src_dict, tgt_dict = task.source_dictionary, task.target_dictionary def build_embedding(dictionary, embed_dim, path=None, num_embed_chunks=1): assert embed_dim % num_embed_chunks == 0, ( f"Number of embedding chunks = {num_embed_chunks} should be " + f"divisible by the embedding dimension = {embed_dim}" ) assert path is None or num_embed_chunks == 1, ( "Loading embedding from a path with number of embedding chunks > 1" + " is not yet supported" ) num_embeddings = len(dictionary) padding_idx = dictionary.pad() # if provided, load from preloaded dictionaries if path: emb = Embedding(num_embeddings, embed_dim, padding_idx) embed_dict = utils.parse_embedding(path) utils.load_embedding(embed_dict, dictionary, emb) else: embed_chunk_dim = embed_dim // num_embed_chunks emb = nn.ModuleList() for i in range(num_embed_chunks): emb.append(Embedding(num_embeddings, embed_chunk_dim, padding_idx)) return emb num_embed_chunks = args.num_embedding_chunks if args.share_all_embeddings: if src_dict != tgt_dict: raise ValueError("--share-all-embeddings requires a joined dictionary") if args.encoder_embed_dim != args.decoder_embed_dim: raise ValueError( "--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim" ) if args.decoder_embed_path and ( args.decoder_embed_path != args.encoder_embed_path ): raise ValueError( "--share-all-embeddings not compatible with --decoder-embed-path" ) encoder_embed_tokens = build_embedding( src_dict, args.encoder_embed_dim, args.encoder_embed_path, num_embed_chunks, ) decoder_embed_tokens = encoder_embed_tokens args.share_decoder_input_output_embed = True else: assert args.share_decoder_input_output_embed or num_embed_chunks == 1, ( "Not sharing decoder I/O embeddings is not yet supported with number of " + "embedding chunks > 1" ) encoder_embed_tokens = build_embedding( src_dict, args.encoder_embed_dim, args.encoder_embed_path, num_embed_chunks, ) decoder_embed_tokens = build_embedding( tgt_dict, args.decoder_embed_dim, args.decoder_embed_path, num_embed_chunks, ) encoder = cls.build_encoder(args, src_dict, encoder_embed_tokens) decoder = cls.build_decoder(args, tgt_dict, decoder_embed_tokens) return (encoder, decoder) @classmethod def build_encoder(cls, args, src_dict, embed_tokens): return TransformerEncoder(args, src_dict, embed_tokens) @classmethod def build_decoder(cls, args, tgt_dict, embed_tokens): return TransformerDecoder(args, tgt_dict, embed_tokens) @classmethod def build_model(cls, args, task): encoder, decoder = cls.build_model_base(args, task) return PipelineParallelTransformerModel( 
encoder=encoder, decoder=decoder, balance=utils.eval_str_list(args.pipeline_balance, type=int), devices=utils.eval_str_list(args.pipeline_devices, type=int), chunks=args.pipeline_chunks, checkpoint=args.pipeline_checkpoint, ) def output_layer(self, features, **kwargs): """Project features to the default output size (typically vocabulary size).""" return self.decoder.output_layer(features, **kwargs) def max_positions(self): """Maximum length supported by the model.""" return (self.encoder_max_positions, self.decoder_max_positions) def max_positions_helper( self, embedding_layer, max_positions_field="max_source_positions" ): """Maximum input length supported by the encoder or decoder.""" if embedding_layer.embed_positions is None: return getattr(embedding_layer, max_positions_field) return min( getattr(embedding_layer, max_positions_field), embedding_layer.embed_positions.max_positions, ) def get_normalized_probs(self, net_output, log_probs, sample=None): """Get normalized probabilities (or log probs) from a net's output.""" if hasattr(self, "adaptive_softmax") and self.adaptive_softmax is not None: if sample is not None: assert "target" in sample target = sample["target"] else: target = None out = self.adaptive_softmax.get_log_prob(net_output, target=target) return out.exp_() if not log_probs else out # A Pipe() module returns a tuple of tensors as the output. # In this case, the tuple has one element - the output tensor of logits logits = net_output if isinstance(net_output, torch.Tensor) else net_output[0] if log_probs: return utils.log_softmax(logits, dim=-1, onnx_trace=False) else: return utils.softmax(logits, dim=-1, onnx_trace=False) def max_decoder_positions(self): """Maximum length supported by the decoder.""" return self.decoder_max_positions def load_state_dict(self, state_dict, strict=True, model_cfg=None): """Copies parameters and buffers from *state_dict* into this module and its descendants. Overrides the method in :class:`nn.Module`. Compared with that method this additionally "upgrades" *state_dicts* from old checkpoints. 
""" self.upgrade_state_dict(state_dict) is_regular_transformer = not any("model.partitions" in k for k in state_dict) if is_regular_transformer: state_dict = self.convert_to_pipeline_parallel_state_dict(state_dict) return super().load_state_dict(state_dict, strict) def convert_to_pipeline_parallel_state_dict(self, state_dict): new_state_dict = self.state_dict() encoder_layer_idx = 0 decoder_layer_idx = 0 encoder_key_suffixes = [ "self_attn.k_proj.weight", "self_attn.k_proj.bias", "self_attn.v_proj.weight", "self_attn.v_proj.bias", "self_attn.q_proj.weight", "self_attn.q_proj.bias", "self_attn.out_proj.weight", "self_attn.out_proj.bias", "self_attn_layer_norm.weight", "self_attn_layer_norm.bias", "fc1.weight", "fc1.bias", "fc2.weight", "fc2.bias", "final_layer_norm.weight", "final_layer_norm.bias", ] decoder_key_suffixes = [ "self_attn.k_proj.weight", "self_attn.k_proj.bias", "self_attn.v_proj.weight", "self_attn.v_proj.bias", "self_attn.q_proj.weight", "self_attn.q_proj.bias", "self_attn.out_proj.weight", "self_attn.out_proj.bias", "self_attn_layer_norm.weight", "self_attn_layer_norm.bias", "encoder_attn.k_proj.weight", "encoder_attn.k_proj.bias", "encoder_attn.v_proj.weight", "encoder_attn.v_proj.bias", "encoder_attn.q_proj.weight", "encoder_attn.q_proj.bias", "encoder_attn.out_proj.weight", "encoder_attn.out_proj.bias", "encoder_attn_layer_norm.weight", "encoder_attn_layer_norm.bias", "fc1.weight", "fc1.bias", "fc2.weight", "fc2.bias", "final_layer_norm.weight", "final_layer_norm.bias", ] for pid, partition in enumerate(self.model.partitions): logger.info(f"Begin Partition {pid}") for mid, module in enumerate(partition): # fmt: off if isinstance(module, TransformerEncoderEmbedding): new_state_dict[f'model.partitions.{pid}.{mid}.embed_tokens.weight'] = state_dict['encoder.embed_tokens.weight'] new_state_dict[f'model.partitions.{pid}.{mid}.embed_positions._float_tensor'] = state_dict['encoder.embed_positions._float_tensor'] if isinstance(module, TransformerEncoderLayer): for suffix in encoder_key_suffixes: new_state_dict[f'model.partitions.{pid}.{mid}.{suffix}'] = state_dict[f'encoder.layers.{encoder_layer_idx}.{suffix}'] encoder_layer_idx += 1 if isinstance(module, TransformerDecoderLayer): for suffix in decoder_key_suffixes: new_state_dict[f'model.partitions.{pid}.{mid}.{suffix}'] = state_dict[f'decoder.layers.{decoder_layer_idx}.{suffix}'] decoder_layer_idx += 1 if isinstance(module, TransformerEncoderLayerNorm): if 'encoder.layer_norm.weight' in state_dict: new_state_dict[f'model.partitions.{pid}.{mid}.layer_norm.weight'] = state_dict['encoder.layer_norm.weight'] new_state_dict[f'model.partitions.{pid}.{mid}.layer_norm.bias'] = state_dict['encoder.layer_norm.bias'] if isinstance(module, TransformerDecoderEmbedding): new_state_dict[f'model.partitions.{pid}.{mid}.embed_tokens.weight'] = state_dict['decoder.embed_tokens.weight'] new_state_dict[f'model.partitions.{pid}.{mid}.embed_positions._float_tensor'] = state_dict['decoder.embed_positions._float_tensor'] if isinstance(module, TransformerDecoderOutputLayer): new_state_dict[f'model.partitions.{pid}.{mid}.output_projection.weight'] = state_dict['decoder.output_projection.weight'] # fmt: on return new_state_dict class TransformerEncoder(FairseqEncoder): """ Transformer encoder consisting of *args.encoder_layers* layers. Each layer is a :class:`TransformerEncoderLayer`. 
Args: args (argparse.Namespace): parsed command-line arguments dictionary (~fairseq.data.Dictionary): encoding dictionary embed_tokens (torch.nn.Embedding): input embedding """ def __init__(self, args, dictionary, embed_tokens, encoder_module_list=None): super().__init__(dictionary) self.register_buffer("version", torch.Tensor([3])) import_pipe() self.use_pipeline = encoder_module_list is not None if not self.use_pipeline: self.embedding_layer = TransformerEncoderEmbedding(args, embed_tokens) self.encoder_layers = nn.Sequential(*[TransformerEncoderLayer(args) for i in range(args.encoder_layers)]) if isinstance(embed_tokens, nn.ModuleList): emb_dim = sum(e.embedding_dim for e in embed_tokens) else: emb_dim = embed_tokens.embedding_dim self.final_layer_norm = TransformerEncoderLayerNorm(args, emb_dim) else: encoder_balance = utils.eval_str_list( args.pipeline_encoder_balance, type=int ) encoder_devices = utils.eval_str_list( args.pipeline_encoder_devices, type=int ) assert sum(encoder_balance) == len(encoder_module_list), ( f"Sum of encoder_balance={encoder_balance} is not equal " + f"to num_encoder_modules={len(encoder_module_list)}" ) if TORCH_PIPE: self.model = Pipe( module=partition_model(nn.Sequential(*encoder_module_list), encoder_balance, encoder_devices), chunks=args.pipeline_chunks, checkpoint=args.pipeline_checkpoint, ) else: self.model = Pipe( module=nn.Sequential(*encoder_module_list), balance=encoder_balance, devices=encoder_devices, chunks=args.pipeline_chunks, checkpoint=args.pipeline_checkpoint, ) def forward(self, src_tokens, src_lengths): """ Args: input_tuple( src_tokens (LongTensor): tokens in the source language of shape `(batch, src_len)` src_lengths (torch.LongTensor): lengths of each source sentence of shape `(batch)` ) Returns: output_tuple( - **encoder_out** (Tensor): the last encoder layer's output of shape `(src_len, batch, embed_dim)` - **encoder_padding_mask** (ByteTensor): the positions of padding elements of shape `(batch, src_len)` - prev_output_tokens - **encoder_states** (List[Tensor]): all intermediate hidden states of shape `(src_len, batch, embed_dim)`. Only populated if *return_all_hiddens* is True. ) """ dummy_prev_output_tokens = torch.zeros( 1, dtype=src_tokens.dtype, device=src_tokens.device ) input_tuple = (src_tokens, src_lengths, dummy_prev_output_tokens) if self.use_pipeline: input_tuple = tuple(i.to(self.model.devices[0]) for i in input_tuple) if TORCH_PIPE: encoder_out = self.model(input_tuple).local_value() else: encoder_out = self.model(input_tuple) else: encoder_embed_output_tuple = self.embedding_layer(input_tuple) encoder_layers_output = self.encoder_layers(encoder_embed_output_tuple) encoder_out = self.final_layer_norm(encoder_layers_output) # first element is the encoder output # second element is the encoder padding mask # the remaining elements of EncoderOut are not computed by # the PipelineParallelTransformer return EncoderOut(encoder_out[0], encoder_out[1], None, None, None, None) def reorder_encoder_out(self, encoder_out, new_order): """ Reorder encoder output according to *new_order*. 
Args: encoder_out: output from the ``forward()`` method new_order (LongTensor): desired order Returns: *encoder_out* rearranged according to *new_order* """ if encoder_out.encoder_out is not None: encoder_out = encoder_out._replace( encoder_out=encoder_out.encoder_out.index_select(1, new_order) ) if encoder_out.encoder_padding_mask is not None: encoder_out = encoder_out._replace( encoder_padding_mask=encoder_out.encoder_padding_mask.index_select( 0, new_order ) ) if encoder_out.encoder_embedding is not None: encoder_out = encoder_out._replace( encoder_embedding=encoder_out.encoder_embedding.index_select( 0, new_order ) ) if encoder_out.encoder_states is not None: for idx, state in enumerate(encoder_out.encoder_states): encoder_out.encoder_states[idx] = state.index_select(1, new_order) return encoder_out def max_positions(self): """Maximum input length supported by the encoder.""" if self.embedding_layer.embed_positions is None: return self.embedding_layer.max_source_positions return min( self.embedding_layer.max_source_positions, self.embedding_layer.embed_positions.max_positions, ) class TransformerDecoder(FairseqDecoder): """ Transformer decoder consisting of *args.decoder_layers* layers. Each layer is a :class:`TransformerDecoderLayer`. Args: args (argparse.Namespace): parsed command-line arguments dictionary (~fairseq.data.Dictionary): decoding dictionary embed_tokens (torch.nn.Embedding): output embedding no_encoder_attn (bool, optional): whether to attend to encoder outputs (default: False). """ def __init__( self, args, dictionary, embed_tokens, no_encoder_attn=False, decoder_module_list=None, ): super().__init__(dictionary) self.register_buffer("version", torch.Tensor([3])) import_pipe() self.use_pipeline = decoder_module_list is not None if not self.use_pipeline: self.embedding_layer = TransformerDecoderEmbedding(args, embed_tokens) self.decoder_layers = nn.Sequential(*[ TransformerDecoderLayer(args, no_encoder_attn) for _ in range(args.decoder_layers) ]) self.decoder_output_layer = TransformerDecoderOutputLayer( args, embed_tokens, dictionary ) else: decoder_balance = utils.eval_str_list( args.pipeline_decoder_balance, type=int ) decoder_devices = utils.eval_str_list( args.pipeline_decoder_devices, type=int ) assert sum(decoder_balance) == len(decoder_module_list), ( f"Sum of decoder_balance={decoder_balance} is not equal " + f"to num_decoder_modules={len(decoder_module_list)}" ) if TORCH_PIPE: self.model = Pipe( module=partition_model(nn.Sequential(*decoder_module_list), decoder_balance, decoder_devices), chunks=args.pipeline_chunks, checkpoint=args.pipeline_checkpoint, ) else: self.model = Pipe( module=nn.Sequential(*decoder_module_list), balance=decoder_balance, devices=decoder_devices, chunks=args.pipeline_chunks, checkpoint=args.pipeline_checkpoint, ) def forward( self, prev_output_tokens, encoder_out=None, ): """ Args: prev_output_tokens (LongTensor): previous decoder outputs of shape `(batch, tgt_len)`, for teacher forcing encoder_out (optional): output from the encoder, used for encoder-side attention incremental_state (dict): dictionary used for storing state during :ref:`Incremental decoding` features_only (bool, optional): only return features without applying output layer (default: False). 
Returns: tuple: - the decoder's output of shape `(batch, tgt_len, vocab)` - a dictionary with any model-specific outputs """ input_tuple = ( encoder_out.encoder_out, encoder_out.encoder_padding_mask, prev_output_tokens, ) if self.use_pipeline: input_tuple = tuple(i.to(self.model.devices[0]) for i in input_tuple) if TORCH_PIPE: return (self.model(input_tuple).local_value(),) else: return (self.model(input_tuple),) else: embed_layer_output = self.embedding_layer(input_tuple) state = self.decoder_layers(embed_layer_output) return (self.decoder_output_layer(state),) def output_layer(self, features, **kwargs): """Project features to the vocabulary size.""" if self.adaptive_softmax is None: # project back to size of vocabulary if self.share_input_output_embed: return F.linear(features, self.embed_tokens.weight) else: return F.linear(features, self.embed_out) else: return features def max_positions(self): """Maximum output length supported by the decoder.""" if self.embedding_layer.embed_positions is None: return self.embedding_layer.max_target_positions return min( self.embedding_layer.max_target_positions, self.embedding_layer.embed_positions.max_positions, ) def buffered_future_mask(self, tensor): dim = tensor.size(0) if ( not hasattr(self, "_future_mask") or self._future_mask is None or self._future_mask.device != tensor.device or self._future_mask.size(0) < dim ): self._future_mask = torch.triu( utils.fill_with_neg_inf(tensor.new(dim, dim)), 1 ) return self._future_mask[:dim, :dim] def upgrade_state_dict_named(self, state_dict, name): """Upgrade a (possibly old) state dict for new versions of fairseq.""" if isinstance(self.embed_positions, SinusoidalPositionalEmbedding): weights_key = "{}.embed_positions.weights".format(name) if weights_key in state_dict: del state_dict[weights_key] state_dict[ "{}.embed_positions._float_tensor".format(name) ] = torch.FloatTensor(1) for i in range(len(self.layers)): # update layer norms layer_norm_map = { "0": "self_attn_layer_norm", "1": "encoder_attn_layer_norm", "2": "final_layer_norm", } for old, new in layer_norm_map.items(): for m in ("weight", "bias"): k = "{}.layers.{}.layer_norms.{}.{}".format(name, i, old, m) if k in state_dict: state_dict[ "{}.layers.{}.{}.{}".format(name, i, new, m) ] = state_dict[k] del state_dict[k] version_key = "{}.version".format(name) if utils.item(state_dict.get(version_key, torch.Tensor([1]))[0]) <= 2: # earlier checkpoints did not normalize after the stack of layers self.layer_norm = None self.normalize = False state_dict[version_key] = torch.Tensor([1]) return state_dict @register_model_architecture( "pipeline_parallel_transformer", "transformer_iwslt_de_en_pipeline_parallel" ) def transformer_iwslt_de_en_dist(args): transformer_iwslt_de_en(args) @register_model_architecture( "pipeline_parallel_transformer", "transformer_wmt_en_de_big_pipeline_parallel" ) def transformer_wmt_en_de_big_dist(args): transformer_wmt_en_de_big(args)
mit
-946,977,085,584,911,400
42.754889
151
0.57947
false
4.349404
false
false
false
6112/project-euler
problems/051.py
1
2678
#encoding=utf-8 ## SOLVED 2014/04/18 ## 121313 # By replacing the 1st digit of the 2-digit number *3, it turns out that six of # the nine possible values: 13, 23, 43, 53, 73, and 83, are all prime. # By replacing the 3rd and 4th digits of 56**3 with the same digit, this # 5-digit number is the first example having seven primes among the ten # generated numbers, yielding the family: 56003, 56113, 56333, 56443, 56663, # 56773, and 56993. Consequently 56003, being the first member of this family, # is the smallest prime with this property. # Find the smallest prime which, by replacing part of the number (not # necessarily adjacent digits) with the same digit, is part of an eight prime # value family. import helpers.prime as prime # number of replacements of digits that have to work FAMILY_SIZE = 8 def euler(): # for each "starting" prime number for prime_number in prime.primes(200000): # list of integers for each digit prime_number_digits = list(int(digit) for digit in str(prime_number)) # set (without duplicates) of the digits in the prime number prime_number_digit_set = set(prime_number_digits) # for each digit that could be replaced in the prime number for base_digit in prime_number_digit_set: # number of digit replacements that are actual prime numbers prime_count = 0 # never replace the first digit with a zero replacements = range(10) if prime_number_digits[0] != base_digit \ else range(1, 10) # for each possible digit replacement for replacement_digit in replacements: # replace the digit base_digit with replacement_digit modified_digits = replace(prime_number_digits, base_digit, replacement_digit) # convert that list to a number modified_number = int(''.join(str(digit) \ for digit in modified_digits)) # if it's a prime, increment the prime count (duh) if prime.is_prime(modified_number): prime_count += 1 # return if the answer if we found it if prime_count == FAMILY_SIZE: return prime_number def replace(xs, base, replacement): """Replaces every 'base' in 'xs' with 'replacement'. Non destructive. Args: xs: Initial list of elements. base: Element to be replaced in the new list. replacement: Element to replace that value with. Returns: A new list with the replacement applied.""" return [x if x != base else replacement for x in xs]
mit
-6,838,346,259,325,526,000
42.901639
79
0.641897
false
4.145511
false
false
false
etkirsch/legends-of-erukar
erukar/content/skills/brutality/Cleave.py
1
4561
from erukar.system.engine import Attack, Damage, DamageScalar
from erukar.ext.math import Navigator


class Cleave(Attack):
    Name = 'Cleave'
    ShowInLists = True
    Description = 'Cleave with {} at {}'
    CurrentLevel = 'Swing wildly, inflicting {:0.1f}% damage per tile '\
        'in three spaces at once. Cleave uses two action points and '\
        'only rolls an attack once. Damage will hit in a 90 degree '\
        'arc centered on the specified space and will only hit hostile '\
        'creatures.'
    NextLevel = 'Increases percentage of damage per tile to {:0.1f}%.'
    OnSwing = 'You swing your {weapon} in the air wildly!'
    SeeSwing = '{attacker} swings its {weapon} wildly!'
    HitNothing = 'You fail to hit any target with your Cleave attack!'

    def ap_cost(self, *_):
        return 2

    def command(self, creature, weapon, loc):
        return {
            'command': 'ActivateAbility',
            'abilityModule': self.__module__,
            'cost': self.ap_cost(None, None),
            'description': self.format_description(creature, weapon, loc),
            'weapon': str(weapon.uuid),
            'interaction_target': loc,
        }

    def format_description(self, target, weapon, loc):
        return self.Description.format(weapon.alias(), loc)

    def valid_at(self, cmd, loc):
        player = cmd.args['player_lifeform']
        if player.action_points() < self.ap_cost(cmd, loc):
            return False
        if not any(Attack.weapons_in_range(player, loc)):
            return False
        return True

    def action_for_map(self, cmd, loc):
        player = cmd.args['player_lifeform']
        for weapon in Attack.weapons_in_range(player, loc):
            yield self.command(player, weapon, loc)

    def perform_attack(self, cmd, player, weapon, target):
        roll = self.calc_attack_roll(cmd, player, weapon, target)
        targets = list(self.affected_enemies(cmd))
        if len(targets) < 1:
            whoops = Cleave.HitNothing.format(weapon.alias())
            cmd.append_result(player.uid, whoops)
        for enemy in targets:
            self.perform_sub_attack(cmd, player, weapon, enemy, roll)
        return cmd.succeed()

    def calc_attack_roll(self, cmd, player, weapon, target):
        weapon.on_calculate_attack(cmd)
        roll = player.calculate_attack_roll(0.8, target)
        for mod in self.possible_modifiers:
            mod_name = 'modify_attack_roll'
            roll = mod.modify_element(mod_name, roll)
        return roll

    def affected_enemies(self, cmd):
        player = cmd.args['player_lifeform']
        at = cmd.args['interaction_target']
        for loc in self.affected_tiles(player, at):
            for enemy in cmd.world.creatures_at(player, loc):
                yield enemy

    def perform_sub_attack(self, cmd, player, weapon, enemy, roll):
        if not enemy.is_hostile_to(player):
            return
        hit = self.attack_succeeded(cmd, player, weapon, enemy, roll)
        if not hit:
            return
        self.do_damage(cmd, player, weapon, enemy)
        self.check_for_kill(cmd, player, weapon, enemy)

    def is_in_valid_range(self, player, weapon, target):
        dist = Navigator.distance(player.coordinates, target)
        return dist <= weapon.attack_range(player)

    def mit_carried_through(level):
        if level < 5:
            return 1.0
        return 1.0 - (level - 4) * 0.10

    def multiplier(level):
        if level > 4:
            return 1.0
        return 0.75 + 0.05 * level

    def current_level_description(self):
        percent = Cleave.multiplier(self.level) * 100.0
        return self.CurrentLevel.format(percent)

    def next_level_description(self):
        percent = Cleave.multiplier(self.level + 1) * 100.0
        return self.NextLevel.format(percent)

    def affected_tiles(self, player, loc):
        p_x, p_y = player.coordinates
        x, y = loc
        if p_x == x:
            return Cleave.horizontal_tiles(p_x, y)
        if p_y == y:
            return Cleave.vertical_tiles(x, p_y)
        return Cleave.arc_tiles(x, y, p_x, p_y)

    def horizontal_tiles(x, y):
        return [(x-1, y), (x, y), (x+1, y)]

    def vertical_tiles(x, y):
        return [(x, y-1), (x, y), (x, y+1)]

    def arc_tiles(x, y, p_x, p_y):
        tiles = [(p_x, p_y)]
        # X Tiles
        tiles += [(p_x+1, p_y)] if x < p_x\
            else [(p_x-1, p_y)]
        # Y Tiles
        tiles += [(p_x, p_y+1)] if y < p_y\
            else [(p_x, p_y-1)]
        return tiles
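# Illustrative sketch (not part of the original module): traces the tile
# geometry produced by arc_tiles above for one made-up attacker/target pair,
# using the same comparisons as the method, so it runs without any engine
# objects.
def _demo_cleave_tiles():
    p_x, p_y = 5, 5   # attacker position (sample value)
    x, y = 4, 4       # targeted space, diagonal from the attacker (sample value)
    tiles = [(p_x, p_y)]
    # One neighbour per axis, chosen by comparing target and attacker coords,
    # exactly as in Cleave.arc_tiles.
    tiles += [(p_x + 1, p_y)] if x < p_x else [(p_x - 1, p_y)]
    tiles += [(p_x, p_y + 1)] if y < p_y else [(p_x, p_y - 1)]
    assert tiles == [(5, 5), (6, 5), (5, 6)]

_demo_cleave_tiles()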
agpl-3.0
-1,209,525,495,349,219,600
34.913386
74
0.585398
false
3.403731
false
false
false
sedden/django-basic-apps
basic/tools/shortcuts.py
1
1190
import os.path import hashlib from django.shortcuts import render_to_response from django.template.context import RequestContext from django.http import HttpResponseRedirect def get_image_path(instance, filename): """ Converts an image filename to a hash. """ name = hashlib.md5("%s" % instance.id).hexdigest() ext = os.path.splitext(filename) return os.path.join("%s/%s" % (instance._meta.app_label, instance._meta.module_name), '%s%s' % (name, ext[1])) def render(request, *args, **kwargs): """ Simple wrapper for render_to_response. """ kwargs['context_instance'] = RequestContext(request) return render_to_response(*args, **kwargs) def redirect(request, obj=None): """ Simple wrapper for HttpResponseRedirect that checks the request for a 'next' GET parameter then falls back to a given object or url string. """ next = request.GET.get('next', None) redirect_url = '/' if next: redirect_url = next elif isinstance(obj, str): redirect_url = obj elif obj and hasattr(obj, 'get_absolute_url'): redirect_url = obj.get_absolute_url() return HttpResponseRedirect(redirect_url)
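# Illustrative sketch (not part of the original module): shows the
# filename-hashing scheme of get_image_path above without needing Django.
# The basename becomes the md5 of the instance id while the extension is
# preserved; the id value is made up, and .encode() is added so the sketch
# also runs on Python 3 (the module above targets Python 2).
import hashlib
import os.path

def _demo_image_name(instance_id, filename):
    name = hashlib.md5(("%s" % instance_id).encode("utf-8")).hexdigest()
    ext = os.path.splitext(filename)
    return "%s%s" % (name, ext[1])

assert _demo_image_name(42, "photo.jpg").endswith(".jpg")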
bsd-3-clause
-5,613,188,190,341,763,000
28.775
114
0.67395
false
3.888889
false
false
false
geo2tag-logistics/Geo2Logistics
logistics/Geo2TagService.py
1
6073
import json
import requests
from logistics.models import Fleet

SERVER_URL = "http://demo.geo2tag.org/instance/"
BASE_SERVICE_NAME = "testservice"
SERVICE_NAME = BASE_SERVICE_NAME
channel_dict = {}
points_dict = {}


def getSerivceUrl():
    return SERVER_URL + "service/" + SERVICE_NAME


def one_time_startup():
    print("Application startup execution")
    createService()
    clearAllFleetChannels()


def createService():
    # m = hashlib.md5()
    # m.update(socket.gethostbyname(socket.getfqdn()).encode('utf-8'))
    # global SERVICE_NAME
    # SERVICE_NAME = BASE_SERVICE_NAME + "_" + str(m.hexdigest())
    # print("SERVICE_NAME: "+SERVICE_NAME)
    #
    # url = SERVER_URL + 'service'
    # data = {'name': SERVICE_NAME}
    # request = requests.post(url, data=data)
    # print(request.text)
    pass


# returns the map url (used when opening driver-fleet-id)
def getFleetMap(fleet_id):
    try:
        fleet = Fleet.objects.get(id=fleet_id)
        channel_id = getOrCreateFleetChannel(fleet)
    except:
        channel_id = "none"

    return getSerivceUrl() + "/map?zoom=10&latitude=59.8944&longitude=30.2642&channel_ids=[\""+str(channel_id)+"\"]"


# creates a channel for the fleet if one does not exist yet
# (used when adding a point in updateDriverPos)
# returns the channel oid for the fleet
def getOrCreateFleetChannel(fleet):
    try:
        channel_oid = channel_dict.get(fleet.id, None)
        if channel_oid is not None:
            return channel_oid

        print("create channel for fleet " + str(fleet))
        url = getSerivceUrl() + '/channel'
        full_name = str(fleet.name) + "_" + str(fleet.id)
        data = {'name': full_name, 'json': {'name': str(fleet.name), 'id': str(fleet.id), 'owner': fleet.owner.first_name+' '+fleet.owner.last_name}}
        request = requests.post(url, data=data)
        response = request.text

        channel_exists = response == 'null'
        if channel_exists:
            print(full_name+' already exists : '+str(channel_exists))
            oid = None
        else:
            oid = json.loads(response)["$oid"]
            channel_dict[fleet.id] = oid
        return oid

    except Exception as e:
        print("EXCEPTION WHILE createFleetChannel: " + str(e))


# deletes the fleet's channel (used when the fleet is deleted)
def deleteFleetChannel(fleet):
    try:
        channel_oid = channel_dict.get(fleet.id)
        headers = {'content-type': 'application/json'}
        url = getSerivceUrl() + "/channel/" + channel_oid

        request = requests.delete(url, headers=headers)
        channel_dict.pop(fleet.id)
        print("delete channel of fleet " + str(fleet) +" result: "+request.text)

    except Exception as e:
        print("EXCEPTION WHILE deleteFleetChannel: " + str(e))


# deletes all channels (used on application startup)
def clearAllFleetChannels():
    print("delete all channels")
    try:
        url = getSerivceUrl() + '/channel?number=0'
        request = requests.get(url)
        response = request.text
        print(response)
        parsed_string = json.loads(response)
        for channel in parsed_string:
            channel_oid = channel["_id"]["$oid"]
            headers = {'content-type': 'application/json'}
            url = getSerivceUrl() + "/channel/" + channel_oid
            print("DELETE " + url)
            requests.delete(url, headers=headers)
        channel_dict.clear()
        points_dict.clear()

    except Exception as e:
        print("EXCEPTION WHILE clearAllFleetChannels: " + str(e))


# updates the driver's current location (used by api/driver/update_pos/)
def updateDriverPos(fleet, driver, lat, lon):
    try:
        channel_oid = getOrCreateFleetChannel(fleet)
        if channel_oid is not None:
            point_oid = points_dict.get(driver.id, None)

            url = getSerivceUrl() + '/point'
            data = [{"lon": float(lat), "lat": float(lon), "alt": 1.1,
                     "json": {"name": driver.first_name + " " + driver.last_name},
                     "channel_id": channel_oid}]

            if point_oid is None:
                request = requests.post(url, data=json.dumps(data))
                point_oid = json.loads(request.text)[0]
                points_dict[driver.id] = point_oid
                print("added point " + str(lat) + " " + str(lon) + " for driver " + str(driver) + " in fleet " + str(fleet) + " result: "+request.text)
            else:
                # delete old
                del_url = getSerivceUrl() + '/point/' + point_oid
                request = requests.delete(del_url)
                success = request.text == '{}'
                if success:
                    points_dict.pop(driver.id)
                    # add new
                    request = requests.post(url, data=json.dumps(data))
                    point_oid = json.loads(request.text)[0]
                    points_dict[driver.id] = point_oid
                    print("updated point " + str(lat) + " " + str(lon) + " for driver " + str(driver) + " in fleet " + str(fleet) + " result: " + request.text)
                else:
                    print("error while delete "+request.text)

    except Exception as e:
        print("EXCEPTION WHILE updateDriverPos: " + str(e))


# deletes the point corresponding to the driver in the given fleet
# (used when the driver is removed from the fleet and when a trip ends)
def deleteDriverPos(fleet, driver):
    try:
        point_oid = points_dict.get(driver.id)

        url = getSerivceUrl() + '/point/' + point_oid
        request = requests.delete(url)
        points_dict.pop(driver.id)
        print("cleared position for driver " + str(driver) + " from fleet " + str(fleet) + " result: "+request.text)
    except Exception as e:
        print("EXCEPTION WHILE deleteDriverPos: " + str(e))
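# Illustrative sketch (not part of the original module): shows the JSON body
# that updateDriverPos above posts to the geo2tag /point endpoint. The
# coordinates, driver name, and channel oid are made-up sample values, and
# nothing is sent over the network here.
import json

_sample_point = [{"lon": 30.2642, "lat": 59.8944, "alt": 1.1,
                  "json": {"name": "Ivan Petrov"},
                  "channel_id": "5a1b2c3d4e5f607182930abc"}]
print(json.dumps(_sample_point, indent=2))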
apache-2.0
-5,767,976,916,874,057,000
35.194969
159
0.596003
false
3.139662
false
false
false
jimjing/smores_choose_behavior
src/smores_choose_behavior/visualizer.py
1
1422
#!/usr/bin/env python
import threading

import rospy
from visualization_msgs.msg import Marker, MarkerArray


class Visualizer(object):
    def __init__(self):
        self._current_pub_path = []
        self.run = False
        self.t = None

        self.t = threading.Thread(target=self._startPathPub)
        self.t.setDaemon(True)
        self.run = True
        self.t.start()

    def stop(self):
        self.run = False
        self.t.join()

    def _startPathPub(self):
        path_marker_pub = rospy.Publisher('PathMarker', MarkerArray, queue_size=10)
        rate = rospy.Rate(1)  # 1 Hz
        while self.run and not rospy.is_shutdown():
            id = 1
            m_array = MarkerArray()
            for pt in self._current_pub_path:
                m = Marker()
                m.header.frame_id = "camera_link"
                m.header.stamp = rospy.Time()
                m.ns = "my_namespace"
                m.id = id
                m.type = Marker.SPHERE
                m.action = Marker.ADD
                m.pose = pt
                m.scale.x = 0.05
                m.scale.y = 0.05
                m.scale.z = 0.05
                m.color.r = 0.5
                m.color.a = 1.0
                m_array.markers.append(m)
                id += 1
            path_marker_pub.publish(m_array)
            rate.sleep()

    def setPubPath(self, path):
        self._current_pub_path = path
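# Illustrative sketch (not part of the original module): the Visualizer above
# runs its publish loop on a daemon thread guarded by a `run` flag and joined
# in stop(). The same lifecycle can be exercised without ROS; _LoopWorker is
# an invented stand-in for the publisher loop.
import threading
import time

class _LoopWorker(object):
    def __init__(self):
        self.run = True
        self.ticks = 0
        self.t = threading.Thread(target=self._loop)
        self.t.setDaemon(True)
        self.t.start()

    def _loop(self):
        while self.run:      # same exit-condition pattern as Visualizer
            self.ticks += 1
            time.sleep(0.01)

    def stop(self):
        self.run = False
        self.t.join()        # wait for the loop to observe the flag

_w = _LoopWorker()
time.sleep(0.05)
_w.stop()
assert _w.ticks > 0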
gpl-3.0
-7,178,490,557,951,756,000
27.44
83
0.496484
false
3.761905
false
false
false
ComputerNetworks-UFRGS/OpERA
python/device/radioDevice.py
1
9121
""" Copyright 2013 OpERA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from gnuradio import gr #pylint: disable=F0401 from abstractDevice import AbstractDevice from uhdDevice import * from uhdArch import * import time #::TODO:: documentacao das funcoes das classes class APath(object): """ Class to handle a single path connection/disconnection """ PENDING = 0 CONNECTED = 1 DISABLED = 2 def __init__(self, source, arch, sink): """ CTOR @param source @param arch @param sink """ if isinstance(source, UHDBase): source = source.uhd if isinstance(sink, UHDBase): sink = sink.uhd if source and not isinstance(source, tuple): source = (source, 0) if arch and not isinstance(arch, tuple): arch = (arch, 0) if sink and not isinstance(sink, tuple): sink = (sink, 0) self._source = source self._arch = arch self._sink = sink self._state = APath.PENDING def __hasattr__(self, name): """ Function override. """ if self._source and hasattr(self._source[0], name): return hasattr(self._source[0], name) elif self._arch and hasattr(self._arch[0], name): return hasattr(self._arch[0], name) elif self._sink and hasattr(self._sink[0], name): return hasattr(self._sink[0], name) raise AttributeError def __getattr__(self, name): """ Function override. """ if self._source and hasattr(self._source[0], name): return getattr(self._source[0], name) elif self._arch and hasattr(self._arch[0], name): return getattr(self._arch[0], name) elif self._sink and hasattr(self._sink[0], name): return getattr(self._sink[0], name) raise AttributeError("%s not found in wrapper APath" % name) def connect(self, tb): """ @param tb An OpERAFlow instance. """ if self.is_connected(): return if self._source: isinstance(self._source[0], UHDBaseArch) and self._source[0].pre_connect(tb) if self._sink: isinstance(self._sink[0], UHDBaseArch) and self._sink[0].pre_connect(tb) if self._arch: isinstance(self._arch[0], UHDBaseArch) and self._arch[0].pre_connect(tb) if self._arch: self._source and tb.connect(self._source, self._arch) self._sink and tb.connect(self._arch, self._sink) else: self._source and self._sink and tb.connect(self._source, self._sink) self._state = APath.CONNECTED def disconnect(self, tb): """ @param tb OpERAFlow instance. 
""" if not self.is_connected(): return if self._arch: self._source and tb.disconnect(self._source, self._arch) self._sink and tb.disconnect(self._arch, self._sink) else: self._source and self._sink and tb.disconnect(self._source, self._sink) self._state = APath.DISABLED def get_state(self): """ """ return self._state def is_connected(self): """ """ return self._state == APath.CONNECTED def is_disabled(self): """ """ return self._state == APath.DISABLED def is_pending(self): """ """ return self._state == APath.PENDING class RadioDevice(AbstractDevice): """ """ def __init__(self, name="RadioDevice"): """ CTOR @param name """ AbstractDevice.__init__(self, name=name) # Dictionary of all UHD devices # Dictionary of AbstractArch of this radio device self._dict_of_uhds = {} self._dict_of_archs = {} self._tb = None # We need this flag because lock/unlock in OpERAFlow is not working # To avoid performing the "RadioDevice::connect" 1+ times, we control it with this flag. self._pending_done = False def add_arch(self, source, sink, arch, name, uhd_device): """ Add a reference to a arch in which this radio_device.is a source/sink. @param source Arch source. @param sink Architecture sink. @param arch AbstractArch device implementation. @param name Name Name of the architecture. @param uhd_device UHD device. Should be source or sink. """ # The arch has a reference to the radio. if hasattr(arch, 'set_radio_device'): arch.set_radio_device(uhd_device) self._dict_of_archs[name] = APath(source=source, arch=arch, sink=sink) self._dict_of_uhds[name] = uhd_device # makes the name be accessible by doing radio.$name setattr(self, name, self._dict_of_archs[name]) def disable_arch(self, name): """ @param name """ # Arch is not enabled if not name in self._dict_of_archs: raise AttributeError # ::TRICKY:: # lock()/unlock() are not working with python sync blocks. # So, we use stop/wait/start # For more info check the link: # http://gnuradio.org/redmine/issues/594 self._tb.stop() self._tb.wait() self._dict_of_archs[name].disconnect(self._tb) self._tb.start() def enable_arch(self, name): """ @param name """ # Arch is not enabled if not name in self._dict_of_archs: raise AttributeError self._tb.stop() self._tb.wait() self._dict_of_archs[name].connect(self._tb) self._tb.start() def connect(self): """ """ if self._pending_done: return self._pending_done = True for x in self._dict_of_archs.itervalues(): x.connect(self._tb) def set_top_block(self, tb): """ @param tb Set the top block. """ self._tb = tb def __getattr__(self, name): """ Search for a parameter/function in all archs of this Radio. So, a programer that doed radio.function, activates this __getattr__ function, which searches for 'function' in all architectures. @param name Name of parameter/function. """ if name == "_dict_of_archs": return object.getattr(self, "_dict_of_archs") #pylint: disable=E1101 else: # search for method in the architectures for key in self._dict_of_archs: if hasattr(self._dict_of_archs[key], name): return getattr(self._dict_of_archs[key], name) raise AttributeError("%r object has no attribute %s" % (self.__class__, name)) ### Implementations required for the AbstractDevice def __getter(self, str_callback): """ A gereric getter for this class. @param str_callback String with the name of the real getter function. 
""" arr = [] for uhd in self._dict_of_uhds.values(): uhd and arr.append(getattr(uhd, str_callback)()) return arr def _get_center_freq(self): """ """ return self.__getter('get_center_freq') def _set_center_freq(self, center_freq): """ @param center_freq """ for uhd in self._dict_of_uhds.values(): uhd and uhd.set_center_freq(center_freq) return center_freq def _get_samp_rate(self): """ Device sample rate getter. """ return self.__getter('get_samp_rate') def _set_samp_rate(self, samp_rate): """ @param samp_rate """ for uhd in self._dict_of_uhds.values(): uhd and uhd.set_samp_rate(samp_rate) return samp_rate def _get_gain(self): """ Device gain getter. """ return self.__getter('get_gain') def _set_gain(self, gain): """ @param gain """ for uhd in self._dict_of_uhds.values(): uhd and uhd.set_gain(gain) return gain def _get_bandwidth(self): """ Get the device's bandwidth. @return """ return self.__getter('get_bandwidth') def _set_bandwidth(self, bw): """ @param bw """ for uhd in self._dict_of_uhds.values(): uhd and uhd.set_bandwidth(bw) return bw
apache-2.0
-5,384,129,354,236,725,000
25.591837
97
0.558162
false
4.032272
false
false
false
tramin/frobo
frobo_nav/nodes/nav_calibrate_linear.py
1
5813
#!/usr/bin/env python """ nav_square.py - Version 1.1 2013-12-20 A basic demo of the using odometry data to move the robot along a square trajectory. Created for the Pi Robot Project: http://www.pirobot.org Copyright (c) 2012 Patrick Goebel. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.5 This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details at: http://www.gnu.org/licenses/gpl.html """ import rospy from geometry_msgs.msg import Twist, Point, Quaternion import tf from frobo_nav.transform_utils import quat_to_angle, normalize_angle from math import degrees, radians, copysign, sqrt, pow, pi class NavSquare(): def __init__(self): # Give the node a name rospy.init_node('nav_square', anonymous=False) # Set rospy to execute a shutdown function when terminating the script rospy.on_shutdown(self.shutdown) # How fast will we check the odometry values? rate = 20 # Set the equivalent ROS rate variable r = rospy.Rate(rate) # Set the parameters for the target square goal_distance = rospy.get_param("~goal_distance", 3.0) # meters goal_angle = rospy.get_param("~goal_angle", radians(90)) # degrees converted to radians linear_speed = rospy.get_param("~linear_speed", 0.1) # meters per second angular_speed = rospy.get_param("~angular_speed", 0.6) # radians per second angular_tolerance = rospy.get_param("~angular_tolerance", radians(0)) # degrees to radians # Publisher to control the robot's speed self.cmd_vel = rospy.Publisher('/cmd_vel', Twist) # The base frame is base_footprint for the TurtleBot but base_link for Pi Robot self.base_frame = rospy.get_param('~base_frame', '/base_link') # The odom frame is usually just /odom self.odom_frame = rospy.get_param('~odom_frame', '/odom') # Initialize the tf listener self.tf_listener = tf.TransformListener() # Give tf some time to fill its buffer rospy.sleep(2) # Set the odom frame self.odom_frame = '/odom' # Find out if the robot uses /base_link or /base_footprint try: self.tf_listener.waitForTransform(self.odom_frame, '/base_footprint', rospy.Time(), rospy.Duration(1.0)) self.base_frame = '/base_footprint' except (tf.Exception, tf.ConnectivityException, tf.LookupException): try: self.tf_listener.waitForTransform(self.odom_frame, '/base_link', rospy.Time(), rospy.Duration(1.0)) self.base_frame = '/base_link' except (tf.Exception, tf.ConnectivityException, tf.LookupException): rospy.loginfo("Cannot find transform between /odom and /base_link or /base_footprint") rospy.signal_shutdown("tf Exception") # Initialize the position variable as a Point type position = Point() # Initialize the movement command move_cmd = Twist() # Set the movement command to forward motion move_cmd.linear.x = linear_speed # Get the starting position values (position, rotation) = self.get_odom() initPosition = position rospy.loginfo("Initial position at " + str(position) + " and rotation " + str(degrees(rotation)) + " degrees") x_start = position.x y_start = position.y # Keep track of the distance traveled distance = 0 # Enter the loop to move along a side while distance < goal_distance and not rospy.is_shutdown(): # Publish the Twist message and sleep 1 cycle self.cmd_vel.publish(move_cmd) r.sleep() # Get 
the current position (position, rotation) = self.get_odom() # Compute the Euclidean distance from the start distance = sqrt(pow((position.x - x_start), 2) + pow((position.y - y_start), 2)) # Stop the robot when we are done self.cmd_vel.publish(Twist()) #print result (position, rotation) = self.get_odom() rospy.loginfo("Final position at " + str(position) + " and rotation " + str(degrees(rotation)) + " degrees") rospy.loginfo("Difference (position.x - initPosition.x) " + str(position.x - initPosition.x) + "m") def get_odom(self): # Get the current transform between the odom and base frames try: (trans, rot) = self.tf_listener.lookupTransform(self.odom_frame, self.base_frame, rospy.Time(0)) except (tf.Exception, tf.ConnectivityException, tf.LookupException): rospy.loginfo("TF Exception") return return (Point(*trans), quat_to_angle(Quaternion(*rot))) def shutdown(self): # Always stop the robot when shutting down the node rospy.loginfo("Stopping the robot...") self.cmd_vel.publish(Twist()) rospy.sleep(1) if __name__ == '__main__': try: NavSquare() except rospy.ROSInterruptException: rospy.loginfo("Navigation terminated.")
mit
3,346,981,609,078,906,400
39.089655
116
0.607088
false
4.143264
false
false
false
rjl09c/ysp2017
katiehessian.py
1
7324
import yt import matplotlib.pyplot as plt import numpy as np from matplotlib import pylab from yt.analysis_modules.halo_finding.api import HaloFinder from pylab import* from numpy import ma from numpy import linalg as LA #derives vel with respect to x def derivx(vel,xcoords): distance = xcoords[1][0] - xcoords[0][0] velxdx = np.zeros((320,320)) for i in range(len(vel)): for x in range(len(vel)): if 0 < i < len(vel) - 1: velxdx[i,x] = ((-1/2) * vel[i-1][x]) + ((1/2) * vel[i+1][x]) elif i == 0: velxdx[i,x] = (((-3/2) * vel[i][x]) + (2 * vel[i+1][x]) + ((-1/2) * vel[i+2][x])) elif i == len(vel) - 1: velxdx[i,x] = ((-3/2) * vel[i][x]) + (2 * vel[i-1][x]) + ((-1/2) * vel[i-2][x]) return velxdx/distance #derives vel with respect to y def derivy(vel,xcoords): distance = xcoords[1][0] - xcoords[0][0] velydy = np.zeros((320,320)) for i in range(len(vel)): for x in range(len(vel)): if 0 < x < len(vel) - 1: velydy[i,x] = (((-1/2) * vel[i][x-1]) + ((1/2) * vel[i][x+1])) elif x == 0: velydy[i,x] = (((-3/2)*vel[i][x]) + (2*vel[i][x+1]) + ((-1/2) * vel[i][x + 2])) elif x == len(vel) - 1: velydy[i,x] = (((-3/2)*vel[i][x]) + (2*vel[i][x-1]) + ((-1/2) * vel[i][x-2])) return velydy/distance #second derivative of vel with respect to x def deriv2x(vel,xcoords): distance = xcoords[1][0] - xcoords[0][0] velxdx = np.zeros((320,320)) for i in range(len(vel)): for x in range(len(vel)): if 0 < i < len(vel) - 1: velxdx[i,x] = (vel[i-1][x]) + (-2 * vel[i][x]) + (vel[i+1][x]) elif i == 0: velxdx[i,x] = ((2 * vel[i][x]) + (-5 * vel[i+1][x]) + (4* vel[i+2][x]) + (-1 * vel[i+3][x])) elif i == len(vel) - 1: velxdx[i,x] = ((-3/2) * vel[i][x]) + (2 * vel[i-1][x]) + ((-1/2) * vel[i-2][x]) return velxdx/distance #second derivative of vel with respect to y def deriv2y(vel,xcoords): distance = xcoords[1][0] - xcoords[0][0] velydy = np.zeros((320,320)) for i in range(len(vel)): for x in range(len(vel)): if 0 < x < len(vel) - 1: velydy[i,x] = ((vel[i][x-1]) + (-2 * vel[i][x]) + (vel[i][x+1])) elif x == 0: velydy[i,x] = (((2)*vel[i][x]) + (-5 * vel[i][x+1]) + ((4) * vel[i][x+2]) + (-1 * vel[i][x+3])) elif x == len(vel) - 1: velydy[i,x] = (((2) * vel[i][x]) + (-5 * vel[i][x - 1]) + ((4) * vel[i][x-2]) + (-1 * vel[i][x-3])) return velydy/distance #second derivative of a mixed derivative def mixed_deriv(xcoords, ycoords, vel): distx = xcoords[1][0] - xcoords[0][0] disty = ycoords[0][1] - ycoords[0][0] mixed = np.zeros((320,320)) veldx = derivx(vel, xcoords) veldy = derivy(veldx, xcoords) #takes deriv of vel with respect to x and derives that in the y direction for i in range(len(vel)): for x in range(len(vel)): if 0 < i < len(vel) - 1 and 0 < x < len(vel) - 1: mixed[i][x] = ((vel[i+1][x+1]) - (vel[i+1][x-1]) - (vel[i-1][x+1]) + (vel[i-1][x-1]))/(4*distx*disty) #if on edges derives with respect to x first elif i == 0 or i == len(vel) - 1 or x == 0 or x == len(vel) - 1: mixed[i][x]=veldy[i][x] return mixed #create hessian matrix for each point def hess(xcoords, ycoords, vel): veldx = deriv2x(vel, xcoords) #retrieves the second derivatives of the velocity in the x direction veldy = deriv2y(vel, xcoords) #retrieves the second derivatives of the velocity in the y direction mixed = mixed_deriv(xcoords, ycoords, vel) #retrieves the second mixed derivatives of the velocity hessian = np.zeros((2,2)) allhessian = [[[] for j in range(320)] for i in range(320)] for j in range(len(veldx)): for k in range(len(veldx)): for i in range(len(hessian)): for x in range(len(hessian)): if i == 0 and x == 1: hessian[i,x] = mixed[j,k] hessian[i+1][x-1] = 
mixed[j,k]
                    elif x == 0 and i == 0:
                        hessian[i,x] = veldx[j,k]
                    elif x == 1 and i == 1:
                        hessian[i,x] = veldy[j,k]
            allhessian[j][k] = hessian.copy()  # copy so each grid point keeps its own 2x2 matrix

    allhessian = np.array(allhessian)
    return allhessian

#find determinant
def determinant(allhessian):
    deters = np.zeros((320,320))
    for j in range(len(allhessian)):
        for k in range(len(allhessian)):
            x = allhessian[j,k]
            deters[j,k] = (x[0,0]*x[1,1]) - (x[1,0]*x[0,1])
    return deters

#find magnitude
def magnitude(velx,vely, xcoords):
    mag = np.zeros((320,320))
    yderiv = derivy(vely, xcoords)
    xderiv = derivx(velx, xcoords)
    for i in range(len(xderiv)):
        for x in range(len(xderiv)):
            mag[i][x] = (((yderiv[i,x]**2) + (xderiv[i,x]**2))**.5)
    return mag

#finds extrema and saddlepoints
def extrema(allhessian, velx, vely, xcoords):
    deters = determinant(allhessian)
    extrem = np.zeros((320,320))
    mag = magnitude(velx, vely, xcoords)
    for j in range(len(extrem)):
        for k in range(len(extrem)):
            if mag[j][k] == 0:
                if deters[j,k] < 0:
                    extrem[j, k] = -1
                elif deters[j,k] == 0:
                    extrem[j,k] = 0
                else:
                    x = allhessian[j,k]
                    if deters[j,k] > 0 and x[0,0] > 0:
                        extrem[j, k] = -2
                    elif deters[j,k] > 0 and x[0,0] < 0:
                        extrem[j, k] = 2
    return extrem

#creates jacobian matrix for each point
def jacobian(xcoords,velx, vely):
    xx = derivx(velx, xcoords)
    xy = derivy(velx, xcoords)
    yx = derivx(vely, xcoords)
    yy = derivy(vely, xcoords)
    jacob = np.zeros((2,2))
    alljacob = [[[] for j in range(320)] for i in range(320)]
    for j in range(len(xx)):
        for k in range(len(xx)):
            for i in range(len(jacob)):
                for c in range(len(jacob)):
                    if c == 0 and i == 0:
                        jacob[i][c] = xx[j][k]
                    elif c == 1 and i == 0:
                        jacob[i][c] = xy[j][k]
                    elif c == 0 and i == 1:
                        jacob[i][c] = yx[j][k]
                    elif c == 1 and i == 1:
                        jacob[i][c] = yy[j][k]
            alljacob[j][k] = jacob.copy()  # copy so each grid point keeps its own 2x2 matrix
    alljacob = np.array(alljacob)
    return alljacob

#obtains eigenvalues for all points' jacobian matrices and then checks the extrema
def evals(alljacob):
    eigen = [[[] for j in range(320)] for i in range(320)]
    extrema = np.zeros((320,320))
    for j in range(len(alljacob)):
        for k in range(len(alljacob)):
            x = alljacob[j,k]
            eigen[j][k] = LA.eigvalsh(x)
            y = eigen[j][k]
            if y[0]>0 and y[1] > 0:
                extrema[j,k] = 2
            elif y[0]<0 and y[1] <0:
                extrema[j,k] = -2
            elif y[0]*y[1] < 0:
                extrema[j,k] = 3
    return extrema

#loads files and calls hess function
def main():
    ds = yt.load("kh_mhd_Ma=0.803333333333At=0.0hdf5_chk_0000")
    ad = ds.covering_grid(level=0, left_edge=ds.index.grids[0].LeftEdge, dims=ds.domain_dimensions)
    xcoords = np.array(ad["x"])
    ycoords = np.array(ad["y"])
    velx = np.array(ad["velx"])
    vely = np.array(ad["vely"])

    ds1 = yt.load("kh_mhd_Ma=0.803333333333At=0.0hdf5_chk_0001")
    dd = ds1.covering_grid(level=0, left_edge=ds1.index.grids[0].LeftEdge, dims=ds1.domain_dimensions)
    xcoords1 = np.array(dd["x"])
    ycoords1 = np.array(dd["y"])
    velx1 = np.array(dd["velx"])
    vely1 = np.array(dd["vely"])

    #creates Hessian matrix for x velocity for file 1
    extrema(hess(xcoords, ycoords, velx), velx, vely, xcoords)
    #creates Hessian matrix for y velocity for file 1
    (extrema(hess(xcoords, ycoords, vely), velx, vely, xcoords))

    #prints extrema for file1
    print(evals(jacobian(xcoords, velx, vely)))

    '''plt.figure()
    plt.scatter(xcoords, ycoords,c=evals(jacobian(xcoords, velx, vely)), marker= 'o',edgecolor='none')
    cb = plt.colorbar()
    cb.set_label('Extrema')
    plt.show()'''

main()
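# Illustrative sketch (not part of the original script): checks the
# central-difference stencil used by derivx above on an analytic field where
# d/dx sin(x) = cos(x). Grid size and spacing are arbitrary sample values;
# only numpy is needed.
import numpy as np

_x = np.linspace(0.0, 2.0 * np.pi, 200)
_h = _x[1] - _x[0]
_f = np.sin(_x)
# interior points: (f[i+1] - f[i-1]) / (2h), i.e. ((-1/2)f[i-1] + (1/2)f[i+1]) / h
_df = (_f[2:] - _f[:-2]) / (2.0 * _h)
assert np.allclose(_df, np.cos(_x[1:-1]), atol=1e-3)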
gpl-3.0
-7,559,592,820,612,261,000
24.255172
106
0.58834
false
2.283042
false
false
false
tehpug/Ares
leagues/models.py
1
5319
# -*- coding: utf-8 -*-
from celery.result import AsyncResult
from django.core.exceptions import ValidationError
from django.db import models, connection
from django.utils import timezone


def validate_even(value):
    """
    Validate a number to be even
    """
    if value % 2 != 0:
        raise ValidationError('%(value)s is not an even number',
                              params={'value': value})


class League(models.Model):
    """
    Leagues for robots
    """
    title = models.CharField(
        'title',
        max_length=150,
        unique=True,
        help_text='Required. 150 characters or fewer.',
        error_messages={
            'unique': 'A league with that name already exists.',
        },
    )
    description = models.CharField(max_length=1000, blank=True, null=True)
    finished = models.BooleanField('finished', default=False)
    registration_start = models.DateTimeField('registration start time')
    registration_end = models.DateTimeField('registration end time')
    start = models.DateField('league start date')
    # Times to schedule matches within a day
    match_start_time = models.TimeField('matches start time')
    match_end_time = models.TimeField('matches end time')
    num_robots = models.PositiveSmallIntegerField('number of robots',
                                                  validators=[validate_even])
    game = models.ForeignKey('games.Game', related_name='leagues',
                             on_delete=models.CASCADE)
    robots = models.ManyToManyField('robots.Robot', blank=True,
                                    related_name='leagues')
    _match_scheduler_id = models.CharField('match scheduler id',
                                           max_length=36, null=True)

    def __init__(self, *args, **kwargs):
        super(League, self).__init__(*args, **kwargs)
        self.__original_registration_end = self.registration_end

    @property
    def match_scheduler(self):
        """
        Match scheduler task
        """
        if self._match_scheduler_id:
            return AsyncResult(self._match_scheduler_id)
        return None

    @match_scheduler.setter
    def match_scheduler(self, scheduler):
        if isinstance(scheduler, AsyncResult):
            self._match_scheduler_id = scheduler.id

    @match_scheduler.deleter
    def match_scheduler(self):
        self._match_scheduler_id = None

    def has_schedule_changed(self):
        """
        Check if the league schedule has changed or not
        """
        return self.registration_end != self.__original_registration_end

    def clean(self):
        """
        Validate the values
        """
        now = timezone.now()
        if self.registration_start <= now:
            raise ValidationError(
                u'Registration starting time must be after now')
        if self.registration_end <= self.registration_start:
            raise ValidationError(
                u'Registration ending time must be after its starting')
        if self.start <= self.registration_end.date():
            raise ValidationError(
                u'League starting time must be after registration ending time')
        if self.match_end_time <= self.match_start_time:
            raise ValidationError(
                u'Match ending time must be after its starting time')

    def __str__(self):
        return '{} ({})'.format(self.title, self.game)

    @staticmethod
    def get_table():
        """
        Create the league table
        """
        table = list()
        with connection.cursor() as c:
            c.execute("""SELECT robot_id, name, SUM(P) AS P, SUM(W) AS W,
                SUM(L) AS L, SUM(D) AS D, SUM(robot1_score) AS GF,
                SUM(robot2_score) AS GA, SUM(GD) AS GD, SUM(PTS) AS PTS
                FROM (SELECT robot1_id AS robot_id, robots_robot.name AS name,
                1 AS P,
                CASE WHEN robot1_score > robot2_score THEN 1 ELSE 0 END AS W,
                CASE WHEN robot1_score < robot2_score THEN 1 ELSE 0 END AS L,
                CASE WHEN robot1_score = robot2_score THEN 1 ELSE 0 END AS D,
                robot1_score, robot2_score, robot1_score-robot2_score AS GD,
                CASE WHEN robot1_score > robot2_score THEN 3
                WHEN robot1_score < robot2_score THEN 0 ELSE 1 END AS PTS
                FROM matches_match LEFT JOIN robots_robot
                ON matches_match.robot1_id=robots_robot.id
                WHERE matches_match.finished != 0
                UNION SELECT 
robot2_id, robots_robot.name, 1 AS Played, CASE WHEN robot1_score > robot2_score THEN 1 ELSE 0 END, CASE WHEN robot1_score < robot2_score THEN 1 ELSE 0 END, CASE WHEN robot1_score = robot2_score THEN 1 ELSE 0 END, robot1_score, robot2_score, robot1_score-robot2_score, CASE WHEN robot1_score > robot2_score THEN 3 WHEN robot1_score < robot2_score THEN 0 ELSE 1 END FROM matches_match LEFT JOIN robots_robot ON matches_match.robot2_id=robots_robot.id WHERE matches_match.finished != 0) GROUP BY robot_id ORDER BY P DESC, PTS DESC, GD DESC""") for row in c.fetchall(): table.append({ 'robot_id': row[0], 'name': row[1], 'played': row[2], 'won': row[3], 'lost': row[4], 'drawn': row[5], 'GF': row[6], 'GA': row[7], 'GD': row[8], 'points': row[9]}) return table
gpl-3.0
6,876,304,096,557,620,000
34.225166
79
0.602369
false
3.957589
false
false
false
Bajoo/client-pc
tests/unit_tests/filesync/task_consumer_test.py
1
6616
# -*- coding:utf-8 -*- import threading import pytest from bajoo.filesync import task_consumer from bajoo.promise import Promise class TestTaskConsumer(object): def _make_external_promise(self): """Helper used to make stub Promise. Returns: Promise, resolve, reject: the promise and its callbacks. """ callbacks = [] def executor(resolve, reject): callbacks.append(resolve) callbacks.append(reject) return Promise(executor), callbacks[0], callbacks[1] def test_add_empty_task(self): """Add a task who is an almost empty generator.""" with task_consumer.Context(): task_executed = [] def task(): task_executed.append(True) yield promise = task_consumer.add_task(task) promise.result(0.01) assert task_executed def test_add_task_returning_value(self): """Add a simple task who must return a value.""" with task_consumer.Context(): def task(): yield 56 promise = task_consumer.add_task(task) assert promise.result(0.01) is 56 def test_add_task_multistep(self): """Add a task who has to wait other external tasks (promise).""" p1, resolve, _ = self._make_external_promise() p2, resolve2, _ = self._make_external_promise() def task(): value = yield p1 assert value is 44 value2 = yield p2 yield value2 * 2 with task_consumer.Context(): p_task = task_consumer.add_task(task) resolve(44) resolve2(26) assert p_task.result(0.01) is 52 def test_all_step_use_dedicated_thread(self): """Ensures the code in a task is always executed in a filesync thread. The generator code is always executed in a thread belonging to the filesync threads. """ main_thread = threading.current_thread().ident p1, resolve, _ = self._make_external_promise() p2, resolve2, _ = self._make_external_promise() def task(): assert threading.current_thread().ident is not main_thread yield p1 assert threading.current_thread().ident is not main_thread yield p2 assert threading.current_thread().ident is not main_thread yield Promise.resolve(None) assert threading.current_thread().ident is not main_thread with task_consumer.Context(): p_task = task_consumer.add_task(task) resolve(None) resolve2(None) p_task.result(0.01) def test_add_task_waiting_rejected_promise(self): """Add a task who should fail due to a rejected promise.""" class Err(Exception): pass def task(): yield Promise.resolve('OK') yield Promise.reject(Err()) with task_consumer.Context(): p = task_consumer.add_task(task) with pytest.raises(Err): p.result(0.01) def test_add_task_catching_rejected_promise(self): """Add a task who will catch a rejected promise.""" class Err(Exception): pass def task(): yield Promise.resolve('OK') with pytest.raises(Err): yield Promise.reject(Err()) yield 'OK' with task_consumer.Context(): p = task_consumer.add_task(task) assert p.result(0.01) == 'OK' def test_add_failing_task(self): """Add a task who will raises an Exception.""" class Err(Exception): pass def task(): yield Promise.resolve(True) raise Err() with task_consumer.Context(): p = task_consumer.add_task(task) with pytest.raises(Err): p.result(0.1) def test_add_many_tasks(self): """Add 100 new tasks and wait them all.""" promises = [] def task(): yield Promise.resolve(1) yield Promise.resolve(2) yield Promise.resolve(3) yield 1 with task_consumer.Context(): for i in range(40): promises.append(task_consumer.add_task(task)) result = Promise.all(promises).result(0.1) print(result) assert sum(result) is 40 def test_add_concurrent_tasks(self): """Add three tasks who are required to run at the same time. The task A will wait the Task B, then B will wait A. 
This test "force" the tasks to be executed in a non-linear order. """ p1_a, r1_a, _ = self._make_external_promise() p1_b, r1_b, _ = self._make_external_promise() p1_c, r1_c, _ = self._make_external_promise() p2_a, r2_a, _ = self._make_external_promise() p2_b, r2_b, _ = self._make_external_promise() p2_c, r2_c, _ = self._make_external_promise() def task_A(): r1_a(None) yield p1_b r2_a(None) yield p2_c yield 'A' def task_B(): r1_b(None) yield p1_c r2_b(None) yield p2_a yield 'B' def task_C(): r1_c(None) yield p1_a r2_c(None) yield p2_b yield 'C' with task_consumer.Context(): results = Promise.all([ task_consumer.add_task(task_A), task_consumer.add_task(task_B), task_consumer.add_task(task_C) ]).result(0.01) assert results == list('ABC') def test_ensure_task_generator_are_closed(self): """Ensure the task generators are properly closed after use. If a generator has yielded the final result, and the caller don't want to iter until the end, the caller must close the generator. Closing the generator will raise an exception GeneratorExit, and so allow the generator to clean resources. Without the close, resources locked by `with` will not be released. """ is_generator_closed = [] def task(): try: yield 'RESULT' except GeneratorExit: is_generator_closed.append(True) with task_consumer.Context(): p = task_consumer.add_task(task) assert p.result(0.01) == 'RESULT' assert is_generator_closed
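# Illustrative sketch (not part of the original tests): a stand-alone
# demonstration of the behaviour the last test above checks — closing a
# generator raises GeneratorExit inside it, which is what lets `with` blocks
# and try/except cleanup run. No bajoo modules are required here.
def _task():
    try:
        yield 'RESULT'
    except GeneratorExit:
        _task.closed = True
        raise

_task.closed = False
_gen = _task()
assert next(_gen) == 'RESULT'
_gen.close()          # raises GeneratorExit at the paused yield
assert _task.closed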
gpl-3.0
6,029,750,878,475,561,000
29.348624
78
0.546705
false
4.058896
true
false
false
azide0x37/tableFramer
tableFramer.py
1
1267
import requests import json from bs4 import BeautifulSoup from collections import OrderedDict class tableFramer: def __init__(self, url): self.url = url self.response = requests.get(url, headers = {'User-Agent': 'Mozilla/5.0'}) def __call__(self): souped = BeautifulSoup(self.response.text) tableHead = souped.find('thead') colNames = tableHead.findAll('th') print "colNames", colNames table = souped.find('table', summary = "Table listing details of the accident.") rows = table.findAll('tr', class_ = "infoCell") print "rows", rows dataset = [] for tr in rows: cols = tr.findAll('td') rowData = OrderedDict() counter = 1 for td in cols[1:]: text = ''.join(td.find(text=True)) try: rowData[colNames[counter]] = text counter += 1 except: counter = 0 continue dataset.append(rowData) return json.dumps(dataset)#, indent=4, separators=(',',':')) crashData = tableFramer('http://www.mshp.dps.missouri.gov/HP68/SearchAction') print crashData()
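# Illustrative sketch (not part of the original script): the same thead/th +
# tr/td extraction pattern used above, run against an inline HTML snippet so
# no network access is needed; assumes beautifulsoup4 is installed. (The
# original targets a live MSHP crash-report page.)
from bs4 import BeautifulSoup

_html = """
<table summary="demo"><thead><tr><th>Name</th><th>Age</th></tr></thead>
<tr class="infoCell"><td>skip</td><td>Ann</td><td>34</td></tr></table>
"""
_soup = BeautifulSoup(_html, "html.parser")
_cols = [th.get_text() for th in _soup.find("thead").find_all("th")]
for _tr in _soup.find_all("tr", class_="infoCell"):
    # mirrors the script above: the first cell of each row is skipped
    _cells = [td.get_text() for td in _tr.find_all("td")[1:]]
    print(dict(zip(_cols, _cells)))   # {'Name': 'Ann', 'Age': '34'}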
mit
2,197,101,322,134,218,800
29.166667
88
0.535912
false
4.251678
false
false
false
philipkershaw/ndg_security_server
ndg/security/server/wsgi/utils.py
2
7507
"""NDG Security WSGI utilities MashMyData Project """ __author__ = "P J Kershaw" __date__ = "21/08/11" __copyright__ = "(C) 2011 Science and Technology Facilities Council" __license__ = "BSD - see LICENSE file in top-level directory" __contact__ = "[email protected]" __revision__ = "$Id$" import logging log = logging.getLogger(__name__) class FileObjResponseIterator(object): """Helper class creates iterable WSGI response based on a given block size """ DEFAULT_BLK_SIZE = 1024 BYTE_RANGE_PREFIX = 'bytes=' BYTE_RANGE_SEP = '-' CONTENT_RANGE_FIELDNAME = 'Content-range' CONTENT_RANGE_FORMAT_STR = "bytes %d-%d/%d" INVALID_CONTENT_RANGE_FORMAT_STR = "bytes */%d" __slots__ = ( 'file_obj', 'file_size', '__block_size', 'read_lengths', 'content_length', 'content_range', 'content_range_hdr', 'closed_method' ) class IteratorError(Exception): """Base exception type for exceptions raised from FileObjResponseIterator class instances""" class InvalidRangeRequest(IteratorError): """Raise for an invalid byte range requested""" def __init__(self, *arg, **kw): FileObjResponseIterator.IteratorError.__init__(self, *arg, **kw) if len(arg) > 1: self.content_range_hdr = arg[1] else: self.content_range_hdr = None class InvalidRangeRequestSyntax(IteratorError): """Raise for invalid range request syntax""" def __init__(self, file_obj, file_size=-1, request_range=None, block_size=DEFAULT_BLK_SIZE): '''Open a file and set the blocks for reading, any input range set and the response size ''' self.file_obj = file_obj self.file_size = file_size # Find method of determining whether the file object is closed. if hasattr(file_obj, 'closed'): # Standard file interface has optional 'closed' attribute. self.closed_method = lambda : self.file_obj.closed elif hasattr(file_obj, 'isclosed'): # httplib.HTTPResponse has a non-standard 'isclosed' method. 
self.closed_method = self.file_obj.isclosed elif hasattr(file_obj, 'fp'): # urllib.addbase and derived classes returned by urllib and urllib2: self.closed_method = lambda : self.fp is None else: self.closed_method = None # the length of the content to return - this will be different to the # file size if the client a byte range header field setting self.content_length = 0 # None unless a valid input range was given self.content_range = None # Formatted for HTTP content range header field self.content_range_hdr = None # This will call the relevant set property method self.block_size = block_size # Array of blocks lengths for iterator to use to read the file self.read_lengths = [] if request_range is not None: # Prepare a content range header in case the range specified is # invalid content_range_hdr = (self.__class__.CONTENT_RANGE_FIELDNAME, self.__class__.INVALID_CONTENT_RANGE_FORMAT_STR % self.file_size) try: # Remove 'bytes=' prefix rangeVals = request_range.split( self.__class__.BYTE_RANGE_PREFIX)[-1] # Convert into integers taking into account that a value may be # absent startStr, endStr = rangeVals.split( self.__class__.BYTE_RANGE_SEP) start = int(startStr or 0) end = int(endStr or self.file_size - 1) except ValueError: raise self.__class__.InvalidRangeRequestSyntax('Invalid format ' 'for request range %r' % request_range) # Verify range bounds if start > end: raise self.__class__.InvalidRangeRequest('Range start index %r ' 'is greater than the end index %r' % (start, end), content_range_hdr) elif start < 0: raise self.__class__.InvalidRangeRequest('Range start index %r ' 'is less than zero' % start, content_range_hdr) elif end >= self.file_size: # This is not an error - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1 log.warning('Range end index %r is greater than the length %r ' 'of the requested resource - reseting to %r', end, self.file_size, self.file_size - 1) end = self.file_size - 1 # Set the total content length to return self.content_length = end + 1 - start self.content_range = (start, end) self.content_range_hdr = ( self.__class__.CONTENT_RANGE_FIELDNAME, self.__class__.CONTENT_RANGE_FORMAT_STR % (self.content_range + (self.file_size,)) ) try: self.file_obj.seek(start) except AttributeError: # File seek method is optional. pass else: # Set the total content length to return self.content_length = self.file_size nReads = self.content_length / self.block_size lastReadLen = self.content_length % self.block_size self.read_lengths = [self.block_size] * nReads if lastReadLen > 0: nReads += 1 self.read_lengths.append(lastReadLen) def __iter__(self): '''Read the file object a block at a time''' # Leave read_lengths attribute intact read_lengths = self.read_lengths[:] while (self.content_length < 0) or (len(read_lengths) > 0): if self.content_length < 0: if self.closed_method(): return amt = self.block_size else: amt = read_lengths.pop() output = self.file_obj.read(amt) if not output: self.close() yield output def close(self): """Closes the file object. """ self.file_obj.close() @property def block_size(self): """block size for reading the file in the iterator and returning a response """ return self.__block_size @block_size.setter def block_size(self, value): """block size for reading the file in the iterator and returning a response """ self.__block_size = int(value) if self.__block_size < 0: raise ValueError('Expecting positive integer value for block size ' 'attribute')
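# Illustrative sketch (not part of the original module): reproduces the
# read-length plan the iterator above builds — whole blocks plus one trailing
# partial read. Uses // so the sketch also runs on Python 3 (the module
# targets Python 2, where / on ints already floors).
def _plan_reads(content_length, block_size=1024):
    n_reads = content_length // block_size
    last = content_length % block_size
    lengths = [block_size] * n_reads
    if last > 0:
        lengths.append(last)
    return lengths

assert _plan_reads(2500, 1024) == [1024, 1024, 452]
assert sum(_plan_reads(2500, 1024)) == 2500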
bsd-3-clause
3,673,313,203,661,367,000
37.497436
83
0.520714
false
4.619692
false
false
false
cltrudeau/django-awl
awl/tests/test_utils.py
1
3111
# awl.tests.test_utils.py import sys from io import StringIO from django.test import TestCase from awl.tests.models import Link from awl.utils import (URLTree, refetch, refetch_for_update, render_page, render_page_to_string, get_field_names, get_obj_attr) from awl.waelsteng import FakeRequest # ============================================================================ class UtilsTest(TestCase): def test_url_tree(self): # print_tree() exercises everything, so run it and capture stdout tree = URLTree() saved_stdout = sys.stderr try: out = StringIO() sys.stdout = out tree.print_tree() finally: sys.stdout = saved_stdout def test_refetch(self): link = Link.objects.create(url='url', text='text') link.text = 'foo' link = refetch(link) self.assertEqual('url', link.url) self.assertEqual('text', link.text) link.text = 'foo' link = refetch_for_update(link) self.assertEqual('url', link.url) self.assertEqual('text', link.text) def test_renders(self): request = FakeRequest() expected = 'Hello World\n' result = render_page_to_string(request, 'sample.html', {'name':'World'}) self.assertEqual(expected, result) response = render_page(request, 'sample.html', {'name':'World'}) self.assertEqual(expected, response.content.decode('ascii')) def test_get_field_names(self): from awl.tests.models import Person # test defaults, ignore order expected = ['name', 'phone'] result = get_field_names(Person) self.assertEqual(set(result), set(expected)) # test ignore_auto, ignore_relations and exclude expected.extend(['id', 'building', 'address', 'courses', 'best_friend', 'person']) expected.remove('phone') result = get_field_names(Person, ignore_auto=False, ignore_relations=False, exclude=['phone']) self.assertEqual(set(result), set(expected)) def test_get_obj_attr(self): # --- data for testing class Character(object): pass class Cartoon(object): pass barney = Character() barney.name = 'Barney' betty = Character() betty.name = 'Betty' betty.husband = barney wilma = Character() wilma.name = 'Wilma' wilma.friend = betty cartoon = Cartoon() cartoon.name = 'Flinstones' cartoon.character = wilma # --- tests self.assertEqual('Flinstones', get_obj_attr(cartoon, 'name')) self.assertEqual(wilma, get_obj_attr(cartoon, 'character')) self.assertEqual(betty, get_obj_attr(cartoon, 'character__friend')) self.assertEqual(barney, get_obj_attr(cartoon, 'character__friend__husband')) with self.assertRaises(AttributeError): get_obj_attr(cartoon, 'foo') with self.assertRaises(AttributeError): get_obj_attr(cartoon, 'character__foo')
mit
4,314,477,777,599,757,000
30.11
80
0.58695
false
3.845488
true
false
false
gaganjyot/EXIFGeoLocation
main.py
1
1898
from PIL import Image
from PIL.ExifTags import GPSTAGS, TAGS


def _get_if_exist(data, key):
    if key in data:
        return data[key]
    return None


def _convert_to_degrees(value):
    """Helper function to convert the GPS coordinates stored in the EXIF to degrees in float format"""
    d0 = value[0][0]
    d1 = value[0][1]
    d = float(d0) / float(d1)

    m0 = value[1][0]
    m1 = value[1][1]
    m = float(m0) / float(m1)

    s0 = value[2][0]
    s1 = value[2][1]
    s = float(s0) / float(s1)

    return d + (m / 60.0) + (s / 3600.0)


def get_exif(fn):
    ret = {}
    i = Image.open(fn)
    info = i._getexif()
    for tag, value in info.items():
        decoded = TAGS.get(tag, tag)
        #print "TAG", decoded, value
        if decoded == "GPSInfo":
            gps_data = {}
            for t in value:
                sub_decoded = GPSTAGS.get(t, t)
                gps_data[sub_decoded] = value[t]
            ret[decoded] = gps_data

            lat = None
            lon = None

            gps_latitude = _get_if_exist(gps_data, "GPSLatitude")
            gps_latitude_ref = _get_if_exist(gps_data, 'GPSLatitudeRef')
            gps_longitude = _get_if_exist(gps_data, 'GPSLongitude')
            gps_longitude_ref = _get_if_exist(gps_data, 'GPSLongitudeRef')

            if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
                lat = _convert_to_degrees(gps_latitude)
                if gps_latitude_ref != "N":
                    lat = 0 - lat

                lon = _convert_to_degrees(gps_longitude)
                if gps_longitude_ref != "E":
                    lon = 0 - lon

            ret['latitude'] = lat
            ret['longitude'] = lon
        else:
            ret[decoded] = value
    return ret


info_found = get_exif("/home/gagan/Downloads/a.jpg")
print(info_found["latitude"])
print(info_found["longitude"])
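# Illustrative sketch (not part of the original script): _convert_to_degrees
# above expects EXIF rationals as (numerator, denominator) pairs for
# degrees/minutes/seconds. The sample below encodes 59 deg 53' 39.84",
# i.e. roughly 59.8944 decimal degrees (the tuple values are made up).
_dms = ((59, 1), (53, 1), (3984, 100))
_deg = sum(float(n) / float(d) / f for (n, d), f in zip(_dms, (1.0, 60.0, 3600.0)))
print(round(_deg, 4))  # 59.8944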
gpl-3.0
-757,813,787,278,791,300
27.772727
102
0.535827
false
3.200675
false
false
false
kennethreitz/pipenv
tests/integration/test_project.py
1
8289
# -*- coding=utf-8 -*- from __future__ import absolute_import, print_function import io import os import tarfile import pytest from pipenv.patched import pipfile from pipenv.project import Project from pipenv.utils import temp_environ from pipenv.vendor.vistir.path import is_in_path, normalize_path from pipenv.vendor.delegator import run as delegator_run @pytest.mark.project @pytest.mark.sources @pytest.mark.environ def test_pipfile_envvar_expansion(PipenvInstance): with PipenvInstance(chdir=True) as p: with temp_environ(): with open(p.pipfile_path, 'w') as f: f.write(""" [[source]] url = 'https://${TEST_HOST}/simple' verify_ssl = false name = "pypi" [packages] pytz = "*" """.strip()) os.environ['TEST_HOST'] = 'localhost:5000' project = Project() assert project.sources[0]['url'] == 'https://localhost:5000/simple' assert 'localhost:5000' not in str(pipfile.load(p.pipfile_path)) @pytest.mark.project @pytest.mark.sources @pytest.mark.parametrize('lock_first', [True, False]) def test_get_source(PipenvInstance, lock_first): with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] url = "{0}" verify_ssl = false name = "testindex" [[source]] url = "https://pypi.org/simple" verify_ssl = "true" name = "pypi" [packages] pytz = "*" six = {{version = "*", index = "pypi"}} [dev-packages] """.format(os.environ['PIPENV_TEST_INDEX']).strip() f.write(contents) if lock_first: # force source to be cached c = p.pipenv('lock') assert c.return_code == 0 project = Project() sources = [ ['pypi', 'https://pypi.org/simple'], ['testindex', os.environ.get('PIPENV_TEST_INDEX')] ] for src in sources: name, url = src source = [s for s in project.pipfile_sources if s.get('name') == name] assert source source = source[0] assert source['name'] == name assert source['url'] == url assert sorted(source.items()) == sorted(project.get_source(name=name).items()) assert sorted(source.items()) == sorted(project.get_source(url=url).items()) assert sorted(source.items()) == sorted(project.find_source(name).items()) assert sorted(source.items()) == sorted(project.find_source(url).items()) @pytest.mark.install @pytest.mark.project @pytest.mark.parametrize('newlines', [u'\n', u'\r\n']) def test_maintain_file_line_endings(PipenvInstance, newlines): with PipenvInstance(chdir=True) as p: # Initial pipfile + lockfile generation c = p.pipenv('install pytz') assert c.return_code == 0 # Rewrite each file with parameterized newlines for fn in [p.pipfile_path, p.lockfile_path]: with io.open(fn) as f: contents = f.read() written_newlines = f.newlines assert written_newlines == u'\n', '{0!r} != {1!r} for {2}'.format( written_newlines, u'\n', fn, ) # message because of https://github.com/pytest-dev/pytest/issues/3443 with io.open(fn, 'w', newline=newlines) as f: f.write(contents) # Run pipenv install to programatically rewrite c = p.pipenv('install chardet') assert c.return_code == 0 # Make sure we kept the right newlines for fn in [p.pipfile_path, p.lockfile_path]: with io.open(fn) as f: f.read() # Consumes the content to detect newlines. 
actual_newlines = f.newlines assert actual_newlines == newlines, '{0!r} != {1!r} for {2}'.format( actual_newlines, newlines, fn, ) # message because of https://github.com/pytest-dev/pytest/issues/3443 @pytest.mark.project @pytest.mark.sources @pytest.mark.needs_internet def test_many_indexes(PipenvInstance): with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] url = "{0}" verify_ssl = false name = "testindex" [[source]] url = "https://pypi.org/simple" verify_ssl = "true" name = "pypi" [[source]] url = "https://pypi.python.org/simple" verify_ssl = "true" name = "legacy" [packages] pytz = "*" six = {{version = "*", index = "pypi"}} [dev-packages] """.format(os.environ['PIPENV_TEST_INDEX']).strip() f.write(contents) c = p.pipenv('install') assert c.return_code == 0 @pytest.mark.install @pytest.mark.project def test_include_editable_packages(PipenvInstance, testsroot, pathlib_tmpdir): file_name = "tablib-0.12.1.tar.gz" package = pathlib_tmpdir.joinpath("tablib-0.12.1") source_path = os.path.abspath(os.path.join(testsroot, "pypi", "tablib", file_name)) with PipenvInstance(chdir=True) as p: with tarfile.open(source_path, "r:gz") as tarinfo: tarinfo.extractall(path=str(pathlib_tmpdir)) c = p.pipenv('install -e {0}'.format(package.as_posix())) assert c.return_code == 0 project = Project() assert "tablib" in [ package.project_name for package in project.environment.get_installed_packages() ] @pytest.mark.project @pytest.mark.virtualenv def test_run_in_virtualenv_with_global_context(PipenvInstance, virtualenv): with PipenvInstance(chdir=True, venv_root=virtualenv.as_posix(), ignore_virtualenvs=False, venv_in_project=False) as p: c = delegator_run( "pipenv run pip freeze", cwd=os.path.abspath(p.path), env=os.environ.copy() ) assert c.return_code == 0, (c.out, c.err) assert 'Creating a virtualenv' not in c.err, c.err project = Project() assert project.virtualenv_location == virtualenv.as_posix(), ( project.virtualenv_location, virtualenv.as_posix() ) c = delegator_run( "pipenv run pip install -i {} click".format(p.index_url), cwd=os.path.abspath(p.path), env=os.environ.copy() ) assert c.return_code == 0, (c.out, c.err) assert "Courtesy Notice" in c.err, (c.out, c.err) c = delegator_run( "pipenv install -i {} six".format(p.index_url), cwd=os.path.abspath(p.path), env=os.environ.copy() ) assert c.return_code == 0, (c.out, c.err) c = delegator_run( 'pipenv run python -c "import click;print(click.__file__)"', cwd=os.path.abspath(p.path), env=os.environ.copy() ) assert c.return_code == 0, (c.out, c.err) assert is_in_path(c.out.strip(), str(virtualenv)), (c.out.strip(), str(virtualenv)) c = delegator_run( "pipenv clean --dry-run", cwd=os.path.abspath(p.path), env=os.environ.copy() ) assert c.return_code == 0, (c.out, c.err) assert "click" in c.out, c.out @pytest.mark.project @pytest.mark.virtualenv def test_run_in_virtualenv(PipenvInstance): with PipenvInstance(chdir=True) as p: c = p.pipenv('run pip freeze') assert c.return_code == 0 assert 'Creating a virtualenv' in c.err project = Project() c = p.pipenv("run pip install click") assert c.return_code == 0 c = p.pipenv("install six") assert c.return_code == 0 c = p.pipenv('run python -c "import click;print(click.__file__)"') assert c.return_code == 0 assert normalize_path(c.out.strip()).startswith( normalize_path(str(project.virtualenv_location)) ) c = p.pipenv("clean --dry-run") assert c.return_code == 0 assert "click" in c.out @pytest.mark.project @pytest.mark.sources def 
test_no_sources_in_pipfile(PipenvInstance): with PipenvInstance(chdir=True) as p: with open(p.pipfile_path, 'w') as f: contents = """ [packages] pytest = "*" """.format(os.environ['PIPENV_TEST_INDEX']).strip() f.write(contents) c = p.pipenv('install --skip-lock') assert c.return_code == 0
mit
8,963,331,359,839,735,000
32.695122
123
0.59392
false
3.450874
true
false
false
ababino/networkb
networkb/algorithms/utils.py
1
3266
# -*- coding: utf-8 -*- """ Created on Wed May 15 20:11:59 2013 @author: andres """ import numpy import networkx from scipy import spatial from scipy import stats def find_peaks(th,gc): peaks=[] for i in range(1,len(th)-1): if gc[1][i-1]<gc[1][i] and gc[1][i]>gc[1][i+1]: peaks.append((th[i],gc[1][i])) return peaks def nodedistance(affine,vP,n1,n2): """ node distance in cm. (en general) """ ind1=vP[n1] ind2=vP[n2] if len(ind1)==3: ind1.append(1) if len(ind2)==3: ind2.append(1) v1=numpy.dot(affine, numpy.transpose(ind1))[0:3] v2=numpy.dot(affine, numpy.transpose(ind2))[0:3] d=spatial.distance.euclidean(v1,v2) return d def power_law_fit(x,y): pl = lambda A, d, x: A*x**d a, b, r_value, p_value, std_err = stats.linregress(numpy.log(x),numpy.log(y)) y_fit=pl(numpy.exp(b),a,x) return (a,y_fit) def exp_fit(x,y): exp_fun = lambda A, x0, x: A*numpy.exp(x/x0) a, b, r_value, p_value, std_err = stats.linregress(x,numpy.log(y)) A=numpy.exp(b) x0=1.0/a y_fit=exp_fun(A,x0,x) return (A,x0,y_fit) def gaussian_fit(x,y): pl = lambda A, x0, s, x: A*numpy.exp(((x-x0)**2)/s) p = numpy.polyfit(x,numpy.log(y),2) s=1./p[0] x0=-p[1]/(2*p[0]) A=numpy.exp(p[2]+(p[1]**2)/(4*p[0])) y_fit=pl(A,x0,s,x) return ((A,x0,s),y_fit) def window_correlation(x,y,w): if len(x)!=len(y): print 'vector x and y must be of the same size' print 'len(x)='+str(len(x)) print 'len(y)='+str(len(y)) return if len(x)<w: print 'window mus be smaller than len(x)' print 'len(x)='+str(len(x))+' w='+str(w) N=len(x)-w return [stats.pearsonr(x[i:i+w],y[i:i+w])[0] for i in range(N)] def find_th_jumps(bn,max_clus=2): """ Returns the thresholds where a jump occurs. A jump is defined as the join of the biggest cluster with, up to, the max_clus cluster. """ NON=bn.get_non() node_list=[node for node,dat in NON.nodes(data=True) if dat['order']==0] subNON=networkx.Graph() for n1,n2 in NON.edges_iter(nbunch=node_list): subNON.add_edge(n1,n2) node_list=networkx.connected_components(subNON)[0] subNON=NON.subgraph(node_list) max_th=max([dat['th'] for n,dat in subNON.nodes(data=True)]) N=bn.number_of_nodes() jumps=[] first_cluster=(0,[]) for node,data in NON.nodes(data=True): if NON.degree(node)>=3 and NON.node[node]['order']==0: for node2 in NON.neighbors(node): if 0<NON.node[node2]['order']<=max_clus: if 20*len(NON.node[node2]['cc'])>len(NON.node[node]['cc']) or 200*len(NON.node[node2]['cc'])>N: if NON.node[node2]['th']<max_th: jumps.append((NON.node[node2]['th'],NON.node[node2]['cc'])) if NON.node[node2]['th']>first_cluster[0]: for node3 in NON.neighbors(node): if NON.node[node3]['order']==0 and NON.node[node3]['th']==NON.node[node2]['th']: first_cluster=((NON.node[node3]['th'],NON.node[node3]['cc'])) jumps.append(first_cluster) jumps=sorted(jumps,key=lambda x: x[0],reverse=True) return jumps def nodelist2volumen(bn,nodelist,element): node2voxel=bn.node2voxel B=numpy.zeros(bn.volume_shape) for node in nodelist: (i,j,k)=node2voxel[str(node)] B[i,j,k]=element return B
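# Illustrative sketch (not part of the original module): exercises the
# log-log regression idea behind power_law_fit above on synthetic data with a
# known amplitude and exponent; the sample arrays are invented and only
# numpy/scipy are used.
import numpy
from scipy import stats

_x = numpy.linspace(1.0, 10.0, 50)
_y = 3.0 * _x ** -1.5                      # A = 3, exponent d = -1.5
_d, _b, _r, _p, _err = stats.linregress(numpy.log(_x), numpy.log(_y))
assert abs(_d + 1.5) < 1e-6 and abs(numpy.exp(_b) - 3.0) < 1e-6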
mit
3,844,366,845,292,625,000
28.963303
105
0.609002
false
2.464906
false
false
false
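A quick smoke test of the two regression helpers above, assuming the module is importable as networkb.algorithms.utils (the path recorded for this file) and that numpy and scipy are installed; the sample data is synthetic and the expected values are approximate:

# Hypothetical smoke test for power_law_fit and exp_fit; not part of the repo.
import numpy

from networkb.algorithms.utils import power_law_fit, exp_fit

x = numpy.linspace(1.0, 10.0, 50)
d, y_fit = power_law_fit(x, 3.0 * x ** -1.5)             # fit y = A*x**d
A, x0, y_exp_fit = exp_fit(x, 2.0 * numpy.exp(x / 4.0))  # fit y = A*exp(x/x0)
print(d)      # close to -1.5 (power_law_fit returns the exponent, not A)
print(A, x0)  # close to (2.0, 4.0)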
pytest-dev/pytest-bdd
pytest_bdd/utils.py
1
1191
"""Various utility functions.""" from inspect import getframeinfo from inspect import signature as _signature from sys import _getframe CONFIG_STACK = [] def get_args(func): """Get a list of argument names for a function. :param func: The function to inspect. :return: A list of argument names. :rtype: list """ params = _signature(func).parameters.values() return [param.name for param in params if param.kind == param.POSITIONAL_OR_KEYWORD] def get_parametrize_markers_args(node): return tuple(arg for mark in node.iter_markers("parametrize") for arg in mark.args) def get_caller_module_locals(depth=2): """Get the caller module locals dictionary. We use sys._getframe instead of inspect.stack(0) because the latter is way slower, since it iterates over all the frames in the stack. """ return _getframe(depth).f_locals def get_caller_module_path(depth=2): """Get the caller module path. We use sys._getframe instead of inspect.stack(0) because the latter is way slower, since it iterates over all the frames in the stack. """ frame = _getframe(depth) return getframeinfo(frame, context=0).filename
mit
-1,642,162,315,754,404,600
27.357143
109
0.706129
false
3.904918
false
false
false
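get_args() above keeps only POSITIONAL_OR_KEYWORD parameters, so varargs, **kwargs and keyword-only arguments are silently dropped. A small illustration of that filtering, assuming pytest_bdd is installed at a version that still ships this helper:

# Illustration only; 'step_impl' is a made-up function, not pytest-bdd API.
from pytest_bdd.utils import get_args

def step_impl(request, browser, *args, timeout=5, **kwargs):
    pass

print(get_args(step_impl))  # ['request', 'browser'] - the rest are filtered out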
Xelaadryth/Xelabot
quest/quests/monster.py
1
4536
from random import getrandbits, randint

from ..quest import Quest
from ..quest_segment import QuestSegment
import settings
from utils.command_set import CommandSet


GOLD_SAFE_REWARD = 75
GOLD_VARIANCE_SAFE = 21
EXP_SAFE_REWARD = 2
GOLD_RISKY_PENALTY = 200
GOLD_RISKY_REWARD = 300
GOLD_RISKY_REWARD_BIG = 400
GOLD_VARIANCE_RISKY = GOLD_VARIANCE_SAFE * 2
EXP_RISKY_REWARD = EXP_SAFE_REWARD * 2
EXP_RISKY_REWARD_BIG = EXP_SAFE_REWARD + 1
GOLD_TIMEOUT_PENALTY = 300
MONSTER_LEVEL = 12
LEVEL_VARIANCE = 15


class Monster(Quest):
    def __init__(self, quest_manager):
        super().__init__(quest_manager)

        self.starting_segment = Start


class Start(QuestSegment):
    def set_commands(self):
        self.commands = CommandSet(exact_match_commands={
            '!attack': self.attack,
            '!flee': self.flee
        })

    def play(self):
        msg = (
            'In the treasure room of an abandoned ruin, a strange Void creature materializes in front of {}. '
            'Do you !attack or !flee?'.format(self.quest.party[0]))
        self.channel.send_msg(msg)

    def attack(self, display_name):
        if display_name not in self.quest.party:
            return

        level = randint(-LEVEL_VARIANCE, LEVEL_VARIANCE) + self.player_manager.get_level(display_name)
        if level < -2:
            gold = GOLD_RISKY_PENALTY + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
            msg = (
                '{0} never stood a chance, getting immediately suppressed and mobbed to death by Voidlings without '
                'so much as a chance to twitch. Maybe try leveling up! {0} loses {1} gold.'.format(display_name, gold))
            self.channel.send_msg(msg)
            self.penalize(display_name, gold=gold)
        elif level < MONSTER_LEVEL:
            gold = GOLD_RISKY_PENALTY + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
            msg = (
                '{0} charges towards the Void creature and gets immediately vaporized by lazers. Pew Pew! '
                '{0} loses {1} gold.'.format(display_name, gold))
            self.channel.send_msg(msg)
            self.penalize(display_name, gold=gold)
        elif level < settings.LEVEL_CAP + LEVEL_VARIANCE / 3:
            gold = GOLD_RISKY_REWARD + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
            msg = (
                '{0} manages to slay the Void creature after a long struggle and some celebratory crumpets. '
                '{0} gains {1} gold and {2} exp.'.format(display_name, gold, EXP_RISKY_REWARD))
            self.channel.send_msg(msg)
            self.reward(display_name, gold=gold, exp=EXP_RISKY_REWARD)
        else:
            gold = GOLD_RISKY_REWARD_BIG + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
            msg = (
                '{0} dismembers the creature with almost surgical precision, and even discovers a new class of '
                'organ in the process. Hurrah! '
                '{0} gains {1} gold and {2} exp.'.format(display_name, gold, EXP_RISKY_REWARD_BIG))
            self.channel.send_msg(msg)
            self.reward(display_name, gold=gold, exp=EXP_RISKY_REWARD_BIG)
        self.complete_quest()

    def flee(self, display_name):
        if display_name not in self.quest.party:
            return

        gold = GOLD_SAFE_REWARD + randint(-GOLD_VARIANCE_SAFE, GOLD_VARIANCE_SAFE)
        if bool(getrandbits(1)):
            msg = (
                '{0} manages to bravely run away in the face of overwhelming power, '
                'and even manages to snatch a few coins on the way out! '
                '{0} gains {1} gold and {2} exp.'.format(display_name, gold, EXP_SAFE_REWARD))
            self.reward(display_name, gold=gold, exp=EXP_SAFE_REWARD)
            self.channel.send_msg(msg)
        else:
            msg = ('{0} tries to run away but is torn to shreds by blade-like arms. Owie! '
                   '{0} loses {1} gold.'.format(display_name, gold))
            self.channel.send_msg(msg)
            self.penalize(display_name, gold=gold)
        self.complete_quest()

    def timeout(self):
        self.channel.send_msg(
            '{0} makes no motion to attack or flee, and instead stands motionless in the face of the enemy. 
' '{0} becomes covered by caustic spittle, digested alive, and slowly devoured. ' '{0} loses {1} gold.'.format(self.quest.party[0], GOLD_TIMEOUT_PENALTY)) self.penalize(self.quest.party[0], gold=GOLD_TIMEOUT_PENALTY) self.complete_quest()
mit
256,312,372,235,626,660
40.614679
119
0.612434
false
3.185393
false
false
false
smallyear/linuxLearn
salt/salt/returners/mongo_future_return.py
1
7113
# -*- coding: utf-8 -*- ''' Return data to a mongodb server Required python modules: pymongo This returner will send data from the minions to a MongoDB server. To configure the settings for your MongoDB server, add the following lines to the minion config files: .. code-block:: yaml mongo.db: <database name> mongo.host: <server ip address> mongo.user: <MongoDB username> mongo.password: <MongoDB user password> mongo.port: 27017 You can also ask for indexes creation on the most common used fields, which should greatly improve performance. Indexes are not created by default. .. code-block:: yaml mongo.indexes: true Alternative configuration values can be used by prefacing the configuration. Any values not found in the alternative configuration will be pulled from the default location: .. code-block:: yaml alternative.mongo.db: <database name> alternative.mongo.host: <server ip address> alternative.mongo.user: <MongoDB username> alternative.mongo.password: <MongoDB user password> alternative.mongo.port: 27017 This mongo returner is being developed to replace the default mongodb returner in the future and should not be considered API stable yet. To use the mongo returner, append '--return mongo' to the salt command. .. code-block:: bash salt '*' test.ping --return mongo To use the alternative configuration, append '--return_config alternative' to the salt command. .. versionadded:: 2015.5.0 .. code-block:: bash salt '*' test.ping --return mongo --return_config alternative ''' from __future__ import absolute_import # Import python libs import logging # Import Salt libs import salt.utils.jid import salt.returners import salt.ext.six as six # Import third party libs try: import pymongo version = pymongo.version version = '.'.join(version.split('.')[:2]) HAS_PYMONGO = True except ImportError: HAS_PYMONGO = False log = logging.getLogger(__name__) # Define the module's virtual name __virtualname__ = 'mongo' def __virtual__(): if not HAS_PYMONGO: return False return __virtualname__ def _remove_dots(src): ''' Remove the dots from the given data structure ''' output = {} for key, val in six.iteritems(src): if isinstance(val, dict): val = _remove_dots(val) output[key.replace('.', '-')] = val return output def _get_options(ret=None): ''' Get the mongo options from salt. 
'''
    attrs = {'host': 'host',
             'port': 'port',
             'db': 'db',
             'user': 'user',
             'password': 'password',
             'indexes': 'indexes'}

    _options = salt.returners.get_returner_options(__virtualname__,
                                                   ret,
                                                   attrs,
                                                   __salt__=__salt__,
                                                   __opts__=__opts__)
    return _options


def _get_conn(ret):
    '''
    Return a mongodb connection object
    '''
    _options = _get_options(ret)

    host = _options.get('host')
    port = _options.get('port')
    db_ = _options.get('db')
    user = _options.get('user')
    password = _options.get('password')
    indexes = _options.get('indexes', False)

    # at some point we should remove support for
    # pymongo versions < 2.3 until then there are
    # a bunch of these sections that need to be supported

    if float(version) > 2.3:
        conn = pymongo.MongoClient(host, port)
    else:
        conn = pymongo.Connection(host, port)
    mdb = conn[db_]

    if user and password:
        mdb.authenticate(user, password)

    if indexes:
        if float(version) > 2.3:
            mdb.saltReturns.create_index('minion')
            mdb.saltReturns.create_index('jid')
            mdb.jobs.create_index('jid')
        else:
            mdb.saltReturns.ensure_index('minion')
            mdb.saltReturns.ensure_index('jid')
            mdb.jobs.ensure_index('jid')

    return conn, mdb


def returner(ret):
    '''
    Return data to a mongodb server
    '''
    conn, mdb = _get_conn(ret)

    if isinstance(ret['return'], dict):
        back = _remove_dots(ret['return'])
    else:
        back = ret['return']

    if isinstance(ret, dict):
        full_ret = _remove_dots(ret)
    else:
        full_ret = ret

    log.debug(back)
    sdata = {'minion': ret['id'], 'jid': ret['jid'], 'return': back, 'fun': ret['fun'], 'full_ret': full_ret}
    if 'out' in ret:
        sdata['out'] = ret['out']

    # save returns in the saltReturns collection in the json format:
    # { 'minion': <minion_name>, 'jid': <job_id>, 'return': <return info with dots removed>,
    #   'fun': <function>, 'full_ret': <unformatted return with dots removed>}
    #
    # again we run into the issue with deprecated code from previous versions

    if float(version) > 2.3:
        # using .copy() to ensure that the original data is not changed
        # (pymongo's insert mutates the dict it is given)
        mdb.saltReturns.insert_one(sdata.copy())
    else:
        mdb.saltReturns.insert(sdata.copy())


def save_load(jid, load):
    '''
    Save the load for a given job id
    '''
    conn, mdb = _get_conn(ret=None)
    if float(version) > 2.3:
        # using .copy() to ensure original data for load is unchanged
        mdb.jobs.insert_one(load.copy())
    else:
        mdb.jobs.insert(load.copy())


def save_minions(jid, minions):  # pylint: disable=unused-argument
    '''
    Included for API consistency
    '''
    pass


def get_load(jid):
    '''
    Return the load associated with a given job id
    '''
    conn, mdb = _get_conn(ret=None)
    ret = mdb.jobs.find_one({'jid': jid}, {'_id': 0})
    return ret['load']


def get_jid(jid):
    '''
    Return the return information associated with a jid
    '''
    conn, mdb = _get_conn(ret=None)
    ret = {}
    rdata = mdb.saltReturns.find({'jid': jid}, {'_id': 0})
    if rdata:
        for data in rdata:
            minion = data['minion']
            # return data in the format {<minion>: { <unformatted full return data>}}
            ret[minion] = data['full_ret']
    return ret


def get_fun(fun):
    '''
    Return the most recent jobs that have executed the named function
    '''
    conn, mdb = _get_conn(ret=None)
    ret = {}
    rdata = mdb.saltReturns.find_one({'fun': fun}, {'_id': 0})
    if rdata:
        ret = rdata
    return ret


def get_minions():
    '''
    Return a list of minions
    '''
    conn, mdb = _get_conn(ret=None)
    ret = []
    name = mdb.saltReturns.distinct('minion')
    ret.append(name)
    return ret


def get_jids():
    '''
    Return a list of job ids
    '''
    conn, mdb = _get_conn(ret=None)
    ret = []
    name = mdb.jobs.distinct('jid')
    ret.append(name)
    return ret


def prep_jid(nocache=False, passed_jid=None): # pylint: 
disable=unused-argument ''' Do any work necessary to prepare a JID, including sending a custom id ''' return passed_jid if passed_jid is not None else salt.utils.jid.gen_jid()
apache-2.0
-3,668,263,459,001,041,000
24.959854
109
0.604246
false
3.749605
true
false
false
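MongoDB rejects dots in key names, which is why the returner above funnels every return document through _remove_dots() before insertion. A short illustration of the transform, assuming the salt tree above is importable; the sample document is made up:

# Illustration only; requires a salt checkout on the Python path.
from salt.returners.mongo_future_return import _remove_dots

doc = {'web.server': {'pkg.installed': 'nginx'}}
print(_remove_dots(doc))  # {'web-server': {'pkg-installed': 'nginx'}}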
Pinyto/cloud
api_prototype/sandbox.py
1
3475
# coding=utf-8
"""
Pinyto cloud - A secure cloud database for your personal data
Copyright (C) 2015 Johannes Merkert <[email protected]>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""

import time
from multiprocessing import Process, Queue
from multiprocessing.queues import Empty
from api_prototype.seccomp_process import SecureHost


def sandbox(code, request, real_db, queue):
    """
    This function gets executed in a separate subprocess which does not share the memory with the main
    Django process. This is done a) for security reasons to minimize the risk that code inside of the
    sandbox is able to do anything harmful and b) for cleanly measuring the execution time for the code
    because the user may have to pay for it.

    :param code: The python code which should be executed in the sandbox
    :type code: str
    :param request: Django's request object
    :type request: HttpRequest
    :param real_db: The database connection
    :type real_db: service.database.CollectionWrapper
    :param queue: Queue for communicating with the main process
    :type queue: multiprocessing.Queue
    :return: nothing (the queue is used for returning the results)
    """
    start_time = time.clock()
    secure_host = SecureHost()
    secure_host.start_child()
    try:
        result = secure_host.execute(code, request, real_db)
    finally:
        secure_host.kill_child()
    end_time = time.clock()
    queue.put((result, end_time - start_time))


def safely_exec(code, request, db):
    """
    If you want to execute something in the sandbox, call this method.
    It will setup a process and execute the code there with seccomp. The passed database connections
    will be used to access the users collection.

    :param code: The python code which should be executed in the sandbox
    :type code: str
    :param request: Django's request object which is passed into the sandbox process
    :type request: HttpRequest
    :param db: The already opened database connection
    :type db: service.database.CollectionWrapper
    :return: A tuple containing the result and the time needed to calculate the result.
    :rtype: (dict, timedelta)
    """
    start_time = time.clock()
    queue = Queue(1)
    sandbox_process = Process(target=sandbox, args=(code, request, db, queue))
    sandbox_process.start()
    result = ""
    child_time = 0
    wait_for_data = True
    termination = False
    while wait_for_data and not termination:
        try:
            result, child_time = queue.get(True, 0.001)
            wait_for_data = False
        except Empty:
            wait_for_data = True
            if not sandbox_process.is_alive():
                termination = True
                result = {'error': "The code could not be executed because it tried to do something illegal."}
    sandbox_process.join()
    end_time = time.clock()
    return result, end_time - start_time + child_time
gpl-3.0
-6,002,983,267,726,479,000
38.488636
106
0.710216
false
4.212121
false
false
false
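The polling loop in safely_exec() above is the heart of the pattern: read the result queue in 1 ms slices and bail out if the child process dies without answering. A self-contained sketch of the same mechanism with the Django and seccomp specifics stripped away; every name here is illustrative:

# Standalone sketch of the queue-polling pattern used by safely_exec().
from multiprocessing import Process, Queue
from queue import Empty

def child(queue):
    queue.put(6 * 7)  # stand-in for the sandboxed computation

if __name__ == '__main__':
    queue = Queue(1)
    worker = Process(target=child, args=(queue,))
    worker.start()
    result = None
    while result is None:
        try:
            result = queue.get(True, 0.001)  # poll in 1 ms slices
        except Empty:
            if not worker.is_alive():  # child died without reporting back
                result = {'error': 'worker terminated unexpectedly'}
    worker.join()
    print(result)  # 42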
SUSE/kiwi
kiwi/bootloader/install/__init__.py
1
2173
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved. # # This file is part of kiwi. # # kiwi is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # kiwi is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with kiwi. If not, see <http://www.gnu.org/licenses/> # import importlib from typing import Dict from abc import ( ABCMeta, abstractmethod ) from ...exceptions import ( KiwiBootLoaderInstallSetupError ) class BootLoaderInstall(metaclass=ABCMeta): """ **BootLoaderInstall Factory** :param string name: bootloader name :param string root_dir: root directory path name :param object device_provider: instance of :class:`DeviceProvider` :param dict custom_args: custom arguments dictionary """ @abstractmethod def __init__(self) -> None: return None # pragma: no cover @staticmethod def new( name: str, root_dir: str, device_provider: object, custom_args: Dict = None ): name_map = { 'grub2': 'BootLoaderInstallGrub2' if name == 'grub2' or name == 'grub2_s390x_emu' else None } for bootloader_namespace, bootloader_name in list(name_map.items()): if bootloader_name: break try: bootloader_install = importlib.import_module( 'kiwi.bootloader.install.{}'.format(bootloader_namespace) ) return bootloader_install.__dict__[bootloader_name]( root_dir, device_provider, custom_args ) except Exception: raise KiwiBootLoaderInstallSetupError( 'Support for {} bootloader installation ' 'not implemented'.format(name) )
gpl-3.0
-4,317,752,855,747,303,000
31.432836
76
0.651173
false
4.154876
false
false
false
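BootLoaderInstall.new() above is a lazy-import factory: map a name onto a module/class pair, import the module only at call time, and pull the class out of the module dict. A self-contained miniature of the same pattern, with the standard json module standing in for a bootloader backend:

# Miniature of the lazy-import factory; json/JSONDecoder are stand-ins.
import importlib

def make(name):
    name_map = {'json': 'JSONDecoder'}  # module name -> class name
    try:
        module = importlib.import_module(name)
        return module.__dict__[name_map[name]]()
    except Exception:
        raise RuntimeError('Support for {0} not implemented'.format(name))

print(make('json'))  # a json.JSONDecoder instance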
onshape-public/onshape-clients
python/onshape_client/oas/models/bt_configured_values_column_info1025.py
1
10546
# coding: utf-8 """ Onshape REST API The Onshape REST API consumed by all clients. # noqa: E501 The version of the OpenAPI document: 1.113 Contact: [email protected] Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import re # noqa: F401 import sys # noqa: F401 import six # noqa: F401 import nulltype # noqa: F401 from onshape_client.oas.model_utils import ( # noqa: F401 ModelComposed, ModelNormal, ModelSimple, date, datetime, file_type, int, none_type, str, validate_get_composed_info, ) try: from onshape_client.oas.models import bt_configured_dimension_column_info2168 except ImportError: bt_configured_dimension_column_info2168 = sys.modules[ "onshape_client.oas.models.bt_configured_dimension_column_info2168" ] try: from onshape_client.oas.models import bt_configured_feature_column_info1014 except ImportError: bt_configured_feature_column_info1014 = sys.modules[ "onshape_client.oas.models.bt_configured_feature_column_info1014" ] try: from onshape_client.oas.models import bt_configured_parameter_column_info2900 except ImportError: bt_configured_parameter_column_info2900 = sys.modules[ "onshape_client.oas.models.bt_configured_parameter_column_info2900" ] try: from onshape_client.oas.models import bt_configured_suppression_column_info2498 except ImportError: bt_configured_suppression_column_info2498 = sys.modules[ "onshape_client.oas.models.bt_configured_suppression_column_info2498" ] try: from onshape_client.oas.models import bt_configured_values_column_info1025_all_of except ImportError: bt_configured_values_column_info1025_all_of = sys.modules[ "onshape_client.oas.models.bt_configured_values_column_info1025_all_of" ] try: from onshape_client.oas.models import bt_table_column_info1222 except ImportError: bt_table_column_info1222 = sys.modules[ "onshape_client.oas.models.bt_table_column_info1222" ] try: from onshape_client.oas.models import bt_table_column_spec1967 except ImportError: bt_table_column_spec1967 = sys.modules[ "onshape_client.oas.models.bt_table_column_spec1967" ] class BTConfiguredValuesColumnInfo1025(ModelComposed): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. 
""" allowed_values = { ("parent_type",): { "FEATURE": "FEATURE", "INSTANCE": "INSTANCE", "MATE": "MATE", "MATE_CONNECTOR": "MATE_CONNECTOR", "UNKNOWN": "UNKNOWN", }, } validations = {} additional_properties_type = None @staticmethod def openapi_types(): """ This must be a class method so a model may have properties that are of type self, this ensures that we don't create a cyclic import Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ return { "bt_type": (str,), # noqa: E501 "parent_id": (str,), # noqa: E501 "parent_name": (str,), # noqa: E501 "parent_type": (str,), # noqa: E501 "id": (str,), # noqa: E501 "node_id": (str,), # noqa: E501 "specification": ( bt_table_column_spec1967.BTTableColumnSpec1967, ), # noqa: E501 } @staticmethod def discriminator(): return { "bt_type": { "BTConfiguredFeatureColumnInfo-1014": bt_configured_feature_column_info1014.BTConfiguredFeatureColumnInfo1014, "BTConfiguredDimensionColumnInfo-2168": bt_configured_dimension_column_info2168.BTConfiguredDimensionColumnInfo2168, "BTConfiguredSuppressionColumnInfo-2498": bt_configured_suppression_column_info2498.BTConfiguredSuppressionColumnInfo2498, "BTConfiguredParameterColumnInfo-2900": bt_configured_parameter_column_info2900.BTConfiguredParameterColumnInfo2900, }, } attribute_map = { "bt_type": "btType", # noqa: E501 "parent_id": "parentId", # noqa: E501 "parent_name": "parentName", # noqa: E501 "parent_type": "parentType", # noqa: E501 "id": "id", # noqa: E501 "node_id": "nodeId", # noqa: E501 "specification": "specification", # noqa: E501 } required_properties = set( [ "_data_store", "_check_type", "_from_server", "_path_to_item", "_configuration", "_composed_instances", "_var_name_to_model_instances", "_additional_properties_model_instances", ] ) def __init__( self, _check_type=True, _from_server=False, _path_to_item=(), _configuration=None, **kwargs ): # noqa: E501 """bt_configured_values_column_info1025.BTConfiguredValuesColumnInfo1025 - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _from_server (bool): True if the data is from the server False if the data is from the client (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. 
bt_type (str): [optional]  # noqa: E501
            parent_id (str): [optional]  # noqa: E501
            parent_name (str): [optional]  # noqa: E501
            parent_type (str): [optional]  # noqa: E501
            id (str): [optional]  # noqa: E501
            node_id (str): [optional]  # noqa: E501
            specification (bt_table_column_spec1967.BTTableColumnSpec1967): [optional]  # noqa: E501
        """

        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        constant_args = {
            "_check_type": _check_type,
            "_path_to_item": _path_to_item,
            "_from_server": _from_server,
            "_configuration": _configuration,
        }
        required_args = {}
        # remove args whose value is Null because they are unset
        required_arg_names = list(required_args.keys())
        for required_arg_name in required_arg_names:
            if required_args[required_arg_name] is nulltype.Null:
                del required_args[required_arg_name]
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        composed_info = validate_get_composed_info(constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name in unused_args
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and not self._additional_properties_model_instances
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)

    @staticmethod
    def _composed_schemas():
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        return {
            "anyOf": [],
            "allOf": [
                bt_configured_values_column_info1025_all_of.BTConfiguredValuesColumnInfo1025AllOf,
                bt_table_column_info1222.BTTableColumnInfo1222,
            ],
            "oneOf": [],
        }

    @classmethod
    def get_discriminator_class(cls, from_server, data):
        """Returns the child class specified by the discriminator"""
        discriminator = cls.discriminator()
        discr_propertyname_py = list(discriminator.keys())[0]
        discr_propertyname_js = cls.attribute_map[discr_propertyname_py]
        if from_server:
            class_name = data[discr_propertyname_js]
        else:
            class_name = data[discr_propertyname_py]
        class_name_to_discr_class = discriminator[discr_propertyname_py]
        return class_name_to_discr_class.get(class_name)
mit
-8,931,959,705,869,452,000
37.489051
138
0.611132
false
3.972128
true
false
false
sigmunau/nav
python/nav/ipdevpoll/pool.py
1
10770
# # Copyright (C) 2017 UNINETT AS # # This file is part of Network Administration Visualized (NAV). # # NAV is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License version 2 as published by # the Free Software Foundation. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. You should have received a copy of the GNU General Public # License along with NAV. If not, see <http://www.gnu.org/licenses/>. # """Handle sending jobs to worker processes.""" from __future__ import print_function import os import sys from twisted.protocols import amp from twisted.internet import reactor, protocol from twisted.internet.defer import inlineCallbacks, returnValue from twisted.internet.endpoints import ProcessEndpoint, StandardIOEndpoint import twisted.internet.endpoints from nav.ipdevpoll import ContextLogger from . import control, jobs def initialize_worker(): handler = JobHandler() factory = protocol.Factory() factory.protocol = lambda: ProcessAMP(is_worker=True, locator=handler) StandardIOEndpoint(reactor).listen(factory) return handler class Cancel(amp.Command): """Represent a cancel message for sending to workers""" arguments = [ ('serial', amp.Integer()), ] response = [] class Shutdown(amp.Command): """Represent a shutdown message for sending to workers""" arguments = [] response = [] class Job(amp.Command): """Represent a job for sending to a worker""" arguments = [ ('netbox', amp.Integer()), ('job', amp.String()), ('plugins', amp.ListOf(amp.String())), ('interval', amp.Integer()), # Needs to be included in database record. 
# Not used for scheduling ('serial', amp.Integer()), # Serial number needed for cancelling ] response = [('result', amp.Boolean())] errors = { jobs.AbortedJobError: 'AbortedJob', jobs.SuggestedReschedule: 'SuggestedReschedule', } class JobHandler(amp.CommandLocator): """Resolve actions for jobs received over AMP""" _logger = ContextLogger() def __init__(self): super(JobHandler, self).__init__() self.jobs = dict() self.done = False def job_done(self, result, serial): if serial in self.jobs: del self.jobs[serial] if self.done and not self.jobs: reactor.callLater(3, reactor.stop) return result @Job.responder def execute_job(self, netbox, job, plugins, interval, serial): self._logger.debug("Process {pid} received job {job} for" " netbox {netbox}" " with plugins {plugins}".format( pid=os.getpid(), job=job, netbox=netbox, plugins=",".join(plugins)),) job = jobs.JobHandler(job, netbox, plugins, interval) self.jobs[serial] = job deferred = job.run() deferred.addBoth(self.job_done, serial) deferred.addCallback(lambda x: {'result': x}) return deferred @Cancel.responder def cancel(self, serial): if serial in self.jobs: self.jobs[serial].cancel() return {} @Shutdown.responder def shutdown(self): self.done = True return {} def log_jobs(self): self._logger.info("Got {jobs} active jobs".format( jobs=len(self.jobs))) for job in self.jobs.values(): self._logger.info("{job} {netbox} {plugins}".format( job=job.name, netbox=job.netbox, plugins=", ".join(job.plugins))) class ProcessAMP(amp.AMP): """Modify AMP protocol to allow running over process pipes""" _logger = ContextLogger() def __init__(self, is_worker, **kwargs): super(ProcessAMP, self).__init__(**kwargs) self.is_worker = is_worker self.lost_handler = None def makeConnection(self, transport): if not hasattr(transport, 'getPeer'): setattr(transport, 'getPeer', lambda: "peer") if not hasattr(transport, 'getHost'): setattr(transport, 'getHost', lambda: "host") super(ProcessAMP, self).makeConnection(transport) def connectionLost(self, reason): super(ProcessAMP, self).connectionLost(reason) if self.is_worker: if reactor.running: reactor.stop() else: if self.lost_handler: self.lost_handler(self, reason) class InlinePool(object): "This is a dummy worker pool that executes all jobs in the current process" def __init__(self): self.active_jobs = {} def job_done(self, result, deferred): if deferred in self.active_jobs: del self.active_jobs[deferred] return result def execute_job(self, job, netbox, plugins=None, interval=None): job = jobs.JobHandler(job, netbox, plugins, interval) deferred = job.run() self.active_jobs[deferred] = job deferred.addBoth(self.job_done, deferred) return deferred def cancel(self, deferred): if deferred in self.active_jobs: self.active_jobs[deferred].cancel() class Worker(object): """This class holds information about one worker process as seen from the worker pool""" _logger = ContextLogger() def __init__(self, pool, threadpoolsize, max_jobs): self.active_jobs = 0 self.total_jobs = 0 self.max_concurrent_jobs = 0 self.pool = pool self.threadpoolsize = threadpoolsize self.max_jobs = max_jobs @inlineCallbacks def start(self): args = [control.get_process_command(), '--worker', '-f', '-s', '-P'] if self.threadpoolsize: args.append('--threadpoolsize=%d' % self.threadpoolsize) endpoint = ProcessEndpoint(reactor, control.get_process_command(), args, os.environ) factory = protocol.Factory() factory.protocol = lambda: ProcessAMP(is_worker=False, locator=JobHandler()) self.process = yield endpoint.connect(factory) self.process.lost_handler = 
self._worker_died returnValue(self) def done(self): return self.max_jobs and (self.total_jobs >= self.max_jobs) def _worker_died(self, worker, reason): if not self.done(): self._logger.warning("Lost worker {worker} with {jobs} " "active jobs".format( worker=worker, jobs=self.active_jobs)) elif self.active_jobs: self._logger.warning("Worker {worker} exited with {jobs} " "active jobs".format( worker=worker, jobs=self.active_jobs)) else: self._logger.debug("Worker {worker} exited normally" .format(worker=worker)) self.pool.worker_died(self) def execute(self, serial, command, **kwargs): self.active_jobs += 1 self.total_jobs += 1 self.max_concurrent_jobs = max(self.active_jobs, self.max_concurrent_jobs) deferred = self.process.callRemote(command, serial=serial, **kwargs) if self.done(): self.process.callRemote(Shutdown) return deferred def cancel(self, serial): return self.process.callRemote(Cancel, serial=serial) class WorkerPool(object): """This class represent a pool of worker processes to which jobs can be scheduled""" _logger = ContextLogger() def __init__(self, workers, max_jobs, threadpoolsize=None): twisted.internet.endpoints.log = HackLog self.workers = set() self.target_count = workers self.max_jobs = max_jobs self.threadpoolsize = threadpoolsize for i in range(self.target_count): self._spawn_worker() self.serial = 0 self.jobs = dict() def worker_died(self, worker): self.workers.remove(worker) if not worker.done(): self._spawn_worker() @inlineCallbacks def _spawn_worker(self): worker = yield Worker(self, self.threadpoolsize, self.max_jobs).start() self.workers.add(worker) def _cleanup(self, result, deferred): serial, worker = self.jobs[deferred] del self.jobs[deferred] worker.active_jobs -= 1 return result def _execute(self, command, **kwargs): ready_workers = [w for w in self.workers if not w.done()] if not ready_workers: raise RuntimeError("No ready workers") worker = min(ready_workers, key=lambda x: x.active_jobs) self.serial += 1 deferred = worker.execute(self.serial, command, **kwargs) if worker.done(): self._spawn_worker() self.jobs[deferred] = (self.serial, worker) deferred.addBoth(self._cleanup, deferred) return deferred def cancel(self, deferred): if deferred not in self.jobs: self._logger.debug("Cancelling job that isn't known") return serial, worker = self.jobs[deferred] return worker.cancel(serial) def execute_job(self, job, netbox, plugins=None, interval=None): deferred = self._execute(Job, job=job, netbox=netbox, plugins=plugins, interval=interval) deferred.addCallback(lambda x: x['result']) return deferred def log_summary(self): self._logger.info("{active} out of {target} workers running".format( active=len(self.workers), target=self.target_count)) for worker in self.workers: self._logger.info(" - ready {ready} active {active}" " max {max} total {total}".format( ready=not worker.done(), active=worker.active_jobs, max=worker.max_concurrent_jobs, total=worker.total_jobs)) class HackLog(object): @staticmethod def msg(data, **kwargs): """Used to monkeypatch twisted.endpoints to log worker output the ipdevpoll way""" sys.stderr.write(data)
gpl-2.0
-5,302,875,455,866,709,000
33.630225
80
0.589694
false
4.311449
false
false
false
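Job, Cancel and Shutdown above are Twisted AMP commands: declarative argument/response schemas that callRemote() serializes over the process pipe, with @Command.responder binding the handler on the receiving side. A minimal command/locator pair in the same style (Ping is a made-up name; requires Twisted):

# Minimal AMP command in the style of the record above.
from twisted.protocols import amp

class Ping(amp.Command):
    arguments = [('serial', amp.Integer())]
    response = [('alive', amp.Boolean())]

class PingLocator(amp.CommandLocator):
    @Ping.responder
    def ping(self, serial):
        return {'alive': True}  # must match the declared response schema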
mazvv/travelcrm
travelcrm/forms/foodcats.py
1
2079
# -*-coding: utf-8 -*- import colander from . import( ResourceSchema, BaseForm, BaseSearchForm, BaseAssignForm, ) from ..resources.foodcats import FoodcatsResource from ..models.foodcat import Foodcat from ..models.task import Task from ..models.note import Note from ..lib.qb.foodcats import FoodcatsQueryBuilder from ..lib.utils.common_utils import translate as _ from ..lib.utils.security_utils import get_auth_employee @colander.deferred def name_validator(node, kw): request = kw.get('request') def validator(node, value): foodcat = Foodcat.by_name(value) if ( foodcat and str(foodcat.id) != request.params.get('id') ): raise colander.Invalid( node, _(u'Food category with the same name exists'), ) return colander.All(colander.Length(max=32), validator,) class _FoodcatSchema(ResourceSchema): name = colander.SchemaNode( colander.String(), validator=name_validator, ) class FoodcatForm(BaseForm): _schema = _FoodcatSchema def submit(self, foodcat=None): if not foodcat: foodcat = Foodcat( resource=FoodcatsResource.create_resource( get_auth_employee(self.request) ) ) else: foodcat.resource.notes = [] foodcat.resource.tasks = [] foodcat.name = self._controls.get('name') for id in self._controls.get('note_id'): note = Note.get(id) foodcat.resource.notes.append(note) for id in self._controls.get('task_id'): task = Task.get(id) foodcat.resource.tasks.append(task) return foodcat class FoodcatSearchForm(BaseSearchForm): _qb = FoodcatsQueryBuilder class FoodcatAssignForm(BaseAssignForm): def submit(self, ids): for id in ids: foodcat = Foodcat.get(id) foodcat.resource.maintainer_id = self._controls.get( 'maintainer_id' )
gpl-3.0
-5,314,125,230,378,631,000
25.653846
64
0.601732
false
3.739209
false
false
false
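name_validator above uses colander's deferred-validator pattern: the decorated factory runs only when the schema is bound, so it can close over the current request. A self-contained sketch of the same mechanism, with an in-memory set standing in for the Foodcat.by_name() lookup:

# Standalone sketch of a deferred colander validator; requires colander.
import colander

@colander.deferred
def name_validator(node, kw):
    taken = kw.get('taken_names', set())  # stand-in for the DB query

    def validator(node, value):
        if value in taken:
            raise colander.Invalid(node, u'Name already exists')

    return colander.All(colander.Length(max=32), validator)

class Schema(colander.MappingSchema):
    name = colander.SchemaNode(colander.String(), validator=name_validator)

schema = Schema().bind(taken_names={'Vegan'})  # bind() resolves the deferred
print(schema.deserialize({'name': 'Seafood'}))  # passes; 'Vegan' would raise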
Havate/havate-openstack
proto-build/gui/horizon/Horizon_GUI/settings.py
1
7641
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import os import sys import warnings from django.utils.translation import ugettext_lazy as _ # noqa from openstack_dashboard import exceptions warnings.formatwarning = lambda message, category, *args, **kwargs: \ '%s: %s' % (category.__name__, message) ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) BIN_DIR = os.path.abspath(os.path.join(ROOT_PATH, '..', 'bin')) if ROOT_PATH not in sys.path: sys.path.append(ROOT_PATH) DEBUG = False TEMPLATE_DEBUG = DEBUG SITE_BRANDING = 'OpenStack Dashboard' LOGIN_URL = '/auth/login/' LOGOUT_URL = '/auth/logout/' # LOGIN_REDIRECT_URL can be used as an alternative for # HORIZON_CONFIG.user_home, if user_home is not set. # Do not set it to '/home/', as this will cause circular redirect loop LOGIN_REDIRECT_URL = '/' MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'media')) MEDIA_URL = '/media/' STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'static')) STATIC_URL = '/static/' ROOT_URLCONF = 'openstack_dashboard.urls' HORIZON_CONFIG = { 'dashboards': ('project', 'admin', 'settings', 'router',), 'default_dashboard': 'project', 'user_home': 'openstack_dashboard.views.get_user_home', 'ajax_queue_limit': 10, 'auto_fade_alerts': { 'delay': 3000, 'fade_duration': 1500, 'types': ['alert-success', 'alert-info'] }, 'help_url': "http://docs.openstack.org", 'exceptions': {'recoverable': exceptions.RECOVERABLE, 'not_found': exceptions.NOT_FOUND, 'unauthorized': exceptions.UNAUTHORIZED}, } # Set to True to allow users to upload images to glance via Horizon server. # When enabled, a file form field will appear on the create image form. # See documentation for deployment considerations. HORIZON_IMAGES_ALLOW_UPLOAD = True # The OPENSTACK_IMAGE_BACKEND settings can be used to customize features # in the OpenStack Dashboard related to the Image service, such as the list # of supported image formats. 
OPENSTACK_IMAGE_BACKEND = { 'image_formats': [ ('', ''), ('aki', _('AKI - Amazon Kernel Image')), ('ami', _('AMI - Amazon Machine Image')), ('ari', _('ARI - Amazon Ramdisk Image')), ('iso', _('ISO - Optical Disk Image')), ('qcow2', _('QCOW2 - QEMU Emulator')), ('raw', _('Raw')), ('vdi', _('VDI')), ('vhd', _('VHD')), ('vmdk', _('VMDK')) ] } MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'horizon.middleware.HorizonMiddleware', 'django.middleware.doc.XViewMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) TEMPLATE_CONTEXT_PROCESSORS = ( 'django.core.context_processors.debug', 'django.core.context_processors.i18n', 'django.core.context_processors.request', 'django.core.context_processors.media', 'django.core.context_processors.static', 'django.contrib.messages.context_processors.messages', 'horizon.context_processors.horizon', 'openstack_dashboard.context_processors.openstack', ) TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', 'horizon.loaders.TemplateLoader' ) TEMPLATE_DIRS = ( os.path.join(ROOT_PATH, 'templates'), ) STATICFILES_FINDERS = ( 'compressor.finders.CompressorFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) COMPRESS_PRECOMPILERS = ( ('text/less', ('lesscpy {infile}')), ) COMPRESS_CSS_FILTERS = ( 'compressor.filters.css_default.CssAbsoluteFilter', ) COMPRESS_ENABLED = True COMPRESS_OUTPUT_DIR = 'dashboard' COMPRESS_CSS_HASHING_METHOD = 'hash' COMPRESS_PARSER = 'compressor.parser.HtmlParser' INSTALLED_APPS = ( 'openstack_dashboard', 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.humanize', 'compressor', 'horizon', 'openstack_dashboard.dashboards.project', 'openstack_dashboard.dashboards.admin', 'openstack_dashboard.dashboards.settings', 'openstack_auth', 'openstack_dashboard.dashboards.router', ) TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' AUTHENTICATION_BACKENDS = ('openstack_auth.backend.KeystoneBackend',) MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage' SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies' SESSION_COOKIE_HTTPONLY = True SESSION_EXPIRE_AT_BROWSER_CLOSE = True SESSION_COOKIE_SECURE = False SESSION_TIMEOUT = 1800 gettext_noop = lambda s: s LANGUAGES = ( ('en', gettext_noop('English')), ('en-au', gettext_noop('Australian English')), ('en-gb', gettext_noop('British English')), ('es', gettext_noop('Spanish')), ('fr', gettext_noop('French')), ('ja', gettext_noop('Japanese')), ('ko', gettext_noop('Korean (Korea)')), ('nl', gettext_noop('Dutch (Netherlands)')), ('pl', gettext_noop('Polish')), ('pt-br', gettext_noop('Portuguese (Brazil)')), ('ru', gettext_noop('Russian')), ('zh-cn', gettext_noop('Simplified Chinese')), ('zh-tw', gettext_noop('Traditional Chinese')), ) LANGUAGE_CODE = 'en' LANGUAGE_COOKIE_NAME = 'horizon_language' USE_I18N = True USE_L10N = True USE_TZ = True OPENSTACK_KEYSTONE_DEFAULT_ROLE = 'Member' DEFAULT_EXCEPTION_REPORTER_FILTER = 'horizon.exceptions.HorizonReporterFilter' POLICY_FILES_PATH = os.path.join(ROOT_PATH, "conf") # Map of local copy of service policy files POLICY_FILES = { 'identity': 'keystone_policy.json', 
'compute': 'nova_policy.json' } SECRET_KEY = None try: from local.local_settings import * # noqa except ImportError: logging.warning("No local_settings file found.") # Ensure that we always have a SECRET_KEY set, even when no local_settings.py # file is present. See local_settings.py.example for full documentation on the # horizon.utils.secret_key module and its use. if not SECRET_KEY: from horizon.utils import secret_key LOCAL_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'local') SECRET_KEY = secret_key.generate_or_read_from_file('/var/lib/openstack-dashboard/secret_key') from openstack_dashboard import policy POLICY_CHECK_FUNCTION = policy.check # Add HORIZON_CONFIG to the context information for offline compression COMPRESS_OFFLINE_CONTEXT = { 'STATIC_URL': STATIC_URL, 'HORIZON_CONFIG': HORIZON_CONFIG } if DEBUG: logging.basicConfig(level=logging.DEBUG)
apache-2.0
2,743,992,611,349,540,400
31.653846
97
0.693888
false
3.5375
true
false
false
follownjmoney/campaign-server
campaignserver/campaignserver/settings_prod.py
1
2523
""" Django settings for campaignserver project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'k-(62ls@8owkmo72ipb_x-#9zgt#!59+8^5kw(rf3yatpmou%h' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'dataserver', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'campaignserver.urls' WSGI_APPLICATION = 'campaignserver.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'followthemoney', 'USER': 'root', 'PASSWORD': 'followthemoney', 'HOST': 'followthemoney.chwj19dbxodd.us-east-1.rds.amazonaws.com', 'PORT': '3306', } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', 'django.template.loaders.eggs.Loader', ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) STATIC_URL = '/static/'
apache-2.0
5,052,721,809,511,146,000
25.010309
74
0.722553
false
3.409459
false
false
false
xesscorp/skidl
skidl/skidl.py
1
7609
# -*- coding: utf-8 -*- # MIT license # # Copyright (C) 2016 by XESS Corp. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import, division, print_function, unicode_literals import json import os from builtins import open, super from future import standard_library from . import tools # Import EDA tool-specific stuff. from .circuit import Circuit from .common import * from .defines import * from .logger import erc_logger, get_script_name, logger from .part_query import footprint_cache from .pin import Pin from .utilities import * standard_library.install_aliases() class SkidlCfg(dict): """Class for holding SKiDL configuration.""" CFG_FILE_NAME = ".skidlcfg" def __init__(self, *dirs): super().__init__() self.load(*dirs) def load(self, *dirs): """Load SKiDL configuration from JSON files in given dirs.""" for dir in dirs: path = os.path.join(dir, self.CFG_FILE_NAME) path = os.path.expanduser(path) path = os.path.abspath(path) try: with open(path) as cfg_fp: merge_dicts(self, json.load(cfg_fp)) except (FileNotFoundError, IOError): pass def store(self, dir="."): """Store SKiDL configuration as JSON in directory as .skidlcfg file.""" path = os.path.join(dir, self.CFG_FILE_NAME) path = os.path.expanduser(path) path = os.path.abspath(path) with open(path, "w") as cfg_fp: json.dump(self, cfg_fp, indent=4) def get_kicad_lib_tbl_dir(): """Get the path to where the global fp-lib-table file is found.""" paths = ( "$HOME/.config/kicad", "~/.config/kicad", "%APPDATA%/kicad", "$HOME/Library/Preferences/kicad", "~/Library/Preferences/kicad", ) for path in paths: path = os.path.normpath(os.path.expanduser(os.path.expandvars(path))) if os.path.lexists(path): return path return "" ############################################################################### # Globals that are used by everything else. ############################################################################### # Get SKiDL configuration. skidl_cfg = SkidlCfg("/etc", "~", ".") # If no configuration files were found, set some default lib search paths. if "lib_search_paths" not in skidl_cfg: skidl_cfg["lib_search_paths"] = {tool: ["."] for tool in ALL_TOOLS} # Add the location of the default KiCad part libraries. try: skidl_cfg["lib_search_paths"][KICAD].append(os.environ["KICAD_SYMBOL_DIR"]) except KeyError: logger.warning( "KICAD_SYMBOL_DIR environment variable is missing, so the default KiCad symbol libraries won't be searched." ) # Add the location of the default SKiDL part libraries. 
default_skidl_libs = os.path.join( os.path.dirname(os.path.abspath(__file__)), "libs" ) skidl_cfg["lib_search_paths"][SKIDL].append(default_skidl_libs) # Shortcut to library search paths. lib_search_paths = skidl_cfg["lib_search_paths"] # If no configuration files were found, set some default footprint search paths. if "footprint_search_paths" not in skidl_cfg: dir_ = get_kicad_lib_tbl_dir() skidl_cfg["footprint_search_paths"] = {tool: [dir_] for tool in ALL_TOOLS} # Cause the footprint cache to be invalidated if the footprint search path changes. def invalidate_footprint_cache(self, k, v): footprint_cache.reset() skidl_cfg["footprint_search_paths"] = TriggerDict(skidl_cfg["footprint_search_paths"]) skidl_cfg["footprint_search_paths"].trigger_funcs[KICAD] = invalidate_footprint_cache # Shortcut to footprint search paths. footprint_search_paths = skidl_cfg["footprint_search_paths"] # Set default toolset being used with SKiDL. def set_default_tool(tool): """Set the ECAD tool that will be used by default.""" skidl_cfg["default_tool"] = tool def get_default_tool(): return skidl_cfg["default_tool"] if "default_tool" not in skidl_cfg: set_default_tool(KICAD) # Make the various EDA tool library suffixes globally available. lib_suffixes = tools.lib_suffixes # Definitions for backup library of circuit parts. BACKUP_LIB_NAME = get_script_name() + "_lib" BACKUP_LIB_FILE_NAME = BACKUP_LIB_NAME + lib_suffixes[SKIDL] # Boolean controls whether backup lib will be searched for missing parts. QUERY_BACKUP_LIB = INITIAL_QUERY_BACKUP_LIB = True def set_query_backup_lib(val): """Set the boolean that controls searching for the backup library.""" global QUERY_BACKUP_LIB QUERY_BACKUP_LIB = val def get_query_backup_lib(): return QUERY_BACKUP_LIB # Backup lib for storing parts in a Circuit. backup_lib = None def set_backup_lib(lib): """Set the backup library.""" global backup_lib backup_lib = lib def get_backup_lib(): return backup_lib @norecurse def load_backup_lib(): """Load a backup library that stores the parts used in the circuit.""" global backup_lib # Don't keep reloading the backup library once it's loaded. if not backup_lib: try: # The backup library is a SKiDL lib stored as a Python module. exec(open(BACKUP_LIB_FILE_NAME).read()) # Copy the backup library in the local storage to the global storage. backup_lib = locals()[BACKUP_LIB_NAME] except (FileNotFoundError, ImportError, NameError, IOError): pass return backup_lib # Create the default Circuit object that will be used unless another is explicitly created. builtins.default_circuit = Circuit() # NOCONNECT net for attaching pins that are intentionally left open. builtins.NC = default_circuit.NC # pylint: disable=undefined-variable # Create calls to functions on whichever Circuit object is the current default. ERC = default_circuit.ERC erc_assert = default_circuit.add_erc_assertion generate_netlist = default_circuit.generate_netlist generate_xml = default_circuit.generate_xml generate_schematic = default_circuit.generate_schematic generate_svg = default_circuit.generate_svg generate_graph = default_circuit.generate_graph reset = default_circuit.reset backup_parts = default_circuit.backup_parts # Define a tag for nets that convey power (e.g., VCC or GND). POWER = Pin.drives.POWER def no_files(circuit=default_circuit): """Prevent creation of output files (netlists, ERC, logs) by this Circuit object.""" circuit.no_files = True erc_logger.stop_file_output() logger.stop_file_output()
mit
5,613,484,846,963,720,000
32.372807
120
0.681036
false
3.750123
true
false
false
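SkidlCfg.load() above cascades JSON fragments from several directories, later files overriding earlier ones via merge_dicts. A standalone miniature of that cascade using a shallow dict merge (merge_dicts itself recurses); the directory list and file name mirror the defaults above:

# Standalone miniature of the SkidlCfg load cascade; Python 3 only.
import json
import os

def load_cfg(*dirs, filename='.skidlcfg'):
    cfg = {}
    for d in dirs:  # e.g. ('/etc', '~', '.'); later directories win
        path = os.path.abspath(os.path.expanduser(os.path.join(d, filename)))
        try:
            with open(path) as fp:
                cfg.update(json.load(fp))  # shallow merge for illustration
        except (OSError, ValueError):
            pass
    return cfg

print(load_cfg('/etc', '~', '.'))  # {} when no config files exist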
iniverno/RnR-LLC
simics-3.0-install/simics-3.0.31/amd64-linux/lib/x86_components.py
1
69880
# MODULE: x86-components # CLASS: x86-system # CLASS: x86-apic-system # CLASS: x86-separate-mem-io-system # CLASS: x86-apic-bus-system # CLASS: x86-e7520-system # CLASS: i386-cpu # CLASS: i486sx-cpu # CLASS: i486dx2-cpu # CLASS: pentium-cpu # CLASS: pentium-mmx-cpu # CLASS: pentium-pro-cpu # CLASS: pentium-ii-cpu # CLASS: pentium-iii-cpu # CLASS: pentium-m-cpu # CLASS: pentium-4-cpu # CLASS: xeon-prestonia-cpu # CLASS: pentium-4e-cpu # CLASS: pentium-4e-2ht-cpu # CLASS: pentium-4e-4ht-cpu # CLASS: x86-hammer-cpu # CLASS: opteron-cpu # CLASS: north-bridge-443bx # CLASS: north-bridge-443bx-agp # CLASS: north-bridge-875p # CLASS: north-bridge-e7520 # CLASS: north-bridge-k8 # CLASS: legacy-pc-devices import os, time from sim_core import * from components import * from base_components import standard_pc_devices_component from base_components import find_device, get_highest_2exp from x86_cmos_info import register_cmos_commands ### X86 Legacy System class x86_system_component(component_object): classname = 'x86-system' basename = 'system' description = ('The "x86-system" component represents a legacy ISA based ' 'x86 system with a single processor.') connectors = { 'cpu0' : {'type' : 'x86-processor', 'direction' : 'down', 'empty_ok' : False, 'hotplug' : False, 'multi' : False}, 'chipset' : {'type' : 'x86-chipset', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False}, 'reset' : {'type' : 'x86-reset-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False}} bios = None cpus = [] def __init__(self, parse_obj): component_object.__init__(self, parse_obj) self.break_on_reboot = False self.use_acpi = 0 self.do_create_acpi_tables = 1 self.do_init_cmos = 1 self.use_shadow = 1 self.use_hostfs = 1 self.do_init_mtrrs = 1 self.linux_acpi_bug_workaround = 1 self.map_ram = 1 self.disable_ap_cpus = 1 def get_memory_megs(self, idx): return self.memory_megs def set_memory_megs(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value if val < 1: SIM_attribute_error('Unsupported memory size') return Sim_Set_Illegal_Value self.memory_megs = val return Sim_Set_Ok def get_rtc_time(self, idx): return self.tod def set_rtc_time(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value try: time.strptime(val, '%Y-%m-%d %H:%M:%S %Z') except Exception, msg: SIM_attribute_error(str(msg)) return Sim_Set_Illegal_Value self.tod = val return Sim_Set_Ok def get_bios(self, idx): return self.bios def set_bios(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value if self.instantiated: # allow checkpoint to be loaded, bios isn't used once instantiated self.bios = val return Sim_Set_Ok if not SIM_lookup_file(val): # TODO: use exception print 'Could not locate bios file %s' % val return Sim_Set_Illegal_Value self.bios_size = os.stat(SIM_lookup_file(val)).st_size # Default bios contains trailing garbage if val[:10] == 'rombios-2.': self.bios_size = 64 * 1024 if self.bios_size > 0x100000: # TODO: use exception print 'BIOS size %d is larger than max (%d)' % ( self.bios_size, 0x100000) return Sim_Set_Illegal_Value self.bios = val return Sim_Set_Ok def add_objects(self): self.cpus_per_slot = [] # RAM self.o.dram_space = pre_obj('dram$', 'memory-space') self.o.ram_image = pre_obj('ram$_image', 'image') self.o.ram_image.size = self.memory_megs * 1024 * 1024 self.o.ram = pre_obj('ram$', 'ram') self.o.ram.image = self.o.ram_image self.o.dram_space.map = [[0, self.o.ram, 0, 0, self.memory_megs * 1024 * 1024]] self.o.reset_bus = pre_obj('reset$', 'x86-reset-bus') 
self.o.reset_bus.reset_targets = []
        self.o.conf = pre_obj('conf$', 'pc-config')
        if self.bios:
            self.o.rom_image = pre_obj('rom$_image', 'image')
            self.o.rom_image.size = self.bios_size
            self.o.rom = pre_obj('rom$', 'rom')
            self.o.rom.image = self.o.rom_image
        if self.use_hostfs:
            self.o.hfs = pre_obj('hfs$', 'hostfs')
        # Port space
        self.o.port_space = pre_obj('port_mem$', 'port-space')
        self.o.port_space.map = [
            [0xfff0, self.o.conf, 0, 0, 1],
            [0xfff1, self.o.conf, 1, 0, 1]]
        # Northbridge memory-space
        self.o.phys_mem = pre_obj('phys_mem$', 'memory-space')
        self.o.phys_mem.map = []
        # Southbridge memory-space
        self.o.pci_mem = pre_obj('pci_mem$', 'memory-space')
        self.o.pci_mem.map = []
        self.o.phys_mem.default_target = [self.o.pci_mem, 0, 0, None]
        if self.bios:
            self.o.pci_mem.map += [
                [0x100000000 - self.bios_size, self.o.rom, 0, 0, self.bios_size]]
        if self.use_hostfs:
            self.o.pci_mem.map += [
                [0x0ffe81000, self.o.hfs, 0, 0, 0x10]]
        if self.use_shadow:
            self.o.shadow = pre_obj('core$', 'pc-shadow')
            self.o.shadow_mem = pre_obj('shadow$', 'memory-space')
            self.o.shadow_mem.map = [[0x100000, self.o.dram_space, 0, 0, 0x100000]]
            self.o.port_space.map += [
                [0xfff4, self.o.shadow, 0, 0, 1],
                [0xfff5, self.o.shadow, 0, 1, 1]]
            if self.bios:
                self.o.pci_mem.map += [
                    [0x000f0000, self.o.rom, 0, self.bios_size - 0x10000, 0x10000]]
        if self.linux_acpi_bug_workaround:
            self.o.rom1_image = pre_obj('rom$_image', 'image')
            self.o.rom1_image.size = 0x10000
            self.o.rom1 = pre_obj('rom$', 'rom')
            self.o.rom1.image = self.o.rom1_image
            self.o.pci_mem.map += [
                [0x000e0000, self.o.rom1, 0, 0, 0x10000]]
        # Map RAM
        if self.map_ram and self.bios:
            ram_map = [[0x000000000, self.o.dram_space, 0, 0, 0xa0000]]
            high_mem = 4096 - 256
            if self.memory_megs > high_mem:
                high_mem *= 1024 * 1024
                highest = (self.memory_megs * 1024 * 1024) - high_mem
                ram_map += [
                    [0x000100000, self.o.dram_space, 0, 0x100000, high_mem,
                     None, 0],
                    [0x100000000, self.o.dram_space, 0, high_mem, highest,
                     None, 0]]
            else:
                megs = (self.memory_megs - 1) * 1024 * 1024
                ram_map += [
                    [0x000100000, self.o.ram, 0, 0x100000, megs, None, 0]]
            self.o.phys_mem.map += ram_map
        elif self.map_ram and not self.bios:
            ram_map = [[0x00000000, self.o.dram_space, 0, 0,
                        self.memory_megs * 1024 * 1024]]
            self.o.phys_mem.map += ram_map

    def add_connector_info(self):
        self.connector_info['cpu0'] = [0, self.o.phys_mem, self.o.port_space]
        self.connector_info['chipset'] = [self.o.phys_mem, self.o.pci_mem,
                                          self.o.port_space]
        if "dram_space" in dir(self.o):
            self.connector_info['chipset'] += [
                self.o.dram_space]
        else:
            self.connector_info['chipset'] += [None]
        self.connector_info['reset'] = [self.o.reset_bus]

    def load_bios(self):
        # Write the bios into the ROM area, so that checkpoints do not
        # depend on the BIOS file being available all the time.
        f = open(SIM_lookup_file(self.bios), "rb")
        base = 0x100000000 - self.bios_size
        data = map(ord, f.read(self.bios_size))
        self.o.pci_mem.iface.memory_space.write(self.o.pci_mem, None,
                                                base, tuple(data), 1)
        f.close()

    def set_cmos_info(self):
        self.rtc = find_device(self.o.port_space, 0x70)
        if not self.rtc:
            #print "CMOS device not found - can not write information."
return # set nvram info eval_cli_line('%s.cmos-init' % self.obj.name) eval_cli_line('%s.cmos-base-mem 640' % self.obj.name) eval_cli_line('%s.cmos-extended-mem %d' % (self.obj.name, self.memory_megs - 1)) m = re.match(r'(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)', self.tod) eval_cli_line(('%s.set-date-time ' + 'year=%s month=%s mday=%s ' + 'hour=%s minute=%s second=%s') % ((self.rtc.name,) + m.groups())) eval_cli_line('%s.cmos-boot-dev C' % self.obj.name) flp = find_device(self.o.port_space, 0x3f2) if flp: if len(flp.drives): eval_cli_line('%s.cmos-floppy A "1.44"' % self.obj.name) if len(flp.drives) > 1: eval_cli_line('%s.cmos-floppy B "1.44"' % self.obj.name) ide0 = find_device(self.o.port_space, 0x1f0) if ide0 and ide0.master: size = ide0.master.disk_sectors # Our BIOS does LBA directly: set sectors 63 and heads to x * 16 bios_S = 63 # The following would probably work if our BIOS had support # for proper 'translation'. Now it seems to fail. #if size < 504 * 1024 * 1024 / 512: # bios_H = 16 #elif size < 1008 * 1024 * 1024 / 512: # bios_H = 32 #elif size < 2016 * 1024 * 1024 / 512: # bios_H = 64 #elif size < 4032 * 1024 * 1024 / 512: # bios_H = 128 if size < 4032 * 1024 * 1024 / 512: bios_H = 16 else: # 255 is de facto standard since DOS and early Windows can't # handle 256 heads, this is known as the 4GB limit bios_H = 255 bios_C = size / (bios_H * bios_S) #if bios_C * bios_H * bios_S != size: # print 'Disk size can not be translated to exact BIOS CHS' # print 'Using CHS: %d %d %d' % (bios_C, bios_H, bios_S) eval_cli_line('%s.cmos-hd C %d %d %d' % (self.obj.name, bios_C, bios_H, bios_S)) def calc_mtrr_mask(self, classname, size): return (~(size - 1) & ((1 << sim.classes[classname].classattrs.physical_bits) - 1)) def set_mtrr(self, cpu): if not SIM_class_has_attribute(cpu.classname, 'mtrr_def_type'): return cpu.mtrr_def_type = 0xc00 megs_remaining = self.memory_megs next_mtrr = 0 next_base = 0 while megs_remaining: if next_mtrr > 7: print ('Warning: %d megabytes of memory not mapped by ' 'MTRRs' % megs_remaining) break this_size = get_highest_2exp(megs_remaining) exec 'cpu.mtrr_base%d = next_base | 0x06' % next_mtrr mask = self.calc_mtrr_mask(cpu.classname, this_size * 1024 * 1024) exec 'cpu.mtrr_mask%d = mask | 0x800' % next_mtrr megs_remaining = megs_remaining - this_size next_base = next_base + this_size * 1024 * 1024 next_mtrr += 1 cpu.mtrr_fix_64k_00000 = 0x0606060606060606 cpu.mtrr_fix_16k_80000 = 0x0606060606060606 cpu.mtrr_fix_16k_a0000 = 0 cpu.mtrr_fix_4k_c0000 = 0 cpu.mtrr_fix_4k_c8000 = 0 cpu.mtrr_fix_4k_d0000 = 0 cpu.mtrr_fix_4k_d8000 = 0 cpu.mtrr_fix_4k_f0000 = 0 cpu.mtrr_fix_4k_f8000 = 0 def instantiation_done(self): component_object.instantiation_done(self) for i in range(len(self.cpus)): self.cpus[i] = SIM_get_object(self.cpus[i].name) if self.bios: self.load_bios() conf.sim.handle_outside_memory = 1 if self.do_init_cmos: self.set_cmos_info() self.install_reset_callback(self.cpus[0]) def add_cpu(self, id, cpu): if len(self.cpus_per_slot) <= id: self.cpus_per_slot += [None] * (id + 1 - len(self.cpus_per_slot)) if self.cpus_per_slot[id] == None: self.cpus_per_slot[id] = [cpu] else: self.cpus_per_slot[id].append(cpu) self.cpus = [] for l in self.cpus_per_slot: if l: for c in l: self.cpus.append(c) self.o.reset_bus.reset_targets = self.cpus cpu.cpu_group = self.o.reset_bus def connect_x86_processor(self, connector, cpu): self.add_cpu(0, cpu) def connect_x86_chipset(self, connector): if self.use_shadow: # move all mappings to shadow memory self.o.shadow_mem.map += ([x for x in 
self.o.pci_mem.map if x[0] >= 0xc0000 and x[0] < 0x100000]) self.o.pci_mem.map = [x for x in self.o.pci_mem.map if x[0] < 0xc0000 or x[0] >= 0x100000] self.o.pci_mem.map += ([ [0x0000c0000, self.o.shadow, 0, 0, 0x40000, self.o.shadow_mem, 1]]) def connect_x86_reset_bus(self, connector): pass def get_clock(self): return self.cpus[0] def get_processors(self): return self.cpus def set_processors(self, cpus): self.cpus = cpus def on_reset_hap(self, udata, cpu, hard_reset): self.on_reboot() def on_triple_fault(self, udata, cpu, exc_no): self.on_reboot() def on_reboot(self): SIM_log_message(self.obj, 1, 0, Sim_Log_Info, "System rebooted. Note that the reboot support in" " Simics is experimental at this point.") if self.break_on_reboot: SIM_log_message(self.obj, 1, 0, Sim_Log_Info, "You will probably want to save the disk content and continue" " from that in another Simics session.\nTo disable breaking," " change the break_on_reboot attribute.") SIM_break_simulation("Break on system reboot") def install_reset_callback(self, cpu): SIM_hap_add_callback_obj("X86_Processor_Reset", cpu, 0, self.on_reset_hap, None) excno = cpu.iface.exception.get_number(cpu, "Triple_Fault") SIM_hap_add_callback_obj_index("Core_Exception", cpu, 0, self.on_triple_fault, None, excno) def set_break_on_reboot(self, val, idx): self.break_on_reboot = val return Sim_Set_Ok def get_break_on_reboot(self, idx): return self.break_on_reboot def finalize_instance(self): component_object.finalize_instance(self) if self.instantiated: for connector, cpu, dst_conn in self.connections: if connector == 'cpu0': break else: SIM_log_message(self.obj, 1, 0, Sim_Log_Error, "No cpu to install reset callback on!") return cpu = cpu.object_list['cpu[0]'] self.install_reset_callback(cpu) system_attributes = [ ['memory_megs', Sim_Attr_Required, 'i', 'The amount of RAM in megabytes on the processor board.'], ['rtc_time', Sim_Attr_Required, 's', 'The date and time of the Real-Time clock.'], ['break_on_reboot', Sim_Attr_Optional, 'b', 'If true, the simulation will stop when machine is rebooted.']] bios_attribute = [['bios', Sim_Attr_Optional, 's', 'The x86 BIOS file to use.']] apic_attribute = [['apic_id_list', Sim_Attr_Optional, '[i*]', 'The APIC id to use for each processor']] register_component_class(x86_system_component, system_attributes + bios_attribute, top_level = True) register_cmos_commands(x86_system_component.classname) ### X86 Apic Bus System class x86_apic_bus_system_component(x86_system_component): classname = 'x86-apic-bus-system' basename = 'system' description = ('The "x86-apic-bus-system" component represents a ' 'multi-processor capable x86 system with up to 255 cpus.') connectors = x86_system_component.connectors.copy() del connectors['cpu0'] connectors['io-apic'] = { 'type' : 'apic-bus', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : False, 'multi' : True} for i in range(255): connectors['cpu%d' % i] = { 'type' : 'x86-apic-processor', 'direction' : 'down', 'empty_ok' : i != 0, 'hotplug' : False, 'multi' : False} def __init__(self, parse_obj): x86_system_component.__init__(self, parse_obj) self.linux_acpi_bug_workaround = 0 self.use_acpi = 0 self.use_hostfs = 0 self.use_shadow = 0 self.do_create_acpi_tables = 0 self.do_init_cmos = 0 self.do_init_mtrrs = 1 self.map_ram = 0 self.disable_ap_cpus = 1 # default APIC id list that respects more or less Intel's manuals self.apic_id_list = [ 0, 1, 6, 7, 4, 5, 2, 3, 8, 9,14,15,12,13,10,11] def add_connector_info(self): x86_system_component.add_connector_info(self) 
self.connector_info['io-apic'] = [self.o.apic_bus] if "dram_space" in dir(self.o): self.connector_info['io-apic'] += [ self.o.dram_space] for i in range(255): self.connector_info['cpu%d' % i] = [i, self.o.phys_mem, self.o.port_space, self.o.apic_bus] def connect_x86_apic_processor(self, connector, cpus): id = self.connector_info[connector][0] for i in range(len(cpus)): self.add_cpu(id, cpus[i]) if self.do_init_mtrrs: self.set_mtrr(cpus[i]) def connect_apic_bus(self, connector): pass def add_objects(self): x86_system_component.add_objects(self) self.o.apic_bus = pre_obj('apic_bus$', 'apic-bus') self.o.apic_bus.apics = [] self.o.apic_bus.ioapic = [] def set_apic_id_list(self, val, idx): self.apic_id_list = val return Sim_Set_Ok def get_apic_id_list(self, idx): return self.apic_id_list def instantiation_done(self): x86_system_component.instantiation_done(self) apics_list = [] la = len(self.apic_id_list) for i in range(len(self.cpus)): apics_list += [self.cpus[i].apic] a_id = (i/la)*la + self.apic_id_list[i % la] self.cpus[i].apic.apic_id = a_id try: self.cpus[i].cpuid_physical_apic_id = a_id except: pass self.o.apic_bus.apics = apics_list i = len(self.cpus) for ioapic in self.o.apic_bus.ioapic: try: ioapic.ioapic_id = ((i/la)*la + self.apic_id_list[i % la]) << 24 except: ioapic.ioapic_id = i << 24 i = i + 1 self.cpus[0].bsp = 1 if self.disable_ap_cpus: for c in self.cpus[1:]: c.activity_state = 3 # waiting for SIPI SIM_disable_processor(c) register_component_class( x86_apic_bus_system_component, system_attributes, top_level = True) register_cmos_commands(x86_apic_bus_system_component.classname) ### X86/APIC Multipro System class x86_apic_system_component(x86_apic_bus_system_component): classname = 'x86-apic-system' basename = 'system' description = ('The "x86-apic-system" component represents a ' 'multi-processor capable x86 system with up to 255 cpus.') connectors = x86_apic_bus_system_component.connectors.copy() del connectors['io-apic'] connectors['interrupt'] = { 'type' : 'sb-interrupt', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : False, 'multi' : False} def __init__(self, parse_obj): x86_apic_bus_system_component.__init__(self, parse_obj) self.linux_acpi_bug_workaround = 1 self.use_hostfs = 1 self.use_shadow = 1 self.do_create_acpi_tables = 1 self.do_init_cmos = 1 self.do_init_mtrrs = 1 self.map_ram = 1 self.use_acpi = 1 self.disable_ap_cpus = 1 def get_acpi(self, idx): return self.use_acpi def set_acpi(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value self.use_acpi = val return Sim_Set_Ok def add_objects(self): x86_apic_bus_system_component.add_objects(self) self.o.ioapic = pre_obj('ioapic$', 'io-apic') self.o.ioapic.apic_bus = self.o.apic_bus self.o.ioapic.ioapic_id = 0 self.o.apic_bus.ioapic = [self.o.ioapic] self.o.pci_mem.map += [ [0xfec00000, self.o.ioapic, 0, 0, 0x20]] def add_connector_info(self): x86_apic_bus_system_component.add_connector_info(self) for i in range(255): self.connector_info['cpu%d' % i] = [i, self.o.phys_mem, self.o.port_space, self.o.apic_bus] self.connector_info['interrupt'] = [self.o.apic_bus, self.o.ioapic] def connect_sb_interrupt(self, connector, pic): pass def instantiation_done(self): x86_apic_bus_system_component.instantiation_done(self) # create the ACPI tables if necessary if self.do_create_acpi_tables: import x86_acpi_setup rsdp_base = iff(self.linux_acpi_bug_workaround, 0xef000, 0) bios_params = x86_acpi_setup.bios_params_holder() if self.bios and self.bios.endswith("rombios-2.68"): # this BIOS have larger data area and 
other offsets bios_params.ebda_base = 0x9f800 bios_params.rsdp_offs = 0x400 bios_params.high_desc_offs = 0x424 bios_params.nvr_desc_offs = 0x438 bios_params.reclaim_desc_offs = 0x44c bios_params.high_desc_offs2 = 0x460 bios_params.smem_offs = 0x488 x86_acpi_setup.create_acpi_tables( build_acpi_tables = self.use_acpi, cpu_list = self.cpus, megs = self.memory_megs, ioapic_id = self.o.apic_bus.ioapic[0].ioapic_id >> 24, user_rsdp_address = rsdp_base, bios_params = bios_params) register_component_class( x86_apic_system_component, system_attributes + bios_attribute + [['acpi', Sim_Attr_Optional, 'b', 'TRUE if the machine uses ACPI. Default is TRUE.']], top_level = True) register_cmos_commands(x86_apic_system_component.classname) # X86 Separate APIC System class x86_separate_apic_system_component(x86_apic_system_component): classname = 'x86-separate-mem-io-system' basename = 'system' description = ('The "x86-separate-mem-io-system" component represents a ' 'multi-processor capable x86 system with up to 15 cpus.') connectors = x86_apic_system_component.connectors.copy() def __init__(self, parse_obj): x86_apic_system_component.__init__(self, parse_obj) self.use_acpi = 0 self.use_shadow = 0 self.do_create_acpi_tables = 0 self.do_init_cmos = 0 self.do_init_mtrrs = 1 self.map_ram = 0 def add_objects(self): x86_apic_system_component.add_objects(self) self.o.post_log = pre_obj('post$', 'amibios8-post-log') self.o.port_space.map += [[0x80, self.o.post_log, 0, 0, 1]] def add_cpu(self, id, cpu): x86_apic_system_component.add_cpu(self, id, cpu) cpu.physical_io = self.o.pci_mem cpu.physical_dram = self.o.dram_space cpu.physical_memory.default_target = [cpu, 0, 0, self.o.dram_space] self.o.pci_mem.default_target = [self.o.dram_space, 0, 0, self.o.dram_space] register_component_class( x86_separate_apic_system_component, system_attributes + bios_attribute, top_level = True) register_cmos_commands(x86_separate_apic_system_component.classname) # E7520 system component class x86_e7520_system_component(x86_apic_bus_system_component): classname = 'x86-e7520-system' basename = 'system' description = ('The "x86-e7520-system" component represents a E7520-based ' 'multi-processor capable system with up to 15 cpus.') fm2_callback_called = 0 fm2_target = 1 bios_name = "" def define_bp_targets(self): if self.memory_megs == 2048: self.fm2_target = 2 elif self.memory_megs == 4096: self.fm2_target = 2 elif self.memory_megs == 8192: self.fm2_target = 4 if self.bios_name == "out_1024": # p:0x0000f1d76 wrmsr self.fix_cache_start = 0x0000f1d76 # p:0x0000f1dd2 wrmsr self.fix_cache_end = 0x0000f1dd2 # p:0x0000f2544 cmp bl,0x0 self.fix_mem_1 = 0x0000f2544 # p:0x0000f290e cmp byte ptr ss:[0x7],0x0 fix_mem_2 = 0x0000f290e elif self.bios_name == "goose-bios": # p:0x0000f1f7a wrmsr self.fix_cache_start = 0x0000f1f7a # p:0x0000f1fd6 wrmsr self.fix_cache_end = 0x0000f1fd6 # p:0x0000f2751 cmp bl,0x0 self.fix_mem_1 = 0x0000f2751 # p:0x0000f2b1b cmp byte ptr ss:[0x7],0x0 self.fix_mem_2 = 0x0000f2b1b else: print "Unknown BIOS, no patch available" self.fix_cache_start = -1 self.fix_cache_end = -1 self.fix_mem_1 = -1 self.fix_mem_2 = -1 def fcs_callback(self, dummy, obj, bp, memop): cp = self.cpus[0].name print "Setting-up fake cache for %s" % (cp) eval_cli_line("%s_mem.add-map device = %s_cache_space " % (cp, cp) + "base = 0xE0000 length = 0x2000") def fce_callback(self, dummy, obj, bp, memop): cp = self.cpus[0].name print "Removing fake cache for %s" % (cp) eval_cli_line("%s_mem.del-map device = %s_cache_space" % (cp, cp)) def 
fm1_callback(self, dummy, obj, bp, memop): cpu = self.cpus[0] print "Patch memory calibration results (#1) for %s" % (cpu.name) cpu.bl = 0 def fm2_callback(self, dummy, obj, bp, memop): cp = self.cpus[0].name if self.fm2_callback_called < self.fm2_target: print "Patch memory calibration results (#2) for %s" % (cp) eval_cli_line("%s_mem.set -l 0xe0007 size = 1 value = 0" % (cp)) self.fm2_callback_called += 1 def get_fm2_callback_called(self, idx): return self.fm2_callback_called def set_fm2_callback_called(self, val, idx): self.fm2_callback_called = val return Sim_Set_Ok def set_bp(self): pm = self.cpus[0].physical_memory if self.fix_cache_start != -1: fcs = SIM_breakpoint(pm, 0, 4, self.fix_cache_start, 1, 2) SIM_hap_add_callback_index("Core_Breakpoint_Memop", self.fcs_callback, None, fcs) if self.fix_cache_end != -1: fce = SIM_breakpoint(pm, 0, 4, self.fix_cache_end, 1, 2) SIM_hap_add_callback_index("Core_Breakpoint_Memop", self.fce_callback, None, fce) if self.fix_mem_1 != -1: fm1 = SIM_breakpoint(pm, 0, 4, self.fix_mem_1, 1, 2) SIM_hap_add_callback_index("Core_Breakpoint_Memop", self.fm1_callback, None, fm1) if self.fix_mem_2 != -1: fm2 = SIM_breakpoint(pm, 0, 4, self.fix_mem_2, 1, 2) SIM_hap_add_callback_index("Core_Breakpoint_Memop", self.fm2_callback, None, fm2) def set_bios_name(self, val, idx): self.bios_name = val return Sim_Set_Ok def get_bios_name(self, idx): return self.bios_name def finalize_instance(self): x86_apic_bus_system_component.finalize_instance(self) if self.instantiated: self.define_bp_targets() self.set_bp() def instantiation_done(self): x86_apic_bus_system_component.instantiation_done(self) self.define_bp_targets() self.set_bp() register_component_class( x86_e7520_system_component, system_attributes + [['bios_name', Sim_Attr_Optional, 's', 'The x86 BIOS filename (to set correct breakpoints).']], [['fm2_callback_called', Sim_Attr_Optional, 'i', 'Internal']], top_level = True) register_cmos_commands(x86_e7520_system_component.classname) ### X86 Processor Base Class class x86_processor_component(component_object): connectors = { 'cache-cpu' : {'type' : 'timing-model', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : True, 'multi' : False}} connectors['direct-pins'] = { 'type' : 'x86-pins', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : False, 'multi' : False} def get_cpu_frequency(self, idx): return self.freq_mhz def set_cpu_frequency(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value self.freq_mhz = val return Sim_Set_Ok def get_cpi(self, idx): try: return self.cpi except: return 1 def set_cpi(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value if not val in (1, 2, 4, 8, 16, 32, 64, 128): return Sim_Set_Illegal_Value self.cpi = val return Sim_Set_Ok def __init__(self, parse_obj, cpu_threads = 1): component_object.__init__(self, parse_obj) self.cpu_threads = cpu_threads self.o.space = [None] * cpu_threads self.o.cpu = [None] * cpu_threads self.o.tlb = [None] * cpu_threads def add_objects(self): thread_list = [] for i in range(self.cpu_threads): if self.cpu_threads == 1: self.o.space[i] = pre_obj('cpu(x)_mem', 'memory-space') self.o.cpu[i] = pre_obj('cpu(x)', self.cpuclass) self.o.tlb[i] = pre_obj('cpu(x)_tlb', 'x86-tlb') else: self.o.space[i] = pre_obj('cpu(x)_%d_mem'%i, 'memory-space') self.o.cpu[i] = pre_obj('cpu(x)_%d'%i, self.cpuclass) self.o.tlb[i] = pre_obj('cpu(x)_%d_tlb'%i, 'x86-tlb') thread_list.append(self.o.cpu[i]) self.o.space[i].map = [] self.o.cpu[i].processor_number = get_next_cpu_number() 
self.o.cpu[i].freq_mhz = self.freq_mhz try: self.cpi self.o.cpu[i].step_rate = [1, self.cpi, 0] except: pass self.o.cpu[i].physical_memory = self.o.space[i] self.o.tlb[i].cpu = self.o.cpu[i] self.o.tlb[i].type = "unlimited" self.o.cpu[i].tlb = self.o.tlb[i] if self.cpu_threads > 1: for i in range(self.cpu_threads): self.o.cpu[i].threads = thread_list def add_connector_info(self): if self.cpu_threads == 1: self.connector_info['cache-cpu'] = [0, self.o.cpu[0]] else: for i in range(self.cpu_threads): self.connector_info['cache-cpu%d' % i] = [i, self.o.cpu[i]] self.connector_info['direct-pins'] = [self.o.cpu] def get_cpu_threads(self, idx): return self.cpu_threads def connect_x86_processor(self, connector, id, mem_space, port_space): self.rename_component_objects('%d' % id) for i in range(self.cpu_threads): self.o.space[i].default_target = [mem_space, 0, 0, None] self.o.cpu[i].port_space = port_space def connect_x86_pins(self, connector): pass def connect_timing_model(self, connector, cache): id = self.connector_info[connector][0] self.o.space[id].timing_model = cache def disconnect_timing_model(self, connector): id = self.connector_info[connector][0] self.o.space[id].timing_model = None ### Legacy X86 Processor class x86_legacy_processor_component(x86_processor_component): connectors = x86_processor_component.connectors.copy() connectors['system'] = { 'type' : 'x86-processor', 'direction' : 'up', 'empty_ok' : False, 'hotplug' : False, 'multi' : False} connectors['interrupt'] = { 'type' : 'sb-interrupt', 'direction' : 'down', 'empty_ok' : False, 'hotplug' : False, 'multi' : False} def __init__(self, parse_obj): x86_processor_component.__init__(self, parse_obj) def add_objects(self): x86_processor_component.add_objects(self) self.o.x87 = pre_obj('x87_exc$', 'x87_exception') self.o.x87.irq_level = 13 self.o.x87.ignne_target = self.o.cpu[0] self.o.cpu[0].ferr_target = self.o.x87 def add_connector_info(self): x86_processor_component.add_connector_info(self) self.connector_info['system'] = [self.o.cpu[0]] self.connector_info['interrupt'] = [self.o.cpu[0], None] def connect_sb_interrupt(self, connector, pic): self.o.x87.irq_dev = pic class i386_processor_component(x86_legacy_processor_component): classname = 'i386-cpu' basename = 'cpu' description = ('The "i386-cpu" component represents an Intel 386 ' 'processor.') cpuclass = 'x86-386' class i486sx_processor_component(x86_legacy_processor_component): classname = 'i486sx-cpu' basename = 'cpu' description = ('The "i486sx-cpu" component represents an Intel 486sx ' 'processor.') cpuclass = 'x86-486sx' class i486dx2_processor_component(x86_legacy_processor_component): classname = 'i486dx2-cpu' basename = 'cpu' description = ('The "i486dx2-cpu" component represents an Intel 486dx2 ' 'processor.') cpuclass = 'x86-486dx2' for model in ['i386', 'i486sx', 'i486dx2']: register_component_class( eval('%s_processor_component' % model), [['cpu_frequency', Sim_Attr_Required, 'i', 'Processor frequency in MHz.'], ['cpi', Sim_Attr_Optional, 'i', 'Cycles per instruction.']]) ### X86/APIC Processor class x86_apic_processor_component(x86_processor_component): connectors = x86_processor_component.connectors.copy() connectors['system'] = { 'type' : 'x86-apic-processor', 'direction' : 'up', 'empty_ok' : False, 'hotplug' : False, 'multi' : False} # default APIC is a P4-type apic apic_type = "P4" def __init__(self, parse_obj, cpu_threads = 1): x86_processor_component.__init__(self, parse_obj, cpu_threads) self.o.apic = [None] * cpu_threads self.apic_freq_mhz = 10 def 
get_apic_frequency(self, idx): return self.apic_freq_mhz def set_apic_frequency(self, val, idx): if self.obj.configured: return Sim_Set_Illegal_Value self.apic_freq_mhz = val return Sim_Set_Ok def add_objects(self): x86_processor_component.add_objects(self) for i in range(self.cpu_threads): if self.cpu_threads == 1: self.o.apic[i] = pre_obj('cpu(x)_apic', 'apic') else: self.o.apic[i] = pre_obj('cpu(x)_%d_apic'%i, 'apic') self.o.apic[i].cpu = self.o.cpu[i] self.o.apic[i].queue = self.o.cpu[i] self.o.apic[i].cpu_bus_divisor = (float(self.freq_mhz) / self.apic_freq_mhz) if self.apic_type == "P4": self.o.apic[i].physical_broadcast_address = 255; self.o.apic[i].version = 0x14; self.o.apic[i].apic_type = "P4" elif self.apic_type == "P6": self.o.apic[i].physical_broadcast_address = 15; self.o.apic[i].version = 0x18; self.o.apic[i].apic_type = "P6" else: raise "Unknown APIC type %s" % self.apic_type self.o.cpu[i].apic = self.o.apic[i] if self.cpu_threads > 1: self.o.cpu[i].cpuid_logical_processor_count = self.cpu_threads else: self.o.cpu[i].cpuid_logical_processor_count = 0 self.o.space[i].map += [ [0xfee00000, self.o.apic[i], 0, 0, 0x4000]] def add_connector_info(self): x86_processor_component.add_connector_info(self) self.connector_info['system'] = [self.o.cpu] def connect_x86_apic_processor(self, connector, id, mem_space, port_space, apic_bus): self.connect_x86_processor(connector, id, mem_space, port_space) for i in range(self.cpu_threads): self.o.cpu[i].bsp = 0 self.o.apic[i].apic_id = 0 # temporary value if self.cpu_threads > 1: self.o.cpu[i].cpuid_physical_apic_id = 0 self.o.apic[i].apic_bus = apic_bus ### X86/HyperThreaded Processor, 2 Threads class x86_2ht_processor_component(x86_apic_processor_component): connectors = x86_apic_processor_component.connectors.copy() del connectors['cache-cpu'] for i in range(2): connectors['cache-cpu%d' % i] = { 'type' : 'timing-model', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : True, 'multi' : False} def __init__(self, parse_obj): x86_apic_processor_component.__init__(self, parse_obj, 2) ### X86/HyperThreaded Processor, 4 Threads class x86_4ht_processor_component(x86_apic_processor_component): connectors = x86_apic_processor_component.connectors.copy() del connectors['cache-cpu'] for i in range(4): connectors['cache-cpu%d' % i] = { 'type' : 'timing-model', 'direction' : 'down', 'empty_ok' : True, 'hotplug' : True, 'multi' : False} def __init__(self, parse_obj): x86_apic_processor_component.__init__(self, parse_obj, 4) class pentium_processor_component(x86_apic_processor_component): classname = 'pentium-cpu' basename = 'cpu' description = ('The "pentium-cpu" component represents an Intel Pentium ' 'processor.') cpuclass = 'x86-pentium' apic_type = "P6" class pentium_mmx_processor_component(x86_apic_processor_component): classname = 'pentium-mmx-cpu' basename = 'cpu' description = ('The "pentium-mmx-cpu" component represents an Intel ' 'Pentium MMX processor.') cpuclass = 'x86-pentium-mmx' apic_type = "P6" class pentium_pro_processor_component(x86_apic_processor_component): classname = 'pentium-pro-cpu' basename = 'cpu' description = ('The "pentium-pro-cpu" component represents an Intel ' 'Pentium Pro (P6) processor.') cpuclass = 'x86-ppro' apic_type = "P6" class pentium_ii_processor_component(x86_apic_processor_component): classname = 'pentium-ii-cpu' basename = 'cpu' description = ('The "pentium-ii-cpu" component represents an Intel ' 'Pentium II processor.') cpuclass = 'x86-p2' apic_type = "P6" class 
pentium_iii_processor_component(x86_apic_processor_component): classname = 'pentium-iii-cpu' basename = 'cpu' description = ('The "pentium-iii-cpu" component represents an Intel ' 'Pentium III processor.') cpuclass = 'x86-p3' apic_type = "P6" class pentium_m_processor_component(x86_apic_processor_component): classname = 'pentium-m-cpu' basename = 'cpu' description = ('The "pentium-m-cpu" component represents an Intel ' 'Pentium M processor.') cpuclass = 'x86-pentium-m' def add_objects(self): x86_apic_processor_component.add_objects(self) for i in range(self.cpu_threads): cpu = self.o.cpu[i] # 512kb L2 cache cpu.cpuid_2_eax = 0x03020101; cpu.cpuid_2_ebx = 0; cpu.cpuid_2_ecx = 0; cpu.cpuid_2_edx = 0x430c0804; cpu.cpuid_family = 6 cpu.cpuid_model = 9 cpu.cpuid_stepping = 5 cpu.cpuid_brand_id = 22 cpu.cpuid_processor_name = "Virtutech Pentium M Processor" # Sane speedstep frequency/voltage limits. Prevents division by # zero on some software. cpu.ia32_perf_sts = 0x01010F0F00000000 class pentium_4_processor_component(x86_apic_processor_component): classname = 'pentium-4-cpu' basename = 'cpu' description = ('The "pentium-4-cpu" component represents an Intel ' 'Pentium 4 processor.') cpuclass = 'x86-p4' class xeon_prestonia_processor_component(x86_apic_processor_component): classname = 'xeon-prestonia-cpu' basename = 'cpu' description = ('The "xeon-prestonia-cpu" component represents a 32-bit Intel ' 'Xeon processor.') cpuclass = 'x86-p4' def add_objects(self): x86_apic_processor_component.add_objects(self) for i in range(self.cpu_threads): cpu = self.o.cpu[i] cpu.cpuid_family = 0xf cpu.cpuid_model = 2 cpu.cpuid_stepping = 9 cpu.cpuid_brand_id = 0xb cpu.cpuid_processor_name = "Virtutech Xeon Processor" class pentium_4e_processor_component(x86_apic_processor_component): connectors = x86_apic_processor_component.connectors.copy() classname = 'pentium-4e-cpu' basename = 'cpu' description = ('The "pentium-4e-cpu" component represents an Intel ' '64-bit Pentium 4E processor.') cpuclass = 'x86-p4e' class pentium_4e_2ht_processor_component(x86_2ht_processor_component): connectors = x86_2ht_processor_component.connectors.copy() classname = 'pentium-4e-2ht-cpu' basename = 'cpu' description = ('The "pentium-4e-2ht-cpu" component represents an Intel ' '64-bit Pentium 4E processor, with 2 hyper threads.') cpuclass = 'x86-p4e' class pentium_4e_4ht_processor_component(x86_4ht_processor_component): connectors = x86_4ht_processor_component.connectors.copy() classname = 'pentium-4e-4ht-cpu' basename = 'cpu' description = ('The "pentium-4e-4ht-cpu" component represents an Intel ' '64-bit Pentium 4E processor, with 4 hyper threads.') cpuclass = 'x86-p4e' class x86_hammer_processor_component(x86_apic_processor_component): classname = 'x86-hammer-cpu' basename = 'cpu' description = ('The "x86-hammer-cpu" component represents a generic ' '64-bit AMD Athlon 64 or Opteron processor without ' 'on-chip devices.') cpuclass = 'x86-hammer' class opteron_processor_component(x86_apic_processor_component): connectors = x86_apic_processor_component.connectors.copy() classname = 'opteron-cpu' basename = 'cpu' description = ('The "opteron-cpu" component represents an ' 'Opteron 240 processor.') connectors['pci-bus'] = { 'type' : 'pci-bus', 'direction' : 'up', 'empty_ok' : False, 'hotplug' : False, 'multi' : False} cpuclass = 'x86-hammer' def opteron_brand_id(self, num): major = num / 100 minor = num % 100 mult = (minor - 38) / 2 if major == 1: return (0xc << 6) | mult elif major == 2: return (0x10 << 6) | mult elif major == 8: 
return (0x14 << 6) | mult def opteron_vid(self, millivolts): if millivolts > 1550 or millivolts < 800 or (millivolts % 25) != 0: raise "VID undefined" steps = (millivolts - 800) / 25 return 0x1e - steps def opteron_fid(self, mhz): if (mhz % 200) != 0: raise "FID undefined" multiplier = mhz / 200 if multiplier < 4 or multiplier > 25: raise "FID undefined" return (multiplier - 4) << 1 def instantiation_done(self): x86_apic_processor_component.instantiation_done(self) for i in range(self.cpu_threads): cpu = SIM_get_object(self.o.cpu[i].name) print "[%s] Setting cpu type to Opteron 240." % cpu.name # 0xf5a is revision CG cpu.cpuid_family=0xf cpu.cpuid_model=5 cpu.cpuid_stepping=0xa cpu.edx=0xf5a cpu.cpuid_brand_id=self.opteron_brand_id(240) cpu.fidvid_status = ((self.opteron_vid(1500) << 48) | (self.opteron_vid(1500) << 40) | (self.opteron_vid(1500) << 32) | (self.opteron_vid(1500) << 24) | (self.opteron_fid(cpu.freq_mhz) << 16) | (self.opteron_fid(cpu.freq_mhz) << 8) | (self.opteron_fid(cpu.freq_mhz) << 0)) def add_objects(self): x86_apic_processor_component.add_objects(self) self.o.hypertransport = pre_obj(self.o.cpu[0].name + '_hypertransport$', 'k8_hypertransport') self.o.address_map = pre_obj(self.o.cpu[0].name + '_address_map$', 'k8_address_map') self.o.dram = pre_obj(self.o.cpu[0].name + '_dram$', 'k8_dram') self.o.misc = pre_obj(self.o.cpu[0].name + '_misc$', 'k8_misc') self.o.hypertransport.misc_function = self.o.misc self.o.address_map.misc_function = self.o.misc def add_connector_info(self): x86_apic_processor_component.add_connector_info(self) self.connector_info['pci-bus'] = [[[0, self.o.hypertransport], [1, self.o.address_map], [2, self.o.dram], [3, self.o.misc]]] def connect_pci_bus(self, connector, slot, pci_bus): self.o.hypertransport.pci_bus = pci_bus self.o.address_map.pci_bus = pci_bus self.o.dram.pci_bus = pci_bus self.o.misc.pci_bus = pci_bus for model in ['pentium', 'pentium_mmx', 'pentium_pro', 'pentium_ii', 'pentium_iii', 'pentium_m', 'pentium_4', 'pentium_4e', 'pentium_4e_2ht', 'pentium_4e_4ht', 'x86_hammer', 'opteron', 'xeon_prestonia']: register_component_class( eval('%s_processor_component' % model), [['cpu_frequency', Sim_Attr_Required, 'i', 'Processor frequency in MHz.'], ['cpi', Sim_Attr_Optional, 'i', 'Cycles per instruction.'], ['apic_frequency', Sim_Attr_Optional, 'i', 'APIC bus frequency in MHz, default is 10 MHz.'], ['cpu_threads', Sim_Attr_Pseudo, 'i', 'The number of hyper threads in the processor.']]) ### North Bridge base class class north_bridge_component(component_object): connectors = { 'system' : {'type' : 'x86-chipset', 'direction' : 'up', 'empty_ok' : False, 'hotplug' : False, 'multi' : False}} for i in range(24): connectors['pci-slot%d' % i] = { 'type' : 'pci-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False} def __init__(self, parse_obj): component_object.__init__(self, parse_obj) if not "pci_bus_class" in dir(self): self.pci_bus_class = 'pci-bus' def add_objects(self): self.o.pci_conf = pre_obj('pci_conf$', 'memory-space') self.o.pci_io = pre_obj('pci_io$', 'memory-space') self.o.pci_io.map = [] self.o.pci_bus = pre_obj('pci_bus$', self.pci_bus_class) self.o.pci_bus.conf_space = self.o.pci_conf self.o.pci_bus.io_space = self.o.pci_io self.o.pci_bus.pci_devices = [] def add_connector_info(self): self.connector_info['system'] = [] for i in range(24): self.connector_info['pci-slot%d' % i] = [i, self.o.pci_bus] self.connector_info['southbridge'] = [self.o.pci_bus] def connect_x86_chipset(self, connector, phys_space, 
pci_space, port_space, ram): self.o.pci_bus.memory_space = pci_space port_space.map += [ [0xcf8, self.o.bridge, 0, 0xcf8, 4], [0xcf9, self.o.bridge, 0, 0xcf9, 2], [0xcfa, self.o.bridge, 0, 0xcfa, 2], [0xcfb, self.o.bridge, 0, 0xcfb, 1], [0xcfc, self.o.bridge, 0, 0xcfc, 4], [0xcfd, self.o.bridge, 0, 0xcfd, 2], [0xcfe, self.o.bridge, 0, 0xcfe, 2], [0xcff, self.o.bridge, 0, 0xcff, 1]] port_space.default_target = [self.o.pci_io, 0, 0, None] pci_space.default_target = [ram, 0, 0, None] def connect_pci_bus(self, connector, device_list): slot = self.connector_info[connector][0] bus = self.connector_info[connector][1] devs = bus.pci_devices for dev in device_list: devs += [[slot, dev[0], dev[1]]] bus.pci_devices = devs ### Intel 443BX North Bridge without AGP class north_bridge_443bx_component(north_bridge_component): classname = 'north-bridge-443bx' basename = 'north_bridge' description = ('The "north-bridge-443bx" component represents an Intel ' '443BX North Bridge (host-to-PCI bridge) without AGP.') def add_objects(self): north_bridge_component.add_objects(self) self.o.bridge = pre_obj('north_bridge$', 'i82443bx') self.o.bridge.pci_bus = self.o.pci_bus self.o.pci_bus.bridge = self.o.bridge self.o.pci_bus.pci_devices = [[0, 0, self.o.bridge]] def connect_x86_chipset(self, connector, phys_space, pci_space, port_space, ram): north_bridge_component.connect_x86_chipset( self, connector, phys_space, pci_space, port_space, ram) self.o.bridge.memory = phys_space port_space.map += [[0x22, self.o.bridge, 0, 0x22, 1]] register_component_class(north_bridge_443bx_component, []) ### Intel 443BX North Bridge with AGP class north_bridge_443bx_agp_component(north_bridge_443bx_component): classname = 'north-bridge-443bx-agp' basename = 'north_bridge' description = ('The "north-bridge-443bx" component represents an Intel ' '443BX North Bridge (host-to-PCI bridge) with AGP.') connectors = north_bridge_443bx_component.connectors.copy() connectors['agp-slot0'] = { 'type' : 'agp-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False} def add_objects(self): north_bridge_443bx_component.add_objects(self) self.o.agp_conf = pre_obj('agp_conf$', 'memory-space') self.o.agp_io = pre_obj('agp_io$', 'memory-space') self.o.agp_io.map = [] self.o.agp_mem = pre_obj('agp_mem$', 'memory-space') self.o.agp_mem.map = [] self.o.agp_bus = pre_obj('agp_bus$', 'pci-bus') self.o.agp_bus.conf_space = self.o.agp_conf self.o.agp_bus.io_space = self.o.agp_io self.o.agp_bus.memory_space = self.o.agp_mem self.o.agp_bus.pci_devices = [] self.o.agp = pre_obj('pci_to_agp$', 'i82443bx_agp') self.o.agp.pci_bus = self.o.pci_bus self.o.agp.secondary_bus = self.o.agp_bus self.o.agp_bus.bridge = self.o.agp self.o.bridge.agp_bridge = self.o.agp self.o.pci_bus.pci_devices += [[1, 0, self.o.agp]] def add_connector_info(self): north_bridge_443bx_component.add_connector_info(self) self.connector_info['agp-slot0'] = [0, self.o.agp_bus] def connect_x86_chipset(self, connector, phys_space, pci_space, port_space, ram): north_bridge_443bx_component.connect_x86_chipset( self, connector, phys_space, pci_space, port_space, ram) def connect_agp_bus(self, connector, device_list): self.connect_pci_bus(connector, device_list) register_component_class(north_bridge_443bx_agp_component, []) ### Intel 875P North Bridge class north_bridge_875p_component(north_bridge_component): classname = 'north-bridge-875p' basename = 'north_bridge' description = ('The "north-bridge-875p" component represents an Intel ' '875P North Bridge (host-to-PCI bridge).') 
connectors = north_bridge_component.connectors.copy() connectors['agp-slot0'] = { 'type' : 'agp-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False} def add_objects(self): north_bridge_component.add_objects(self) self.o.bridge = pre_obj('north_bridge$', 'i82875P') self.o.bridge.pci_bus = self.o.pci_bus self.o.pci_bus.bridge = self.o.bridge self.o.pci_bus.pci_devices = [[0, 0, self.o.bridge]] self.o.agp_conf = pre_obj('agp_conf$', 'memory-space') self.o.agp_io = pre_obj('agp_io$', 'memory-space') self.o.agp_io.map = [] self.o.agp_mem = pre_obj('agp_mem$', 'memory-space') self.o.agp_mem.map = [] self.o.agp_bus = pre_obj('agp_bus$', 'pci-bus') self.o.agp_bus.conf_space = self.o.agp_conf self.o.agp_bus.io_space = self.o.agp_io self.o.agp_bus.memory_space = self.o.agp_mem self.o.agp_bus.pci_devices = [] self.o.agp = pre_obj('pci_to_agp$', 'i82875P_agp') self.o.agp.pci_bus = self.o.pci_bus self.o.agp.secondary_bus = self.o.agp_bus self.o.agp_bus.bridge = self.o.agp self.o.bridge.agp_bridge = self.o.agp self.o.pci_bus.pci_devices += [[1, 0, self.o.agp]] def add_connector_info(self): north_bridge_component.add_connector_info(self) self.connector_info['agp-slot0'] = [0, self.o.agp_bus] def connect_x86_chipset(self, connector, phys_space, pci_space, port_space, ram): north_bridge_component.connect_x86_chipset( self, connector, phys_space, pci_space, port_space, ram) self.o.bridge.memory = phys_space port_space.map += [[0x22, self.o.bridge, 0, 0x22, 1]] def connect_agp_bus(self, connector, device_list): self.connect_pci_bus(connector, device_list) register_component_class(north_bridge_875p_component, []) ### Intel E7520 North Bridge class north_bridge_e7520_component(north_bridge_component): classname = 'north-bridge-e7520' basename = 'north_bridge' description = ('The "north-bridge-e7520" component represents an Intel ' 'E7520 North Bridge (host-to-PCI bridge).') connectors = north_bridge_component.connectors.copy() connectors['southbridge'] = { 'type' : 'hub-link', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False} connectors['pcie-a-slot'] = { 'type' : 'pcie-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} connectors['pcie-a1-slot'] = { 'type' : 'pcie-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} connectors['pcie-b-slot'] = { 'type' : 'pcie-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} connectors['pcie-b1-slot'] = { 'type' : 'pcie-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} connectors['pcie-c-slot'] = { 'type' : 'pcie-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} connectors['pcie-c1-slot'] = { 'type' : 'pcie-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} for i in range(24): del connectors['pci-slot%d' % i] def add_objects(self): self.pci_bus_class = 'pcie-switch' north_bridge_component.add_objects(self) self.o.bridge = pre_obj('mch$', 'e7520') self.o.bridge.pci_bus = self.o.pci_bus self.o.mch_error = pre_obj('mch$_error', 'e7520_error') self.o.mch_error.pci_bus = self.o.pci_bus self.o.mch_dma = pre_obj('mch$_dma', 'e7520_dma') self.o.mch_dma.pci_bus = self.o.pci_bus self.o.mch_extended = pre_obj('mch$_extended', 'e7520_extended') self.o.mch_extended.pci_bus = self.o.pci_bus # PCIE-A self.o.mch_pcie_a = pre_obj('mch$_pcie_a', 'e7520_pcie_port') self.o.mch_pcie_a.pci_config_device_id = 0x3595; self.o.mch_pcie_a.pci_bus = self.o.pci_bus self.o.mch_pcie_a_bus 
= pre_obj('mch$_pcie_a_bus', 'pcie-switch') self.o.mch_pcie_a_bus.pci_devices = [] self.o.mch_pcie_a_conf = pre_obj('mch$_pcie_a_conf', 'memory-space') self.o.mch_pcie_a_io = pre_obj('mch$_pcie_a_io$', 'memory-space') self.o.mch_pcie_a_mem = pre_obj('mch$_pcie_a_mem$', 'memory-space') self.o.mch_pcie_a_bus.conf_space = self.o.mch_pcie_a_conf self.o.mch_pcie_a_bus.io_space = self.o.mch_pcie_a_io self.o.mch_pcie_a_bus.memory_space = self.o.mch_pcie_a_mem self.o.mch_pcie_a_bus.bridge = self.o.mch_pcie_a self.o.mch_pcie_a.secondary_bus = self.o.mch_pcie_a_bus # PCIE-A1 self.o.mch_pcie_a1 = pre_obj('mch$_pcie_a1', 'e7520_pcie_port') self.o.mch_pcie_a1.pci_config_device_id = 0x3595; self.o.mch_pcie_a1.pci_bus = self.o.pci_bus self.o.mch_pcie_a1_bus = pre_obj('mch$_pcie_a1_bus', 'pcie-switch') self.o.mch_pcie_a1_bus.pci_devices = [] self.o.mch_pcie_a1_conf = pre_obj('mch$_pcie_a1_conf', 'memory-space') self.o.mch_pcie_a1_io = pre_obj('mch$_pcie_a1_io$', 'memory-space') self.o.mch_pcie_a1_mem = pre_obj('mch$_pcie_a1_mem$', 'memory-space') self.o.mch_pcie_a1_bus.conf_space = self.o.mch_pcie_a1_conf self.o.mch_pcie_a1_bus.io_space = self.o.mch_pcie_a1_io self.o.mch_pcie_a1_bus.memory_space = self.o.mch_pcie_a1_mem self.o.mch_pcie_a1_bus.bridge = self.o.mch_pcie_a1 self.o.mch_pcie_a1.secondary_bus = self.o.mch_pcie_a1_bus # PCIE-B self.o.mch_pcie_b = pre_obj('mch$_pcie_b', 'e7520_pcie_port') self.o.mch_pcie_b.pci_config_device_id = 0x3595; self.o.mch_pcie_b.pci_bus = self.o.pci_bus self.o.mch_pcie_b_bus = pre_obj('mch$_pcie_b_bus', 'pcie-switch') self.o.mch_pcie_b_bus.pci_devices = [] self.o.mch_pcie_b_conf = pre_obj('mch$_pcie_b_conf', 'memory-space') self.o.mch_pcie_b_io = pre_obj('mch$_pcie_b_io$', 'memory-space') self.o.mch_pcie_b_mem = pre_obj('mch$_pcie_b_mem$', 'memory-space') self.o.mch_pcie_b_bus.conf_space = self.o.mch_pcie_b_conf self.o.mch_pcie_b_bus.io_space = self.o.mch_pcie_b_io self.o.mch_pcie_b_bus.memory_space = self.o.mch_pcie_b_mem self.o.mch_pcie_b_bus.bridge = self.o.mch_pcie_b self.o.mch_pcie_b.secondary_bus = self.o.mch_pcie_b_bus # PCIE-B1 self.o.mch_pcie_b1 = pre_obj('mch$_pcie_b1', 'e7520_pcie_port') self.o.mch_pcie_b1.pci_config_device_id = 0x3595; self.o.mch_pcie_b1.pci_bus = self.o.pci_bus self.o.mch_pcie_b1_bus = pre_obj('mch$_pcie_b1_bus', 'pcie-switch') self.o.mch_pcie_b1_bus.pci_devices = [] self.o.mch_pcie_b1_conf = pre_obj('mch$_pcie_b1_conf', 'memory-space') self.o.mch_pcie_b1_io = pre_obj('mch$_pcie_b1_io$', 'memory-space') self.o.mch_pcie_b1_mem = pre_obj('mch$_pcie_b1_mem$', 'memory-space') self.o.mch_pcie_b1_bus.conf_space = self.o.mch_pcie_b1_conf self.o.mch_pcie_b1_bus.io_space = self.o.mch_pcie_b1_io self.o.mch_pcie_b1_bus.memory_space = self.o.mch_pcie_b1_mem self.o.mch_pcie_b1_bus.bridge = self.o.mch_pcie_b1 self.o.mch_pcie_b1.secondary_bus = self.o.mch_pcie_b1_bus # PCIE-C self.o.mch_pcie_c = pre_obj('mch$_pcie_c', 'e7520_pcie_port') self.o.mch_pcie_c.pci_config_device_id = 0x3595; self.o.mch_pcie_c.pci_bus = self.o.pci_bus self.o.mch_pcie_c_bus = pre_obj('mch$_pcie_c_bus', 'pcie-switch') self.o.mch_pcie_c_bus.pci_devices = [] self.o.mch_pcie_c_conf = pre_obj('mch$_pcie_c_conf', 'memory-space') self.o.mch_pcie_c_io = pre_obj('mch$_pcie_c_io$', 'memory-space') self.o.mch_pcie_c_mem = pre_obj('mch$_pcie_c_mem$', 'memory-space') self.o.mch_pcie_c_bus.conf_space = self.o.mch_pcie_c_conf self.o.mch_pcie_c_bus.io_space = self.o.mch_pcie_c_io self.o.mch_pcie_c_bus.memory_space = self.o.mch_pcie_c_mem self.o.mch_pcie_c_bus.bridge = self.o.mch_pcie_c 
self.o.mch_pcie_c.secondary_bus = self.o.mch_pcie_c_bus # PCIE-C1 self.o.mch_pcie_c1 = pre_obj('mch$_pcie_c1', 'e7520_pcie_port') self.o.mch_pcie_c1.pci_config_device_id = 0x3595; self.o.mch_pcie_c1.pci_bus = self.o.pci_bus self.o.mch_pcie_c1_bus = pre_obj('mch$_pcie_c1_bus', 'pcie-switch') self.o.mch_pcie_c1_bus.pci_devices = [] self.o.mch_pcie_c1_conf = pre_obj('mch$_pcie_c1_conf', 'memory-space') self.o.mch_pcie_c1_io = pre_obj('mch$_pcie_c1_io$', 'memory-space') self.o.mch_pcie_c1_mem = pre_obj('mch$_pcie_c1_mem$', 'memory-space') self.o.mch_pcie_c1_bus.conf_space = self.o.mch_pcie_c1_conf self.o.mch_pcie_c1_bus.io_space = self.o.mch_pcie_c1_io self.o.mch_pcie_c1_bus.memory_space = self.o.mch_pcie_c1_mem self.o.mch_pcie_c1_bus.bridge = self.o.mch_pcie_c1 self.o.mch_pcie_c1.secondary_bus = self.o.mch_pcie_c1_bus self.o.bridge.pcie_mem = [self.o.mch_pcie_a_mem, self.o.mch_pcie_a1_mem, self.o.mch_pcie_b_mem, self.o.mch_pcie_b1_mem, self.o.mch_pcie_c_mem, self.o.mch_pcie_c1_mem] self.o.pci_bus.bridge = self.o.bridge self.o.pci_bus.pci_devices = [[0, 0, self.o.bridge], [0, 1, self.o.mch_error], [1, 0, self.o.mch_dma], [2, 0, self.o.mch_pcie_a], [3, 0, self.o.mch_pcie_a1], [4, 0, self.o.mch_pcie_b], [5, 0, self.o.mch_pcie_b1], [6, 0, self.o.mch_pcie_c], [7, 0, self.o.mch_pcie_c1], [8, 0, self.o.mch_extended]] def add_connector_info(self): north_bridge_component.add_connector_info(self) self.connector_info['southbridge'] = [self.o.pci_bus] self.connector_info['pcie-a-slot'] = [self.o.mch_pcie_a_bus] self.connector_info['pcie-a1-slot'] = [self.o.mch_pcie_a1_bus] self.connector_info['pcie-b-slot'] = [self.o.mch_pcie_b_bus] self.connector_info['pcie-b1-slot'] = [self.o.mch_pcie_b1_bus] self.connector_info['pcie-c-slot'] = [self.o.mch_pcie_c_bus] self.connector_info['pcie-c1-slot'] = [self.o.mch_pcie_c1_bus] def check_pcie_bus(self, connector, device_list): bus = self.connector_info[connector][0] used = {} for d in bus.pci_devices: slot, fun, dev = d used[(slot, fun)] = 1 for d in device_list: slot, fun, dev = d if used.has_key((slot, fun)): raise Exception, "Slot %d Function %d already in use." 
% (slot, fun) def connect_pcie_bus(self, connector, device_list): bus = self.connector_info[connector][0] bus.pci_devices += device_list def connect_x86_chipset(self, connector, phys_space, pci_space, port_space, ram): north_bridge_component.connect_x86_chipset( self, connector, phys_space, pci_space, port_space, ram) self.o.bridge.ram = ram.map[0][1] # real RAM object self.o.bridge.dram = ram self.o.bridge.cpu_memory = phys_space self.o.bridge.pci_memory = pci_space # static translator phys_space.map += [ [0x000C0000, self.o.bridge, 10, 0xC0000, 0x40000, pci_space, 1,0,0]] def connect_hub_link(self, connector): pass register_component_class(north_bridge_e7520_component, []) ### AMD K8 integrated north bridge class north_bridge_k8_component(north_bridge_component): connectors = north_bridge_component.connectors.copy() for i in range(24,32): connectors['pci-slot%d' % i] = { 'type' : 'pci-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : False} classname = 'north-bridge-k8' basename = 'north_bridge' description = ('The "north-bridge-k8" component represents the ' 'integrated north bridge on an AMD Athlon 64 or ' 'AMD Opteron chip.') def add_objects(self): north_bridge_component.add_objects(self) self.o.bridge = pre_obj('north_bridge$', 'k8_host_bridge') self.o.bridge.pci_bus = self.o.pci_bus self.o.pci_bus.bridge = self.o.bridge def add_connector_info(self): north_bridge_component.add_connector_info(self) for i in range(24,32): self.connector_info['pci-slot%d' % i] = [i, self.o.pci_bus] def connect_x86_chipset(self, connector, phys_space, pci_space, port_space, ram): north_bridge_component.connect_x86_chipset( self, connector, phys_space, pci_space, port_space, ram) self.o.bridge.memory = phys_space def connect_pci_bus(self, connector, device_list): north_bridge_component.connect_pci_bus(self, connector, device_list) register_component_class(north_bridge_k8_component, []) ### Legacy PC Devices class legacy_pc_devices_component(standard_pc_devices_component): classname = 'legacy-pc-devices' basename = 'legacy' description = ('The "legacy-pc-devices" component represents the legacy ' 'devices found in PC/AT compatible computers. 
This ' 'component can be used as system chipset in ISA based ' 'x86 machines without PCI support.') connectors = standard_pc_devices_component.connectors.copy() connectors['system'] = { 'type' : 'x86-chipset', 'direction' : 'up', 'empty_ok' : False, 'hotplug' : False, 'multi' : False} connectors['interrupt'] = { 'type' : 'sb-interrupt', 'direction' : 'up', 'empty_ok' : False, 'hotplug' : False, 'multi' : False} connectors['isa-bus'] = { 'type' : 'isa-bus', 'direction' : 'down', 'empty_ok' : 1, 'hotplug' : False, 'multi' : True} def add_objects(self): self.used_ports = [] standard_pc_devices_component.add_objects(self) self.mem_space = None def add_connector_info(self): standard_pc_devices_component.add_connector_info(self) self.connector_info['system'] = [] self.connector_info['interrupt'] = [self.o.isa] self.connector_info['isa-bus'] = [self.o.isa_bus, None, self.o.isa, self.o.dma] def connect_x86_chipset(self, connector, phys_space, pci_space, port_space, ram): self.mem_space = phys_space self.o.dma.memory = phys_space port_space.default_target = [self.o.isa_bus, 0, 0, None] self.connector_info['isa-bus'] = [self.o.isa_bus, phys_space, self.o.isa, self.o.dma] def connect_sb_interrupt(self, connector, irq_dst, ioapic): self.o.pic.irq_dev = irq_dst def check_isa_bus(self, connector, ports): for p in ports: if p in self.used_ports: # TODO: use specific exception raise Exception, "Port 0x%x already in use." % p self.used_ports += ports def connect_isa_bus(self, connector): pass register_component_class(legacy_pc_devices_component, [])
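# A minimal standalone sketch (illustrative only, not part of the original
# module; the _sketch_* names are invented here) restating the MTRR mask and
# Opteron FID/VID arithmetic used above, so it can be checked in isolation.
def _sketch_mtrr_mask(size, physical_bits=40):
    # e.g. size = 512 MB = 0x20000000 with 40 physical address bits
    # gives ~(0x1fffffff) & (2**40 - 1) = 0xffe0000000
    return ~(size - 1) & ((1 << physical_bits) - 1)

def _sketch_opteron_fid(mhz):
    # 200 MHz steps; multipliers 4..25 map to even codes 0, 2, 4, ...
    # e.g. 1400 MHz -> multiplier 7 -> (7 - 4) << 1 = 6
    return ((mhz / 200) - 4) << 1

def _sketch_opteron_vid(millivolts):
    # 25 mV steps downward from code 0x1e at 800 mV;
    # e.g. 1500 mV -> 0x1e - 28 = 2, matching the fidvid_status value above
    return 0x1e - (millivolts - 800) / 25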
gpl-2.0
-5,586,652,861,325,700,000
38.931429
101
0.557026
false
3.091488
false
false
false
bscottm/SublimeHaskell
internals/proc_helper.py
1
9148
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
# ProcHelper: Process execution helper class.
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

import errno
import subprocess
import os
import os.path

import SublimeHaskell.sublime_haskell_common as Common
import SublimeHaskell.internals.logging as Logging
import SublimeHaskell.internals.settings as Settings
import SublimeHaskell.internals.utils as Utils
import SublimeHaskell.internals.which as Which
import SublimeHaskell.internals.cabal_cfgrdr as CabalConfigRdr
import SublimeHaskell.internals.cabal_reader as CabalReader


class ProcHelper(object):
    """Command and tool process execution helper."""

    # Augmented PATH for the subprocesses and locating executables.
    augmented_path = None

    def __init__(self, command, **popen_kwargs):
        """Open a pipe to a command or tool."""
        if ProcHelper.augmented_path is None:
            ProcHelper.augmented_path = ProcHelper.make_augmented_path()

        ## Necessary evil: Don't cache the environment, just update the PATH in the current environment.
        ## Why? Because someone could (like me) change os.environ via the ST console and those changes
        ## would never make it here. Use case: setting $http_proxy so that stack can fetch packages.
        proc_env = dict(os.environ)
        proc_env['PATH'] = ProcHelper.augmented_path + os.pathsep + proc_env.get('PATH', '')

        self.process = None
        self.process_err = None

        if Utils.is_windows():
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            popen_kwargs['startupinfo'] = startupinfo

        # Allow caller to specify something different for stdout or stderr -- provide
        # the default here if unspecified.
        popen_kwargs['stdout'] = popen_kwargs.get('stdout', subprocess.PIPE)
        popen_kwargs['stderr'] = popen_kwargs.get('stderr', subprocess.PIPE)

        try:
            normcmd = Which.which(command, proc_env['PATH'])
            if normcmd is not None:
                self.process = subprocess.Popen(normcmd, stdin=subprocess.PIPE, env=proc_env, **popen_kwargs)
            else:
                self.process = None
                self.process_err = "SublimeHaskell.ProcHelper: {0} was not found on PATH!".format(command[0])

        except OSError as os_exc:
            self.process_err = \
                '\n'.join(["SublimeHaskell: Problem executing '{0}'".format(' '.join(command))
                           , 'Operating system error: {0}'.format(os_exc)
                          ])

            if os_exc.errno == errno.EPIPE:
                # Most likely reason: subprocess output a usage message
                stdout, stderr = self.process.communicate()
                exit_code = self.process.wait()
                self.process_err = self.process_err + \
                    '\n'.join(['' \
                               , 'Process exit code: {0}'.format(exit_code) \
                               , '' \
                               , "output:" \
                               , stdout if stdout else "--no output--" \
                               , '' \
                               , 'error:' \
                               , stderr if stderr else "--no error output--"])
                self.process = None
            else:
                self.process = None
                raise os_exc

    # 'with' statement support:
    def __enter__(self):
        return self

    def __exit__(self, _type, _value, _traceback):
        self.cleanup()
        return False

    def cleanup(self):
        if self.process is not None:
            self.process.stdin.close()
            self.process.stdout.close()
            if self.process.stderr is not None:
                # stderr can be None if it is tied to stdout (i.e., 'stderr=subprocess.STDOUT')
                self.process.stderr.close()

    def wait(self, input_str=None):
        """Wait for the subprocess to complete and exit, collect and decode ``stdout`` and ``stderr``,
        returning the tuple ``(exit_code, stdout, stderr)``"""
        if self.process is not None:
            stdout, stderr = self.process.communicate(Utils.encode_bytes(input_str if input_str else ''))
            exit_code = self.process.wait()
            # Ensure that we reap the file descriptors.
            self.cleanup()
            return (exit_code, Utils.decode_bytes(stdout), Utils.decode_bytes(stderr))

        return (-1, '', self.process_err or "?? unknown error -- no process.")

    # Update the augmented environment when `add_to_PATH` or `add_standard_dirs` change.
    @staticmethod
    def update_environment(_key, _val):
        # Reinitialize the tool -> path cache:
        Which.reset_cache()
        ProcHelper.augmented_path = ProcHelper.make_augmented_path()

    @staticmethod
    def make_augmented_path():
        ''' Generate the augmented PATH for subprocesses: adds the appropriate cabal/stack local install directory
        ($HOME/.local/bin for *nix, %APPDATA%/local/bin for Windows) and updates PATH with `add_to_PATH` extras.
        '''
        std_places = []
        if Settings.PLUGIN.add_standard_dirs:
            std_places.append("$HOME/.local/bin" if not Utils.is_windows() else "%APPDATA%/local/bin")
            if Utils.is_macosx():
                std_places.append('$HOME/Library/Haskell/bin')
            std_places += CabalConfigRdr.cabal_config()
            std_places = [dir for dir in [Utils.normalize_path(path) for path in std_places] if os.path.isdir(dir)]

        add_to_path = list(filter(os.path.isdir, map(Utils.normalize_path, Settings.PLUGIN.add_to_path)))

        Logging.log("std_places = {0}".format(std_places), Logging.LOG_INFO)
        Logging.log("add_to_PATH = {0}".format(add_to_path), Logging.LOG_INFO)

        return os.pathsep.join(add_to_path + std_places)

    @staticmethod
    def get_extended_path():
        if ProcHelper.augmented_path is None:
            ProcHelper.augmented_path = ProcHelper.make_augmented_path()
        return ProcHelper.augmented_path + os.pathsep + (os.environ.get('PATH', ''))

    @staticmethod
    def run_process(command, input_string='', **popen_kwargs):
        """Execute a subprocess, wait for it to complete, returning a ``(exit_code, stdout, stderr)`` tuple."""
        with ProcHelper(command, **popen_kwargs) as proc:
            return proc.wait(input_string)


def exec_wrapper_cmd(exec_with, cmd_list):
    wrapper = []
    if exec_with == 'cabal':
        wrapper = ['cabal', 'exec', cmd_list[0]]
    elif exec_with == 'cabal-new-build':
        wrapper = ['cabal', 'new-run', 'exe:' + cmd_list[0]]
    elif exec_with == 'stack':
        wrapper = ['stack', 'exec', cmd_list[0]]
    else:
        errmsg = 'ProcHelper.exec_wrapper_cmd: Unknown execution prefix \'{0}\''.format(exec_with)
        raise RuntimeError(errmsg)

    return wrapper + ['--'] + cmd_list[1:] if cmd_list[1:] else wrapper


def exec_with_wrapper(exec_with, install_dir, cmd_list):
    '''Wrapper function for inserting the execution wrapper, e.g., 'cabal exec' or 'stack exec'

    :returns: Process object from ProcHelper.
    '''
    proc_args = {}
    if exec_with is not None:
        cmd_list = exec_wrapper_cmd(exec_with, cmd_list)
        if install_dir is not None:
            proc_args['cwd'] = Utils.normalize_path(install_dir)
        else:
            raise RuntimeError('ProcHelper.exec_with_wrapper: invalid install_dir (None)')
    else:
        cmd = Which.which(cmd_list[0], ProcHelper.get_extended_path())
        if cmd is not None:
            cmd_list[0] = cmd

    Logging.log('ProcHelper.exec_with_wrapper: {0} in {1}'.format(cmd_list, proc_args.get('cwd')), Logging.LOG_DEBUG)
    return ProcHelper(cmd_list, **proc_args)


def get_source_dir(filename):
    '''Get root of hs-source-dirs for filename in project.
    '''
    if not filename:
        return os.path.expanduser('~')

    cabal_dir, cabal_proj = Common.locate_cabal_project(filename)
    if not cabal_dir:
        # No cabal file -> Punt and assume the source directory for the file and project is the same as the file.
        return os.path.dirname(filename)
    else:
        proj_info = CabalReader.CabalProjectReader(cabal_dir, cabal_proj)
        cabal_info = proj_info.cabal_info
        dirs = ['.']

        executables = cabal_info.get('executable', {})
        dirs.extend([sdir.strip()
                     for exe in executables
                     for sdirs in executables[exe].get('hs-source-dirs', [])
                     for sdir in sdirs.split(',')])
        dirs.extend([sdir.strip()
                     for sdirs in cabal_info.get('library', {}).get('hs-source-dirs', [])
                     for sdir in sdirs.split(',')])

        paths = [os.path.abspath(os.path.join(cabal_dir, srcdirs)) for srcdirs in set(dirs)]
        paths.sort(key=lambda p: -len(p))

        for path in paths:
            if filename.startswith(path):
                return path

    return os.path.dirname(filename)
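# Usage sketch (an illustrative addition, not part of the original module):
# run a tool synchronously through ProcHelper and unpack the decoded triple.
def _sketch_ghc_version():
    exit_code, out, err = ProcHelper.run_process(['ghc', '--version'])
    return out.strip() if exit_code == 0 else err

# exec_wrapper_cmd composes the sandboxing prefix, e.g.
# exec_wrapper_cmd('stack', ['hlint', 'src']) -> ['stack', 'exec', 'hlint', '--', 'src']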
mit
4,924,986,076,657,667,000
41.351852
117
0.591277
false
3.732354
false
false
false
Pytlicek/VOBS
app/controllers/supplier/profile.py
1
1457
# -*- coding: utf-8 -*-
from flask import render_template, session, redirect, url_for
from app import app
from app.models.Forms import PublicProfile
from app.models.Checks import login_required
from app.models.SQL_DB import User, Item_Profile


@app.route('/supplier/settings/public_profile', methods=['GET', 'POST'])
@login_required
def supplier_settings_public_profile():
    supplier_data = Item_Profile.query.filter_by(
        user_id=User.query.filter_by(username=session['username']).first().id).first()
    form = PublicProfile()
    return render_template('supplier/profile/index.html', supplier_data=supplier_data, form=form)


@app.route('/supplier/settings/public_profile/public_profile_change', methods=['POST'])
@login_required
def supplier_settings_public_profile_change():
    from app.models.Profiles import edit_public_profile
    supplier_data = Item_Profile.query.filter_by(
        user_id=User.query.filter_by(username=session['username']).first().id).first()
    form = PublicProfile()
    if form.validate_on_submit():
        edit_public_profile(form.company_name.data, form.company_address.data, form.company_logo.data,
                            form.ico.data, form.dic.data, form.ic_dph.data,
                            form.phone.data, form.email.data, form.website.data)
        return redirect(url_for('supplier_settings_public_profile'))
    else:
        return render_template('supplier/profile/index.html', supplier_data=supplier_data, form=form)
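# Sketch (assumption): the PublicProfile form itself lives in app.models.Forms
# and is not shown in this file; a minimal Flask-WTF form matching the fields
# this view reads could look like the following. Field names mirror the view;
# the validator choices are illustrative guesses.
#
# from flask_wtf import FlaskForm
# from wtforms import StringField
# from wtforms.validators import DataRequired, Email, Optional
#
# class PublicProfile(FlaskForm):
#     company_name = StringField(validators=[DataRequired()])
#     company_address = StringField(validators=[DataRequired()])
#     company_logo = StringField(validators=[Optional()])
#     ico = StringField(validators=[Optional()])
#     dic = StringField(validators=[Optional()])
#     ic_dph = StringField(validators=[Optional()])
#     phone = StringField(validators=[Optional()])
#     email = StringField(validators=[Optional(), Email()])
#     website = StringField(validators=[Optional()])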
mit
9,213,925,581,094,542,000
46
117
0.720659
false
3.624378
false
false
false
tundish/turberfield-utils
turberfield/utils/db.py
1
7983
#!/usr/bin/env python3 # encoding: UTF-8 # This file is part of turberfield. # # Turberfield is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Turberfield is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with turberfield. If not, see <http://www.gnu.org/licenses/>. from collections import namedtuple from collections import OrderedDict from collections.abc import Mapping import datetime import enum import logging import sqlite3 class Table: Column = namedtuple( "Column", ["name", "type", "isPK", "isNullable", "isUnique", "default", "fk"] ) lookup = OrderedDict() @staticmethod def declare_type(col): if isinstance(col.type, str): return "INTEGER" if "int" in col.type.lower() and col.isPK else col.type elif col.type is int: return "INTEGER" elif col.type is str: return "TEXT" elif col.type is bool: return "" elif col.type is bytes: return "BLOB" elif col.type is datetime.date: return "date" elif col.type is datetime.datetime: return "timestamp" elif "__conform__" in dir(col.type): return "BLOB" else: return "" def __init__(self, name, cols=[], lookup=None): self.name = name self.cols = cols if lookup is not None: self.lookup = lookup self.lookup[name] = self def sql_lines(self): yield "create table if not exists {0}(".format(self.name) pks = [col for col in self.cols if col.isPK] fks = OrderedDict() uqs = [col for col in self.cols if col.isUnique] constraints = len(pks) >= 2 or len(uqs) >= 2 for col in self.cols: ref = self.lookup.get(col.fk) if ref is not None: fks[col] = ref yield " ".join(( col.name, self.declare_type(col), "PRIMARY KEY" if col.isPK and len(pks) == 1 else "", "NOT NULL" if not col.isNullable else "", "UNIQUE" if col.isUnique and len(uqs) == 1 else "", "DEFAULT {0}".format(col.default) if col.default else "" )).rstrip() + ( "," if constraints or fks or col is not self.cols[-1] else "" ) if len(pks) >= 2: yield "PRIMARY KEY({0})".format(", ".join([i.name for i in pks])) if len(uqs) >= 2: yield "UNIQUE({0})".format(", ".join([i.name for i in uqs])) for col, refs in fks.items(): yield "FOREIGN KEY ({0.name}) REFERENCES {1.name}({2})".format( col, refs, ", ".join([col.name for col in refs.cols if col.isPK]) ) yield(")") class SQLOperation: @property def sql(self): raise NotImplementedError def __init__(self, *args, data=[]): self.tables = args self.data = data def run(self, con, log=None): """ Execute the SQL defined by this class. Returns the cursor for data extraction. 
""" cur = con.cursor() sql, data = self.sql try: if data is None: cur.executescript(sql) else: statements = sql.split(";") for s in statements: if isinstance(data, Mapping): cur.execute(s, data) else: cur.executemany(s, data) except (sqlite3.OperationalError, sqlite3.ProgrammingError) as e: if log is not None: log.error(self.sql) con.rollback() raise e else: con.commit() return cur class Creation(SQLOperation): @property def sql(self): return ( ";\n".join("\n".join(table.sql_lines()) for table in self.tables), None ) def run(self, con, log=None): cur = super().run(con) if cur is not None: cur.close() return self.tables class Insertion(SQLOperation): @property def sql(self): lines = [] for table in self.tables: if isinstance(self.data, Mapping): params = [i for i in table.cols if i.name in self.data] elif self.data: params = [i for i in table.cols if i.name in self.data[0]] lines.append( "insert into {table.name} ({columns}) values ({values})".format( table=table, columns=", ".join(i.name for i in params), values=", ".join(":{col.name}".format(col=col) for col in params) ) ) return (";\n".join(lines), self.data) class Connection: """ * Find target database files * Load extensions * Attach databases (readonly?) * Attach in-memory database * Execute pragmas * Discover state tables * Create state tables """ class CacheOptions(enum.Enum): shared = "cache=shared" private = "cache=private" class ImmutableOptions(enum.Enum): immutable = "immutable=1" mutable = "immutable=0" class ModeOptions(enum.Enum): read = "mode=ro" read_write = "mode=rw" read_write_create = "mode=rwc" memory = "mode=memory" @staticmethod def url(conn, options): return "file:{0}?{1}".format( conn, "&".join(i.value for i in options) ) @staticmethod def options(name=None, paths=[]): version = tuple(int(i) for i in sqlite3.sqlite_version.split(".")) if version < (3, 7, 13): raise UserWarning( "Your sqlite3 library is too old. Version 3.7.13 required at least." ) if not paths: if name is None: dbs = { ":memory:": [ Connection.CacheOptions.shared, ] } else: dbs = { name: [ Connection.CacheOptions.shared, Connection.ModeOptions.memory ] } elif len(paths) == 1: dbs = { paths[0]: [Connection.ModeOptions.read_write_create] } else: dbs = OrderedDict({ ":memory:": [ Connection.CacheOptions.private, Connection.ModeOptions.memory ] }) dbs.update( {i: [Connection.ModeOptions.read] for i in paths} ) return { "attach": dbs } def __init__(self, attach=[], log=None): self.log = log or logging.getLogger("Connection") self.attach = attach self.db = None for conn, options in self.attach.items(): self.log.debug(Connection.url(conn, options)) def __enter__(self): conn, options = list(self.attach.items())[0] self.db = sqlite3.connect( self.url(conn, options), uri=True, detect_types=sqlite3.PARSE_DECLTYPES ) self.db.row_factory = sqlite3.Row self.db.execute("pragma foreign_keys=ON") # states = list(gather_installed("turberfield.utils.states")) return self.db def __exit__(self, exc_type, exc_value, traceback): return False
gpl-3.0
8,149,039,837,599,711,000
29.469466
85
0.525742
false
4.159979
false
false
false
fnl/pymonad
pymonad/Either.py
1
3330
# --------------------------------------------------------
# (c) Copyright 2014 by Jason DeLaat.
# Licensed under BSD 3-clause licence.
# --------------------------------------------------------

from pymonad.Monad import *

class Either(Monad):
    """
    Represents a calculation that may either fail or succeed.
    An alternative to using exceptions. `Either` is an abstract type and should not
    be instantiated directly. Instead use `Right` (or its alias `Result`) and
    `Left` (or its alias `Error`)
    """

    def __init__(self, value):
        """ Raises a `NotImplementedError`. Use `Right` or `Left` instead. """
        raise NotImplementedError

    def __eq__(self, other):
        if not isinstance(other, Either):
            raise TypeError("Can't compare different types.")

    @classmethod
    def unit(cls, value):
        return Right(value)

class Left(Either):
    """
    Represents a calculation which has failed and contains an error code or message.
    To help with readability you may alternatively use the alias `Error`.
    """

    def __init__(self, errorMsg):
        """
        Creates a `Left` "calculation failed" object.
        `errorMsg` can be anything which gives information about what went wrong.
        """
        super(Either, self).__init__(errorMsg)

    def __eq__(self, other):
        super(Left, self).__eq__(other)
        if not isinstance(other, Left):
            return False
        elif (self.getValue() == other.getValue()):
            return True
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return "Left: " + str(self.getValue())

    def fmap(self, _):
        """ Returns the `Left` instance that was used to call the method. """
        return self

    def amap(self, _):
        """ Returns the `Left` instance that was used to call the method. """
        return self

    def bind(self, _):
        """ Returns the `Left` instance that was used to call the method. """
        return self

class Right(Either):
    """
    Represents a calculation which has succeeded and contains the result of that calculation.
    To help with readability you may alternatively use the alias `Result`.
    """

    def __init__(self, value):
        """
        Creates a `Right` "calculation succeeded" object.
        `value` is the actual calculated value of whatever operation was being performed
        and can be any type.
        """
        super(Either, self).__init__(value)

    def __eq__(self, other):
        super(Right, self).__eq__(other)
        if not isinstance(other, Right):
            return False
        elif (self.getValue() == other.getValue()):
            return True
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return "Right: " + str(self.getValue())

    def fmap(self, function):
        """
        Applies `function` to the contents of the `Right` instance and returns
        a new `Right` object containing the result.
        `function` should accept a single "normal" (non-monad) argument and return
        a non-monad result.
        """
        return Right(function(self.getValue()))

    def amap(self, functorValue):
        """ Applies the function stored in the functor to `functorValue` returning a new Either value. """
        return self.getValue() << functorValue

    def bind(self, function):
        """
        Applies `function` to the result of a previous calculation.
        `function` should accept a single "normal" (non-monad) argument and return
        either a `Left` or `Right` type object.
        """
        return function(self.getValue())

Error = Left
Result = Right
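
# A minimal usage sketch: chaining computations that may fail, without
# exceptions. Assumes the Monad base class stores the wrapped value
# (exposed via getValue, as the methods above rely on).
def _safe_div(num, den):
    return Error('division by zero') if den == 0 else Result(num / den)

print(Result(10).bind(lambda v: _safe_div(v, 2)))  # Right: 5.0
print(Result(10).bind(lambda v: _safe_div(v, 0)))  # Left: division by zero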
bsd-3-clause
8,568,344,353,019,406,000
28.732143
100
0.664264
false
3.596112
false
false
false
TraceContext/tracecontext-spec
test/tracecontext/tracestate.py
1
2749
from collections import OrderedDict
import re

class Tracestate(object):
    _KEY_WITHOUT_VENDOR_FORMAT = r'[a-z][_0-9a-z\-\*\/]{0,255}'
    _KEY_WITH_VENDOR_FORMAT = r'[0-9a-z][_0-9a-z\-\*\/]{0,240}@[a-z][_0-9a-z\-\*\/]{0,13}'
    _KEY_FORMAT = _KEY_WITHOUT_VENDOR_FORMAT + '|' + _KEY_WITH_VENDOR_FORMAT
    _VALUE_FORMAT = r'[\x20-\x2b\x2d-\x3c\x3e-\x7e]{0,255}[\x21-\x2b\x2d-\x3c\x3e-\x7e]'
    _DELIMITER_FORMAT_RE = re.compile('[ \t]*,[ \t]*')
    _KEY_VALIDATION_RE = re.compile('^(' + _KEY_FORMAT + ')$')
    _VALUE_VALIDATION_RE = re.compile('^(' + _VALUE_FORMAT + ')$')
    _MEMBER_FORMAT_RE = re.compile('^(%s)(=)(%s)$' % (_KEY_FORMAT, _VALUE_FORMAT))

    def __init__(self, *args, **kwds):
        if len(args) == 1 and not kwds:
            if isinstance(args[0], str):
                self._traits = OrderedDict()
                self.from_string(args[0])
                return
            if isinstance(args[0], Tracestate):
                self._traits = OrderedDict(args[0]._traits)
                return
        self._traits = OrderedDict(*args, **kwds)

    def __contains__(self, key):
        return key in self._traits

    def __len__(self):
        return len(self._traits)

    def __repr__(self):
        return '{}({!r})'.format(type(self).__name__, str(self))

    def __getitem__(self, key):
        return self._traits[key]

    def __setitem__(self, key, value):
        if not isinstance(key, str):
            raise ValueError('key must be an instance of str')
        if not re.match(self._KEY_VALIDATION_RE, key):
            raise ValueError('illegal key provided')
        if not isinstance(value, str):
            raise ValueError('value must be an instance of str')
        if not re.match(self._VALUE_VALIDATION_RE, value):
            raise ValueError('illegal value provided')
        self._traits[key] = value
        self._traits.move_to_end(key, last = False)

    def __str__(self):
        return self.to_string()

    def from_string(self, string):
        for member in re.split(self._DELIMITER_FORMAT_RE, string):
            if member:
                match = self._MEMBER_FORMAT_RE.match(member)
                if not match:
                    raise ValueError('illegal key-value format {!r}'.format(member))
                key, eq, value = match.groups()
                if key in self._traits:
                    raise ValueError('conflict key {!r}'.format(key))
                self._traits[key] = value
        return self

    def to_string(self):
        return ','.join(map(lambda key: key + '=' + self[key], self._traits))

    # make this an optional choice instead of enforcement during put/update
    # if the tracestate value size is bigger than 512 characters, the tracer
    # CAN decide to forward the tracestate
    def is_valid(self):
        if len(self) == 0:
            return False
        # combined header length MUST be less than or equal to 512 bytes
        if len(self.to_string()) > 512:
            return False
        # there can be a maximum of 32 list-members in a list
        if len(self) > 32:
            return False
        return True

    def pop(self):
        return self._traits.popitem()
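
# A minimal usage sketch: parse a W3C tracestate header, add an entry (newly
# set keys move to the front, as __setitem__ above enforces), and serialize.
if __name__ == '__main__':
    ts = Tracestate('congo=t61rcWkgMzE,rojo=00f067aa0ba902b7')
    ts['mycorp'] = 'beta'
    print(ts.to_string())  # mycorp=beta,congo=t61rcWkgMzE,rojo=00f067aa0ba902b7
    print(ts.is_valid())   # True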
apache-2.0
5,575,316,123,618,295,000
32.52439
87
0.650782
false
2.902851
false
false
false
nick41496/Beatnik
beatnik/api_manager/api_manager.py
1
3520
import logging import os import spotipy import sys from beatnik.api_manager.clients import AppleMusicApi, SoundcloudApi from beatnik.api_manager.link_converter import LinkConverter from beatnik.api_manager.link_parser import LinkParser from beatnik.api_manager.search_handler import SearchHandler from gmusicapi import Mobileclient from spotipy.oauth2 import SpotifyClientCredentials from tidalapi import Session class ApiManager: def __init__(self): self.logger = logging.getLogger(__name__) self.apple_api = self.get_apple_api() self.gpm_api = self.get_gpm_api() self.soundcloud_api = self.get_soundcloud_api() self.spotify_api = self.get_spotify_api() self.tidal_api = self.get_tidal_api() self.link_parser = LinkParser( self.apple_api, self.gpm_api, self.soundcloud_api, self.spotify_api, self.tidal_api) self.link_converter = LinkConverter( self.apple_api, self.gpm_api, self.soundcloud_api, self.spotify_api, self.tidal_api, self.link_parser) self.search_handler = SearchHandler(self.spotify_api, self.link_converter) def get_apple_api(self): try: key_id = os.environ['APPLE_KEY_ID'] issuer = os.environ['APPLE_KEY_ISSUER'] key = os.environ['APPLE_KEY'] return AppleMusicApi(key_id=key_id, issuer=issuer, key=key) except Exception as e: self.logger.error("Something went wrong getting Apple Music API") self.logger.error(e) return None def get_gpm_api(self): try: gpm_api = Mobileclient() username = os.environ['GPM_USERNAME'] password = os.environ['GPM_PASSWORD'] if (not gpm_api.login(username, password, Mobileclient.FROM_MAC_ADDRESS, 'en_US')): self.logger.error("Unable to login to Google Play Music.") return None return gpm_api except Exception as e: self.logger.error("Something went wrong getting Google Play Music API") self.logger.error(e) return None def get_soundcloud_api(self): try: return SoundcloudApi() except Exception as e: self.logger.error("Something went wrong getting Soundcloud API") self.logger.error(e) return None def get_spotify_api(self): try: client_credentials_manager = SpotifyClientCredentials() return spotipy.Spotify(client_credentials_manager=client_credentials_manager) except Exception as e: self.logger.error("Something went wrong getting Spotify API") self.logger.error(e) return None def get_tidal_api(self): try: session = Session() username = os.environ['TIDAL_USERNAME'] password = os.environ['TIDAL_PASSWORD'] if (not session.login(username, password)): self.logger.error("Unable to login to Tidal") return None return session except Exception as e: self.logger.error("Something went wrong getting Tidal API") self.logger.error(e) return None def convert_link(self, music): music = self.link_converter.convert_link(music) return music
gpl-3.0
1,703,179,569,399,725,300
34.2
95
0.598295
false
4.032073
false
false
false
acshi/osf.io
framework/mongo/utils.py
1
5732
# -*- coding: utf-8 -*-
import functools
import httplib as http

from django.core.paginator import Paginator
from django.db.models import QuerySet
import markupsafe
import pymongo
from modularodm.query import QueryBase
from modularodm.exceptions import NoResultsFound, MultipleResultsFound

from framework.exceptions import HTTPError

# MongoDB forbids field names that begin with "$" or contain ".". These
# utilities map to and from Mongo field names.

mongo_map = {
    '.': '__!dot!__',
    '$': '__!dollar!__',
}

def to_mongo(item):
    for key, value in mongo_map.items():
        item = item.replace(key, value)
    return item

def to_mongo_key(item):
    return to_mongo(item).strip().lower()

def from_mongo(item):
    for key, value in mongo_map.items():
        item = item.replace(value, key)
    return item


def unique_on(*groups):
    """Decorator for subclasses of `StoredObject`. Add a unique index on each
    group of keys provided.

    :param *groups: List of lists of keys to be indexed
    """
    def wrapper(cls):
        cls.__indices__ = getattr(cls, '__indices__', [])
        cls.__indices__.extend([
            {
                'key_or_list': [
                    (key, pymongo.ASCENDING)
                    for key in group
                ],
                'unique': True,
            }
            for group in groups
        ])
        return cls
    return wrapper


def get_or_http_error(Model, pk_or_query, allow_deleted=False, display_name=None):
    """Load an instance of Model by primary key or modularodm.Q query. Raise an appropriate
    HTTPError if no record is found or if the query fails to find a unique record

    :param type Model: StoredObject subclass to query
    :param pk_or_query:
    :type pk_or_query: either
      - a <basestring> representation of the record's primary key, e.g. 'abcdef'
      - a <QueryBase> subclass query to uniquely select a record, e.g.
        Q('title', 'eq', 'Entitled') & Q('version', 'eq', 1)

    :param bool allow_deleted: allow deleted records?
    :param basestring display_name:
    :raises: HTTPError(404) if the record does not exist
    :raises: HTTPError(400) if no unique record is found
    :raises: HTTPError(410) if the resource is deleted and allow_deleted = False
    :return: Model instance
    """

    display_name = display_name or ''
    # FIXME: Not everything that uses this decorator needs to be markupsafe, but OsfWebRenderer error.mako does...
    safe_name = markupsafe.escape(display_name)

    if isinstance(pk_or_query, QueryBase):
        try:
            instance = Model.find_one(pk_or_query)
        except NoResultsFound:
            raise HTTPError(http.NOT_FOUND, data=dict(
                message_long='No {name} record matching that query could be found'.format(name=safe_name)
            ))
        except MultipleResultsFound:
            raise HTTPError(http.BAD_REQUEST, data=dict(
                message_long='The query must match exactly one {name} record'.format(name=safe_name)
            ))
    else:
        instance = Model.load(pk_or_query)
        if not instance:
            raise HTTPError(http.NOT_FOUND, data=dict(
                message_long='No {name} record with that primary key could be found'.format(name=safe_name)
            ))
    if getattr(instance, 'is_deleted', False) and getattr(instance, 'suspended', False):
        raise HTTPError(451, data=dict(  # 451 - Unavailable For Legal Reasons
            message_short='Content removed',
            message_long='This content has been removed'
        ))
    if not allow_deleted and getattr(instance, 'is_deleted', False):
        raise HTTPError(http.GONE)
    return instance


def autoload(Model, extract_key, inject_key, func):
    """Decorator to autoload a StoredObject instance by primary key and inject into kwargs.
Raises an appropriate HTTPError (see #get_or_http_error) :param type Model: database collection model to query (should be a subclass of StoredObject) :param basestring extract_key: named URL field containing the desired primary key to be fetched from the database :param basestring inject_key: name the instance will be accessible as when it's injected as an argument to the function Example usage: :: def get_node(node_id): node = Node.load(node_id) ... becomes import functools autoload_node = functools.partial(autoload, Node, 'node_id', 'node') @autoload_node def get_node(node): ... """ @functools.wraps(func) def wrapper(*args, **kwargs): primary_key = kwargs.get(extract_key) instance = get_or_http_error(Model, primary_key) kwargs[inject_key] = instance return func(*args, **kwargs) return wrapper def paginated(model, query=None, increment=200, each=True): """Paginate a MODM query. :param StoredObject model: Model to query. :param Q query: Optional query object. :param int increment: Page size :param bool each: If True, each record is yielded. If False, pages are yielded. """ queryset = model.find(query) # Pagination requires an order by clause, especially when using Postgres. # see: https://docs.djangoproject.com/en/1.10/topics/pagination/#required-arguments if isinstance(queryset, QuerySet) and not queryset.ordered: queryset = queryset.order_by(queryset.model._meta.pk.name) paginator = Paginator(queryset.all(), increment) for page_num in paginator.page_range: page = paginator.page(page_num) if each: for item in page.object_list: yield item else: yield page.object_list
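
# A minimal sanity check of the field-name mapping above; the commented lines
# illustrate `paginated`, where `Node` stands in for a hypothetical
# StoredObject subclass (it needs a live model, so it is not executed here).
assert from_mongo(to_mongo('user.name$')) == 'user.name$'
assert to_mongo_key('My.Key') == 'my__!dot!__key'
# for node in paginated(Node, increment=500):
#     process(node)
# for page in paginated(Node, increment=500, each=False):
#     bulk_process(page)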
apache-2.0
-1,111,238,550,963,007,200
33.323353
114
0.643057
false
4.042313
false
false
false
simphony/simphony-openfoam
foam_controlwrapper/tests/test_run_time.py
1
4123
import unittest import os import shutil import tempfile from foam_controlwrapper.blockmesh_utils import create_quad_mesh from simphony.api import CUDS, Simulation from simphony.core.cuba import CUBA from simphony.cuds.meta import api from simphony.engine import EngineInterface class WrapperRunTestCase(unittest.TestCase): def setUp(self): case_name = "simplemeshIO" mesh_name = "simplemeshIO_mesh" cuds = CUDS(name=case_name) # physics model cfd = api.Cfd(name='default model') cuds.add([cfd]) self.sim_time = api.IntegrationTime(name='simulation_time', current=0.0, final=1.0, size=0.5) cuds.add([self.sim_time]) mat = api.Material(name='a_material') mat._data[CUBA.DENSITY] = 1.0 mat._data[CUBA.DYNAMIC_VISCOSITY] = 1.0 cuds.add([mat]) vel_inlet = api.Dirichlet(mat, name='vel_inlet') vel_inlet._data[CUBA.VARIABLE] = CUBA.VELOCITY vel_inlet._data[CUBA.VELOCITY] = (0.1, 0, 0) pres_inlet = api.Neumann(mat, name='pres_inlet') pres_inlet._data[CUBA.VARIABLE] = CUBA.PRESSURE vel_outlet = api.Neumann(mat, name='vel_outlet') vel_outlet._data[CUBA.VARIABLE] = CUBA.VELOCITY pres_outlet = api.Dirichlet(mat, name='pres_outlet') pres_outlet._data[CUBA.VARIABLE] = CUBA.PRESSURE pres_outlet._data[CUBA.PRESSURE] = 0.0 vel_walls = api.Dirichlet(mat, name='vel_walls') vel_walls._data[CUBA.VARIABLE] = CUBA.VELOCITY vel_walls._data[CUBA.VELOCITY] = (0, 0, 0) pres_walls = api.Neumann(mat, name='pres_walls') pres_walls._data[CUBA.VARIABLE] = CUBA.PRESSURE vel_frontAndBack = api.EmptyCondition(name='vel_frontAndBack') vel_frontAndBack._data[CUBA.VARIABLE] = CUBA.VELOCITY pres_frontAndBack = api.EmptyCondition(name='pres_frontAndBack') pres_frontAndBack._data[CUBA.VARIABLE] = CUBA.PRESSURE inlet = api.Boundary(name='inlet', condition=[vel_inlet, pres_inlet]) walls = api.Boundary(name='walls', condition=[vel_walls, pres_walls]) outlet = api.Boundary(name='outlet', condition=[vel_outlet, pres_outlet]) frontAndBack = api.Boundary(name='frontAndBack', condition=[vel_frontAndBack, pres_frontAndBack]) cuds.add([inlet, walls, outlet, frontAndBack]) corner_points = [(0.0, 0.0, 0.0), (5.0, 0.0, 0.0), (5.0, 5.0, 0.0), (0.0, 5.0, 0.0), (0.0, 0.0, 1.0), (5.0, 0.0, 1.0), (5.0, 5.0, 1.0), (0.0, 5.0, 1.0)] self.mesh_path = tempfile.mkdtemp() mesh = create_quad_mesh(self.mesh_path, mesh_name, corner_points, 5, 5, 5) cuds.add([mesh]) self.cuds = cuds self.sim = Simulation(cuds, 'OpenFOAM', engine_interface=EngineInterface.FileIO) self.mesh_in_cuds = self.cuds.get_by_name(mesh_name) def tearDown(self): if os.path.exists(self.mesh_in_cuds.path): shutil.rmtree(self.mesh_in_cuds.path) if os.path.exists(self.mesh_path): shutil.rmtree(self.mesh_path) def test_run_time(self): """Test that field variable value is changed after consecutive calls of run method """ self.sim.run() for cell in self.mesh_in_cuds.iter(item_type=CUBA.CELL): old_vel = cell.data[CUBA.VELOCITY] old_pres = cell.data[CUBA.PRESSURE] cell_uid = cell.uid self.sim.run() cell = self.mesh_in_cuds.get(cell_uid) new_vel = cell.data[CUBA.VELOCITY] new_pres = cell.data[CUBA.PRESSURE] self.assertNotEqual(old_vel, new_vel) self.assertNotEqual(old_pres, new_pres) if __name__ == '__main__': unittest.main()
gpl-2.0
-8,620,174,854,293,740,000
35.8125
77
0.564152
false
3.17398
true
false
false
project-owner/Peppy
player/client/vlcclient.py
1
10907
# Copyright 2016-2021 Peppy Player [email protected] # # This file is part of Peppy Player. # # Peppy Player is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Peppy Player is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Peppy Player. If not, see <http://www.gnu.org/licenses/>. import threading import time import urllib from player.client.baseplayer import BasePlayer from vlc import Meta from vlc import EventType from queue import Queue from util.fileutil import FILE_PLAYLIST, FILE_AUDIO class Vlcclient(BasePlayer): """ This class extends base player and provides communication with VLC player using Python binding for 'libvlc' library """ def __init__(self): """ Initializer. Starts separate threads for handling VLC events """ self.RADIO_MODE = "radio" BasePlayer.__init__(self) self.mode = self.RADIO_MODE self.instance = None self.player = None self.media = None self.current_track = "" self.seek_time = "0" self.cd_track_id = None self.cd_drive_name = None self.END_REACHED = "end reached" self.TRACK_CHANGED = "track changed" self.PAUSED = "paused" self.player_queue = Queue() self.threads_running = False self.changing_volume = False def start_client(self): """ Start threads. """ self.threads_running = True thread_1 = threading.Thread(target = self.radio_stream_event_listener) thread_1.start() thread_2 = threading.Thread(target = self.handle_event_queue) thread_2.start() def stop_client(self): """ Stop threads """ with self.lock: self.threads_running = False def set_proxy(self, proxy_process, proxy=None): """ Create new VLC player """ self.instance = proxy_process self.proxy = proxy self.player = self.instance.media_player_new() player_mgr = self.player.event_manager() player_mgr.event_attach(EventType.MediaPlayerEndReached, self.player_callback, [self.END_REACHED]) player_mgr.event_attach(EventType.MediaPlayerPlaying, self.player_callback, [self.TRACK_CHANGED]) def player_callback(self, event, data): """ Player callback method :param event: event to handle :param data: event data """ if data: self.player_queue.put(data[0]) def radio_stream_event_listener(self): """ Starts the loop for listening VLC events for radio track change """ while self.threads_running: with self.lock: if self.media and self.mode == self.RADIO_MODE: t = self.media.get_meta(Meta.NowPlaying) if t and t != self.current_track: self.current_track = t if self.enabled: self.notify_player_listeners({"current_title": t}) time.sleep(1) def handle_event_queue(self): """ Handling player event queue """ if not self.enabled: return while self.threads_running: d = self.player_queue.get() # blocking line if d == self.END_REACHED: self.notify_end_of_track_listeners() self.player_queue.task_done() elif d == self.TRACK_CHANGED: self.track_changed() self.player_queue.task_done() def track_changed(self): """ Handle track change event """ if not self.enabled: return if self.mode == self.RADIO_MODE: return current = {"source": "player"} current["state"] = "playing" t = self.media.get_meta(Meta.Title) if t == ".": return if self.cd_track_id and t.startswith("cdda:"): current["cd_track_id"] = 
self.cd_track_id
            if self.cd_tracks:
                t = self.cd_tracks[int(self.cd_track_id) - 1].name
            else:
                t = self.cd_drive_name + self.cd_track_title + " " + self.cd_track_id

        m = self.media.get_mrl()
        m = m[m.rfind("/") + 1:]
        m = urllib.parse.unquote(m)
        current["file_name"] = m
        current["current_title"] = t
        current["Time"] = str(self.player.get_length()/1000)

        if not self.seek_time:
            self.seek_time = "0"
        current["seek_time"] = self.seek_time
        self.notify_player_listeners(current)

    def set_player_volume_control(self, flag):
        """ Player Volume Control type setter

        :param flag: True - player volume control type, False - amixer or hardware volume control type
        """
        BasePlayer.set_player_volume_control(self, flag)
        if not self.player_volume_control:
            self.set_volume(100)

    def play(self, state):
        """ Start playing specified track/station. First it cleans the playlist
        then adds new track/station to the list and then starts playback
        syntax for CD:
        self.media = self.instance.media_new("cdda:///E:/", (":cdda-track=7"))

        :param state: button state which contains the track/station info
        """
        url = None
        self.enabled = True

        if state == None:
            if self.state != None:
                url = getattr(self.state, "url", None)
            else:
                url = None
        else:
            url = getattr(state, "url", None)
            self.state = state

        if url == None:
            return

        url = url.replace("\\", "/").replace("\"", "")
        track_time = getattr(self.state, "track_time", None)
        if track_time == None:
            track_time = "0"
        else:
            track_time = str(track_time)
            if ":" in track_time:
                track_time = track_time.replace(":", ".")
        self.seek_time = track_time

        s = getattr(self.state, "playback_mode", None)

        if s and s == FILE_PLAYLIST:
            self.stop()
            self.mode = FILE_PLAYLIST
            self.enabled = True
        elif s and s == FILE_AUDIO:
            self.mode = FILE_AUDIO
        else:
            self.mode = self.RADIO_MODE

        if url.startswith("http") and self.mode != self.RADIO_MODE:
            url = self.encode_url(url)

        with self.lock:
            file_name = getattr(self.state, "file_name", None)
            if file_name and file_name.startswith("cdda://"):
                parts = file_name.split()
                self.cd_track_id = parts[1].split("=")[1]
                self.cd_drive_name = parts[0][len("cdda:///"):]
                self.media = self.instance.media_new(parts[0], parts[1])
            else:
                self.media = self.instance.media_new(url)
            self.player.set_media(self.media)

            self.player.play()
            try:
                self.player.set_time(int(float(self.seek_time)) * 1000)
            except:
                pass

            if self.player_volume_control and getattr(self.state, "volume", None) != None:
                self.set_volume(int(self.state.volume))

    def stop(self, state=None):
        """ Stop playback """

        with self.lock:
            self.enabled = False
            self.player.stop()

    def seek(self, time):
        """ Jump to the specified position in the track

        :param time: time position in track
        """
        if ":" in time:
            self.seek_time = self.get_seconds_from_string(time)
        else:
            self.seek_time = time

        with self.lock:
            msec = int(float(self.seek_time) * 1000)
            t = threading.Thread(target=self.seek_method, args=[msec])
            t.start()

    def seek_method(self, msec):
        """ Seek track thread method

        :param msec: milliseconds for new position
        """
        self.player.set_time(msec)

    def play_pause(self, pause_flag=None):
        """ Play/Pause playback

        :param pause_flag: play/pause flag
        """
        with self.lock:
            self.seek_time = self.get_current_track_time()
            self.player.pause()

    def set_volume(self, level):
        """ Set volume.
:param level: new volume level """ self.player.audio_set_volume(int(level)) if getattr(self, "state", None) != None: if self.state.volume == level: return self.state.volume = level v = self.get_volume() if v != int(level): # usually initial volume setting if hasattr(self, "volume_thread"): self.volume_thread.join() self.volume_thread = threading.Thread(target=self.set_volume_level, args=[level]) self.volume_thread.start() def set_volume_level(self, level): """ Set volume level in separate thread :param level: volume level """ n = 0 max_n = 20 vol = -2 while n < max_n and level != vol: self.player.audio_set_volume(int(level)) time.sleep(0.1) vol = self.get_volume() n += 1 def get_volume(self): """ Return current volume level :return: volume level or -1 if not available """ with self.lock: return self.player.audio_get_volume() def mute(self): """ Mute """ with self.lock: self.player.audio_toggle_mute() def current(self): """ Return the current song """ pass def shutdown(self): """ Shutdown the player """ with self.lock: self.player.stop() def get_current_track_time(self): """ Return current track time :return: current track time """ t = self.player.get_time()/1000 return str(t) def get_current_playlist(self): """ Return current playlist :return: current playlist """ return self.playlist
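
# A minimal usage sketch, heavily hedged: assumes python-vlc is installed,
# that BasePlayer's initializer provides the `lock`/listener plumbing the
# methods above rely on, and that a bare object carrying a `url` attribute is
# enough to start a radio stream. `_State` is a hypothetical stand-in for
# Peppy's button state.
if __name__ == '__main__':
    import vlc

    class _State(object):  # hypothetical stand-in for Peppy's button state
        url = 'http://example.com/stream'

    client = Vlcclient()
    client.set_proxy(vlc.Instance())
    client.start_client()
    client.play(_State())
    # ... later:
    client.stop()
    client.stop_client()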
gpl-3.0
-9,174,182,375,290,047,000
31.85241
106
0.539103
false
4.155048
false
false
false
googleapis/googleapis-gen
google/ads/googleads/v6/googleads-py/google/ads/googleads/v6/errors/types/user_data_error.py
1
1227
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore __protobuf__ = proto.module( package='google.ads.googleads.v6.errors', marshal='google.ads.googleads.v6', manifest={ 'UserDataErrorEnum', }, ) class UserDataErrorEnum(proto.Message): r"""Container for enum describing possible user data errors. """ class UserDataError(proto.Enum): r"""Enum describing possible request errors.""" UNSPECIFIED = 0 UNKNOWN = 1 OPERATIONS_FOR_CUSTOMER_MATCH_NOT_ALLOWED = 2 TOO_MANY_USER_IDENTIFIERS = 3 USER_LIST_NOT_APPLICABLE = 4 __all__ = tuple(sorted(__protobuf__.manifest))
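
# A minimal usage sketch, assuming the generated package is importable;
# proto-plus enums subclass IntEnum, so error codes can be matched by name or
# by numeric value.
if __name__ == '__main__':
    err = UserDataErrorEnum.UserDataError.TOO_MANY_USER_IDENTIFIERS
    print(err.name, int(err))  # TOO_MANY_USER_IDENTIFIERS 3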
apache-2.0
-305,280,532,206,740,100
30.461538
74
0.693562
false
3.870662
false
false
false
jasonleaster/LeetCode
Largest_Number/ln.py
1
1325
# LeetCode "Largest Number": arrange a list of non-negative integers so that
# their concatenation forms the largest possible number.
# (Rewritten into a runnable form: the pairwise digit comparison the original
# sketched is equivalent to ordering by concatenation, putting `a` before `b`
# whenever a+b > b+a.)

from functools import cmp_to_key


def compare(a, b):
    # A negative result sorts `a` before `b`; the string pair whose
    # concatenation is larger should come first.
    if a + b > b + a:
        return -1
    if a + b < b + a:
        return 1
    return 0


def lgst_num(array):
    if not array:
        return None
    strs = [str(num) for num in array]
    strs.sort(key=cmp_to_key(compare))
    result = ''.join(strs)
    # Collapse results like "000" (all-zero input) to a single "0".
    return '0' if result[0] == '0' else result


print(lgst_num([5, 2, 6, 3, 1, 4]))  # -> 654321
gpl-2.0
-8,309,436,790,888,531,000
19.075758
56
0.392453
false
3.486842
false
false
false
Connexions/draft-transforms
drafttransform/cli.py
1
3331
#!/usr/bin/env python # -*- coding: utf-8 -*- # ### # Copyright (c) 2013, Rice University # This software is subject to the provisions of the GNU Affero General # Public License version 3 (AGPLv3). # See LICENCE.txt for details. # ### """The command-line interface for transforming cnxml files directly in workspace.""" import os import sys import argparse from . import transforms from workgroup import Workgroup, listWorkspaces DESCRIPTION = __doc__ DEFAULT_HOST = "qa.cnx.org" def main(argv=None): """Main functions used to interface directly with the user.""" parser = argparse.ArgumentParser(description = DESCRIPTION) parser.add_argument('-H', '--host', default = DEFAULT_HOST, help = "A legacy connexions authoring site") parser.add_argument('-a', '--auth', required = True, help = "authentication info [userid:password]") parser.add_argument('-w', '--workgroup', help = "Id of workgroup: defaults to user's private workspace") parser.add_argument('-l', '--list', action = "store_true", help = "List all workspaces") parser.add_argument('-p', '--publish', metavar = 'message', help = "Publish after transform") parser.add_argument('-P', '--publish_only', metavar = 'message', help = "Publish all drafts, no download or transform") parser.add_argument('-u', '--upload', action = "store_true", help="Upload transformed doc back to draft") parser.add_argument('-s', '--save-dir', help = "Directory to save transformed output to, as <moduleid>.xml") subparsers = parser.add_subparsers(help = "transform step") transforms.load_cli(subparsers) if len(sys.argv) < 5 or sys.argv[0].startswith('-'): sys.argv.append(transforms.get_default_cli_command_name()) print sys.argv args = parser.parse_args(argv) if hasattr(args,'save_dir') and args.save_dir or hasattr(args,'save_dir_d') and args.save_dir_d: save_dir = args.save_dir or args.save_dir_d else: save_dir = None cmmd = args.cmmd if args.list: print '\n'.join(listWorkspaces(**vars(args))) return # walk workgroup, look over and retrieve cnxmls, transform, then save and # optionally publish. workgroup = Workgroup(**vars(args)) print workgroup.url for mod in workgroup.modules(): if args.publish_only: mod.publish(args.publish_only) else: cnxml = mod.cnxml() new_cnxml = cmmd(cnxml,**vars(args)) if cnxml and new_cnxml: print '%s: %s %s' % (mod.moduleid,len(cnxml),len(new_cnxml)) if save_dir: if not os.path.exists(save_dir): os.mkdir(save_dir) with open(os.path.join(save_dir,'%s.xml' % (mod.moduleid)), 'w') as m: m.write(new_cnxml) if args.upload: mod.save(new_cnxml) if args.publish: mod.publish(args.publish) return # cmmd(**vars(args)) if __name__ == '__main__': sys.exit(main())
agpl-3.0
7,051,750,747,463,857,000
36.426966
100
0.567998
false
3.928066
false
false
false
mjenrungrot/competitive_programming
UVa Online Judge/12394.py
1
1476
# ============================================================================= # Author: Teerapat Jenrungrot - https://github.com/mjenrungrot/ # FileName: 12394.py # Description: UVa Online Judge - 12394 # ============================================================================= while True: K, N = list(map(int, input().split())) if K == 0 and N == 0: break data = [] for i in range(N): tmp = input().split() name = tmp[0] papers = list(map(int, tmp[1:])) data.append((name, papers)) checked = [True for i in range(N)] # Check number of reviews n_reviews = [0 for i in range(N)] for i in range(N): for j in range(K): n_reviews[data[i][1][j] - 1] += 1 for i in range(N): if n_reviews[i] != K: checked[i] = False # Check collaborator for i in range(N): for j in range(K): if data[i][0] == data[data[i][1][j] - 1][0]: checked[data[i][1][j] - 1] = False # Check same paper for i in range(N): for j in range(K): for k in range(j+1, K): if data[i][1][j] == data[i][1][k]: checked[data[i][1][j] - 1] = False ans = 0 for i in range(N): if not checked[i]: ans += 1 if ans == 0: print("NO PROBLEMS FOUND") elif ans == 1: print("1 PROBLEM FOUND") else: print("{} PROBLEMS FOUND".format(ans))
mit
-6,404,102,092,147,442,000
27.941176
79
0.436314
false
3.354545
false
false
false
asicontech/software-for-equipment
bfgminer/usbtest.py
2
3439
#!/usr/bin/env python
# Copyright 2012-2013 Xiangfu
# Copyright 2012-2013 Andrew Smith
# Copyright 2013 Luke Dashjr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version. See COPYING for more details.

# Linux usage: ./usbtest.py /dev/ttyUSB0 0xhexcodes|string|icarus
#  OR python usbtest.py /dev/ttyUSB0 0xhexcodes|string|icarus
#
# Windows usage: ./usbtest.py COM1 0xhexcodes|string|icarus
#
# sends the data specified to the USB device and waits
# for a reply then displays it
#
# the data can be:
#   0xhexcodes: e.g. 0x68656c6c6f20776f726c640a
#       would send "hello world\n"
#
#   string: e.g. sendsometext
#
#   icarus: sends 2 known block payloads for an icarus device
#       and shows the expected and actual answers if it's
#       a working V3 icarus

import sys
import serial
import binascii

if len(sys.argv) < 2:
    sys.stderr.write("Usage: " + sys.argv[0] + " device strings...\n")
    sys.stderr.write(" where device is either like /dev/ttyUSB0 or COM1\n")
    sys.stderr.write(" and strings are either '0xXXXX' or 'text'\n")
    sys.stderr.write(" if the first string is 'icarus' the rest are ignored\n")
    sys.stderr.write(" and 2 valid icarus test payloads are sent with results displayed\n")
    sys.stderr.write("\nAfter any command is sent it waits up to 10 seconds for a reply\n");
    sys.exit("Aborting")

# Open with a 10 second timeout - just to be sure
ser = serial.Serial(sys.argv[1], 115200, serial.EIGHTBITS, serial.PARITY_NONE, serial.STOPBITS_ONE, 10, False, False, 5)

if sys.argv[2] == "icarus":

    # This shows how Icarus uses the block and midstate data
    # This will produce nonce 063c5e01
    block = "0000000120c8222d0497a7ab44a1a2c7bf39de941c9970b1dc7cdc400000079700000000e88aabe1f353238c668d8a4df9318e614c10c474f8cdf8bc5f6397b946c33d7c4e7242c31a098ea500000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
    midstate = "33c5bf5751ec7f7e056443b5aee3800331432c83f404d9de38b94ecbf907b92d"

    rdata2 = binascii.a2b_hex(block.encode('ascii'))[95:63:-1]
    rmid = binascii.a2b_hex(midstate.encode('ascii'))[::-1]
    payload = rmid + rdata2

    print("Push payload to icarus: " + binascii.hexlify(payload).decode('ascii'))
    ser.write(payload)

    b=ser.read(4)
    print("Result:(should be: 063c5e01): " + binascii.hexlify(b).decode('ascii'))

    # Just another test
    payload2 = "ce92099c5a80bb81c52990d5c0924c625fd25a535640607d5a4bdf8174e2c8d500000000000000000000000080000000000000000b290c1a42313b4f21b5bcb8"
    print("Push payload to icarus: " + payload2)
    ser.write(binascii.a2b_hex(payload2.encode('ascii')))

    b=ser.read(4)
    print("Result:(should be: 8e0b31c5): " + binascii.hexlify(b).decode('ascii'))
else:
    data = b""
    for arg in sys.argv[2::]:
        if arg[0:2:] == '0x':
            data += binascii.a2b_hex(arg[2::].encode('ascii'))
        else:
            data += arg.encode('latin-1')

    print("Sending: 0x" + binascii.hexlify(data).decode('ascii'))
    ser.write(data)

    # If you're expecting more than one linefeed terminated reply,
    # you'll only see the first one
    # AND with no linefeed, this will wait the 10 seconds before returning
    print("Waiting up to 10 seconds ...")
    b=ser.readline()
    print("Result: hex 0x" + binascii.hexlify(b).decode('ascii'))
    print("Result: asc %s" % (repr(b),))

ser.close()
apache-2.0
6,122,057,605,471,330,000
37.640449
267
0.742658
false
2.807347
false
false
false
pwwang/bioprocs
bioprocs/scripts/imtherapy/pNetMHC.py
1
5476
from pathlib import Path

from diot import Diot
from bioprocs.utils import shell2 as shell, logger
from bioprocs.utils.parallel import Parallel, distributeList

{%from os import path%}
{%from pyppl.utils import always_list%}

infile = {{i.infile | quote}}
afile = {{i.afile | ?path.isfile | =readlines | !always_list | repr}}
outfile = Path({{o.outfile | quote}})
allfile = {{o.outfile | prefix | @append: '.all' | @append: ext(o.outfile) | quote}}
netmhc = {{args.netmhc | quote}}
isfa = {{args.isfa | repr}}
nthread = {{args.nthread | repr}}
params = {{args.params | repr}}
tmpdir = {{args.tmpdir | repr}}
lens = {{args.lens | ?isinstance: list | =@join: ',' | quote}}

shell.load_config(netmhc = netmhc)

# support HLA-A*03:79 -> HLA-A0379
alleles = [allele.strip().replace('*', '').replace(':', '') for allele in afile if 'HLA-' in allele]
valid_alleles = shell.netmhc(listMHC = True).splitlines()

for i in range(nthread):
    shell.mkdir(p = outfile.parent.joinpath('threads', str(i+1)))

# split infile
if isfa:
    seqs = [line.strip() for line in shell.grep('>', infile).splitlines() if line.strip()]
    seqs_to_threads = distributeList(seqs, nthread)
    seqs = {}
    for i, tseqs in enumerate(seqs_to_threads):
        for tseq in tseqs:
            seqs[tseq] = i
    handlers = {}
    lastindex = None
    with open(infile) as fin:
        for line in fin:
            if line.startswith('>'):
                seq = line.strip()
                index = seqs[seq]
                if index not in handlers:
                    handlers[index] = open(outfile.parent.joinpath('threads', str(index+1), 'peptides.txt'), 'w')
                handlers[index].write(line)
                lastindex = index
            elif lastindex is None:
                raise IndexError('Sequence tag not found!')
            else:
                handlers[lastindex].write(line)
    for handler in handlers.values():
        if not handler.closed:
            handler.close()
else:
    with open(infile) as fin:
        peptides = fin.readlines()
    pep_to_threads = distributeList(peptides, nthread)
    for i, pep in enumerate(pep_to_threads):
        with open(outfile.parent.joinpath('threads', str(i+1), 'peptides.txt'), 'w') as fpep:
            fpep.write(''.join(pep))

"""
PARAMETER            DEFAULT VALUE        DESCRIPTION
[-a filename]        HLA-A0201            HLA allele name
[-f filename]                             Input file (by default in FASTA format)
[-p]                 0                    Switch on if input is a list of peptides (Peptide format)
[-l string]          9                    Peptide length (multiple lengths separated by comma e.g.
8,9,10) [-s] 0 Sort output on decreasing affinity [-rth float] 0.500000 Threshold for high binding peptides (%Rank) [-rlt float] 2.000000 Threshold for low binding peptides (%Rank) [-listMHC] 0 Print list of alleles included in netMHC [-xls] 0 Save output to xls file [-xlsfile filename] NetMHC_out.xls File name for xls output [-t float] -99.900002 Threshold for output [-thrfmt filename] $NETMHC/data/threshold/%s.thr Format for threshold filenames [-hlalist filename] $NETMHC/data/allelelist File with covered HLA names [-rdir filename] $NETMHC Home directory for NetMHC [-tdir filename] $TMPDIR Temporary directory (Default $$) [-syn filename] $NETMHC/data/synlists/%s.synlist Format of synlist file [-v] 0 Verbose mode [-dirty] 0 Dirty mode, leave tmp dir+files [-inptype int] 0 Input type [0] FASTA [1] Peptide [-version filename] $NETMHC/data/version File with version information [-w] 0 w option for webface """ # common options params.tdir = tmpdir params.l = lens def do_one(allele, ifile, ithread): ps = params.copy() ps.p = not isfa ps.f = ifile ps.a = allele ps._out = outfile.parent.joinpath('threads', str(ithread+1), allele + '.out.txt') ps._debug = True shell.netmhc(**ps) args = [] for allele in alleles: if allele not in valid_alleles: logger.warning('Not a valid allele: %s', allele) for i in range(nthread): if outfile.parent.joinpath('threads', str(i+1), 'peptides.txt').is_file(): args.append((allele, outfile.parent.joinpath('threads', str(i+1), 'peptides.txt'), i)) if not args: raise ValueError('No valid alleles found.') para = Parallel(nthread = nthread) para.run(do_one, args) # merge results with open(outfile, 'w') as fout, open(allfile, 'w') as fall: header_written = False pos_written = False for i, ofile in enumerate(outfile.parent.joinpath('threads').glob('*/*.out.txt')): with open(ofile) as fo: for line in fo: line = line.strip() if not line or line.startswith('-'): continue if header_written and line.startswith('#'): continue if i == 0 and line.startswith('#'): fout.write(line + '\n') fall.write(line + '\n') else: header_written = True parts = line.split() if parts and parts[0] == 'pos' and i == 0 and not pos_written: fout.write('\t'.join(parts) + '\n') fall.write('\t'.join(parts) + '\n') pos_written = True elif not parts or parts[0] in ('pos', 'Protein'): continue elif len(parts) > 14: del parts[-2] fout.write('\t'.join(parts) + '\n') fall.write('\t'.join(parts) + '\n') else: fall.write('\t'.join(parts) + '\n')
mit
-5,011,356,954,891,710,000
37.56338
107
0.60756
false
3.122007
false
false
false
minogame/bilinear_tensorflow
src/model.py
1
14668
import tensorflow as tf import tensorflow.contrib.layers as cl from gridconv_v3 import gridconv2d from deformconv import deformconv2d from utils import log_weights # # # # # # # # # CIFAR # # # # # # # # # # The network is built based on 'NCHW'. def normal_cnn_cifar(name, reuse=False): @log_weights def normal_cnn(x, is_training): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} with tf.variable_scope(name, reuse=reuse): x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv1_1') x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv1_2') x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME') x = cl.conv2d(x, num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv2_1') x = cl.conv2d(x, num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv2_2') x = cl.conv2d(x, num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv2_3') x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME') x = cl.conv2d(x, num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv3_1') x = cl.conv2d(x, num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv3_2') x = cl.conv2d(x, num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv3_3') x = tf.reduce_mean(x, [2, 3]) x = cl.fully_connected(x, num_outputs=10, activation_fn=None) return x return normal_cnn # The network is built based on 'NCHW'. 
def trash_cnn_cifar(name, reuse=False): @log_weights def trash_cnn(x, is_training): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} with tf.variable_scope(name, reuse=reuse): # x = gridconv2d(x, scope='Conv1_1', num_outputs=32, kernel_size=[3, 3], stride=1, # activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', # normalizer_fn=cl.batch_norm, normalizer_params=bn_params) # x = gridconv2d(x, scope='Conv1_2', num_outputs=32, kernel_size=[3, 3], stride=1, # activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', # normalizer_fn=cl.batch_norm, normalizer_params=bn_params) # x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW') x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv1_1') x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv1_2') x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME') x = gridconv2d(x, scope='Conv2_1', num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = gridconv2d(x, scope='Conv2_2', num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = gridconv2d(x, scope='Conv2_3', num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME') x = gridconv2d(x, scope='Conv3_1', num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = gridconv2d(x, scope='Conv3_2', num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = gridconv2d(x, scope='Conv3_3', num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = tf.reduce_mean(x, [2, 3]) x = cl.fully_connected(x, num_outputs=10, activation_fn=None) return x return trash_cnn # The network is built based on 'NCHW'. 
def deform_cnn_cifar(name, reuse=False): @log_weights def deform_cnn(x, is_training): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} with tf.variable_scope(name, reuse=reuse): # x = gridconv2d(x, scope='Conv1_1', num_outputs=32, kernel_size=[3, 3], stride=1, # activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', # normalizer_fn=cl.batch_norm, normalizer_params=bn_params) # x = gridconv2d(x, scope='Conv1_2', num_outputs=32, kernel_size=[3, 3], stride=1, # activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', # normalizer_fn=cl.batch_norm, normalizer_params=bn_params) # x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW') x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv1_1') x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='Conv1_2') x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME') x = deformconv2d(x, scope='Conv2_1', num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = deformconv2d(x, scope='Conv2_2', num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = deformconv2d(x, scope='Conv2_3', num_outputs=64, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME') x = deformconv2d(x, scope='Conv3_1', num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = deformconv2d(x, scope='Conv3_2', num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = deformconv2d(x, scope='Conv3_3', num_outputs=128, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params) x = tf.reduce_mean(x, [2, 3]) x = cl.fully_connected(x, num_outputs=10, activation_fn=None) return x return deform_cnn # # # # # # # # # CIFAR RESNET # # # # # # # # # def residual(name, l, is_training, increase_dim=False, first=False): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} shape = l.get_shape().as_list() in_channel = shape[1] if increase_dim: out_channel = in_channel * 2 stride1 = 2 else: out_channel = in_channel stride1 = 1 with tf.variable_scope(name) as scope: b1 = l if first else tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) c1 = cl.conv2d(b1, num_outputs=out_channel, kernel_size=[3, 3], stride=stride1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='conv1') c2 = cl.conv2d(c1, num_outputs=out_channel, kernel_size=[3, 3], stride=1, activation_fn=None, padding='SAME', data_format='NCHW', scope='conv2') if increase_dim: l = cl.avg_pool2d(l, kernel_size=2, stride=2, data_format='NCHW') l = tf.pad(l, [[0, 
0], [in_channel // 2, in_channel // 2], [0, 0], [0, 0]]) l = c2 + l return l def grid_residual(name, l, is_training, increase_dim=False, first=False, one_c=False): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} shape = l.get_shape().as_list() in_channel = shape[1] if increase_dim: out_channel = in_channel * 2 stride1 = 2 else: out_channel = in_channel stride1 = 1 with tf.variable_scope(name) as scope: b1 = l if first else tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) c1 = gridconv2d(b1, scope='conv1', num_outputs=out_channel, kernel_size=[3, 3], stride=stride1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', one_c=one_c, normalizer_fn=cl.batch_norm, normalizer_params=bn_params) c2 = gridconv2d(c1, scope='conv2', num_outputs=out_channel, kernel_size=[3, 3], stride=1, activation_fn=None, padding='SAME', data_format='NCHW', one_c=one_c, normalizer_fn=None, normalizer_params=None) if increase_dim: l = cl.avg_pool2d(l, kernel_size=2, stride=2, data_format='NCHW') l = tf.pad(l, [[0, 0], [in_channel // 2, in_channel // 2], [0, 0], [0, 0]]) l = c2 + l return l # def resnet(name, n): # def cnn(x, is_training): # bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} # with tf.variable_scope(name) as scope: # l = cl.conv2d(x, num_outputs=16, kernel_size=[3, 3], stride=1, # activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', # normalizer_fn=cl.batch_norm, normalizer_params=bn_params, # scope='conv0') # l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) # l = residual('res1.0', l, is_training, first=True) # for k in range(1, n): # l = residual('res1.{}'.format(k), l, is_training) # # 32,c=16 # l = grid_residual('res2.0', l, is_training, increase_dim=True) # for k in range(1, n): # l = grid_residual('res2.{}'.format(k), l, is_training) # # 16,c=32 # l = grid_residual('res3.0', l, is_training, increase_dim=True) # for k in range(1, n): # l = grid_residual('res3.' + str(k), l, is_training) # l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) # # 8,c=64 # l = tf.reduce_mean(l, [2, 3]) # l = cl.fully_connected(l, num_outputs=10, activation_fn=None) # return l # return cnn def resnet(name, n, grid=False): def cnn(x, is_training): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} with tf.variable_scope(name) as scope: l = cl.conv2d(x, num_outputs=16, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='conv0') l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) l = residual('res1.0', l, is_training, first=True) for k in range(1, n): l = residual('res1.{}'.format(k), l, is_training) # 32,c=16 l = residual('res2.0', l, is_training, increase_dim=True) for k in range(1, n): l = residual('res2.{}'.format(k), l, is_training) # 16,c=32 l = residual('res3.0', l, is_training, increase_dim=True) for k in range(1, n): l = residual('res3.' 
+ str(k), l, is_training) l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) # 8,c=64 l = tf.reduce_mean(l, [2, 3]) l = cl.fully_connected(l, num_outputs=10, activation_fn=None) return l def gridcnn_c5(x, is_training): bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'} with tf.variable_scope(name) as scope: l = cl.conv2d(x, num_outputs=16, kernel_size=[3, 3], stride=1, activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', normalizer_fn=cl.batch_norm, normalizer_params=bn_params, scope='conv0') l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) l = residual('res1.0', l, is_training, first=True) for k in range(1, n): l = residual('res1.{}'.format(k), l, is_training) # 32,c=16 l = residual('res2.0', l, is_training, increase_dim=True) for k in range(1, n): l = grid_residual('res2.{}'.format(k), l, is_training, one_c=False) # 16,c=32 l = residual('res3.0', l, is_training, increase_dim=True) for k in range(1, n): l = grid_residual('res3.{}'.format(k), l, is_training, one_c=False) l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW')) # 8,c=64 l = tf.reduce_mean(l, [2, 3]) l = cl.fully_connected(l, num_outputs=10, activation_fn=None) return l return gridcnn_c5 if grid else cnn
gpl-3.0
-2,439,001,560,730,617,300
45.469256
108
0.619239
false
2.647176
false
false
false
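A minimal usage sketch for the resnet() factory above — assuming TF1 graph mode, that `cl` aliases tf.contrib.layers, and CIFAR-10 input in NCHW layout; the names here are illustrative, not from the source:

import tensorflow as tf

model_fn = resnet('resnet20', n=3)   # returns the `cnn` closure, not a graph
images = tf.placeholder(tf.float32, [None, 3, 32, 32])   # NCHW CIFAR batch
is_training = tf.placeholder(tf.bool, [])
logits = model_fn(images, is_training)                    # shape [None, 10]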
jandebleser/django-wiki
src/wiki/conf/settings.py
1
10199
from __future__ import absolute_import, unicode_literals

import bleach

from django.conf import settings as django_settings
from django.core.files.storage import default_storage
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _

#: Should urls be case sensitive?
URL_CASE_SENSITIVE = getattr(django_settings, 'WIKI_URL_CASE_SENSITIVE', False)

# Non-configurable (at the moment)
WIKI_LANGUAGE = 'markdown'

#: The editor class to use -- maybe a 3rd party or your own...? You can always
#: extend the built-in editor and customize it!
EDITOR = getattr(
    django_settings,
    'WIKI_EDITOR',
    'wiki.editors.markitup.MarkItUp')

#: Whether to use Bleach or not. It's not recommended to turn this off unless
#: you know what you're doing and you don't want to use the other options.
MARKDOWN_SANITIZE_HTML = getattr(
    django_settings,
    'WIKI_MARKDOWN_SANITIZE_HTML',
    True)

#: Arguments for the Markdown instance, for instance a list of extensions to
#: use.
#: See: https://pythonhosted.org/Markdown/extensions/index.html
#:
#: To set a custom title for TOC's::
#:
#:    WIKI_MARKDOWN_KWARGS = {'extension_configs': {'toc': {'title': _('Contents of this article')}}}
MARKDOWN_KWARGS = {
    'extensions': [
        'footnotes',
        'attr_list',
        'smart_strong',
        'def_list',
        'tables',
        'abbr',
        'sane_lists',
    ],
    'extension_configs': {
        'toc': {
            'title': _('Table of Contents')}},
}
MARKDOWN_KWARGS.update(getattr(django_settings, 'WIKI_MARKDOWN_KWARGS', {}))

_default_tag_whitelists = bleach.ALLOWED_TAGS + [
    'figure',
    'figcaption',
    'br',
    'hr',
    'p',
    'div',
    'img',
    'pre',
    'span',
    'table',
    'thead',
    'tbody',
    'th',
    'tr',
    'td',
    'dl',
    'dt',
    'dd',
] + ['h{}'.format(n) for n in range(8)]

#: List of allowed tags in Markdown article contents.
MARKDOWN_HTML_WHITELIST = _default_tag_whitelists
MARKDOWN_HTML_WHITELIST += (
    getattr(
        django_settings,
        'WIKI_MARKDOWN_HTML_WHITELIST',
        []
    )
)

_default_attribute_whitelist = bleach.ALLOWED_ATTRIBUTES
for tag in MARKDOWN_HTML_WHITELIST:
    if tag not in _default_attribute_whitelist:
        _default_attribute_whitelist[tag] = []
    _default_attribute_whitelist[tag].append('class')
    _default_attribute_whitelist[tag].append('id')

_default_attribute_whitelist['img'].append('src')
_default_attribute_whitelist['img'].append('alt')

#: Dictionary of allowed attributes in Markdown article contents.
MARKDOWN_HTML_ATTRIBUTES = _default_attribute_whitelist
MARKDOWN_HTML_ATTRIBUTES.update(
    getattr(
        django_settings,
        'WIKI_MARKDOWN_HTML_ATTRIBUTE_WHITELIST',
        {}
    )
)

#: Allowed inline styles in Markdown article contents, default is no styles
#: (empty list)
MARKDOWN_HTML_STYLES = (
    getattr(
        django_settings,
        'WIKI_MARKDOWN_HTML_STYLES',
        []
    )
)

_project_defined_attrs = getattr(
    django_settings,
    'WIKI_MARKDOWN_HTML_ATTRIBUTE_WHITELIST',
    False)

# If styles are allowed but no custom attributes are defined, we allow styles
# for all kinds of tags
if MARKDOWN_HTML_STYLES and not _project_defined_attrs:
    MARKDOWN_HTML_ATTRIBUTES['*'] = ['style']

#: This slug is used in URLPath if an article has been deleted. The children of the
#: URLPath of that article are moved to lost and found. They keep their permissions
#: and all their content.
LOST_AND_FOUND_SLUG = getattr(
    django_settings,
    'WIKI_LOST_AND_FOUND_SLUG',
    'lost-and-found')

#: When True, this blocks new slugs that resolve to non-wiki views, stopping
#: users creating articles that conflict with overlapping URLs from other apps.
CHECK_SLUG_URL_AVAILABLE = getattr(
    django_settings,
    'WIKI_CHECK_SLUG_URL_AVAILABLE',
    True)

#: Do we want to log IPs of anonymous users?
LOG_IPS_ANONYMOUS = getattr(django_settings, 'WIKI_LOG_IPS_ANONYMOUS', True)

#: Do we want to log IPs of logged in users?
LOG_IPS_USERS = getattr(django_settings, 'WIKI_LOG_IPS_USERS', False)

####################################
# PERMISSIONS AND ACCOUNT HANDLING #
####################################

# NB! None of these callables need to handle anonymous users as they are
# treated in separate settings...

#: A function returning True/False if a user has permission to
#: read contents of an article + plugins
#: Relevance: viewing articles and plugins
CAN_READ = getattr(django_settings, 'WIKI_CAN_READ', None)

#: A function returning True/False if a user has permission to
#: change contents, ie add new revisions to an article
#: Often, plugins also use this
#: Relevance: editing articles, changing revisions, editing plugins
CAN_WRITE = getattr(django_settings, 'WIKI_CAN_WRITE', None)

#: A function returning True/False if a user has permission to assign
#: permissions on an article
#: Relevance: changing owner and group membership
CAN_ASSIGN = getattr(django_settings, 'WIKI_CAN_ASSIGN', None)

#: A function returning True/False if the owner of an article has permission to change
#: the group to a user's own groups
#: Relevance: changing group membership
CAN_ASSIGN_OWNER = getattr(django_settings, 'WIKI_ASSIGN_OWNER', None)

#: A function returning True/False if a user has permission to change
#: read/write access for groups and others
CAN_CHANGE_PERMISSIONS = getattr(
    django_settings,
    'WIKI_CAN_CHANGE_PERMISSIONS',
    None)

#: Specifies if a user has access to soft deletion of articles
CAN_DELETE = getattr(django_settings, 'WIKI_CAN_DELETE', None)

#: A function returning True/False if a user has permission to
#: moderate, i.e. lock articles and permanently delete content.
CAN_MODERATE = getattr(django_settings, 'WIKI_CAN_MODERATE', None)

#: A function returning True/False if a user has permission to create
#: new groups and users for the wiki.
CAN_ADMIN = getattr(django_settings, 'WIKI_CAN_ADMIN', None)

#: Treat anonymous (non logged in) users as the "other" user group
ANONYMOUS = getattr(django_settings, 'WIKI_ANONYMOUS', True)

#: Globally enable write access for anonymous users, if true anonymous users
#: will be treated as the others_write boolean field on models.Article.
ANONYMOUS_WRITE = getattr(django_settings, 'WIKI_ANONYMOUS_WRITE', False)

#: Globally enable create access for anonymous users
#: Defaults to ANONYMOUS_WRITE.
ANONYMOUS_CREATE = getattr(
    django_settings,
    'WIKI_ANONYMOUS_CREATE',
    ANONYMOUS_WRITE)

#: Default setting to allow anonymous users upload access (used in
#: plugins.attachments and plugins.images).
ANONYMOUS_UPLOAD = getattr(django_settings, 'WIKI_ANONYMOUS_UPLOAD', False)

#: Sign up, login and logout views should be accessible
ACCOUNT_HANDLING = getattr(django_settings, 'WIKI_ACCOUNT_HANDLING', True)

#: Signup allowed? If it's not allowed, logged in superusers can still access
#: the signup page to create new users.
ACCOUNT_SIGNUP_ALLOWED = ACCOUNT_HANDLING and getattr(
    django_settings, 'WIKI_ACCOUNT_SIGNUP_ALLOWED', True
)

if ACCOUNT_HANDLING:
    LOGIN_URL = reverse_lazy("wiki:login")
    LOGOUT_URL = reverse_lazy("wiki:logout")
    SIGNUP_URL = reverse_lazy("wiki:signup")
else:
    LOGIN_URL = getattr(django_settings, "LOGIN_URL", "/")
    LOGOUT_URL = getattr(django_settings, "LOGOUT_URL", "/")
    SIGNUP_URL = getattr(django_settings, "WIKI_SIGNUP_URL", "/")

##################
# OTHER SETTINGS #
##################

#: Maximum amount of children to display in a menu before going "+more"
#: NEVER set this to 0 as it will wrongly inform the user that there are no
#: children and for instance that an article can be safely deleted.
SHOW_MAX_CHILDREN = getattr(django_settings, 'WIKI_SHOW_MAX_CHILDREN', 20)

#: Use Bootstrap's select widget. Switch off if you're not using Bootstrap!
USE_BOOTSTRAP_SELECT_WIDGET = getattr(
    django_settings,
    'WIKI_USE_BOOTSTRAP_SELECT_WIDGET',
    True)

#: Dotted name of the class used to construct urlpatterns for the wiki.
#:
#: Default is wiki.urls.WikiURLPatterns. To customize urls or view handlers,
#: you can derive from this.
URL_CONFIG_CLASS = getattr(
    django_settings,
    'WIKI_URL_CONFIG_CLASS',
    'wiki.urls.WikiURLPatterns')

#: Search view - dotted path denoting where the search view Class is located
SEARCH_VIEW = getattr(
    django_settings,
    'WIKI_SEARCH_VIEW',
    'wiki.views.article.SearchView'
    if 'wiki.plugins.haystack' not in django_settings.INSTALLED_APPS
    else
    'wiki.plugins.haystack.views.HaystackSearchView'
)

#: Seconds of timeout before renewing article cache. Articles are automatically
#: renewed whenever an edit occurs but article content may be generated from
#: other objects that are changed.
CACHE_TIMEOUT = getattr(django_settings, 'WIKI_CACHE_TIMEOUT', 600)

#: Choose the Group model to use. Defaults to django's auth.Group
GROUP_MODEL = getattr(django_settings, 'WIKI_GROUP_MODEL', 'auth.Group')

###################
# SPAM PROTECTION #
###################

#: Maximum allowed revisions per hour for any given user or IP
REVISIONS_PER_HOUR = getattr(django_settings, 'WIKI_REVISIONS_PER_HOUR', 60)

#: Maximum allowed revisions per minute for any given user or IP
REVISIONS_PER_MINUTES = getattr(
    django_settings, 'WIKI_REVISIONS_PER_MINUTES', 5)

#: Maximum allowed revisions per hour for any given anonymous user or IP
REVISIONS_PER_HOUR_ANONYMOUS = getattr(
    django_settings, 'WIKI_REVISIONS_PER_HOUR_ANONYMOUS', 10)

#: Maximum allowed revisions per minute for any given anonymous user or IP
REVISIONS_PER_MINUTES_ANONYMOUS = getattr(
    django_settings, 'WIKI_REVISIONS_PER_MINUTES_ANONYMOUS', 2)

#: Number of minutes for looking up REVISIONS_PER_MINUTES and
#: REVISIONS_PER_MINUTES_ANONYMOUS
REVISIONS_MINUTES_LOOKBACK = getattr(
    django_settings, 'WIKI_REVISIONS_MINUTES_LOOKBACK', 2)

###########
# STORAGE #
###########

#: Django Storage backend to use for images, attachments etc.
STORAGE_BACKEND = getattr(
    django_settings,
    'WIKI_STORAGE_BACKEND',
    default_storage)

#: Use Sendfile
USE_SENDFILE = getattr(django_settings, 'WIKI_ATTACHMENTS_USE_SENDFILE', False)
gpl-3.0
8,362,595,489,785,991,000
31.275316
92
0.702226
false
3.474957
false
false
false
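Because every option above is read with getattr(django_settings, 'WIKI_...'), a host project overrides them from its own settings module. A minimal sketch, assuming django-wiki is installed; the (article, user) signature follows the CAN_* comments above and is an assumption that should be checked against the wiki's permission checks:

# settings.py of the host project (illustrative values)
WIKI_URL_CASE_SENSITIVE = True
WIKI_REVISIONS_PER_HOUR = 120
WIKI_MARKDOWN_HTML_WHITELIST = ['section', 'aside']   # appended to the defaults

def wiki_can_moderate(article, user):
    # Only staff may lock articles or permanently delete content.
    return user.is_staff

WIKI_CAN_MODERATE = wiki_can_moderate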
EricsonWillians/PyGameWidgets
examples/long_text_label.py
1
1651
import sys
sys.path.append("..")
import pygame
import core
import widgets

# Text label example.

WINDOW_WIDTH = 1024
WINDOW_HEIGHT = 728

pygame.init()
pygame.font.init()
screen = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
clock = pygame.time.Clock()
FPS = 60
running = True

if __name__ == "__main__":
    panel = widgets.Panel(core.Grid((3, 10), (WINDOW_WIDTH, WINDOW_HEIGHT)), None, None, (0, 0))
    panel.set_color((155, 155, 155, 255))
    text = widgets.TextLabel(panel, (1, 2), core.Text(
"""
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco
laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in
voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat
non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.""", 13, core.BLACK)
    )
    text.set_color(core.WHITE) # This is the color of the widget, not to be confused with the color of its text.
    text.set_span((0, 5))
    text.set_border(core.BLACK, 8)
    text.set_margin(10) # Altering the margin because of the border.

    def redraw():
        pygame.display.flip()
        screen.fill((0, 0, 0))
        panel.draw(screen)
        text.draw(screen)

    while (running):
        clock.tick(FPS)
        redraw()
        for e in pygame.event.get():
            if e.type == pygame.QUIT:
                sys.exit()
gpl-3.0
-231,955,241,589,300,640
29.018182
112
0.625681
false
3.187259
false
false
false
hchauvet/beampy
beampy/statics/default_theme.py
1
4858
# -*- coding: utf-8 -*- # Default theme of Beampy # Main keys of the dict should be the name of the beampy modules or class # Each modules default options need to be defined here! THEME = {} THEME['document'] = { 'format': 'html5', #could be svg // pdf // html5 'width': 800, 'height': 600, 'optimize': True, 'resize_raster':True, 'cache': True, 'guide': False, 'text_box': False, 'html': { 'background_color': 'black' }, 'external_app': {"inkscape": "auto", "dvisvgm": "auto", "pdfjoin": "auto", "video_encoder": 'auto', "pdf2svg": "auto", "epstopdf": "auto"} } THEME['slide'] = { 'background': "white", 'layout': None, #Could be changed to a function that will decorate the current slide with elements #this can be used to create a specific layout for a theme #Could also be a string that refer to the key of the LAYOUT[key] dict if you need several layouts #for a presentation } THEME['text'] = { 'size':20, 'font':'CMR', 'color':'#000000', 'align':'', 'x':'center', 'y':'auto', 'width':None, 'usetex':True, 'va': '', 'opacity':1, 'extra_packages': [] } THEME['title'] = { 'size': 28, 'font': 'CMR', 'color': 'ForestGreen', 'x': {'shift':0.5, 'unit':'cm'}, 'y': {'shift':1.25, 'unit':'cm'}, 'reserved_y': '1.5cm', 'align': '', 'va': 'baseline', 'opacity': 1 } THEME['link'] = { 'fill':THEME['title']['color'] } THEME['maketitle'] = { 'title_size':30, 'title_color':THEME['title']['color'], 'author_size':THEME['text']['size'], 'author_color':'black', 'date_size':15, 'date_color':'#888888', 'subtitle_color':'#888888', 'subtitle_size':20, 'template': None #here you redefine a link to a function "def mytitle(titlein, author, subtitle, date, args)"" that is executed in maketitle to replace the default template } THEME['tableofcontents'] = { 'width': None, 'height': None, 'x': 25, 'y': 'center', 'section_yoffset': 50, 'subsection_xoffset': 20, 'subsection_yoffset': 10, 'section_style': 'round', 'subsection_style': None, 'section_decoration_color': THEME['title']['color'], 'section_decoration_size': 13, 'section_number_color': 'white', 'section_text_color': THEME['title']['color'], 'subsection_text_color': THEME['text']['color'], 'subsection_decoration_color': 'gray', 'subsection_decoration_size': 13/2, 'hidden_opacity': 0.2 } THEME['video'] = { 'width': None, 'height': None, 'x': 'center', 'y': 'auto', 'autoplay': False, 'loop' : False, 'control': True, 'still_image_time': 0.0, 'embedded': True } THEME['animatesvg'] = { 'start': 0, 'end': 'end', 'x': 'center', 'y': 'auto', 'width': None, 'fps': 25, 'autoplay': False } THEME['tikz'] = { 'x': 0, 'y': 0, 'tikz_header': None, 'tex_packages': None, 'latex_pre_tikzpicture': None, 'figure_options': None, 'figure_anchor': 'top_left' } THEME['figure'] = { 'x':'center', 'y':'auto', 'width':None, 'height':None } THEME['cite'] = { 'x':'center', 'y':'auto', 'color':THEME['title']['color'], 'size':16, 'reference_delimiter' : ';', 'brackets' : ('[',']'), } THEME['bibliography'] = { "max_author" : 3, "initials" : False, "journal" : False, "and" : r'\&', 'et_al' : 'et al.', 'initial_delimiter' : '.', } THEME['itemize'] = { 'x':'center', 'y':'auto', 'item_style':'bullet', 'item_spacing':'+1cm', 'item_indent':'0cm', 'item_color':THEME['title']['color'], 'text_color':THEME['text']['color'], 'width':None, 'item_layers': None } THEME['line'] = { 'x':'center', 'y':'auto', 'color': THEME['title']['color'], 'linewidth': '2px', 'opacity': 1 } THEME['rectangle'] = { 'x':'center', 'y':'auto', 'color': THEME['title']['color'], 'linewidth': '2px', 'opacity': 1, 'edgecolor': 
THEME['text']['color'], 'height': '10px', 'width': '%spx'%(THEME['document']['width']), 'rx':0, 'ry':0, 'svgfilter': None, 'svgclip': None } THEME['circle'] = { 'x':'center', 'y':'auto', 'color': THEME['title']['color'], 'linewidth': '1px', 'opacity': 1, 'edgecolor': THEME['title']['color'], 'r': '3px' } THEME['box'] = { 'rounded': 10, 'linewidth': 1, 'color': THEME['title']['color'], 'head_height': None, 'shadow': False, 'background_color': 'white', 'title_color': 'white', 'title_align': 'left', 'title_xoffset': 10, 'title_size': THEME['text']['size'], 'auto_height_margin': 15, 'title_height_margin': 10 }
gpl-3.0
-2,950,168,247,424,549,000
21.490741
176
0.527995
false
3.055346
false
false
false
bolkedebruin/airflow
airflow/providers/google/cloud/operators/local_to_gcs.py
1
3617
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ This module contains operator for uploading local file to GCS. """ import warnings from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.gcs import GCSHook from airflow.utils.decorators import apply_defaults class LocalFilesystemToGCSOperator(BaseOperator): """ Uploads a file to Google Cloud Storage. Optionally can compress the file for upload. :param src: Path to the local file. (templated) :type src: str :param dst: Destination path within the specified bucket, it must be the full file path to destination object on GCS, including GCS object (ex. `path/to/file.txt`) (templated) :type dst: str :param bucket: The bucket to upload to. (templated) :type bucket: str :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform. :type gcp_conn_id: str :param google_cloud_storage_conn_id: (Deprecated) The connection ID used to connect to Google Cloud Platform. This parameter has been deprecated. You should pass the gcp_conn_id parameter instead. :type google_cloud_storage_conn_id: str :param mime_type: The mime-type string :type mime_type: str :param delegate_to: The account to impersonate, if any :type delegate_to: str :param gzip: Allows for file to be compressed and uploaded as gzip :type gzip: bool """ template_fields = ('src', 'dst', 'bucket') @apply_defaults def __init__(self, src, dst, bucket, gcp_conn_id='google_cloud_default', google_cloud_storage_conn_id=None, mime_type='application/octet-stream', delegate_to=None, gzip=False, *args, **kwargs): super().__init__(*args, **kwargs) if google_cloud_storage_conn_id: warnings.warn( "The google_cloud_storage_conn_id parameter has been deprecated. You should pass " "the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3) gcp_conn_id = google_cloud_storage_conn_id self.src = src self.dst = dst self.bucket = bucket self.gcp_conn_id = gcp_conn_id self.mime_type = mime_type self.delegate_to = delegate_to self.gzip = gzip def execute(self, context): """ Uploads the file to Google Cloud Storage """ hook = GCSHook( google_cloud_storage_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to) hook.upload( bucket_name=self.bucket, object_name=self.dst, mime_type=self.mime_type, filename=self.src, gzip=self.gzip, )
apache-2.0
-7,390,544,169,797,167,000
36.28866
104
0.646945
false
4.124287
false
false
false
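A sketch of how the operator above is typically wired into a DAG — the dag_id, paths and bucket name are placeholders, not values from the source:

from airflow import DAG
from airflow.utils.dates import days_ago

with DAG(dag_id='local_to_gcs_example', start_date=days_ago(1),
         schedule_interval=None) as dag:
    upload = LocalFilesystemToGCSOperator(
        task_id='upload_report',
        src='/tmp/report.csv',              # templated field
        dst='reports/report.csv',           # full object path in the bucket
        bucket='my-example-bucket',
        gzip=True,
    )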
robertz23/code-samples
python scripts and tools/list_intersection.py
1
1200
""" List intersection: Finds intersections between various lists """ def check_intersection(first_list, second_list): #We use set builtin function to find the intersection between lists return set(first_list).intersection(second_list) def create_lists(line): #receive a line from the file containing ascending numbers #each line is of the form 'n,n,n;n,n,n' where n is a number #and the semi-colon separates the lists first, second = line.split(';') #Make sure that we have a list of numbers and not numbers and commas first = [x for x in first.split(',')] second = [x for x in second.split(',')] #look for the intersection intersected_number = check_intersection(first, second) if intersected_number: intersected_numbers_sorted = [eachNumber for eachNumber in intersected_number] intersected_numbers_sorted.sort() print ','.join(intersected_numbers_sorted) else: print "" #return 0 if __name__ == '__main__': #l = ["1,2,3;3,4,5", "1,2,3;0,4,5", "7,8,9;8,9,10,11,12"] l = ["1,2,3,4;4,5,6", "20,21,22;45,46,47", "7,8,9;8,9,10,11,12"] for eachLine in l: create_lists(eachLine)
mit
7,565,693,730,159,266,000
32.333333
86
0.645
false
3.269755
false
false
false
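The core of the script above is the set intersection; a two-line equivalent for one input line (key=int keeps the output in numeric rather than string order):

line = "1,2,3,4;4,5,6"
first, second = [part.split(',') for part in line.split(';')]
print ','.join(sorted(set(first) & set(second), key=int))   # -> 4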
joseguerrero/sembrando
src/presentacion/librerias/popups.py
1
13668
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pygame
from .texto import texto
from .textoci import texto2
from .imgfondo import fondo

TEXT_COLOR2 = (0,0,0)
COLOR_RED = (213, 0, 0)
BAR_COLOR = (151, 0, 172)
TEXT_COLOR = (255, 255, 255)

class Button(pygame.sprite.Sprite):
    def __init__(self, identificador, parent, text, fondo = 0, ancho = 500):
        """
        Class initializer method.

        @param identificador: Variable used to identify the button.
        @type identificador: str
        @param parent: Instance of the screen manager.
        @type parent: Manejador
        @param text: Text that the button will display.
        @type text: str
        @param fondo: Indicates whether the button background is drawn with or without an image (under development).
        @type fondo: bool
        @param ancho: Width of the button. Used to center the text.
        @type ancho: int
        """
        pygame.sprite.Sprite.__init__(self)
        self.ancho = ancho
        self.parent = parent
        tipografia = pygame.font.match_font("FreeSans", False, False)
        font = pygame.font.Font(tipografia, parent.config.t_fuente)
        self.identificador = identificador
        varios = "../imagenes/png/varios/"

        if fondo == 0:
            texto1 = font.render(text, 1, TEXT_COLOR)
            textorect = texto1.get_rect()
            texto2 = font.render(text, 1, COLOR_RED)
            self.img_fondo = pygame.image.load(varios + "img-boton.png")
            self.img_fondo2 = pygame.image.load(varios + "img-boton.png")
            imgrect = self.img_fondo.get_rect()
            textorect.center = imgrect.center[0],imgrect.center[1]+imgrect.center[1]/3
            self.img_fondo.blit(texto1, textorect)
            self.img_fondo2.blit(texto2, textorect)
            self.rect = self.img_fondo.get_rect()
            self.image = self.img_fondo

        if fondo == 1:
            txt = texto(0,0,text,parent.config.t_fuente,"texto_act",self.ancho)
            self.rect = pygame.Rect(0,0,self.ancho,txt.ancho_final)
            image_texto = pygame.Surface((self.ancho,txt.ancho_final))
            image_texto.fill((255,255,255))
            image_texto.set_colorkey((255,255,255))
            for i in txt.img_palabras:
                image_texto.blit(i.image, i.rect)
            self.image = image_texto
            self.img_fondo = image_texto
            self.img_fondo2 = image_texto

    def cambiar_status(self, status):
        """
        Draws an effect on the buttons by swapping the background image (deprecated).
        """
        if status:
            self.image = self.img_fondo2
        else:
            self.image = self.img_fondo

    def mover_boton(self,x,y):
        """
        Changes the button's position.
        """
        self.rect.center = (x,y)


class PopUp(pygame.sprite.Sprite):
    def __init__(self, parent , texto1, palabra_boton , imagenes , grupo, tipo = 0 ,px=512,py=281,tam =0):
        """
        Class initializer method.

        @param parent: Instance of the screen manager.
        @type parent: Manejador
        @param texto1: Text to be shown in the pop-up window.
        @type texto1: str
        @param palabra_boton: Word displayed on the button. Only used when tipo = 0; for any other value it takes the value of a string and is not assigned.
        @type palabra_boton: str
        @param imagenes: Image(s) shown in the pop-up window. When tipo = 2 this field is not required; when tipo = 1 it must be a surface; and when tipo = 0 it must be a tuple of two images, where position 0 is the image placed next to the text and position 1 is the image placed below the text.
        @type imagenes: pygame.Surface, tuple
        @param grupo: Sprite group to which the sprite with the image and its rectangle will be added.
        @type grupo: pygame.sprite.Group
        @param tipo: Determines the kind of pop-up window. If it takes the value 2 the pop-up only uses the texto1 parameter (the other parameters must still be supplied). If it takes the value 1 the pop-up uses the parameters texto1, imagenes and palabra_boton, showing a tidy window with justified text, a space on the right where the image is placed (that image must be at most 1/3 of the pop-up's size) and a button centered below the text. If it takes the value 0 the pop-up is similar to the previous one, except that it has one more image placed to the left of the button and below the text.
        @type tipo: int
        """
        pygame.sprite.Sprite.__init__(self)
        self.parent = parent
        self.sprite = pygame.sprite.Sprite()
        varios = "../imagenes/png/varios/"
        self.texto = pygame.Surface
        self.tipo = tipo
        self.arreglo_botones=[]
        self.grupo = grupo
        self.click = -1
        self.activo = 0
        self.tam = 0

        if tipo == 0:
            self.img_fondo = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
            self.sprite.image = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
            self.sprite.rect = self.sprite.image.get_rect()
            x=30
            y=30
            self.texto = texto(x, y,texto1[0], parent.config.t_fuente , "texto_act" ,(self.sprite.rect.width*2/3 ))
            self.area_texto = pygame.Rect(x,y,self.sprite.rect.w*2/3,self.texto.ancho_final)
            self.area_imagenes = pygame.Rect((self.sprite.rect.w*2/3)+80, y, self.sprite.rect.w/3, self.texto.ancho_final)
            self.parent = parent
            self.boton = Button(0,self.parent,palabra_boton)
            self.boton.mover_boton( self.sprite.rect.width/2, self.area_texto.h + x*2 + self.boton.rect.h / 2 )
            self.boton_rect = pygame.Rect(self.boton.rect.x , self.boton.rect.y , self.boton.rect.width , self.boton.rect.height)
            self.sprite.image = fondo(self.sprite.rect.w, self.boton.rect.y+self.boton.rect.h+x,5 ).return_imagen()
            self.imagen = pygame.sprite.Sprite()
            if type(imagenes) != pygame.Surface:
                self.imagen2 = pygame.sprite.Sprite()
                self.imagen.image = imagenes[0]
                self.imagen.rect = self.imagen.image.get_rect()
                self.imagen.rect.center = (self.sprite.rect.w*2/3 +(self.sprite.rect.w/3)/2 , self.area_imagenes.h/2 + self.boton_rect.h/2 )
                self.imagen2.image = imagenes[1]
                self.imagen2.rect = self.imagen.image.get_rect()
                self.imagen2.rect.left = x
                self.imagen2.rect.y = self.area_texto.h+40
                self.sprite.image.blit(self.imagen2.image , self.imagen2.rect)
            else:
                self.imagen.image = imagenes
                self.imagen.rect = self.imagen.image.get_rect()
                self.imagen.rect.center = (self.sprite.rect.w*2/3 +(self.sprite.rect.w/3)/2 , self.area_imagenes.h/2 + self.boton_rect.h/2 )
                if self.imagen.rect.y < 5:
                    self.imagen.rect.y = 6
            for i in self.texto.img_palabras:
                self.sprite.image.blit(i.image, i.rect)
            self.sprite.image.blit(self.boton.image , self.boton.rect)
            self.sprite.image.blit(self.imagen.image , self.imagen.rect)
            self.sprite.rect.center = (px, py)
            self.boton_rect.center = (self.sprite.rect.x + self.sprite.rect.width/2, self.sprite.rect.y + self.area_texto.h + x*2 + self.boton.rect.h / 2)

        if tipo == 1:
            self.img_fondo = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
            self.sprite.image = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
            self.sprite.rect = self.sprite.image.get_rect()
            x = 15
            y = 15
            o = 0
            separacion = 30
            tabulacion = 30
            self.sprite.rect.w += tam
            for i in texto1:
                if o ==0:
                    self.texto = texto(x, y,i, parent.config.t_fuente , "texto_act" ,(self.sprite.rect.width )-x)
                if o>0:
                    self.arreglo_botones.append(Button(o-1,self.parent,i,1,self.sprite.rect.w - x*2 -tabulacion))
                o+=1
            self.texto.rect = pygame.Rect(x,y,self.sprite.rect.w - 80, self.texto.ancho_final)
            y+= self.texto.ancho_final + separacion
            for i in self.arreglo_botones:
                i.rect.x = x+tabulacion/2
                i.rect.y = y
                y+=i.rect.h + separacion/2
            self.img_fondo = fondo(self.sprite.rect.w, y).return_imagen()
            self.sprite.image = fondo(self.sprite.rect.w, y).return_imagen()
            for i in self.texto.img_palabras:
                self.sprite.image.blit(i.image,i.rect)
                self.img_fondo.blit(i.image,i.rect)
            self.sprite.rect.center = (px, py)
            for i in self.arreglo_botones:
                self.sprite.image.blit(i.image,i.rect)
                self.img_fondo.blit(i.image,i.rect)
                i.rect.x = self.sprite.rect.x + i.rect.x
                i.rect.y = self.sprite.rect.y + i.rect.y

        if tipo == 2:
            self.sprite.image = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
            self.sprite.rect = self.sprite.image.get_rect()
            self.sprite.rect.w += tam
            self.texto = texto2(15,15, texto1,parent.config.t_fuente, "intercalado", self.sprite.rect.w -15, imagenes)
            self.sprite.image = fondo(self.sprite.rect.w, self.texto.ancho_final+30).return_imagen()
            self.sprite.rect.h = self.texto.ancho_final+30
            self.tam = self.texto.ancho_final+60
            for i in self.texto.img_palabras:
                self.sprite.image.blit(i.image, i.rect)
            self.sprite.rect.center=(px,py)

    def popup_estatus(self):
        """
        Reports whether the pop-up window is active.

        @return: True if the window is active, False otherwise.
        @rtype: bool
        """
        if self.activo:
            return True
        else:
            return False

    def redibujar_boton(self):
        """
        Defines the button effect in pop-up windows (deprecated).
        """
        if self.tipo ==0:
            self.sprite.image.blit(self.img_fondo,(self.boton.rect.x,self.boton.rect.y), self.boton.rect)
            self.sprite.image.blit(self.boton.image , self.boton.rect)
        if self.tipo == 1:
            self.sprite.image.blit(self.img_fondo,(0,0))

    def agregar_grupo(self):
        """
        Adds the pop-up window's sprite to the sprite group passed as a parameter when the object was created.
        """
        self.activo=1
        self.grupo.add(self.sprite)

    def eliminar_grupo(self):
        """
        Removes the pop-up window's sprite from the sprite group passed as a parameter when the object was created.
        """
        self.activo = 0
        self.grupo.remove(self.sprite)

    def evaluar_click(self):
        """
        Returns the result of the manejador_eventos() method.

        @return: True if a click happened, otherwise False.
        @rtype: bool
        """
        return self.click

    def manejador_eventos(self, eventos):
        """
        Determines when the button is clicked (only for pop-up windows of type 0 or 1).

        @param eventos: Last event received.
        @rtype: pygame.event.Event
        """
        teclasPulsadas = pygame.key.get_pressed()
        if self.tipo == 0:
            if self.boton_rect.collidepoint(pygame.mouse.get_pos()):
                if (eventos.type == pygame.MOUSEBUTTONDOWN and eventos.button == 1):
                    self.eliminar_grupo()
                    self.click = 0
                    return True
                else:
                    self.click= -1
            if teclasPulsadas[pygame.K_RETURN]:
                self.eliminar_grupo()
                self.click = 0
            else:
                self.click= -1

        if self.tipo == 1:
            for i in self.arreglo_botones:
                if i.rect.collidepoint(pygame.mouse.get_pos()):
                    if eventos.type == pygame.MOUSEBUTTONDOWN and eventos.button == 1:
                        self.click = i.identificador
                else:
                    self.click = -1
gpl-3.0
-4,537,053,745,970,449,400
47.508897
166
0.548823
false
3.284578
false
false
false
SDAquaponics/Software
Arduino/PythonDriver/NotifyAQPServer.py
1
2261
#! /usr/bin/python

# Import dependencies
import serial, sys, sendmail, decimal
import socket

HOST = "raspberrypi"
PORT = 1981
DEBUG = False

# Serial port settings
AT_PORT = "/dev/ttyATH0"
AT_BAUD = 115200

# Serial packet structure
MAGIC_1 = 0xA9
MAGIC_2 = 0xBD
MSG_TYPE_REQUEST = 0x00
MSG_TYPE_RESPONSE = 0x01
MSG_LEN_READ_SENSOR_VALUES = 0x01
OPAQUE_DATA_REQ = 0x00
EOD = 0xCB
CK_1 = 0xFD
CK_2 = 0xFF

ATMEGA_REQ_SENSOR_VALUES = [MAGIC_1, \
                            MAGIC_2, \
                            MSG_TYPE_REQUEST, \
                            MSG_LEN_READ_SENSOR_VALUES, \
                            OPAQUE_DATA_REQ, \
                            EOD, \
                            CK_1, \
                            CK_2]

# Sensor type definitions
SENSOR_TYPE_DISTANCE = 0x00
SENSOR_TYPE_TEMP = 0x01
SENSOR_TYPE_HUMIDITY = 0x02

CRITICAL_DISTANCE = 1.5
MSG_COUNTER = 0

def myprint(arg):
    if DEBUG:
        print(arg)
    else:
        None

def aqp_get_sensor_values(port):
    results = {}
    nw_packet = ""
    port.write(ATMEGA_REQ_SENSOR_VALUES)
    myprint("Sent serial data to AT, waiting for response now...")

    magic = port.read(2)
    nw_packet = nw_packet + magic
    if ord(magic[0]) == 0xA9 and ord(magic[1]) == 0xBD:
        myprint("Magic numbers in response alright\n")
        msg_type = port.read(1)
        nw_packet = nw_packet + msg_type
        if (ord(msg_type) == 0x01):    # Check for response
            msg_len = port.read(1)
            nw_packet = nw_packet + msg_len
            myprint("Payload size is: %d" % ord(msg_len))
            payload = port.read(ord(msg_len))
            nw_packet = nw_packet + payload
            ck1_ck2 = port.read(2)
            nw_packet = nw_packet + ck1_ck2
            myprint("Ck1 = %X, Ck2 = %X" % (ord(ck1_ck2[0]), ord(ck1_ck2[1])))
        else:
            myprint("Invalid response packet\n")
    else:
        myprint("Bad Magic, aborting...%X, %X\n" % (ord(magic[0]), ord(magic[1])))

    return nw_packet

def init_serial_port():
    ser = serial.Serial(AT_PORT, AT_BAUD)
    return ser

if __name__ == "__main__":
    ser = init_serial_port()
    nw_packet = aqp_get_sensor_values(ser)

    # Send the network packet to the AQPServer running on the RasPI
    client_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_sock.connect((HOST, PORT))
    #print ord(nw_packet[0]), ord(nw_packet[1]), ord(nw_packet[2]), ord(nw_packet[3])
    client_sock.send(nw_packet)
    resp = client_sock.recv(1024)
    client_sock.close()
    myprint("Response from Server: %s\n" % resp)
gpl-2.0
8,152,229,391,004,637,000
20.130841
82
0.655462
false
2.569318
false
false
false
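The 8-byte request frame defined by the constants above can also be packed with struct; a quick consistency check (Python 2, matching the script's ord() usage):

import struct

frame = struct.pack('8B', MAGIC_1, MAGIC_2, MSG_TYPE_REQUEST,
                    MSG_LEN_READ_SENSOR_VALUES, OPAQUE_DATA_REQ,
                    EOD, CK_1, CK_2)
assert [ord(c) for c in frame] == ATMEGA_REQ_SENSOR_VALUES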
pave/pylint_flask_ext
flask_ext_clean.py
1
5123
""" Plugin for pylint that tells it about flask's extension classes. """ from pylint.utils import PyLintASTWalker from logilab.astng import MANAGER from logilab.astng import node_classes def copy_node_info(src, dest): """Copy information from src to dest Every node in the AST has to have line number information. Get the information from the old stmt.""" for attr in ['lineno', 'fromlineno', 'tolineno', 'col_offset', 'parent']: if hasattr(src, attr): setattr(dest, attr, getattr(src, attr)) def splice(stmt, new_stmt): """Replace stmt with new_stmt in the AST Also, copy useful information from stmt to new_stmt. This assumes that stmt and new_stmt are of the same type and define the same names. """ copy_node_info(stmt, new_stmt) # Replace stmt with new_stmt in the sequence of statements that # included stmt. body = stmt.parent.child_sequence(stmt) i = body.index(stmt) stmt.parent.body[i] = new_stmt # The names defined by an import statement are kept in stmt.names # as a pair of (exported_name, as_name). For example, "import foo, # bar as baz" corresponds to an import statement with # names=[("foo", None), ("bar", "baz")]. # # All names that stmt defined should now be defined by new_stmt. for (name, as_name) in stmt.names: stmt.parent.set_local(as_name or name, new_stmt) class ImportRewriterVisitor(object): """AST Visitor that looks for flask.ext imports and rewrites them This is something like the Visitor Pattern. For every Foo node in the AST, PyLintASTWalker will call visit_foo.""" def __init__(self): self.flask_ext_imported = {} def visit_from(self, stmt): """Visit 'from foo import bar' statements""" if stmt.modname == 'flask.ext': # Replace 'from flask.ext import login' with # 'import flask_login as login'. new_stmt = node_classes.Import() new_stmt.names = [] for pair in stmt.names: (name, as_name) = pair new_stmt.names.append(('flask_'+name, as_name or name)) splice(stmt, new_stmt) if stmt.modname.startswith('flask.ext.'): # Replace 'from flask.ext.wtf import Foo' with 'from # flask_wtf import Foo'. ext_name = stmt.modname[10:] new_stmt = node_classes.From('flask_'+ext_name, stmt.names, stmt.level) splice(stmt, new_stmt) def visit_import(self, stmt): """Visit 'import flask.ext.login' statements Pretend that flask.ext did "import flask_login as login".""" flask_ext_names = [] for (name, as_name) in stmt.names: if name.startswith('flask.ext.'): flask_ext_names.append(name[10:]) if not flask_ext_names: # We visited an import that doesn't import any flask.ext stuff. # Not our problem. return module = stmt.root() if not self.flask_ext_imported.get(module): # Make sure flask.ext is imported already at least once. import_stmt = node_classes.Import() import_stmt.names = [('flask.ext', None)] import_stmt.fromlineno = import_stmt.tolineno = -1 import_stmt.parent = module body = stmt.parent.child_sequence(stmt) body.insert(0, import_stmt) self.flask_ext_imported[module] = True # Mark this as the first definition of flask module.locals.setdefault('flask', []).insert(0, import_stmt) # Change all names in this statement in-place. for i, (modname, as_name) in enumerate(stmt.names): if modname.startswith('flask.ext.'): newmodname = modname.replace('flask.ext.', 'flask_') stmt.names[i] = (newmodname, as_name) # This import statement no longer defines "flask" (since it # imports flask_foo), so remove it from locals module.locals['flask'].remove(stmt) # Fool the inference engine by pretending that flask.ext does # an "import flask_foo as foo". 
for name in flask_ext_names: # Get real flask_ext flask_ext_module = module.import_module('flask.ext') values = flask_ext_module.locals.setdefault(name, []) if values: # We're fine, it's already been "imported" continue new_import = node_classes.Import() new_import.tolineno = new_import.fromlineno = -1 new_import.parent = flask_ext_module new_import.names = [('flask_'+name, name)] # We don't actually need to be in the AST. We just want # the inference engine to find us. values.append(new_import) def register(linter): #pylint: disable=W0613 """Pylint calls this hook to actually activate the plugin""" walker = PyLintASTWalker(linter) walker.add_checker(ImportRewriterVisitor()) MANAGER.register_transformer(walker.walk)
mit
5,791,808,267,114,807,000
36.669118
75
0.608823
false
3.949884
false
false
false
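The module-name mapping the visitors implement can be summarized in a small standalone helper — hypothetical code, shown only to make the rewrite rules concrete:

def rewrite_modname(modname):
    # 'from flask.ext import login'  -> 'import flask_login as login'
    # 'from flask.ext.wtf import X'  -> 'from flask_wtf import X'
    # 'import flask.ext.login'       -> 'import flask_login' (plus a synthetic
    #                                    'import flask.ext' so names resolve)
    if modname.startswith('flask.ext.'):
        return 'flask_' + modname[len('flask.ext.'):]
    return modname

assert rewrite_modname('flask.ext.wtf') == 'flask_wtf'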
VRaviTheja/SDN-policy
flowgenerator/random_ports.py
1
1532
import random

def port_generator():
    lim=1000
    port_src_start = []
    port_src_end = []
    port_dst_start = []
    port_dst_end = []

    for i in range(0, lim):
        m = random.randint(1, 200)
        n = random.randint(1, 200)
        if (m<n and m!=n):
            port_src_start.append(m)
            port_src_end.append(n)
        elif (n<m and m!=n):
            port_src_start.append(n)
            port_src_end.append(m)

    # Top up until we have exactly `lim` source ranges (pairs with m == n
    # were skipped above).
    while(lim != len(port_src_start)):
        m = random.randint(1, 200)
        n = random.randint(1, 200)
        if (m<n and m!=n):
            port_src_start.append(m)
            port_src_end.append(n)
        elif (n<m and m!=n):
            port_src_start.append(n)
            port_src_end.append(m)

    for i in range(0, lim):
        k = random.randint(1, 200)
        p = random.randint(1, 200)
        if (k<p and k!=p):
            port_dst_start.append(k)
            port_dst_end.append(p)
        elif (p<k and k!=p):
            port_dst_start.append(p)
            port_dst_end.append(k)

    # Draw fresh k/p values on every iteration; reusing a stale pair here
    # would append the same destination range over and over.
    while(lim != len(port_dst_start)):
        k = random.randint(1, 200)
        p = random.randint(1, 200)
        if (k<p and k!=p):
            port_dst_start.append(k)
            port_dst_end.append(p)
        elif (p<k and k!=p):
            port_dst_start.append(p)
            port_dst_end.append(k)

    return (port_src_start, port_src_end, port_dst_start, port_dst_end)
apache-2.0
9,052,464,247,601,924,000
28.64
71
0.498695
false
3.152263
false
false
false
harmsm/uhbd
previous_releases/0.5.0/single_file.py
1
1800
""" single_file.py: A script that runs a UHBD calculation on a single file. """ """ Version notes: 0.4: 060113 0.4.1: 060403 Hokiness fix. Changed from some_path = x + os.sep + y to os.path.join(x,y) """ __author__ = "Michael J. Harms" __date__ = "060403" __version__ = "0.4.1" # USER INPUTS pH_start = 0 pH_stop = 16 pH_interval = 0.25 ionic_strength = 0.1 dielectric = 20 import initialize import uhbd import os import sys import argParser def main(filename,output_path,pH_start,pH_stop,pH_interval,ionic_strength,dielectric): filename = os.path.join(initialize.invocation_path,filename) # Create output directory (if invoked from command line) if __name__ == "__main__": try: os.mkdir(os.path.join(initialize.invocation_path,output_path)) except OSError, value: # Don't stop if we are only overwriting existing directory if value[0] != 17: print 'File error.' print value[0], output_path, value[1] sys.exit() # Perform UHBD run uhbd.main(filename,pH_start,pH_stop,pH_interval,ionic_strength,dielectric) uhbd.copyFinalOutput(os.path.join(initialize.invocation_path,output_path)) uhbd.runCleanup() # If this is invoked from the command line, run the main function if __name__ == "__main__": # Grab command line options required, optional = argParser.main(sys.argv,["pdb_file","output_dir"], ["inpfile","outdir"], ["dielectric","ionic_strength","pHtitr"]) main(required["pdb_file"],required["output_dir"],optional.pHtitr[0], optional.pHtitr[1],optional.pHtitr[2],optional.ionic_strength, optional.dielectric)
unlicense
3,873,800,045,858,526,700
27.125
86
0.612222
false
3.302752
false
false
false
mattboyer/sqbrite
setup.py
1
1984
from setuptools.command.sdist import sdist as SetuptoolsSdist from setuptools import setup, find_packages import os import shutil import version from src import PROJECT_NAME, PROJECT_DESCRIPTION, README_PATH class SdistAndClean(SetuptoolsSdist): ''' Runs the default setuptools sdist command and then cleans the egg info directory. ''' def run(self): SetuptoolsSdist.run(self) # FIXME This works, but there *has* to be a cleaner way for distfile in self.filelist.files: if distfile.endswith('PKG-INFO'): egginfo_dir = os.path.dirname(distfile) shutil.rmtree(egginfo_dir) def package_names(): return [PROJECT_NAME] + \ [PROJECT_NAME + '.' + package for package in find_packages('src')] long_description = None with open(README_PATH, 'r') as readme: long_description = readme.read() setup( cmdclass={ 'sdist': SdistAndClean, }, name=PROJECT_NAME, version=version.get_git_version(), url='https://github.com/mattboyer/sqbrite', description=PROJECT_DESCRIPTION, long_description=long_description or PROJECT_DESCRIPTION, author='Matt Boyer', author_email='[email protected]', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Database', 'Topic :: System :: Recovery Tools', ], packages=package_names(), # Packaging data files in Python is a complete shitshow # We need this *AND* an "include" line in MANIFEST.IN include_package_data=True, package_dir={PROJECT_NAME: 'src'}, install_requires=[ 'pyxdg', 'pyyaml', ], entry_points={ 'console_scripts': [ PROJECT_NAME+'='+PROJECT_NAME+'.sqlite_recover:main', ], }, )
mit
-8,243,067,687,102,157,000
28.176471
74
0.632056
false
3.882583
false
false
false
Zerknechterer/pyload
module/plugins/hoster/RapidgatorNet.py
1
5675
# -*- coding: utf-8 -*- import pycurl import re from module.common.json_layer import json_loads from module.network.HTTPRequest import BadHeader from module.plugins.internal.AdsCaptcha import AdsCaptcha from module.plugins.internal.ReCaptcha import ReCaptcha from module.plugins.internal.SolveMedia import SolveMedia from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo class RapidgatorNet(SimpleHoster): __name__ = "RapidgatorNet" __type__ = "hoster" __version__ = "0.34" __pattern__ = r'http://(?:www\.)?(rapidgator\.net|rg\.to)/file/\w+' __config__ = [("use_premium", "bool", "Use premium account if available", True)] __description__ = """Rapidgator.net hoster plugin""" __license__ = "GPLv3" __authors__ = [("zoidberg", "[email protected]"), ("chrox", None), ("stickell", "[email protected]"), ("Walter Purcaro", "[email protected]")] API_URL = "http://rapidgator.net/api/file" COOKIES = [("rapidgator.net", "lang", "en")] NAME_PATTERN = r'<title>Download file (?P<N>.*)</title>' SIZE_PATTERN = r'File size:\s*<strong>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong>' OFFLINE_PATTERN = r'>(File not found|Error 404)' JSVARS_PATTERN = r'\s+var\s*(startTimerUrl|getDownloadUrl|captchaUrl|fid|secs)\s*=\s*\'?(.*?)\'?;' PREMIUM_ONLY_PATTERN = r'You can download files up to|This file can be downloaded by premium only<' ERROR_PATTERN = r'You have reached your (?:daily|hourly) downloads limit' WAIT_PATTERN = r'(Delay between downloads must be not less than|Try again in).+' LINK_FREE_PATTERN = r'return \'(http://\w+.rapidgator.net/.*)\';' RECAPTCHA_PATTERN = r'"http://api\.recaptcha\.net/challenge\?k=(.*?)"' ADSCAPTCHA_PATTERN = r'(http://api\.adscaptcha\.com/Get\.aspx[^"\']+)' SOLVEMEDIA_PATTERN = r'http://api\.solvemedia\.com/papi/challenge\.script\?k=(.*?)"' def setup(self): if self.account: self.sid = self.account.getAccountInfo(self.user).get('sid', None) else: self.sid = None if self.sid: self.premium = True self.resumeDownload = self.multiDL = self.premium self.chunkLimit = 1 def api_response(self, cmd): try: json = self.load('%s/%s' % (self.API_URL, cmd), get={'sid': self.sid, 'url': self.pyfile.url}, decode=True) self.logDebug("API:%s" % cmd, json, "SID: %s" % self.sid) json = json_loads(json) status = json['response_status'] msg = json['response_details'] except BadHeader, e: self.logError("API: %s" % cmd, e, "SID: %s" % self.sid) status = e.code msg = e if status == 200: return json['response'] elif status == 423: self.account.empty(self.user) self.retry() else: self.account.relogin(self.user) self.retry(wait_time=60) def handlePremium(self, pyfile): self.api_data = self.api_response('info') self.api_data['md5'] = self.api_data['hash'] pyfile.name = self.api_data['filename'] pyfile.size = self.api_data['size'] self.link = self.api_response('download')['url'] def handleFree(self, pyfile): jsvars = dict(re.findall(self.JSVARS_PATTERN, self.html)) self.logDebug(jsvars) self.req.http.lastURL = pyfile.url self.req.http.c.setopt(pycurl.HTTPHEADER, ["X-Requested-With: XMLHttpRequest"]) url = "http://rapidgator.net%s?fid=%s" % ( jsvars.get('startTimerUrl', '/download/AjaxStartTimer'), jsvars['fid']) jsvars.update(self.getJsonResponse(url)) self.wait(jsvars.get('secs', 45), False) url = "http://rapidgator.net%s?sid=%s" % ( jsvars.get('getDownloadUrl', '/download/AjaxGetDownload'), jsvars['sid']) jsvars.update(self.getJsonResponse(url)) self.req.http.lastURL = pyfile.url self.req.http.c.setopt(pycurl.HTTPHEADER, ["X-Requested-With:"]) url = "http://rapidgator.net%s" % jsvars.get('captchaUrl', 
'/download/captcha') self.html = self.load(url) for _i in xrange(5): m = re.search(self.LINK_FREE_PATTERN, self.html) if m: self.link = m.group(1) break else: captcha = self.handleCaptcha() if not captcha: self.error(_("Captcha pattern not found")) response, challenge = captcha.challenge() self.html = self.load(url, post={'DownloadCaptchaForm[captcha]': "", 'adcopy_challenge' : challenge, 'adcopy_response' : response}) if "The verification code is incorrect" in self.html: self.invalidCaptcha() else: self.correctCaptcha() else: self.error(_("Download link")) def handleCaptcha(self): for klass in (AdsCaptcha, ReCaptcha, SolveMedia): inst = klass(self) if inst.detect_key(): return inst def getJsonResponse(self, url): res = self.load(url, decode=True) if not res.startswith('{'): self.retry() self.logDebug(url, res) return json_loads(res) getInfo = create_getInfo(RapidgatorNet)
gpl-3.0
3,505,129,110,890,734,000
33.603659
103
0.555947
false
3.575929
false
false
false
lwcook/horsetail-matching
horsetailmatching/weightedsum.py
1
7051
import pdb
import time
import math
import copy
import warnings

import numpy as np

from hm import HorsetailMatching

class WeightedSum(HorsetailMatching):
    '''Class for using weighted sum of moments within an optimization.

    The code is written such that all arguments that can be used at the
    initialization of a WeightedSum object can also be set as attributes
    after creation to achieve exactly the same effect.

    :param function fqoi: function that returns the quantity of interest, it
        must take two ordered arguments - the value of the design variable
        vector and the value of the uncertainty vector.

    :param list prob_uncertainties: list of probabilistic uncertainties.
        Is a list of UncertainParameter objects, or a list of
        functions that return samples of each uncertainty.

    :param bool/function jac: Argument that
        specifies how to evaluate the gradient of the quantity of interest.
        If False no gradients are propagated, if True the fqoi should return
        a second argument g such that g_i = dq/dx_i. If a function, it should
        have the same signature as fqoi but return g. [default False]

    :param int samples_prob: number of samples to take from the
        probabilistic uncertainties. [default 1000]

    :param function surrogate: Surrogate that is created at every design
        point to be sampled instead of fqoi. It should be a function that
        takes two arguments - an array with values of the uncertainties at
        which to fit the surrogate of size (num_quadrature_points,
        num_uncertainties), and an array of quantity of interest values
        corresponding to these uncertainty values to which to fit the
        surrogate of size (num_quadrature_points). It should return a
        function that predicts the qoi at an arbitrary value of the
        uncertainties. [default None]

    :param list surrogate_points: Only with a surrogate. List of points at
        which fqoi is evaluated to give values to fit the surrogates to. These
        are passed to the surrogate function along with the qoi evaluated at
        these points when the surrogate is fitted [by default tensor
        quadrature of 5 points in each uncertain dimension is used]

    :param bool/function surrogate_jac: Only with a surrogate. Specifies how
        to take surrogates of the gradient. It works similarly to the
        jac argument: if False, the same surrogate is fitted to fqoi and each
        component of its gradient, if True, the surrogate function is
        expected to take a third argument - an array that is the gradient
        at each of the quadrature points of size
        (num_quadrature_points, num_design_variables). If a function, then
        instead the array of uncertainty values and the array of gradient
        values are passed to this function and it should return a function for
        the surrogate model of the gradient.

    :param bool reuse_samples: If True will reuse the same set of samples of
        the uncertainties for evaluating the metric at any value of the
        design variables, if False it will re-sample every time evalMetric
        is called [default True]

    :param bool verbose: If True will print out details [default False].
''' def __init__(self, fqoi, prob_uncertainties, jac=False, samples_prob=1000, surrogate=None, surrogate_points=None, surrogate_jac=False, reuse_samples=True, verbose=False, w1=1, w2=1): self.fqoi = fqoi self.prob_uncertainties = prob_uncertainties self.int_uncertainties = [] self.jac = jac self.samples_prob = samples_prob self.samples_int = 1 self.reuse_samples = reuse_samples self.u_samples = None self.surrogate = surrogate self.surrogate_points = surrogate_points self.surrogate_jac = surrogate_jac self.verbose = verbose self.w1 = w1 self.w2 = w2 ############################################################################## ## Public Methods ############################################################################## def evalMetric(self, x, w1=None, w2=None): '''Evaluates the weighted sum metric at given values of the design variables. :param iterable x: values of the design variables, this is passed as the first argument to the function fqoi :param float w1: value to weight the mean by :param float w2: value to weight the std by :return: metric_value - value of the metric evaluated at the design point given by x :rtype: float ''' if w1 is None: w1 = self.w1 if w2 is None: w2 = self.w2 if self.verbose: print('----------') print('At design: ' + str(x)) self._N_dv = len(_makeIter(x)) if self.verbose: print('Evaluating surrogate') if self.surrogate is None: def fqoi(u): return self.fqoi(x, u) def fgrad(u): return self.jac(x, u) jac = self.jac else: fqoi, fgrad, surr_jac = self._makeSurrogates(x) jac = surr_jac u_samples = self._getParameterSamples() if self.verbose: print('Evaluating quantity of interest at samples') q_samples, grad_samples = self._evalSamples(u_samples, fqoi, fgrad, jac) if self.verbose: print('Evaluating metric') return self._evalWeightedSumMetric(q_samples, grad_samples) ############################################################################## ## Private methods ## ############################################################################## def _evalWeightedSumMetric(self, q_samples, grad_samples=None): fjs = np.array(q_samples).flatten() M = self.samples_prob mean = (1./M)*np.sum(fjs) var = (1./M)*np.sum([(fj - mean)**2 for fj in fjs]) ws = self.w1*mean + self.w2*np.sqrt(var) if grad_samples is None: return ws else: ndv = grad_samples.shape[2] gradjs = grad_samples[0, :, :] gradient = np.zeros(ndv) for kdv in range(ndv): meang, varg = 0., 0. for j, fj in enumerate(fjs): meang += (1./M)*float(gradjs[j, kdv]) varg += (1./M)*2*(fj - mean)*float(gradjs[j, kdv]) gradient[kdv] = meang + 0.5*(var**-0.5)*varg return ws, gradient def getHorsetail(self): return ([0], [0]), ([0], [0]), [([0], [0])] ## Private utility functions #def _finDiff(fobj, dv, f0=None, eps=10**-6): # # if f0 is None: # f0 = fobj(dv) # # fbase = copy.copy(f0) # fnew = fobj(dv + eps) # return float((fnew - fbase)/eps) def _makeIter(x): try: iter(x) return [xi for xi in x] except: return [x]
mit
8,754,911,680,858,841,000
34.791878
80
0.600624
false
4.12822
false
false
false
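The reduction in _evalWeightedSumMetric above is simply w1*mean + w2*std over the sampled qoi values, using the population (1/M) variance. A quick numpy check with illustrative numbers:

import numpy as np

q = np.array([1.0, 2.0, 4.0, 5.0])
w1, w2 = 1.0, 1.0
mean = q.mean()
std = q.std()                     # population std, matching the 1/M variance
print(w1 * mean + w2 * std)       # 3.0 + ~1.581 -> ~4.581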
garthee/gnot
modules/pstemmer.py
1
12097
class PorterStemmer: def __init__(self): """The main part of the stemming algorithm starts here. b is a buffer holding a word to be stemmed. The letters are in b[k0], b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is readjusted downwards as the stemming progresses. Zero termination is not in fact used in the algorithm. Note that only lower case sequences are stemmed. Forcing to lower case should be done before stem(...) is called. """ self.b = "" # buffer for word to be stemmed self.k = 0 self.k0 = 0 self.j = 0 # j is a general offset into the string def cons(self, i): """cons(i) is TRUE <=> b[i] is a consonant.""" if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' or self.b[i] == 'o' or self.b[i] == 'u': return 0 if self.b[i] == 'y': if i == self.k0: return 1 else: return (not self.cons(i - 1)) return 1 def m(self): """m() measures the number of consonant sequences between k0 and j. if c is a consonant sequence and v a vowel sequence, and <..> indicates arbitrary presence, <c><v> gives 0 <c>vc<v> gives 1 <c>vcvc<v> gives 2 <c>vcvcvc<v> gives 3 .... """ n = 0 i = self.k0 while 1: if i > self.j: return n if not self.cons(i): break i = i + 1 i = i + 1 while 1: while 1: if i > self.j: return n if self.cons(i): break i = i + 1 i = i + 1 n = n + 1 while 1: if i > self.j: return n if not self.cons(i): break i = i + 1 i = i + 1 def vowelinstem(self): """vowelinstem() is TRUE <=> k0,...j contains a vowel""" for i in range(self.k0, self.j + 1): if not self.cons(i): return 1 return 0 def doublec(self, j): """doublec(j) is TRUE <=> j,(j-1) contain a double consonant.""" if j < (self.k0 + 1): return 0 if (self.b[j] != self.b[j - 1]): return 0 return self.cons(j) def cvc(self, i): """cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant and also if the second c is not w,x or y. this is used when trying to restore an e at the end of a short e.g. cav(e), lov(e), hop(e), crim(e), but snow, box, tray. """ if i < (self.k0 + 2) or not self.cons(i) or self.cons(i - 1) or not self.cons(i - 2): return 0 ch = self.b[i] if ch == 'w' or ch == 'x' or ch == 'y': return 0 return 1 def ends(self, s): """ends(s) is TRUE <=> k0,...k ends with the string s.""" length = len(s) if s[length - 1] != self.b[self.k]: # tiny speed-up return 0 if length > (self.k - self.k0 + 1): return 0 if self.b[self.k - length + 1:self.k + 1] != s: return 0 self.j = self.k - length return 1 def setto(self, s): """setto(s) sets (j+1),...k to the characters in the string s, readjusting k.""" length = len(s) self.b = self.b[:self.j + 1] + s + self.b[self.j + length + 1:] self.k = self.j + length def r(self, s): """r(s) is used further down.""" if self.m() > 0: self.setto(s) def step1ab(self): """step1ab() gets rid of plurals and -ed or -ing. e.g. 
           caresses  ->  caress
           ponies    ->  poni
           ties      ->  ti
           caress    ->  caress
           cats      ->  cat

           feed      ->  feed
           agreed    ->  agree
           disabled  ->  disable

           matting   ->  mat
           mating    ->  mate
           meeting   ->  meet
           milling   ->  mill
           messing   ->  mess

           meetings  ->  meet
        """
        if self.b[self.k] == 's':
            if self.ends("sses"):
                self.k = self.k - 2
            elif self.ends("ies"):
                self.setto("i")
            elif self.b[self.k - 1] != 's':
                self.k = self.k - 1
        if self.ends("eed"):
            if self.m() > 0:
                self.k = self.k - 1
        elif (self.ends("ed") or self.ends("ing")) and self.vowelinstem():
            self.k = self.j
            if self.ends("at"):
                self.setto("ate")
            elif self.ends("bl"):
                self.setto("ble")
            elif self.ends("iz"):
                self.setto("ize")
            elif self.doublec(self.k):
                self.k = self.k - 1
                ch = self.b[self.k]
                if ch == 'l' or ch == 's' or ch == 'z':
                    self.k = self.k + 1
            elif (self.m() == 1 and self.cvc(self.k)):
                self.setto("e")

    def step1c(self):
        """step1c() turns terminal y to i when there is another vowel in the stem."""
        if (self.ends("y") and self.vowelinstem()):
            self.b = self.b[:self.k] + 'i' + self.b[self.k + 1:]

    def step2(self):
        """step2() maps double suffices to single ones.
        so -ization ( = -ize plus -ation) maps to -ize etc. note that the
        string before the suffix must give m() > 0.
        """
        if self.b[self.k - 1] == 'a':
            if self.ends("ational"):
                self.r("ate")
            elif self.ends("tional"):
                self.r("tion")
        elif self.b[self.k - 1] == 'c':
            if self.ends("enci"):
                self.r("ence")
            elif self.ends("anci"):
                self.r("ance")
        elif self.b[self.k - 1] == 'e':
            if self.ends("izer"):
                self.r("ize")
        elif self.b[self.k - 1] == 'l':
            if self.ends("bli"):
                self.r("ble")  # --DEPARTURE--
            # To match the published algorithm, replace this phrase with
            #   if self.ends("abli"):      self.r("able")
            elif self.ends("alli"):
                self.r("al")
            elif self.ends("entli"):
                self.r("ent")
            elif self.ends("eli"):
                self.r("e")
            elif self.ends("ousli"):
                self.r("ous")
        elif self.b[self.k - 1] == 'o':
            if self.ends("ization"):
                self.r("ize")
            elif self.ends("ation"):
                self.r("ate")
            elif self.ends("ator"):
                self.r("ate")
        elif self.b[self.k - 1] == 's':
            if self.ends("alism"):
                self.r("al")
            elif self.ends("iveness"):
                self.r("ive")
            elif self.ends("fulness"):
                self.r("ful")
            elif self.ends("ousness"):
                self.r("ous")
        elif self.b[self.k - 1] == 't':
            if self.ends("aliti"):
                self.r("al")
            elif self.ends("iviti"):
                self.r("ive")
            elif self.ends("biliti"):
                self.r("ble")
        elif self.b[self.k - 1] == 'g':  # --DEPARTURE--
            if self.ends("logi"):
                self.r("log")
        # To match the published algorithm, delete this phrase

    def step3(self):
        """step3() deals with -ic-, -ful, -ness etc.
similar strategy to step2.""" if self.b[self.k] == 'e': if self.ends("icate"): self.r("ic") elif self.ends("ative"): self.r("") elif self.ends("alize"): self.r("al") elif self.b[self.k] == 'i': if self.ends("iciti"): self.r("ic") elif self.b[self.k] == 'l': if self.ends("ical"): self.r("ic") elif self.ends("ful"): self.r("") elif self.b[self.k] == 's': if self.ends("ness"): self.r("") def step4(self): """step4() takes off -ant, -ence etc., in context <c>vcvc<v>.""" if self.b[self.k - 1] == 'a': if self.ends("al"): pass else: return elif self.b[self.k - 1] == 'c': if self.ends("ance"): pass elif self.ends("ence"): pass else: return elif self.b[self.k - 1] == 'e': if self.ends("er"): pass else: return elif self.b[self.k - 1] == 'i': if self.ends("ic"): pass else: return elif self.b[self.k - 1] == 'l': if self.ends("able"): pass elif self.ends("ible"): pass else: return elif self.b[self.k - 1] == 'n': if self.ends("ant"): pass elif self.ends("ement"): pass elif self.ends("ment"): pass elif self.ends("ent"): pass else: return elif self.b[self.k - 1] == 'o': if self.ends("ion") and (self.b[self.j] == 's' or self.b[self.j] == 't'): pass elif self.ends("ou"): pass # takes care of -ous else: return elif self.b[self.k - 1] == 's': if self.ends("ism"): pass else: return elif self.b[self.k - 1] == 't': if self.ends("ate"): pass elif self.ends("iti"): pass else: return elif self.b[self.k - 1] == 'u': if self.ends("ous"): pass else: return elif self.b[self.k - 1] == 'v': if self.ends("ive"): pass else: return elif self.b[self.k - 1] == 'z': if self.ends("ize"): pass else: return else: return if self.m() > 1: self.k = self.j def step5(self): """step5() removes a final -e if m() > 1, and changes -ll to -l if m() > 1. """ self.j = self.k if self.b[self.k] == 'e': a = self.m() if a > 1 or (a == 1 and not self.cvc(self.k - 1)): self.k = self.k - 1 if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1: self.k = self.k - 1 def stem(self, p, i, j): """In stem(p,i,j), p is a char pointer, and the string to be stemmed is from p[i] to p[j] inclusive. Typically i is zero and j is the offset to the last character of a string, (p[j+1] == '\0'). The stemmer adjusts the characters p[i] ... p[j] and returns the new end-point of the string, k. Stemming never increases word length, so i <= k <= j. To turn the stemmer into a module, declare 'stem' as extern, and delete the remainder of this file. """ # copy the parameters into statics self.b = p self.k = j self.k0 = i if self.k <= self.k0 + 1: return self.b # --DEPARTURE-- # With this line, strings of length 1 or 2 don't go through the # stemming process, although no mention is made of this in the # published algorithm. Remove the line to match the published # algorithm. self.step1ab() self.step1c() self.step2() self.step3() self.step4() self.step5() return self.b[self.k0:self.k + 1]
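# Illustrative usage sketch (not part of the original module; the word list
# is arbitrary): stem(p, i, j) takes the buffer plus inclusive start/end
# offsets and returns the stemmed string.
if __name__ == '__main__':
    stemmer = PorterStemmer()
    for word in ['caresses', 'ponies', 'agreed', 'meetings']:
        # lower-casing must be done before stem() is called
        word = word.lower()
        print(stemmer.stem(word, 0, len(word) - 1))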
mit
3,138,866,654,744,761,300
31.783198
108
0.42432
false
3.634916
false
false
false
eternalNight/ucore_app_go
misc/dashboard/godashboard/package.py
1
14975
# Copyright 2010 The Go Authors. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. # This is the server part of the package dashboard. # It must be run by App Engine. from google.appengine.api import mail from google.appengine.api import memcache from google.appengine.api import taskqueue from google.appengine.api import urlfetch from google.appengine.api import users from google.appengine.ext import db from google.appengine.ext import webapp from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app import datetime import logging import os import re import sets import urllib2 # local imports from auth import auth import toutf8 import const template.register_template_library('toutf8') # Storage model for package info recorded on server. class Package(db.Model): path = db.StringProperty() web_url = db.StringProperty() # derived from path count = db.IntegerProperty() # grand total week_count = db.IntegerProperty() # rolling weekly count day_count = db.TextProperty(default='') # daily count last_install = db.DateTimeProperty() # data contributed by gobuilder info = db.StringProperty() ok = db.BooleanProperty() last_ok = db.DateTimeProperty() def get_day_count(self): counts = {} if not self.day_count: return counts for d in str(self.day_count).split('\n'): date, count = d.split(' ') counts[date] = int(count) return counts def set_day_count(self, count): days = [] for day, count in count.items(): days.append('%s %d' % (day, count)) days.sort(reverse=True) days = days[:28] self.day_count = '\n'.join(days) def inc(self): count = self.get_day_count() today = str(datetime.date.today()) count[today] = count.get(today, 0) + 1 self.set_day_count(count) self.update_week_count(count) self.count += 1 def update_week_count(self, count=None): if count is None: count = self.get_day_count() total = 0 today = datetime.date.today() for i in range(7): day = str(today - datetime.timedelta(days=i)) if day in count: total += count[day] self.week_count = total # PackageDaily kicks off the daily package maintenance cron job # and serves the associated task queue. 
class PackageDaily(webapp.RequestHandler): def get(self): # queue a task to update each package with a week_count > 0 keys = Package.all(keys_only=True).filter('week_count >', 0) for key in keys: taskqueue.add(url='/package/daily', params={'key': key.name()}) def post(self): # update a single package (in a task queue) def update(key): p = Package.get_by_key_name(key) if not p: return p.update_week_count() p.put() key = self.request.get('key') if not key: return db.run_in_transaction(update, key) class Project(db.Model): name = db.StringProperty(indexed=True) descr = db.StringProperty() web_url = db.StringProperty() package = db.ReferenceProperty(Package) category = db.StringProperty(indexed=True) tags = db.ListProperty(str) approved = db.BooleanProperty(indexed=True) re_bitbucket = re.compile(r'^(bitbucket\.org/[a-z0-9A-Z_.\-]+/[a-zA-Z0-9_.\-]+)(/[a-z0-9A-Z_.\-/]+)?$') re_googlecode = re.compile(r'^[a-z0-9\-]+\.googlecode\.com/(svn|hg|git)(/[a-z0-9A-Z_.\-/]+)?$') re_github = re.compile(r'^github\.com/[a-z0-9A-Z_.\-]+(/[a-z0-9A-Z_.\-]+)+$') re_launchpad = re.compile(r'^launchpad\.net/([a-z0-9A-Z_.\-]+(/[a-z0-9A-Z_.\-]+)?|~[a-z0-9A-Z_.\-]+/(\+junk|[a-z0-9A-Z_.\-]+)/[a-z0-9A-Z_.\-]+)(/[a-z0-9A-Z_.\-/]+)?$') def vc_to_web(path): if re_bitbucket.match(path): m = re_bitbucket.match(path) check_url = 'http://' + m.group(1) + '/?cmd=heads' web = 'http://' + m.group(1) + '/' elif re_github.match(path): m = re_github_web.match(path) check_url = 'https://raw.github.com/' + m.group(1) + '/' + m.group(2) + '/master/' web = 'http://github.com/' + m.group(1) + '/' + m.group(2) + '/' elif re_googlecode.match(path): m = re_googlecode.match(path) check_url = 'http://'+path if not m.group(2): # append / after bare '/hg' or '/git' check_url += '/' web = 'http://code.google.com/p/' + path[:path.index('.')] elif re_launchpad.match(path): check_url = web = 'https://'+path else: return False, False return web, check_url re_bitbucket_web = re.compile(r'bitbucket\.org/([a-z0-9A-Z_.\-]+)/([a-z0-9A-Z_.\-]+)') re_googlecode_web = re.compile(r'code.google.com/p/([a-z0-9\-]+)') re_github_web = re.compile(r'github\.com/([a-z0-9A-Z_.\-]+)/([a-z0-9A-Z_.\-]+)') re_launchpad_web = re.compile(r'launchpad\.net/([a-z0-9A-Z_.\-]+(/[a-z0-9A-Z_.\-]+)?|~[a-z0-9A-Z_.\-]+/(\+junk|[a-z0-9A-Z_.\-]+)/[a-z0-9A-Z_.\-]+)(/[a-z0-9A-Z_.\-/]+)?') re_striphttp = re.compile(r'https?://(www\.)?') def find_googlecode_vcs(path): # Perform http request to path/hg or path/git to check if they're # using mercurial or git. Otherwise, assume svn. 
for vcs in ['git', 'hg']: try: response = urlfetch.fetch('http://'+path+vcs, deadline=1) if response.status_code == 200: return vcs except: pass return 'svn' def web_to_vc(url): url = re_striphttp.sub('', url) m = re_bitbucket_web.match(url) if m: return 'bitbucket.org/'+m.group(1)+'/'+m.group(2) m = re_github_web.match(url) if m: return 'github.com/'+m.group(1)+'/'+m.group(2) m = re_googlecode_web.match(url) if m: path = m.group(1)+'.googlecode.com/' vcs = find_googlecode_vcs(path) return path + vcs m = re_launchpad_web.match(url) if m: return m.group(0) return False MaxPathLength = 100 CacheTimeout = 3600 class PackagePage(webapp.RequestHandler): def get(self): if self.request.get('fmt') == 'json': return self.json() html = memcache.get('view-package') if not html: tdata = {} q = Package.all().filter('week_count >', 0) q.order('-week_count') tdata['by_week_count'] = q.fetch(50) q = Package.all() q.order('-last_install') tdata['by_time'] = q.fetch(20) q = Package.all() q.order('-count') tdata['by_count'] = q.fetch(100) path = os.path.join(os.path.dirname(__file__), 'package.html') html = template.render(path, tdata) memcache.set('view-package', html, time=CacheTimeout) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' self.response.out.write(html) def json(self): json = memcache.get('view-package-json') if not json: q = Package.all() s = '{"packages": [' sep = '' for r in q.fetch(1000): s += '%s\n\t{"path": "%s", "last_install": "%s", "count": "%s"}' % (sep, r.path, r.last_install, r.count) sep = ',' s += '\n]}\n' json = s memcache.set('view-package-json', json, time=CacheTimeout) self.response.set_status(200) self.response.headers['Content-Type'] = 'text/plain; charset=utf-8' self.response.out.write(json) def can_get_url(self, url): try: urllib2.urlopen(urllib2.Request(url)) return True except: return False def is_valid_package_path(self, path): return (re_bitbucket.match(path) or re_googlecode.match(path) or re_github.match(path) or re_launchpad.match(path)) def record_pkg(self, path): # sanity check string if not path or len(path) > MaxPathLength or not self.is_valid_package_path(path): return False # look in datastore key = 'pkg-' + path p = Package.get_by_key_name(key) if p is None: # not in datastore - verify URL before creating web, check_url = vc_to_web(path) if not web: logging.error('unrecognized path: %s', path) return False if not self.can_get_url(check_url): logging.error('cannot get %s', check_url) return False p = Package(key_name = key, path = path, count = 0, web_url = web) if auth(self.request): # builder updating package metadata p.info = self.request.get('info') p.ok = self.request.get('ok') == "true" if p.ok: p.last_ok = datetime.datetime.utcnow() else: # goinstall reporting an install p.inc() p.last_install = datetime.datetime.utcnow() # update package object p.put() return True def post(self): path = self.request.get('path') ok = db.run_in_transaction(self.record_pkg, path) if ok: self.response.set_status(200) self.response.out.write('ok') else: logging.error('invalid path in post: %s', path) self.response.set_status(500) self.response.out.write('not ok') class ProjectPage(webapp.RequestHandler): def get(self): admin = users.is_current_user_admin() if self.request.path == "/project/login": self.redirect(users.create_login_url("/project")) elif self.request.path == "/project/logout": self.redirect(users.create_logout_url("/project")) elif self.request.path == "/project/edit" and admin: self.edit() elif self.request.path == "/project/assoc" and admin: 
self.assoc() else: self.list() def assoc(self): projects = Project.all() for p in projects: if p.package: continue path = web_to_vc(p.web_url) if not path: continue pkg = Package.get_by_key_name("pkg-"+path) if not pkg: self.response.out.write('no: %s %s<br>' % (p.web_url, path)) continue p.package = pkg p.put() self.response.out.write('yes: %s %s<br>' % (p.web_url, path)) def post(self): if self.request.path == "/project/edit": self.edit(True) else: data = dict(map(lambda x: (x, self.request.get(x)), ["name","descr","web_url"])) if reduce(lambda x, y: x or not y, data.values(), False): data["submitMsg"] = "You must complete all the fields." self.list(data) return p = Project.get_by_key_name("proj-"+data["name"]) if p is not None: data["submitMsg"] = "A project by this name already exists." self.list(data) return p = Project(key_name="proj-"+data["name"], **data) p.put() path = os.path.join(os.path.dirname(__file__), 'project-notify.txt') mail.send_mail( sender=const.mail_from, to=const.mail_submit_to, subject=const.mail_submit_subject, body=template.render(path, {'project': p})) self.list({"submitMsg": "Your project has been submitted."}) def list(self, additional_data={}): cache_key = 'view-project-data' tag = self.request.get('tag', None) if tag: cache_key += '-'+tag data = memcache.get(cache_key) admin = users.is_current_user_admin() if admin or not data: projects = Project.all().order('category').order('name') if not admin: projects = projects.filter('approved =', True) projects = list(projects) tags = sets.Set() for p in projects: for t in p.tags: tags.add(t) if tag: projects = filter(lambda x: tag in x.tags, projects) data = {} data['tag'] = tag data['tags'] = tags data['projects'] = projects data['admin']= admin if not admin: memcache.set(cache_key, data, time=CacheTimeout) for k, v in additional_data.items(): data[k] = v self.response.headers['Content-Type'] = 'text/html; charset=utf-8' path = os.path.join(os.path.dirname(__file__), 'project.html') self.response.out.write(template.render(path, data)) def edit(self, save=False): if save: name = self.request.get("orig_name") else: name = self.request.get("name") p = Project.get_by_key_name("proj-"+name) if not p: self.response.out.write("Couldn't find that Project.") return if save: if self.request.get("do") == "Delete": p.delete() else: pkg_name = self.request.get("package", None) if pkg_name: pkg = Package.get_by_key_name("pkg-"+pkg_name) if pkg: p.package = pkg.key() for f in ['name', 'descr', 'web_url', 'category']: setattr(p, f, self.request.get(f, None)) p.approved = self.request.get("approved") == "1" p.tags = filter(lambda x: x, self.request.get("tags", "").split(",")) p.put() memcache.delete('view-project-data') self.redirect('/project') return # get all project categories and tags cats, tags = sets.Set(), sets.Set() for r in Project.all(): cats.add(r.category) for t in r.tags: tags.add(t) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' path = os.path.join(os.path.dirname(__file__), 'project-edit.html') self.response.out.write(template.render(path, { "taglist": tags, "catlist": cats, "p": p, "tags": ",".join(p.tags) })) def redirect(self, url): self.response.set_status(302) self.response.headers.add_header("Location", url) def main(): app = webapp.WSGIApplication([ ('/package', PackagePage), ('/package/daily', PackageDaily), ('/project.*', ProjectPage), ], debug=True) run_wsgi_app(app) if __name__ == '__main__': main()
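# Illustrative sketch (not part of the original handlers; the repository
# names below are hypothetical): vc_to_web/web_to_vc map between goinstall
# import paths and project URLs.
#
#   >>> vc_to_web('github.com/user/repo')
#   ('http://github.com/user/repo/', 'https://raw.github.com/user/repo/master/')
#   >>> web_to_vc('http://github.com/user/repo')
#   'github.com/user/repo'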
bsd-3-clause
-8,376,817,798,995,562,000
33.90676
169
0.54177
false
3.546897
false
false
false
rearmlkp/Smart_Flash
Flashcard/urls.py
1
2301
"""Flashcard URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin import API.views as views import Web.views as web_views # router.register(r'users', views.UserViewSet) # router.register(r'groups', views.GroupViewSet) urlpatterns = [ # Admin url(r'^admin/', admin.site.urls), # API Stuff url(r'^cards/(?P<pk>[0-9]+)$', views.card_detail), url(r'^login/$', views.login), url(r'^register/$', views.create_user), url(r'^decks/$', views.get_users_deck), url(r'^decks/create/$', views.create_deck), url(r'^decks/(?P<pk>[0-9]+)$', views.card_list), url(r'^decks/edit/(?P<pk>[0-9]+)$', views.edit_deck), url(r'^decks/delete/(?P<pk>[0-9]+)$', views.delete_deck), url(r'^card/edit/(?P<pk>[0-9]+)$', views.edit_card), url(r'^card/delete/(?P<pk>[0-9]+)$', views.delete_card), url(r'^decks/review/(?P<pk>[0-9]+)$', views.review_today), # Web Stuff url(r'^web/$', web_views.index, name='index'), url(r'^web/logout/$', web_views.logout, name='logout'), url(r'^web/register/$', web_views.register, name='register'), url(r'^web/deck/create$', web_views.deck_create, name='deck_create'), url(r'^web/deck/edform$', web_views.deck_edit_delete, name='deck_edit_delete'), url(r'^web/deck/(?P<pk>[0-9]+)$', web_views.deck_detail, name='deck_detail'), url(r'^web/deck/(?P<pk>[0-9]+)/card/create$', web_views.create_card, name='card_create'), url(r'^web/deck/review/(?P<pk>[0-9]+)$', web_views.review, name='review'), url(r'^web/deck/(?P<pk>[0-9]+)/edform$', web_views.card_edit_delete, name='card_edit_delete'), url(r'^api-auth/', include('rest_framework.urls')), ]
gpl-3.0
-7,804,049,766,153,713,000
41.611111
98
0.639722
false
2.996094
false
false
false
daira/zcash
contrib/seeds/generate-seeds.py
3
4418
#!/usr/bin/env python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php .
'''
Script to generate list of seed nodes for chainparams.cpp.

This script expects two text files in the directory that is passed as an
argument:

    nodes_main.txt
    nodes_test.txt

These files must consist of lines in the format

    <ip>
    <ip>:<port>
    [<ipv6>]
    [<ipv6>]:<port>
    <onion>.onion
    0xDDBBCCAA (IPv4 little-endian old pnSeeds format)

The output will be two data structures with the peers in binary format:

   static SeedSpec6 pnSeed6_main[]={
   ...
   }
   static SeedSpec6 pnSeed6_test[]={
   ...
   }

These should be pasted into `src/chainparamsseeds.h`.
'''

from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re

# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD, 0x87, 0xD8, 0x7E, 0xEB, 0x43])

def name_to_ipv6(addr):
    if len(addr) > 6 and addr.endswith('.onion'):
        vchAddr = b32decode(addr[0:-6], True)
        if len(vchAddr) != 16 - len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
        return pchOnionCat + vchAddr
    elif '.' in addr: # IPv4
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr: # IPv6
        sub = [[], []] # prefix, suffix
        x = 0
        addr = addr.split(':')
        for i, comp in enumerate(addr):
            if comp == '':
                if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
                    continue
                x += 1 # :: skips to suffix
                assert(x < 2)
            else: # two bytes per component
                val = int(comp, 16)
                sub[x].append(val >> 8)
                sub[x].append(val & 0xff)
        nullbytes = 16 - len(sub[0]) - len(sub[1])
        assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
        return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'): # IPv4-in-little-endian
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
    else:
        raise ValueError('Could not parse address %s' % addr)

def parse_spec(s, defaultport):
    match = re.match('\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if match: # ipv6
        host = match.group(1)
        port = match.group(2)
    elif s.count(':') > 1: # ipv6, no port
        host = s
        port = ''
    else:
        (host, _, port) = s.partition(':')

    if not port:
        port = defaultport
    else:
        port = int(port)

    host = name_to_ipv6(host)

    return (host, port)

def process_nodes(g, f, structname, defaultport):
    g.write('static SeedSpec6 %s[] = {\n' % structname)
    first = True
    for line in f:
        comment = line.find('#')
        if comment != -1:
            line = line[0:comment]
        line = line.strip()
        if not line:
            continue
        if not first:
            g.write(',\n')
        first = False

        (host, port) = parse_spec(line, defaultport)
        hoststr = ','.join(('0x%02x' % b) for b in host)
        g.write('    {{%s}, %i}' % (hoststr, port))
    g.write('\n};\n')

def main():
    if len(sys.argv) < 2:
        print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
        exit(1)
    g = sys.stdout
    indir = sys.argv[1]
    g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n')
    g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n')
    g.write('/**\n')
    g.write(' * List of fixed seed nodes for the bitcoin network\n')
    g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
    g.write(' *\n')
    g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
    g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
    g.write(' */\n')
    with open(os.path.join(indir, 'nodes_main.txt'), 'r', encoding="utf8") as f:
        process_nodes(g, f, 'pnSeed6_main', 8233)
    g.write('\n')
    with open(os.path.join(indir, 'nodes_test.txt'), 'r', encoding="utf8") as f:
        process_nodes(g, f, 'pnSeed6_test', 18233)
    g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')

if __name__ == '__main__':
    main()
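# Illustrative sketch (not part of the original script): name_to_ipv6()
# normalizes every supported input form to a 16-byte address, mapping IPv4
# into the ::ffff:0:0/96 prefix defined by pchIPv4 above.
#
#   >>> list(name_to_ipv6('1.2.3.4'))
#   [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 1, 2, 3, 4]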
mit
-2,900,496,788,579,006,500
31.014493
98
0.574015
false
3.185292
false
false
false
Vicaris/ModPro
moviepy/video/io/ffmpeg_tools.py
1
2297
""" Misc. bindings to ffmpeg and ImageMagick.""" import os import sys import subprocess as sp from moviepy.tools import subprocess_call from moviepy.config import get_setting def ffmpeg_movie_from_frames(filename, folder, fps, digits=6): """ Writes a movie out of the frames (picture files) in a folder. Almost deprecated. """ s = "%" + "%02d" % digits + "d.png" cmd = [get_setting("FFMPEG_BINARY"), "-y", "-f","image2", "-r", "%d"%fps, "-i", os.path.join(folder,folder) + '/' + s, "-b", "%dk"%bitrate, "-r", "%d"%self.fps, filename] subprocess_call(cmd) def ffmpeg_extract_subclip(filename, t1, t2, targetname=None): """ makes a new video file playing video file ``filename`` between the times ``t1`` and ``t2``. """ name,ext = os.path.splitext(filename) if not targetname: T1, T2 = [int(1000*t) for t in [t1, t2]] targetname = name+ "%sSUB%d_%d.%s"(name, T1, T2, ext) cmd = [get_setting("FFMPEG_BINARY"),"-y", "-i", filename, "-ss", "%0.2f"%t1, "-t", "%0.2f"%(t2-t1), "-vcodec", "copia", "-acodec", "copia", targetname] subprocess_call(cmd) def ffmpeg_merge_video_audio(video,audio,output, vcodec='copia', acodec='copia', ffmpeg_output=False, verbose = True): """ merges video file ``video`` and audio file ``audio`` into one movie file ``output``. """ cmd = [get_setting("FFMPEG_BINARY"), "-y", "-i", audio,"-i", video, "-vcodec", vcodec, "-acodec", acodec, output] subprocess_call(cmd, verbose = verbose) def ffmpeg_extract_audio(inputfile,output,bitrate=3000,fps=44100): """ extract the sound from a video file and save it in ``output`` """ cmd = [get_setting("FFMPEG_BINARY"), "-y", "-i", inputfile, "-ab", "%dk"%bitrate, "-ar", "%d"%fps, output] subprocess_call(cmd) def ffmpeg_resize(video,output,tamano): """ resizes ``video`` to new tamano ``tamano`` and write the result in file ``output``. """ cmd= [get_setting("FFMPEG_BINARY"), "-i", video, "-vf", "scale=%d:%d"%(res[0], res[1]), output] subprocess_call(cmd)
mit
703,182,229,391,548,000
32.289855
91
0.551589
false
3.295552
false
false
false
prheenan/Research
Perkins/Projects/WetLab/Util/DilutionUtil.py
1
13174
# force floating point division. Can still use integer with // from __future__ import division # This file is used for importing the common utilities classes. import numpy as np class DilutionObj: def __init__(self,StockConc,StockVol,DesiredConc,AddVol,Name=""): self.StockConc=StockConc self.StockVol=StockVol self.DesiredConc=DesiredConc self.AddVol=AddVol self.Name = Name class SolutionObj: def __init__(self,ArrayOfDilutionObjects): self.arr = ArrayOfDilutionObjects def GetVolumeToDilute(Concentration,Volume,DesiredConcentration): """ Gets the volume to dilute a sample with a given volume and concentration Args: Concentration: mass per volume of the system Volume: volume of the system. Units of mass/Concentration DesiredConcentration: desired concentration after the diltion Returns: Amount of additional volume to add to the system; total volume of Volume + (<return of this funciton>) gives the desired concentration """ # figure out how much stuff we have try: ng = Concentration*Volume except TypeError: ng = np.array(Concentration)*np.array(Volume) # what total volume do we need? volumeNeeded = ng/DesiredConcentration # how much do we need to add? volumeToAdd = volumeNeeded-Volume return volumeToAdd def PrintDilutions(StockConcs,StockVols,ConcDesired,UnitVol=None, UnitConc=None,**kwargs): """ Convenience wrapper: Print all the dilutions, given desired concentrations stocks, etc. Args: StockConcs,StockVols,ConcDesired: see GetDilutionObj UnitConc,UnitVol: see PrintVolumeDilutions **kwargs: ppassed directly to GetDilutionObj Returns: all the dilution objects """ if (UnitVol is None): UnitVol = ["uL" for _ in ConcDesired] if (UnitConc is None): UnitConc = ["ng/uL" for _ in ConcDesired] dilutionObj = GetDilutionObjects(StockConcs,StockVols,ConcDesired,**kwargs) _PrintVolumeDilutions(dilutionObj,UnitVol=UnitVol,UnitConc=UnitConc) return dilutionObj def GetDilutionObjects(StockConcs,StockVols,ConcDesired,**kwargs): """ Args: StockConcs,StockVols,ConcDesired: see GetDilutionObj **kwargs: ppassed directly to GetDilutionObj Returns: List of dilution objects """ dilutions = GetVolumeToDilute(StockConcs,StockVols,ConcDesired) dilutionObj = [GetDilutionObj(StockConcs,StockVols,ConcDesired,d,i, **kwargs) for i,d in enumerate(dilutions)] return dilutionObj def GetFromArrayOrScalar(Array,idx): """ Tries to get Array[idx]; otherwise just returns Array Args: Array: array-like idx: number Returns: relevant element of the array """ try: if (len(Array) > 1): return Array[idx] else: return Array[0] except (TypeError,IndexError) as e: return Array def GetDilutionObj(StockConcs,StockVols,ConcDesired,VolToAdd,Idx, StockName=""): """ Returns a Dilution object at a given index, given all the informaiton Args: StockConcs: array or scalar-like of stock concentrations StockVols: array or scalar-like of stock volumes ConcDesired: array or scalar-like of desired concentrations VolToAdd: array or scalar-like of volumes to add Idx: index within all the arrays we want StockName: optional names of the stock Returns: DilutionObj to use """ DesiredConc = GetFromArrayOrScalar(ConcDesired,Idx) StockVol = GetFromArrayOrScalar(StockVols,Idx) StockConc = GetFromArrayOrScalar(StockConcs,Idx) AddVol = GetFromArrayOrScalar(VolToAdd,Idx) StockName = GetFromArrayOrScalar(StockName,Idx) return DilutionObj(StockConc=StockConc,StockVol=StockVol, DesiredConc=DesiredConc, AddVol=AddVol,Name=StockName) def _DilutionString(dilutionObj,UnitVol,UnitConc): """ Args: dilutionObj: list of dilution objects UnitVol: string, unit of volume UnitConc: 
string, unit of Concentration Returns: String representation of dilution objects """ toRet = "" n = len(dilutionObj) for i,d in enumerate(dilutionObj): stockConcStr = "({:4.1f}{:s}@{:4.1f}{:s})".\ format(float(d.StockVol),UnitVol[i],float(d.StockConc), UnitConc[i]) volAddStr = "({:4.1f}{:s})".format(d.AddVol,UnitVol[i]) TotalVol = float(d.AddVol) + float(d.StockVol) toRet += ("{: <4} (#{:03d}) {: <20}" + "add {: <8} -> {:3.1f}{:6s} in {:3.1f}{:7s}").\ format(d.Name,i,stockConcStr,volAddStr,d.DesiredConc, UnitConc[i],TotalVol,UnitVol[i]) if (i != n-1): toRet += "\n" return toRet def _PrintVolumeDilutions(dilutionObj,**kwargs): """ Gets and prints all the dilution objects Args: dilutionObj: list of dilution objects **kwargs: see DilutionString """ print(_DilutionString(dilutionObj,**kwargs)) def GetVolumesNeededByConcentration(StockConcs,ConcsDesired,TotalVolume, AlreadyHaveMass=None): """ Given desired and stock concentrations and a final volume, gives the volumes needed of each stock Args: StockConcs: index i refers to some species, same units as ConcsDesired[i] ConcsDesired: what we want in the volume AlreadyHaveMass: if present, the mass already present in the buffer we will use. Element [i] should have the same 'mass' units as StockConcs[i] Returns: Array of volumes needed going from StockConcs to ConcsDesired in TotalVolume (note that TotalVolume-sum(<Return of this function>) is taken up by some unspecified buffer) """ if (AlreadyHaveMass is None): AlreadyHaveMass = np.zeros_like(StockConcs) StockArr = np.array(StockConcs) TotalVolumeNeeded = np.array(ConcsDesired)*TotalVolume/StockArr EffectiveVolumeAlreadyPresent = np.array(AlreadyHaveMass)/StockArr return TotalVolumeNeeded - EffectiveVolumeAlreadyPresent def SeriallyDilute(Stock,DesiredConcentrations,DesiredVolumes, dilution_concentration=0): """ Given a stock and desired concentraitons and desired volumes at each concentration, returns the list of stocks, volumes, dilutions, and final stocks Args: Stock: concentration, same units as elements of DesiredConcentrations DesiredConcentrations: array or scalar, same units as stock. These are the concentrations we want DesiredVolumes: scalar or array volumes, in units of au/<Stock>, we want for each dilution. Note that *actual* volume will be a bit more, since we need something to serially dilute with. E.g., if DesiredVolumes was 1L, we might need 10mL extra for 'downstream' Dilutions dilution_concentration: the concentration of whatever already in the stock. (i.e. if we aren't using something with a concentration of zero. For example, if diluting 100mM NaCl with 10mM dilution, Stock would be 100, DilutionConcentration would be 10 Returns Tuple of arrays, the elements are grouped from high to low concentrations for each of:<What stocks we used, What volumes of stocks, what volume we diluted with, what was the resulting stock>. Note that the first and last elements are just Stock and DesiredVolumes """ NumConc = len(DesiredConcentrations) MassNeededBelow = 0 VolumeStock = [] VolumeDilute = [] Stocks = [] ResultingStock = [] # work backwards with the last one first, determine what volume # and concentraton is needed for i in range(NumConc-1,-1,-1): VolumeNeeded = GetFromArrayOrScalar(DesiredVolumes,i) ConcNeeded = GetFromArrayOrScalar(DesiredConcentrations,i) # what mass is needed 'below' us? 
MassNeeded = ConcNeeded*VolumeNeeded MassNeededBelow += MassNeeded # determine what total volume of the final solution we need # (we know the mass, and the concentration is specified) V0 = MassNeededBelow/ConcNeeded TmpStock = Stock if (i==0) \ else GetFromArrayOrScalar(DesiredConcentrations,i-1) conc_diff = dilution_concentration - TmpStock # We are solving the following system: # c_stock * V_s + c_dilute * V_dilute = MassNeededBelow # V_s + V_dilute = V0 VolStock = (dilution_concentration*V0-MassNeededBelow)/conc_diff VolDilute = (MassNeededBelow-TmpStock*V0 )/conc_diff # we use the stock 'above' what we need here VolumeStock.append(VolStock) VolumeDilute.append(VolDilute) Stocks.append(TmpStock) ResultingStock.append(ConcNeeded) # reverse the arrays so we go big to small (natural order for dilution) RetSanitize = lambda x: x[::-1] RetArrs = Stocks,VolumeStock,VolumeDilute,ResultingStock return [RetSanitize(a) for a in RetArrs] def PrintSerialDilution(Stocks,VolumeStock,VolumeDilute,FinalStocks, VolString="uL",ConcString="ng/uL", BufferString="Buffer"): """ Given the results of SeriallyDilute, prints off the relevant information to Args: Stocks,VolumeStock,VolumeDilute,FinalStocks: output of SeriallyDilute VolString,ConcStrung: units for the volume and concentration """ for stock,VolStock,VolDilute,DilutedStock in \ zip(Stocks,VolumeStock,VolumeDilute,FinalStocks): VolumeTotal = VolStock + VolDilute StockStr = "{:5.3g}{:s} of {:5.3g}{:s} with {:5.3g}{:s} {:s}".\ format(VolStock,VolString,stock,ConcString,VolDilute, VolString,BufferString) ResultStr = "{:5.3g}{:s} of {:5.3g}{:s}".\ format(VolumeTotal,VolString,DilutedStock,ConcString) print("{:s} gives {:s}".format(StockStr,ResultStr)) def StockVolumeNeededForSerialDilution(Stock,Volumes,Desired): """ Gets the total volume needed of the 'stock' Args: see PrintSerialSteps """ _,VolumeStock,_,_ = SeriallyDilute(Stock,Desired,Volumes) return VolumeStock[0] def PrintSerialSteps(Stock,Volumes,Desired, ConcString="ng/uL",VolString="uL",BufferString="Buffer", **kwargs): """ Given a stock concentration, desired volumes and concentrations, prints out the steps needed to serially dilute Args: see PrintSerialDilution """ Stocks,VolumeStock,VolumeDilute,FinalStocks = \ SeriallyDilute(Stock,Desired,Volumes,**kwargs) PrintSerialDilution(Stocks,VolumeStock,VolumeDilute, FinalStocks,ConcString=ConcString, VolString=VolString,BufferString=BufferString) def PrintSolutionSteps(Stats,Volume,vol_units="uL",BufferName="buffer", PostVolume=0): """ Prints the steps to seriall dilute things Args: Stats: List of Tuples; each element is <Name,Concentration Unit, Stock Concentraiton, Desired concentration, mass present in solution already> PostVolume: if true, this is the volume to add after some step (e.g. thawing). We store the solution at a higher concentration """ # get the stocks, desired concntrations, and already-present concentraitons Stocks = [s[2] for s in Stats] Desired = [s[3] for s in Stats] Already = [s[4] for s in Stats] Volumes = GetVolumesNeededByConcentration(Stocks,Desired,Volume, AlreadyHaveMass=Already) BufferVolume = Volume - sum(Volumes) - PostVolume # check that our buffer is reasonable non-negative. if it is very close # to zero (less than 1% error), let it slide. assert (BufferVolume > -Volume/100) , \ "Warning: cant make this solution. Need a negative volume of buffer. 
"+\ "Use more concentrated stocks" print("In a total solution of {:.1f}{:s}...".format(Volume,vol_units)) for (name,conc_units,conc_stock,desired_conc,_),vol_stock in\ zip(Stats,Volumes): print("\t{:.2f}{:s} of {:.2f}{:s} {:s} for {:.2f}{:s} in solution".\ format(vol_stock,vol_units,conc_stock,conc_units,name, desired_conc,conc_units)) print("\tRemainder is ({:.1f}{:s}) of {:s}".format(BufferVolume, vol_units,BufferName)) if (PostVolume > 1e-12): print("\tTo use, add ({:.1f}{:s}) of {:s}".format(PostVolume, vol_units,BufferName))
gpl-3.0
7,359,071,448,070,813,000
38.921212
80
0.642781
false
3.717269
false
false
false
bjoernricks/kaizen
kaizen/phase/phase.py
1
2714
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
#
# kaizen - Continuously improve, build and manage free software
#
# Copyright (C) 2011  Björn Ricks <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

NONE = "None"
DOWNLOADED = "Downloaded"
EXTRACTED = "Extracted"
PATCHED = "Patched"
CONFIGURED = "Configured"
BUILT = "Built"
DESTROOTED = "Destrooted"
ACTIVATED = "Activated"


class UnknownPhaseError(Exception):

    def __init__(self, name):
        self.name = name

    def __str__(self):
        return "Phase '%s' does not exist." % (self.name)


class Phase(object):

    def __init__(self, name, value):
        self.value = value
        self.name = name

    def __cmp__(self, other):
        if self.value < other.value:
            return -1
        if self.value == other.value:
            return 0
        if self.value > other.value:
            return 1

    def __eq__(self, other):
        if not isinstance(other, Phase):
            return False
        return self.value == other.value

    def __ne__(self, other):
        # Python looks up __ne__ (not __neq__) for the != operator
        return not self.__eq__(other)

    def __hash__(self):
        return self.value

    def __repr__(self):
        return "<Phase name='%s' value='%s' id='%s'>" % (self.name,
                self.value, id(self))


class Phases(object):

    def __init__(self):
        self.phases = dict()
        self.phase_names = [
                            NONE,
                            DOWNLOADED,
                            EXTRACTED,
                            PATCHED,
                            CONFIGURED,
                            BUILT,
                            DESTROOTED,
                            ACTIVATED,
                           ]
        for i, name in enumerate(self.phase_names):
            self.phases[name] = Phase(name, i)

    def get(self, name):
        if not name in self.phases:
            raise UnknownPhaseError(name)
        return self.phases[name]


phases_list = Phases()
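# Illustrative usage sketch (not part of the original module): phases are
# ordered by their position in phase_names, so two phases can be compared
# to decide how far a build has progressed.
#
#   >>> downloaded = phases_list.get(DOWNLOADED)
#   >>> built = phases_list.get(BUILT)
#   >>> downloaded < built
#   True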
gpl-2.0
-6,345,546,671,171,965,000
27.557895
79
0.56948
false
4.025223
false
false
false
srinath-chakravarthy/ovito
tests/scripts/test_suite/wigner_seitz_modifier.py
1
1252
from ovito import * from ovito.io import * from ovito.modifiers import * import numpy as np node = import_file("../../files/NetCDF/sheared_aSi.nc") modifier = WignerSeitzAnalysisModifier() node.modifiers.append(modifier) modifier.reference.load("../../files/NetCDF/sheared_aSi.nc") dataset.anim.current_frame = 4 print("Parameter defaults:") print(" eliminate_cell_deformation: {}".format(modifier.eliminate_cell_deformation)) modifier.eliminate_cell_deformation = True print(" frame_offset: {}".format(modifier.frame_offset)) modifier.frame_offset = 0 print(" reference_frame: {}".format(modifier.reference_frame)) modifier.reference_frame = 0 print(" use_frame_offset: {}".format(modifier.use_frame_offset)) modifier.use_frame_offset = False node.compute() print("Output:") print(" vacancy_count= {}".format(modifier.vacancy_count)) print(" interstitial_count= {}".format(modifier.interstitial_count)) print(" vacancy_count= {}".format(node.output.attributes['WignerSeitz.vacancy_count'])) print(" interstitial_count= {}".format(node.output.attributes['WignerSeitz.interstitial_count'])) print(node.output["Occupancy"].array) assert(node.output.attributes['WignerSeitz.vacancy_count'] == 970) assert(modifier.vacancy_count == 970)
gpl-3.0
-175,022,617,238,454,720
31.947368
98
0.749201
false
3.1067
false
true
false
PopulationGenetics/pyucsc
ucsc/db.py
1
3664
""" UCSC Interface via SQLalchemy ============================= """ import os import re from sqlalchemy import sql from sqlalchemy import orm import sqlalchemy as sa import logging; log = logging.getLogger(__name__) import config import model Session = orm.sessionmaker() session = Session() initialized = False meta = None # Set up mappers # ============== class tables(object): """ Namespace for tables """ pass def abort_ro(*args,**kwargs): return def create_session(name, echo=False): """ load UCSC table definitions and create session """ global initialized, meta, DBSNP if initialized: return uri = config.get_database_uri(name) log.info('connecting to UCSC at ' + uri) engine = sa.create_engine(uri, echo=echo) Session.configure(bind=engine) conn = engine.connect() # try: # log.info('loading cached UCSC table definitions') # table_file = os.path.join(os.path.split(__file__)[0], '.tables.pickle') # meta = pickle.load(file(table_file)) # meta.bind = engine # except IOError: # print 'WARNING: could not load table metadata, please call cache_tables()' meta = sa.MetaData() meta.bind = conn meta.reflect() # populate tables namespace for (name, table) in meta.tables.items(): if 'wgEncode' not in name: setattr(tables, name, table) # KGXref is one to one with knownGene, so we can safely always use this join join_knowngene_xref = sql.join(tables.knownGene, tables.kgXref, tables.kgXref.c.kgID==tables.knownGene.c.name ) join_knowncanonical = join_knowngene_xref.join(tables.knownCanonical, # this join means known gene only returns canonical transcripts tables.knownCanonical.c.transcript==tables.knownGene.c.name ) # get the most recent snp table available snp_tables = sorted([x for x in meta.tables if re.match('snp\d\d\d$', x)]) snp_table = snp_tables[-1] DBSNP = meta.tables[snp_table] model.Snp.table = DBSNP orm.mapper(model.Snp, DBSNP, primary_key=DBSNP.c.name, properties={ 'class_': DBSNP.c['class'], }) if snp_table + 'Common' in meta.tables: commonSnp = meta.tables[snp_table + 'Common'] model.CommonSnp.table = commonSnp orm.mapper(model.CommonSnp, commonSnp, primary_key=commonSnp.c.name, properties={ 'class_': commonSnp.c['class'], }) # TODO: should remove this join? orm.mapper(model.KnownGene, join_knowngene_xref, primary_key=tables.knownGene.c.name, exclude_properties=[tables.knownCanonical.c.chrom] ) orm.mapper(model.KnownCanonical, join_knowncanonical, primary_key=tables.knownGene.c.name, exclude_properties=[tables.knownCanonical.c.chrom, tables.knownCanonical.c.transcript] ) orm.mapper(model.CcdsGene, tables.ccdsGene, primary_key=tables.ccdsGene.c.name) orm.mapper(model.RefGene, tables.refGene, primary_key=tables.refGene.c.name) orm.mapper(model.ChainSelf, tables.chainSelf, primary_key=tables.chainSelf.c.id) orm.mapper(model.ChainSelfLink, tables.chainSelfLink, primary_key=[tables.chainSelfLink.c.qStart, tables.chainSelfLink.c.chainId], properties={ 'chain': orm.relationship(model.ChainSelf, backref='links', primaryjoin=tables.chainSelfLink.c.chainId==tables.chainSelf.c.id, foreign_keys=[tables.chainSelfLink.c.chainId], lazy=False ), } ) # monkeypatch session to enforce readonly session.flush = abort_ro initialized = True model.session = session return session
bsd-3-clause
-1,658,897,240,787,379,700
31.424779
137
0.660753
false
3.499522
false
false
false
mathieubenoit/GDSII_Generator
generateWaferMap.py
1
2385
#!/usr/bin/python import os import numpy import gdspy ld_mask_edge = {'layer': 300, 'datatype': 0} ld_kerf = {'layer': 200, 'datatype': 0} ld_acfmask = {'layer': 100, 'datatype': 0} ld_topmetal= {'layer': 81, 'datatype': 0} ld_po= {'layer': 27, 'datatype': 1} def GenerateCell(chipX = 14100., chipY=16210.,leftKerf=85.,rightKerf=15.,topKerf=465.,botKerf=15.,narray_X=13,narray_Y=11,mask_width=254000.,wafer_offset_x=-570.0,wafer_offset_y=2595.0,wafer_radius=100000) : #Extract existing die mask top cell from GDS gdsii = gdspy.current_library.read_gds(infile='Timepix3_top_ACF_Nometal.GDS',layers=ld_acfmask) die = gdspy.current_library.extract("Timepix3_top") die_ref = gdspy.CellReference(die,origin=(leftKerf,botKerf)) #Create top reticle cell pixel_cell = gdspy.Cell("Reticle_top") # Create a kerf layer for visualization kerfWidth = leftKerf+rightKerf+chipX kerfHeight = topKerf+botKerf+chipY Kerf = gdspy.Rectangle((0,0), (kerfWidth, kerfHeight),**ld_kerf) # Add cells to the top cell pixel_cell.add(Kerf) pixel_cell.add(die_ref.get_polygonsets()) pixel_cell.add(die_ref.get_paths()) #Fill the Kerf with Resist pixel_cell.add(gdspy.Rectangle((0,0), (leftKerf, kerfHeight),**ld_acfmask)) pixel_cell.add(gdspy.Rectangle((0,0), (kerfWidth, botKerf),**ld_acfmask)) pixel_cell.add(gdspy.Rectangle((0,kerfHeight), (kerfWidth, kerfHeight-topKerf),**ld_acfmask)) pixel_cell.add(gdspy.Rectangle((kerfWidth-rightKerf,0), (kerfWidth, kerfHeight-topKerf),**ld_acfmask)) wafer_cell = gdspy.Cell('Wafer_Top') mask_edge = gdspy.Rectangle((-mask_width/2,-mask_width/2), (mask_width/2., mask_width/2.),**ld_mask_edge) array_origin_x = -narray_X*(leftKerf+rightKerf+chipX)/2. + wafer_offset_x array_origin_y = -narray_Y*(botKerf+topKerf+chipY)/2. + wafer_offset_y wafer_edge = gdspy.Path(1,(wafer_radius,0)) wafer_edge.arc(wafer_radius,0,360,layer=400) wafer_cell.add(wafer_edge) print kerfWidth,kerfHeight wafer_cell.add(gdspy.CellArray(pixel_cell,narray_X,narray_Y,spacing=(kerfWidth,kerfHeight),origin=(array_origin_x,array_origin_y))) wafer_cell.add(mask_edge) # View the resulting cell gdspy.LayoutViewer(cells=[wafer_cell],depth=1) gdspy.write_gds("wafer_mask.gds",cells=[wafer_cell,pixel_cell]) if __name__ == '__main__': GenerateCell()
lgpl-3.0
4,824,642,096,970,110,000
35.707692
207
0.690985
false
2.644124
false
false
false
matthew-brett/draft-statsmodels
scikits/statsmodels/sandbox/bspline.py
1
20284
''' Bspines and smoothing splines. General references: Craven, P. and Wahba, G. (1978) "Smoothing noisy data with spline functions. Estimating the correct degree of smoothing by the method of generalized cross-validation." Numerische Mathematik, 31(4), 377-403. Hastie, Tibshirani and Friedman (2001). "The Elements of Statistical Learning." Springer-Verlag. 536 pages. Hutchison, M. and Hoog, F. "Smoothing noisy data with spline functions." Numerische Mathematik, 47(1), 99-106. ''' import numpy as np import numpy.linalg as L from scipy.linalg import solveh_banded from scipy.optimize import golden from models import _hbspline # Issue warning regarding heavy development status of this module import warnings _msg = "The bspline code is technology preview and requires significant work\ on the public API and documentation. The API will likely change in the future" warnings.warn(_msg, UserWarning) def _band2array(a, lower=0, symmetric=False, hermitian=False): """ Take an upper or lower triangular banded matrix and return a numpy array. INPUTS: a -- a matrix in upper or lower triangular banded matrix lower -- is the matrix upper or lower triangular? symmetric -- if True, return the original result plus its transpose hermitian -- if True (and symmetric False), return the original result plus its conjugate transposed """ n = a.shape[1] r = a.shape[0] _a = 0 if not lower: for j in range(r): _b = np.diag(a[r-1-j],k=j)[j:(n+j),j:(n+j)] _a += _b if symmetric and j > 0: _a += _b.T elif hermitian and j > 0: _a += _b.conjugate().T else: for j in range(r): _b = np.diag(a[j],k=j)[0:n,0:n] _a += _b if symmetric and j > 0: _a += _b.T elif hermitian and j > 0: _a += _b.conjugate().T _a = _a.T return _a def _upper2lower(ub): """ Convert upper triangular banded matrix to lower banded form. INPUTS: ub -- an upper triangular banded matrix OUTPUTS: lb lb -- a lower triangular banded matrix with same entries as ub """ lb = np.zeros(ub.shape, ub.dtype) nrow, ncol = ub.shape for i in range(ub.shape[0]): lb[i,0:(ncol-i)] = ub[nrow-1-i,i:ncol] lb[i,(ncol-i):] = ub[nrow-1-i,0:i] return lb def _lower2upper(lb): """ Convert lower triangular banded matrix to upper banded form. INPUTS: lb -- a lower triangular banded matrix OUTPUTS: ub ub -- an upper triangular banded matrix with same entries as lb """ ub = np.zeros(lb.shape, lb.dtype) nrow, ncol = lb.shape for i in range(lb.shape[0]): ub[nrow-1-i,i:ncol] = lb[i,0:(ncol-i)] ub[nrow-1-i,0:i] = lb[i,(ncol-i):] return ub def _triangle2unit(tb, lower=0): """ Take a banded triangular matrix and return its diagonal and the unit matrix: the banded triangular matrix with 1's on the diagonal, i.e. each row is divided by the corresponding entry on the diagonal. INPUTS: tb -- a lower triangular banded matrix lower -- if True, then tb is assumed to be lower triangular banded, in which case return value is also lower triangular banded. OUTPUTS: d, b d -- diagonal entries of tb b -- unit matrix: if lower is False, b is upper triangular banded and its rows of have been divided by d, else lower is True, b is lower triangular banded and its columns have been divieed by d. """ if lower: d = tb[0].copy() else: d = tb[-1].copy() if lower: return d, (tb / d) else: l = _upper2lower(tb) return d, _lower2upper(l / d) def _trace_symbanded(a, b, lower=0): """ Compute the trace(ab) for two upper or banded real symmetric matrices stored either in either upper or lower form. 
INPUTS: a, b -- two banded real symmetric matrices (either lower or upper) lower -- if True, a and b are assumed to be the lower half OUTPUTS: trace trace -- trace(ab) """ if lower: t = _zero_triband(a * b, lower=1) return t[0].sum() + 2 * t[1:].sum() else: t = _zero_triband(a * b, lower=0) return t[-1].sum() + 2 * t[:-1].sum() def _zero_triband(a, lower=0): """ Explicitly zero out unused elements of a real symmetric banded matrix. INPUTS: a -- a real symmetric banded matrix (either upper or lower hald) lower -- if True, a is assumed to be the lower half """ nrow, ncol = a.shape if lower: for i in range(nrow): a[i,(ncol-i):] = 0. else: for i in range(nrow): a[i,0:i] = 0. return a class BSpline(object): ''' Bsplines of a given order and specified knots. Implementation is based on description in Chapter 5 of Hastie, Tibshirani and Friedman (2001). "The Elements of Statistical Learning." Springer-Verlag. 536 pages. INPUTS: knots -- a sorted array of knots with knots[0] the lower boundary, knots[1] the upper boundary and knots[1:-1] the internal knots. order -- order of the Bspline, default is 4 which yields cubic splines M -- number of additional boundary knots, if None it defaults to order coef -- an optional array of real-valued coefficients for the Bspline of shape (knots.shape + 2 * (M - 1) - order,). x -- an optional set of x values at which to evaluate the Bspline to avoid extra evaluation in the __call__ method ''' # FIXME: update parameter names, replace single character names # FIXME: `order` should be actual spline order (implemented as order+1) ## FIXME: update the use of spline order in extension code (evaluate is recursively called) # FIXME: eliminate duplicate M and m attributes (m is order, M is related to tau size) def __init__(self, knots, order=4, M=None, coef=None, x=None): knots = np.squeeze(np.unique(np.asarray(knots))) if knots.ndim != 1: raise ValueError, 'expecting 1d array for knots' self.m = order if M is None: M = self.m self.M = M self.tau = np.hstack([[knots[0]]*(self.M-1), knots, [knots[-1]]*(self.M-1)]) self.K = knots.shape[0] - 2 if coef is None: self.coef = np.zeros((self.K + 2 * self.M - self.m), np.float64) else: self.coef = np.squeeze(coef) if self.coef.shape != (self.K + 2 * self.M - self.m): raise ValueError, 'coefficients of Bspline have incorrect shape' if x is not None: self.x = x def _setx(self, x): self._x = x self._basisx = self.basis(self._x) def _getx(self): return self._x x = property(_getx, _setx) def __call__(self, *args): """ Evaluate the BSpline at a given point, yielding a matrix B and return B * self.coef INPUTS: args -- optional arguments. If None, it returns self._basisx, the BSpline evaluated at the x values passed in __init__. Otherwise, return the BSpline evaluated at the first argument args[0]. OUTPUTS: y y -- value of Bspline at specified x values BUGS: If self has no attribute x, an exception will be raised because self has no attribute _basisx. """ if not args: b = self._basisx.T else: x = args[0] b = np.asarray(self.basis(x)).T return np.squeeze(np.dot(b, self.coef)) def basis_element(self, x, i, d=0): """ Evaluate a particular basis element of the BSpline, or its derivative. 
INPUTS: x -- x values at which to evaluate the basis element i -- which element of the BSpline to return d -- the order of derivative OUTPUTS: y y -- value of d-th derivative of the i-th basis element of the BSpline at specified x values """ x = np.asarray(x, np.float64) _shape = x.shape if _shape == (): x.shape = (1,) x.shape = (np.product(_shape,axis=0),) if i < self.tau.shape[0] - 1: ## TODO: OWNDATA flags... v = _hbspline.evaluate(x, self.tau, self.m, d, i, i+1) else: return np.zeros(x.shape, np.float64) if (i == self.tau.shape[0] - self.m): v = np.where(np.equal(x, self.tau[-1]), 1, v) v.shape = _shape return v def basis(self, x, d=0, lower=None, upper=None): """ Evaluate the basis of the BSpline or its derivative. If lower or upper is specified, then only the [lower:upper] elements of the basis are returned. INPUTS: x -- x values at which to evaluate the basis element i -- which element of the BSpline to return d -- the order of derivative lower -- optional lower limit of the set of basis elements upper -- optional upper limit of the set of basis elements OUTPUTS: y y -- value of d-th derivative of the basis elements of the BSpline at specified x values """ x = np.asarray(x) _shape = x.shape if _shape == (): x.shape = (1,) x.shape = (np.product(_shape,axis=0),) if upper is None: upper = self.tau.shape[0] - self.m if lower is None: lower = 0 upper = min(upper, self.tau.shape[0] - self.m) lower = max(0, lower) d = np.asarray(d) if d.shape == (): v = _hbspline.evaluate(x, self.tau, self.m, int(d), lower, upper) else: if d.shape[0] != 2: raise ValueError, "if d is not an integer, expecting a jx2 \ array with first row indicating order \ of derivative, second row coefficient in front." v = 0 for i in range(d.shape[1]): v += d[1,i] * _hbspline.evaluate(x, self.tau, self.m, d[0,i], lower, upper) v.shape = (upper-lower,) + _shape if upper == self.tau.shape[0] - self.m: v[-1] = np.where(np.equal(x, self.tau[-1]), 1, v[-1]) return v def gram(self, d=0): """ Compute Gram inner product matrix, storing it in lower triangular banded form. The (i,j) entry is G_ij = integral b_i^(d) b_j^(d) where b_i are the basis elements of the BSpline and (d) is the d-th derivative. If d is a matrix then, it is assumed to specify a differential operator as follows: the first row represents the order of derivative with the second row the coefficient corresponding to that order. For instance: [[2, 3], [3, 1]] represents 3 * f^(2) + 1 * f^(3). INPUTS: d -- which derivative to apply to each basis element, if d is a matrix, it is assumed to specify a differential operator as above OUTPUTS: gram gram -- the matrix of inner products of (derivatives) of the BSpline elements """ d = np.squeeze(d) if np.asarray(d).shape == (): self.g = _hbspline.gram(self.tau, self.m, int(d), int(d)) else: d = np.asarray(d) if d.shape[0] != 2: raise ValueError, "if d is not an integer, expecting a jx2 \ array with first row indicating order \ of derivative, second row coefficient in front." if d.shape == (2,): d.shape = (2,1) self.g = 0 for i in range(d.shape[1]): for j in range(d.shape[1]): self.g += d[1,i]* d[1,j] * _hbspline.gram(self.tau, self.m, int(d[0,i]), int(d[0,j])) self.g = self.g.T self.d = d return np.nan_to_num(self.g) class SmoothingSpline(BSpline): penmax = 30. method = "target_df" target_df = 5 default_pen = 1.0e-03 optimize = True ''' A smoothing spline, which can be used to smooth scatterplots, i.e. a list of (x,y) tuples. See fit method for more information. 
''' def fit(self, y, x=None, weights=None, pen=0.): """ Fit the smoothing spline to a set of (x,y) pairs. INPUTS: y -- response variable x -- if None, uses self.x weights -- optional array of weights pen -- constant in front of Gram matrix OUTPUTS: None The smoothing spline is determined by self.coef, subsequent calls of __call__ will be the smoothing spline. ALGORITHM: Formally, this solves a minimization: fhat = ARGMIN_f SUM_i=1^n (y_i-f(x_i))^2 + pen * int f^(2)^2 int is integral. pen is lambda (from Hastie) See Chapter 5 of Hastie, Tibshirani and Friedman (2001). "The Elements of Statistical Learning." Springer-Verlag. 536 pages. for more details. TODO: Should add arbitrary derivative penalty instead of just second derivative. """ banded = True if x is None: x = self._x bt = self._basisx.copy() else: bt = self.basis(x) if pen == 0.: # can't use cholesky for singular matrices banded = False if x.shape != y.shape: raise ValueError, 'x and y shape do not agree, by default x are \ the Bspline\'s internal knots' if pen >= self.penmax: pen = self.penmax if weights is not None: self.weights = weights else: self.weights = 1. _w = np.sqrt(self.weights) bt *= _w # throw out rows with zeros (this happens at boundary points!) mask = np.flatnonzero(1 - np.alltrue(np.equal(bt, 0), axis=0)) bt = bt[:,mask] y = y[mask] self.df_total = y.shape[0] bty = np.squeeze(np.dot(bt, _w * y)) self.N = y.shape[0] if not banded: self.btb = np.dot(bt, bt.T) _g = _band2array(self.g, lower=1, symmetric=True) self.coef, _, self.rank = L.lstsq(self.btb + pen*_g, bty)[0:3] self.rank = min(self.rank, self.btb.shape[0]) del(_g) else: self.btb = np.zeros(self.g.shape, np.float64) nband, nbasis = self.g.shape for i in range(nbasis): for k in range(min(nband, nbasis-i)): self.btb[k,i] = (bt[i] * bt[i+k]).sum() bty.shape = (1,bty.shape[0]) self.pen = pen self.chol, self.coef = solveh_banded(self.btb + pen*self.g, bty, lower=1) self.coef = np.squeeze(self.coef) self.resid = y * self.weights - np.dot(self.coef, bt) self.pen = pen del(bty); del(mask); del(bt) def smooth(self, y, x=None, weights=None): if self.method == "target_df": if hasattr(self, 'pen'): self.fit(y, x=x, weights=weights, pen=self.pen) else: self.fit_target_df(y, x=x, weights=weights, df=self.target_df) elif self.method == "optimize_gcv": self.fit_optimize_gcv(y, x=x, weights=weights) def gcv(self): """ Generalized cross-validation score of current fit. Craven, P. and Wahba, G. "Smoothing noisy data with spline functions. Estimating the correct degree of smoothing by the method of generalized cross-validation." Numerische Mathematik, 31(4), 377-403. """ norm_resid = (self.resid**2).sum() return norm_resid / (self.df_total - self.trace()) def df_resid(self): """ Residual degrees of freedom in the fit. self.N - self.trace() where self.N is the number of observations of last fit. """ return self.N - self.trace() def df_fit(self): """ How many degrees of freedom used in the fit? self.trace() """ return self.trace() def trace(self): """ Trace of the smoothing matrix S(pen) TODO: addin a reference to Wahba, and whoever else I used. """ if self.pen > 0: _invband = _hbspline.invband(self.chol.copy()) tr = _trace_symbanded(_invband, self.btb, lower=1) return tr else: return self.rank def fit_target_df(self, y, x=None, df=None, weights=None, tol=1.0e-03, apen=0, bpen=1.0e-03): """ Fit smoothing spline with approximately df degrees of freedom used in the fit, i.e. so that self.trace() is approximately df. Uses binary search strategy. 
In general, df must be greater than the dimension of the null space of the Gram inner product. For cubic smoothing splines, this means that df > 2. INPUTS: y -- response variable x -- if None, uses self.x df -- target degrees of freedom weights -- optional array of weights tol -- (relative) tolerance for convergence apen -- lower bound of penalty for binary search bpen -- upper bound of penalty for binary search OUTPUTS: None The smoothing spline is determined by self.coef; subsequent calls of __call__ will be the smoothing spline. """ df = df or self.target_df olddf = y.shape[0] - self.m if hasattr(self, "pen"): self.fit(y, x=x, weights=weights, pen=self.pen) curdf = self.trace() if np.fabs(curdf - df) / df < tol: return if curdf > df: apen, bpen = self.pen, 2 * self.pen else: apen, bpen = 0., self.pen while True: curpen = 0.5 * (apen + bpen) self.fit(y, x=x, weights=weights, pen=curpen) curdf = self.trace() if curdf > df: apen, bpen = curpen, 2 * curpen else: apen, bpen = apen, curpen if apen >= self.penmax: raise ValueError, "penalty too large, try setting penmax higher or decreasing df" if np.fabs(curdf - df) / df < tol: break def fit_optimize_gcv(self, y, x=None, weights=None, tol=1.0e-03, brack=(-100,20)): """ Fit the smoothing spline by trying to optimize GCV. Try to find a bracketing interval for scipy.optimize.golden based on brack. It is probably best to use target_df instead, as it is sometimes difficult to find a bracketing interval. INPUTS: y -- response variable x -- if None, uses self.x weights -- optional array of weights tol -- (relative) tolerance for convergence brack -- an initial guess at the bracketing interval OUTPUTS: None The smoothing spline is determined by self.coef; subsequent calls of __call__ will be the smoothing spline. """ def _gcv(pen, y, x): self.fit(y, x=x, pen=np.exp(pen)) return self.gcv() # golden returns the log-penalty that minimizes GCV; refit so self.coef reflects the optimum logpen = golden(_gcv, args=(y,x), brack=brack, tol=tol) self.fit(y, x=x, weights=weights, pen=np.exp(logpen))
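# Usage sketch (illustrative, not from the original module). It assumes the
# BSpline base class defined earlier in this file accepts a knot sequence in
# its constructor; the data below is made up.
#
#   import numpy as np
#   x = np.linspace(0, 1, 101)
#   y = np.sin(2 * np.pi * x) + 0.1 * np.random.standard_normal(101)
#   spline = SmoothingSpline(x)          # knots (assumed constructor argument)
#   spline.gram(d=2)                     # penalize the second derivative
#   spline.fit_target_df(y, x=x, df=5)   # binary search on the penalty
#   yhat = spline(x)                     # __call__ evaluates the fitted spline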
bsd-3-clause
-464,578,479,505,752,400
29.640483
105
0.543581
false
3.713658
false
false
false
CrispyMcToast/bkup
src/fs/Scanner.py
1
4092
#!/usr/bin/python import threading import os import hashlib import time MAX_THREADS = 5 thread_count = 0 tc_lock = threading.Lock() def inc_count(): global thread_count tc_lock.acquire() thread_count += 1 tc_lock.release() def dec_count(): global thread_count tc_lock.acquire() thread_count -= 1 tc_lock.release() def get_count(): global thread_count tc_lock.acquire() count = thread_count tc_lock.release() return count hash_lock = threading.Lock() scanned_hash = {} def merge_hash(additive): hash_lock.acquire() scanned_hash.update(additive) hash_lock.release() def get_hash(): # NOTE: returns a reference to the shared dict, not a copy; callers should treat it as read-only hash_lock.acquire() h = scanned_hash hash_lock.release() return h class Scanner(threading.Thread): def __init__(self, base_path, master=True, appendable=""): threading.Thread.__init__(self) self.runnable = threading.Event() self.base_path = base_path self.total_processed = 0 self.hashed_files = {} self.subthreads = [] self.thread_lock = threading.Lock() self.appendable = appendable self.exit = False self.master = master self.complete = False def run(self): self.runnable.set() inc_count() self.scan() dec_count() while self.master and get_count() != 0: time.sleep(1) self.complete = True def finished(self): c = self.complete self.thread_lock.acquire() for s in self.subthreads: c = c and s.finished() self.thread_lock.release() return c def get_total(self): return self.total_processed def scan(self): path = "" for root, subdir, files in os.walk(self.base_path): path = root self.total_processed += 1 self.hashed_files[self.appendable+"/"] = 0 while get_count() < MAX_THREADS and len(subdir) > 0: appended_path = self.appendable+"/"+subdir[0] s = Scanner(root+"/"+subdir[0], master=False, appendable=appended_path) self.thread_lock.acquire() self.subthreads.append(s) self.thread_lock.release() s.start() del subdir[0] for f in files: try: self.runnable.wait() if self.exit: return fpath = path + "/" + f if not os.path.islink(fpath): h = self.hash(fpath) filep = self.remove_base(fpath) self.total_processed += 1 self.hashed_files[self.appendable+"/"+filep] = h except OSError: # unreadable or vanished file (includes PermissionError) -- skip it continue merge_hash(self.hashed_files) self.hashed_files = {} def remove_base(self, path): return path[len(self.base_path)+1:] def hash(self, path, blocksize=65536): # use a context manager so the file handle is closed instead of leaked with open(path, "rb") as f: hasher = hashlib.sha256() buf = f.read(blocksize) while len(buf) > 0: hasher.update(buf) buf = f.read(blocksize) return hasher.hexdigest() def pause(self): self.thread_lock.acquire() for s in self.subthreads: s.pause() self.thread_lock.release() self.runnable.clear() def unpause(self): self.thread_lock.acquire() for s in self.subthreads: s.unpause() self.thread_lock.release() self.runnable.set() def stop(self): self.thread_lock.acquire() for s in self.subthreads: s.stop() self.thread_lock.release() self.exit = True self.runnable.clear()
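if __name__ == "__main__":
    # Usage sketch ("/tmp" is a stand-in path, not part of the original file).
    # Scans a tree on up to MAX_THREADS threads, then prints path -> sha256.
    scanner = Scanner("/tmp")
    scanner.start()
    scanner.join()  # the master thread waits for its subthreads to finish
    for path, digest in get_hash().items():
        print(path, digest)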
gpl-3.0
-2,186,930,800,156,176,000
23.357143
72
0.516129
false
4.071642
false
false
false
Nettacker/Nettacker
lib/payload/shellcode/stack/engine.py
1
3693
#!/usr/bin/env python # -*- coding: utf-8 -*- import binascii from core.alert import error from core.compatible import version def shellcoder(shellcode): # rewrite a hex string (e.g. '31c0') as '\x'-escaped bytes ('\x31\xc0') n = 0 xshellcode = '\\x' for w in shellcode: n += 1 xshellcode += str(w) if n == 2: n = 0 xshellcode += '\\x' return xshellcode[:-2] def st(data): # hex-encode data in reversed (little-endian push) byte order if version() == 2: return str(binascii.b2a_hex(data[::-1])) if version() == 3: return (binascii.b2a_hex(data[::-1].encode('latin-1'))).decode('latin-1') def generate(data, register, gtype): length = len(data) if gtype == 'int': flag_8 = True try: data = hex(int(data, 8)) except ValueError: flag_8 = False if flag_8 is False: try: data = hex(int(data, 16)) except ValueError: error('hex or digit required!\nExit\n') if gtype == 'string': data = st(data) if length <= 3: if gtype == 'string': data = str('0x') + str(data) if len(data) % 2 != 0: data = data.replace('0x', '0x0') if len(data) == 8: data = data + '90\npop %s\nshr $0x8,%s\npush %s\n' % ( register, register, register) if len(data) == 6: data = data + '9090\npop %s\nshr $0x10,%s\npush %s\n' % ( register, register, register) if len(data) == 4: data = data + '909090\npop %s\nshr $0x10,%s\nshr $0x8,%s\npush %s\n' % ( register, register, register, register) data = str('push $') + str(data) if length >= 4: if gtype == 'int': data = data[2:] stack_content = data shr_counter = len(stack_content) % 8 shr = None if shr_counter == 2: shr = '\npop %s\nshr $0x10,%s\nshr $0x8,%s\npush %s\n' % ( register, register, register, register) stack_content = stack_content[0:2] + '909090' + stack_content[2:] if shr_counter == 4: shr = '\npop %s\nshr $0x10,%s\npush %s\n' % (register, register, register) stack_content = stack_content[0:4] + '9090' + stack_content[4:] if shr_counter == 6: shr = '\npop %s\nshr $0x8,%s\npush %s\n' % (register, register, register) stack_content = stack_content[0:6] + '90' + stack_content[6:] zshr = shr m = len(stack_content) n = int(len(stack_content) / 8) file_shellcode = '' if (len(stack_content) % 8) == 0: while (n != 0): if shr is not None: zx = m - 8 file_shellcode = 'push $0x' + str(stack_content[zx:m]) + '\n' + file_shellcode m -= 8 n = n - 1 shr = None if shr is None: zx = m - 8 file_shellcode = 'push $0x' + str(stack_content[zx:m]) + '\n' + file_shellcode m -= 8 n = n - 1 if zshr is None: file_z = file_shellcode if zshr is not None: rep1 = file_shellcode[:16] rep2 = rep1 + zshr file_z = file_shellcode.replace(rep1, rep2) data = file_z return data
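if __name__ == '__main__':
    # Illustration (values are arbitrary, not from the original module):
    # 'generate' emits push/shr assembly that stages a value on the stack,
    # and 'shellcoder' rewrites a hex string as \x.. escapes.
    print(generate('/bin/sh', '%ebx', 'string'))  # string pushed in 4-byte chunks
    print(shellcoder('31c050'))                   # -> \x31\xc0\x50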
gpl-3.0
-6,920,994,318,472,593,000
33.839623
84
0.426753
false
3.575024
false
false
false
destijl/grr
grr/gui/http_api.py
1
21177
#!/usr/bin/env python """HTTP API logic that ties API call handlers with HTTP routes.""" import json import time import traceback import urllib2 # pylint: disable=g-bad-import-order,unused-import from grr.gui import django_lib # pylint: enable=g-bad-import-order,unused-import from django import http from werkzeug import exceptions as werkzeug_exceptions from werkzeug import routing from google.protobuf import json_format from google.protobuf import symbol_database import logging from grr.gui import api_auth_manager from grr.gui import api_call_handler_base from grr.gui import api_call_router from grr.gui import api_value_renderers from grr.lib import access_control from grr.lib import config_lib from grr.lib import log from grr.lib import rdfvalue from grr.lib import registry from grr.lib import stats from grr.lib import utils from grr.lib.aff4_objects import users as aff4_users from grr.lib.rdfvalues import structs as rdf_structs from grr.proto import api_pb2 class Error(Exception): pass class PostRequestParsingError(Error): pass class UnsupportedHttpMethod(Error): pass class AdditionalArgsProcessingError(Error): pass class UnexpectedResultTypeError(Error): pass class ApiCallRouterNotFoundError(Error): pass class RouterMatcher(object): """Matches requests to routers (and caches them).""" def __init__(self): self._routing_maps_cache = utils.FastStore() def _BuildHttpRoutingMap(self, router_cls): """Builds a werkzeug routing map out of a given router class.""" if not issubclass(router_cls, api_call_router.ApiCallRouter): raise ValueError("Router has to be an instance of ApiCallRouter.") routing_map = routing.Map() # Note: we traverse methods of the base class (ApiCallRouter) to avoid # potential problems caused by child router classes using the @Http # annotation (thus adding additional unforeseen HTTP paths/methods). We # don't want the HTTP API to depend on a particular router implementation. for _, metadata in router_cls.GetAnnotatedMethods().items(): for http_method, path, unused_options in metadata.http_methods: routing_map.add( routing.Rule( path, methods=[http_method], endpoint=metadata)) # This adds support for the next version of the API that uses # standartized JSON protobuf serialization. routing_map.add( routing.Rule( path.replace("/api/", "/api/v2/"), methods=[http_method], endpoint=metadata)) return routing_map def _GetRoutingMap(self, router): """Returns a routing map for a given router instance.""" try: routing_map = self._routing_maps_cache.Get(router.__class__) except KeyError: routing_map = self._BuildHttpRoutingMap(router.__class__) self._routing_maps_cache.Put(router.__class__, routing_map) return routing_map def _SetField(self, args, type_info, value): """Sets fields on the arg rdfvalue object.""" if hasattr(type_info, "enum"): try: coerced_obj = type_info.enum[value.upper()] except KeyError: # A bool is an enum but serializes to "1" / "0" which are both not in # enum or reverse_enum. 
coerced_obj = type_info.type.FromSerializedString(value) else: coerced_obj = type_info.type.FromSerializedString(value) args.Set(type_info.name, coerced_obj) def _GetArgsFromRequest(self, request, method_metadata, route_args): """Builds args struct out of HTTP request.""" format_mode = GetRequestFormatMode(request, method_metadata) if request.method in ["GET", "HEAD"]: if method_metadata.args_type: unprocessed_request = request.GET if hasattr(unprocessed_request, "dict"): unprocessed_request = unprocessed_request.dict() args = method_metadata.args_type() for type_info in args.type_infos: if type_info.name in route_args: self._SetField(args, type_info, route_args[type_info.name]) elif type_info.name in unprocessed_request: self._SetField(args, type_info, unprocessed_request[type_info.name]) else: args = None elif request.method in ["POST", "DELETE", "PATCH"]: try: args = method_metadata.args_type() for type_info in args.type_infos: if type_info.name in route_args: self._SetField(args, type_info, route_args[type_info.name]) if request.META["CONTENT_TYPE"].startswith("multipart/form-data;"): payload = json.loads(request.POST["_params_"]) args.FromDict(payload) for name, fd in request.FILES.items(): args.Set(name, fd.read()) elif format_mode == JsonMode.PROTO3_JSON_MODE: # NOTE: Arguments rdfvalue has to be a protobuf-based RDFValue. args_proto = args.protobuf() json_format.Parse(request.body or "{}", args_proto) args.ParseFromString(args_proto.SerializeToString()) else: payload = json.loads(request.body or "{}") if payload: args.FromDict(payload) except Exception as e: # pylint: disable=broad-except logging.exception("Error while parsing POST request %s (%s): %s", request.path, request.method, e) raise PostRequestParsingError(e) else: raise UnsupportedHttpMethod("Unsupported method: %s." % request.method) return args def MatchRouter(self, request): """Returns a router for a given HTTP request.""" router = api_auth_manager.API_AUTH_MGR.GetRouterForUser(request.user) routing_map = self._GetRoutingMap(router) matcher = routing_map.bind("%s:%s" % (request.environ["SERVER_NAME"], request.environ["SERVER_PORT"])) try: match = matcher.match(request.path, request.method) except werkzeug_exceptions.NotFound: raise ApiCallRouterNotFoundError("No API router was found for (%s) %s" % (request.path, request.method)) router_method_metadata, route_args_dict = match return (router, router_method_metadata, self._GetArgsFromRequest(request, router_method_metadata, route_args_dict)) class JSONEncoderWithRDFPrimitivesSupport(json.JSONEncoder): """Custom JSON encoder that encodes handlers' output. A custom encoder is required to facilitate usage of primitive values - booleans, integers and strings - in handlers' responses. If a handler references an RDFString, an RDFInteger or an RDFBool when building a response, JSON encoding will fail when the response is encoded, unless this custom encoder is used. Another way to solve this issue would be to explicitly call api_value_renderers.RenderValue on every value returned from the renderer, but it will make the code look overly verbose and dirty.
""" def default(self, obj): if isinstance(obj, (rdfvalue.RDFInteger, rdfvalue.RDFBool, rdfvalue.RDFString)): return obj.SerializeToDataStore() return json.JSONEncoder.default(self, obj) class JsonMode(object): """Enum class for various JSON encoding modes.""" PROTO3_JSON_MODE = 0 GRR_JSON_MODE = 1 GRR_ROOT_TYPES_STRIPPED_JSON_MODE = 2 GRR_TYPE_STRIPPED_JSON_MODE = 3 def GetRequestFormatMode(request, method_metadata): """Returns JSON format mode corresponding to a given request and method.""" if request.path.startswith("/api/v2/"): return JsonMode.PROTO3_JSON_MODE if hasattr(request, "GET") and request.GET.get("strip_type_info", ""): return JsonMode.GRR_TYPE_STRIPPED_JSON_MODE for http_method, unused_url, options in method_metadata.http_methods: if (http_method == request.method and options.get("strip_root_types", False)): return JsonMode.GRR_ROOT_TYPES_STRIPPED_JSON_MODE return JsonMode.GRR_JSON_MODE class HttpRequestHandler(object): """Handles HTTP requests.""" @staticmethod def BuildToken(request, execution_time): """Build an ACLToken from the request.""" # The request.GET dictionary will also be filled on HEAD calls. if request.method in ["GET", "HEAD"]: reason = request.GET.get("reason", "") elif request.method in ["POST", "DELETE", "PATCH"]: # The header X-GRR-REASON is set in api-service.js, which django converts # to HTTP_X_GRR_REASON. reason = utils.SmartUnicode( urllib2.unquote(request.META.get("HTTP_X_GRR_REASON", ""))) # We assume that request.user contains the username that we can trust. # No matter what authentication method is used, the WebAuthManager is # responsible for authenticating the userand setting request.user to # a correct value (see gui/webauth.py). # # The token that's built here will be later used to find an API router, # get the ApiCallHandler from the router, and then to call the handler's # Handle() method. API router will be responsible for all the ACL checks. token = access_control.ACLToken( username=request.user, reason=reason, process="GRRAdminUI", expiry=rdfvalue.RDFDatetime.Now() + execution_time) for field in ["REMOTE_ADDR", "HTTP_X_FORWARDED_FOR"]: remote_addr = request.META.get(field, "") if remote_addr: token.source_ips.append(remote_addr) return token def _FormatResultAsJson(self, result, format_mode=None): if result is None: return dict(status="OK") if format_mode == JsonMode.PROTO3_JSON_MODE: return json.loads(json_format.MessageToJson(result.AsPrimitiveProto())) elif format_mode == JsonMode.GRR_ROOT_TYPES_STRIPPED_JSON_MODE: result_dict = {} for field, value in result.ListSetFields(): if isinstance(field, (rdf_structs.ProtoDynamicEmbedded, rdf_structs.ProtoEmbedded, rdf_structs.ProtoList)): result_dict[field.name] = api_value_renderers.RenderValue(value) else: result_dict[field.name] = api_value_renderers.RenderValue(value)[ "value"] return result_dict elif format_mode == JsonMode.GRR_TYPE_STRIPPED_JSON_MODE: rendered_data = api_value_renderers.RenderValue(result) return api_value_renderers.StripTypeInfo(rendered_data) elif format_mode == JsonMode.GRR_JSON_MODE: return api_value_renderers.RenderValue(result) else: raise ValueError("Invalid format_mode: %s", format_mode) @staticmethod def CallApiHandler(handler, args, token=None): """Handles API call to a given handler with given args and token.""" result = handler.Handle(args, token=token) expected_type = handler.result_type if expected_type is None: expected_type = None.__class__ if result.__class__ != expected_type: raise UnexpectedResultTypeError("Expected %s, but got %s." 
% (expected_type.__name__, result.__class__.__name__)) return result def __init__(self, router_matcher=None): self._router_matcher = router_matcher or RouterMatcher() def _BuildResponse(self, status, rendered_data, method_name=None, headers=None, token=None, no_audit_log=False): """Builds HTTPResponse object from rendered data and HTTP status.""" response = http.HttpResponse( status=status, content_type="application/json; charset=utf-8") response["Content-Disposition"] = "attachment; filename=response.json" response["X-Content-Type-Options"] = "nosniff" if token and token.reason: response["X-GRR-Reason"] = utils.SmartStr(token.reason) if method_name: response["X-API-Method"] = method_name if no_audit_log: response["X-No-Log"] = "True" for key, value in (headers or {}).items(): response[key] = value response.write(")]}'\n") # XSSI protection # To avoid IE content sniffing problems, escape the tags. Otherwise somebody # may send a link with malicious payload that will be opened in IE (which # does content sniffing and doesn't respect the Content-Disposition header) and # IE will treat the document as HTML and execute arbitrary JS that was # passed with the payload. str_data = json.dumps( rendered_data, cls=JSONEncoderWithRDFPrimitivesSupport) response.write(str_data.replace("<", r"\u003c").replace(">", r"\u003e")) return response def _BuildStreamingResponse(self, binary_stream, method_name=None): """Builds HTTPResponse object for streaming.""" response = http.StreamingHttpResponse( streaming_content=binary_stream.GenerateContent(), content_type="binary/octet-stream") response["Content-Disposition"] = ("attachment; filename=%s" % binary_stream.filename) if method_name: response["X-API-Method"] = method_name if binary_stream.content_length: response["Content-Length"] = binary_stream.content_length return response def HandleRequest(self, request): """Handles given HTTP request.""" impersonated_username = config_lib.CONFIG["AdminUI.debug_impersonate_user"] if impersonated_username: logging.info("Overriding user as %s", impersonated_username) request.user = config_lib.CONFIG["AdminUI.debug_impersonate_user"] if not aff4_users.GRRUser.IsValidUsername(request.user): return self._BuildResponse( 403, dict(message="Invalid username: %s" % request.user)) try: router, method_metadata, args = self._router_matcher.MatchRouter(request) except access_control.UnauthorizedAccess as e: logging.exception("Access denied to %s (%s): %s", request.path, request.method, e) additional_headers = { "X-GRR-Unauthorized-Access-Reason": utils.SmartStr(e.message), "X-GRR-Unauthorized-Access-Subject": utils.SmartStr(e.subject) } return self._BuildResponse( 403, dict( message="Access denied by ACL: %s" % utils.SmartStr(e.message), subject=utils.SmartStr(e.subject)), headers=additional_headers) except ApiCallRouterNotFoundError as e: return self._BuildResponse(404, dict(message=e.message)) except werkzeug_exceptions.MethodNotAllowed as e: return self._BuildResponse(405, dict(message=e.message)) except Error as e: logging.exception("Can't match URL to router/method: %s", e) return self._BuildResponse( 500, dict( message=str(e), traceBack=traceback.format_exc())) # SetUID() is called here so that ACL checks done by the router do not # clash with datastore ACL checks. # TODO(user): increase token expiry time. token = self.BuildToken(request, 60).SetUID() handler = None try: # ACL checks are done here by the router. If this method succeeds (i.e.
# does not raise), then handlers run without further ACL checks (they're # free to do some in their own implementations, though). handler = getattr(router, method_metadata.name)(args, token=token) if handler.args_type != method_metadata.args_type: raise RuntimeError("Handler args type doesn't match " "method args type: %s vs %s" % (handler.args_type, method_metadata.args_type)) binary_result_type = ( api_call_router.RouterMethodMetadata.BINARY_STREAM_RESULT_TYPE) if (handler.result_type != method_metadata.result_type and not (handler.result_type is None and method_metadata.result_type == binary_result_type)): raise RuntimeError("Handler result type doesn't match " "method result type: %s vs %s" % (handler.result_type, method_metadata.result_type)) # HEAD method is only used for checking the ACLs for particular API # methods. if request.method == "HEAD": # If the request would return a stream, we add the Content-Length # header to the response. if (method_metadata.result_type == method_metadata.BINARY_STREAM_RESULT_TYPE): binary_stream = handler.Handle(args, token=token) headers = None if binary_stream.content_length: headers = {"Content-Length": binary_stream.content_length} return self._BuildResponse( 200, {"status": "OK"}, method_name=method_metadata.name, headers=headers, no_audit_log=method_metadata.no_audit_log_required, token=token) else: return self._BuildResponse( 200, {"status": "OK"}, method_name=method_metadata.name, no_audit_log=method_metadata.no_audit_log_required, token=token) if (method_metadata.result_type == method_metadata.BINARY_STREAM_RESULT_TYPE): binary_stream = handler.Handle(args, token=token) return self._BuildStreamingResponse( binary_stream, method_name=method_metadata.name) else: format_mode = GetRequestFormatMode(request, method_metadata) result = self.CallApiHandler(handler, args, token=token) rendered_data = self._FormatResultAsJson( result, format_mode=format_mode) return self._BuildResponse( 200, rendered_data, method_name=method_metadata.name, no_audit_log=method_metadata.no_audit_log_required, token=token) except access_control.UnauthorizedAccess as e: logging.exception("Access denied to %s (%s) with %s: %s", request.path, request.method, method_metadata.name, e) additional_headers = { "X-GRR-Unauthorized-Access-Reason": utils.SmartStr(e.message), "X-GRR-Unauthorized-Access-Subject": utils.SmartStr(e.subject) } return self._BuildResponse( 403, dict( message="Access denied by ACL: %s" % e.message, subject=utils.SmartStr(e.subject)), headers=additional_headers, method_name=method_metadata.name, no_audit_log=method_metadata.no_audit_log_required, token=token) except api_call_handler_base.ResourceNotFoundError as e: return self._BuildResponse( 404, dict(message=e.message), method_name=method_metadata.name, no_audit_log=method_metadata.no_audit_log_required, token=token) except NotImplementedError as e: return self._BuildResponse( 501, dict(message=e.message), method_name=method_metadata.name, no_audit_log=method_metadata.no_audit_log_required, token=token) except Exception as e: # pylint: disable=broad-except logging.exception("Error while processing %s (%s) with %s: %s", request.path, request.method, handler.__class__.__name__, e) return self._BuildResponse( 500, dict( message=str(e), traceBack=traceback.format_exc()), method_name=method_metadata.name, no_audit_log=method_metadata.no_audit_log_required, token=token) def RenderHttpResponse(request): """Renders HTTP response to a given HTTP request.""" start_time = time.time() response = 
HTTP_REQUEST_HANDLER.HandleRequest(request) total_time = time.time() - start_time log.LOGGER.LogHttpApiCall(request, response) method_name = response.get("X-API-Method", "unknown") if response.status_code == 200: status = "SUCCESS" elif response.status_code == 403: status = "FORBIDDEN" elif response.status_code == 404: status = "NOT_FOUND" elif response.status_code == 501: status = "NOT_IMPLEMENTED" else: status = "SERVER_ERROR" if request.method == "HEAD": metric_name = "api_access_probe_latency" else: metric_name = "api_method_latency" stats.STATS.RecordEvent( metric_name, total_time, fields=(method_name, "http", status)) return response HTTP_REQUEST_HANDLER = None class HttpApiInitHook(registry.InitHook): """Register HTTP API handlers.""" def RunOnce(self): global HTTP_REQUEST_HANDLER HTTP_REQUEST_HANDLER = HttpRequestHandler() db = symbol_database.Default() # Register api_pb2.DESCRIPTOR in the database, so that all API-related # protos are recognized when Any messages are unpacked. db.RegisterFileDescriptor(api_pb2.DESCRIPTOR) stats.STATS.RegisterEventMetric( "api_method_latency", fields=[("method_name", str), ("protocol", str), ("status", str)]) stats.STATS.RegisterEventMetric( "api_access_probe_latency", fields=[("method_name", str), ("protocol", str), ("status", str)])
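# Illustration (hypothetical request paths): how GetRequestFormatMode above
# maps a request to a JSON format mode.
#   GET /api/v2/clients                    -> JsonMode.PROTO3_JSON_MODE
#   GET /api/clients?strip_type_info=1     -> JsonMode.GRR_TYPE_STRIPPED_JSON_MODE
#   route registered with strip_root_types=True
#                                          -> JsonMode.GRR_ROOT_TYPES_STRIPPED_JSON_MODE
#   any other request                      -> JsonMode.GRR_JSON_MODE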
apache-2.0
-6,772,753,926,901,431,000
35.893728
80
0.651934
false
4.030643
false
false
false
bgroff/kala-app
django_kala/auth/views/settings/avatar.py
1
2011
from django.conf import settings from django.contrib import messages from auth.forms.settings.avatar import AvatarForm from django.contrib.auth import get_user_model from django.contrib.auth.mixins import LoginRequiredMixin from django.core.exceptions import PermissionDenied from django.shortcuts import get_object_or_404, redirect from django.urls import reverse from django.utils.translation import ugettext as _ from django.views.generic.base import TemplateView from PIL import Image from io import BytesIO import logging size = 32, 32 class AvatarView(LoginRequiredMixin, TemplateView): template_name = 'accounts/settings/avatar.html' def get_context_data(self, **kwargs): return { 'form': self.form, 'user': self.user, } def dispatch(self, request, pk, *args, **kwargs): self.user = get_object_or_404(get_user_model().objects.all(), pk=pk) if not request.user.is_superuser and request.user != self.user: raise PermissionDenied(_('You do not have permission to edit this user.')) self.form = AvatarForm(request.POST or None, request.FILES or None) return super(AvatarView, self).dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): if self.form.is_valid(): try: avatar_content = request.FILES['avatar'].read() avatar = Image.open(BytesIO(avatar_content)) avatar = avatar.resize(size) avatar_out = BytesIO() avatar.save(avatar_out, format='PNG') avatar_out.seek(0) manager = settings.PLATFORM_MANAGER() manager.upload_avatar(avatar_out, self.user) messages.success(request, _('The avatar has been updated.')) except Exception: # report the failure instead of silently claiming success logging.exception('Avatar upload failed') messages.error(request, _('The avatar could not be updated.')) return redirect(reverse('users:avatar', args=[self.user.pk])) return self.render_to_response(self.get_context_data())
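# Minimal sketch of the resize-and-reencode step used in post() above,
# standalone ('input.jpg' is a hypothetical file, not part of the app):
#
#   from io import BytesIO
#   from PIL import Image
#
#   with open('input.jpg', 'rb') as fd:
#       thumb = Image.open(BytesIO(fd.read())).resize((32, 32))
#   out = BytesIO()
#   thumb.save(out, format='PNG')
#   out.seek(0)  # rewind so a storage backend can read from the start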
mit
-1,904,576,512,794,497,300
34.910714
86
0.652909
false
4.19833
false
false
false
google/mobly
tests/mobly/controllers/android_device_lib/callback_handler_test.py
1
5977
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import mock import unittest from mobly.controllers.android_device_lib import callback_handler from mobly.controllers.android_device_lib import jsonrpc_client_base MOCK_CALLBACK_ID = "1-0" MOCK_RAW_EVENT = { 'callbackId': '2-1', 'name': 'AsyncTaskResult', 'time': 20460228696, 'data': { 'exampleData': "Here's a simple event.", 'successful': True, 'secretNumber': 12 } } class CallbackHandlerTest(unittest.TestCase): """Unit tests for mobly.controllers.android_device_lib.callback_handler. """ def test_timeout_value(self): self.assertGreaterEqual(jsonrpc_client_base._SOCKET_READ_TIMEOUT, callback_handler.MAX_TIMEOUT) def test_callback_id_property(self): mock_event_client = mock.Mock() handler = callback_handler.CallbackHandler(callback_id=MOCK_CALLBACK_ID, event_client=mock_event_client, ret_value=None, method_name=None, ad=mock.Mock()) self.assertEqual(handler.callback_id, MOCK_CALLBACK_ID) with self.assertRaisesRegex(AttributeError, "can't set attribute"): handler.callback_id = 'ha' def test_event_dict_to_snippet_event(self): mock_event_client = mock.Mock() mock_event_client.eventWaitAndGet = mock.Mock(return_value=MOCK_RAW_EVENT) handler = callback_handler.CallbackHandler(callback_id=MOCK_CALLBACK_ID, event_client=mock_event_client, ret_value=None, method_name=None, ad=mock.Mock()) event = handler.waitAndGet('ha') self.assertEqual(event.name, MOCK_RAW_EVENT['name']) self.assertEqual(event.creation_time, MOCK_RAW_EVENT['time']) self.assertEqual(event.data, MOCK_RAW_EVENT['data']) self.assertEqual(event.callback_id, MOCK_RAW_EVENT['callbackId']) def test_wait_and_get_timeout(self): mock_event_client = mock.Mock() java_timeout_msg = ('com.google.android.mobly.snippet.event.' 
'EventSnippet$EventSnippetException: timeout.') mock_event_client.eventWaitAndGet = mock.Mock( side_effect=jsonrpc_client_base.ApiError(mock.Mock(), java_timeout_msg)) handler = callback_handler.CallbackHandler(callback_id=MOCK_CALLBACK_ID, event_client=mock_event_client, ret_value=None, method_name=None, ad=mock.Mock()) expected_msg = 'Timed out after waiting .*s for event "ha" .*' with self.assertRaisesRegex(callback_handler.TimeoutError, expected_msg): handler.waitAndGet('ha') def test_wait_for_event(self): mock_event_client = mock.Mock() mock_event_client.eventWaitAndGet = mock.Mock(return_value=MOCK_RAW_EVENT) handler = callback_handler.CallbackHandler(callback_id=MOCK_CALLBACK_ID, event_client=mock_event_client, ret_value=None, method_name=None, ad=mock.Mock()) def some_condition(event): return event.data['successful'] event = handler.waitForEvent('AsyncTaskResult', some_condition, 0.01) def test_wait_for_event_negative(self): mock_event_client = mock.Mock() mock_event_client.eventWaitAndGet = mock.Mock(return_value=MOCK_RAW_EVENT) handler = callback_handler.CallbackHandler(callback_id=MOCK_CALLBACK_ID, event_client=mock_event_client, ret_value=None, method_name=None, ad=mock.Mock()) expected_msg = ( 'Timed out after 0.01s waiting for an "AsyncTaskResult" event that' ' satisfies the predicate "some_condition".') def some_condition(event): return False with self.assertRaisesRegex(callback_handler.TimeoutError, expected_msg): handler.waitForEvent('AsyncTaskResult', some_condition, 0.01) def test_wait_for_event_max_timeout(self): """waitForEvent should not raise the timeout exceed threshold error. """ mock_event_client = mock.Mock() mock_event_client.eventWaitAndGet = mock.Mock(return_value=MOCK_RAW_EVENT) handler = callback_handler.CallbackHandler(callback_id=MOCK_CALLBACK_ID, event_client=mock_event_client, ret_value=None, method_name=None, ad=mock.Mock()) def some_condition(event): return event.data['successful'] big_timeout = callback_handler.MAX_TIMEOUT * 2 # This line should not raise. event = handler.waitForEvent('AsyncTaskResult', some_condition, timeout=big_timeout) if __name__ == "__main__": unittest.main()
apache-2.0
958,428,415,450,603,900
42.948529
80
0.575205
false
4.375549
true
false
false