Dataset columns:
repo_name: string, lengths 5 to 100
path: string, lengths 4 to 375
copies: string, 991 distinct values
size: string, lengths 4 to 7
content: string, lengths 666 to 1M
license: string, 15 distinct values
repo_name: glwu/python-for-android
path: python3-alpha/python3-src/Tools/scripts/untabify.py
copies: 49
size: 1298
content:
#! /usr/bin/env python3

"Replace tabs with spaces in argument files. Print names of changed files."

import os
import sys
import getopt
import tokenize


def main():
    tabsize = 8
    try:
        opts, args = getopt.getopt(sys.argv[1:], "t:")
        if not args:
            raise getopt.error("At least one file argument required")
    except getopt.error as msg:
        print(msg)
        print("usage:", sys.argv[0], "[-t tabwidth] file ...")
        return
    for optname, optvalue in opts:
        if optname == '-t':
            tabsize = int(optvalue)

    for filename in args:
        process(filename, tabsize)


def process(filename, tabsize, verbose=True):
    try:
        with tokenize.open(filename) as f:
            text = f.read()
            encoding = f.encoding
    except IOError as msg:
        print("%r: I/O error: %s" % (filename, msg))
        return
    newtext = text.expandtabs(tabsize)
    if newtext == text:
        return
    backup = filename + "~"
    try:
        os.unlink(backup)
    except os.error:
        pass
    try:
        os.rename(filename, backup)
    except os.error:
        pass
    with open(filename, "w", encoding=encoding) as f:
        f.write(newtext)
    if verbose:
        print(filename)


if __name__ == '__main__':
    main()
license: apache-2.0
repo_name: paweljasinski/ironpython3
path: Src/StdLib/Lib/test/test_threading.py
copies: 72
size: 36686
content:
""" Tests for the threading module. """ import test.support from test.support import verbose, strip_python_stderr, import_module, cpython_only from test.script_helper import assert_python_ok import random import re import sys _thread = import_module('_thread') threading = import_module('threading') import time import unittest import weakref import os from test.script_helper import assert_python_ok, assert_python_failure import subprocess from test import lock_tests # Between fork() and exec(), only async-safe functions are allowed (issues # #12316 and #11870), and fork() from a worker thread is known to trigger # problems with some operating systems (issue #3863): skip problematic tests # on platforms known to behave badly. platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5', 'hp-ux11') # A trivial mutable counter. class Counter(object): def __init__(self): self.value = 0 def inc(self): self.value += 1 def dec(self): self.value -= 1 def get(self): return self.value class TestThread(threading.Thread): def __init__(self, name, testcase, sema, mutex, nrunning): threading.Thread.__init__(self, name=name) self.testcase = testcase self.sema = sema self.mutex = mutex self.nrunning = nrunning def run(self): delay = random.random() / 10000.0 if verbose: print('task %s will run for %.1f usec' % (self.name, delay * 1e6)) with self.sema: with self.mutex: self.nrunning.inc() if verbose: print(self.nrunning.get(), 'tasks are running') self.testcase.assertTrue(self.nrunning.get() <= 3) time.sleep(delay) if verbose: print('task', self.name, 'done') with self.mutex: self.nrunning.dec() self.testcase.assertTrue(self.nrunning.get() >= 0) if verbose: print('%s is finished. %d tasks are running' % (self.name, self.nrunning.get())) class BaseTestCase(unittest.TestCase): def setUp(self): self._threads = test.support.threading_setup() def tearDown(self): test.support.threading_cleanup(*self._threads) test.support.reap_children() class ThreadTests(BaseTestCase): # Create a bunch of threads, let each do some work, wait until all are # done. def test_various_ops(self): # This takes about n/3 seconds to run (about n/3 clumps of tasks, # times about 1 second per clump). NUMTASKS = 10 # no more than 3 of the 10 can run at once sema = threading.BoundedSemaphore(value=3) mutex = threading.RLock() numrunning = Counter() threads = [] for i in range(NUMTASKS): t = TestThread("<thread %d>"%i, self, sema, mutex, numrunning) threads.append(t) self.assertEqual(t.ident, None) self.assertTrue(re.match('<TestThread\(.*, initial\)>', repr(t))) t.start() if verbose: print('waiting for all tasks to complete') for t in threads: t.join() self.assertTrue(not t.is_alive()) self.assertNotEqual(t.ident, 0) self.assertFalse(t.ident is None) self.assertTrue(re.match('<TestThread\(.*, stopped -?\d+\)>', repr(t))) if verbose: print('all tasks done') self.assertEqual(numrunning.get(), 0) def test_ident_of_no_threading_threads(self): # The ident still must work for the main thread and dummy threads. 
self.assertFalse(threading.currentThread().ident is None) def f(): ident.append(threading.currentThread().ident) done.set() done = threading.Event() ident = [] _thread.start_new_thread(f, ()) done.wait() self.assertFalse(ident[0] is None) # Kill the "immortal" _DummyThread del threading._active[ident[0]] # run with a small(ish) thread stack size (256kB) def test_various_ops_small_stack(self): if verbose: print('with 256kB thread stack size...') try: threading.stack_size(262144) except _thread.error: raise unittest.SkipTest( 'platform does not support changing thread stack size') self.test_various_ops() threading.stack_size(0) # run with a large thread stack size (1MB) def test_various_ops_large_stack(self): if verbose: print('with 1MB thread stack size...') try: threading.stack_size(0x100000) except _thread.error: raise unittest.SkipTest( 'platform does not support changing thread stack size') self.test_various_ops() threading.stack_size(0) def test_foreign_thread(self): # Check that a "foreign" thread can use the threading module. def f(mutex): # Calling current_thread() forces an entry for the foreign # thread to get made in the threading._active map. threading.current_thread() mutex.release() mutex = threading.Lock() mutex.acquire() tid = _thread.start_new_thread(f, (mutex,)) # Wait for the thread to finish. mutex.acquire() self.assertIn(tid, threading._active) self.assertIsInstance(threading._active[tid], threading._DummyThread) del threading._active[tid] # PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently) # exposed at the Python level. This test relies on ctypes to get at it. def test_PyThreadState_SetAsyncExc(self): ctypes = import_module("ctypes") set_async_exc = ctypes.pythonapi.PyThreadState_SetAsyncExc class AsyncExc(Exception): pass exception = ctypes.py_object(AsyncExc) # First check it works when setting the exception from the same thread. tid = threading.get_ident() try: result = set_async_exc(ctypes.c_long(tid), exception) # The exception is async, so we might have to keep the VM busy until # it notices. while True: pass except AsyncExc: pass else: # This code is unreachable but it reflects the intent. If we wanted # to be smarter the above loop wouldn't be infinite. self.fail("AsyncExc not raised") try: self.assertEqual(result, 1) # one thread state modified except UnboundLocalError: # The exception was raised too quickly for us to get the result. pass # `worker_started` is set by the thread when it's inside a try/except # block waiting to catch the asynchronously set AsyncExc exception. # `worker_saw_exception` is set by the thread upon catching that # exception. worker_started = threading.Event() worker_saw_exception = threading.Event() class Worker(threading.Thread): def run(self): self.id = threading.get_ident() self.finished = False try: while True: worker_started.set() time.sleep(0.1) except AsyncExc: self.finished = True worker_saw_exception.set() t = Worker() t.daemon = True # so if this fails, we don't hang Python at shutdown t.start() if verbose: print(" started worker thread") # Try a thread id that doesn't make sense. if verbose: print(" trying nonsensical thread id") result = set_async_exc(ctypes.c_long(-1), exception) self.assertEqual(result, 0) # no thread states modified # Now raise an exception in the worker thread. 
if verbose: print(" waiting for worker thread to get started") ret = worker_started.wait() self.assertTrue(ret) if verbose: print(" verifying worker hasn't exited") self.assertTrue(not t.finished) if verbose: print(" attempting to raise asynch exception in worker") result = set_async_exc(ctypes.c_long(t.id), exception) self.assertEqual(result, 1) # one thread state modified if verbose: print(" waiting for worker to say it caught the exception") worker_saw_exception.wait(timeout=10) self.assertTrue(t.finished) if verbose: print(" all OK -- joining worker") if t.finished: t.join() # else the thread is still running, and we have no way to kill it def test_limbo_cleanup(self): # Issue 7481: Failure to start thread should cleanup the limbo map. def fail_new_thread(*args): raise threading.ThreadError() _start_new_thread = threading._start_new_thread threading._start_new_thread = fail_new_thread try: t = threading.Thread(target=lambda: None) self.assertRaises(threading.ThreadError, t.start) self.assertFalse( t in threading._limbo, "Failed to cleanup _limbo map on failure of Thread.start().") finally: threading._start_new_thread = _start_new_thread def test_finalize_runnning_thread(self): # Issue 1402: the PyGILState_Ensure / _Release functions may be called # very late on python exit: on deallocation of a running thread for # example. import_module("ctypes") rc, out, err = assert_python_failure("-c", """if 1: import ctypes, sys, time, _thread # This lock is used as a simple event variable. ready = _thread.allocate_lock() ready.acquire() # Module globals are cleared before __del__ is run # So we save the functions in class dict class C: ensure = ctypes.pythonapi.PyGILState_Ensure release = ctypes.pythonapi.PyGILState_Release def __del__(self): state = self.ensure() self.release(state) def waitingThread(): x = C() ready.release() time.sleep(100) _thread.start_new_thread(waitingThread, ()) ready.acquire() # Be sure the other thread is waiting. sys.exit(42) """) self.assertEqual(rc, 42) def test_finalize_with_trace(self): # Issue1733757 # Avoid a deadlock when sys.settrace steps into threading._shutdown assert_python_ok("-c", """if 1: import sys, threading # A deadlock-killer, to prevent the # testsuite to hang forever def killer(): import os, time time.sleep(2) print('program blocked; aborting') os._exit(2) t = threading.Thread(target=killer) t.daemon = True t.start() # This is the trace function def func(frame, event, arg): threading.current_thread() return func sys.settrace(func) """) def test_join_nondaemon_on_shutdown(self): # Issue 1722344 # Raising SystemExit skipped threading._shutdown rc, out, err = assert_python_ok("-c", """if 1: import threading from time import sleep def child(): sleep(1) # As a non-daemon thread we SHOULD wake up and nothing # should be torn down yet print("Woke up, sleep function is:", sleep) threading.Thread(target=child).start() raise SystemExit """) self.assertEqual(out.strip(), b"Woke up, sleep function is: <built-in function sleep>") self.assertEqual(err, b"") def test_enumerate_after_join(self): # Try hard to trigger #1703448: a thread is still returned in # threading.enumerate() after it has been join()ed. 
enum = threading.enumerate old_interval = sys.getswitchinterval() try: for i in range(1, 100): sys.setswitchinterval(i * 0.0002) t = threading.Thread(target=lambda: None) t.start() t.join() l = enum() self.assertNotIn(t, l, "#1703448 triggered after %d trials: %s" % (i, l)) finally: sys.setswitchinterval(old_interval) def test_no_refcycle_through_target(self): class RunSelfFunction(object): def __init__(self, should_raise): # The links in this refcycle from Thread back to self # should be cleaned up when the thread completes. self.should_raise = should_raise self.thread = threading.Thread(target=self._run, args=(self,), kwargs={'yet_another':self}) self.thread.start() def _run(self, other_ref, yet_another): if self.should_raise: raise SystemExit cyclic_object = RunSelfFunction(should_raise=False) weak_cyclic_object = weakref.ref(cyclic_object) cyclic_object.thread.join() del cyclic_object self.assertIsNone(weak_cyclic_object(), msg=('%d references still around' % sys.getrefcount(weak_cyclic_object()))) raising_cyclic_object = RunSelfFunction(should_raise=True) weak_raising_cyclic_object = weakref.ref(raising_cyclic_object) raising_cyclic_object.thread.join() del raising_cyclic_object self.assertIsNone(weak_raising_cyclic_object(), msg=('%d references still around' % sys.getrefcount(weak_raising_cyclic_object()))) def test_old_threading_api(self): # Just a quick sanity check to make sure the old method names are # still present t = threading.Thread() t.isDaemon() t.setDaemon(True) t.getName() t.setName("name") t.isAlive() e = threading.Event() e.isSet() threading.activeCount() def test_repr_daemon(self): t = threading.Thread() self.assertFalse('daemon' in repr(t)) t.daemon = True self.assertTrue('daemon' in repr(t)) def test_deamon_param(self): t = threading.Thread() self.assertFalse(t.daemon) t = threading.Thread(daemon=False) self.assertFalse(t.daemon) t = threading.Thread(daemon=True) self.assertTrue(t.daemon) @unittest.skipUnless(hasattr(os, 'fork'), 'test needs fork()') def test_dummy_thread_after_fork(self): # Issue #14308: a dummy thread in the active list doesn't mess up # the after-fork mechanism. code = """if 1: import _thread, threading, os, time def background_thread(evt): # Creates and registers the _DummyThread instance threading.current_thread() evt.set() time.sleep(10) evt = threading.Event() _thread.start_new_thread(background_thread, (evt,)) evt.wait() assert threading.active_count() == 2, threading.active_count() if os.fork() == 0: assert threading.active_count() == 1, threading.active_count() os._exit(0) else: os.wait() """ _, out, err = assert_python_ok("-c", code) self.assertEqual(out, b'') self.assertEqual(err, b'') @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()") def test_is_alive_after_fork(self): # Try hard to trigger #18418: is_alive() could sometimes be True on # threads that vanished after a fork. old_interval = sys.getswitchinterval() self.addCleanup(sys.setswitchinterval, old_interval) # Make the bug more likely to manifest. 
sys.setswitchinterval(1e-6) for i in range(20): t = threading.Thread(target=lambda: None) t.start() self.addCleanup(t.join) pid = os.fork() if pid == 0: os._exit(1 if t.is_alive() else 0) else: pid, status = os.waitpid(pid, 0) self.assertEqual(0, status) def test_main_thread(self): main = threading.main_thread() self.assertEqual(main.name, 'MainThread') self.assertEqual(main.ident, threading.current_thread().ident) self.assertEqual(main.ident, threading.get_ident()) def f(): self.assertNotEqual(threading.main_thread().ident, threading.current_thread().ident) th = threading.Thread(target=f) th.start() th.join() @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()") def test_main_thread_after_fork(self): code = """if 1: import os, threading pid = os.fork() if pid == 0: main = threading.main_thread() print(main.name) print(main.ident == threading.current_thread().ident) print(main.ident == threading.get_ident()) else: os.waitpid(pid, 0) """ _, out, err = assert_python_ok("-c", code) data = out.decode().replace('\r', '') self.assertEqual(err, b"") self.assertEqual(data, "MainThread\nTrue\nTrue\n") @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()") def test_main_thread_after_fork_from_nonmain_thread(self): code = """if 1: import os, threading, sys def f(): pid = os.fork() if pid == 0: main = threading.main_thread() print(main.name) print(main.ident == threading.current_thread().ident) print(main.ident == threading.get_ident()) # stdout is fully buffered because not a tty, # we have to flush before exit. sys.stdout.flush() else: os.waitpid(pid, 0) th = threading.Thread(target=f) th.start() th.join() """ _, out, err = assert_python_ok("-c", code) data = out.decode().replace('\r', '') self.assertEqual(err, b"") self.assertEqual(data, "Thread-1\nTrue\nTrue\n") def test_tstate_lock(self): # Test an implementation detail of Thread objects. started = _thread.allocate_lock() finish = _thread.allocate_lock() started.acquire() finish.acquire() def f(): started.release() finish.acquire() time.sleep(0.01) # The tstate lock is None until the thread is started t = threading.Thread(target=f) self.assertIs(t._tstate_lock, None) t.start() started.acquire() self.assertTrue(t.is_alive()) # The tstate lock can't be acquired when the thread is running # (or suspended). tstate_lock = t._tstate_lock self.assertFalse(tstate_lock.acquire(timeout=0), False) finish.release() # When the thread ends, the state_lock can be successfully # acquired. self.assertTrue(tstate_lock.acquire(timeout=5), False) # But is_alive() is still True: we hold _tstate_lock now, which # prevents is_alive() from knowing the thread's end-of-life C code # is done. self.assertTrue(t.is_alive()) # Let is_alive() find out the C code is done. tstate_lock.release() self.assertFalse(t.is_alive()) # And verify the thread disposed of _tstate_lock. self.assertTrue(t._tstate_lock is None) def test_repr_stopped(self): # Verify that "stopped" shows up in repr(Thread) appropriately. started = _thread.allocate_lock() finish = _thread.allocate_lock() started.acquire() finish.acquire() def f(): started.release() finish.acquire() t = threading.Thread(target=f) t.start() started.acquire() self.assertIn("started", repr(t)) finish.release() # "stopped" should appear in the repr in a reasonable amount of time. 
# Implementation detail: as of this writing, that's trivially true # if .join() is called, and almost trivially true if .is_alive() is # called. The detail we're testing here is that "stopped" shows up # "all on its own". LOOKING_FOR = "stopped" for i in range(500): if LOOKING_FOR in repr(t): break time.sleep(0.01) self.assertIn(LOOKING_FOR, repr(t)) # we waited at least 5 seconds def test_BoundedSemaphore_limit(self): # BoundedSemaphore should raise ValueError if released too often. for limit in range(1, 10): bs = threading.BoundedSemaphore(limit) threads = [threading.Thread(target=bs.acquire) for _ in range(limit)] for t in threads: t.start() for t in threads: t.join() threads = [threading.Thread(target=bs.release) for _ in range(limit)] for t in threads: t.start() for t in threads: t.join() self.assertRaises(ValueError, bs.release) @cpython_only def test_frame_tstate_tracing(self): # Issue #14432: Crash when a generator is created in a C thread that is # destroyed while the generator is still used. The issue was that a # generator contains a frame, and the frame kept a reference to the # Python state of the destroyed C thread. The crash occurs when a trace # function is setup. def noop_trace(frame, event, arg): # no operation return noop_trace def generator(): while 1: yield "genereator" def callback(): if callback.gen is None: callback.gen = generator() return next(callback.gen) callback.gen = None old_trace = sys.gettrace() sys.settrace(noop_trace) try: # Install a trace function threading.settrace(noop_trace) # Create a generator in a C thread which exits after the call import _testcapi _testcapi.call_in_temporary_c_thread(callback) # Call the generator in a different Python thread, check that the # generator didn't keep a reference to the destroyed thread state for test in range(3): # The trace function is still called here callback() finally: sys.settrace(old_trace) class ThreadJoinOnShutdown(BaseTestCase): def _run_and_join(self, script): script = """if 1: import sys, os, time, threading # a thread, which waits for the main program to terminate def joiningfunc(mainthread): mainthread.join() print('end of thread') # stdout is fully buffered because not a tty, we have to flush # before exit. sys.stdout.flush() \n""" + script rc, out, err = assert_python_ok("-c", script) data = out.decode().replace('\r', '') self.assertEqual(data, "end of main\nend of thread\n") def test_1_join_on_shutdown(self): # The usual case: on exit, wait for a non-daemon thread script = """if 1: import os t = threading.Thread(target=joiningfunc, args=(threading.current_thread(),)) t.start() time.sleep(0.1) print('end of main') """ self._run_and_join(script) @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()") @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") def test_2_join_in_forked_process(self): # Like the test above, but from a forked interpreter script = """if 1: childpid = os.fork() if childpid != 0: os.waitpid(childpid, 0) sys.exit(0) t = threading.Thread(target=joiningfunc, args=(threading.current_thread(),)) t.start() print('end of main') """ self._run_and_join(script) @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()") @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") def test_3_join_in_forked_from_thread(self): # Like the test above, but fork() was called from a worker thread # In the forked process, the main Thread object must be marked as stopped. 
script = """if 1: main_thread = threading.current_thread() def worker(): childpid = os.fork() if childpid != 0: os.waitpid(childpid, 0) sys.exit(0) t = threading.Thread(target=joiningfunc, args=(main_thread,)) print('end of main') t.start() t.join() # Should not block: main_thread is already stopped w = threading.Thread(target=worker) w.start() """ self._run_and_join(script) @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") def test_4_daemon_threads(self): # Check that a daemon thread cannot crash the interpreter on shutdown # by manipulating internal structures that are being disposed of in # the main thread. script = """if True: import os import random import sys import time import threading thread_has_run = set() def random_io(): '''Loop for a while sleeping random tiny amounts and doing some I/O.''' while True: in_f = open(os.__file__, 'rb') stuff = in_f.read(200) null_f = open(os.devnull, 'wb') null_f.write(stuff) time.sleep(random.random() / 1995) null_f.close() in_f.close() thread_has_run.add(threading.current_thread()) def main(): count = 0 for _ in range(40): new_thread = threading.Thread(target=random_io) new_thread.daemon = True new_thread.start() count += 1 while len(thread_has_run) < count: time.sleep(0.001) # Trigger process shutdown sys.exit(0) main() """ rc, out, err = assert_python_ok('-c', script) self.assertFalse(err) @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()") @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") def test_reinit_tls_after_fork(self): # Issue #13817: fork() would deadlock in a multithreaded program with # the ad-hoc TLS implementation. def do_fork_and_wait(): # just fork a child process and wait it pid = os.fork() if pid > 0: os.waitpid(pid, 0) else: os._exit(0) # start a bunch of threads that will fork() child processes threads = [] for i in range(16): t = threading.Thread(target=do_fork_and_wait) threads.append(t) t.start() for t in threads: t.join() @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()") def test_clear_threads_states_after_fork(self): # Issue #17094: check that threads states are cleared after fork() # start a bunch of threads threads = [] for i in range(16): t = threading.Thread(target=lambda : time.sleep(0.3)) threads.append(t) t.start() pid = os.fork() if pid == 0: # check that threads states have been cleared if len(sys._current_frames()) == 1: os._exit(0) else: os._exit(1) else: _, status = os.waitpid(pid, 0) self.assertEqual(0, status) for t in threads: t.join() class SubinterpThreadingTests(BaseTestCase): def test_threads_join(self): # Non-daemon threads should be joined at subinterpreter shutdown # (issue #18808) r, w = os.pipe() self.addCleanup(os.close, r) self.addCleanup(os.close, w) code = r"""if 1: import os import threading import time def f(): # Sleep a bit so that the thread is still running when # Py_EndInterpreter is called. time.sleep(0.05) os.write(%d, b"x") threading.Thread(target=f).start() """ % (w,) ret = test.support.run_in_subinterp(code) self.assertEqual(ret, 0) # The thread was joined properly. self.assertEqual(os.read(r, 1), b"x") def test_threads_join_2(self): # Same as above, but a delay gets introduced after the thread's # Python code returned but before the thread state is deleted. # To achieve this, we register a thread-local object which sleeps # a bit when deallocated. 
r, w = os.pipe() self.addCleanup(os.close, r) self.addCleanup(os.close, w) code = r"""if 1: import os import threading import time class Sleeper: def __del__(self): time.sleep(0.05) tls = threading.local() def f(): # Sleep a bit so that the thread is still running when # Py_EndInterpreter is called. time.sleep(0.05) tls.x = Sleeper() os.write(%d, b"x") threading.Thread(target=f).start() """ % (w,) ret = test.support.run_in_subinterp(code) self.assertEqual(ret, 0) # The thread was joined properly. self.assertEqual(os.read(r, 1), b"x") @cpython_only def test_daemon_threads_fatal_error(self): subinterp_code = r"""if 1: import os import threading import time def f(): # Make sure the daemon thread is still running when # Py_EndInterpreter is called. time.sleep(10) threading.Thread(target=f, daemon=True).start() """ script = r"""if 1: import _testcapi _testcapi.run_in_subinterp(%r) """ % (subinterp_code,) with test.support.SuppressCrashReport(): rc, out, err = assert_python_failure("-c", script) self.assertIn("Fatal Python error: Py_EndInterpreter: " "not the last thread", err.decode()) class ThreadingExceptionTests(BaseTestCase): # A RuntimeError should be raised if Thread.start() is called # multiple times. def test_start_thread_again(self): thread = threading.Thread() thread.start() self.assertRaises(RuntimeError, thread.start) def test_joining_current_thread(self): current_thread = threading.current_thread() self.assertRaises(RuntimeError, current_thread.join); def test_joining_inactive_thread(self): thread = threading.Thread() self.assertRaises(RuntimeError, thread.join) def test_daemonize_active_thread(self): thread = threading.Thread() thread.start() self.assertRaises(RuntimeError, setattr, thread, "daemon", True) def test_releasing_unacquired_lock(self): lock = threading.Lock() self.assertRaises(RuntimeError, lock.release) @unittest.skipUnless(sys.platform == 'darwin' and test.support.python_is_optimized(), 'test macosx problem') def test_recursion_limit(self): # Issue 9670 # test that excessive recursion within a non-main thread causes # an exception rather than crashing the interpreter on platforms # like Mac OS X or FreeBSD which have small default stack sizes # for threads script = """if True: import threading def recurse(): return recurse() def outer(): try: recurse() except RuntimeError: pass w = threading.Thread(target=outer) w.start() w.join() print('end of main thread') """ expected_output = "end of main thread\n" p = subprocess.Popen([sys.executable, "-c", script], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() data = stdout.decode().replace('\r', '') self.assertEqual(p.returncode, 0, "Unexpected error: " + stderr.decode()) self.assertEqual(data, expected_output) class TimerTests(BaseTestCase): def setUp(self): BaseTestCase.setUp(self) self.callback_args = [] self.callback_event = threading.Event() def test_init_immutable_default_args(self): # Issue 17435: constructor defaults were mutable objects, they could be # mutated via the object attributes and affect other Timer objects. 
timer1 = threading.Timer(0.01, self._callback_spy) timer1.start() self.callback_event.wait() timer1.args.append("blah") timer1.kwargs["foo"] = "bar" self.callback_event.clear() timer2 = threading.Timer(0.01, self._callback_spy) timer2.start() self.callback_event.wait() self.assertEqual(len(self.callback_args), 2) self.assertEqual(self.callback_args, [((), {}), ((), {})]) def _callback_spy(self, *args, **kwargs): self.callback_args.append((args[:], kwargs.copy())) self.callback_event.set() class LockTests(lock_tests.LockTests): locktype = staticmethod(threading.Lock) class PyRLockTests(lock_tests.RLockTests): locktype = staticmethod(threading._PyRLock) @unittest.skipIf(threading._CRLock is None, 'RLock not implemented in C') class CRLockTests(lock_tests.RLockTests): locktype = staticmethod(threading._CRLock) class EventTests(lock_tests.EventTests): eventtype = staticmethod(threading.Event) class ConditionAsRLockTests(lock_tests.RLockTests): # An Condition uses an RLock by default and exports its API. locktype = staticmethod(threading.Condition) class ConditionTests(lock_tests.ConditionTests): condtype = staticmethod(threading.Condition) class SemaphoreTests(lock_tests.SemaphoreTests): semtype = staticmethod(threading.Semaphore) class BoundedSemaphoreTests(lock_tests.BoundedSemaphoreTests): semtype = staticmethod(threading.BoundedSemaphore) class BarrierTests(lock_tests.BarrierTests): barriertype = staticmethod(threading.Barrier) if __name__ == "__main__": unittest.main()
license: apache-2.0
repo_name: watonyweng/nova
path: nova/virt/libvirt/storage/rbd_utils.py
copies: 6
size: 11793
content:
# Copyright 2012 Grid Dynamics # Copyright 2013 Inktank Storage, Inc. # Copyright 2014 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import urllib try: import rados import rbd except ImportError: rados = None rbd = None from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_service import loopingcall from oslo_utils import excutils from oslo_utils import units from nova import exception from nova.i18n import _ from nova.i18n import _LE from nova.i18n import _LW from nova import utils LOG = logging.getLogger(__name__) class RBDVolumeProxy(object): """Context manager for dealing with an existing rbd volume. This handles connecting to rados and opening an ioctx automatically, and otherwise acts like a librbd Image object. The underlying librados client and ioctx can be accessed as the attributes 'client' and 'ioctx'. """ def __init__(self, driver, name, pool=None, snapshot=None, read_only=False): client, ioctx = driver._connect_to_rados(pool) try: snap_name = snapshot.encode('utf8') if snapshot else None self.volume = rbd.Image(ioctx, name.encode('utf8'), snapshot=snap_name, read_only=read_only) except rbd.ImageNotFound: with excutils.save_and_reraise_exception(): LOG.debug("rbd image %s does not exist", name) driver._disconnect_from_rados(client, ioctx) except rbd.Error: with excutils.save_and_reraise_exception(): LOG.exception(_LE("error opening rbd image %s"), name) driver._disconnect_from_rados(client, ioctx) self.driver = driver self.client = client self.ioctx = ioctx def __enter__(self): return self def __exit__(self, type_, value, traceback): try: self.volume.close() finally: self.driver._disconnect_from_rados(self.client, self.ioctx) def __getattr__(self, attrib): return getattr(self.volume, attrib) class RADOSClient(object): """Context manager to simplify error handling for connecting to ceph.""" def __init__(self, driver, pool=None): self.driver = driver self.cluster, self.ioctx = driver._connect_to_rados(pool) def __enter__(self): return self def __exit__(self, type_, value, traceback): self.driver._disconnect_from_rados(self.cluster, self.ioctx) @property def features(self): features = self.cluster.conf_get('rbd_default_features') if ((features is None) or (int(features) == 0)): features = rbd.RBD_FEATURE_LAYERING return int(features) class RBDDriver(object): def __init__(self, pool, ceph_conf, rbd_user): self.pool = pool.encode('utf8') # NOTE(angdraug): rados.Rados fails to connect if ceph_conf is None: # https://github.com/ceph/ceph/pull/1787 self.ceph_conf = ceph_conf.encode('utf8') if ceph_conf else '' self.rbd_user = rbd_user.encode('utf8') if rbd_user else None if rbd is None: raise RuntimeError(_('rbd python libraries not found')) def _connect_to_rados(self, pool=None): client = rados.Rados(rados_id=self.rbd_user, conffile=self.ceph_conf) try: client.connect() pool_to_open = pool or self.pool ioctx = client.open_ioctx(pool_to_open.encode('utf-8')) return client, ioctx except rados.Error: # shutdown cannot raise an exception client.shutdown() 
raise def _disconnect_from_rados(self, client, ioctx): # closing an ioctx cannot raise an exception ioctx.close() client.shutdown() def ceph_args(self): """List of command line parameters to be passed to ceph commands to reflect RBDDriver configuration such as RBD user name and location of ceph.conf. """ args = [] if self.rbd_user: args.extend(['--id', self.rbd_user]) if self.ceph_conf: args.extend(['--conf', self.ceph_conf]) return args def get_mon_addrs(self): args = ['ceph', 'mon', 'dump', '--format=json'] + self.ceph_args() out, _ = utils.execute(*args) lines = out.split('\n') if lines[0].startswith('dumped monmap epoch'): lines = lines[1:] monmap = jsonutils.loads('\n'.join(lines)) addrs = [mon['addr'] for mon in monmap['mons']] hosts = [] ports = [] for addr in addrs: host_port = addr[:addr.rindex('/')] host, port = host_port.rsplit(':', 1) hosts.append(host.strip('[]')) ports.append(port) return hosts, ports def parse_url(self, url): prefix = 'rbd://' if not url.startswith(prefix): reason = _('Not stored in rbd') raise exception.ImageUnacceptable(image_id=url, reason=reason) pieces = map(urllib.unquote, url[len(prefix):].split('/')) if '' in pieces: reason = _('Blank components') raise exception.ImageUnacceptable(image_id=url, reason=reason) if len(pieces) != 4: reason = _('Not an rbd snapshot') raise exception.ImageUnacceptable(image_id=url, reason=reason) return pieces def _get_fsid(self): with RADOSClient(self) as client: return client.cluster.get_fsid() def is_cloneable(self, image_location, image_meta): url = image_location['url'] try: fsid, pool, image, snapshot = self.parse_url(url) except exception.ImageUnacceptable as e: LOG.debug('not cloneable: %s', e) return False if self._get_fsid() != fsid: reason = '%s is in a different ceph cluster' % url LOG.debug(reason) return False if image_meta['disk_format'] != 'raw': reason = ("rbd image clone requires image format to be " "'raw' but image {0} is '{1}'").format( url, image_meta['disk_format']) LOG.debug(reason) return False # check that we can read the image try: return self.exists(image, pool=pool, snapshot=snapshot) except rbd.Error as e: LOG.debug('Unable to open image %(loc)s: %(err)s' % dict(loc=url, err=e)) return False def clone(self, image_location, dest_name): _fsid, pool, image, snapshot = self.parse_url( image_location['url']) LOG.debug('cloning %(pool)s/%(img)s@%(snap)s' % dict(pool=pool, img=image, snap=snapshot)) with RADOSClient(self, str(pool)) as src_client: with RADOSClient(self) as dest_client: rbd.RBD().clone(src_client.ioctx, image.encode('utf-8'), snapshot.encode('utf-8'), dest_client.ioctx, dest_name, features=src_client.features) def size(self, name): with RBDVolumeProxy(self, name) as vol: return vol.size() def resize(self, name, size): """Resize RBD volume. 
:name: Name of RBD object :size: New size in bytes """ LOG.debug('resizing rbd image %s to %d', name, size) with RBDVolumeProxy(self, name) as vol: vol.resize(size) def exists(self, name, pool=None, snapshot=None): try: with RBDVolumeProxy(self, name, pool=pool, snapshot=snapshot, read_only=True): return True except rbd.ImageNotFound: return False def remove_image(self, name): """Remove RBD volume :name: Name of RBD volume """ with RADOSClient(self, self.pool) as client: try: rbd.RBD().remove(client.ioctx, name) except rbd.ImageNotFound: LOG.warn(_LW('image %(volume)s in pool %(pool)s can not be ' 'found, failed to remove'), {'volume': name, 'pool': self.pool}) except rbd.ImageHasSnapshots: LOG.error(_LE('image %(volume)s in pool %(pool)s has ' 'snapshots, failed to remove'), {'volume': name, 'pool': self.pool}) def import_image(self, base, name): """Import RBD volume from image file. Uses the command line import instead of librbd since rbd import command detects zeroes to preserve sparseness in the image. :base: Path to image file :name: Name of RBD volume """ args = ['--pool', self.pool, base, name] # Image format 2 supports cloning, # in stable ceph rbd release default is not 2, # we need to use it explicitly. args += ['--image-format=2'] args += self.ceph_args() utils.execute('rbd', 'import', *args) def cleanup_volumes(self, instance): def _cleanup_vol(ioctx, volume, retryctx): try: rbd.RBD().remove(client.ioctx, volume) raise loopingcall.LoopingCallDone(retvalue=False) except (rbd.ImageBusy, rbd.ImageHasSnapshots): LOG.warn(_LW('rbd remove %(volume)s in pool %(pool)s ' 'failed'), {'volume': volume, 'pool': self.pool}) retryctx['retries'] -= 1 if retryctx['retries'] <= 0: raise loopingcall.LoopingCallDone() with RADOSClient(self, self.pool) as client: def belongs_to_instance(disk): return disk.startswith(instance.uuid) volumes = rbd.RBD().list(client.ioctx) for volume in filter(belongs_to_instance, volumes): # NOTE(danms): We let it go for ten seconds retryctx = {'retries': 10} timer = loopingcall.FixedIntervalLoopingCall( _cleanup_vol, client.ioctx, volume, retryctx) timed_out = timer.start(interval=1).wait() if timed_out: # NOTE(danms): Run this again to propagate the error, but # if it succeeds, don't raise the loopingcall exception try: _cleanup_vol(client.ioctx, volume, retryctx) except loopingcall.LoopingCallDone: pass def get_pool_info(self): with RADOSClient(self) as client: stats = client.cluster.get_cluster_stats() return {'total': stats['kb'] * units.Ki, 'free': stats['kb_avail'] * units.Ki, 'used': stats['kb_used'] * units.Ki}
license: apache-2.0
repo_name: Johnetordoff/osf.io
path: api_tests/draft_registrations/views/test_draft_registration_institutions_list.py
copies: 6
size: 2214
content:
import pytest

from api.base.settings.defaults import API_BASE
from api_tests.nodes.views.test_node_institutions_list import TestNodeInstitutionList
from osf_tests.factories import DraftRegistrationFactory, AuthUserFactory


@pytest.fixture()
def user():
    return AuthUserFactory()


@pytest.fixture()
def user_two():
    return AuthUserFactory()


@pytest.mark.django_db
class TestDraftRegistrationInstitutionList(TestNodeInstitutionList):

    @pytest.fixture()
    def node_one(self, institution, user):
        # Overrides TestNodeInstitutionList
        draft = DraftRegistrationFactory(initiator=user)
        draft.affiliated_institutions.add(institution)
        draft.save()
        return draft

    @pytest.fixture()
    def node_two(self, user):
        # Overrides TestNodeInstitutionList
        return DraftRegistrationFactory(initiator=user)

    @pytest.fixture()
    def node_one_url(self, node_one):
        # Overrides TestNodeInstitutionList
        return '/{}draft_registrations/{}/institutions/'.format(API_BASE, node_one._id)

    @pytest.fixture()
    def node_two_url(self, node_two):
        # Overrides TestNodeInstitutionList
        return '/{}draft_registrations/{}/institutions/'.format(API_BASE, node_two._id)

    # Overrides TestNodeInstitutionList
    def test_node_institution_detail(
            self, app, user, user_two, institution, node_one, node_two,
            node_one_url, node_two_url,
    ):
        # test_return_institution_unauthenticated
        res = app.get(node_one_url, expect_errors=True)
        assert res.status_code == 401

        # test_return institution_contrib
        res = app.get(node_one_url, auth=user.auth)
        assert res.status_code == 200
        assert res.json['data'][0]['attributes']['name'] == institution.name
        assert res.json['data'][0]['id'] == institution._id

        # test_return_no_institution
        res = app.get(
            node_two_url, auth=user.auth,
        )
        assert res.status_code == 200
        assert len(res.json['data']) == 0

        # test non contrib
        res = app.get(
            node_one_url, auth=user_two.auth,
            expect_errors=True
        )
        assert res.status_code == 403
license: apache-2.0
repo_name: tersmitten/ansible
path: lib/ansible/modules/network/nos/nos_command.py
copies: 73
size: 7281
content:
#!/usr/bin/python # Copyright: (c) 2018, Extreme Networks Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: nos_command version_added: "2.7" author: "Lindsay Hill (@LindsayHill)" short_description: Run commands on remote devices running Extreme Networks NOS description: - Sends arbitrary commands to a NOS device and returns the results read from the device. This module includes an argument that will cause the module to wait for a specific condition before returning or timing out if the condition is not met. - This module does not support running commands in configuration mode. Please use M(nos_config) to configure NOS devices. notes: - Tested against NOS 7.2.0 - If a command sent to the device requires answering a prompt, it is possible to pass a dict containing I(command), I(answer) and I(prompt). See examples. options: commands: description: - List of commands to send to the remote NOS device over the configured provider. The resulting output from the command is returned. If the I(wait_for) argument is provided, the module is not returned until the condition is satisfied or the number of retries has expired. required: true wait_for: description: - List of conditions to evaluate against the output of the command. The task will wait for each condition to be true before moving forward. If the conditional is not true within the configured number of retries, the task fails. See examples. match: description: - The I(match) argument is used in conjunction with the I(wait_for) argument to specify the match policy. Valid values are C(all) or C(any). If the value is set to C(all) then all conditionals in the wait_for must be satisfied. If the value is set to C(any) then only one of the values must be satisfied. default: all choices: ['any', 'all'] retries: description: - Specifies the number of retries a command should by tried before it is considered failed. The command is run on the target device every retry and evaluated against the I(wait_for) conditions. default: 10 interval: description: - Configures the interval in seconds to wait between retries of the command. If the command does not pass the specified conditions, the interval indicates how long to wait before trying the command again. default: 1 """ EXAMPLES = """ tasks: - name: run show version on remote devices nos_command: commands: show version - name: run show version and check to see if output contains NOS nos_command: commands: show version wait_for: result[0] contains NOS - name: run multiple commands on remote nodes nos_command: commands: - show version - show interfaces - name: run multiple commands and evaluate the output nos_command: commands: - show version - show interface status wait_for: - result[0] contains NOS - result[1] contains Te - name: run command that requires answering a prompt nos_command: commands: - command: 'clear sessions' prompt: 'This operation will logout all the user sessions. 
Do you want to continue (yes/no)?:' answer: y """ RETURN = """ stdout: description: The set of responses from the commands returned: always apart from low level errors (such as action plugin) type: list sample: ['...', '...'] stdout_lines: description: The value of stdout split into a list returned: always apart from low level errors (such as action plugin) type: list sample: [['...', '...'], ['...'], ['...']] failed_conditions: description: The list of conditionals that have failed returned: failed type: list sample: ['...', '...'] """ import re import time from ansible.module_utils.network.nos.nos import run_commands from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.common.utils import ComplexList from ansible.module_utils.network.common.parsing import Conditional from ansible.module_utils.six import string_types __metaclass__ = type def to_lines(stdout): for item in stdout: if isinstance(item, string_types): item = str(item).split('\n') yield item def parse_commands(module, warnings): command = ComplexList(dict( command=dict(key=True), prompt=dict(), answer=dict() ), module) commands = command(module.params['commands']) for item in list(commands): configure_type = re.match(r'conf(?:\w*)(?:\s+(\w+))?', item['command']) if module.check_mode: if configure_type and configure_type.group(1) not in ('confirm', 'replace', 'revert', 'network'): module.fail_json( msg='nos_command does not support running config mode ' 'commands. Please use nos_config instead' ) if not item['command'].startswith('show'): warnings.append( 'only show commands are supported when using check mode, not ' 'executing `%s`' % item['command'] ) commands.remove(item) return commands def main(): """main entry point for module execution """ argument_spec = dict( commands=dict(type='list', required=True), wait_for=dict(type='list'), match=dict(default='all', choices=['all', 'any']), retries=dict(default=10, type='int'), interval=dict(default=1, type='int') ) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) result = {'changed': False} warnings = list() commands = parse_commands(module, warnings) result['warnings'] = warnings wait_for = module.params['wait_for'] or list() conditionals = [Conditional(c) for c in wait_for] retries = module.params['retries'] interval = module.params['interval'] match = module.params['match'] while retries > 0: responses = run_commands(module, commands) for item in list(conditionals): if item(responses): if match == 'any': conditionals = list() break conditionals.remove(item) if not conditionals: break time.sleep(interval) retries -= 1 if conditionals: failed_conditions = [item.raw for item in conditionals] msg = 'One or more conditional statements have not been satisfied' module.fail_json(msg=msg, failed_conditions=failed_conditions) result.update({ 'changed': False, 'stdout': responses, 'stdout_lines': list(to_lines(responses)) }) module.exit_json(**result) if __name__ == '__main__': main()
license: gpl-3.0
repo_name: madhurrajn/samashthi
path: lib/django/core/management/commands/compilemessages.py
copies: 315
size: 5174
content:
from __future__ import unicode_literals

import codecs
import glob
import os

from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import find_command, popen_wrapper
from django.utils._os import npath, upath


def has_bom(fn):
    with open(fn, 'rb') as f:
        sample = f.read(4)
    return (sample[:3] == b'\xef\xbb\xbf' or
            sample.startswith((codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE)))


def is_writable(path):
    # Known side effect: updating file access/modified time to current time if
    # it is writable.
    try:
        with open(path, 'a'):
            os.utime(path, None)
    except (IOError, OSError):
        return False
    return True


class Command(BaseCommand):
    help = 'Compiles .po files to .mo files for use with builtin gettext support.'

    requires_system_checks = False
    leave_locale_alone = True

    program = 'msgfmt'
    program_options = ['--check-format']

    def add_arguments(self, parser):
        parser.add_argument('--locale', '-l', dest='locale', action='append', default=[],
                            help='Locale(s) to process (e.g. de_AT). Default is to process all. '
                                 'Can be used multiple times.')
        parser.add_argument('--exclude', '-x', dest='exclude', action='append', default=[],
                            help='Locales to exclude. Default is none. Can be used multiple times.')
        parser.add_argument('--use-fuzzy', '-f', dest='fuzzy', action='store_true', default=False,
                            help='Use fuzzy translations.')

    def handle(self, **options):
        locale = options.get('locale')
        exclude = options.get('exclude')
        self.verbosity = int(options.get('verbosity'))
        if options.get('fuzzy'):
            self.program_options = self.program_options + ['-f']

        if find_command(self.program) is None:
            raise CommandError("Can't find %s. Make sure you have GNU gettext "
                               "tools 0.15 or newer installed." % self.program)

        basedirs = [os.path.join('conf', 'locale'), 'locale']
        if os.environ.get('DJANGO_SETTINGS_MODULE'):
            from django.conf import settings
            basedirs.extend(upath(path) for path in settings.LOCALE_PATHS)

        # Walk entire tree, looking for locale directories
        for dirpath, dirnames, filenames in os.walk('.', topdown=True):
            for dirname in dirnames:
                if dirname == 'locale':
                    basedirs.append(os.path.join(dirpath, dirname))

        # Gather existing directories.
        basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))

        if not basedirs:
            raise CommandError("This script should be run from the Django Git "
                               "checkout or your project or app tree, or with "
                               "the settings module specified.")

        # Build locale list
        all_locales = []
        for basedir in basedirs:
            locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % basedir))
            all_locales.extend(map(os.path.basename, locale_dirs))

        # Account for excluded locales
        locales = locale or all_locales
        locales = set(locales) - set(exclude)

        for basedir in basedirs:
            if locales:
                dirs = [os.path.join(basedir, l, 'LC_MESSAGES') for l in locales]
            else:
                dirs = [basedir]
            locations = []
            for ldir in dirs:
                for dirpath, dirnames, filenames in os.walk(ldir):
                    locations.extend((dirpath, f) for f in filenames if f.endswith('.po'))
            if locations:
                self.compile_messages(locations)

    def compile_messages(self, locations):
        """
        Locations is a list of tuples: [(directory, file), ...]
        """
        for i, (dirpath, f) in enumerate(locations):
            if self.verbosity > 0:
                self.stdout.write('processing file %s in %s\n' % (f, dirpath))
            po_path = os.path.join(dirpath, f)
            if has_bom(po_path):
                raise CommandError("The %s file has a BOM (Byte Order Mark). "
                                   "Django only supports .po files encoded in "
                                   "UTF-8 and without any BOM." % po_path)
            base_path = os.path.splitext(po_path)[0]

            # Check writability on first location
            if i == 0 and not is_writable(npath(base_path + '.mo')):
                self.stderr.write("The po files under %s are in a seemingly not writable location. "
                                  "mo files will not be updated/created." % dirpath)
                return

            args = [self.program] + self.program_options + ['-o',
                    npath(base_path + '.mo'), npath(base_path + '.po')]
            output, errors, status = popen_wrapper(args)
            if status:
                if errors:
                    msg = "Execution of %s failed: %s" % (self.program, errors)
                else:
                    msg = "Execution of %s failed" % self.program
                raise CommandError(msg)
license: bsd-3-clause
repo_name: testmana2/test
path: QScintilla/Lexers/LexerTCL.py
copies: 2
size: 2267
content:
# -*- coding: utf-8 -*-

# Copyright (c) 2008 - 2015 Detlev Offenbach <[email protected]>
#

"""
Module implementing a TCL/Tk lexer with some additional methods.
"""

from __future__ import unicode_literals

from PyQt5.Qsci import QsciLexerTCL

from .Lexer import Lexer
import Preferences


class LexerTCL(Lexer, QsciLexerTCL):
    """
    Subclass to implement some additional lexer dependant methods.
    """
    def __init__(self, parent=None):
        """
        Constructor

        @param parent parent widget of this lexer
        """
        QsciLexerTCL.__init__(self, parent)
        Lexer.__init__(self)

        self.commentString = "#"

    def initProperties(self):
        """
        Public slot to initialize the properties.
        """
        try:
            self.setFoldCompact(Preferences.getEditor("AllFoldCompact"))
        except AttributeError:
            pass
        try:
            self.setFoldComments(Preferences.getEditor("TclFoldComment"))
        except AttributeError:
            pass

    def isCommentStyle(self, style):
        """
        Public method to check, if a style is a comment style.

        @param style style to check (integer)
        @return flag indicating a comment style (boolean)
        """
        return style in [QsciLexerTCL.Comment,
                         QsciLexerTCL.CommentBlock,
                         QsciLexerTCL.CommentBox,
                         QsciLexerTCL.CommentLine]

    def isStringStyle(self, style):
        """
        Public method to check, if a style is a string style.

        @param style style to check (integer)
        @return flag indicating a string style (boolean)
        """
        return style in [QsciLexerTCL.QuotedString]

    def defaultKeywords(self, kwSet):
        """
        Public method to get the default keywords.

        @param kwSet number of the keyword set (integer)
        @return string giving the keywords (string) or None
        """
        return QsciLexerTCL.keywords(self, kwSet)

    def maximumKeywordSet(self):
        """
        Public method to get the maximum keyword set.

        @return maximum keyword set (integer)
        """
        return 9
license: gpl-3.0
repo_name: Kast0rTr0y/ansible
path: lib/ansible/cli/pull.py
copies: 49
size: 11656
content:
# (c) 2012, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type ######################################################## import datetime import os import platform import random import shutil import socket import sys import time from ansible.errors import AnsibleOptionsError from ansible.cli import CLI from ansible.module_utils._text import to_native from ansible.plugins import module_loader from ansible.utils.cmd_functions import run_cmd try: from __main__ import display except ImportError: from ansible.utils.display import Display display = Display() ######################################################## class PullCLI(CLI): ''' code behind ansible ad-hoc cli''' DEFAULT_REPO_TYPE = 'git' DEFAULT_PLAYBOOK = 'local.yml' PLAYBOOK_ERRORS = { 1: 'File does not exist', 2: 'File is not readable' } SUPPORTED_REPO_MODULES = ['git'] def parse(self): ''' create an options parser for bin/ansible ''' self.parser = CLI.base_parser( usage='%prog -U <repository> [options]', connect_opts=True, vault_opts=True, runtask_opts=True, subset_opts=True, inventory_opts=True, module_opts=True, runas_prompt_opts=True, ) # options unique to pull self.parser.add_option('--purge', default=False, action='store_true', help='purge checkout after playbook run') self.parser.add_option('-o', '--only-if-changed', dest='ifchanged', default=False, action='store_true', help='only run the playbook if the repository has been updated') self.parser.add_option('-s', '--sleep', dest='sleep', default=None, help='sleep for random interval (between 0 and n number of seconds) before starting. This is a useful way to disperse git requests') self.parser.add_option('-f', '--force', dest='force', default=False, action='store_true', help='run the playbook even if the repository could not be updated') self.parser.add_option('-d', '--directory', dest='dest', default=None, help='directory to checkout repository to') self.parser.add_option('-U', '--url', dest='url', default=None, help='URL of the playbook repository') self.parser.add_option('--full', dest='fullclone', action='store_true', help='Do a full clone, instead of a shallow one.') self.parser.add_option('-C', '--checkout', dest='checkout', help='branch/tag/commit to checkout. ' 'Defaults to behavior of repository module.') self.parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true', help='adds the hostkey for the repo url if not already added') self.parser.add_option('-m', '--module-name', dest='module_name', default=self.DEFAULT_REPO_TYPE, help='Repository module name, which ansible will use to check out the repo. Default is %s.' % self.DEFAULT_REPO_TYPE) self.parser.add_option('--verify-commit', dest='verify', default=False, action='store_true', help='verify GPG signature of checked out commit, if it fails abort running the playbook.' 
' This needs the corresponding VCS module to support such an operation') self.parser.add_option('--clean', dest='clean', default=False, action='store_true', help='modified files in the working repository will be discarded') self.parser.add_option('--track-subs', dest='tracksubs', default=False, action='store_true', help='submodules will track the latest changes' ' This is equivalent to specifying the --remote flag to git submodule update') # for pull we don't wan't a default self.parser.set_defaults(inventory=None) super(PullCLI, self).parse() if not self.options.dest: hostname = socket.getfqdn() # use a hostname dependent directory, in case of $HOME on nfs self.options.dest = os.path.join('~/.ansible/pull', hostname) self.options.dest = os.path.expandvars(os.path.expanduser(self.options.dest)) if self.options.sleep: try: secs = random.randint(0,int(self.options.sleep)) self.options.sleep = secs except ValueError: raise AnsibleOptionsError("%s is not a number." % self.options.sleep) if not self.options.url: raise AnsibleOptionsError("URL for repository not specified, use -h for help") if self.options.module_name not in self.SUPPORTED_REPO_MODULES: raise AnsibleOptionsError("Unsuported repo module %s, choices are %s" % (self.options.module_name, ','.join(self.SUPPORTED_REPO_MODULES))) display.verbosity = self.options.verbosity self.validate_conflicts(vault_opts=True) def run(self): ''' use Runner lib to do SSH things ''' super(PullCLI, self).run() # log command line now = datetime.datetime.now() display.display(now.strftime("Starting Ansible Pull at %F %T")) display.display(' '.join(sys.argv)) # Build Checkout command # Now construct the ansible command node = platform.node() host = socket.getfqdn() limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]])) base_opts = '-c local ' if self.options.verbosity > 0: base_opts += ' -%s' % ''.join([ "v" for x in range(0, self.options.verbosity) ]) # Attempt to use the inventory passed in as an argument # It might not yet have been downloaded so use localhost as default if not self.options.inventory or ( ',' not in self.options.inventory and not os.path.exists(self.options.inventory)): inv_opts = 'localhost,' else: inv_opts = self.options.inventory #FIXME: enable more repo modules hg/svn? if self.options.module_name == 'git': repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest) if self.options.checkout: repo_opts += ' version=%s' % self.options.checkout if self.options.accept_host_key: repo_opts += ' accept_hostkey=yes' if self.options.private_key_file: repo_opts += ' key_file=%s' % self.options.private_key_file if self.options.verify: repo_opts += ' verify_commit=yes' if self.options.clean: repo_opts += ' force=yes' if self.options.tracksubs: repo_opts += ' track_submodules=yes' if not self.options.fullclone: repo_opts += ' depth=1' path = module_loader.find_plugin(self.options.module_name) if path is None: raise AnsibleOptionsError(("module '%s' not found.\n" % self.options.module_name)) bin_path = os.path.dirname(os.path.abspath(sys.argv[0])) # hardcode local and inventory/host as this is just meant to fetch the repo cmd = '%s/ansible -i "%s" %s -m %s -a "%s" all -l "%s"' % (bin_path, inv_opts, base_opts, self.options.module_name, repo_opts, limit_opts) for ev in self.options.extra_vars: cmd += ' -e "%s"' % ev # Nap? if self.options.sleep: display.display("Sleeping for %d seconds..." 
% self.options.sleep) time.sleep(self.options.sleep) # RUN the Checkout command display.debug("running ansible with VCS module to checkout repo") display.vvvv('EXEC: %s' % cmd) rc, out, err = run_cmd(cmd, live=True) if rc != 0: if self.options.force: display.warning("Unable to update repository. Continuing with (forced) run of playbook.") else: return rc elif self.options.ifchanged and '"changed": true' not in out: display.display("Repository has not changed, quitting.") return 0 playbook = self.select_playbook(self.options.dest) if playbook is None: raise AnsibleOptionsError("Could not find a playbook to run.") # Build playbook command cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook) if self.options.vault_password_file: cmd += " --vault-password-file=%s" % self.options.vault_password_file if self.options.inventory: cmd += ' -i "%s"' % self.options.inventory for ev in self.options.extra_vars: cmd += ' -e "%s"' % ev if self.options.ask_sudo_pass or self.options.ask_su_pass or self.options.become_ask_pass: cmd += ' --ask-become-pass' if self.options.skip_tags: cmd += ' --skip-tags "%s"' % to_native(u','.join(self.options.skip_tags)) if self.options.tags: cmd += ' -t "%s"' % to_native(u','.join(self.options.tags)) if self.options.subset: cmd += ' -l "%s"' % self.options.subset else: cmd += ' -l "%s"' % limit_opts os.chdir(self.options.dest) # RUN THE PLAYBOOK COMMAND display.debug("running ansible-playbook to do actual work") display.debug('EXEC: %s' % cmd) rc, out, err = run_cmd(cmd, live=True) if self.options.purge: os.chdir('/') try: shutil.rmtree(self.options.dest) except Exception as e: display.error("Failed to remove %s: %s" % (self.options.dest, str(e))) return rc def try_playbook(self, path): if not os.path.exists(path): return 1 if not os.access(path, os.R_OK): return 2 return 0 def select_playbook(self, path): playbook = None if len(self.args) > 0 and self.args[0] is not None: playbook = os.path.join(path, self.args[0]) rc = self.try_playbook(playbook) if rc != 0: display.warning("%s: %s" % (playbook, self.PLAYBOOK_ERRORS[rc])) return None return playbook else: fqdn = socket.getfqdn() hostpb = os.path.join(path, fqdn + '.yml') shorthostpb = os.path.join(path, fqdn.split('.')[0] + '.yml') localpb = os.path.join(path, self.DEFAULT_PLAYBOOK) errors = [] for pb in [hostpb, shorthostpb, localpb]: rc = self.try_playbook(pb) if rc == 0: playbook = pb break else: errors.append("%s: %s" % (pb, self.PLAYBOOK_ERRORS[rc])) if playbook is None: display.warning("\n".join(errors)) return playbook
gpl-3.0
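A minimal illustration of how PullCLI.run() in the record above assembles the checkout command line. All values below (bin path, repository URL, host names) are hypothetical placeholders; only the format string mirrors the source.

import os
import sys

# Hypothetical option values; only the command template mirrors PullCLI.run().
bin_path = os.path.dirname(os.path.abspath(sys.argv[0]))
inv_opts = 'localhost,'
base_opts = '-c local -vvv'
module_name = 'git'
repo_opts = 'name=https://example.com/playbooks.git dest=/tmp/checkout depth=1'
limit_opts = 'localhost,myhost,127.0.0.1'

cmd = '%s/ansible -i "%s" %s -m %s -a "%s" all -l "%s"' % (
    bin_path, inv_opts, base_opts, module_name, repo_opts, limit_opts)
print(cmd)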
xq262144/hue
desktop/core/ext-py/cx_Oracle-5.2.1/test/Cursor.py
3
11191
"""Module for testing cursor objects.""" import cx_Oracle import sys class TestCursor(BaseTestCase): def testExecuteNoArgs(self): """test executing a statement without any arguments""" result = self.cursor.execute("begin null; end;") self.failUnlessEqual(result, None) def testExecuteNoStatementWithArgs(self): """test executing a None statement with bind variables""" self.failUnlessRaises(cx_Oracle.ProgrammingError, self.cursor.execute, None, x = 5) def testExecuteEmptyKeywordArgs(self): """test executing a statement with args and empty keyword args""" simpleVar = self.cursor.var(cx_Oracle.NUMBER) args = [simpleVar] kwArgs = {} result = self.cursor.execute("begin :1 := 25; end;", args, **kwArgs) self.failUnlessEqual(result, None) self.failUnlessEqual(simpleVar.getvalue(), 25) def testExecuteKeywordArgs(self): """test executing a statement with keyword arguments""" simpleVar = self.cursor.var(cx_Oracle.NUMBER) result = self.cursor.execute("begin :value := 5; end;", value = simpleVar) self.failUnlessEqual(result, None) self.failUnlessEqual(simpleVar.getvalue(), 5) def testExecuteDictionaryArg(self): """test executing a statement with a dictionary argument""" simpleVar = self.cursor.var(cx_Oracle.NUMBER) dictArg = { "value" : simpleVar } result = self.cursor.execute("begin :value := 10; end;", dictArg) self.failUnlessEqual(result, None) self.failUnlessEqual(simpleVar.getvalue(), 10) def testExecuteMultipleMethod(self): """test executing a statement with both a dict arg and keyword args""" simpleVar = self.cursor.var(cx_Oracle.NUMBER) dictArg = { "value" : simpleVar } self.failUnlessRaises(cx_Oracle.InterfaceError, self.cursor.execute, "begin :value := 15; end;", dictArg, value = simpleVar) def testExecuteAndModifyArraySize(self): """test executing a statement and then changing the array size""" self.cursor.execute("select IntCol from TestNumbers") self.cursor.arraysize = 20 self.failUnlessEqual(len(self.cursor.fetchall()), 10) def testCallProc(self): """test executing a stored procedure""" var = self.cursor.var(cx_Oracle.NUMBER) results = self.cursor.callproc("proc_Test", ("hi", 5, var)) self.failUnlessEqual(results, ["hi", 10, 2.0]) def testCallProcNoArgs(self): """test executing a stored procedure without any arguments""" results = self.cursor.callproc("proc_TestNoArgs") self.failUnlessEqual(results, []) def testCallFunc(self): """test executing a stored function""" results = self.cursor.callfunc("func_Test", cx_Oracle.NUMBER, ("hi", 5)) self.failUnlessEqual(results, 7) def testCallFuncNoArgs(self): """test executing a stored function without any arguments""" results = self.cursor.callfunc("func_TestNoArgs", cx_Oracle.NUMBER) self.failUnlessEqual(results, 712) def testExecuteManyByName(self): """test executing a statement multiple times (named args)""" self.cursor.execute("truncate table TestExecuteMany") rows = [ { "value" : n } for n in range(250) ] self.cursor.arraysize = 100 statement = "insert into TestExecuteMany (IntCol) values (:value)" self.cursor.executemany(statement, rows) self.connection.commit() self.cursor.execute("select count(*) from TestExecuteMany") count, = self.cursor.fetchone() self.failUnlessEqual(count, len(rows)) def testExecuteManyByPosition(self): """test executing a statement multiple times (positional args)""" self.cursor.execute("truncate table TestExecuteMany") rows = [ [n] for n in range(230) ] self.cursor.arraysize = 100 statement = "insert into TestExecuteMany (IntCol) values (:1)" self.cursor.executemany(statement, rows) self.connection.commit() 
self.cursor.execute("select count(*) from TestExecuteMany") count, = self.cursor.fetchone() self.failUnlessEqual(count, len(rows)) def testExecuteManyWithPrepare(self): """test executing a statement multiple times (with prepare)""" self.cursor.execute("truncate table TestExecuteMany") rows = [ [n] for n in range(225) ] self.cursor.arraysize = 100 statement = "insert into TestExecuteMany (IntCol) values (:1)" self.cursor.prepare(statement) self.cursor.executemany(None, rows) self.connection.commit() self.cursor.execute("select count(*) from TestExecuteMany") count, = self.cursor.fetchone() self.failUnlessEqual(count, len(rows)) def testExecuteManyWithRebind(self): """test executing a statement multiple times (with rebind)""" self.cursor.execute("truncate table TestExecuteMany") rows = [ [n] for n in range(235) ] self.cursor.arraysize = 100 statement = "insert into TestExecuteMany (IntCol) values (:1)" self.cursor.executemany(statement, rows[:50]) self.cursor.executemany(statement, rows[50:]) self.connection.commit() self.cursor.execute("select count(*) from TestExecuteMany") count, = self.cursor.fetchone() self.failUnlessEqual(count, len(rows)) def testExecuteManyWithResize(self): """test executing a statement multiple times (with resize)""" self.cursor.execute("truncate table TestExecuteMany") rows = [ ( 1, "First" ), ( 2, "Second" ), ( 3, "Third" ), ( 4, "Fourth" ), ( 5, "Fifth" ), ( 6, "Sixth" ), ( 7, "Seventh" ) ] self.cursor.bindarraysize = 5 self.cursor.setinputsizes(int, 100) sql = "insert into TestExecuteMany (IntCol, StringCol) values (:1, :2)" self.cursor.executemany(sql, rows) var = self.cursor.bindvars[1] self.cursor.execute("select count(*) from TestExecuteMany") count, = self.cursor.fetchone() self.failUnlessEqual(count, len(rows)) self.failUnlessEqual(var.maxlength, 100 * self.connection.maxBytesPerCharacter) def testExecuteManyWithExecption(self): """test executing a statement multiple times (with exception)""" self.cursor.execute("truncate table TestExecuteMany") rows = [ { "value" : n } for n in (1, 2, 3, 2, 5) ] statement = "insert into TestExecuteMany (IntCol) values (:value)" self.failUnlessRaises(cx_Oracle.DatabaseError, self.cursor.executemany, statement, rows) self.failUnlessEqual(self.cursor.rowcount, 3) def testPrepare(self): """test preparing a statement and executing it multiple times""" self.failUnlessEqual(self.cursor.statement, None) statement = "begin :value := :value + 5; end;" self.cursor.prepare(statement) var = self.cursor.var(cx_Oracle.NUMBER) self.failUnlessEqual(self.cursor.statement, statement) var.setvalue(0, 2) self.cursor.execute(None, value = var) self.failUnlessEqual(var.getvalue(), 7) self.cursor.execute(None, value = var) self.failUnlessEqual(var.getvalue(), 12) self.cursor.execute("begin :value2 := 3; end;", value2 = var) self.failUnlessEqual(var.getvalue(), 3) def testExceptionOnClose(self): "confirm an exception is raised after closing a cursor" self.cursor.close() self.failUnlessRaises(cx_Oracle.InterfaceError, self.cursor.execute, "select 1 from dual") def testIterators(self): """test iterators""" self.cursor.execute(""" select IntCol from TestNumbers where IntCol between 1 and 3 order by IntCol""") rows = [] for row in self.cursor: rows.append(row[0]) self.failUnlessEqual(rows, [1, 2, 3]) def testIteratorsInterrupted(self): """test iterators (with intermediate execute)""" self.cursor.execute("truncate table TestExecuteMany") self.cursor.execute(""" select IntCol from TestNumbers where IntCol between 1 and 3 order by IntCol""") testIter = 
iter(self.cursor) if sys.version_info[0] >= 3: value, = next(testIter) else: value, = testIter.next() self.cursor.execute("insert into TestExecuteMany (IntCol) values (1)") if sys.version_info[0] >= 3: self.failUnlessRaises(cx_Oracle.InterfaceError, next, testIter) else: self.failUnlessRaises(cx_Oracle.InterfaceError, testIter.next) def testBindNames(self): """test that bindnames() works correctly.""" self.failUnlessRaises(cx_Oracle.ProgrammingError, self.cursor.bindnames) self.cursor.prepare("begin null; end;") self.failUnlessEqual(self.cursor.bindnames(), []) self.cursor.prepare("begin :retval := :inval + 5; end;") self.failUnlessEqual(self.cursor.bindnames(), ["RETVAL", "INVAL"]) self.cursor.prepare("begin :retval := :a * :a + :b * :b; end;") self.failUnlessEqual(self.cursor.bindnames(), ["RETVAL", "A", "B"]) self.cursor.prepare("begin :a := :b + :c + :d + :e + :f + :g + " + \ ":h + :i + :j + :k + :l; end;") self.failUnlessEqual(self.cursor.bindnames(), ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L"]) def testBadPrepare(self): """test that subsequent executes succeed after bad prepare""" self.failUnlessRaises(cx_Oracle.DatabaseError, self.cursor.execute, "begin raise_application_error(-20000, 'this); end;") self.cursor.execute("begin null; end;") def testBadExecute(self): """test that subsequent fetches fail after bad execute""" self.failUnlessRaises(cx_Oracle.DatabaseError, self.cursor.execute, "select y from dual") self.failUnlessRaises(cx_Oracle.InterfaceError, self.cursor.fetchall) def testSetInputSizesMultipleMethod(self): """test setting input sizes with both positional and keyword args""" self.failUnlessRaises(cx_Oracle.InterfaceError, self.cursor.setinputsizes, 5, x = 5) def testSetInputSizesByPosition(self): """test setting input sizes with positional args""" var = self.cursor.var(cx_Oracle.STRING, 100) self.cursor.setinputsizes(None, 5, None, 10, None, cx_Oracle.NUMBER) self.cursor.execute(""" begin :1 := :2 || to_char(:3) || :4 || to_char(:5) || to_char(:6); end;""", [var, 'test_', 5, '_second_', 3, 7]) self.failUnlessEqual(var.getvalue(), "test_5_second_37")
apache-2.0
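A hedged sketch of the executemany pattern exercised by testExecuteManyByName above. It assumes a reachable Oracle instance, placeholder credentials/DSN, and an existing TestExecuteMany table; none of these come from the record itself.

import cx_Oracle

# Placeholder credentials and DSN; the TestExecuteMany table is assumed to exist.
connection = cx_Oracle.connect("user", "password", "localhost/orclpdb1")
cursor = connection.cursor()
rows = [{"value": n} for n in range(250)]
cursor.executemany("insert into TestExecuteMany (IntCol) values (:value)", rows)
connection.commit()
cursor.execute("select count(*) from TestExecuteMany")
print(cursor.fetchone()[0])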
kennedyshead/home-assistant
homeassistant/components/aurora/config_flow.py
2
3659
"""Config flow for SpaceX Launches and Starman.""" import logging from aiohttp import ClientError from auroranoaa import AuroraForecast import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import callback from homeassistant.helpers import aiohttp_client from .const import CONF_THRESHOLD, DEFAULT_NAME, DEFAULT_THRESHOLD, DOMAIN _LOGGER = logging.getLogger(__name__) class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for NOAA Aurora Integration.""" VERSION = 1 @staticmethod @callback def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: name = user_input[CONF_NAME] longitude = user_input[CONF_LONGITUDE] latitude = user_input[CONF_LATITUDE] session = aiohttp_client.async_get_clientsession(self.hass) api = AuroraForecast(session=session) try: await api.get_forecast_data(longitude, latitude) except ClientError: errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id( f"{user_input[CONF_LONGITUDE]}_{user_input[CONF_LATITUDE]}" ) self._abort_if_unique_id_configured() return self.async_create_entry( title=f"Aurora - {name}", data=user_input ) return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_NAME, default=DEFAULT_NAME): str, vol.Required( CONF_LONGITUDE, default=self.hass.config.longitude, ): vol.All( vol.Coerce(float), vol.Range(min=-180, max=180), ), vol.Required( CONF_LATITUDE, default=self.hass.config.latitude, ): vol.All( vol.Coerce(float), vol.Range(min=-90, max=90), ), } ), errors=errors, ) class OptionsFlowHandler(config_entries.OptionsFlow): """Handle options flow changes.""" def __init__(self, config_entry): """Initialize options flow.""" self.config_entry = config_entry async def async_step_init(self, user_input=None): """Manage options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required( CONF_THRESHOLD, default=self.config_entry.options.get( CONF_THRESHOLD, DEFAULT_THRESHOLD ), ): vol.All( vol.Coerce(int), vol.Range(min=0, max=100), ), } ), )
apache-2.0
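The coordinate validation in the config flow above is plain voluptuous and can be exercised outside Home Assistant. This sketch uses literal keys instead of the homeassistant constants and a made-up input.

import voluptuous as vol

schema = vol.Schema(
    {
        # Same validators as the config flow; "longitude"/"latitude" stand in
        # for CONF_LONGITUDE/CONF_LATITUDE.
        vol.Required("longitude"): vol.All(vol.Coerce(float), vol.Range(min=-180, max=180)),
        vol.Required("latitude"): vol.All(vol.Coerce(float), vol.Range(min=-90, max=90)),
    }
)
print(schema({"longitude": "10.5", "latitude": "59.9"}))  # values coerced to float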
h3biomed/ansible
lib/ansible/modules/network/bigswitch/bcf_switch.py
44
5102
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Ted Elhourani <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: bcf_switch author: "Ted (@tedelhourani)" short_description: Create and remove a bcf switch. description: - Create and remove a Big Cloud Fabric switch. version_added: "2.4" options: name: description: - The name of the switch. required: true fabric_role: description: - Fabric role of the switch. choices: ['spine', 'leaf'] required: true leaf_group: description: - The leaf group of the switch if the switch is a leaf. required: false mac: description: - The MAC address of the switch. required: true state: description: - Whether the switch should be present or absent. default: present choices: ['present', 'absent'] controller: description: - The controller IP address. required: true validate_certs: description: - If C(false), SSL certificates will not be validated. This should only be used on personally controlled devices using self-signed certificates. required: false default: true type: bool access_token: description: - Big Cloud Fabric access token. If this isn't set then the environment variable C(BIGSWITCH_ACCESS_TOKEN) is used. ''' EXAMPLES = ''' - name: bcf leaf switch bcf_switch: name: Rack1Leaf1 fabric_role: leaf leaf_group: R1 mac: 00:00:00:02:00:02 controller: '{{ inventory_hostname }}' state: present validate_certs: false ''' RETURN = ''' # ''' import os import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.bigswitch.bigswitch import Rest from ansible.module_utils._text import to_native def switch(module, check_mode): try: access_token = module.params['access_token'] or os.environ['BIGSWITCH_ACCESS_TOKEN'] except KeyError as e: module.fail_json(msg='Unable to load %s' % e.message, exception=traceback.format_exc()) name = module.params['name'] fabric_role = module.params['fabric_role'] leaf_group = module.params['leaf_group'] dpid = '00:00:' + module.params['mac'] state = module.params['state'] controller = module.params['controller'] rest = Rest(module, {'content-type': 'application/json', 'Cookie': 'session_cookie=' + access_token}, 'https://' + controller + ':8443/api/v1/data/controller/core') response = rest.get('switch-config', data={}) if response.status_code != 200: module.fail_json(msg="failed to obtain existing switch config: {0}".format(response.json['description'])) config_present = False for switch in response.json: if all((switch['name'] == name, switch['fabric-role'] == fabric_role, switch['dpid'] == dpid)): config_present = switch.get('leaf-group', None) == leaf_group if config_present: break if state in ('present') and config_present: module.exit_json(changed=False) if state in ('absent') and not config_present: module.exit_json(changed=False) if check_mode: module.exit_json(changed=True) if state in ('present'): data = {'name': name, 'fabric-role': fabric_role, 'leaf-group': leaf_group, 'dpid': dpid} response = rest.put('switch-config[name="%s"]' % name, data) if response.status_code == 204: module.exit_json(changed=True) else: module.fail_json(msg="error configuring switch '{0}': {1}".format(name, response.json['description'])) if state in ('absent'): response = rest.delete('switch-config[name="%s"]' % name, 
data={}) if response.status_code == 204: module.exit_json(changed=True) else: module.fail_json(msg="error deleting switch '{0}': {1}".format(name, response.json['description'])) def main(): module = AnsibleModule( argument_spec=dict( name=dict(type='str', required=True), fabric_role=dict(choices=['spine', 'leaf'], required=True), leaf_group=dict(type='str', required=False), mac=dict(type='str', required=True), controller=dict(type='str', required=True), state=dict(choices=['present', 'absent'], default='present'), validate_certs=dict(type='bool', default='True'), access_token=dict(type='str', no_log=True) ), supports_check_mode=True, ) try: switch(module, check_mode=module.check_mode) except Exception as e: module.fail_json(msg=to_native(e), exception=traceback.format_exc()) if __name__ == '__main__': main()
gpl-3.0
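A standalone restatement of the idempotency check in switch() above, so the comparison logic can be followed without a controller; the sample switch entry is invented.

def config_present(existing_switches, name, fabric_role, dpid, leaf_group):
    # Mirrors the loop in switch(): name/fabric-role/dpid must match, then the
    # leaf-group decides whether the desired config is already in place.
    present = False
    for switch in existing_switches:
        if all((switch['name'] == name,
                switch['fabric-role'] == fabric_role,
                switch['dpid'] == dpid)):
            present = switch.get('leaf-group', None) == leaf_group
            if present:
                break
    return present

sample = [{'name': 'Rack1Leaf1', 'fabric-role': 'leaf',
           'dpid': '00:00:00:00:00:02:00:02', 'leaf-group': 'R1'}]
print(config_present(sample, 'Rack1Leaf1', 'leaf', '00:00:00:00:00:02:00:02', 'R1'))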
kromain/chromium-tools
third_party/gsutil/gslib/commands/getcors.py
51
4357
# Copyright 2012 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import xml from gslib.command import Command from gslib.command import COMMAND_NAME from gslib.command import COMMAND_NAME_ALIASES from gslib.command import CONFIG_REQUIRED from gslib.command import FILE_URIS_OK from gslib.command import MAX_ARGS from gslib.command import MIN_ARGS from gslib.command import PROVIDER_URIS_OK from gslib.command import SUPPORTED_SUB_ARGS from gslib.command import URIS_START_ARG from gslib.exception import CommandException from gslib.help_provider import HELP_NAME from gslib.help_provider import HELP_NAME_ALIASES from gslib.help_provider import HELP_ONE_LINE_SUMMARY from gslib.help_provider import HELP_TEXT from gslib.help_provider import HelpType from gslib.help_provider import HELP_TYPE _detailed_help_text = (""" <B>SYNOPSIS</B> gsutil getcors uri <B>DESCRIPTION</B> Gets the Cross-Origin Resource Sharing (CORS) configuration for a given bucket. This command is supported for buckets only, not objects and you can get the CORS settings for only one bucket at a time. The output from getcors can be redirected into a file, edited and then updated via the setcors sub-command. The CORS configuration is expressed by an XML document with the following structure: <?xml version="1.0" ?> <CorsConfig> <Cors> <Origins> <Origin>origin1.example.com</Origin> </Origins> <Methods> <Method>GET</Method> </Methods> <ResponseHeaders> <ResponseHeader>Content-Type</ResponseHeader> </ResponseHeaders> </Cors> </CorsConfig> For more info about CORS, see http://www.w3.org/TR/cors/. """) class GetCorsCommand(Command): """Implementation of gsutil getcors command.""" # Command specification (processed by parent class). command_spec = { # Name of command. COMMAND_NAME : 'getcors', # List of command name aliases. COMMAND_NAME_ALIASES : [], # Min number of args required by this command. MIN_ARGS : 1, # Max number of args required by this command, or NO_MAX. MAX_ARGS : 1, # Getopt-style string specifying acceptable sub args. SUPPORTED_SUB_ARGS : '', # True if file URIs acceptable for this command. FILE_URIS_OK : False, # True if provider-only URIs acceptable for this command. PROVIDER_URIS_OK : False, # Index in args of first URI arg. URIS_START_ARG : 0, # True if must configure gsutil before running command. CONFIG_REQUIRED : True, } help_spec = { # Name of command or auxiliary help info for which this help applies. HELP_NAME : 'getcors', # List of help name aliases. HELP_NAME_ALIASES : [], # Type of help) HELP_TYPE : HelpType.COMMAND_HELP, # One line summary of this help. HELP_ONE_LINE_SUMMARY : 'Get a bucket\'s CORS XML document', # The full help text. HELP_TEXT : _detailed_help_text, } # Command entry point. def RunCommand(self): # Wildcarding is allowed but must resolve to just one bucket. 
uris = list(self.WildcardIterator(self.args[0]).IterUris()) if len(uris) == 0: raise CommandException('No URIs matched') if len(uris) != 1: raise CommandException('%s matched more than one URI, which is not\n' 'allowed by the %s command' % (self.args[0], self.command_name)) uri = uris[0] if not uri.names_bucket(): raise CommandException('"%s" command must specify a bucket' % self.command_name) cors = uri.get_cors(False, self.headers) # Pretty-print the XML to make it more easily human editable. parsed_xml = xml.dom.minidom.parseString(cors.to_xml().encode('utf-8')) sys.stdout.write(parsed_xml.toprettyxml(indent=' ')) return 0
bsd-3-clause
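The pretty-printing step at the end of RunCommand() above uses only the standard library; this sketch applies it to the sample CORS document shown in the command's help text.

import xml.dom.minidom

cors_xml = ('<?xml version="1.0" ?><CorsConfig><Cors>'
            '<Origins><Origin>origin1.example.com</Origin></Origins>'
            '<Methods><Method>GET</Method></Methods>'
            '</Cors></CorsConfig>')
parsed = xml.dom.minidom.parseString(cors_xml.encode('utf-8'))
print(parsed.toprettyxml(indent='  '))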
tensorflow/tensorflow
tensorflow/python/keras/tests/model_subclassing_test_util.py
6
5439
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Keras models for use in Model subclassing tests.""" from tensorflow.python import keras from tensorflow.python.keras import testing_utils # pylint: disable=missing-docstring,not-callable class SimpleConvTestModel(keras.Model): def __init__(self, num_classes=10): super(SimpleConvTestModel, self).__init__(name='test_model') self.num_classes = num_classes self.conv1 = keras.layers.Conv2D(32, (3, 3), activation='relu') self.flatten = keras.layers.Flatten() self.dense1 = keras.layers.Dense(num_classes, activation='softmax') def call(self, x): x = self.conv1(x) x = self.flatten(x) return self.dense1(x) def get_multi_io_subclass_model(use_bn=False, use_dp=False, num_classes=(2, 3)): """Creates MultiIOModel for the tests of subclass model.""" shared_layer = keras.layers.Dense(32, activation='relu') branch_a = [shared_layer] if use_dp: branch_a.append(keras.layers.Dropout(0.5)) branch_a.append(keras.layers.Dense(num_classes[0], activation='softmax')) branch_b = [shared_layer] if use_bn: branch_b.append(keras.layers.BatchNormalization()) branch_b.append(keras.layers.Dense(num_classes[1], activation='softmax')) model = ( testing_utils._MultiIOSubclassModel( # pylint: disable=protected-access branch_a, branch_b, name='test_model')) return model class NestedTestModel1(keras.Model): """A model subclass nested inside a model subclass. """ def __init__(self, num_classes=2): super(NestedTestModel1, self).__init__(name='nested_model_1') self.num_classes = num_classes self.dense1 = keras.layers.Dense(32, activation='relu') self.dense2 = keras.layers.Dense(num_classes, activation='relu') self.bn = keras.layers.BatchNormalization() self.test_net = testing_utils.SmallSubclassMLP( num_hidden=32, num_classes=4, use_bn=True, use_dp=True) def call(self, inputs): x = self.dense1(inputs) x = self.bn(x) x = self.test_net(x) return self.dense2(x) class NestedTestModel2(keras.Model): """A model subclass with a functional-API graph network inside. """ def __init__(self, num_classes=2): super(NestedTestModel2, self).__init__(name='nested_model_2') self.num_classes = num_classes self.dense1 = keras.layers.Dense(32, activation='relu') self.dense2 = keras.layers.Dense(num_classes, activation='relu') self.bn = self.bn = keras.layers.BatchNormalization() self.test_net = self.get_functional_graph_model(32, 4) @staticmethod def get_functional_graph_model(input_dim, num_classes): # A simple functional-API model (a.k.a. graph network) inputs = keras.Input(shape=(input_dim,)) x = keras.layers.Dense(32, activation='relu')(inputs) x = keras.layers.BatchNormalization()(x) outputs = keras.layers.Dense(num_classes)(x) return keras.Model(inputs, outputs) def call(self, inputs): x = self.dense1(inputs) x = self.bn(x) x = self.test_net(x) return self.dense2(x) def get_nested_model_3(input_dim, num_classes): # A functional-API model with a subclassed model inside. 
# NOTE: this requires the inner subclass to implement `compute_output_shape`. inputs = keras.Input(shape=(input_dim,)) x = keras.layers.Dense(32, activation='relu')(inputs) x = keras.layers.BatchNormalization()(x) class Inner(keras.Model): def __init__(self): super(Inner, self).__init__() self.dense1 = keras.layers.Dense(32, activation='relu') self.dense2 = keras.layers.Dense(5, activation='relu') self.bn = keras.layers.BatchNormalization() def call(self, inputs): x = self.dense1(inputs) x = self.dense2(x) return self.bn(x) test_model = Inner() x = test_model(x) outputs = keras.layers.Dense(num_classes)(x) return keras.Model(inputs, outputs, name='nested_model_3') class CustomCallModel(keras.Model): def __init__(self): super(CustomCallModel, self).__init__() self.dense1 = keras.layers.Dense(1, activation='relu') self.dense2 = keras.layers.Dense(1, activation='softmax') def call(self, first, second, fiddle_with_output='no', training=True): combined = self.dense1(first) + self.dense2(second) if fiddle_with_output == 'yes': return 10. * combined else: return combined class TrainingNoDefaultModel(keras.Model): def __init__(self): super(TrainingNoDefaultModel, self).__init__() self.dense1 = keras.layers.Dense(1) def call(self, x, training): return self.dense1(x) class TrainingMaskingModel(keras.Model): def __init__(self): super(TrainingMaskingModel, self).__init__() self.dense1 = keras.layers.Dense(1) def call(self, x, training=False, mask=None): return self.dense1(x)
apache-2.0
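A minimal subclassing sketch in the spirit of SimpleConvTestModel above, written against the public tf.keras API rather than the internal testing utilities; it assumes TensorFlow is installed and the layer sizes are arbitrary.

import tensorflow as tf

class TinyModel(tf.keras.Model):
    # Two dense layers stand in for the conv/flatten/dense stack used above.
    def __init__(self, num_classes=10):
        super().__init__(name='tiny_model')
        self.dense1 = tf.keras.layers.Dense(32, activation='relu')
        self.dense2 = tf.keras.layers.Dense(num_classes, activation='softmax')

    def call(self, x):
        return self.dense2(self.dense1(x))

model = TinyModel()
print(model(tf.random.normal((4, 8))).shape)  # (4, 10)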
pilou-/ansible
lib/ansible/modules/network/aci/mso_schema_template_externalepg.py
21
6089
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2018, Dag Wieers (@dagwieers) <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: mso_schema_template_externalepg short_description: Manage external EPGs in schema templates description: - Manage external EPGs in schema templates on Cisco ACI Multi-Site. author: - Dag Wieers (@dagwieers) version_added: '2.8' options: schema: description: - The name of the schema. type: str required: yes template: description: - The name of the template. type: str required: yes externalepg: description: - The name of the external EPG to manage. type: str aliases: [ name ] display_name: description: - The name as displayed on the MSO web interface. type: str vrf: description: - The VRF associated to this ANP. type: dict state: description: - Use C(present) or C(absent) for adding or removing. - Use C(query) for listing an object or multiple objects. type: str choices: [ absent, present, query ] default: present extends_documentation_fragment: mso ''' EXAMPLES = r''' - name: Add a new external EPG mso_schema_template_externalepg: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 externalepg: External EPG 1 state: present delegate_to: localhost - name: Remove an external EPG mso_schema_template_externalepg: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 externalepg: external EPG1 state: absent delegate_to: localhost - name: Query a specific external EPGs mso_schema_template_externalepg: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 externalepg: external EPG1 state: query delegate_to: localhost register: query_result - name: Query all external EPGs mso_schema_template_externalepg: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 state: query delegate_to: localhost register: query_result ''' RETURN = r''' ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec, mso_reference_spec, issubset def main(): argument_spec = mso_argument_spec() argument_spec.update( schema=dict(type='str', required=True), template=dict(type='str', required=True), externalepg=dict(type='str', aliases=['name']), # This parameter is not required for querying all objects display_name=dict(type='str'), vrf=dict(type='dict', options=mso_reference_spec()), state=dict(type='str', default='present', choices=['absent', 'present', 'query']), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, required_if=[ ['state', 'absent', ['externalepg']], ['state', 'present', ['externalepg', 'vrf']], ], ) schema = module.params['schema'] template = module.params['template'] externalepg = module.params['externalepg'] display_name = module.params['display_name'] vrf = module.params['vrf'] state = module.params['state'] mso = MSOModule(module) # Get schema_id schema_obj = mso.get_obj('schemas', displayName=schema) if schema_obj: schema_id = schema_obj['id'] else: mso.fail_json(msg="Provided schema '{0}' does not exist".format(schema)) schema_path = 'schemas/{id}'.format(**schema_obj) # Get template templates = [t['name'] for t 
in schema_obj['templates']] if template not in templates: mso.fail_json(msg="Provided template '{0}' does not exist. Existing templates: {1}".format(template, ', '.join(templates))) template_idx = templates.index(template) # Get external EPGs externalepgs = [e['name'] for e in schema_obj['templates'][template_idx]['externalEpgs']] if externalepg is not None and externalepg in externalepgs: externalepg_idx = externalepgs.index(externalepg) mso.existing = schema_obj['templates'][template_idx]['externalEpgs'][externalepg_idx] if state == 'query': if externalepg is None: mso.existing = schema_obj['templates'][template_idx]['externalEpgs'] elif not mso.existing: mso.fail_json(msg="External EPG '{externalepg}' not found".format(externalepg=externalepg)) mso.exit_json() eepgs_path = '/templates/{0}/externalEpgs'.format(template) eepg_path = '/templates/{0}/externalEpgs/{1}'.format(template, externalepg) ops = [] mso.previous = mso.existing if state == 'absent': if mso.existing: mso.sent = mso.existing = {} ops.append(dict(op='remove', path=eepg_path)) elif state == 'present': vrf_ref = mso.make_reference(vrf, 'vrf', schema_id, template) if display_name is None and not mso.existing: display_name = externalepg payload = dict( name=externalepg, displayName=display_name, vrfRef=vrf_ref, # FIXME subnets=[], contractRelationships=[], ) mso.sanitize(payload, collate=True) if mso.existing: ops.append(dict(op='replace', path=eepg_path, value=mso.sent)) else: ops.append(dict(op='add', path=eepgs_path + '/-', value=mso.sent)) mso.existing = mso.proposed if not module.check_mode: mso.request(schema_path, method='PATCH', data=ops) mso.exit_json() if __name__ == "__main__": main()
gpl-3.0
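A standalone sketch of how the module above assembles its JSON-patch operations for the present/absent cases; the schema/template/EPG names and the vrfRef string are placeholders, not real MSO references.

template = 'Template 1'
externalepg = 'External EPG 1'
payload = dict(
    name=externalepg,
    displayName=externalepg,
    vrfRef='<placeholder-vrf-reference>',  # normally built by mso.make_reference()
    subnets=[],
    contractRelationships=[],
)

eepgs_path = '/templates/{0}/externalEpgs'.format(template)
eepg_path = '/templates/{0}/externalEpgs/{1}'.format(template, externalepg)

already_exists = False  # pretend the external EPG is not yet on the template
ops = []
if already_exists:
    ops.append(dict(op='replace', path=eepg_path, value=payload))
else:
    ops.append(dict(op='add', path=eepgs_path + '/-', value=payload))
print(ops)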
randynobx/ansible
lib/ansible/modules/network/nxos/nxos_acl_interface.py
32
8173
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = { 'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = ''' --- module: nxos_acl_interface extends_documentation_fragment: nxos version_added: "2.2" short_description: Manages applying ACLs to interfaces. description: - Manages applying ACLs to interfaces. author: - Jason Edelman (@jedelman8) - Gabriele Gerbino (@GGabriele) options: name: description: - Case sensitive name of the access list (ACL). required: true interface: description: - Full name of interface, e.g. I(Ethernet1/1). required: true direction: description: - Direction ACL to be applied in on the interface. required: true choices: ['ingress', 'egress'] state: description: - Specify desired state of the resource. required: false default: present choices: ['present','absent'] ''' EXAMPLES = ''' - name: apply egress acl to ethernet1/41 nxos_acl_interface: name: ANSIBLE interface: ethernet1/41 direction: egress state: present ''' RETURN = ''' acl_applied_to: description: list of interfaces the ACL is applied to returned: always type: list sample: [{"acl_type": "Router ACL", "direction": "egress", "interface": "Ethernet1/41", "name": "ANSIBLE"}] commands: description: commands sent to the device returned: always type: list sample: ["interface ethernet1/41", "ip access-group ANSIBLE out"] ''' import re from ansible.module_utils.nxos import load_config, run_commands from ansible.module_utils.nxos import nxos_argument_spec, check_args from ansible.module_utils.basic import AnsibleModule NAME = r'.*IP?\s+access list\s+(?P<name>\S+).*' INTERFACE = r'.*\s+(?P<interface>\w+(\d+)?\/?(\d+)?)\s-\s(?P<direction>\w+)\s+\W(?P<acl_type>\w+\s\w+)\W.*' def get_acl_interface(module, acl): command = ['show ip access-list summary'] acl_list = [] body = run_commands(module, command) body_split = body[0].split('Active on interfaces:') for each_acl in body_split: temp = {} try: match_name = re.match(NAME, each_acl, re.DOTALL) name_dict = match_name.groupdict() name = name_dict['name'] except AttributeError: name = '' temp['interfaces'] = [] for line in each_acl.split('\n'): try: match_interface = re.match(INTERFACE, line, re.DOTALL) interface_dict = match_interface.groupdict() interface = interface_dict['interface'] direction = interface_dict['direction'] acl_type = interface_dict['acl_type'] except AttributeError: interface = '' direction = '' acl_type = '' intf_temp = {} if interface: intf_temp['interface'] = interface if acl_type: intf_temp['acl_type'] = acl_type if direction: intf_temp['direction'] = direction if intf_temp: temp['interfaces'].append(intf_temp) if name: temp['name'] = name if temp: acl_list.append(temp) existing_no_null = [] for each in acl_list: if each.get('name') == acl: interfaces = each.get('interfaces') for interface in interfaces: new_temp = {} new_temp['name'] = acl new_temp.update(interface) 
existing_no_null.append(new_temp) return existing_no_null def other_existing_acl(get_existing, interface, direction): # now we'll just get the interface in question # needs to be a list since same acl could be applied in both dirs acls_interface = [] this = {} if get_existing: for each in get_existing: if each.get('interface').lower() == interface: acls_interface.append(each) if acls_interface: for each in acls_interface: if each.get('direction') == direction: this = each return acls_interface, this def apply_acl(proposed): commands = [] commands.append('interface ' + proposed.get('interface')) direction = proposed.get('direction') if direction == 'egress': cmd = 'ip access-group {0} {1}'.format(proposed.get('name'), 'out') elif direction == 'ingress': cmd = 'ip access-group {0} {1}'.format(proposed.get('name'), 'in') commands.append(cmd) return commands def remove_acl(proposed): commands = [] commands.append('interface ' + proposed.get('interface')) direction = proposed.get('direction') if direction == 'egress': cmd = 'no ip access-group {0} {1}'.format(proposed.get('name'), 'out') elif direction == 'ingress': cmd = 'no ip access-group {0} {1}'.format(proposed.get('name'), 'in') commands.append(cmd) return commands def flatten_list(command_lists): flat_command_list = [] for command in command_lists: if isinstance(command, list): flat_command_list.extend(command) else: flat_command_list.append(command) return flat_command_list def main(): argument_spec = dict( name=dict(required=False, type='str'), interface=dict(required=True), direction=dict(required=True, choices=['egress', 'ingress']), state=dict(choices=['absent', 'present'], default='present'), ) argument_spec.update(nxos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) warnings = list() check_args(module, warnings) results = dict(changed=False, warnings=warnings) state = module.params['state'] name = module.params['name'] interface = module.params['interface'].lower() direction = module.params['direction'].lower() proposed = dict(name=name, interface=interface, direction=direction) # includes all interfaces the ACL is applied to (list) get_existing = get_acl_interface(module, name) # interface_acls = includes entries of this ACL on the interface (list) # this_dir_acl_intf = dict - not null if it already exists interfaces_acls, existing = other_existing_acl(get_existing, interface, direction) end_state_acls = get_existing cmds = [] commands = [] if state == 'present': if not existing: command = apply_acl(proposed) if command: commands.append(command) elif state == 'absent': if existing: command = remove_acl(proposed) if command: commands.append(command) if commands: cmds = flatten_list(commands) if cmds: if module.check_mode: module.exit_json(changed=True, commands=cmds) else: load_config(module, cmds) results['changed'] = True end_state_acls = get_acl_interface(module, name) interfaces_acls, this_dir_acl_intf = other_existing_acl(end_state_acls, interface, direction) if 'configure' in cmds: cmds.pop(0) else: cmds = [] results['commands'] = cmds results['acl_applied_to'] = end_state_acls module.exit_json(**results) if __name__ == '__main__': main()
gpl-3.0
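The command construction in apply_acl() above reduces to a few lines of string formatting; this standalone version uses the interface and ACL name from the module's own example.

def build_apply_acl(name, interface, direction):
    # 'egress' maps to "out" and 'ingress' to "in", exactly as in apply_acl().
    suffix = 'out' if direction == 'egress' else 'in'
    return ['interface ' + interface,
            'ip access-group {0} {1}'.format(name, suffix)]

print(build_apply_acl('ANSIBLE', 'ethernet1/41', 'egress'))
# ['interface ethernet1/41', 'ip access-group ANSIBLE out']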
f0rki/cb-multios
original-challenges/NarfRPN/poller/for-release/machine.py
1
4948
#!/usr/bin/env python # # Copyright (C) 2014 Narf Industries <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # from generator.actions import Actions from os.path import dirname, abspath, join import random import struct import sys sys.path.append(join(dirname(dirname(dirname(abspath(__file__)))), "support")) from support import Support, Matrix class PolandBall(Actions): SUCCESS = '\x00' * 4 FAILURE = '\xff' * 4 def start(self): self.state['model'] = Support() def cmd(self): pass def push(self): pass def push_int(self): val = random.randint(-Support.INT_LIMIT, Support.INT_LIMIT) self.state['model'].push(val) self.write(self.state['model'].make_push(val)) self.read(length=4, expect=self.SUCCESS) def push_matrix(self): pass def push_square_matrix(self): size = random.randint(1, 2) val = Matrix(size, size) self.state['model'].push(val) self.write(self.state['model'].make_push(val)) self.read(length=4, expect=self.SUCCESS) def push_large_matrix(self): width = random.randint(3, 16) height = random.randint(3, 16) val = Matrix(width, height) self.state['model'].push(val) self.write(self.state['model'].make_push(val)) self.read(length=4, expect=self.SUCCESS) def push_invalid_matrix(self): size = random.randint(1, 8) self.write(self.state['model'].make_push(Matrix(size, size, None, False))) self.read(length=4, expect=self.FAILURE) def push_double(self): pass def push_valid_double(self): val = random.uniform(-Support.FLOAT_LIMIT, Support.FLOAT_LIMIT) self.state['model'].push(val) self.write(self.state['model'].make_push(val)) self.read(length=4, expect=self.SUCCESS) def push_invalid_double(self): self.write(self.state['model'].make_push(float('inf'))) self.read(length=4, expect=self.FAILURE) def pop(self): res = self.state['model'].pop() self.write(self.state['model'].make_pop()) if res is not None: fuzzy = isinstance(res, Matrix) packed, length = self.state['model'].pack_value(res, fuzzy=fuzzy) self.read(length=length, expect=packed, expect_format='pcre' if fuzzy else 'asciic') self.read(length=4, expect=self.SUCCESS) else: self.read(length=4, expect=self.FAILURE) def clear(self): self.state['model'].clear() self.write(self.state['model'].make_clear()) self.read(length=4, expect=self.SUCCESS) def add(self): exp = self.state['model'].add() self.write(self.state['model'].make_add()) self.read(length=4, expect=self.SUCCESS if exp == 0 else self.FAILURE) def sub(self): exp = self.state['model'].sub() self.write(self.state['model'].make_sub()) self.read(length=4, expect=self.SUCCESS if exp == 0 
else self.FAILURE) def mul(self): exp = self.state['model'].mul() self.write(self.state['model'].make_mul()) self.read(length=4, expect=self.SUCCESS if exp == 0 else self.FAILURE) def div(self): exp = self.state['model'].div() self.write(self.state['model'].make_div()) self.read(length=4, expect=self.SUCCESS if exp == 0 else self.FAILURE) def inv(self): exp = self.state['model'].inv() # Avoid vuln by not inverting when invalid if exp == -1: return self.write(self.state['model'].make_inv()) self.read(length=4, expect=self.SUCCESS if exp == 0 else self.FAILURE) def finish(self): self.write(self.state['model'].make_quit()) def invalid_cmd(self): self.write(struct.pack('<I', random.randint(8, 2 ** 32 - 1))) self.read(length=4, expect=self.FAILURE)
mit
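The poller above drives the challenge binary with fixed-width little-endian values; this fragment shows just the packing used by invalid_cmd(), with the same value range.

import random
import struct

value = random.randint(8, 2 ** 32 - 1)   # same range as invalid_cmd()
packed = struct.pack('<I', value)        # 4-byte little-endian unsigned int
print(len(packed), repr(packed))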
qsnake/gpaw
gpaw/pes/ds_beta.py
3
7095
from math import exp, sin, cos, pi, sqrt, acos, asin import numpy as np from ase.units import Bohr, Hartree, alpha import _gpaw from gpaw.fd_operators import Gradient from gpaw.utilities.gl_quadrature import GaussLegendre from gpaw.pes import ds_prefactor from gpaw.pes.state import State, H1s from gpaw.pes.continuum import PlaneWave #debug from gpaw.mpi import rank class CrossSectionBeta: def __init__(self, initial = None, final = None, r0 = [0, 0, 0], # center of mass vector form='L', ngauss=8): self.initial = initial self.r0 = np.array(r0) / Bohr self.form = form if final is None: self.final = PlaneWave(initial.gd) else: self.final = final self.Ekin = None # Gauss-Legendre weights and abscissas self.gl = {} self.gl['x'] = GaussLegendre(-1., 1., ngauss) self.gl['phi'] = GaussLegendre(0, 2 * pi, ngauss) self.gl['psi'] = self.gl['phi'] self.angle = {} # sin and cos of the magic angle (54.7 deg) self.costhm = 1. / sqrt(3) self.sinthm = sqrt(2. / 3.) def calculate(self, Ekin): """Calculate the necessary overlaps.""" Ekin = Ekin / Hartree if self.Ekin == Ekin: return self.Ekin = Ekin # photoelectron momentum self.k = sqrt(2 * self.Ekin) for angle in ['x', 'phi', 'psi']: self.angle[angle] = self.gl[angle].get_x()[0] self.T20, self.T2m = self.gi_x() # we need the average self.T20 /= 8 * pi**2 self.T2m /= 8 * pi**2 def get_omega(self): """Return the necessary photon energy.""" return self.Ekin - self.initial.get_energy() / Hartree def get_beta(self, Ekin=None): """Return the asymmetry parameter. E: photoelectron kinetic energy [eV] """ if Ekin is not None: self.calculate(Ekin) return self.T20 / self.T2m - 1. def get_ds(self, Ekin=None, units='Mb'): """Return the total cross section. Ekin: photoelectron kinetic energy [eV] units: 'Mb', 1 Mb = 1.e-22 m**2 'Ang', 1 A**2 = 1.e-20 m**2 'a.u.', 1 a_0**2 = 2.8e-21 m**2 as output units """ if Ekin is not None: self.calculate(Ekin) try: pre = ds_prefactor[units] except KeyError: raise NotImplementedError, 'Unknown units: >' + units + '<' # me_c = self.initial.get_me_c(np.array([0., 0., self.k]), self.form) # T2mana = np.abs(np.dot(me_c,me_c)) / 3. # print "T2m:", T2mana, self.T2m omega = self.get_omega() # integration over momentum agles pre *= self.k * 4 * pi # print omega, self.initial.get_ds(self.k, omega, self.form), \ # (self.k * 4 * pi * (2 * pi)**2 / 137.0359895 * self.T2m / omega) return pre * ((2 * pi)**2 * alpha * self.T2m / omega) def gauss_integrate(self, angle, function): T20 = 0. T2m = 0. gl = self.gl[angle] for x, w in zip(gl.get_x(), gl.get_w()): # print angle, x, w self.angle[angle] = x t20, t2m = function() T20 += w * t20 T2m += w * t2m # print "<gauss_integrate> angle=", angle, T2m return T20, T2m def gi_x(self): """Gauss integrate over x=cos(theta)""" return self.gauss_integrate('x', self.gi_phi) def gi_phi(self): """Gauss integrate over phi""" return self.gauss_integrate('phi', self.gi_psi) def gi_psi(self): """Gauss integrate over psi""" return self.gauss_integrate('psi', self.integrand) def integrand(self): # polarisation in the direction of vk costh = self.angle['x'] sinth = sqrt(1. 
- costh**2) sinphi = sin(self.angle['phi']) cosphi = cos(self.angle['phi']) eps0 = np.array([sinth * cosphi, sinth * sinphi, costh ]) vk = self.k * eps0 # polarisation at the magic angle costhm = self.costhm sinthm = self.sinthm sinpsi = sin(self.angle['psi']) cospsi = cos(self.angle['psi']) epsm = np.array([sinthm * (cosphi * sinpsi * costh + sinphi * cospsi) + costhm * cosphi * sinth, sinthm * (sinphi * sinpsi * costh - cosphi * cospsi) + costhm * sinphi * sinth, costhm * costh - sinthm * sinth * sinpsi]) # initial and final state on the grid initial_G = self.initial.get_grid() final_G = self.final.get_grid(vk, self.r0) ini_analyt = H1s(self.initial.gd, self.r0) gd = self.initial.gd if self.form == 'L': if_G = initial_G * final_G omega = self.get_omega() if 0: me_c = [] for c in range(3): xyz_G = ((np.arange(gd.n_c[c]) + gd.beg_c[c]) * gd.h_c[c] - self.r0[c]) shape = [1, 1, 1] shape[c] = -1 xyz_G.shape = tuple(shape) np.resize(xyz_G, gd.n_c) me_c.append(gd.integrate(if_G * xyz_G)) me_c = np.array(me_c) * omega else: me_c = gd.calculate_dipole_moment(if_G) me_c += self.r0 * gd.integrate(if_G) me_c *= -omega elif self.form == 'V': dtype = final_G.dtype phase_cd = np.ones((3, 2), dtype) if not hasattr(gd, 'ddr'): gd.ddr = [Gradient(gd, c, dtype=dtype).apply for c in range(3)] dfinal_G = gd.empty(dtype=dtype) me_c = np.empty(3, dtype=dtype) for c in range(3): # print "rank, c, apply", rank, c, dtype, final_G.shape, dfinal_G.shape gd.ddr[c](final_G, dfinal_G, phase_cd) me_c[c] = gd.integrate(initial_G * dfinal_G) else: raise NotImplementedError # print self.k, self.initial.get_me_c(vk, self.form)[0].imag, me_c[0].imag if 0: omega = self.get_omega() me_analyt = ini_analyt.get_me_c(vk, self.form)[0].imag me = me_c[0].imag def ds(me): return self.k / omega * me**2 print omega, ds(me_analyt), ds(me), me_analyt, me # print 'analyt', self.initial.get_me_c(vk, self.form) # print 'num', me_c # print 'analyt/num', self.initial.get_me_c(vk, self.form) / me_c # return the squared matrix elements T2 = [] for eps in [eps0, epsm]: me = np.dot(eps, me_c) # print "eps, T2:", eps, (me * me.conj()).real T2.append((me * me.conj()).real) # print vk, T2 return T2[0], T2[1]
gpl-3.0
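CrossSectionBeta above averages the matrix elements by nested Gauss-Legendre quadrature over x, phi and psi. The one-dimensional rule itself can be sketched with numpy's leggauss, used here as a substitute for gpaw's GaussLegendre helper (not the same API).

import numpy as np

nodes, weights = np.polynomial.legendre.leggauss(8)   # 8-point rule on [-1, 1]
integral = np.sum(weights * nodes ** 2)               # integral of x**2 over [-1, 1]
print(integral, 2.0 / 3.0)                            # both approximately 0.6667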
ocefpaf/pyproj
pyproj/geod.py
1
25100
""" The Geod class can perform forward and inverse geodetic, or Great Circle, computations. The forward computation involves determining latitude, longitude and back azimuth of a terminus point given the latitude and longitude of an initial point, plus azimuth and distance. The inverse computation involves determining the forward and back azimuths and distance given the latitudes and longitudes of an initial and terminus point. """ __all__ = ["Geod", "pj_ellps", "geodesic_version_str"] import math from typing import Any, Dict, List, Optional, Tuple, Union from pyproj._geod import Geod as _Geod from pyproj._geod import geodesic_version_str from pyproj._list import get_ellps_map from pyproj.exceptions import GeodError from pyproj.utils import _convertback, _copytobuffer pj_ellps = get_ellps_map() class Geod(_Geod): """ performs forward and inverse geodetic, or Great Circle, computations. The forward computation (using the 'fwd' method) involves determining latitude, longitude and back azimuth of a terminus point given the latitude and longitude of an initial point, plus azimuth and distance. The inverse computation (using the 'inv' method) involves determining the forward and back azimuths and distance given the latitudes and longitudes of an initial and terminus point. Attributes ---------- initstring: str The string form of the user input used to create the Geod. sphere: bool If True, it is a sphere. a: float The ellipsoid equatorial radius, or semi-major axis. b: float The ellipsoid polar radius, or semi-minor axis. es: float The 'eccentricity' of the ellipse, squared (1-b2/a2). f: float The ellipsoid 'flattening' parameter ( (a-b)/a ). """ def __init__(self, initstring: Optional[str] = None, **kwargs) -> None: """ initialize a Geod class instance. Geodetic parameters for specifying the ellipsoid can be given in a dictionary 'initparams', as keyword arguments, or as as proj geod initialization string. You can get a dictionary of ellipsoids using :func:`pyproj.get_ellps_map` or with the variable `pyproj.pj_ellps`. The parameters of the ellipsoid may also be set directly using the 'a' (semi-major or equatorial axis radius) keyword, and any one of the following keywords: 'b' (semi-minor, or polar axis radius), 'e' (eccentricity), 'es' (eccentricity squared), 'f' (flattening), or 'rf' (reciprocal flattening). See the proj documentation (https://proj.org) for more information about specifying ellipsoid parameters. Example usage: >>> from pyproj import Geod >>> g = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> # specify the lat/lons of some cities. >>> boston_lat = 42.+(15./60.); boston_lon = -71.-(7./60.) >>> portland_lat = 45.+(31./60.); portland_lon = -123.-(41./60.) >>> newyork_lat = 40.+(47./60.); newyork_lon = -73.-(58./60.) >>> london_lat = 51.+(32./60.); london_lon = -(5./60.) >>> # compute forward and back azimuths, plus distance >>> # between Boston and Portland. >>> az12,az21,dist = g.inv(boston_lon,boston_lat,portland_lon,portland_lat) >>> f"{az12:.3f} {az21:.3f} {dist:.3f}" '-66.531 75.654 4164192.708' >>> # compute latitude, longitude and back azimuth of Portland, >>> # given Boston lat/lon, forward azimuth and distance to Portland. 
>>> endlon, endlat, backaz = g.fwd(boston_lon, boston_lat, az12, dist) >>> f"{endlat:.3f} {endlon:.3f} {backaz:.3f}" '45.517 -123.683 75.654' >>> # compute the azimuths, distances from New York to several >>> # cities (pass a list) >>> lons1 = 3*[newyork_lon]; lats1 = 3*[newyork_lat] >>> lons2 = [boston_lon, portland_lon, london_lon] >>> lats2 = [boston_lat, portland_lat, london_lat] >>> az12,az21,dist = g.inv(lons1,lats1,lons2,lats2) >>> for faz, baz, d in list(zip(az12,az21,dist)): ... f"{faz:7.3f} {baz:8.3f} {d:12.3f}" ' 54.663 -123.448 288303.720' '-65.463 79.342 4013037.318' ' 51.254 -71.576 5579916.651' >>> g2 = Geod('+ellps=clrk66') # use proj4 style initialization string >>> az12,az21,dist = g2.inv(boston_lon,boston_lat,portland_lon,portland_lat) >>> f"{az12:.3f} {az21:.3f} {dist:.3f}" '-66.531 75.654 4164192.708' """ # if initparams is a proj-type init string, # convert to dict. ellpsd = {} # type: Dict[str, Union[str, float]] if initstring is not None: for kvpair in initstring.split(): # Actually only +a and +b are needed # We can ignore safely any parameter that doesn't have a value if kvpair.find("=") == -1: continue k, v = kvpair.split("=") k = k.lstrip("+") if k in ["a", "b", "rf", "f", "es", "e"]: ellpsd[k] = float(v) else: ellpsd[k] = v # merge this dict with kwargs dict. kwargs = dict(list(kwargs.items()) + list(ellpsd.items())) sphere = False if "ellps" in kwargs: # ellipse name given, look up in pj_ellps dict ellps_dict = pj_ellps[kwargs["ellps"]] a = ellps_dict["a"] # type: float if ellps_dict["description"] == "Normal Sphere": sphere = True if "b" in ellps_dict: b = ellps_dict["b"] # type: float es = 1.0 - (b * b) / (a * a) # type: float f = (a - b) / a # type: float elif "rf" in ellps_dict: f = 1.0 / ellps_dict["rf"] b = a * (1.0 - f) es = 1.0 - (b * b) / (a * a) else: # a (semi-major axis) and one of # b the semi-minor axis # rf the reciprocal flattening # f flattening # es eccentricity squared # must be given. a = kwargs["a"] if "b" in kwargs: b = kwargs["b"] es = 1.0 - (b * b) / (a * a) f = (a - b) / a elif "rf" in kwargs: f = 1.0 / kwargs["rf"] b = a * (1.0 - f) es = 1.0 - (b * b) / (a * a) elif "f" in kwargs: f = kwargs["f"] b = a * (1.0 - f) es = 1.0 - (b / a) ** 2 elif "es" in kwargs: es = kwargs["es"] b = math.sqrt(a ** 2 - es * a ** 2) f = (a - b) / a elif "e" in kwargs: es = kwargs["e"] ** 2 b = math.sqrt(a ** 2 - es * a ** 2) f = (a - b) / a else: b = a f = 0.0 es = 0.0 # msg='ellipse name or a, plus one of f,es,b must be given' # raise ValueError(msg) if math.fabs(f) < 1.0e-8: sphere = True super().__init__(a, f, sphere, b, es) def fwd( self, lons: Any, lats: Any, az: Any, dist: Any, radians=False ) -> Tuple[Any, Any, Any]: """ Forward transformation Determine longitudes, latitudes and back azimuths of terminus points given longitudes and latitudes of initial points, plus forward azimuths and distances. Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar Longitude(s) of initial point(s) lats: array, :class:`numpy.ndarray`, list, tuple, or scalar Latitude(s) of initial point(s) az: array, :class:`numpy.ndarray`, list, tuple, or scalar Forward azimuth(s) dist: array, :class:`numpy.ndarray`, list, tuple, or scalar Distance(s) between initial and terminus point(s) in meters radians: bool, optional If True, the input data is assumed to be in radians. 
Returns ------- array, :class:`numpy.ndarray`, list, tuple, or scalar: Longitude(s) of terminus point(s) array, :class:`numpy.ndarray`, list, tuple, or scalar: Latitude(s) of terminus point(s) array, :class:`numpy.ndarray`, list, tuple, or scalar: Back azimuth(s) """ # process inputs, making copies that support buffer API. inx, xisfloat, xislist, xistuple = _copytobuffer(lons) iny, yisfloat, yislist, yistuple = _copytobuffer(lats) inz, zisfloat, zislist, zistuple = _copytobuffer(az) ind, disfloat, dislist, distuple = _copytobuffer(dist) self._fwd(inx, iny, inz, ind, radians=radians) # if inputs were lists, tuples or floats, convert back. outx = _convertback(xisfloat, xislist, xistuple, inx) outy = _convertback(yisfloat, yislist, yistuple, iny) outz = _convertback(zisfloat, zislist, zistuple, inz) return outx, outy, outz def inv( self, lons1: Any, lats1: Any, lons2: Any, lats2: Any, radians=False ) -> Tuple[Any, Any, Any]: """ Inverse transformation Determine forward and back azimuths, plus distances between initial points and terminus points. Parameters ---------- lons1: array, :class:`numpy.ndarray`, list, tuple, or scalar Longitude(s) of initial point(s) lats1: array, :class:`numpy.ndarray`, list, tuple, or scalar Latitude(s) of initial point(s) lons2: array, :class:`numpy.ndarray`, list, tuple, or scalar Longitude(s) of terminus point(s) lats2: array, :class:`numpy.ndarray`, list, tuple, or scalar Latitude(s) of terminus point(s) radians: bool, optional If True, the input data is assumed to be in radians. Returns ------- array, :class:`numpy.ndarray`, list, tuple, or scalar: Forward azimuth(s) array, :class:`numpy.ndarray`, list, tuple, or scalar: Back azimuth(s) array, :class:`numpy.ndarray`, list, tuple, or scalar: Distance(s) between initial and terminus point(s) in meters """ # process inputs, making copies that support buffer API. inx, xisfloat, xislist, xistuple = _copytobuffer(lons1) iny, yisfloat, yislist, yistuple = _copytobuffer(lats1) inz, zisfloat, zislist, zistuple = _copytobuffer(lons2) ind, disfloat, dislist, distuple = _copytobuffer(lats2) self._inv(inx, iny, inz, ind, radians=radians) # if inputs were lists, tuples or floats, convert back. outx = _convertback(xisfloat, xislist, xistuple, inx) outy = _convertback(yisfloat, yislist, yistuple, iny) outz = _convertback(zisfloat, zislist, zistuple, inz) return outx, outy, outz def npts( self, lon1: float, lat1: float, lon2: float, lat2: float, npts: int, radians: bool = False, ) -> List: """ Given a single initial point and terminus point, returns a list of longitude/latitude pairs describing npts equally spaced intermediate points along the geodesic between the initial and terminus points. Example usage: >>> from pyproj import Geod >>> g = Geod(ellps='clrk66') # Use Clarke 1866 ellipsoid. >>> # specify the lat/lons of Boston and Portland. >>> boston_lat = 42.+(15./60.); boston_lon = -71.-(7./60.) >>> portland_lat = 45.+(31./60.); portland_lon = -123.-(41./60.) >>> # find ten equally spaced points between Boston and Portland. >>> lonlats = g.npts(boston_lon,boston_lat,portland_lon,portland_lat,10) >>> for lon,lat in lonlats: f'{lat:.3f} {lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' >>> # test with radians=True (inputs/outputs in radians, not degrees) >>> import math >>> dg2rad = math.radians(1.) >>> rad2dg = math.degrees(1.) >>> lonlats = g.npts( ... dg2rad*boston_lon, ... 
dg2rad*boston_lat, ... dg2rad*portland_lon, ... dg2rad*portland_lat, ... 10, ... radians=True ... ) >>> for lon,lat in lonlats: f'{rad2dg*lat:.3f} {rad2dg*lon:.3f}' '43.528 -75.414' '44.637 -79.883' '45.565 -84.512' '46.299 -89.279' '46.830 -94.156' '47.149 -99.112' '47.251 -104.106' '47.136 -109.100' '46.805 -114.051' '46.262 -118.924' Parameters ---------- lon1: float Longitude of the initial point lat1: float Latitude of the initial point lon2: float Longitude of the terminus point lat2: float Latitude of the terminus point npts: int Number of points to be returned radians: bool, optional If True, the input data is assumed to be in radians. Returns ------- list of tuples: list of (lon, lat) points along the geodesic between the initial and terminus points. """ lons, lats = super()._npts(lon1, lat1, lon2, lat2, npts, radians=radians) return list(zip(lons, lats)) def line_length(self, lons: Any, lats: Any, radians: bool = False) -> float: """ .. versionadded:: 2.3.0 Calculate the total distance between points along a line. >>> from pyproj import Geod >>> geod = Geod('+a=6378137 +f=0.0033528106647475126') >>> lats = [-72.9, -71.9, -74.9, -74.3, -77.5, -77.4, -71.7, -65.9, -65.7, ... -66.6, -66.9, -69.8, -70.0, -71.0, -77.3, -77.9, -74.7] >>> lons = [-74, -102, -102, -131, -163, 163, 172, 140, 113, ... 88, 59, 25, -4, -14, -33, -46, -61] >>> total_length = geod.line_length(lons, lats) >>> f"{total_length:.3f}" '14259605.611' Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar The longitude points along a line. lats: array, :class:`numpy.ndarray`, list, tuple, or scalar The latitude points along a line. radians: bool, optional If True, the input data is assumed to be in radians. Returns ------- float: The total length of the line. """ # process inputs, making copies that support buffer API. inx, xisfloat, xislist, xistuple = _copytobuffer(lons) iny, yisfloat, yislist, yistuple = _copytobuffer(lats) return self._line_length(inx, iny, radians=radians) def line_lengths(self, lons: Any, lats: Any, radians: bool = False) -> Any: """ .. versionadded:: 2.3.0 Calculate the distances between points along a line. >>> from pyproj import Geod >>> geod = Geod(ellps="WGS84") >>> lats = [-72.9, -71.9, -74.9] >>> lons = [-74, -102, -102] >>> for line_length in geod.line_lengths(lons, lats): ... f"{line_length:.3f}" '943065.744' '334805.010' Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar The longitude points along a line. lats: array, :class:`numpy.ndarray`, list, tuple, or scalar The latitude points along a line. radians: bool, optional If True, the input data is assumed to be in radians. Returns ------- array, :class:`numpy.ndarray`, list, tuple, or scalar: The total length of the line. """ # process inputs, making copies that support buffer API. inx, xisfloat, xislist, xistuple = _copytobuffer(lons) iny, yisfloat, yislist, yistuple = _copytobuffer(lats) self._line_length(inx, iny, radians=radians) line_lengths = _convertback(xisfloat, xislist, xistuple, inx) return line_lengths if xisfloat else line_lengths[:-1] def polygon_area_perimeter( self, lons: Any, lats: Any, radians: bool = False ) -> Tuple[float, float]: """ .. versionadded:: 2.3.0 A simple interface for computing the area (meters^2) and perimeter (meters) of a geodesic polygon. Arbitrarily complex polygons are allowed. In the case self-intersecting of polygons the area is accumulated "algebraically", e.g., the areas of the 2 loops in a figure-8 polygon will partially cancel. 
There's no need to "close" the polygon by repeating the first vertex. The area returned is signed with counter-clockwise traversal being treated as positive. .. note:: lats should be in the range [-90 deg, 90 deg]. Example usage: >>> from pyproj import Geod >>> geod = Geod('+a=6378137 +f=0.0033528106647475126') >>> lats = [-72.9, -71.9, -74.9, -74.3, -77.5, -77.4, -71.7, -65.9, -65.7, ... -66.6, -66.9, -69.8, -70.0, -71.0, -77.3, -77.9, -74.7] >>> lons = [-74, -102, -102, -131, -163, 163, 172, 140, 113, ... 88, 59, 25, -4, -14, -33, -46, -61] >>> poly_area, poly_perimeter = geod.polygon_area_perimeter(lons, lats) >>> f"{poly_area:.1f} {poly_perimeter:.1f}" '13376856682207.4 14710425.4' Parameters ---------- lons: array, :class:`numpy.ndarray`, list, tuple, or scalar An array of longitude values. lats: array, :class:`numpy.ndarray`, list, tuple, or scalar An array of latitude values. radians: bool, optional If True, the input data is assumed to be in radians. Returns ------- (float, float): The geodesic area (meters^2) and permimeter (meters) of the polygon. """ return self._polygon_area_perimeter( _copytobuffer(lons)[0], _copytobuffer(lats)[0], radians=radians ) def geometry_length(self, geometry, radians: bool = False) -> float: """ .. versionadded:: 2.3.0 Returns the geodesic length (meters) of the shapely geometry. If it is a Polygon, it will return the sum of the lengths along the perimeter. If it is a MultiPolygon or MultiLine, it will return the sum of the lengths. Example usage: >>> from pyproj import Geod >>> from shapely.geometry import Point, LineString >>> line_string = LineString([Point(1, 2), Point(3, 4)]) >>> geod = Geod(ellps="WGS84") >>> f"{geod.geometry_length(line_string):.3f}" '313588.397' Parameters ---------- geometry: :class:`shapely.geometry.BaseGeometry` The geometry to calculate the length from. radians: bool, optional If True, the input data is assumed to be in radians. Returns ------- float: The total geodesic length of the geometry (meters). """ try: return self.line_length(*geometry.xy, radians=radians) # type: ignore except (AttributeError, NotImplementedError): pass if hasattr(geometry, "exterior"): return self.geometry_length(geometry.exterior, radians=radians) elif hasattr(geometry, "geoms"): total_length = 0.0 for geom in geometry.geoms: total_length += self.geometry_length(geom, radians=radians) return total_length raise GeodError("Invalid geometry provided.") def geometry_area_perimeter( self, geometry, radians: bool = False ) -> Tuple[float, float]: """ .. versionadded:: 2.3.0 A simple interface for computing the area (meters^2) and perimeter (meters) of a geodesic polygon as a shapely geometry. Arbitrarily complex polygons are allowed. In the case self-intersecting of polygons the area is accumulated "algebraically", e.g., the areas of the 2 loops in a figure-8 polygon will partially cancel. There's no need to "close" the polygon by repeating the first vertex. .. note:: lats should be in the range [-90 deg, 90 deg]. .. warning:: The area returned is signed with counter-clockwise (CCW) traversal being treated as positive. For polygons, holes should use the opposite traversal to the exterior (if the exterior is CCW, the holes/interiors should be CW). You can use `shapely.ops.orient` to modify the orientation. If it is a Polygon, it will return the area and exterior perimeter. It will subtract the area of the interior holes. If it is a MultiPolygon or MultiLine, it will return the sum of the areas and perimeters of all geometries. 
Example usage: >>> from pyproj import Geod >>> from shapely.geometry import LineString, Point, Polygon >>> geod = Geod(ellps="WGS84") >>> poly_area, poly_perimeter = geod.geometry_area_perimeter( ... Polygon( ... LineString([ ... Point(1, 1), Point(10, 1), Point(10, 10), Point(1, 10) ... ]), ... holes=[LineString([Point(1, 2), Point(3, 4), Point(5, 2)])], ... ) ... ) >>> f"{poly_area:.3f} {poly_perimeter:.3f}" '944373881400.339 3979008.036' Parameters ---------- geometry: :class:`shapely.geometry.BaseGeometry` The geometry to calculate the area and perimeter from. radians: bool, optional If True, the input data is assumed to be in radians. Default is degrees. Returns ------- (float, float): The geodesic area (meters^2) and permimeter (meters) of the polygon. """ try: return self.polygon_area_perimeter( # type: ignore *geometry.xy, radians=radians, ) except (AttributeError, NotImplementedError): pass # polygon if hasattr(geometry, "exterior"): total_area, total_perimeter = self.geometry_area_perimeter( geometry.exterior, radians=radians ) # subtract area of holes for hole in geometry.interiors: area, _ = self.geometry_area_perimeter(hole, radians=radians) total_area += area return total_area, total_perimeter # multi geometries elif hasattr(geometry, "geoms"): total_area = 0.0 total_perimeter = 0.0 for geom in geometry.geoms: area, perimeter = self.geometry_area_perimeter(geom, radians=radians) total_area += area total_perimeter += perimeter return total_area, total_perimeter raise GeodError("Invalid geometry provided.") def __repr__(self) -> str: # search for ellipse name for (ellps, vals) in pj_ellps.items(): if self.a == vals["a"]: b = vals.get("b", None) rf = vals.get("rf", None) # self.sphere is True when self.f is zero or very close to # zero (0), so prevent divide by zero. if self.b == b or (not self.sphere and (1.0 / self.f) == rf): return f"{self.__class__.__name__}(ellps={ellps!r})" # no ellipse name found, call super class return super().__repr__() def __eq__(self, other: Any) -> bool: """ equality operator == for Geod objects Example usage: >>> from pyproj import Geod >>> # Use Clarke 1866 ellipsoid. >>> gclrk1 = Geod(ellps='clrk66') >>> # Define Clarke 1866 using parameters >>> gclrk2 = Geod(a=6378206.4, b=6356583.8) >>> gclrk1 == gclrk2 True >>> # WGS 66 ellipsoid, PROJ style >>> gwgs66 = Geod('+ellps=WGS66') >>> # Naval Weapons Lab., 1965 ellipsoid >>> gnwl9d = Geod('+ellps=NWL9D') >>> # these ellipsoids are the same >>> gnwl9d == gwgs66 True >>> gclrk1 != gnwl9d # Clarke 1866 is unlike NWL9D True """ if not isinstance(other, _Geod): return False return self.__repr__() == other.__repr__()
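The geometry_area_perimeter docstring above warns that interior rings must be traversed opposite to the exterior and points at shapely.ops.orient. The following is a minimal sketch of that workflow, assuming shapely and pyproj are installed; the polygon coordinates are made up purely for illustration and are not part of the original module.

# Sketch: normalizing ring orientation before a geodesic area computation,
# as suggested by the geometry_area_perimeter docstring above.
# Assumes shapely and pyproj are installed; coordinates are illustrative only.
from shapely.geometry import Polygon
from shapely.ops import orient
from pyproj import Geod

geod = Geod(ellps="WGS84")

# A polygon with one hole. orient() forces a CCW exterior and CW interiors,
# so the returned area is positive and the hole is subtracted correctly
# even though the exterior below is defined clockwise.
poly = Polygon(
    [(0, 0), (0, 10), (10, 10), (10, 0)],
    holes=[[(2, 2), (2, 4), (4, 4), (4, 2)]],
)
area, perimeter = geod.geometry_area_perimeter(orient(poly, sign=1.0))
print(f"area={area:.1f} m^2, perimeter={perimeter:.1f} m")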
isc
kistef/Wappalyzer
src/drivers/python/wappalyzer.py
27
1470
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import PyV8 import requests from urlparse import urlparse try: import json except ImportError: import simplejson as json class Wappalyzer(object): def __init__(self, url): self.file_dir = os.path.dirname(__file__) f = open(os.path.join(self.file_dir, 'apps.json')) data = json.loads(f.read()) f.close() self.categories = data['categories'] self.apps = data['apps'] self.url = url def analyze(self): ctxt = PyV8.JSContext() ctxt.enter() f1 = open(os.path.join(self.file_dir, 'js/wappalyzer.js')) f2 = open(os.path.join(self.file_dir, '../php/js/driver.js')) ctxt.eval(f1.read()) ctxt.eval(f2.read()) f1.close() f2.close() host = urlparse(self.url).hostname response = requests.get(self.url) html = response.text headers = dict(response.headers) data = {'host': host, 'url': self.url, 'html': html, 'headers': headers} apps = json.dumps(self.apps) categories = json.dumps(self.categories) return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data))) if __name__ == '__main__': try: w = Wappalyzer(sys.argv[1]) print w.analyze() except IndexError: print ('Usage: python %s <url>' % sys.argv[0])
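The driver above is Python 2 and leans on PyV8 to run the shared wappalyzer.js engine. Below is a hedged sketch of only the HTTP/JSON preparation half in Python 3; the JavaScript evaluation step is deliberately left as a comment because PyV8 has no Python 3 build, and the helper name prepare_payload is illustrative rather than part of the original driver.

# Sketch only: the request/JSON preparation half of the driver above,
# rewritten for Python 3. The PyV8 evaluation step is intentionally omitted;
# prepare_payload is an illustrative name, not part of the original code.
import json
from urllib.parse import urlparse

import requests


def prepare_payload(url):
    """Fetch a page and build the dict the wappalyzer.js driver expects."""
    response = requests.get(url, timeout=10)
    return {
        "host": urlparse(url).hostname,
        "url": url,
        "html": response.text,
        "headers": dict(response.headers),
    }


if __name__ == "__main__":
    payload = prepare_payload("http://example.com")
    # An embedded JS engine would receive this payload exactly as the
    # PyV8-based analyze() method above passes it to w.driver.data.
    print(json.dumps(payload["headers"], indent=2))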
gpl-3.0
brokenjacobs/ansible
lib/ansible/modules/cloud/amazon/s3_logging.py
77
5864
#!/usr/bin/python # # This is a free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This Ansible library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this library. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['stableinterface'], 'supported_by': 'curated'} DOCUMENTATION = ''' --- module: s3_logging short_description: Manage logging facility of an s3 bucket in AWS description: - Manage logging facility of an s3 bucket in AWS version_added: "2.0" author: Rob White (@wimnat) options: name: description: - "Name of the s3 bucket." required: true state: description: - "Enable or disable logging." required: false default: present choices: [ 'present', 'absent' ] target_bucket: description: - "The bucket to log to. Required when state=present." required: false default: null target_prefix: description: - "The prefix that should be prepended to the generated log files written to the target_bucket." required: false default: "" extends_documentation_fragment: - aws - ec2 ''' EXAMPLES = ''' # Note: These examples do not set authentication details, see the AWS Guide for details. - name: Enable logging of s3 bucket mywebsite.com to s3 bucket mylogs s3_logging: name: mywebsite.com target_bucket: mylogs target_prefix: logs/mywebsite.com state: present - name: Remove logging on an s3 bucket s3_logging: name: mywebsite.com state: absent ''' try: import boto.ec2 from boto.s3.connection import OrdinaryCallingFormat, Location from boto.exception import BotoServerError, S3CreateError, S3ResponseError HAS_BOTO = True except ImportError: HAS_BOTO = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ec2 import AnsibleAWSError, ec2_argument_spec, get_aws_connection_info def compare_bucket_logging(bucket, target_bucket, target_prefix): bucket_log_obj = bucket.get_logging_status() if bucket_log_obj.target != target_bucket or bucket_log_obj.prefix != target_prefix: return False else: return True def enable_bucket_logging(connection, module): bucket_name = module.params.get("name") target_bucket = module.params.get("target_bucket") target_prefix = module.params.get("target_prefix") changed = False try: bucket = connection.get_bucket(bucket_name) except S3ResponseError as e: module.fail_json(msg=e.message) try: if not compare_bucket_logging(bucket, target_bucket, target_prefix): # Before we can enable logging we must give the log-delivery group WRITE and READ_ACP permissions to the target bucket try: target_bucket_obj = connection.get_bucket(target_bucket) except S3ResponseError as e: if e.status == 301: module.fail_json(msg="the logging target bucket must be in the same region as the bucket being logged") else: module.fail_json(msg=e.message) target_bucket_obj.set_as_logging_target() bucket.enable_logging(target_bucket, target_prefix) changed = True except S3ResponseError as e: module.fail_json(msg=e.message) module.exit_json(changed=changed) def disable_bucket_logging(connection, module): bucket_name = module.params.get("name") changed = False try: bucket = connection.get_bucket(bucket_name) 
if not compare_bucket_logging(bucket, None, None): bucket.disable_logging() changed = True except S3ResponseError as e: module.fail_json(msg=e.message) module.exit_json(changed=changed) def main(): argument_spec = ec2_argument_spec() argument_spec.update( dict( name = dict(required=True), target_bucket = dict(required=False, default=None), target_prefix = dict(required=False, default=""), state = dict(required=False, default='present', choices=['present', 'absent']) ) ) module = AnsibleModule(argument_spec=argument_spec) if not HAS_BOTO: module.fail_json(msg='boto required for this module') region, ec2_url, aws_connect_params = get_aws_connection_info(module) if region in ('us-east-1', '', None): # S3ism for the US Standard region location = Location.DEFAULT else: # Boto uses symbolic names for locations but region strings will # actually work fine for everything except us-east-1 (US Standard) location = region try: connection = boto.s3.connect_to_region(location, is_secure=True, calling_format=OrdinaryCallingFormat(), **aws_connect_params) # use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases if connection is None: connection = boto.connect_s3(**aws_connect_params) except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e: module.fail_json(msg=str(e)) state = module.params.get("state") if state == 'present': enable_bucket_logging(connection, module) elif state == 'absent': disable_bucket_logging(connection, module) if __name__ == '__main__': main()
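The module above is a thin wrapper around a handful of boto S3 calls. As a rough illustration of what state=present does under the hood, here is a hedged sketch of the same enable-logging sequence run directly with boto outside of Ansible; it uses only calls that appear in the module, and the region and bucket names are placeholders.

# Sketch of the boto calls the module above performs when state=present,
# shown outside of Ansible. Region and bucket names are placeholders;
# requires boto and valid AWS credentials in the environment.
import boto.s3
from boto.s3.connection import OrdinaryCallingFormat

conn = boto.s3.connect_to_region(
    "us-west-2", calling_format=OrdinaryCallingFormat()
)

bucket = conn.get_bucket("mywebsite.com")
target = conn.get_bucket("mylogs")

# Grant the log-delivery group the permissions it needs, then enable logging.
target.set_as_logging_target()
bucket.enable_logging("mylogs", "logs/mywebsite.com")

status = bucket.get_logging_status()
print(status.target, status.prefix)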
gpl-3.0
wlamond/scikit-learn
sklearn/tests/test_random_projection.py
141
14040
from __future__ import division import numpy as np import scipy.sparse as sp from sklearn.metrics import euclidean_distances from sklearn.random_projection import johnson_lindenstrauss_min_dim from sklearn.random_projection import gaussian_random_matrix from sklearn.random_projection import sparse_random_matrix from sklearn.random_projection import SparseRandomProjection from sklearn.random_projection import GaussianRandomProjection from sklearn.utils.testing import assert_less from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_raise_message from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_in from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_warns from sklearn.exceptions import DataDimensionalityWarning all_sparse_random_matrix = [sparse_random_matrix] all_dense_random_matrix = [gaussian_random_matrix] all_random_matrix = set(all_sparse_random_matrix + all_dense_random_matrix) all_SparseRandomProjection = [SparseRandomProjection] all_DenseRandomProjection = [GaussianRandomProjection] all_RandomProjection = set(all_SparseRandomProjection + all_DenseRandomProjection) # Make some random data with uniformly located non zero entries with # Gaussian distributed values def make_sparse_random_data(n_samples, n_features, n_nonzeros): rng = np.random.RandomState(0) data_coo = sp.coo_matrix( (rng.randn(n_nonzeros), (rng.randint(n_samples, size=n_nonzeros), rng.randint(n_features, size=n_nonzeros))), shape=(n_samples, n_features)) return data_coo.toarray(), data_coo.tocsr() def densify(matrix): if not sp.issparse(matrix): return matrix else: return matrix.toarray() n_samples, n_features = (10, 1000) n_nonzeros = int(n_samples * n_features / 100.) 
data, data_csr = make_sparse_random_data(n_samples, n_features, n_nonzeros) ############################################################################### # test on JL lemma ############################################################################### def test_invalid_jl_domain(): assert_raises(ValueError, johnson_lindenstrauss_min_dim, 100, 1.1) assert_raises(ValueError, johnson_lindenstrauss_min_dim, 100, 0.0) assert_raises(ValueError, johnson_lindenstrauss_min_dim, 100, -0.1) assert_raises(ValueError, johnson_lindenstrauss_min_dim, 0, 0.5) def test_input_size_jl_min_dim(): assert_raises(ValueError, johnson_lindenstrauss_min_dim, 3 * [100], 2 * [0.9]) assert_raises(ValueError, johnson_lindenstrauss_min_dim, 3 * [100], 2 * [0.9]) johnson_lindenstrauss_min_dim(np.random.randint(1, 10, size=(10, 10)), 0.5 * np.ones((10, 10))) ############################################################################### # tests random matrix generation ############################################################################### def check_input_size_random_matrix(random_matrix): assert_raises(ValueError, random_matrix, 0, 0) assert_raises(ValueError, random_matrix, -1, 1) assert_raises(ValueError, random_matrix, 1, -1) assert_raises(ValueError, random_matrix, 1, 0) assert_raises(ValueError, random_matrix, -1, 0) def check_size_generated(random_matrix): assert_equal(random_matrix(1, 5).shape, (1, 5)) assert_equal(random_matrix(5, 1).shape, (5, 1)) assert_equal(random_matrix(5, 5).shape, (5, 5)) assert_equal(random_matrix(1, 1).shape, (1, 1)) def check_zero_mean_and_unit_norm(random_matrix): # All random matrix should produce a transformation matrix # with zero mean and unit norm for each columns A = densify(random_matrix(10000, 1, random_state=0)) assert_array_almost_equal(0, np.mean(A), 3) assert_array_almost_equal(1.0, np.linalg.norm(A), 1) def check_input_with_sparse_random_matrix(random_matrix): n_components, n_features = 5, 10 for density in [-1., 0.0, 1.1]: assert_raises(ValueError, random_matrix, n_components, n_features, density=density) def test_basic_property_of_random_matrix(): # Check basic properties of random matrix generation for random_matrix in all_random_matrix: yield check_input_size_random_matrix, random_matrix yield check_size_generated, random_matrix yield check_zero_mean_and_unit_norm, random_matrix for random_matrix in all_sparse_random_matrix: yield check_input_with_sparse_random_matrix, random_matrix random_matrix_dense = \ lambda n_components, n_features, random_state: random_matrix( n_components, n_features, random_state=random_state, density=1.0) yield check_zero_mean_and_unit_norm, random_matrix_dense def test_gaussian_random_matrix(): # Check some statical properties of Gaussian random matrix # Check that the random matrix follow the proper distribution. # Let's say that each element of a_{ij} of A is taken from # a_ij ~ N(0.0, 1 / n_components). 
# n_components = 100 n_features = 1000 A = gaussian_random_matrix(n_components, n_features, random_state=0) assert_array_almost_equal(0.0, np.mean(A), 2) assert_array_almost_equal(np.var(A, ddof=1), 1 / n_components, 1) def test_sparse_random_matrix(): # Check some statical properties of sparse random matrix n_components = 100 n_features = 500 for density in [0.3, 1.]: s = 1 / density A = sparse_random_matrix(n_components, n_features, density=density, random_state=0) A = densify(A) # Check possible values values = np.unique(A) assert_in(np.sqrt(s) / np.sqrt(n_components), values) assert_in(- np.sqrt(s) / np.sqrt(n_components), values) if density == 1.0: assert_equal(np.size(values), 2) else: assert_in(0., values) assert_equal(np.size(values), 3) # Check that the random matrix follow the proper distribution. # Let's say that each element of a_{ij} of A is taken from # # - -sqrt(s) / sqrt(n_components) with probability 1 / 2s # - 0 with probability 1 - 1 / s # - +sqrt(s) / sqrt(n_components) with probability 1 / 2s # assert_almost_equal(np.mean(A == 0.0), 1 - 1 / s, decimal=2) assert_almost_equal(np.mean(A == np.sqrt(s) / np.sqrt(n_components)), 1 / (2 * s), decimal=2) assert_almost_equal(np.mean(A == - np.sqrt(s) / np.sqrt(n_components)), 1 / (2 * s), decimal=2) assert_almost_equal(np.var(A == 0.0, ddof=1), (1 - 1 / s) * 1 / s, decimal=2) assert_almost_equal(np.var(A == np.sqrt(s) / np.sqrt(n_components), ddof=1), (1 - 1 / (2 * s)) * 1 / (2 * s), decimal=2) assert_almost_equal(np.var(A == - np.sqrt(s) / np.sqrt(n_components), ddof=1), (1 - 1 / (2 * s)) * 1 / (2 * s), decimal=2) ############################################################################### # tests on random projection transformer ############################################################################### def test_sparse_random_projection_transformer_invalid_density(): for RandomProjection in all_SparseRandomProjection: assert_raises(ValueError, RandomProjection(density=1.1).fit, data) assert_raises(ValueError, RandomProjection(density=0).fit, data) assert_raises(ValueError, RandomProjection(density=-0.1).fit, data) def test_random_projection_transformer_invalid_input(): for RandomProjection in all_RandomProjection: assert_raises(ValueError, RandomProjection(n_components='auto').fit, [[0, 1, 2]]) assert_raises(ValueError, RandomProjection(n_components=-10).fit, data) def test_try_to_transform_before_fit(): for RandomProjection in all_RandomProjection: assert_raises(ValueError, RandomProjection(n_components='auto').transform, data) def test_too_many_samples_to_find_a_safe_embedding(): data, _ = make_sparse_random_data(1000, 100, 1000) for RandomProjection in all_RandomProjection: rp = RandomProjection(n_components='auto', eps=0.1) expected_msg = ( 'eps=0.100000 and n_samples=1000 lead to a target dimension' ' of 5920 which is larger than the original space with' ' n_features=100') assert_raise_message(ValueError, expected_msg, rp.fit, data) def test_random_projection_embedding_quality(): data, _ = make_sparse_random_data(8, 5000, 15000) eps = 0.2 original_distances = euclidean_distances(data, squared=True) original_distances = original_distances.ravel() non_identical = original_distances != 0.0 # remove 0 distances to avoid division by 0 original_distances = original_distances[non_identical] for RandomProjection in all_RandomProjection: rp = RandomProjection(n_components='auto', eps=eps, random_state=0) projected = rp.fit_transform(data) projected_distances = euclidean_distances(projected, squared=True) 
projected_distances = projected_distances.ravel() # remove 0 distances to avoid division by 0 projected_distances = projected_distances[non_identical] distances_ratio = projected_distances / original_distances # check that the automatically tuned values for the density respect the # contract for eps: pairwise distances are preserved according to the # Johnson-Lindenstrauss lemma assert_less(distances_ratio.max(), 1 + eps) assert_less(1 - eps, distances_ratio.min()) def test_SparseRandomProjection_output_representation(): for SparseRandomProjection in all_SparseRandomProjection: # when using sparse input, the projected data can be forced to be a # dense numpy array rp = SparseRandomProjection(n_components=10, dense_output=True, random_state=0) rp.fit(data) assert isinstance(rp.transform(data), np.ndarray) sparse_data = sp.csr_matrix(data) assert isinstance(rp.transform(sparse_data), np.ndarray) # the output can be left to a sparse matrix instead rp = SparseRandomProjection(n_components=10, dense_output=False, random_state=0) rp = rp.fit(data) # output for dense input will stay dense: assert isinstance(rp.transform(data), np.ndarray) # output for sparse output will be sparse: assert sp.issparse(rp.transform(sparse_data)) def test_correct_RandomProjection_dimensions_embedding(): for RandomProjection in all_RandomProjection: rp = RandomProjection(n_components='auto', random_state=0, eps=0.5).fit(data) # the number of components is adjusted from the shape of the training # set assert_equal(rp.n_components, 'auto') assert_equal(rp.n_components_, 110) if RandomProjection in all_SparseRandomProjection: assert_equal(rp.density, 'auto') assert_almost_equal(rp.density_, 0.03, 2) assert_equal(rp.components_.shape, (110, n_features)) projected_1 = rp.transform(data) assert_equal(projected_1.shape, (n_samples, 110)) # once the RP is 'fitted' the projection is always the same projected_2 = rp.transform(data) assert_array_equal(projected_1, projected_2) # fit transform with same random seed will lead to the same results rp2 = RandomProjection(random_state=0, eps=0.5) projected_3 = rp2.fit_transform(data) assert_array_equal(projected_1, projected_3) # Try to transform with an input X of size different from fitted. assert_raises(ValueError, rp.transform, data[:, 1:5]) # it is also possible to fix the number of components and the density # level if RandomProjection in all_SparseRandomProjection: rp = RandomProjection(n_components=100, density=0.001, random_state=0) projected = rp.fit_transform(data) assert_equal(projected.shape, (n_samples, 100)) assert_equal(rp.components_.shape, (100, n_features)) assert_less(rp.components_.nnz, 115) # close to 1% density assert_less(85, rp.components_.nnz) # close to 1% density def test_warning_n_components_greater_than_n_features(): n_features = 20 data, _ = make_sparse_random_data(5, n_features, int(n_features / 4)) for RandomProjection in all_RandomProjection: assert_warns(DataDimensionalityWarning, RandomProjection(n_components=n_features + 1).fit, data) def test_works_with_sparse_data(): n_features = 20 data, _ = make_sparse_random_data(5, n_features, int(n_features / 4)) for RandomProjection in all_RandomProjection: rp_dense = RandomProjection(n_components=3, random_state=1).fit(data) rp_sparse = RandomProjection(n_components=3, random_state=1).fit(sp.csr_matrix(data)) assert_array_almost_equal(densify(rp_dense.components_), densify(rp_sparse.components_))
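The tests above exercise johnson_lindenstrauss_min_dim and the random projection transformers through the test harness. A small standalone usage sketch of the same public API follows; the array shapes and eps value are arbitrary choices for illustration.

# Standalone sketch of the public API exercised by the tests above.
# Shapes and eps are arbitrary illustrative choices.
import numpy as np
from sklearn.random_projection import (
    SparseRandomProjection,
    johnson_lindenstrauss_min_dim,
)

rng = np.random.RandomState(0)
X = rng.rand(100, 10000)

# Minimum embedding dimension that preserves pairwise distances within eps.
k = johnson_lindenstrauss_min_dim(n_samples=X.shape[0], eps=0.5)
print("target dimension:", k)

rp = SparseRandomProjection(n_components="auto", eps=0.5, random_state=0)
X_new = rp.fit_transform(X)
print("projected shape:", X_new.shape)  # (100, rp.n_components_)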
bsd-3-clause
apark263/tensorflow
tensorflow/tools/docs/generate_lib.py
18
23300
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Generate docs for the TensorFlow Python API.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import fnmatch import os import shutil import tempfile import six from tensorflow.python.util import tf_inspect from tensorflow.tools.common import public_api from tensorflow.tools.common import traverse from tensorflow.tools.docs import doc_controls from tensorflow.tools.docs import doc_generator_visitor from tensorflow.tools.docs import parser from tensorflow.tools.docs import pretty_docs from tensorflow.tools.docs import py_guide_parser def write_docs(output_dir, parser_config, yaml_toc, root_title='TensorFlow', search_hints=True, site_api_path=''): """Write previously extracted docs to disk. Write a docs page for each symbol included in the indices of parser_config to a tree of docs at `output_dir`. Symbols with multiple aliases will have only one page written about them, which is referenced for all aliases. Args: output_dir: Directory to write documentation markdown files to. Will be created if it doesn't exist. parser_config: A `parser.ParserConfig` object, containing all the necessary indices. yaml_toc: Set to `True` to generate a "_toc.yaml" file. root_title: The title name for the root level index.md. search_hints: (bool) include meta-data search hints at the top of each output file. site_api_path: The output path relative to the site root. Used in the `_toc.yaml` and `_redirects.yaml` files. Raises: ValueError: if `output_dir` is not an absolute path """ # Make output_dir. if not os.path.isabs(output_dir): raise ValueError("'output_dir' must be an absolute path.\n" " output_dir='%s'" % output_dir) if not os.path.exists(output_dir): os.makedirs(output_dir) # These dictionaries are used for table-of-contents generation below # They will contain, after the for-loop below:: # - module name(string):classes and functions the module contains(list) module_children = {} # - symbol name(string):pathname (string) symbol_to_file = {} # Collect redirects for an api _redirects.yaml file. redirects = [] # Parse and write Markdown pages, resolving cross-links (@{symbol}). for full_name, py_object in six.iteritems(parser_config.index): parser_config.reference_resolver.current_doc_full_name = full_name if full_name in parser_config.duplicate_of: continue # Methods and some routines are documented only as part of their class. 
if not (tf_inspect.ismodule(py_object) or tf_inspect.isclass(py_object) or parser.is_free_function(py_object, full_name, parser_config.index)): continue sitepath = os.path.join('api_docs/python', parser.documentation_path(full_name)[:-3]) # For TOC, we need to store a mapping from full_name to the file # we're generating symbol_to_file[full_name] = sitepath # For a module, remember the module for the table-of-contents if tf_inspect.ismodule(py_object): if full_name in parser_config.tree: module_children.setdefault(full_name, []) # For something else that's documented, # figure out what module it lives in else: subname = str(full_name) while True: subname = subname[:subname.rindex('.')] if tf_inspect.ismodule(parser_config.index[subname]): module_children.setdefault(subname, []).append(full_name) break # Generate docs for `py_object`, resolving references. page_info = parser.docs_for_object(full_name, py_object, parser_config) path = os.path.join(output_dir, parser.documentation_path(full_name)) directory = os.path.dirname(path) try: if not os.path.exists(directory): os.makedirs(directory) # This function returns raw bytes in PY2 or unicode in PY3. if search_hints: content = [page_info.get_metadata_html()] else: content = [''] content.append(pretty_docs.build_md_page(page_info)) text = '\n'.join(content) if six.PY3: text = text.encode('utf-8') with open(path, 'wb') as f: f.write(text) except OSError: raise OSError( 'Cannot write documentation for %s to %s' % (full_name, directory)) duplicates = parser_config.duplicates.get(full_name, []) if not duplicates: continue duplicates = [item for item in duplicates if item != full_name] for dup in duplicates: from_path = os.path.join(site_api_path, dup.replace('.', '/')) to_path = os.path.join(site_api_path, full_name.replace('.', '/')) redirects.append(( os.path.join('/', from_path), os.path.join('/', to_path))) if redirects: redirects = sorted(redirects) template = ('- from: {}\n' ' to: {}\n') redirects = [template.format(f, t) for f, t in redirects] api_redirects_path = os.path.join(output_dir, '_redirects.yaml') with open(api_redirects_path, 'w') as redirect_file: redirect_file.write('redirects:\n') redirect_file.write(''.join(redirects)) if yaml_toc: # Generate table of contents # Put modules in alphabetical order, case-insensitive modules = sorted(module_children.keys(), key=lambda a: a.upper()) leftnav_path = os.path.join(output_dir, '_toc.yaml') with open(leftnav_path, 'w') as f: # Generate header f.write('# Automatically generated file; please do not edit\ntoc:\n') for module in modules: indent_num = module.count('.') # Don't list `tf.submodule` inside `tf` indent_num = max(indent_num, 1) indent = ' '*indent_num if indent_num > 1: # tf.contrib.baysflow.entropy will be under # tf.contrib->baysflow->entropy title = module.split('.')[-1] else: title = module header = [ '- title: ' + title, ' section:', ' - title: Overview', ' path: ' + os.path.join('/', site_api_path, symbol_to_file[module])] header = ''.join([indent+line+'\n' for line in header]) f.write(header) symbols_in_module = module_children.get(module, []) # Sort case-insensitive, if equal sort case sensitive (upper first) symbols_in_module.sort(key=lambda a: (a.upper(), a)) for full_name in symbols_in_module: item = [ ' - title: ' + full_name[len(module) + 1:], ' path: ' + os.path.join('/', site_api_path, symbol_to_file[full_name])] item = ''.join([indent+line+'\n' for line in item]) f.write(item) # Write a global index containing all full names with links. 
with open(os.path.join(output_dir, 'index.md'), 'w') as f: f.write( parser.generate_global_index(root_title, parser_config.index, parser_config.reference_resolver)) def add_dict_to_dict(add_from, add_to): for key in add_from: if key in add_to: add_to[key].extend(add_from[key]) else: add_to[key] = add_from[key] # Exclude some libraries in contrib from the documentation altogether. def _get_default_private_map(): return { 'tf.contrib.autograph': ['utils', 'operators'], 'tf.test': ['mock'], 'tf.compat': ['v1', 'v2'], 'tf.contrib.estimator': ['python'], } # Exclude members of some libraries. def _get_default_do_not_descend_map(): # TODO(markdaoust): Use docs_controls decorators, locally, instead. return { 'tf': ['cli', 'lib', 'wrappers'], 'tf.contrib': [ 'compiler', 'grid_rnn', # Block contrib.keras to de-clutter the docs 'keras', 'labeled_tensor', 'quantization', 'session_bundle', 'slim', 'solvers', 'specs', 'tensor_forest', 'tensorboard', 'testing', 'tfprof', ], 'tf.contrib.bayesflow': [ 'special_math', 'stochastic_gradient_estimators', 'stochastic_variables' ], 'tf.contrib.ffmpeg': ['ffmpeg_ops'], 'tf.contrib.graph_editor': [ 'edit', 'match', 'reroute', 'subgraph', 'transform', 'select', 'util' ], 'tf.contrib.keras': ['api', 'python'], 'tf.contrib.layers': ['feature_column', 'summaries'], 'tf.contrib.learn': [ 'datasets', 'head', 'graph_actions', 'io', 'models', 'monitors', 'ops', 'preprocessing', 'utils', ], 'tf.contrib.util': ['loader'], } class DocControlsAwareCrawler(public_api.PublicAPIVisitor): """A `docs_controls` aware API-crawler.""" def _is_private(self, path, name, obj): if doc_controls.should_skip(obj): return True return super(DocControlsAwareCrawler, self)._is_private(path, name, obj) def extract(py_modules, private_map, do_not_descend_map, visitor_cls=doc_generator_visitor.DocGeneratorVisitor): """Extract docs from tf namespace and write them to disk.""" # Traverse the first module. 
visitor = visitor_cls(py_modules[0][0]) api_visitor = DocControlsAwareCrawler(visitor) api_visitor.set_root_name(py_modules[0][0]) add_dict_to_dict(private_map, api_visitor.private_map) add_dict_to_dict(do_not_descend_map, api_visitor.do_not_descend_map) traverse.traverse(py_modules[0][1], api_visitor) # Traverse all py_modules after the first: for module_name, module in py_modules[1:]: visitor.set_root_name(module_name) api_visitor.set_root_name(module_name) traverse.traverse(module, api_visitor) return visitor class _GetMarkdownTitle(py_guide_parser.PyGuideParser): """Extract the title from a .md file.""" def __init__(self): self.title = None py_guide_parser.PyGuideParser.__init__(self) def process_title(self, _, title): if self.title is None: # only use the first title self.title = title class _DocInfo(object): """A simple struct for holding a doc's url and title.""" def __init__(self, url, title): self.url = url self.title = title def build_doc_index(src_dir): """Build an index from a keyword designating a doc to _DocInfo objects.""" doc_index = {} if not os.path.isabs(src_dir): raise ValueError("'src_dir' must be an absolute path.\n" " src_dir='%s'" % src_dir) if not os.path.exists(src_dir): raise ValueError("'src_dir' path must exist.\n" " src_dir='%s'" % src_dir) for dirpath, _, filenames in os.walk(src_dir): suffix = os.path.relpath(path=dirpath, start=src_dir) for base_name in filenames: if not base_name.endswith('.md'): continue title_parser = _GetMarkdownTitle() title_parser.process(os.path.join(dirpath, base_name)) if title_parser.title is None: msg = ('`{}` has no markdown title (# title)'.format( os.path.join(dirpath, base_name))) raise ValueError(msg) key_parts = os.path.join(suffix, base_name[:-3]).split('/') if key_parts[-1] == 'index': key_parts = key_parts[:-1] doc_info = _DocInfo(os.path.join(suffix, base_name), title_parser.title) doc_index[key_parts[-1]] = doc_info if len(key_parts) > 1: doc_index['/'.join(key_parts[-2:])] = doc_info return doc_index class _GuideRef(object): def __init__(self, base_name, title, section_title, section_tag): self.url = 'api_guides/python/' + (('%s#%s' % (base_name, section_tag)) if section_tag else base_name) self.link_text = (('%s > %s' % (title, section_title)) if section_title else title) def make_md_link(self, url_prefix): return '[%s](%s%s)' % (self.link_text, url_prefix, self.url) class _GenerateGuideIndex(py_guide_parser.PyGuideParser): """Turn guide files into an index from symbol name to a list of _GuideRefs.""" def __init__(self): self.index = {} py_guide_parser.PyGuideParser.__init__(self) def process(self, full_path, base_name): """Index a file, reading from `full_path`, with `base_name` as the link.""" self.full_path = full_path self.base_name = base_name self.title = None self.section_title = None self.section_tag = None py_guide_parser.PyGuideParser.process(self, full_path) def process_title(self, _, title): if self.title is None: # only use the first title self.title = title def process_section(self, _, section_title, tag): self.section_title = section_title self.section_tag = tag def process_line(self, _, line): """Index the file and section of each `symbol` reference.""" for match in parser.AUTO_REFERENCE_RE.finditer(line): val = self.index.get(match.group(1), []) val.append( _GuideRef(self.base_name, self.title, self.section_title, self.section_tag)) self.index[match.group(1)] = val def _build_guide_index(guide_src_dir): """Return dict: symbol name -> _GuideRef from the files in `guide_src_dir`.""" index_generator = 
_GenerateGuideIndex() if os.path.exists(guide_src_dir): for full_path, base_name in py_guide_parser.md_files_in_dir(guide_src_dir): index_generator.process(full_path, base_name) return index_generator.index class _UpdateTags(py_guide_parser.PyGuideParser): """Rewrites a Python guide so that each section has an explicit id tag. "section" here refers to blocks delimited by second level headings. """ def process_section(self, line_number, section_title, tag): self.replace_line(line_number, '<h2 id="%s">%s</h2>' % (tag, section_title)) def update_id_tags_inplace(src_dir): """Set explicit ids on all second-level headings to ensure back-links work. Args: src_dir: The directory of md-files to convert (inplace). """ tag_updater = _UpdateTags() for dirpath, _, filenames in os.walk(src_dir): for base_name in filenames: if not base_name.endswith('.md'): continue full_path = os.path.join(src_dir, dirpath, base_name) # Tag updater loads the file, makes the replacements, and returns the # modified file contents content = tag_updater.process(full_path) with open(full_path, 'w') as f: f.write(content) EXCLUDED = set(['__init__.py', 'OWNERS', 'README.txt']) def replace_refs(src_dir, output_dir, reference_resolver, file_pattern='*.md', api_docs_relpath='api_docs'): """Fix @{} references in all files under `src_dir` matching `file_pattern`. A matching directory structure, with the modified files is written to `output_dir`. `{"__init__.py","OWNERS","README.txt"}` are skipped. Files not matching `file_pattern` (using `fnmatch`) are copied with no change. Also, files in the `api_guides/python` directory get explicit ids set on all heading-2s to ensure back-links work. Args: src_dir: The directory to convert files from. output_dir: The root directory to write the resulting files to. reference_resolver: A `parser.ReferenceResolver` to make the replacements. file_pattern: Only replace references in files matching file_patters, using fnmatch. Non-matching files are copied unchanged. api_docs_relpath: Relative-path string to the api_docs, from the src_dir. """ # Iterate through all the source files and process them. for dirpath, _, filenames in os.walk(src_dir): depth = os.path.relpath(src_dir, start=dirpath) # How to get from `dirpath` to api_docs/python/ relative_path_to_root = os.path.join(depth, api_docs_relpath, 'python') # Make the directory under output_dir. new_dir = os.path.join(output_dir, os.path.relpath(path=dirpath, start=src_dir)) if not os.path.exists(new_dir): os.makedirs(new_dir) for base_name in filenames: if base_name in EXCLUDED: continue full_in_path = os.path.join(dirpath, base_name) # Set the `current_doc_full_name` so bad files can be reported on errors. reference_resolver.current_doc_full_name = full_in_path suffix = os.path.relpath(path=full_in_path, start=src_dir) full_out_path = os.path.join(output_dir, suffix) # Copy files that do not match the file_pattern, unmodified. 
if not fnmatch.fnmatch(base_name, file_pattern): if full_in_path != full_out_path: shutil.copyfile(full_in_path, full_out_path) continue with open(full_in_path, 'rb') as f: content = f.read().decode('utf-8') content = reference_resolver.replace_references(content, relative_path_to_root) with open(full_out_path, 'wb') as f: f.write(content.encode('utf-8')) class DocGenerator(object): """Main entry point for generating docs.""" def __init__(self): self.argument_parser = argparse.ArgumentParser() self._py_modules = None self._private_map = _get_default_private_map() self._do_not_descend_map = _get_default_do_not_descend_map() self.yaml_toc = True self.argument_parser.add_argument( '--no_search_hints', dest='search_hints', action='store_false', default=True) self.argument_parser.add_argument( '--site_api_path', type=str, default='', help='The path from the site-root to api_docs' 'directory for this project') self.argument_parser.add_argument( '--api_cache_out_path', type=str, default=None, help='Path to store a json-serialized api-index, so links can be ' 'inserted into docs without rebuilding the api_docs') def add_output_dir_argument(self): self.argument_parser.add_argument( '--output_dir', type=str, default=None, required=True, help='Directory to write docs to.') def add_src_dir_argument(self): self.argument_parser.add_argument( '--src_dir', type=str, default=tempfile.mkdtemp(), required=False, help='Optional directory of source docs to add api_docs links to') def add_base_dir_argument(self, default_base_dir): self.argument_parser.add_argument( '--base_dir', type=str, default=default_base_dir, help='Base directory to strip from file names referenced in docs.') def parse_known_args(self): flags, _ = self.argument_parser.parse_known_args() return flags def add_to_private_map(self, d): add_dict_to_dict(d, self._private_map) def add_to_do_not_descend_map(self, d): add_dict_to_dict(d, self._do_not_descend_map) def set_private_map(self, d): self._private_map = d def set_do_not_descend_map(self, d): self._do_not_descend_map = d def set_py_modules(self, py_modules): self._py_modules = py_modules def py_module_names(self): if self._py_modules is None: raise RuntimeError( 'Must call set_py_modules() before running py_module_names().') return [name for (name, _) in self._py_modules] def make_reference_resolver(self, visitor, doc_index): return parser.ReferenceResolver.from_visitor( visitor, doc_index, py_module_names=self.py_module_names()) def make_parser_config(self, visitor, reference_resolver, guide_index, base_dir): return parser.ParserConfig( reference_resolver=reference_resolver, duplicates=visitor.duplicates, duplicate_of=visitor.duplicate_of, tree=visitor.tree, index=visitor.index, reverse_index=visitor.reverse_index, guide_index=guide_index, base_dir=base_dir) def run_extraction(self): return extract(self._py_modules, self._private_map, self._do_not_descend_map) def build(self, flags): """Build all the docs. This produces two outputs python api docs: * generated from modules set with `set_py_modules`. * written to '{FLAGS.output_dir}/api_docs/python/' non-api docs: * Everything in '{FLAGS.src_dir}' is copied to '{FLAGS.output_dir}'. * '@{}' references in '.md' files are replaced with links. * '.md' files under 'api_guides/python' have explicit ids set for their second level headings. Args: flags: * src_dir: Where to fetch the non-api-docs. * base_dir: Base of the docs directory (Used to build correct relative links). * output_dir: Where to write the resulting docs. 
Returns: The number of errors encountered while processing. """ # Extract the python api from the _py_modules doc_index = build_doc_index(flags.src_dir) visitor = self.run_extraction() reference_resolver = self.make_reference_resolver(visitor, doc_index) if getattr(flags, 'api_cache_out_path', None): reference_resolver.to_json_file(flags.api_cache_out_path) # Build the guide_index for the api_docs back links. root_title = getattr(flags, 'root_title', 'TensorFlow') guide_index = _build_guide_index( os.path.join(flags.src_dir, 'api_guides/python')) # Write the api docs. parser_config = self.make_parser_config(visitor, reference_resolver, guide_index, flags.base_dir) output_dir = os.path.join(flags.output_dir, 'api_docs/python') write_docs( output_dir, parser_config, yaml_toc=self.yaml_toc, root_title=root_title, search_hints=getattr(flags, 'search_hints', True), site_api_path=getattr(flags, 'site_api_path', '')) # Replace all the @{} references in files under `FLAGS.src_dir` replace_refs(flags.src_dir, flags.output_dir, reference_resolver, '*.md') # Fix the tags in the guide dir. guide_dir = os.path.join(flags.output_dir, 'api_guides/python') if os.path.exists(guide_dir): update_id_tags_inplace(guide_dir) # Report all errors found by the reference resolver, and return the error # code. parser_config.reference_resolver.log_errors() return parser_config.reference_resolver.num_errors()
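The DocGenerator class above is the entry point, but it needs a small driver script to wire in the documented modules and command-line flags. Here is a hedged sketch of such a driver, built only from the methods defined in the file above; the module list and default base_dir are placeholders, not the project's actual build script.

# Sketch of a driver script for the DocGenerator class defined above.
# The module list and default base_dir are placeholders; a real build
# script would pass the package it documents.
import os

import tensorflow as tf
from tensorflow.tools.docs import generate_lib

if __name__ == "__main__":
    doc_generator = generate_lib.DocGenerator()
    doc_generator.add_output_dir_argument()
    doc_generator.add_src_dir_argument()
    # Strip this prefix from source file paths referenced in the docs.
    doc_generator.add_base_dir_argument(os.path.dirname(tf.__file__))

    flags = doc_generator.parse_known_args()
    doc_generator.set_py_modules([("tf", tf)])

    # build() returns the number of reference-resolution errors encountered.
    raise SystemExit(doc_generator.build(flags))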
apache-2.0
devintjones/pyodbc
tests3/pgtests.py
3
15439
#!/usr/bin/python # Unit tests for PostgreSQL on Linux (Fedora) # This is a stripped down copy of the SQL Server tests. from __future__ import print_function import sys, os, re import unittest from decimal import Decimal from testutils import * _TESTSTR = '0123456789-abcdefghijklmnopqrstuvwxyz-' def _generate_test_string(length): """ Returns a string of composed of `seed` to make a string `length` characters long. To enhance performance, there are 3 ways data is read, based on the length of the value, so most data types are tested with 3 lengths. This function helps us generate the test data. We use a recognizable data set instead of a single character to make it less likely that "overlap" errors will be hidden and to help us manually identify where a break occurs. """ if length <= len(_TESTSTR): return _TESTSTR[:length] c = int((length + len(_TESTSTR)-1) / len(_TESTSTR)) v = _TESTSTR * c return v[:length] class PGTestCase(unittest.TestCase): # These are from the C++ code. Keep them up to date. # If we are reading a binary, string, or unicode value and do not know how large it is, we'll try reading 2K into a # buffer on the stack. We then copy into a new Python object. SMALL_READ = 2048 # A read guaranteed not to fit in the MAX_STACK_STACK stack buffer, but small enough to be used for varchar (4K max). LARGE_READ = 4000 SMALL_STRING = _generate_test_string(SMALL_READ) LARGE_STRING = _generate_test_string(LARGE_READ) def __init__(self, connection_string, ansi, method_name): unittest.TestCase.__init__(self, method_name) self.connection_string = connection_string self.ansi = ansi def setUp(self): self.cnxn = pyodbc.connect(self.connection_string, ansi=self.ansi) self.cursor = self.cnxn.cursor() for i in range(3): try: self.cursor.execute("drop table t%d" % i) self.cnxn.commit() except: pass self.cnxn.rollback() def tearDown(self): try: self.cursor.close() self.cnxn.close() except: # If we've already closed the cursor or connection, exceptions are thrown. pass def test_datasources(self): p = pyodbc.dataSources() self.assert_(isinstance(p, dict)) def test_getinfo_string(self): value = self.cnxn.getinfo(pyodbc.SQL_CATALOG_NAME_SEPARATOR) self.assert_(isinstance(value, str)) def test_getinfo_bool(self): value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES) self.assert_(isinstance(value, bool)) def test_getinfo_int(self): value = self.cnxn.getinfo(pyodbc.SQL_DEFAULT_TXN_ISOLATION) self.assert_(isinstance(value, int)) def test_getinfo_smallint(self): value = self.cnxn.getinfo(pyodbc.SQL_CONCAT_NULL_BEHAVIOR) self.assert_(isinstance(value, int)) def test_negative_float(self): value = -200 self.cursor.execute("create table t1(n float)") self.cursor.execute("insert into t1 values (?)", value) result = self.cursor.execute("select n from t1").fetchone()[0] self.assertEqual(value, result) def _test_strtype(self, sqltype, value, colsize=None): """ The implementation for string, Unicode, and binary tests. 
""" assert colsize is None or (value is None or colsize >= len(value)) if colsize: sql = "create table t1(s %s(%s))" % (sqltype, colsize) else: sql = "create table t1(s %s)" % sqltype self.cursor.execute(sql) self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), type(value)) if value is not None: self.assertEqual(len(v), len(value)) self.assertEqual(v, value) # # varchar # def test_empty_varchar(self): self._test_strtype('varchar', '', self.SMALL_READ) def test_null_varchar(self): self._test_strtype('varchar', None, self.SMALL_READ) def test_large_null_varchar(self): # There should not be a difference, but why not find out? self._test_strtype('varchar', None, self.LARGE_READ) def test_small_varchar(self): self._test_strtype('varchar', self.SMALL_STRING, self.SMALL_READ) def test_large_varchar(self): self._test_strtype('varchar', self.LARGE_STRING, self.LARGE_READ) def test_varchar_many(self): self.cursor.execute("create table t1(c1 varchar(300), c2 varchar(300), c3 varchar(300))") v1 = 'ABCDEFGHIJ' * 30 v2 = '0123456789' * 30 v3 = '9876543210' * 30 self.cursor.execute("insert into t1(c1, c2, c3) values (?,?,?)", v1, v2, v3); row = self.cursor.execute("select c1, c2, c3 from t1").fetchone() self.assertEqual(v1, row.c1) self.assertEqual(v2, row.c2) self.assertEqual(v3, row.c3) def test_small_decimal(self): # value = Decimal('1234567890987654321') value = Decimal('100010') # (I use this because the ODBC docs tell us how the bytes should look in the C struct) self.cursor.execute("create table t1(d numeric(19))") self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), Decimal) self.assertEqual(v, value) def test_small_decimal_scale(self): # The same as small_decimal, except with a different scale. This value exactly matches the ODBC documentation # example in the C Data Types appendix. value = '1000.10' value = Decimal(value) self.cursor.execute("create table t1(d numeric(20,6))") self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), Decimal) self.assertEqual(v, value) def test_negative_decimal_scale(self): value = Decimal('-10.0010') self.cursor.execute("create table t1(d numeric(19,4))") self.cursor.execute("insert into t1 values(?)", value) v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), Decimal) self.assertEqual(v, value) def _exec(self): self.cursor.execute(self.sql) def test_close_cnxn(self): """Make sure using a Cursor after closing its connection doesn't crash.""" self.cursor.execute("create table t1(id integer, s varchar(20))") self.cursor.execute("insert into t1 values (?,?)", 1, 'test') self.cursor.execute("select * from t1") self.cnxn.close() # Now that the connection is closed, we expect an exception. (If the code attempts to use # the HSTMT, we'll get an access violation instead.) 
self.sql = "select * from t1" self.assertRaises(pyodbc.ProgrammingError, self._exec) def test_empty_string(self): self.cursor.execute("create table t1(s varchar(20))") self.cursor.execute("insert into t1 values(?)", "") def test_fixed_str(self): value = "testing" self.cursor.execute("create table t1(s char(7))") self.cursor.execute("insert into t1 values(?)", "testing") v = self.cursor.execute("select * from t1").fetchone()[0] self.assertEqual(type(v), str) self.assertEqual(len(v), len(value)) # If we alloc'd wrong, the test below might work because of an embedded NULL self.assertEqual(v, value) def test_negative_row_index(self): self.cursor.execute("create table t1(s varchar(20))") self.cursor.execute("insert into t1 values(?)", "1") row = self.cursor.execute("select * from t1").fetchone() self.assertEquals(row[0], "1") self.assertEquals(row[-1], "1") def test_version(self): self.assertEquals(3, len(pyodbc.version.split('.'))) # 1.3.1 etc. def test_rowcount_delete(self): self.assertEquals(self.cursor.rowcount, -1) self.cursor.execute("create table t1(i int)") count = 4 for i in range(count): self.cursor.execute("insert into t1 values (?)", i) self.cursor.execute("delete from t1") self.assertEquals(self.cursor.rowcount, count) def test_rowcount_nodata(self): """ This represents a different code path than a delete that deleted something. The return value is SQL_NO_DATA and code after it was causing an error. We could use SQL_NO_DATA to step over the code that errors out and drop down to the same SQLRowCount code. On the other hand, we could hardcode a zero return value. """ self.cursor.execute("create table t1(i int)") # This is a different code path internally. self.cursor.execute("delete from t1") self.assertEquals(self.cursor.rowcount, 0) def test_rowcount_select(self): self.cursor.execute("create table t1(i int)") count = 4 for i in range(count): self.cursor.execute("insert into t1 values (?)", i) self.cursor.execute("select * from t1") self.assertEquals(self.cursor.rowcount, 4) # PostgreSQL driver fails here? # def test_rowcount_reset(self): # "Ensure rowcount is reset to -1" # # self.cursor.execute("create table t1(i int)") # count = 4 # for i in range(count): # self.cursor.execute("insert into t1 values (?)", i) # self.assertEquals(self.cursor.rowcount, 1) # # self.cursor.execute("create table t2(i int)") # self.assertEquals(self.cursor.rowcount, -1) def test_lower_case(self): "Ensure pyodbc.lowercase forces returned column names to lowercase." # Has to be set before creating the cursor, so we must recreate self.cursor. pyodbc.lowercase = True self.cursor = self.cnxn.cursor() self.cursor.execute("create table t1(Abc int, dEf int)") self.cursor.execute("select * from t1") names = [ t[0] for t in self.cursor.description ] names.sort() self.assertEquals(names, [ "abc", "def" ]) # Put it back so other tests don't fail. pyodbc.lowercase = False def test_row_description(self): """ Ensure Cursor.description is accessible as Row.cursor_description. 
""" self.cursor = self.cnxn.cursor() self.cursor.execute("create table t1(a int, b char(3))") self.cnxn.commit() self.cursor.execute("insert into t1 values(1, 'abc')") row = self.cursor.execute("select * from t1").fetchone() self.assertEquals(self.cursor.description, row.cursor_description) def test_executemany(self): self.cursor.execute("create table t1(a int, b varchar(10))") params = [ (i, str(i)) for i in range(1, 6) ] self.cursor.executemany("insert into t1(a, b) values (?,?)", params) # REVIEW: Without the cast, we get the following error: # [07006] [unixODBC]Received an unsupported type from Postgres.;\nERROR: table "t2" does not exist (14) count = self.cursor.execute("select cast(count(*) as int) from t1").fetchone()[0] self.assertEqual(count, len(params)) self.cursor.execute("select a, b from t1 order by a") rows = self.cursor.fetchall() self.assertEqual(count, len(rows)) for param, row in zip(params, rows): self.assertEqual(param[0], row[0]) self.assertEqual(param[1], row[1]) def test_executemany_failure(self): """ Ensure that an exception is raised if one query in an executemany fails. """ self.cursor.execute("create table t1(a int, b varchar(10))") params = [ (1, 'good'), ('error', 'not an int'), (3, 'good') ] self.failUnlessRaises(pyodbc.Error, self.cursor.executemany, "insert into t1(a, b) value (?, ?)", params) def test_row_slicing(self): self.cursor.execute("create table t1(a int, b int, c int, d int)"); self.cursor.execute("insert into t1 values(1,2,3,4)") row = self.cursor.execute("select * from t1").fetchone() result = row[:] self.failUnless(result is row) result = row[:-1] self.assertEqual(result, (1,2,3)) result = row[0:4] self.failUnless(result is row) def test_row_repr(self): self.cursor.execute("create table t1(a int, b int, c int, d int)"); self.cursor.execute("insert into t1 values(1,2,3,4)") row = self.cursor.execute("select * from t1").fetchone() result = str(row) self.assertEqual(result, "(1, 2, 3, 4)") result = str(row[:-1]) self.assertEqual(result, "(1, 2, 3)") result = str(row[:1]) self.assertEqual(result, "(1,)") def main(): from optparse import OptionParser parser = OptionParser(usage="usage: %prog [options] connection_string") parser.add_option("-v", "--verbose", default=0, action="count", help="Increment test verbosity (can be used multiple times)") parser.add_option("-d", "--debug", action="store_true", default=False, help="Print debugging items") parser.add_option("-t", "--test", help="Run only the named test") parser.add_option('-a', '--ansi', help='ANSI only', default=False, action='store_true') (options, args) = parser.parse_args() if len(args) > 1: parser.error('Only one argument is allowed. 
Do you need quotes around the connection string?') if not args: connection_string = load_setup_connection_string('pgtests') if not connection_string: parser.print_help() raise SystemExit() else: connection_string = args[0] if options.verbose: cnxn = pyodbc.connect(connection_string, ansi=options.ansi) print_library_info(cnxn) # print 'library:', os.path.abspath(pyodbc.__file__) # print 'odbc: %s' % cnxn.getinfo(pyodbc.SQL_ODBC_VER) # print 'driver: %s %s' % (cnxn.getinfo(pyodbc.SQL_DRIVER_NAME), cnxn.getinfo(pyodbc.SQL_DRIVER_VER)) # print 'driver supports ODBC version %s' % cnxn.getinfo(pyodbc.SQL_DRIVER_ODBC_VER) # print 'unicode:', pyodbc.UNICODE_SIZE, 'sqlwchar:', pyodbc.SQLWCHAR_SIZE cnxn.close() if options.test: # Run a single test if not options.test.startswith('test_'): options.test = 'test_%s' % (options.test) s = unittest.TestSuite([ PGTestCase(connection_string, options.ansi, options.test) ]) else: # Run all tests in the class methods = [ m for m in dir(PGTestCase) if m.startswith('test_') ] methods.sort() s = unittest.TestSuite([ PGTestCase(connection_string, options.ansi, m) for m in methods ]) testRunner = unittest.TextTestRunner(verbosity=options.verbose) result = testRunner.run(s) if __name__ == '__main__': # Add the build directory to the path so we're testing the latest build, not the installed version. add_to_path() import pyodbc main()
mit
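The pgtests module above exercises the pyodbc Cursor and Row API (parameterized execute, fetchone, attribute-style and negative-index column access). Below is a minimal, hedged sketch of the same calls outside the unittest harness; the connection string is a placeholder and a working PostgreSQL ODBC data source is assumed.

import pyodbc

# Placeholder DSN; a real PostgreSQL ODBC data source is assumed.
cnxn = pyodbc.connect("DSN=pgtests")
cursor = cnxn.cursor()

cursor.execute("create table t1(a int, b varchar(10))")
# pyodbc accepts query parameters positionally after the SQL string.
cursor.execute("insert into t1 values (?, ?)", 1, "abc")

row = cursor.execute("select a, b from t1").fetchone()
print(row.a, row.b)      # columns are exposed as attributes on the Row
print(row[0], row[-1])   # ...and by positive or negative index

cnxn.close()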
demon-ru/iml-crm
addons/hr_timesheet_sheet/__init__.py
434
1127
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import hr_timesheet_sheet
import wizard
import report
import res_config

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

agpl-3.0
llaera/namebench
nb_third_party/dns/node.py
215
5914
# Copyright (C) 2001-2007, 2009, 2010 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """DNS nodes. A node is a set of rdatasets.""" import StringIO import dns.rdataset import dns.rdatatype import dns.renderer class Node(object): """A DNS node. A node is a set of rdatasets @ivar rdatasets: the node's rdatasets @type rdatasets: list of dns.rdataset.Rdataset objects""" __slots__ = ['rdatasets'] def __init__(self): """Initialize a DNS node. """ self.rdatasets = []; def to_text(self, name, **kw): """Convert a node to text format. Each rdataset at the node is printed. Any keyword arguments to this method are passed on to the rdataset's to_text() method. @param name: the owner name of the rdatasets @type name: dns.name.Name object @rtype: string """ s = StringIO.StringIO() for rds in self.rdatasets: print >> s, rds.to_text(name, **kw) return s.getvalue()[:-1] def __repr__(self): return '<DNS node ' + str(id(self)) + '>' def __eq__(self, other): """Two nodes are equal if they have the same rdatasets. @rtype: bool """ # # This is inefficient. Good thing we don't need to do it much. # for rd in self.rdatasets: if rd not in other.rdatasets: return False for rd in other.rdatasets: if rd not in self.rdatasets: return False return True def __ne__(self, other): return not self.__eq__(other) def __len__(self): return len(self.rdatasets) def __iter__(self): return iter(self.rdatasets) def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, create=False): """Find an rdataset matching the specified properties in the current node. @param rdclass: The class of the rdataset @type rdclass: int @param rdtype: The type of the rdataset @type rdtype: int @param covers: The covered type. Usually this value is dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or dns.rdatatype.RRSIG, then the covers value will be the rdata type the SIG/RRSIG covers. The library treats the SIG and RRSIG types as if they were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much easier to work with than if RRSIGs covering different rdata types were aggregated into a single RRSIG rdataset. @type covers: int @param create: If True, create the rdataset if it is not found. @type create: bool @raises KeyError: An rdataset of the desired type and class does not exist and I{create} is not True. @rtype: dns.rdataset.Rdataset object """ for rds in self.rdatasets: if rds.match(rdclass, rdtype, covers): return rds if not create: raise KeyError rds = dns.rdataset.Rdataset(rdclass, rdtype) self.rdatasets.append(rds) return rds def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, create=False): """Get an rdataset matching the specified properties in the current node. None is returned if an rdataset of the specified type and class does not exist and I{create} is not True. 
@param rdclass: The class of the rdataset @type rdclass: int @param rdtype: The type of the rdataset @type rdtype: int @param covers: The covered type. @type covers: int @param create: If True, create the rdataset if it is not found. @type create: bool @rtype: dns.rdataset.Rdataset object or None """ try: rds = self.find_rdataset(rdclass, rdtype, covers, create) except KeyError: rds = None return rds def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE): """Delete the rdataset matching the specified properties in the current node. If a matching rdataset does not exist, it is not an error. @param rdclass: The class of the rdataset @type rdclass: int @param rdtype: The type of the rdataset @type rdtype: int @param covers: The covered type. @type covers: int """ rds = self.get_rdataset(rdclass, rdtype, covers) if not rds is None: self.rdatasets.remove(rds) def replace_rdataset(self, replacement): """Replace an rdataset. It is not an error if there is no rdataset matching I{replacement}. Ownership of the I{replacement} object is transferred to the node; in other words, this method does not store a copy of I{replacement} at the node, it stores I{replacement} itself. """ self.delete_rdataset(replacement.rdclass, replacement.rdtype, replacement.covers) self.rdatasets.append(replacement)
apache-2.0
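As a small illustration of the find_rdataset/get_rdataset contract documented above (KeyError versus None, and create=True), here is a hedged usage sketch; it assumes the dnspython package is importable and is not part of the module itself.

import dns.node
import dns.rdataclass
import dns.rdatatype

node = dns.node.Node()

# With create=True a matching (empty) rdataset is added if none exists yet.
a_rdataset = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.A, create=True)

# get_rdataset() returns None instead of raising KeyError when nothing matches.
missing = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.MX)

print(len(node), a_rdataset, missing)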
szibis/ansible-modules-core
database/postgresql/postgresql_user.py
23
25160
#!/usr/bin/python # -*- coding: utf-8 -*- # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: postgresql_user short_description: Adds or removes a users (roles) from a PostgreSQL database. description: - Add or remove PostgreSQL users (roles) from a remote host and, optionally, grant the users access to an existing database or tables. - The fundamental function of the module is to create, or delete, roles from a PostgreSQL cluster. Privilege assignment, or removal, is an optional step, which works on one database at a time. This allows for the module to be called several times in the same module to modify the permissions on different databases, or to grant permissions to already existing users. - A user cannot be removed until all the privileges have been stripped from the user. In such situation, if the module tries to remove the user it will fail. To avoid this from happening the fail_on_user option signals the module to try to remove the user, but if not possible keep going; the module will report if changes happened and separately if the user was removed or not. version_added: "0.6" options: name: description: - name of the user (role) to add or remove required: true default: null password: description: - set the user's password, before 1.4 this was required. - "When passing an encrypted password, the encrypted parameter must also be true, and it must be generated with the format C('str[\\"md5\\"] + md5[ password + username ]'), resulting in a total of 35 characters. An easy way to do this is: C(echo \\"md5`echo -n \\"verysecretpasswordJOE\\" | md5`\\"). Note that if encrypted is set, the stored password will be hashed whether or not it is pre-encrypted." required: false default: null db: description: - name of database where permissions will be granted required: false default: null fail_on_user: description: - if C(yes), fail when user can't be removed. Otherwise just log and continue required: false default: 'yes' choices: [ "yes", "no" ] port: description: - Database port to connect to. required: false default: 5432 login_user: description: - User (role) used to authenticate with PostgreSQL required: false default: postgres login_password: description: - Password used to authenticate with PostgreSQL required: false default: null login_host: description: - Host running PostgreSQL. 
required: false default: localhost login_unix_socket: description: - Path to a Unix domain socket for local connections required: false default: null priv: description: - "PostgreSQL privileges string in the format: C(table:priv1,priv2)" required: false default: null role_attr_flags: description: - "PostgreSQL role attributes string in the format: CREATEDB,CREATEROLE,SUPERUSER" required: false default: null choices: [ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEUSER", "[NO]CREATEDB", "[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION" ] state: description: - The user (role) state required: false default: present choices: [ "present", "absent" ] encrypted: description: - whether the password is stored hashed in the database. boolean. Passwords can be passed already hashed or unhashed, and postgresql ensures the stored password is hashed when encrypted is set. required: false default: false version_added: '1.4' expires: description: - sets the user's password expiration. required: false default: null version_added: '1.4' no_password_changes: description: - if C(yes), don't inspect database for password changes. Effective when C(pg_authid) is not accessible (such as AWS RDS). Otherwise, make password changes as necessary. required: false default: 'no' choices: [ "yes", "no" ] version_added: '2.0' notes: - The default authentication assumes that you are either logging in as or sudo'ing to the postgres account on the host. - This module uses psycopg2, a Python PostgreSQL database adapter. You must ensure that psycopg2 is installed on the host before using this module. If the remote host is the PostgreSQL server (which is the default case), then PostgreSQL must also be installed on the remote host. For Ubuntu-based systems, install the postgresql, libpq-dev, and python-psycopg2 packages on the remote host before using this module. - If the passlib library is installed, then passwords that are encrypted in the DB but not encrypted when passed as arguments can be checked for changes. If the passlib library is not installed, unencrypted passwords stored in the DB encrypted will be assumed to have changed. - If you specify PUBLIC as the user, then the privilege changes will apply to all users. You may not specify password or role_attr_flags when the PUBLIC user is specified. 
requirements: [ psycopg2 ] author: "Lorin Hochstein (@lorin)" ''' EXAMPLES = ''' # Create django user and grant access to database and products table - postgresql_user: db=acme name=django password=ceec4eif7ya priv=CONNECT/products:ALL # Create rails user, grant privilege to create other databases and demote rails from super user status - postgresql_user: name=rails password=secret role_attr_flags=CREATEDB,NOSUPERUSER # Remove test user privileges from acme - postgresql_user: db=acme name=test priv=ALL/products:ALL state=absent fail_on_user=no # Remove test user from test database and the cluster - postgresql_user: db=test name=test priv=ALL state=absent # Example privileges string format INSERT,UPDATE/table:SELECT/anothertable:ALL # Remove an existing user's password - postgresql_user: db=test user=test password=NULL ''' import re import itertools try: import psycopg2 import psycopg2.extras except ImportError: postgresqldb_found = False else: postgresqldb_found = True _flags = ('SUPERUSER', 'CREATEROLE', 'CREATEUSER', 'CREATEDB', 'INHERIT', 'LOGIN', 'REPLICATION') VALID_FLAGS = frozenset(itertools.chain(_flags, ('NO%s' % f for f in _flags))) VALID_PRIVS = dict(table=frozenset(('SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER', 'ALL')), database=frozenset(('CREATE', 'CONNECT', 'TEMPORARY', 'TEMP', 'ALL')), ) # map to cope with idiosyncracies of SUPERUSER and LOGIN PRIV_TO_AUTHID_COLUMN = dict(SUPERUSER='rolsuper', CREATEROLE='rolcreaterole', CREATEUSER='rolcreateuser', CREATEDB='rolcreatedb', INHERIT='rolinherit', LOGIN='rolcanlogin', REPLICATION='rolreplication') class InvalidFlagsError(Exception): pass class InvalidPrivsError(Exception): pass # =========================================== # PostgreSQL module specific support methods. # def user_exists(cursor, user): # The PUBLIC user is a special case that is always there if user == 'PUBLIC': return True query = "SELECT rolname FROM pg_roles WHERE rolname=%(user)s" cursor.execute(query, {'user': user}) return cursor.rowcount > 0 def user_add(cursor, user, password, role_attr_flags, encrypted, expires): """Create a new database user (role).""" # Note: role_attr_flags escaped by parse_role_attrs and encrypted is a literal query_password_data = dict(password=password, expires=expires) query = ['CREATE USER %(user)s' % { "user": pg_quote_identifier(user, 'role')}] if password is not None: query.append("WITH %(crypt)s" % { "crypt": encrypted }) query.append("PASSWORD %(password)s") if expires is not None: query.append("VALID UNTIL %(expires)s") query.append(role_attr_flags) query = ' '.join(query) cursor.execute(query, query_password_data) return True def user_alter(cursor, module, user, password, role_attr_flags, encrypted, expires, no_password_changes): """Change user password and/or attributes. Return True if changed, False otherwise.""" changed = False # Note: role_attr_flags escaped by parse_role_attrs and encrypted is a literal if user == 'PUBLIC': if password is not None: module.fail_json(msg="cannot change the password for PUBLIC user") elif role_attr_flags != '': module.fail_json(msg="cannot change the role_attr_flags for PUBLIC user") else: return False # Handle passwords. if not no_password_changes and (password is not None or role_attr_flags is not None): # Select password and all flag-like columns in order to verify changes. 
query_password_data = dict(password=password, expires=expires) select = "SELECT * FROM pg_authid where rolname=%(user)s" cursor.execute(select, {"user": user}) # Grab current role attributes. current_role_attrs = cursor.fetchone() # Do we actually need to do anything? pwchanging = False if password is not None: if encrypted: if password.startswith('md5'): if password != current_role_attrs['rolpassword']: pwchanging = True else: try: from passlib.hash import postgres_md5 as pm if pm.encrypt(password, user) != current_role_attrs['rolpassword']: pwchanging = True except ImportError: # Cannot check if passlib is not installed, so assume password is different pwchanging = True else: if password != current_role_attrs['rolpassword']: pwchanging = True role_attr_flags_changing = False if role_attr_flags: role_attr_flags_dict = {} for r in role_attr_flags.split(' '): if r.startswith('NO'): role_attr_flags_dict[r.replace('NO', '', 1)] = False else: role_attr_flags_dict[r] = True for role_attr_name, role_attr_value in role_attr_flags_dict.items(): if current_role_attrs[PRIV_TO_AUTHID_COLUMN[role_attr_name]] != role_attr_value: role_attr_flags_changing = True expires_changing = (expires is not None and expires == current_roles_attrs['rol_valid_until']) if not pwchanging and not role_attr_flags_changing and not expires_changing: return False alter = ['ALTER USER %(user)s' % {"user": pg_quote_identifier(user, 'role')}] if pwchanging: alter.append("WITH %(crypt)s" % {"crypt": encrypted}) alter.append("PASSWORD %(password)s") alter.append(role_attr_flags) elif role_attr_flags: alter.append('WITH %s' % role_attr_flags) if expires is not None: alter.append("VALID UNTIL %(expires)s") try: cursor.execute(' '.join(alter), query_password_data) except psycopg2.InternalError, e: if e.pgcode == '25006': # Handle errors due to read-only transactions indicated by pgcode 25006 # ERROR: cannot execute ALTER ROLE in a read-only transaction changed = False module.fail_json(msg=e.pgerror) return changed else: raise psycopg2.InternalError, e # Grab new role attributes. cursor.execute(select, {"user": user}) new_role_attrs = cursor.fetchone() # Detect any differences between current_ and new_role_attrs. for i in range(len(current_role_attrs)): if current_role_attrs[i] != new_role_attrs[i]: changed = True return changed def user_delete(cursor, user): """Try to remove a user. Returns True if successful otherwise False""" cursor.execute("SAVEPOINT ansible_pgsql_user_delete") try: cursor.execute("DROP USER %s" % pg_quote_identifier(user, 'role')) except: cursor.execute("ROLLBACK TO SAVEPOINT ansible_pgsql_user_delete") cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete") return False cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete") return True def has_table_privileges(cursor, user, table, privs): """ Return the difference between the privileges that a user already has and the privileges that they desire to have. :returns: tuple of: * privileges that they have and were requested * privileges they currently hold but were not requested * privileges requested that they do not hold """ cur_privs = get_table_privileges(cursor, user, table) have_currently = cur_privs.intersection(privs) other_current = cur_privs.difference(privs) desired = privs.difference(cur_privs) return (have_currently, other_current, desired) def get_table_privileges(cursor, user, table): if '.' 
in table: schema, table = table.split('.', 1) else: schema = 'public' query = '''SELECT privilege_type FROM information_schema.role_table_grants WHERE grantee=%s AND table_name=%s AND table_schema=%s''' cursor.execute(query, (user, table, schema)) return frozenset([x[0] for x in cursor.fetchall()]) def grant_table_privileges(cursor, user, table, privs): # Note: priv escaped by parse_privs privs = ', '.join(privs) query = 'GRANT %s ON TABLE %s TO %s' % ( privs, pg_quote_identifier(table, 'table'), pg_quote_identifier(user, 'role') ) cursor.execute(query) def revoke_table_privileges(cursor, user, table, privs): # Note: priv escaped by parse_privs privs = ', '.join(privs) query = 'REVOKE %s ON TABLE %s FROM %s' % ( privs, pg_quote_identifier(table, 'table'), pg_quote_identifier(user, 'role') ) cursor.execute(query) def get_database_privileges(cursor, user, db): priv_map = { 'C':'CREATE', 'T':'TEMPORARY', 'c':'CONNECT', } query = 'SELECT datacl FROM pg_database WHERE datname = %s' cursor.execute(query, (db,)) datacl = cursor.fetchone()[0] if datacl is None: return set() r = re.search('%s=(C?T?c?)/[a-z]+\,?' % user, datacl) if r is None: return set() o = set() for v in r.group(1): o.add(priv_map[v]) return normalize_privileges(o, 'database') def has_database_privileges(cursor, user, db, privs): """ Return the difference between the privileges that a user already has and the privileges that they desire to have. :returns: tuple of: * privileges that they have and were requested * privileges they currently hold but were not requested * privileges requested that they do not hold """ cur_privs = get_database_privileges(cursor, user, db) have_currently = cur_privs.intersection(privs) other_current = cur_privs.difference(privs) desired = privs.difference(cur_privs) return (have_currently, other_current, desired) def grant_database_privileges(cursor, user, db, privs): # Note: priv escaped by parse_privs privs =', '.join(privs) if user == "PUBLIC": query = 'GRANT %s ON DATABASE %s TO PUBLIC' % ( privs, pg_quote_identifier(db, 'database')) else: query = 'GRANT %s ON DATABASE %s TO %s' % ( privs, pg_quote_identifier(db, 'database'), pg_quote_identifier(user, 'role')) cursor.execute(query) def revoke_database_privileges(cursor, user, db, privs): # Note: priv escaped by parse_privs privs = ', '.join(privs) if user == "PUBLIC": query = 'REVOKE %s ON DATABASE %s FROM PUBLIC' % ( privs, pg_quote_identifier(db, 'database')) else: query = 'REVOKE %s ON DATABASE %s FROM %s' % ( privs, pg_quote_identifier(db, 'database'), pg_quote_identifier(user, 'role')) cursor.execute(query) def revoke_privileges(cursor, user, privs): if privs is None: return False revoke_funcs = dict(table=revoke_table_privileges, database=revoke_database_privileges) check_funcs = dict(table=has_table_privileges, database=has_database_privileges) changed = False for type_ in privs: for name, privileges in privs[type_].iteritems(): # Check that any of the privileges requested to be removed are # currently granted to the user differences = check_funcs[type_](cursor, user, name, privileges) if differences[0]: revoke_funcs[type_](cursor, user, name, privileges) changed = True return changed def grant_privileges(cursor, user, privs): if privs is None: return False grant_funcs = dict(table=grant_table_privileges, database=grant_database_privileges) check_funcs = dict(table=has_table_privileges, database=has_database_privileges) grant_funcs = dict(table=grant_table_privileges, database=grant_database_privileges) check_funcs = 
dict(table=has_table_privileges, database=has_database_privileges) changed = False for type_ in privs: for name, privileges in privs[type_].iteritems(): # Check that any of the privileges requested for the user are # currently missing differences = check_funcs[type_](cursor, user, name, privileges) if differences[2]: grant_funcs[type_](cursor, user, name, privileges) changed = True return changed def parse_role_attrs(role_attr_flags): """ Parse role attributes string for user creation. Format: attributes[,attributes,...] Where: attributes := CREATEDB,CREATEROLE,NOSUPERUSER,... [ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEUSER", "[NO]CREATEDB", "[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION" ] """ if ',' in role_attr_flags: flag_set = frozenset(r.upper() for r in role_attr_flags.split(",")) elif role_attr_flags: flag_set = frozenset((role_attr_flags.upper(),)) else: flag_set = frozenset() if not flag_set.issubset(VALID_FLAGS): raise InvalidFlagsError('Invalid role_attr_flags specified: %s' % ' '.join(flag_set.difference(VALID_FLAGS))) o_flags = ' '.join(flag_set) return o_flags def normalize_privileges(privs, type_): new_privs = set(privs) if 'ALL' in privs: new_privs.update(VALID_PRIVS[type_]) new_privs.remove('ALL') if 'TEMP' in privs: new_privs.add('TEMPORARY') new_privs.remove('TEMP') return new_privs def parse_privs(privs, db): """ Parse privilege string to determine permissions for database db. Format: privileges[/privileges/...] Where: privileges := DATABASE_PRIVILEGES[,DATABASE_PRIVILEGES,...] | TABLE_NAME:TABLE_PRIVILEGES[,TABLE_PRIVILEGES,...] """ if privs is None: return privs o_privs = { 'database':{}, 'table':{} } for token in privs.split('/'): if ':' not in token: type_ = 'database' name = db priv_set = frozenset(x.strip().upper() for x in token.split(',') if x.strip()) else: type_ = 'table' name, privileges = token.split(':', 1) priv_set = frozenset(x.strip().upper() for x in privileges.split(',') if x.strip()) if not priv_set.issubset(VALID_PRIVS[type_]): raise InvalidPrivsError('Invalid privs specified for %s: %s' % (type_, ' '.join(priv_set.difference(VALID_PRIVS[type_])))) priv_set = normalize_privileges(priv_set, type_) o_privs[type_][name] = priv_set return o_privs # =========================================== # Module execution. 
# def main(): module = AnsibleModule( argument_spec=dict( login_user=dict(default="postgres"), login_password=dict(default=""), login_host=dict(default=""), login_unix_socket=dict(default=""), user=dict(required=True, aliases=['name']), password=dict(default=None), state=dict(default="present", choices=["absent", "present"]), priv=dict(default=None), db=dict(default=''), port=dict(default='5432'), fail_on_user=dict(type='bool', default='yes'), role_attr_flags=dict(default=''), encrypted=dict(type='bool', default='no'), no_password_changes=dict(type='bool', default='no'), expires=dict(default=None) ), supports_check_mode = True ) user = module.params["user"] password = module.params["password"] state = module.params["state"] fail_on_user = module.params["fail_on_user"] db = module.params["db"] if db == '' and module.params["priv"] is not None: module.fail_json(msg="privileges require a database to be specified") privs = parse_privs(module.params["priv"], db) port = module.params["port"] no_password_changes = module.params["no_password_changes"] try: role_attr_flags = parse_role_attrs(module.params["role_attr_flags"]) except InvalidFlagsError, e: module.fail_json(msg=str(e)) if module.params["encrypted"]: encrypted = "ENCRYPTED" else: encrypted = "UNENCRYPTED" expires = module.params["expires"] if not postgresqldb_found: module.fail_json(msg="the python psycopg2 module is required") # To use defaults values, keyword arguments must be absent, so # check which values are empty and don't include in the **kw # dictionary params_map = { "login_host":"host", "login_user":"user", "login_password":"password", "port":"port", "db":"database" } kw = dict( (params_map[k], v) for (k, v) in module.params.iteritems() if k in params_map and v != "" ) # If a login_unix_socket is specified, incorporate it here. is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost" if is_localhost and module.params["login_unix_socket"] != "": kw["host"] = module.params["login_unix_socket"] try: db_connection = psycopg2.connect(**kw) cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor) except Exception, e: module.fail_json(msg="unable to connect to database: %s" % e) kw = dict(user=user) changed = False user_removed = False if state == "present": if user_exists(cursor, user): try: changed = user_alter(cursor, module, user, password, role_attr_flags, encrypted, expires, no_password_changes) except SQLParseError, e: module.fail_json(msg=str(e)) else: try: changed = user_add(cursor, user, password, role_attr_flags, encrypted, expires) except SQLParseError, e: module.fail_json(msg=str(e)) try: changed = grant_privileges(cursor, user, privs) or changed except SQLParseError, e: module.fail_json(msg=str(e)) else: if user_exists(cursor, user): if module.check_mode: changed = True kw['user_removed'] = True else: try: changed = revoke_privileges(cursor, user, privs) user_removed = user_delete(cursor, user) except SQLParseError, e: module.fail_json(msg=str(e)) changed = changed or user_removed if fail_on_user and not user_removed: msg = "unable to remove user" module.fail_json(msg=msg) kw['user_removed'] = user_removed if changed: if module.check_mode: db_connection.rollback() else: db_connection.commit() kw['changed'] = changed module.exit_json(**kw) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.database import * main()
gpl-3.0
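The priv option parsed by parse_privs() above uses a compact string format: database privileges and table:priv lists separated by '/'. The standalone snippet below only illustrates that splitting logic with a made-up privilege string and database name; it is a simplified sketch, not the module's own code path.

# Simplified illustration of the "DB_PRIVS/table:PRIV1,PRIV2/..." format.
priv_string = "CONNECT/products:SELECT,INSERT/orders:ALL"
db_name = "acme"  # stand-in for the module's db parameter

parsed = {"database": {}, "table": {}}
for token in priv_string.split("/"):
    if ":" in token:
        table, privs = token.split(":", 1)
        parsed["table"][table] = {p.strip().upper() for p in privs.split(",")}
    else:
        parsed["database"][db_name] = {p.strip().upper() for p in token.split(",")}

print(parsed)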
guardicore/monkey
monkey/monkey_island/cc/services/bootloader.py
1
3054
from typing import Dict, List from bson import ObjectId from monkey_island.cc.database import mongo from monkey_island.cc.services.node import NodeCreationException, NodeService from monkey_island.cc.services.utils.bootloader_config import ( MIN_GLIBC_VERSION, SUPPORTED_WINDOWS_VERSIONS, ) from monkey_island.cc.services.utils.node_states import NodeStates class BootloaderService: @staticmethod def parse_bootloader_telem(telem: Dict) -> bool: telem["ips"] = BootloaderService.remove_local_ips(telem["ips"]) if telem["os_version"] == "": telem["os_version"] = "Unknown OS" telem_id = BootloaderService.get_mongo_id_for_bootloader_telem(telem) mongo.db.bootloader_telems.update({"_id": telem_id}, {"$setOnInsert": telem}, upsert=True) will_monkey_run = BootloaderService.is_os_compatible(telem) try: node = NodeService.get_or_create_node_from_bootloader_telem(telem, will_monkey_run) except NodeCreationException: # Didn't find the node, but allow monkey to run anyways return True node_group = BootloaderService.get_next_node_state(node, telem["system"], will_monkey_run) if "group" not in node or node["group"] != node_group.value: NodeService.set_node_group(node["_id"], node_group) return will_monkey_run @staticmethod def get_next_node_state(node: Dict, system: str, will_monkey_run: bool) -> NodeStates: group_keywords = [system, "monkey"] if "group" in node and node["group"] == "island": group_keywords.extend(["island", "starting"]) else: group_keywords.append("starting") if will_monkey_run else group_keywords.append("old") node_group = NodeStates.get_by_keywords(group_keywords) return node_group @staticmethod def get_mongo_id_for_bootloader_telem(bootloader_telem) -> ObjectId: ip_hash = hex(hash(str(bootloader_telem["ips"])))[3:15] hostname_hash = hex(hash(bootloader_telem["hostname"]))[3:15] return ObjectId(ip_hash + hostname_hash) @staticmethod def is_os_compatible(bootloader_data) -> bool: if bootloader_data["system"] == "windows": return BootloaderService.is_windows_version_supported(bootloader_data["os_version"]) elif bootloader_data["system"] == "linux": return BootloaderService.is_glibc_supported(bootloader_data["glibc_version"]) @staticmethod def is_windows_version_supported(windows_version) -> bool: return SUPPORTED_WINDOWS_VERSIONS.get(windows_version, True) @staticmethod def is_glibc_supported(glibc_version_string) -> bool: glibc_version_string = glibc_version_string.lower() glibc_version = glibc_version_string.split(" ")[-1] return glibc_version >= str(MIN_GLIBC_VERSION) and "eglibc" not in glibc_version_string @staticmethod def remove_local_ips(ip_list) -> List[str]: return [i for i in ip_list if not i.startswith("127")]
gpl-3.0
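For orientation, the dict below sketches the telemetry shape that parse_bootloader_telem() above consumes; the field names are inferred from the service code and the concrete values are invented.

# Inferred telemetry shape; values are illustrative only.
example_telem = {
    "ips": ["127.0.0.1", "192.168.1.20"],    # loopback addresses get stripped
    "hostname": "victim-01",
    "os_version": "",                         # empty string is rewritten to "Unknown OS"
    "system": "linux",                        # "windows" or "linux"
    "glibc_version": "ldd (GNU libc) 2.28",   # checked against MIN_GLIBC_VERSION
}

# The loopback filter in remove_local_ips() boils down to:
non_local = [ip for ip in example_telem["ips"] if not ip.startswith("127")]
print(non_local)  # ['192.168.1.20']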
MrTheodor/espressopp
src/interaction/DihedralHarmonicCos.py
7
3547
# Copyright (C) 2012,2013 # Max Planck Institute for Polymer Research # Copyright (C) 2008,2009,2010,2011 # Max-Planck-Institute for Polymer Research & Fraunhofer SCAI # # This file is part of ESPResSo++. # # ESPResSo++ is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo++ is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. r""" ****************************************** espressopp.interaction.DihedralHarmonicCos ****************************************** .. math:: U = K (cos(\phi) - cos(\phi_0))^2 .. function:: espressopp.interaction.DihedralHarmonicCos(K, phi0) :param K: (default: 0.0) :param phi0: (default: 0.0) :type K: real :type phi0: real .. function:: espressopp.interaction.FixedQuadrupleListDihedralHarmonicCos(system, fql, potential) :param system: :param fql: :param potential: :type system: :type fql: :type potential: .. function:: espressopp.interaction.FixedQuadrupleListDihedralHarmonicCos.getFixedQuadrupleList() :rtype: A Python list of lists. .. function:: espressopp.interaction.FixedQuadrupleListDihedralHarmonicCos.setPotential(potential) :param potential: :type potential: """ from espressopp import pmi from espressopp.esutil import * from espressopp.interaction.DihedralPotential import * from espressopp.interaction.Interaction import * from _espressopp import interaction_DihedralHarmonicCos, interaction_FixedQuadrupleListDihedralHarmonicCos class DihedralHarmonicCosLocal(DihedralPotentialLocal, interaction_DihedralHarmonicCos): def __init__(self, K=0.0, phi0=0.0): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): cxxinit(self, interaction_DihedralHarmonicCos, K, phi0) class FixedQuadrupleListDihedralHarmonicCosLocal(InteractionLocal, interaction_FixedQuadrupleListDihedralHarmonicCos): def __init__(self, system, fql, potential): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): cxxinit(self, interaction_FixedQuadrupleListDihedralHarmonicCos, system, fql, potential) def setPotential(self, potential): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): self.cxxclass.setPotential(self, potential) def getFixedQuadrupleList(self): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): return self.cxxclass.getFixedQuadrupleList(self) if pmi.isController: class DihedralHarmonicCos(DihedralPotential): 'The DihedralHarmonicCos potential.' pmiproxydefs = dict( cls = 'espressopp.interaction.DihedralHarmonicCosLocal', pmiproperty = ['K', 'phi'] ) class FixedQuadrupleListDihedralHarmonicCos(Interaction): __metaclass__ = pmi.Proxy pmiproxydefs = dict( cls = 'espressopp.interaction.FixedQuadrupleListDihedralHarmonicCosLocal', pmicall = ['setPotential', 'getFixedQuadrupleList'] )
gpl-3.0
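The docstring above defines the potential U = K (cos(phi) - cos(phi0))^2. The plain-Python sketch below simply evaluates that formula for a few angles; the K and phi0 values are arbitrary examples, not ESPResSo++ defaults.

import math

def dihedral_harmonic_cos(phi, K=1.0, phi0=math.pi):
    """Evaluate U = K * (cos(phi) - cos(phi0))**2 for a single dihedral angle."""
    return K * (math.cos(phi) - math.cos(phi0)) ** 2

for phi in (0.0, math.pi / 2, math.pi):
    print(phi, dihedral_harmonic_cos(phi))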
topicusonderwijs/zxing-ios
cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/wix.py
34
3561
"""SCons.Tool.wix Tool-specific initialization for wix, the Windows Installer XML Tool. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/wix.py 5023 2010/06/14 22:05:46 scons" import SCons.Builder import SCons.Action import os def generate(env): """Add Builders and construction variables for WiX to an Environment.""" if not exists(env): return env['WIXCANDLEFLAGS'] = ['-nologo'] env['WIXCANDLEINCLUDE'] = [] env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}' env['WIXLIGHTFLAGS'].append( '-nologo' ) env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}" object_builder = SCons.Builder.Builder( action = '$WIXCANDLECOM', suffix = '.wxiobj', src_suffix = '.wxs') linker_builder = SCons.Builder.Builder( action = '$WIXLIGHTCOM', src_suffix = '.wxiobj', src_builder = object_builder) env['BUILDERS']['WiX'] = linker_builder def exists(env): env['WIXCANDLE'] = 'candle.exe' env['WIXLIGHT'] = 'light.exe' # try to find the candle.exe and light.exe tools and # add the install directory to light libpath. #for path in os.environ['PATH'].split(os.pathsep): for path in os.environ['PATH'].split(os.pathsep): if not path: continue # workaround for some weird python win32 bug. if path[0] == '"' and path[-1:]=='"': path = path[1:-1] # normalize the path path = os.path.normpath(path) # search for the tools in the PATH environment variable try: if env['WIXCANDLE'] in os.listdir(path) and\ env['WIXLIGHT'] in os.listdir(path): env.PrependENVPath('PATH', path) env['WIXLIGHTFLAGS'] = [ os.path.join( path, 'wixui.wixlib' ), '-loc', os.path.join( path, 'WixUI_en-us.wxl' ) ] return 1 except OSError: pass # ignore this, could be a stale PATH entry. return None # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
apache-2.0
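A hypothetical SConstruct fragment showing how the tool registered above is typically driven once candle.exe and light.exe are on PATH; the target and source file names are placeholders.

# SConstruct fragment (run via `scons`, not as a standalone script).
env = Environment(tools=['default', 'wix'])

# The 'WiX' builder compiles the .wxs source with candle and links the .msi with light.
env.WiX('installer.msi', ['installer.wxs'])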
atlarge-research/opendc-web-server
opendc/api/v1/experiments/experimentId/rack-states/endpoint.py
1
1515
from opendc.models.experiment import Experiment
from opendc.models.rack_state import RackState
from opendc.util import exceptions
from opendc.util.rest import Response


def GET(request):
    """Get this Experiment's Rack States."""

    # Make sure required parameters are there

    try:
        request.check_required_parameters(
            path={
                'experimentId': 'int'
            }
        )
    except exceptions.ParameterError as e:
        return Response(400, e.message)

    # Instantiate an Experiment from the database

    experiment = Experiment.from_primary_key((request.params_path['experimentId'],))

    # Make sure this Experiment exists

    if not experiment.exists():
        return Response(404, '{} not found.'.format(experiment))

    # Make sure this user is authorized to view this Experiment's Rack States

    if not experiment.google_id_has_at_least(request.google_id, 'VIEW'):
        return Response(403, 'Forbidden from viewing Rack States for {}.'.format(experiment))

    # Get and return the Rack States

    if 'tick' in request.params_query:
        rack_states = RackState.from_experiment_id_and_tick(
            request.params_path['experimentId'],
            request.params_query['tick']
        )
    else:
        rack_states = RackState.from_experiment_id(request.params_path['experimentId'])

    return Response(
        200,
        'Successfully retrieved Rack States for {}.'.format(experiment),
        [x.to_JSON() for x in rack_states]
    )

mit
vganapath/rally
rally/plugins/common/runners/serial.py
5
3029
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.task import runner @runner.configure(name="serial") class SerialScenarioRunner(runner.ScenarioRunner): """Scenario runner that executes benchmark scenarios serially. Unlike scenario runners that execute in parallel, the serial scenario runner executes scenarios one-by-one in the same python interpreter process as Rally. This allows you to benchmark your scenario without introducing any concurrent operations as well as interactively debug the scenario from the same command that you use to start Rally. """ # NOTE(mmorais): additionalProperties is set True to allow switching # between parallel and serial runners by modifying only *type* property CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string" }, "times": { "type": "integer", "minimum": 1 } }, "additionalProperties": True } def _run_scenario(self, cls, method_name, context, args): """Runs the specified benchmark scenario with given arguments. The scenario iterations are executed one-by-one in the same python interpreter process as Rally. This allows you to benchmark your scenario without introducing any concurrent operations as well as interactively debug the scenario from the same command that you use to start Rally. :param cls: The Scenario class where the scenario is implemented :param method_name: Name of the method that implements the scenario :param context: Benchmark context that contains users, admin & other information, that was created before benchmark started. :param args: Arguments to call the scenario method with :returns: List of results fore each single scenario iteration, where each result is a dictionary """ times = self.config.get("times", 1) for i in range(times): if self.aborted.is_set(): break result = runner._run_scenario_once( cls, method_name, runner._get_scenario_context(i, context), args) self._send_result(result) self._flush_results()
apache-2.0
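To make the CONFIG_SCHEMA above concrete, the sketch below validates a sample runner section against an equivalent schema using the jsonschema package; the task values are invented.

import jsonschema

# Mirror of the serial runner's CONFIG_SCHEMA (minus the $schema marker).
schema = {
    "type": "object",
    "properties": {
        "type": {"type": "string"},
        "times": {"type": "integer", "minimum": 1},
    },
    "additionalProperties": True,
}

runner_config = {"type": "serial", "times": 5}
jsonschema.validate(runner_config, schema)  # raises ValidationError on bad input
print("runner config is valid")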
argriffing/cvxpy
cvxpy/atoms/affine/sum_entries.py
7
1828
""" Copyright 2013 Steven Diamond This file is part of CVXPY. CVXPY is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. CVXPY is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CVXPY. If not, see <http://www.gnu.org/licenses/>. """ from cvxpy.atoms.affine.affine_atom import AffAtom import cvxpy.utilities as u import cvxpy.lin_ops.lin_utils as lu import numpy as np class sum_entries(AffAtom): """ Summing the entries of an expression. Attributes ---------- expr : CVXPY Expression The expression to sum the entries of. """ def __init__(self, expr): super(sum_entries, self).__init__(expr) @AffAtom.numpy_numeric def numeric(self, values): """Sums the entries of value. """ return np.sum(values[0]) def shape_from_args(self): """Always scalar. """ return u.Shape(1, 1) @staticmethod def graph_implementation(arg_objs, size, data=None): """Sum the linear expression's entries. Parameters ---------- arg_objs : list LinExpr for each argument. size : tuple The size of the resulting expression. data : Additional data required by the atom. Returns ------- tuple (LinOp for objective, list of constraints) """ return (lu.sum_entries(arg_objs[0]), [])
gpl-3.0
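A minimal usage sketch for the atom above, written against the older cvxpy API this file belongs to (recent releases renamed sum_entries to cvxpy.sum); it assumes a default solver is available.

import cvxpy

# Minimize the sum of squared entries subject to an elementwise lower bound.
x = cvxpy.Variable(3)
objective = cvxpy.Minimize(cvxpy.sum_entries(cvxpy.square(x)))
problem = cvxpy.Problem(objective, [x >= 1])
problem.solve()

print(problem.value)  # expected to be close to 3.0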
deisi/home-assistant
tests/components/device_tracker/test_unifi.py
4
5171
"""The tests for the Unifi WAP device tracker platform.""" import unittest from unittest import mock import urllib from homeassistant.components.device_tracker import unifi as unifi from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD from unifi import controller class TestUnifiScanner(unittest.TestCase): """Test the Unifiy platform.""" @mock.patch('homeassistant.components.device_tracker.unifi.UnifiScanner') @mock.patch.object(controller, 'Controller') def test_config_minimal(self, mock_ctrl, mock_scanner): """Test the setup with minimal configuration.""" config = { 'device_tracker': { CONF_USERNAME: 'foo', CONF_PASSWORD: 'password', } } result = unifi.get_scanner(None, config) self.assertEqual(unifi.UnifiScanner.return_value, result) mock_ctrl.assert_called_once_with('localhost', 'foo', 'password', 8443, 'v4', 'default') mock_scanner.assert_called_once_with(mock_ctrl.return_value) @mock.patch('homeassistant.components.device_tracker.unifi.UnifiScanner') @mock.patch.object(controller, 'Controller') def test_config_full(self, mock_ctrl, mock_scanner): """Test the setup with full configuration.""" config = { 'device_tracker': { CONF_USERNAME: 'foo', CONF_PASSWORD: 'password', CONF_HOST: 'myhost', 'port': 123, 'site_id': 'abcdef01', } } result = unifi.get_scanner(None, config) self.assertEqual(unifi.UnifiScanner.return_value, result) mock_ctrl.assert_called_once_with('myhost', 'foo', 'password', 123, 'v4', 'abcdef01') mock_scanner.assert_called_once_with(mock_ctrl.return_value) @mock.patch('homeassistant.components.device_tracker.unifi.UnifiScanner') @mock.patch.object(controller, 'Controller') def test_config_error(self, mock_ctrl, mock_scanner): """Test for configuration errors.""" config = { 'device_tracker': { CONF_HOST: 'myhost', 'port': 123, } } result = unifi.get_scanner(None, config) self.assertFalse(result) self.assertFalse(mock_ctrl.called) @mock.patch('homeassistant.components.device_tracker.unifi.UnifiScanner') @mock.patch.object(controller, 'Controller') def test_config_badport(self, mock_ctrl, mock_scanner): """Test the setup with a bad port.""" config = { 'device_tracker': { CONF_USERNAME: 'foo', CONF_PASSWORD: 'password', CONF_HOST: 'myhost', 'port': 'foo', } } result = unifi.get_scanner(None, config) self.assertFalse(result) self.assertFalse(mock_ctrl.called) @mock.patch('homeassistant.components.device_tracker.unifi.UnifiScanner') @mock.patch.object(controller, 'Controller') def test_config_controller_failed(self, mock_ctrl, mock_scanner): """Test for controller failure.""" config = { 'device_tracker': { CONF_USERNAME: 'foo', CONF_PASSWORD: 'password', } } mock_ctrl.side_effect = urllib.error.HTTPError( '/', 500, 'foo', {}, None) result = unifi.get_scanner(None, config) self.assertFalse(result) def test_scanner_update(self): """Test the scanner update.""" ctrl = mock.MagicMock() fake_clients = [ {'mac': '123'}, {'mac': '234'}, ] ctrl.get_clients.return_value = fake_clients unifi.UnifiScanner(ctrl) ctrl.get_clients.assert_called_once_with() def test_scanner_update_error(self): """Test the scanner update for error.""" ctrl = mock.MagicMock() ctrl.get_clients.side_effect = urllib.error.HTTPError( '/', 500, 'foo', {}, None) unifi.UnifiScanner(ctrl) def test_scan_devices(self): """Test the scanning for devices.""" ctrl = mock.MagicMock() fake_clients = [ {'mac': '123'}, {'mac': '234'}, ] ctrl.get_clients.return_value = fake_clients scanner = unifi.UnifiScanner(ctrl) self.assertEqual(set(['123', '234']), set(scanner.scan_devices())) def 
test_get_device_name(self): """Test the getting of device names.""" ctrl = mock.MagicMock() fake_clients = [ {'mac': '123', 'hostname': 'foobar'}, {'mac': '234', 'name': 'Nice Name'}, {'mac': '456'}, ] ctrl.get_clients.return_value = fake_clients scanner = unifi.UnifiScanner(ctrl) self.assertEqual('foobar', scanner.get_device_name('123')) self.assertEqual('Nice Name', scanner.get_device_name('234')) self.assertEqual(None, scanner.get_device_name('456')) self.assertEqual(None, scanner.get_device_name('unknown'))
mit
willprice/arduino-sphere-project
scripts/example_direction_finder/temboo/Library/Amazon/EC2/DeregisterImage.py
5
4153
# -*- coding: utf-8 -*- ############################################################################### # # DeregisterImage # Deregisters the specified AMI. # # Python versions 2.6, 2.7, 3.x # # Copyright 2014, Temboo Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. # # ############################################################################### from temboo.core.choreography import Choreography from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import ChoreographyExecution import json class DeregisterImage(Choreography): def __init__(self, temboo_session): """ Create a new instance of the DeregisterImage Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied. """ super(DeregisterImage, self).__init__(temboo_session, '/Library/Amazon/EC2/DeregisterImage') def new_input_set(self): return DeregisterImageInputSet() def _make_result_set(self, result, path): return DeregisterImageResultSet(result, path) def _make_execution(self, session, exec_id, path): return DeregisterImageChoreographyExecution(session, exec_id, path) class DeregisterImageInputSet(InputSet): """ An InputSet with methods appropriate for specifying the inputs to the DeregisterImage Choreo. The InputSet object is used to specify input parameters when executing this Choreo. """ def set_AWSAccessKeyId(self, value): """ Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.) """ super(DeregisterImageInputSet, self)._set_input('AWSAccessKeyId', value) def set_AWSSecretKeyId(self, value): """ Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.) """ super(DeregisterImageInputSet, self)._set_input('AWSSecretKeyId', value) def set_ImageId(self, value): """ Set the value of the ImageId input for this Choreo. ((required, string) The AMI ID.) """ super(DeregisterImageInputSet, self)._set_input('ImageId', value) def set_ResponseFormat(self, value): """ Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".) """ super(DeregisterImageInputSet, self)._set_input('ResponseFormat', value) def set_UserRegion(self, value): """ Set the value of the UserRegion input for this Choreo. ((optional, string) The AWS region that corresponds to the EC2 endpoint you wish to access. The default region is "us-east-1". See description below for valid values.) """ super(DeregisterImageInputSet, self)._set_input('UserRegion', value) class DeregisterImageResultSet(ResultSet): """ A ResultSet with methods tailored to the values returned by the DeregisterImage Choreo. The ResultSet object is used to retrieve the results of a Choreo execution. """ def getJSONFromString(self, str): return json.loads(str) def get_Response(self): """ Retrieve the value for the "Response" output from this Choreo execution. 
(The response from Amazon.) """ return self._output.get('Response', None) class DeregisterImageChoreographyExecution(ChoreographyExecution): def _make_result_set(self, response, path): return DeregisterImageResultSet(response, path)
gpl-2.0
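Typical Temboo SDK usage of the Choreo above looks like the hedged sketch below; the account name, application key, AWS credentials, and AMI id are all placeholders.

from temboo.core.session import TembooSession
from temboo.Library.Amazon.EC2.DeregisterImage import DeregisterImage

# All credentials and identifiers below are placeholders.
session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")

choreo = DeregisterImage(session)
inputs = choreo.new_input_set()
inputs.set_AWSAccessKeyId("AKIA...")
inputs.set_AWSSecretKeyId("...")
inputs.set_ImageId("ami-12345678")

results = choreo.execute_with_results(inputs)
print(results.get_Response())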
NinoFabio/Plugin.Video.Fabio
servers/powvideo.py
51
5034
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Conector para powvideo # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os from core import scrapertools from core import logger from core import config from core import jsunpack def test_video_exists( page_url ): logger.info("pelisalacarta.powvideo test_video_exists(page_url='%s')" % page_url) return True,"" def get_video_url( page_url , premium = False , user="" , password="", video_password="" ): logger.info("pelisalacarta.powvideo get_video_url(page_url='%s')" % page_url) # Lo pide una vez headers = [['User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14']] data = scrapertools.cache_page( page_url , headers=headers ) #logger.info("data="+data) try: ''' <input type="hidden" name="op" value="download1"> <input type="hidden" name="usr_login" value=""> <input type="hidden" name="id" value="auoxxtvyquoy"> <input type="hidden" name="fname" value="Star.Trek.Into.Darkness.2013.HD.m720p.LAT.avi"> <input type="hidden" name="referer" value=""> <input type="hidden" name="hash" value="1624-83-46-1377796069-b5e6b8f9759d080a3667adad637f00ac"> <input type="submit" name="imhuman" value="Continue to Video" id="btn_download"> ''' op = scrapertools.get_match(data,'<input type="hidden" name="op" value="(down[^"]+)"') usr_login = "" id = scrapertools.get_match(data,'<input type="hidden" name="id" value="([^"]+)"') fname = scrapertools.get_match(data,'<input type="hidden" name="fname" value="([^"]+)"') referer = scrapertools.get_match(data,'<input type="hidden" name="referer" value="([^"]*)"') hashvalue = scrapertools.get_match(data,'<input type="hidden" name="hash" value="([^"]*)"') submitbutton = scrapertools.get_match(data,'<input type="submit" name="imhuman" value="([^"]+)"').replace(" ","+") import time time.sleep(5) # Lo pide una segunda vez, como si hubieras hecho click en el banner #op=download1&usr_login=&id=auoxxtvyquoy&fname=Star.Trek.Into.Darkness.2013.HD.m720p.LAT.avi&referer=&hash=1624-83-46-1377796019-c2b422f91da55d12737567a14ea3dffe&imhuman=Continue+to+Video #op=search&usr_login=&id=auoxxtvyquoy&fname=Star.Trek.Into.Darkness.2013.HD.m720p.LAT.avi&referer=&hash=1624-83-46-1377796398-8020e5629f50ff2d7b7de99b55bdb177&imhuman=Continue+to+Video post = "op="+op+"&usr_login="+usr_login+"&id="+id+"&fname="+fname+"&referer="+referer+"&hash="+hashvalue+"&imhuman="+submitbutton headers.append(["Referer",page_url]) data = scrapertools.cache_page( page_url , post=post, headers=headers ) #logger.info("data="+data) except: import traceback traceback.print_exc() # Extrae la URL logger.info("data="+data) data = scrapertools.find_single_match(data,"<script type='text/javascript'>(.*?)</script>") logger.info("data="+data) data = jsunpack.unpack(data) logger.info("data="+data) data = data.replace("\\","") media_url = scrapertools.find_single_match(data,"file:'([^']+)'") video_urls = [] video_urls.append( [ scrapertools.get_filename_from_url(media_url)[-4:]+" [powvideo]",media_url]) for video_url in video_urls: logger.info("[powvideo.py] %s - %s" % (video_url[0],video_url[1])) return video_urls # Encuentra vídeos del servidor en el texto pasado def find_videos(data): encontrados = set() devuelve = [] # http://powvideo.net/embed-sbb9ptsfqca2 patronvideos = 'powvideo.net/embed-([a-z0-9]+)' logger.info("pelisalacarta.powvideo find_videos 
#"+patronvideos+"#") matches = re.compile(patronvideos,re.DOTALL).findall(data) for match in matches: titulo = "[powvideo]" url = "http://powvideo.net/"+match if url not in encontrados: logger.info(" url="+url) devuelve.append( [ titulo , url , 'powvideo' ] ) encontrados.add(url) else: logger.info(" url duplicada="+url) # http://powvideo.net/auoxxtvyoy patronvideos = 'powvideo.net/([a-z0-9]+)' logger.info("pelisalacarta.powvideo find_videos #"+patronvideos+"#") matches = re.compile(patronvideos,re.DOTALL).findall(data) for match in matches: titulo = "[powvideo]" url = "http://powvideo.net/"+match if url not in encontrados and match!="embed": logger.info(" url="+url) devuelve.append( [ titulo , url , 'powvideo' ] ) encontrados.add(url) else: logger.info(" url duplicada="+url) return devuelve def test(): video_urls = get_video_url("http://powvideo.net/auoxxtvyquoy") return len(video_urls)>0
gpl-2.0
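The connector above works in two requests: it scrapes the hidden form fields from the first page, waits out the five second timer, posts them back, and finally unpacks the packed JavaScript that carries the media URL. A minimal standalone sketch of the first step, using only the re module and the sample markup quoted in the connector itself (scrapertools is not needed for this illustration):

import re

# Markup like the snippet quoted inside get_video_url() above
html = '''
<input type="hidden" name="op" value="download1">
<input type="hidden" name="usr_login" value="">
<input type="hidden" name="id" value="auoxxtvyquoy">
<input type="hidden" name="fname" value="Star.Trek.Into.Darkness.2013.HD.m720p.LAT.avi">
<input type="hidden" name="referer" value="">
<input type="hidden" name="hash" value="1624-83-46-1377796069-b5e6b8f9759d080a3667adad637f00ac">
<input type="submit" name="imhuman" value="Continue to Video" id="btn_download">
'''

# Collect the hidden fields in one pass instead of one get_match() call each
fields = re.findall(r'<input type="hidden" name="([^"]+)" value="([^"]*)"', html)
submit = re.search(r'name="imhuman" value="([^"]+)"', html).group(1)

# Rebuild the POST body the connector sends back after the 5 second wait
post = "&".join("%s=%s" % (name, value) for name, value in fields)
post += "&imhuman=" + submit.replace(" ", "+")
print(post)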
TheTypoMaster/chromium-crosswalk
chrome/common/extensions/docs/server2/features_bundle.py
78
14840
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from copy import copy from branch_utility import BranchUtility from compiled_file_system import SingleFile, Unicode from docs_server_utils import StringIdentity from extensions_paths import API_PATHS, JSON_TEMPLATES from file_system import FileNotFoundError from future import All, Future from path_util import Join from platform_util import GetExtensionTypes, PlatformToExtensionType from third_party.json_schema_compiler.json_parse import Parse _API_FEATURES = '_api_features.json' _MANIFEST_FEATURES = '_manifest_features.json' _PERMISSION_FEATURES = '_permission_features.json' def HasParent(feature_name, feature, all_feature_names): # A feature has a parent if it has a . in its name, its parent exists, # and it does not explicitly specify that it has no parent. return ('.' in feature_name and feature_name.rsplit('.', 1)[0] in all_feature_names and not feature.get('noparent')) def GetParentName(feature_name, feature, all_feature_names): '''Returns the name of the parent feature, or None if it does not have a parent. ''' if not HasParent(feature_name, feature, all_feature_names): return None return feature_name.rsplit('.', 1)[0] def _CreateFeaturesFromJSONFutures(json_futures): '''Returns a dict of features. The value of each feature is a list with all of its possible values. ''' def ignore_feature(name, value): '''Returns true if this feature should be ignored. Features are ignored if they are only available to whitelisted apps or component extensions/apps, as in these cases the APIs are not available to public developers. Private APIs are also unavailable to public developers, but logic elsewhere makes sure they are not listed. So they shouldn't be ignored via this mechanism. ''' if name.endswith('Private'): return False return value.get('location') == 'component' or 'whitelist' in value features = {} for json_future in json_futures: try: features_json = Parse(json_future.Get()) except FileNotFoundError: # Not all file system configurations have the extra files. continue for name, rawvalue in features_json.iteritems(): if name not in features: features[name] = [] for value in (rawvalue if isinstance(rawvalue, list) else (rawvalue,)): if not ignore_feature(name, value): features[name].append(value) return features def _CopyParentFeatureValues(child, parent): '''Takes data from feature dict |parent| and copies/merges it into feature dict |child|. Two passes are run over the features, and on the first pass features are not resolved across caches, so a None value for |parent| may be passed in. ''' if parent is None: return child merged = copy(parent) merged.pop('noparent', None) merged.pop('name', None) merged.update(child) return merged def _ResolveFeature(feature_name, feature_values, extra_feature_values, platform, features_type, features_map): '''Filters and combines the possible values for a feature into one dict. It uses |features_map| to resolve dependencies for each value and inherit unspecified platform and channel data. |feature_values| is then filtered by platform and all values with the most stable platform are merged into one dict. All values in |extra_feature_values| get merged into this dict. Returns |resolve_successful| and |feature|. |resolve_successful| is False if the feature's dependencies have not been merged yet themselves, meaning that this feature can not be reliably resolved yet. 
|feature| is the resulting feature dict, or None if the feature does not exist on the platform specified. ''' feature = None most_stable_channel = None for value in feature_values: # If 'extension_types' or 'channel' is unspecified, these values should # be inherited from dependencies. If they are specified, these values # should override anything specified by dependencies. inherit_valid_platform = 'extension_types' not in value if inherit_valid_platform: valid_platform = None else: valid_platform = (value['extension_types'] == 'all' or platform in value['extension_types']) inherit_channel = 'channel' not in value channel = value.get('channel') dependencies = value.get('dependencies', []) parent = GetParentName( feature_name, value, features_map[features_type]['all_names']) if parent is not None: # The parent data needs to be resolved so the child can inherit it. if parent in features_map[features_type].get('unresolved', ()): return False, None value = _CopyParentFeatureValues( value, features_map[features_type]['resolved'].get(parent)) # Add the parent as a dependency to ensure proper platform filtering. dependencies.append(features_type + ':' + parent) for dependency in dependencies: dep_type, dep_name = dependency.split(':') if (dep_type not in features_map or dep_name in features_map[dep_type].get('unresolved', ())): # The dependency itself has not been merged yet or the features map # does not have the needed data. Fail to resolve. return False, None dep = features_map[dep_type]['resolved'].get(dep_name) if inherit_valid_platform and (valid_platform is None or valid_platform): # If dep is None, the dependency does not exist because it has been # filtered out by platform. This feature value does not explicitly # specify platform data, so filter this feature value out. # Only run this check if valid_platform is True or None so that it # can't be reset once it is False. valid_platform = dep is not None if inherit_channel and dep and 'channel' in dep: if channel is None or BranchUtility.NewestChannel( (dep['channel'], channel)) != channel: # Inherit the least stable channel from the dependencies. channel = dep['channel'] # Default to stable on all platforms. if valid_platform is None: valid_platform = True if valid_platform and channel is None: channel = 'stable' if valid_platform: # The feature value is valid. Merge it into the feature dict. if feature is None or BranchUtility.NewestChannel( (most_stable_channel, channel)) != channel: # If this is the first feature value to be merged, copy the dict. # If this feature value has a more stable channel than the most stable # channel so far, replace the old dict so that it only merges values # from the most stable channel. feature = copy(value) most_stable_channel = channel elif channel == most_stable_channel: feature.update(value) if feature is None: # Nothing was left after filtering the values, but all dependency resolves # were successful. This feature does not exist on |platform|. return True, None # Merge in any extra values. for value in extra_feature_values: feature.update(value) # Cleanup, fill in missing fields. 
if 'name' not in feature: feature['name'] = feature_name feature['channel'] = most_stable_channel return True, feature class _FeaturesCache(object): def __init__(self, file_system, compiled_fs_factory, json_paths, extra_paths, platform, features_type): self._cache = compiled_fs_factory.Create( file_system, self._CreateCache, type(self), category=platform) self._text_cache = compiled_fs_factory.ForUnicode(file_system) self._json_paths = json_paths self._extra_paths = extra_paths self._platform = platform self._features_type = features_type @Unicode def _CreateCache(self, _, features_json): json_path_futures = [self._text_cache.GetFromFile(path) for path in self._json_paths[1:]] extra_path_futures = [self._text_cache.GetFromFile(path) for path in self._extra_paths] features_values = _CreateFeaturesFromJSONFutures( [Future(value=features_json)] + json_path_futures) extra_features_values = _CreateFeaturesFromJSONFutures(extra_path_futures) features = { 'resolved': {}, 'unresolved': copy(features_values), 'extra': extra_features_values, 'all_names': set(features_values.keys()) } # Merges as many feature values as possible without resolving dependencies # from other FeaturesCaches. Pass in a features_map with just this # FeatureCache's features_type. Makes repeated passes until no new # resolves are successful. new_resolves = True while new_resolves: new_resolves = False for feature_name, feature_values in features_values.iteritems(): if feature_name not in features['unresolved']: continue resolve_successful, feature = _ResolveFeature( feature_name, feature_values, extra_features_values.get(feature_name, ()), self._platform, self._features_type, {self._features_type: features}) if resolve_successful: del features['unresolved'][feature_name] new_resolves = True if feature is not None: features['resolved'][feature_name] = feature return features def GetFeatures(self): if not self._json_paths: return Future(value={}) return self._cache.GetFromFile(self._json_paths[0]) class FeaturesBundle(object): '''Provides access to properties of API, Manifest, and Permission features. ''' def __init__(self, file_system, compiled_fs_factory, object_store_creator, platform): def create_features_cache(features_type, feature_file, *extra_paths): return _FeaturesCache( file_system, compiled_fs_factory, [Join(path, feature_file) for path in API_PATHS], extra_paths, self._platform, features_type) if platform not in GetExtensionTypes(): self._platform = PlatformToExtensionType(platform) else: self._platform = platform self._caches = { 'api': create_features_cache('api', _API_FEATURES), 'manifest': create_features_cache( 'manifest', _MANIFEST_FEATURES, Join(JSON_TEMPLATES, 'manifest.json')), 'permission': create_features_cache( 'permission', _PERMISSION_FEATURES, Join(JSON_TEMPLATES, 'permissions.json')) } # Namespace the object store by the file system ID because this class is # used by the availability finder cross-channel. self._object_store = object_store_creator.Create( _FeaturesCache, category=StringIdentity(file_system.GetIdentity(), self._platform)) def GetPermissionFeatures(self): return self.GetFeatures('permission', ('permission',)) def GetManifestFeatures(self): return self.GetFeatures('manifest', ('manifest',)) def GetAPIFeatures(self): return self.GetFeatures('api', ('api', 'manifest', 'permission')) def GetFeatures(self, features_type, dependencies): '''Resolves all dependencies in the categories specified by |dependencies|. Returns the features in the |features_type| category. 
''' def next_(features): if features is not None: return Future(value=features) dependency_futures = [] cache_types = [] for cache_type in dependencies: cache_types.append(cache_type) dependency_futures.append(self._object_store.Get(cache_type)) def load_features(dependency_features_list): futures = [] for dependency_features, cache_type in zip(dependency_features_list, cache_types): if dependency_features is not None: # Get cached dependencies if possible. If it has been cached, all # of its features have been resolved, so the other fields are # unnecessary. futures.append(Future(value={'resolved': dependency_features})) else: futures.append(self._caches[cache_type].GetFeatures()) def resolve(features): features_map = {} for cache_type, feature in zip(cache_types, features): # Copy down to features_map level because the 'resolved' and # 'unresolved' dicts will be modified. features_map[cache_type] = dict((c, copy(d)) for c, d in feature.iteritems()) def has_unresolved(): '''Determines if there are any unresolved features left over in any of the categories in |dependencies|. ''' return any(cache.get('unresolved') for cache in features_map.itervalues()) # Iterate until everything is resolved. If dependencies are multiple # levels deep, it might take multiple passes to inherit data to the # topmost feature. while has_unresolved(): for cache_type, cache in features_map.iteritems(): if 'unresolved' not in cache: continue to_remove = [] for name, values in cache['unresolved'].iteritems(): resolve_successful, feature = _ResolveFeature( name, values, cache['extra'].get(name, ()), self._platform, cache_type, features_map) if not resolve_successful: continue # Try again on the next iteration of the while loop # When successfully resolved, remove it from the unresolved # dict. Add it to the resolved dict if it didn't get deleted. to_remove.append(name) if feature is not None: cache['resolved'][name] = feature for key in to_remove: del cache['unresolved'][key] for cache_type, cache in features_map.iteritems(): self._object_store.Set(cache_type, cache['resolved']) return features_map[features_type]['resolved'] return All(futures).Then(resolve) return All(dependency_futures).Then(load_features) return self._object_store.Get(features_type).Then(next_)
bsd-3-clause
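Most of the resolution logic in features_bundle.py hinges on the parent lookup done by HasParent() and GetParentName(). A small self-contained sketch of the same rule, with invented feature names, just to make the three cases (dotted child, noparent flag, top-level name) concrete:

def get_parent_name(feature_name, feature, all_feature_names):
    # Same rule as HasParent()/GetParentName() above: the name contains a dot,
    # the parent actually exists, and 'noparent' is not set.
    if ('.' in feature_name
            and feature_name.rsplit('.', 1)[0] in all_feature_names
            and not feature.get('noparent')):
        return feature_name.rsplit('.', 1)[0]
    return None

# Hypothetical feature entries, only for illustration
features = {
    'app': {},
    'app.window': {},
    'app.window.create': {},
    'app.runtime': {'noparent': True},
}
names = set(features)
for name in sorted(features):
    print('%s -> %s' % (name, get_parent_name(name, features[name], names)))
# app -> None, app.runtime -> None, app.window -> app, app.window.create -> app.window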
amyvmiwei/chromium
third_party/tlslite/tlslite/integration/AsyncStateMachine.py
121
6963
""" A state machine for using TLS Lite with asynchronous I/O. """ class AsyncStateMachine: """ This is an abstract class that's used to integrate TLS Lite with asyncore and Twisted. This class signals wantsReadsEvent() and wantsWriteEvent(). When the underlying socket has become readable or writeable, the event should be passed to this class by calling inReadEvent() or inWriteEvent(). This class will then try to read or write through the socket, and will update its state appropriately. This class will forward higher-level events to its subclass. For example, when a complete TLS record has been received, outReadEvent() will be called with the decrypted data. """ def __init__(self): self._clear() def _clear(self): #These store the various asynchronous operations (i.e. #generators). Only one of them, at most, is ever active at a #time. self.handshaker = None self.closer = None self.reader = None self.writer = None #This stores the result from the last call to the #currently active operation. If 0 it indicates that the #operation wants to read, if 1 it indicates that the #operation wants to write. If None, there is no active #operation. self.result = None def _checkAssert(self, maxActive=1): #This checks that only one operation, at most, is #active, and that self.result is set appropriately. activeOps = 0 if self.handshaker: activeOps += 1 if self.closer: activeOps += 1 if self.reader: activeOps += 1 if self.writer: activeOps += 1 if self.result == None: if activeOps != 0: raise AssertionError() elif self.result in (0,1): if activeOps != 1: raise AssertionError() else: raise AssertionError() if activeOps > maxActive: raise AssertionError() def wantsReadEvent(self): """If the state machine wants to read. If an operation is active, this returns whether or not the operation wants to read from the socket. If an operation is not active, this returns None. @rtype: bool or None @return: If the state machine wants to read. """ if self.result != None: return self.result == 0 return None def wantsWriteEvent(self): """If the state machine wants to write. If an operation is active, this returns whether or not the operation wants to write to the socket. If an operation is not active, this returns None. @rtype: bool or None @return: If the state machine wants to write. """ if self.result != None: return self.result == 1 return None def outConnectEvent(self): """Called when a handshake operation completes. May be overridden in subclass. """ pass def outCloseEvent(self): """Called when a close operation completes. May be overridden in subclass. """ pass def outReadEvent(self, readBuffer): """Called when a read operation completes. May be overridden in subclass.""" pass def outWriteEvent(self): """Called when a write operation completes. 
May be overridden in subclass.""" pass def inReadEvent(self): """Tell the state machine it can read from the socket.""" try: self._checkAssert() if self.handshaker: self._doHandshakeOp() elif self.closer: self._doCloseOp() elif self.reader: self._doReadOp() elif self.writer: self._doWriteOp() else: self.reader = self.tlsConnection.readAsync(16384) self._doReadOp() except: self._clear() raise def inWriteEvent(self): """Tell the state machine it can write to the socket.""" try: self._checkAssert() if self.handshaker: self._doHandshakeOp() elif self.closer: self._doCloseOp() elif self.reader: self._doReadOp() elif self.writer: self._doWriteOp() else: self.outWriteEvent() except: self._clear() raise def _doHandshakeOp(self): try: self.result = self.handshaker.next() except StopIteration: self.handshaker = None self.result = None self.outConnectEvent() def _doCloseOp(self): try: self.result = self.closer.next() except StopIteration: self.closer = None self.result = None self.outCloseEvent() def _doReadOp(self): self.result = self.reader.next() if not self.result in (0,1): readBuffer = self.result self.reader = None self.result = None self.outReadEvent(readBuffer) def _doWriteOp(self): try: self.result = self.writer.next() except StopIteration: self.writer = None self.result = None def setHandshakeOp(self, handshaker): """Start a handshake operation. @type handshaker: generator @param handshaker: A generator created by using one of the asynchronous handshake functions (i.e. handshakeServerAsync, or handshakeClientxxx(..., async=True). """ try: self._checkAssert(0) self.handshaker = handshaker self._doHandshakeOp() except: self._clear() raise def setServerHandshakeOp(self, **args): """Start a handshake operation. The arguments passed to this function will be forwarded to L{tlslite.TLSConnection.TLSConnection.handshakeServerAsync}. """ handshaker = self.tlsConnection.handshakeServerAsync(**args) self.setHandshakeOp(handshaker) def setCloseOp(self): """Start a close operation. """ try: self._checkAssert(0) self.closer = self.tlsConnection.closeAsync() self._doCloseOp() except: self._clear() raise def setWriteOp(self, writeBuffer): """Start a write operation. @type writeBuffer: str @param writeBuffer: The string to transmit. """ try: self._checkAssert(0) self.writer = self.tlsConnection.writeAsync(writeBuffer) self._doWriteOp() except: self._clear() raise
bsd-3-clause
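AsyncStateMachine never reads or writes the socket itself; it only reports whether the active operation wants a read or a write and expects the event loop to call inReadEvent()/inWriteEvent() back. A hedged sketch of how a subclass might be driven by a plain select() loop; the TLSWrapper class, the pump() helper and the handshake keyword arguments are illustrative assumptions, not part of the module above:

import select
from tlslite.integration.AsyncStateMachine import AsyncStateMachine

class TLSWrapper(AsyncStateMachine):
    # Hypothetical subclass; the real integrations (asyncore, Twisted) do the
    # same wiring inside their own event loops.
    def __init__(self, sock, tls_connection):
        AsyncStateMachine.__init__(self)
        self.sock = sock
        self.tlsConnection = tls_connection  # the async operations come from here

    def outReadEvent(self, readBuffer):
        print('got %d bytes of application data' % len(readBuffer))

def pump(wrapper):
    # Block until the socket is ready for whatever the active operation wants,
    # then hand the event back to the state machine.
    while wrapper.wantsReadEvent() or wrapper.wantsWriteEvent():
        if wrapper.wantsReadEvent():
            select.select([wrapper.sock], [], [])
            wrapper.inReadEvent()
        else:
            select.select([], [wrapper.sock], [])
            wrapper.inWriteEvent()

# e.g. wrapper.setServerHandshakeOp(certChain=cert_chain, privateKey=key)
# followed by pump(wrapper) would run a handshake to completion.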
U2Ft/library
library/library.py
1
4240
#!/usr/bin/env python """ Library Manage a list of media items in a collection, and maintain a list of items to add to the collection. Supports movies, TV shows, music artists, books, and games. Usage: library [-i] [-y] (have|want) (book|tv|movie|artist|game) <name> library list (book|tv|movie|artist|game) [have|want] [--sort=<field>] library del [y] (book|tv|movie|artist|game) <id> Options: -h, --help Show this help message. -i, --use-ID Specify an appropriate ID instead of a title to search for. -y, --yes Assume the answer to every question is "yes". --sort=<field> Sort listing by the specified field (one of "year", "rating", or "title"). """ from docopt import docopt import json from peewee import DoesNotExist from .item import Item from .extractors import NotFoundError from .utils import confirm def print_list(kind, filter, sort_by): """List out all of the items in the specified table of the specified state.""" results = Item.select().where(Item.kind == kind) if filter: results = results.where(Item.state == filter) if not results.first(): # appending an "s" happens to correctly pluralise all supported types exit("There are no {}s in the library.".format(kind)) if sort_by == "year": results = results.order_by(kind.year) elif sort_by == "rating": results = results.order_by(kind.rating) elif sort_by == "title": results = results.order_by(kind.title) if kind in ("movie", "TV show"): print u"IMDb ID\t\tState\t\tYear\t\tRating\t\tTitle\n" item_string = u"{0}\t{1}\t\t{2}\t\t{3}\t\t{4}" for item in results: data = json.loads(str(item.data)) print item_string.format(item.item_ID, item.state, data["Year"], data["Rating"], data["Title"]) elif kind is "book": print u"Google Books ID\t\tState\t\tYear\t\tAuthor\t\t\t\tTitle\n" item_string = u"{0}\t\t{1}\t\t{2}\t\t{3}\t\t{4}" for item in results: data = json.loads(str(item.data)) print item_string.format(item.item_ID, item.state, data["Year"], data["Author"], data["Title"]) elif kind is "game": print u"TheGamesDB ID\tState\tYear\tPlatform\t\tName\n" item_string = u"{0}\t\t{1}\t{2}\t{3}\t{4}" for item in results: data = json.loads(str(item.data)) print item_string.format(item.item_ID, item.state, data["Year"], data["Platform"], data["Name"]) def remove_item(kind, id, yes): try: item = Item.get((Item.kind == kind) & (Item.item_ID == id)) except DoesNotExist: print "No item of that type with that ID was found." else: data = json.loads(str(item.data)) title = data["Name"] if kind is "game" else data["Title"] if confirm(u"Delete \"{}\"?".format(title), yes): item.delete_instance() print "Deleted." else: print "Not deleted." def main(): args = docopt(__doc__) # peewee complains about not having a handler otherwise import peewee peewee.logger.addHandler(peewee.logging.NullHandler()) if args["movie"]: kind = "movie" elif args["tv"]: kind = "TV show" elif args["book"]: kind = "book" elif args["game"]: kind = "game" elif args["artist"]: raise NotImplementedError if not Item.table_exists(): Item.create_table() if args["del"]: remove_item(kind, args["<id>"], args["--yes"]) else: state = "have" if args["have"] else None state = "want" if args["want"] else state if args["list"]: if args["--sort"] not in (None, "year", "rating", "title"): exit("Invalid field for sorting by. Check the help message.") print_list(kind, state, args["--sort"]) else: try: item = Item(kind, args["<name>"], args["--use-ID"]) item.find(args["--yes"]) except NotFoundError: exit("Item not found.") else: # item was found item.set_state(state) if __name__ == "__main__": main()
mit
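The CLI above leans entirely on docopt: the usage string doubles as the parser specification and main() only inspects the resulting dict. A short sketch of what that dict looks like for one made-up invocation (the trimmed usage string below is an assumption for brevity):

from docopt import docopt

usage = """Usage:
  library [-i] [-y] (have|want) (book|tv|movie|artist|game) <name>
  library list (book|tv|movie|artist|game) [have|want] [--sort=<field>]
"""

args = docopt(usage, argv=['have', 'movie', 'Alien'])
# docopt returns a plain dict keyed by the tokens of the usage string, roughly:
# {'have': True, 'want': False, 'movie': True, 'book': False, 'tv': False,
#  '<name>': 'Alien', 'list': False, '--sort': None, '-i': False, '-y': False, ...}
print(args['<name>'])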
Bismarrck/tensorflow
tensorflow/tools/docs/parser_test.py
22
25251
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for documentation parser.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import functools import os import sys from tensorflow.python.platform import googletest from tensorflow.python.util import tf_inspect from tensorflow.tools.docs import doc_controls from tensorflow.tools.docs import parser # The test needs a real module. `types.ModuleType()` doesn't work, as the result # is a `builtin` module. Using "parser" here is arbitraty. The tests don't # depend on the module contents. At this point in the process the public api # has already been extracted. test_module = parser def test_function(unused_arg, unused_kwarg='default'): """Docstring for test function.""" pass def test_function_with_args_kwargs(unused_arg, *unused_args, **unused_kwargs): """Docstring for second test function.""" pass class ParentClass(object): @doc_controls.do_not_doc_inheritable def hidden_method(self): pass class TestClass(ParentClass): """Docstring for TestClass itself.""" def a_method(self, arg='default'): """Docstring for a method.""" pass def hidden_method(self): pass @doc_controls.do_not_generate_docs def hidden_method2(self): pass class ChildClass(object): """Docstring for a child class.""" pass @property def a_property(self): """Docstring for a property.""" pass CLASS_MEMBER = 'a class member' class DummyVisitor(object): def __init__(self, index, duplicate_of): self.index = index self.duplicate_of = duplicate_of class ParserTest(googletest.TestCase): def test_documentation_path(self): self.assertEqual('test.md', parser.documentation_path('test')) self.assertEqual('test/module.md', parser.documentation_path('test.module')) def test_replace_references(self): class HasOneMember(object): def foo(self): pass string = ( 'A @{tf.reference}, another @{tf.reference$with\nnewline}, a member ' '@{tf.reference.foo}, and a @{tf.third$link `text` with `code` in ' 'it}.') duplicate_of = {'tf.third': 'tf.fourth'} index = {'tf.reference': HasOneMember, 'tf.reference.foo': HasOneMember.foo, 'tf.third': HasOneMember, 'tf.fourth': HasOneMember} visitor = DummyVisitor(index, duplicate_of) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) result = reference_resolver.replace_references(string, '../..') self.assertEqual('A <a href="../../tf/reference.md">' '<code>tf.reference</code></a>, ' 'another <a href="../../tf/reference.md">' 'with\nnewline</a>, ' 'a member <a href="../../tf/reference.md#foo">' '<code>tf.reference.foo</code></a>, ' 'and a <a href="../../tf/fourth.md">link ' '<code>text</code> with ' '<code>code</code> in it</a>.', result) def test_doc_replace_references(self): string = '@{$doc1} @{$doc1#abc} @{$doc1$link} @{$doc1#def$zelda} @{$do/c2}' class DocInfo(object): pass doc1 = DocInfo() 
doc1.title = 'Title1' doc1.url = 'URL1' doc2 = DocInfo() doc2.title = 'Two words' doc2.url = 'somewhere/else' doc_index = {'doc1': doc1, 'do/c2': doc2} visitor = DummyVisitor(index={}, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index=doc_index, py_module_names=['tf']) result = reference_resolver.replace_references(string, 'python') self.assertEqual('<a href="../URL1">Title1</a> ' '<a href="../URL1#abc">Title1</a> ' '<a href="../URL1">link</a> ' '<a href="../URL1#def">zelda</a> ' '<a href="../somewhere/else">Two words</a>', result) def test_docs_for_class(self): index = { 'TestClass': TestClass, 'TestClass.a_method': TestClass.a_method, 'TestClass.a_property': TestClass.a_property, 'TestClass.ChildClass': TestClass.ChildClass, 'TestClass.CLASS_MEMBER': TestClass.CLASS_MEMBER } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = { 'TestClass': ['a_method', 'a_property', 'ChildClass', 'CLASS_MEMBER'] } parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='TestClass', py_object=TestClass, parser_config=parser_config) # Make sure the brief docstring is present self.assertEqual( tf_inspect.getdoc(TestClass).split('\n')[0], page_info.doc.brief) # Make sure the method is present self.assertEqual(TestClass.a_method, page_info.methods[0].obj) # Make sure that the signature is extracted properly and omits self. self.assertEqual(["arg='default'"], page_info.methods[0].signature) # Make sure the property is present self.assertIs(TestClass.a_property, page_info.properties[0].obj) # Make sure there is a link to the child class and it points the right way. self.assertIs(TestClass.ChildClass, page_info.classes[0].obj) # Make sure this file is contained as the definition location. self.assertEqual(os.path.relpath(__file__, '/'), page_info.defined_in.path) def test_namedtuple_field_order(self): namedtupleclass = collections.namedtuple('namedtupleclass', {'z', 'y', 'x', 'w', 'v', 'u'}) index = { 'namedtupleclass': namedtupleclass, 'namedtupleclass.u': namedtupleclass.u, 'namedtupleclass.v': namedtupleclass.v, 'namedtupleclass.w': namedtupleclass.w, 'namedtupleclass.x': namedtupleclass.x, 'namedtupleclass.y': namedtupleclass.y, 'namedtupleclass.z': namedtupleclass.z, } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = {'namedtupleclass': {'u', 'v', 'w', 'x', 'y', 'z'}} parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='namedtupleclass', py_object=namedtupleclass, parser_config=parser_config) # Each namedtiple field has a docstring of the form: # 'Alias for field number ##'. These props are returned sorted. 
def sort_key(prop_info): return int(prop_info.obj.__doc__.split(' ')[-1]) self.assertSequenceEqual(page_info.properties, sorted(page_info.properties, key=sort_key)) def test_docs_for_class_should_skip(self): class Parent(object): @doc_controls.do_not_doc_inheritable def a_method(self, arg='default'): pass class Child(Parent): def a_method(self, arg='default'): pass index = { 'Child': Child, 'Child.a_method': Child.a_method, } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = { 'Child': ['a_method'], } parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='Child', py_object=Child, parser_config=parser_config) # Make sure the `a_method` is not present self.assertEqual(0, len(page_info.methods)) def test_docs_for_message_class(self): class CMessage(object): def hidden(self): pass class Message(object): def hidden2(self): pass class MessageMeta(object): def hidden3(self): pass class ChildMessage(CMessage, Message, MessageMeta): def my_method(self): pass index = { 'ChildMessage': ChildMessage, 'ChildMessage.hidden': ChildMessage.hidden, 'ChildMessage.hidden2': ChildMessage.hidden2, 'ChildMessage.hidden3': ChildMessage.hidden3, 'ChildMessage.my_method': ChildMessage.my_method, } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = {'ChildMessage': ['hidden', 'hidden2', 'hidden3', 'my_method']} parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='ChildMessage', py_object=ChildMessage, parser_config=parser_config) self.assertEqual(1, len(page_info.methods)) self.assertEqual('my_method', page_info.methods[0].short_name) def test_docs_for_module(self): index = { 'TestModule': test_module, 'TestModule.test_function': test_function, 'TestModule.test_function_with_args_kwargs': test_function_with_args_kwargs, 'TestModule.TestClass': TestClass, } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = { 'TestModule': ['TestClass', 'test_function', 'test_function_with_args_kwargs'] } parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='TestModule', py_object=test_module, parser_config=parser_config) # Make sure the brief docstring is present self.assertEqual( tf_inspect.getdoc(test_module).split('\n')[0], page_info.doc.brief) # Make sure that the members are there funcs = {f_info.obj for f_info in page_info.functions} self.assertEqual({test_function, test_function_with_args_kwargs}, funcs) classes = {cls_info.obj for cls_info in page_info.classes} self.assertEqual({TestClass}, classes) # Make sure the module's file is contained as the definition location. 
self.assertEqual( os.path.relpath(test_module.__file__, '/'), page_info.defined_in.path) def test_docs_for_function(self): index = { 'test_function': test_function } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = { '': ['test_function'] } parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='test_function', py_object=test_function, parser_config=parser_config) # Make sure the brief docstring is present self.assertEqual( tf_inspect.getdoc(test_function).split('\n')[0], page_info.doc.brief) # Make sure the extracted signature is good. self.assertEqual(['unused_arg', "unused_kwarg='default'"], page_info.signature) # Make sure this file is contained as the definition location. self.assertEqual(os.path.relpath(__file__, '/'), page_info.defined_in.path) def test_docs_for_function_with_kwargs(self): index = { 'test_function_with_args_kwargs': test_function_with_args_kwargs } visitor = DummyVisitor(index=index, duplicate_of={}) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) tree = { '': ['test_function_with_args_kwargs'] } parser_config = parser.ParserConfig( reference_resolver=reference_resolver, duplicates={}, duplicate_of={}, tree=tree, index=index, reverse_index={}, guide_index={}, base_dir='/') page_info = parser.docs_for_object( full_name='test_function_with_args_kwargs', py_object=test_function_with_args_kwargs, parser_config=parser_config) # Make sure the brief docstring is present self.assertEqual( tf_inspect.getdoc(test_function_with_args_kwargs).split('\n')[0], page_info.doc.brief) # Make sure the extracted signature is good. self.assertEqual(['unused_arg', '*unused_args', '**unused_kwargs'], page_info.signature) def test_parse_md_docstring(self): def test_function_with_fancy_docstring(arg): """Function with a fancy docstring. And a bunch of references: @{tf.reference}, another @{tf.reference}, a member @{tf.reference.foo}, and a @{tf.third}. Args: arg: An argument. Raises: an exception Returns: arg: the input, and arg: the input, again. @compatibility(numpy) NumPy has nothing as awesome as this function. @end_compatibility @compatibility(theano) Theano has nothing as awesome as this function. Check it out. 
@end_compatibility """ return arg, arg class HasOneMember(object): def foo(self): pass duplicate_of = {'tf.third': 'tf.fourth'} index = { 'tf': test_module, 'tf.fancy': test_function_with_fancy_docstring, 'tf.reference': HasOneMember, 'tf.reference.foo': HasOneMember.foo, 'tf.third': HasOneMember, 'tf.fourth': HasOneMember } visitor = DummyVisitor(index=index, duplicate_of=duplicate_of) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) doc_info = parser._parse_md_docstring(test_function_with_fancy_docstring, '../..', reference_resolver) self.assertNotIn('@', doc_info.docstring) self.assertNotIn('compatibility', doc_info.docstring) self.assertNotIn('Raises:', doc_info.docstring) self.assertEqual(len(doc_info.function_details), 3) self.assertEqual(set(doc_info.compatibility.keys()), {'numpy', 'theano'}) self.assertEqual(doc_info.compatibility['numpy'], 'NumPy has nothing as awesome as this function.\n') def test_generate_index(self): index = { 'tf': test_module, 'tf.TestModule': test_module, 'tf.test_function': test_function, 'tf.TestModule.test_function': test_function, 'tf.TestModule.TestClass': TestClass, 'tf.TestModule.TestClass.a_method': TestClass.a_method, 'tf.TestModule.TestClass.a_property': TestClass.a_property, 'tf.TestModule.TestClass.ChildClass': TestClass.ChildClass, } duplicate_of = {'tf.TestModule.test_function': 'tf.test_function'} visitor = DummyVisitor(index=index, duplicate_of=duplicate_of) reference_resolver = parser.ReferenceResolver.from_visitor( visitor=visitor, doc_index={}, py_module_names=['tf']) docs = parser.generate_global_index('TestLibrary', index=index, reference_resolver=reference_resolver) # Make sure duplicates and non-top-level symbols are in the index, but # methods and properties are not. self.assertNotIn('a_method', docs) self.assertNotIn('a_property', docs) self.assertIn('TestModule.TestClass', docs) self.assertIn('TestModule.TestClass.ChildClass', docs) self.assertIn('TestModule.test_function', docs) # Leading backtick to make sure it's included top-level. # This depends on formatting, but should be stable. self.assertIn('<code>tf.test_function', docs) def test_argspec_for_functools_partial(self): # pylint: disable=unused-argument def test_function_for_partial1(arg1, arg2, kwarg1=1, kwarg2=2): pass def test_function_for_partial2(arg1, arg2, *my_args, **my_kwargs): pass # pylint: enable=unused-argument # pylint: disable=protected-access # Make sure everything works for regular functions. expected = tf_inspect.FullArgSpec( args=['arg1', 'arg2', 'kwarg1', 'kwarg2'], varargs=None, varkw=None, defaults=(1, 2), kwonlyargs=[], kwonlydefaults=None, annotations={}) self.assertEqual(expected, parser._get_arg_spec(test_function_for_partial1)) # Make sure doing nothing works. expected = tf_inspect.FullArgSpec( args=['arg1', 'arg2', 'kwarg1', 'kwarg2'], varargs=None, varkw=None, defaults=(1, 2), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial1) self.assertEqual(expected, parser._get_arg_spec(partial)) # Make sure setting args from the front works. 
expected = tf_inspect.FullArgSpec( args=['arg2', 'kwarg1', 'kwarg2'], varargs=None, varkw=None, defaults=(1, 2), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial1, 1) self.assertEqual(expected, parser._get_arg_spec(partial)) expected = tf_inspect.FullArgSpec( args=['kwarg2'], varargs=None, varkw=None, defaults=(2,), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial1, 1, 2, 3) self.assertEqual(expected, parser._get_arg_spec(partial)) # Make sure setting kwargs works. expected = tf_inspect.FullArgSpec( args=['arg1', 'arg2', 'kwarg2'], varargs=None, varkw=None, defaults=(2,), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial1, kwarg1=0) self.assertEqual(expected, parser._get_arg_spec(partial)) expected = tf_inspect.FullArgSpec( args=['arg1', 'arg2', 'kwarg1'], varargs=None, varkw=None, defaults=(1,), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial1, kwarg2=0) self.assertEqual(expected, parser._get_arg_spec(partial)) expected = tf_inspect.FullArgSpec( args=['arg1'], varargs=None, varkw=None, defaults=(), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial1, arg2=0, kwarg1=0, kwarg2=0) self.assertEqual(expected, parser._get_arg_spec(partial)) # Make sure *args, *kwargs is accounted for. expected = tf_inspect.FullArgSpec( args=[], varargs='my_args', varkw='my_kwargs', defaults=(), kwonlyargs=[], kwonlydefaults=None, annotations={}) partial = functools.partial(test_function_for_partial2, 0, 1) self.assertEqual(expected, parser._get_arg_spec(partial)) # pylint: enable=protected-access def testSaveReferenceResolver(self): you_cant_serialize_this = object() duplicate_of = {'AClass': ['AClass2']} doc_index = {'doc': you_cant_serialize_this} is_fragment = { 'tf': False, 'tf.VERSION': True, 'tf.AClass': False, 'tf.AClass.method': True, 'tf.AClass2': False, 'tf.function': False } py_module_names = ['tf', 'tfdbg'] resolver = parser.ReferenceResolver(duplicate_of, doc_index, is_fragment, py_module_names) outdir = googletest.GetTempDir() filepath = os.path.join(outdir, 'resolver.json') resolver.to_json_file(filepath) resolver2 = parser.ReferenceResolver.from_json_file(filepath, doc_index) # There are no __slots__, so all fields are visible in __dict__. self.assertEqual(resolver.__dict__, resolver2.__dict__) def testIsFreeFunction(self): result = parser.is_free_function(test_function, 'test_module.test_function', {'test_module': test_module}) self.assertTrue(result) result = parser.is_free_function(test_function, 'TestClass.test_function', {'TestClass': TestClass}) self.assertFalse(result) result = parser.is_free_function(TestClass, 'TestClass', {}) self.assertFalse(result) result = parser.is_free_function(test_module, 'test_module', {}) self.assertFalse(result) RELU_DOC = """Computes rectified linear: `max(features, 0)` Args: features: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `int64`, `uint8`, `int16`, `int8`, `uint16`, `half`. name: A name for the operation (optional) Returns: A `Tensor`. 
Has the same type as `features` """ class TestParseFunctionDetails(googletest.TestCase): def test_parse_function_details(self): docstring, function_details = parser._parse_function_details(RELU_DOC) self.assertEqual(len(function_details), 2) args = function_details[0] self.assertEqual(args.keyword, 'Args') self.assertEqual(len(args.header), 0) self.assertEqual(len(args.items), 2) self.assertEqual(args.items[0][0], 'features') self.assertEqual(args.items[1][0], 'name') self.assertEqual(args.items[1][1], 'A name for the operation (optional)\n\n') returns = function_details[1] self.assertEqual(returns.keyword, 'Returns') relu_doc_lines = RELU_DOC.split('\n') self.assertEqual(docstring, relu_doc_lines[0] + '\n\n') self.assertEqual(returns.header, relu_doc_lines[-2] + '\n') self.assertEqual( RELU_DOC, docstring + ''.join(str(detail) for detail in function_details)) class TestGenerateSignature(googletest.TestCase): def test_known_object(self): known_object = object() reverse_index = {id(known_object): 'location.of.object.in.api'} def example_fun(arg=known_object): # pylint: disable=unused-argument pass sig = parser._generate_signature(example_fun, reverse_index) self.assertEqual(sig, ['arg=location.of.object.in.api']) def test_literals(self): if sys.version_info >= (3, 0): print('Warning: Doc generation is not supported from python3.') return def example_fun(a=5, b=5.0, c=None, d=True, e='hello', f=(1, (2, 3))): # pylint: disable=g-bad-name, unused-argument pass sig = parser._generate_signature(example_fun, reverse_index={}) self.assertEqual( sig, ['a=5', 'b=5.0', 'c=None', 'd=True', "e='hello'", 'f=(1, (2, 3))']) def test_dotted_name(self): if sys.version_info >= (3, 0): print('Warning: Doc generation is not supported from python3.') return # pylint: disable=g-bad-name class a(object): class b(object): class c(object): class d(object): def __init__(self, *args): pass # pylint: enable=g-bad-name e = {'f': 1} def example_fun(arg1=a.b.c.d, arg2=a.b.c.d(1, 2), arg3=e['f']): # pylint: disable=unused-argument pass sig = parser._generate_signature(example_fun, reverse_index={}) self.assertEqual(sig, ['arg1=a.b.c.d', 'arg2=a.b.c.d(1, 2)', "arg3=e['f']"]) if __name__ == '__main__': googletest.main()
apache-2.0
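The tests above pin down the @{symbol} and @{symbol$custom text} reference syntax that ReferenceResolver expands into links. The real implementation lives in parser.py; the sketch below only illustrates the syntax with a single regex and an invented URL scheme, and is not the actual resolver:

import re

def replace_references(text, make_link):
    # '@{tf.name}' -> link labelled with the symbol itself,
    # '@{tf.name$label}' -> link with a custom label.
    def sub(match):
        target, _, label = match.group(1).partition('$')
        return make_link(target, label or '`%s`' % target)
    return re.sub(r'@\{([^}]+)\}', sub, text)

def make_link(target, label):
    # Hypothetical URL scheme, only for demonstration.
    return '<a href="%s.md">%s</a>' % (target.replace('.', '/'), label)

print(replace_references('See @{tf.reference} and @{tf.third$this link}.', make_link))
# See <a href="tf/reference.md">`tf.reference`</a> and <a href="tf/third.md">this link</a>.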
thedep2/CouchPotatoServer
libs/tornado/util.py
102
12256
"""Miscellaneous utility functions and classes. This module is used internally by Tornado. It is not necessarily expected that the functions and classes defined here will be useful to other applications, but they are documented here in case they are. The one public-facing part of this module is the `Configurable` class and its `~Configurable.configure` method, which becomes a part of the interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`, and `.Resolver`. """ from __future__ import absolute_import, division, print_function, with_statement import array import inspect import os import sys import zlib try: xrange # py2 except NameError: xrange = range # py3 class ObjectDict(dict): """Makes a dictionary behave like an object, with attribute-style access. """ def __getattr__(self, name): try: return self[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): self[name] = value class GzipDecompressor(object): """Streaming gzip decompressor. The interface is like that of `zlib.decompressobj` (without some of the optional arguments, but it understands gzip headers and checksums. """ def __init__(self): # Magic parameter makes zlib module understand gzip header # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib # This works on cpython and pypy, but not jython. self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS) def decompress(self, value, max_length=None): """Decompress a chunk, returning newly-available data. Some data may be buffered for later processing; `flush` must be called when there is no more input data to ensure that all data was processed. If ``max_length`` is given, some input data may be left over in ``unconsumed_tail``; you must retrieve this value and pass it back to a future call to `decompress` if it is not empty. """ return self.decompressobj.decompress(value, max_length) @property def unconsumed_tail(self): """Returns the unconsumed portion left over """ return self.decompressobj.unconsumed_tail def flush(self): """Return any remaining buffered data not yet returned by decompress. Also checks for errors such as truncated input. No other methods may be called on this object after `flush`. """ return self.decompressobj.flush() def import_object(name): """Imports an object by name. import_object('x') is equivalent to 'import x'. import_object('x.y.z') is equivalent to 'from x.y import z'. >>> import tornado.escape >>> import_object('tornado.escape') is tornado.escape True >>> import_object('tornado.escape.utf8') is tornado.escape.utf8 True >>> import_object('tornado') is tornado True >>> import_object('tornado.missing_module') Traceback (most recent call last): ... ImportError: No module named missing_module """ if name.count('.') == 0: return __import__(name, None, None) parts = name.split('.') obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0) try: return getattr(obj, parts[-1]) except AttributeError: raise ImportError("No module named %s" % parts[-1]) # Fake unicode literal support: Python 3.2 doesn't have the u'' marker for # literal strings, and alternative solutions like "from __future__ import # unicode_literals" have other problems (see PEP 414). u() can be applied # to ascii strings that include \u escapes (but they must not contain # literal non-ascii characters). 
if type('') is not type(b''): def u(s): return s unicode_type = str basestring_type = str else: def u(s): return s.decode('unicode_escape') unicode_type = unicode basestring_type = basestring # Deprecated alias that was used before we dropped py25 support. # Left here in case anyone outside Tornado is using it. bytes_type = bytes if sys.version_info > (3,): exec(""" def raise_exc_info(exc_info): raise exc_info[1].with_traceback(exc_info[2]) def exec_in(code, glob, loc=None): if isinstance(code, str): code = compile(code, '<string>', 'exec', dont_inherit=True) exec(code, glob, loc) """) else: exec(""" def raise_exc_info(exc_info): raise exc_info[0], exc_info[1], exc_info[2] def exec_in(code, glob, loc=None): if isinstance(code, basestring): # exec(string) inherits the caller's future imports; compile # the string first to prevent that. code = compile(code, '<string>', 'exec', dont_inherit=True) exec code in glob, loc """) def errno_from_exception(e): """Provides the errno from an Exception object. There are cases that the errno attribute was not set so we pull the errno out of the args but if someone instantiates an Exception without any args you will get a tuple error. So this function abstracts all that behavior to give you a safe way to get the errno. """ if hasattr(e, 'errno'): return e.errno elif e.args: return e.args[0] else: return None class Configurable(object): """Base class for configurable interfaces. A configurable interface is an (abstract) class whose constructor acts as a factory function for one of its implementation subclasses. The implementation subclass as well as optional keyword arguments to its initializer can be set globally at runtime with `configure`. By using the constructor as the factory method, the interface looks like a normal class, `isinstance` works as usual, etc. This pattern is most useful when the choice of implementation is likely to be a global decision (e.g. when `~select.epoll` is available, always use it instead of `~select.select`), or when a previously-monolithic class has been split into specialized subclasses. Configurable subclasses must define the class methods `configurable_base` and `configurable_default`, and use the instance method `initialize` instead of ``__init__``. """ __impl_class = None __impl_kwargs = None def __new__(cls, **kwargs): base = cls.configurable_base() args = {} if cls is base: impl = cls.configured_class() if base.__impl_kwargs: args.update(base.__impl_kwargs) else: impl = cls args.update(kwargs) instance = super(Configurable, cls).__new__(impl) # initialize vs __init__ chosen for compatibility with AsyncHTTPClient # singleton magic. If we get rid of that we can switch to __init__ # here too. instance.initialize(**args) return instance @classmethod def configurable_base(cls): """Returns the base class of a configurable hierarchy. This will normally return the class in which it is defined. (which is *not* necessarily the same as the cls classmethod parameter). """ raise NotImplementedError() @classmethod def configurable_default(cls): """Returns the implementation class to be used if none is configured.""" raise NotImplementedError() def initialize(self): """Initialize a `Configurable` subclass instance. Configurable classes should use `initialize` instead of ``__init__``. """ @classmethod def configure(cls, impl, **kwargs): """Sets the class to use when the base class is instantiated. Keyword arguments will be saved and added to the arguments passed to the constructor. 
This can be used to set global defaults for some parameters. """ base = cls.configurable_base() if isinstance(impl, (unicode_type, bytes)): impl = import_object(impl) if impl is not None and not issubclass(impl, cls): raise ValueError("Invalid subclass of %s" % cls) base.__impl_class = impl base.__impl_kwargs = kwargs @classmethod def configured_class(cls): """Returns the currently configured class.""" base = cls.configurable_base() if cls.__impl_class is None: base.__impl_class = cls.configurable_default() return base.__impl_class @classmethod def _save_configuration(cls): base = cls.configurable_base() return (base.__impl_class, base.__impl_kwargs) @classmethod def _restore_configuration(cls, saved): base = cls.configurable_base() base.__impl_class = saved[0] base.__impl_kwargs = saved[1] class ArgReplacer(object): """Replaces one value in an ``args, kwargs`` pair. Inspects the function signature to find an argument by name whether it is passed by position or keyword. For use in decorators and similar wrappers. """ def __init__(self, func, name): self.name = name try: self.arg_pos = inspect.getargspec(func).args.index(self.name) except ValueError: # Not a positional parameter self.arg_pos = None def get_old_value(self, args, kwargs, default=None): """Returns the old value of the named argument without replacing it. Returns ``default`` if the argument is not present. """ if self.arg_pos is not None and len(args) > self.arg_pos: return args[self.arg_pos] else: return kwargs.get(self.name, default) def replace(self, new_value, args, kwargs): """Replace the named argument in ``args, kwargs`` with ``new_value``. Returns ``(old_value, args, kwargs)``. The returned ``args`` and ``kwargs`` objects may not be the same as the input objects, or the input objects may be mutated. If the named argument was not found, ``new_value`` will be added to ``kwargs`` and None will be returned as ``old_value``. """ if self.arg_pos is not None and len(args) > self.arg_pos: # The arg to replace is passed positionally old_value = args[self.arg_pos] args = list(args) # *args is normally a tuple args[self.arg_pos] = new_value else: # The arg to replace is either omitted or passed by keyword. old_value = kwargs.get(self.name) kwargs[self.name] = new_value return old_value, args, kwargs def timedelta_to_seconds(td): """Equivalent to td.total_seconds() (introduced in python 2.7).""" return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6) def _websocket_mask_python(mask, data): """Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length. Returns a `bytes` object of the same length as `data` with the mask applied as specified in section 5.3 of RFC 6455. This pure-python implementation may be replaced by an optimized version when available. """ mask = array.array("B", mask) unmasked = array.array("B", data) for i in xrange(len(data)): unmasked[i] = unmasked[i] ^ mask[i % 4] if hasattr(unmasked, 'tobytes'): # tostring was deprecated in py32. It hasn't been removed, # but since we turn on deprecation warnings in our tests # we need to use the right one. return unmasked.tobytes() else: return unmasked.tostring() if (os.environ.get('TORNADO_NO_EXTENSION') or os.environ.get('TORNADO_EXTENSION') == '0'): # These environment variables exist to make it easier to do performance # comparisons; they are not guaranteed to remain supported in the future. 
_websocket_mask = _websocket_mask_python else: try: from tornado.speedups import websocket_mask as _websocket_mask except ImportError: if os.environ.get('TORNADO_EXTENSION') == '1': raise _websocket_mask = _websocket_mask_python def doctests(): import doctest return doctest.DocTestSuite()
gpl-3.0
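The Configurable docstrings above describe the factory pattern but the module carries no usage example. A short sketch of a hierarchy built on it; the Compressor classes are invented, while configurable_base(), configurable_default(), initialize() and configure() are the hooks documented above:

from tornado.util import Configurable

class Compressor(Configurable):
    # Abstract interface: instantiating Compressor() really builds whichever
    # implementation subclass is currently configured.
    @classmethod
    def configurable_base(cls):
        return Compressor

    @classmethod
    def configurable_default(cls):
        return GzipCompressor

    def initialize(self, level=6):  # used instead of __init__, per the docstring
        self.level = level

class GzipCompressor(Compressor):
    pass

class ZlibCompressor(Compressor):
    pass

c = Compressor()
print('%s %d' % (type(c).__name__, c.level))   # GzipCompressor 6

Compressor.configure(ZlibCompressor, level=9)
c = Compressor()
print('%s %d' % (type(c).__name__, c.level))   # ZlibCompressor 9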
nrwahl2/ansible
lib/ansible/modules/cloud/digital_ocean/digital_ocean_sshkey.py
10
8100
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: digital_ocean_sshkey short_description: Manage DigitalOcean SSH keys description: - Create/delete DigitalOcean SSH keys. version_added: "2.4" author: "Patrick Marques (@pmarques)" options: state: description: - Indicate desired state of the target. default: present choices: ['present', 'absent'] fingerprint: description: - This is a unique identifier for the SSH key used to delete a key required: false default: None version_added: 2.4 name: description: - The name for the SSH key required: false default: None ssh_pub_key: description: - The Public SSH key to add. required: false default: None oauth_token: description: - DigitalOcean OAuth token. required: true version_added: 2.4 notes: - Version 2 of DigitalOcean API is used. requirements: - "python >= 2.6" ''' EXAMPLES = ''' - name: "Create ssh key" digital_ocean_sshkey: name: "My SSH Public Key" public_key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example" register: result - name: "Delete ssh key" digital_ocean_sshkey: state: "absent" fingerprint: "3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa" ''' RETURN = ''' # Digital Ocean API info https://developers.digitalocean.com/documentation/v2/#list-all-keys data: description: This is only present when C(state=present) returned: when C(state=present) type: dict sample: { "ssh_key": { "id": 512189, "fingerprint": "3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa", "name": "My SSH Public Key", "public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example" } } ''' import json import os import hashlib import base64 from ansible.module_utils.basic import env_fallback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.urls import fetch_url class Response(object): def __init__(self, resp, info): self.body = None if resp: self.body = resp.read() self.info = info @property def json(self): if not self.body: if "body" in self.info: return json.loads(self.info["body"]) return None try: return json.loads(self.body) except ValueError: return None @property def status_code(self): return self.info["status"] class Rest(object): def __init__(self, module, headers): self.module = module self.headers = headers self.baseurl = 'https://api.digitalocean.com/v2' def _url_builder(self, path): if path[0] == '/': path = path[1:] return '%s/%s' % (self.baseurl, path) def send(self, method, path, data=None, headers=None): url = self._url_builder(path) data = self.module.jsonify(data) timeout = self.module.params['timeout'] resp, info = fetch_url(self.module, url, data=data, headers=self.headers, method=method, timeout=timeout) # Exceptions in fetch_url may result in a status -1, the ensures a if info['status'] == -1: self.module.fail_json(msg=info['msg']) return Response(resp, info) def get(self, path, data=None, headers=None): return self.send('GET', path, data, headers) def put(self, path, data=None, headers=None): return self.send('PUT', path, data, headers) def post(self, path, data=None, headers=None): return self.send('POST', path, data, headers) 
def delete(self, path, data=None, headers=None): return self.send('DELETE', path, data, headers) def core(module): api_token = module.params['oauth_token'] state = module.params['state'] fingerprint = module.params['fingerprint'] name = module.params['name'] ssh_pub_key = module.params['ssh_pub_key'] rest = Rest(module, {'Authorization': 'Bearer {0}'.format(api_token), 'Content-type': 'application/json'}) fingerprint = fingerprint or ssh_key_fingerprint(ssh_pub_key) response = rest.get('account/keys/{0}'.format(fingerprint)) status_code = response.status_code json = response.json if status_code not in (200, 404): module.fail_json(msg='Error getting ssh key [{0}: {1}]'.format( status_code, response.json['message']), fingerprint=fingerprint) if state in ('present'): if status_code == 404: # IF key not found create it! if module.check_mode: module.exit_json(changed=True) payload = { 'name': name, 'public_key': ssh_pub_key } response = rest.post('account/keys', data=payload) status_code = response.status_code json = response.json if status_code == 201: module.exit_json(changed=True, data=json) module.fail_json(msg='Error creating ssh key [{0}: {1}]'.format( status_code, response.json['message'])) elif status_code == 200: # If key found was found, check if name needs to be updated if name is None or json['ssh_key']['name'] == name: module.exit_json(changed=False, data=json) if module.check_mode: module.exit_json(changed=True) payload = { 'name': name, } response = rest.put('account/keys/{0}'.format(fingerprint), data=payload) status_code = response.status_code json = response.json if status_code == 200: module.exit_json(changed=True, data=json) module.fail_json(msg='Error updating ssh key name [{0}: {1}]'.format( status_code, response.json['message']), fingerprint=fingerprint) elif state in ('absent'): if status_code == 404: module.exit_json(changed=False) if module.check_mode: module.exit_json(changed=True) response = rest.delete('account/keys/{0}'.format(fingerprint)) status_code = response.status_code json = response.json if status_code == 204: module.exit_json(changed=True) module.fail_json(msg='Error creating ssh key [{0}: {1}]'.format( status_code, response.json['message'])) def ssh_key_fingerprint(ssh_pub_key): key = ssh_pub_key.split(None, 2)[1] fingerprint = hashlib.md5(base64.decodestring(key)).hexdigest() return ':'.join(a + b for a, b in zip(fingerprint[::2], fingerprint[1::2])) def main(): module = AnsibleModule( argument_spec=dict( state=dict(choices=['present', 'absent'], default='present'), fingerprint=dict(aliases=['id'], required=False), name=dict(required=False), ssh_pub_key=dict(required=False), oauth_token=dict( no_log=True, # Support environment variable for DigitalOcean OAuth Token fallback=(env_fallback, ['DO_API_TOKEN', 'DO_API_KEY', 'DO_OAUTH_TOKEN']), required=True, ), validate_certs=dict(type='bool', default=True), timeout=dict(type='int', default=30), ), required_one_of=( ('fingerprint', 'ssh_pub_key'), ), supports_check_mode=True, ) core(module) if __name__ == '__main__': main()
gpl-3.0
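A note on the digital_ocean_sshkey record above: its ssh_key_fingerprint helper relies on the deprecated base64.decodestring. A minimal standalone sketch of the same colon-separated MD5 fingerprint computation, written against Python 3's base64.b64decode and exercised only on the illustrative key from the module documentation, might look like this (a sketch, not the module's actual code path).

import base64
import hashlib

def md5_fingerprint(ssh_pub_key):
    # The second whitespace-separated field of an OpenSSH public key line
    # is the base64-encoded key blob.
    key_blob = ssh_pub_key.split(None, 2)[1]
    digest = hashlib.md5(base64.b64decode(key_blob)).hexdigest()
    # Join hex digits into the aa:bb:cc:... form the DigitalOcean API expects.
    return ':'.join(a + b for a, b in zip(digest[::2], digest[1::2]))

# Example key taken from the module documentation; output is for illustration only.
example = ("ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1"
           "e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example")
print(md5_fingerprint(example))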
ravippandey/xen-api
scripts/link-vms-by-sr.py
10
2795
#!/usr/bin/python # Populate a directory of symlinks partitioning VMs by SR # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 import atexit import XenAPI import os, sys import getopt def logout(): try: session.xenapi.session.logout() except: pass atexit.register(logout) def usage(): print >> sys.stderr, "%s [-d <directory>]" % sys.argv[0] sys.exit(1) def main(argv): session = XenAPI.xapi_local() session.xenapi.login_with_password("", "") try: opts, args = getopt.getopt(sys.argv[1:], "hd:", []) except getopt.GetoptError, err: print str(err) usage() dir = None for o,a in opts: if o == "-d": dir = a if dir == None: usage() vms = session.xenapi.VM.get_all_records() vbds = session.xenapi.VBD.get_all_records() vdis = session.xenapi.VDI.get_all_records() srs = session.xenapi.SR.get_all_records() vms_in_sr = {} for vm in vms: vmrec = vms[vm] # Ignore built-in templates if vmrec['other_config'].has_key('default_template'): if vmrec['other_config']['default_template'] == 'true': continue # Ignore dom0 if vmrec['is_control_domain']: continue # Ignore snapshots if vmrec['is_a_snapshot']: continue # for each VM, figure out the set of SRs it uses for vbd in vmrec['VBDs']: if not vbds.has_key(vbd): continue vdi = vbds[vbd]['VDI'] # Ignore VBDs with no VDI such as an empty CD VBD if vdi == '': continue if not vdis.has_key(vdi): continue sr = vdis[vdi]['SR'] if not srs.has_key(sr): continue sruuid = srs[sr]['uuid'] vmuuid = vmrec['uuid'] if not vms_in_sr.has_key(sruuid): vms_in_sr[sruuid] = {} vms_in_sr[sruuid][vmuuid] = 1 for sruuid in vms_in_sr.keys(): linkdir = "%s/by-sr/%s" % (dir, sruuid) if os.path.isdir(linkdir): print >> sys.stderr, "Directory %s already exists, skipping" % linkdir continue try: os.makedirs(linkdir) except: print >> sys.stderr, "Failed to create directory: %s" % linkdir for vmuuid in vms_in_sr[sruuid].keys(): try: src = "../../all/%s.vmmeta" % vmuuid targ = "%s/%s.vmmeta" % (linkdir, vmuuid) os.symlink(src, targ) except: print >> sys.stderr, "Failed to create symlink: %s -> %s" % (src, targ) session.xenapi.logout() if __name__ == "__main__": main(sys.argv[1:])
lgpl-2.1
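The link-vms-by-sr.py script above is Python 2; its core pattern is grouping VM uuids by SR uuid and creating relative symlinks under by-sr/<sr-uuid>/. The following Python 3 sketch reproduces that pattern over a hypothetical SR-to-VM mapping in a temporary directory; it assumes a POSIX filesystem, and the symlink targets are relative and may dangle, exactly as in the original.

import os
import tempfile

# Hypothetical mapping of SR uuid -> VM uuids, standing in for what the
# script derives from the VM/VBD/VDI/SR records.
vms_in_sr = {
    "sr-1111": {"vm-aaaa", "vm-bbbb"},
    "sr-2222": {"vm-cccc"},
}

base = tempfile.mkdtemp()
for sruuid, vmuuids in vms_in_sr.items():
    linkdir = os.path.join(base, "by-sr", sruuid)
    os.makedirs(linkdir, exist_ok=True)
    for vmuuid in vmuuids:
        src = "../../all/%s.vmmeta" % vmuuid        # relative target, may dangle
        targ = os.path.join(linkdir, "%s.vmmeta" % vmuuid)
        os.symlink(src, targ)

print(sorted(os.listdir(os.path.join(base, "by-sr"))))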
apdjustino/DRCOG_Urbansim
src/drcog/models/census_hedonic.py
1
9924
import numpy as np, pandas as pd, os from synthicity.utils import misc from drcog.models import dataset np.random.seed(1) import statsmodels.api as sm import pysal import matplotlib.pyplot as plt import statsmodels ## This code estimate a hedonic model for residential units. It is using data from the census for household characteristics (income), prices, average year of move... and ## buildings data aggregated at the zonal level. The model regresses the ratio of price over income (at the zonal level) on zonal characteristics. The ratio price/income allows to correct for some of the ## heteroskedasticity, which may generate biases when taking the log. On the right hand side, income (later, age) is instrumented by the year of move, as different the year ## of move implies different credit and housing markets conditions at purchasing time. ## The code has two parts: ## ------ Part 1: Generate the data for extimation ## ------ Part 2: Regression and coefficient export ### PART 1: Construction of the table for estimation #### PART 1.a.: Buildings characteristics at the zonal level: def data_zone(dset,buildings, parcels,zones,establishments): b=buildings p=parcels if p.index.name != 'parcel_id': p = p.set_index(parcels['parcel_id']) z=zones e=establishments ### neighborhood (right now zone ) characteristics b['zone_id']=p.zone_id[b.parcel_id].values #e['zone_id'] = b.zone_id[e.building_id].values z['zone_id']=z.index pp=pd.DataFrame(p['zone_id'], index=p.index) pp['county_id']=p['county_id'] pp.drop_duplicates(['zone_id' ], inplace=True) pp=pp.reset_index(pp.zone_id) z=pd.merge(z, pp, left_on='zone_id', right_index=True) z['emp']=e.groupby('zone_id').employees.sum() z['job_within_30min'] = dset.compute_range(z['emp'],30) z['ln_job_within_30min']=np.log( z['job_within_30min']) z['non_residential_sqft_mean']=b.groupby('zone_id').non_residential_sqft.mean() z['ln_non_residential_sqft_mean']=np.log(z['non_residential_sqft_mean']) z['residential_sqft_mean']=b[np.in1d(b['building_type_id'], [2,3,20,24])].groupby('zone_id').bldg_sq_ft.mean() z['ln_residential_sqft_mean']=np.log(z['residential_sqft_mean']) #z['median_value']=b.groupby('zone_id').unit_price_residential.mean() del z['job_within_30min'] del z['non_residential_sqft_mean'] del z['residential_sqft_mean'] del z['emp'] z['median_year_built'] = b.groupby('zone_id').year_built.median().astype('int32') z['ln_median_year_built']=np.log(z['median_year_built']) z['median_yearbuilt_post_2000'] = (b.groupby('zone_id').year_built.median()>2000).astype('int32') z['median_yearbuilt_pre_1970'] = (b.groupby('zone_id').year_built.median()<1970).astype('int32') z['zone_contains_park'] = (p[p.lu_type_id==14].groupby('zone_id').size()>0).astype('int32') z['zonecentroid_x2']=np.log(z['zonecentroid_x'])**2 z['zonecentroid_y2']=np.log(z['zonecentroid_y'])**2 z['zonecentroid_xy']=np.log(z['zonecentroid_x'])*np.log(z['zonecentroid_y']) z['ln_zonecentroid_x']=np.log(z['zonecentroid_x']) z['ln_zonecentroid_y']=np.log(z['zonecentroid_y']) z['zonecentroid_x3']=np.log(z['zonecentroid_x'])**3 z['zonecentroid_y3']=np.log(z['zonecentroid_y'])**3 z['zonecentroid_x4']=np.log(z['zonecentroid_x'])**4 z['zonecentroid_y4']=np.log(z['zonecentroid_y'])**4 del z['median_year_built'] return z #### PART 1.b.: Household characteristics using census data def data_zone_census( zones): data_census=pd.read_csv(os.path.join(misc.data_dir(),'census_zone.csv')) #del data_census['median_value'] data=pd.merge(zones, data_census, on='zone_id', how='inner') #Income using census block group dat 
data['median_income']=data['median_income'].astype(float) data['ln_inc']=np.log(data['median_income']) # Asked price (census) #data['median_value']=data['median_value'].apply(float) data['ln_price']=np.log(data['median_value']) # Race composition data['all races']=data['White alone'].apply(float)+ data['Black or African American alone'].apply(float)\ + data['American Indian and Alaska Native alone'].apply(float)+ data['Asian alone'].apply(float)\ +data['Native Hawaiian and Other Pacific Islander alone'].apply(float)+ data['Some other race alone'].apply(float)\ +data['two races or more'].apply(float) data['percent_white']=np.log(data['White alone']/data['all races']) data['percent_black']=data['Black or African American alone']/data['all races'] data['percent_black2']=data['Black or African American alone']/data['all races']**2 data['ln_residential_sqft_mean2']=data['ln_residential_sqft_mean']**2 # Creating max and min income of neighbors ( can important have implications in terms of gentrification) geo=pd.DataFrame(data['zonecentroid_x']) geo['zonecentroid_y']=data['zonecentroid_y'] geo=np.array(geo) w=pysal.knnW(geo, k=10) n=len(geo) neigh_income_max=np.zeros(n) neigh_income_min=np.zeros(n) for i in range(0, n-1): arr=w.neighbors[i] zone=np.zeros(n) for j in arr: zone[j]=1 data['neigh']=zone neigh_income_max[i]=data[data['neigh']==1].median_income.max() neigh_income_min[i]=data[data['neigh']==1].median_income.min() data['ln_neigh_income_max']=np.log(neigh_income_max/data['median_income']) data['ln_neigh_income_min']=np.log(neigh_income_min/data['median_income']) data=data.set_index(data['zone_id']) return data ###PART 2: Estimation #### PART 2. a.: Instrument for income (highly endogenous to prices because of selection (mostly driven by financial constraint). #### School districts are used as spatial fixed effects. def instrument(depvar, indvar, data, instr, fixedeffect): # Make sure that there is no inf or nan in the data for varname in depvar + indvar + instr + fixedeffect: data=data[np.isfinite(data[varname])] # Generate dummies for categorical variables and remove one of them (to avoid multi-collinearity) inst=pd.get_dummies(data['Year_move'], prefix='YearMove') del inst['YearMove_2008.0'] x=pd.get_dummies(data['school_district_id'], prefix='sdis') del x['sdis_8'] # Fill the righ hand side with instruments collist=list(inst) for varname in collist : x[varname]=inst[varname] # Fill the righ hand side with exogenous variables for varname in indvar: x[varname]=data[varname] # Add a constant x['cons']=1 # Regression (here simply OLS, but something else could be used) mod=sm.OLS(data[depvar], x) result=mod.fit() print result.summary() # Store the predicted value (that will be used on the right hand side in the second stage) for varname in depvar: data[varname+'_iv']=result.predict() return data #### PART 2. 
b.: Second stage regression, replacing income by its predicted value from stage 1 def second_stage(depvar, indvar, data, instrumented, instr, indvariv, fixedeffect): # Instrumentation (first stage) data=instrument(instrumented, indvariv, data, instr, fixedeffect) # Make sure that there is no inf or nan in the RHS/LHS variables for varname in depvar + indvar + fixedeffect: data=data[np.isfinite(data[varname])] #data=data[data['median_value']<400000] # Generate dummies for categorical variables and remove one of them (to avoid multi-collinearity) x=pd.get_dummies(data['school_district_id'], prefix='sdis') del x['sdis_8'] # Fill the righ hand side with instruments for varname in indvar: x[varname]=data[varname] # Replace the instrumented variable by ita predictor from stage one for varname in instrumented: x[varname]=data[varname+'_iv'] # Add a constant x['const']=1 print x # Main Regression. GLM estimation using a Negative Binomial family (it seems to work better than other families) mod=sm.GLM(data[depvar], x, family=sm.families.Poisson()) result=mod.fit() # Return Coefficient collist=list(x.columns.values) dset.store_coeff("coeff_residential",result.params.values,result.params.index) coeff_store_path = os.path.join(misc.data_dir(),'coeffs_res.h5') coeff_store = pd.HDFStore(coeff_store_path) coeff_store['coeffs_res'] = dset.coeffs coeff_store.close() # Predicted Prices data['sim_price']=result.predict() print result.summary() return data """ from synthicity.utils import misc from drcog.models import dataset dset = dataset.DRCOGDataset(os.path.join(misc.data_dir(),'drcog.h5')) zones=data_zone(dset,dset.buildings, dset.parcels,dset.zones,dset.establishments) data=data_zone_census(zones) data['val_inc']=data['median_value'] ind_var=[ 'ln_job_within_30min','zonecentroid_x','zonecentroid_y','zonecentroid_x2','zonecentroid_y2', 'zonecentroid_xy','zonecentroid_x3','zonecentroid_y3','zonecentroid_x4','zonecentroid_y4', 'median_yearbuilt_post_2000','median_yearbuilt_pre_1970','ln_non_residential_sqft_mean','ln_residential_sqft_mean', 'ln_neigh_income_max', 'ln_neigh_income_min', 'percent_black'] ind_var0=[ 'ln_job_within_30min','zonecentroid_x','zonecentroid_y','zonecentroid_x2','zonecentroid_y2', 'zonecentroid_xy','zonecentroid_x3','zonecentroid_y3','zonecentroid_y3','zonecentroid_x4','zonecentroid_y4', 'median_yearbuilt_post_2000','median_yearbuilt_pre_1970','ln_non_residential_sqft_mean','ln_residential_sqft_mean', 'percent_black'] data=second_stage(['val_inc'], ind_var,data, ['ln_inc'], ['Year_move'],ind_var0 , ['school_district_id']) print data[data['median_value']>400000]['sim_price'].mean() print data[data['median_value']>400000]['median_value'].mean() plt.plot(data['sim_price'], data['median_value'], 'ro') plt.show() """
agpl-3.0
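The instrument()/second_stage() pair in the census_hedonic module above implements a two-stage procedure: regress the endogenous income variable on the instruments, then feed the fitted values into the price regression. A numpy-only sketch of that idea on synthetic data (hypothetical variable names, plain least squares in both stages rather than the GLM used above) could read:

import numpy as np

rng = np.random.RandomState(0)
n = 2000

# Synthetic data: an instrument z, an endogenous regressor x that shares the
# unobserved term u with the outcome y.
z = rng.normal(size=n)
u = rng.normal(size=n)
x = 0.8 * z + 0.5 * u + rng.normal(size=n)
y = 2.0 * x + u + rng.normal(size=n)

def ols(yvec, X):
    beta, *_ = np.linalg.lstsq(X, yvec, rcond=None)
    return beta

Z = np.column_stack([np.ones(n), z])

# Stage 1: project the endogenous regressor on the instrument set.
x_hat = Z @ ols(x, Z)

# Stage 2: use the fitted values in place of the endogenous regressor.
beta_2sls = ols(y, np.column_stack([np.ones(n), x_hat]))
beta_ols = ols(y, np.column_stack([np.ones(n), x]))

print("naive OLS slope:", round(beta_ols[1], 2))    # biased away from 2.0
print("two-stage slope:", round(beta_2sls[1], 2))   # close to the true 2.0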
sidartaoliveira/ansible
lib/ansible/modules/cloud/univention/udm_group.py
69
5397
#!/usr/bin/python # -*- coding: UTF-8 -*- # Copyright (c) 2016, Adfinis SyGroup AG # Tobias Rueetschi <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: udm_group version_added: "2.2" author: "Tobias Rueetschi (@2-B)" short_description: Manage of the posix group description: - "This module allows to manage user groups on a univention corporate server (UCS). It uses the python API of the UCS to create a new object or edit it." requirements: - Python >= 2.6 options: state: required: false default: "present" choices: [ present, absent ] description: - Whether the group is present or not. name: required: true description: - Name of the posix group. description: required: false description: - Group description. position: required: false description: - define the whole ldap position of the group, e.g. C(cn=g123m-1A,cn=classes,cn=schueler,cn=groups,ou=schule,dc=example,dc=com). ou: required: false description: - LDAP OU, e.g. school for LDAP OU C(ou=school,dc=example,dc=com). subpath: required: false description: - Subpath inside the OU, e.g. C(cn=classes,cn=students,cn=groups). 
''' EXAMPLES = ''' # Create a POSIX group - udm_group: name: g123m-1A # Create a POSIX group with the exact DN # C(cn=g123m-1A,cn=classes,cn=students,cn=groups,ou=school,dc=school,dc=example,dc=com) - udm_group: name: g123m-1A subpath: 'cn=classes,cn=students,cn=groups' ou: school # or - udm_group: name: g123m-1A position: 'cn=classes,cn=students,cn=groups,ou=school,dc=school,dc=example,dc=com' ''' RETURN = '''# ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.univention_umc import ( umc_module_for_add, umc_module_for_edit, ldap_search, base_dn, ) def main(): module = AnsibleModule( argument_spec = dict( name = dict(required=True, type='str'), description = dict(default=None, type='str'), position = dict(default='', type='str'), ou = dict(default='', type='str'), subpath = dict(default='cn=groups', type='str'), state = dict(default='present', choices=['present', 'absent'], type='str') ), supports_check_mode=True ) name = module.params['name'] description = module.params['description'] position = module.params['position'] ou = module.params['ou'] subpath = module.params['subpath'] state = module.params['state'] changed = False groups = list(ldap_search( '(&(objectClass=posixGroup)(cn={}))'.format(name), attr=['cn'] )) if position != '': container = position else: if ou != '': ou = 'ou={},'.format(ou) if subpath != '': subpath = '{},'.format(subpath) container = '{}{}{}'.format(subpath, ou, base_dn()) group_dn = 'cn={},{}'.format(name, container) exists = bool(len(groups)) if state == 'present': try: if not exists: grp = umc_module_for_add('groups/group', container) else: grp = umc_module_for_edit('groups/group', group_dn) grp['name'] = name grp['description'] = description diff = grp.diff() changed = grp.diff() != [] if not module.check_mode: if not exists: grp.create() else: grp.modify() except: module.fail_json( msg="Creating/editing group {} in {} failed".format(name, container) ) if state == 'absent' and exists: try: grp = umc_module_for_edit('groups/group', group_dn) if not module.check_mode: grp.remove() changed = True except: module.fail_json( msg="Removing group {} failed".format(name) ) module.exit_json( changed=changed, name=name, diff=diff, container=container ) if __name__ == '__main__': main()
gpl-3.0
AlexHAHA/ardupilot
Tools/autotest/pysim/fg_display.py
229
1919
#!/usr/bin/env python
# Relay FlightGear FDM packets from one local UDP port to another,
# printing a rate summary and the current position roughly once per second.

import socket, struct, time, math, errno
import sys

from pymavlink import fgFDM

class udp_socket(object):
    '''a UDP socket'''
    def __init__(self, device, blocking=True, input=True):
        a = device.split(':')
        if len(a) != 2:
            print("UDP ports must be specified as host:port")
            sys.exit(1)
        self.port = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        if input:
            self.port.bind((a[0], int(a[1])))
            self.destination_addr = None
        else:
            self.destination_addr = (a[0], int(a[1]))
        if not blocking:
            self.port.setblocking(0)
        self.last_address = None

    def recv(self, n=1000):
        try:
            data, self.last_address = self.port.recvfrom(n)
        except socket.error as e:
            if e.errno in [errno.EAGAIN, errno.EWOULDBLOCK]:
                return ""
            raise
        return data

    def write(self, buf):
        try:
            if self.destination_addr:
                self.port.sendto(buf, self.destination_addr)
            else:
                self.port.sendto(buf, self.last_address)
        except socket.error:
            pass

# unit conversion helpers
def ft2m(x):
    return x * 0.3048

def m2ft(x):
    return x / 0.3048

def kt2mps(x):
    return x * 0.514444444

def mps2kt(x):
    return x / 0.514444444

udp = udp_socket("127.0.0.1:5123")
fgout = udp_socket("127.0.0.1:5124", input=False)

tlast = time.time()
count = 0

fg = fgFDM.fgFDM()

while True:
    buf = udp.recv(1000)
    fg.parse(buf)
    fgout.write(fg.pack())
    count += 1
    if time.time() - tlast > 1.0:
        print("%u FPS len=%u" % (count, len(buf)))
        count = 0
        tlast = time.time()
        print(fg.get('latitude', units='degrees'),
              fg.get('longitude', units='degrees'),
              fg.get('altitude', units='meters'),
              fg.get('vcas', units='mps'))
gpl-3.0
valkjsaaa/sl4a
python/src/Demo/tkinter/guido/dialog.py
47
3202
#! /usr/bin/env python # A Python function that generates dialog boxes with a text message, # optional bitmap, and any number of buttons. # Cf. Ousterhout, Tcl and the Tk Toolkit, Figs. 27.2-3, pp. 269-270. from Tkinter import * import sys def dialog(master, title, text, bitmap, default, *args): # 1. Create the top-level window and divide it into top # and bottom parts. w = Toplevel(master, class_='Dialog') w.title(title) w.iconname('Dialog') top = Frame(w, relief=RAISED, borderwidth=1) top.pack(side=TOP, fill=BOTH) bot = Frame(w, relief=RAISED, borderwidth=1) bot.pack(side=BOTTOM, fill=BOTH) # 2. Fill the top part with the bitmap and message. msg = Message(top, width='3i', text=text, font='-Adobe-Times-Medium-R-Normal-*-180-*') msg.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m') if bitmap: bm = Label(top, bitmap=bitmap) bm.pack(side=LEFT, padx='3m', pady='3m') # 3. Create a row of buttons at the bottom of the dialog. var = IntVar() buttons = [] i = 0 for but in args: b = Button(bot, text=but, command=lambda v=var,i=i: v.set(i)) buttons.append(b) if i == default: bd = Frame(bot, relief=SUNKEN, borderwidth=1) bd.pack(side=LEFT, expand=1, padx='3m', pady='2m') b.lift() b.pack (in_=bd, side=LEFT, padx='2m', pady='2m', ipadx='2m', ipady='1m') else: b.pack (side=LEFT, expand=1, padx='3m', pady='3m', ipadx='2m', ipady='1m') i = i+1 # 4. Set up a binding for <Return>, if there's a default, # set a grab, and claim the focus too. if default >= 0: w.bind('<Return>', lambda e, b=buttons[default], v=var, i=default: (b.flash(), v.set(i))) oldFocus = w.focus_get() w.grab_set() w.focus_set() # 5. Wait for the user to respond, then restore the focus # and return the index of the selected button. w.waitvar(var) w.destroy() if oldFocus: oldFocus.focus_set() return var.get() # The rest is the test program. def go(): i = dialog(mainWidget, 'Not Responding', "The file server isn't responding right now; " "I'll keep trying.", '', -1, 'OK') print 'pressed button', i i = dialog(mainWidget, 'File Modified', 'File "tcl.h" has been modified since ' 'the last time it was saved. ' 'Do you want to save it before exiting the application?', 'warning', 0, 'Save File', 'Discard Changes', 'Return To Editor') print 'pressed button', i def test(): import sys global mainWidget mainWidget = Frame() Pack.config(mainWidget) start = Button(mainWidget, text='Press Here To Start', command=go) start.pack() endit = Button(mainWidget, text="Exit", command=sys.exit) endit.pack(fill=BOTH) mainWidget.mainloop() if __name__ == '__main__': test()
apache-2.0
wfxiang08/django185
django/contrib/gis/geos/prototypes/geom.py
103
4450
from ctypes import POINTER, c_char_p, c_int, c_size_t, c_ubyte from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR from django.contrib.gis.geos.prototypes.errcheck import ( check_geom, check_minus_one, check_sized_string, check_string, check_zero, ) from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc # This is the return type used by binary output (WKB, HEX) routines. c_uchar_p = POINTER(c_ubyte) # We create a simple subclass of c_char_p here because when the response # type is set to c_char_p, you get a _Python_ string and there's no way # to access the string's address inside the error checking function. # In other words, you can't free the memory allocated inside GEOS. Previously, # the return type would just be omitted and the integer address would be # used -- but this allows us to be specific in the function definition and # keeps the reference so it may be free'd. class geos_char_p(c_char_p): pass # ### ctypes generation functions ### def bin_constructor(func): "Generates a prototype for binary construction (HEX, WKB) GEOS routines." func.argtypes = [c_char_p, c_size_t] func.restype = GEOM_PTR func.errcheck = check_geom return func # HEX & WKB output def bin_output(func): "Generates a prototype for the routines that return a sized string." func.argtypes = [GEOM_PTR, POINTER(c_size_t)] func.errcheck = check_sized_string func.restype = c_uchar_p return func def geom_output(func, argtypes): "For GEOS routines that return a geometry." if argtypes: func.argtypes = argtypes func.restype = GEOM_PTR func.errcheck = check_geom return func def geom_index(func): "For GEOS routines that return geometries from an index." return geom_output(func, [GEOM_PTR, c_int]) def int_from_geom(func, zero=False): "Argument is a geometry, return type is an integer." func.argtypes = [GEOM_PTR] func.restype = c_int if zero: func.errcheck = check_zero else: func.errcheck = check_minus_one return func def string_from_geom(func): "Argument is a Geometry, return type is a string." func.argtypes = [GEOM_PTR] func.restype = geos_char_p func.errcheck = check_string return func # ### ctypes prototypes ### # Deprecated creation routines from WKB, HEX, WKT from_hex = bin_constructor(GEOSFunc('GEOSGeomFromHEX_buf')) from_wkb = bin_constructor(GEOSFunc('GEOSGeomFromWKB_buf')) from_wkt = geom_output(GEOSFunc('GEOSGeomFromWKT'), [c_char_p]) # Deprecated output routines to_hex = bin_output(GEOSFunc('GEOSGeomToHEX_buf')) to_wkb = bin_output(GEOSFunc('GEOSGeomToWKB_buf')) to_wkt = string_from_geom(GEOSFunc('GEOSGeomToWKT')) # The GEOS geometry type, typeid, num_coordites and number of geometries geos_normalize = int_from_geom(GEOSFunc('GEOSNormalize')) geos_type = string_from_geom(GEOSFunc('GEOSGeomType')) geos_typeid = int_from_geom(GEOSFunc('GEOSGeomTypeId')) get_dims = int_from_geom(GEOSFunc('GEOSGeom_getDimensions'), zero=True) get_num_coords = int_from_geom(GEOSFunc('GEOSGetNumCoordinates')) get_num_geoms = int_from_geom(GEOSFunc('GEOSGetNumGeometries')) # Geometry creation factories create_point = geom_output(GEOSFunc('GEOSGeom_createPoint'), [CS_PTR]) create_linestring = geom_output(GEOSFunc('GEOSGeom_createLineString'), [CS_PTR]) create_linearring = geom_output(GEOSFunc('GEOSGeom_createLinearRing'), [CS_PTR]) # Polygon and collection creation routines are special and will not # have their argument types defined. 
create_polygon = geom_output(GEOSFunc('GEOSGeom_createPolygon'), None) create_collection = geom_output(GEOSFunc('GEOSGeom_createCollection'), None) # Ring routines get_extring = geom_output(GEOSFunc('GEOSGetExteriorRing'), [GEOM_PTR]) get_intring = geom_index(GEOSFunc('GEOSGetInteriorRingN')) get_nrings = int_from_geom(GEOSFunc('GEOSGetNumInteriorRings')) # Collection Routines get_geomn = geom_index(GEOSFunc('GEOSGetGeometryN')) # Cloning geom_clone = GEOSFunc('GEOSGeom_clone') geom_clone.argtypes = [GEOM_PTR] geom_clone.restype = GEOM_PTR # Destruction routine. destroy_geom = GEOSFunc('GEOSGeom_destroy') destroy_geom.argtypes = [GEOM_PTR] destroy_geom.restype = None # SRID routines geos_get_srid = GEOSFunc('GEOSGetSRID') geos_get_srid.argtypes = [GEOM_PTR] geos_get_srid.restype = c_int geos_set_srid = GEOSFunc('GEOSSetSRID') geos_set_srid.argtypes = [GEOM_PTR, c_int] geos_set_srid.restype = None
bsd-3-clause
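The Django GEOS prototype helpers above all follow the same ctypes recipe: set argtypes, restype and an errcheck callback on a function pointer pulled from a shared library. A minimal self-contained illustration of that recipe against the C math library (not GEOS, with a made-up error policy, and assuming a typical Linux or macOS system) might be:

import ctypes
import ctypes.util

libm = ctypes.CDLL(ctypes.util.find_library("m") or "libm.so.6")

def check_finite(result, func, args):
    # errcheck hook: receives the raw result plus the call context and may
    # transform it or raise; the GEOS prototypes use this for NULL/-1 checks.
    if result != result:  # NaN
        raise ValueError("%s%r returned NaN" % (func.__name__, args))
    return result

sqrt = libm.sqrt
sqrt.argtypes = [ctypes.c_double]
sqrt.restype = ctypes.c_double
sqrt.errcheck = check_finite

print(sqrt(2.0))   # 1.4142...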
mlperf/training_results_v0.5
v0.5.0/google/cloud_v2.8/gnmt-tpuv2-8/code/gnmt/model/t2t/tensor2tensor/data_generators/vqa_utils.py
3
8144
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities for VQA data sets.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow.python.ops import control_flow_ops # some functions are copied and modified from # vgg_preprocessing and inception_preprocessing in # models/research/slim/preprocessing/ _R_MEAN = 123.68 _G_MEAN = 116.78 _B_MEAN = 103.94 def _smallest_size_at_least(height, width, smallest_side): """Computes new shape with the smallest side equal to `smallest_side`. Computes new shape with the smallest side equal to `smallest_side` while preserving the original aspect ratio. Args: height: an int32 scalar tensor indicating the current height. width: an int32 scalar tensor indicating the current width. smallest_side: A python integer or scalar `Tensor` indicating the size of the smallest side after resize. Returns: new_height: an int32 scalar tensor indicating the new height. new_width: and int32 scalar tensor indicating the new width. """ smallest_side = tf.convert_to_tensor(smallest_side, dtype=tf.int32) height = tf.to_float(height) width = tf.to_float(width) smallest_side = tf.to_float(smallest_side) scale = tf.cond( tf.greater(height, width), lambda: smallest_side / width, lambda: smallest_side / height) new_height = tf.to_int32(height * scale) new_width = tf.to_int32(width * scale) return new_height, new_width def _aspect_preserving_resize(image, smallest_side): """Resize images preserving the original aspect ratio. Args: image: A 3-D image `Tensor`. smallest_side: A python integer or scalar `Tensor` indicating the size of the smallest side after resize. Returns: resized_image: A 3-D tensor containing the resized image. """ smallest_side = tf.convert_to_tensor(smallest_side, dtype=tf.int32) shape = tf.shape(image) height = shape[0] width = shape[1] new_height, new_width = _smallest_size_at_least(height, width, smallest_side) image = tf.expand_dims(image, 0) resized_image = tf.image.resize_images( image, size=[new_height, new_width], method=tf.image.ResizeMethod.BICUBIC) resized_image = tf.squeeze(resized_image) resized_image.set_shape([None, None, 3]) return resized_image def _flip(image): """Random horizontal image flip.""" image = tf.image.random_flip_left_right(image) return image def _distort_color(image, color_ordering=0, scope=None): """Distort the color of a Tensor image. Each color distortion is non-commutative and thus ordering of the color ops matters. Ideally we would randomly permute the ordering of the color ops. Rather then adding that level of complication, we select a distinct ordering of color ops for each preprocessing thread. Args: image: 3-D Tensor containing single image in [0, 1]. color_ordering: Python int, a type of distortion (valid values: 0-3). scope: Optional scope for name_scope. 
Returns: 3-D Tensor color-distorted image on range [0, 1] Raises: ValueError: if color_ordering not in [0, 3] """ with tf.name_scope(scope, "distort_color", [image]): if color_ordering == 0: image = tf.image.random_brightness(image, max_delta=32. / 255.) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_hue(image, max_delta=0.2) image = tf.image.random_contrast(image, lower=0.5, upper=1.5) elif color_ordering == 1: image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_brightness(image, max_delta=32. / 255.) image = tf.image.random_contrast(image, lower=0.5, upper=1.5) image = tf.image.random_hue(image, max_delta=0.2) elif color_ordering == 2: image = tf.image.random_contrast(image, lower=0.5, upper=1.5) image = tf.image.random_hue(image, max_delta=0.2) image = tf.image.random_brightness(image, max_delta=32. / 255.) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) elif color_ordering == 3: image = tf.image.random_hue(image, max_delta=0.2) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_contrast(image, lower=0.5, upper=1.5) image = tf.image.random_brightness(image, max_delta=32. / 255.) else: raise ValueError("color_ordering must be in [0, 3]") # The random_* ops do not necessarily clamp. return tf.clip_by_value(image, 0.0, 1.0) def _apply_with_random_selector(x, func, num_cases): """Computes func(x, sel), with sel sampled from [0...num_cases-1]. Args: x: input Tensor. func: Python function to apply. num_cases: Python int32, number of cases to sample sel from. Returns: The result of func(x, sel), where func receives the value of the selector as a python integer, but sel is sampled dynamically. """ sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32) # Pass the real x only to one of the func calls. return control_flow_ops.merge([ func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case) for case in range(num_cases) ])[0] def _mean_image_subtraction(image, means): """Subtracts the given means from each image channel. For example: means = [123.68, 116.779, 103.939] image = _mean_image_subtraction(image, means) Note that the rank of `image` must be known. Args: image: a tensor of size [height, width, C]. means: a C-vector of values to subtract from each channel. Returns: the centered image. Raises: ValueError: If the rank of `image` is unknown, if `image` has a rank other than three or if the number of channels in `image` doesn't match the number of values in `means`. """ if image.get_shape().ndims != 3: raise ValueError("Input must be of size [height, width, C>0]") num_channels = image.get_shape().as_list()[-1] if len(means) != num_channels: raise ValueError("len(means) must match the number of channels") channels = tf.split(axis=2, num_or_size_splits=num_channels, value=image) for i in range(num_channels): channels[i] -= means[i] return tf.concat(axis=2, values=channels) def vqa_v2_preprocess_image( image, height, width, mode, resize_side=512, distort=True, image_model_fn="resnet_v1_152", ): """vqa v2 preprocess image.""" image = tf.image.convert_image_dtype(image, dtype=tf.float32) assert resize_side > 0 if resize_side: image = _aspect_preserving_resize(image, resize_side) if mode == tf.estimator.ModeKeys.TRAIN: image = tf.random_crop(image, [height, width, 3]) else: # Central crop, assuming resize_height > height, resize_width > width. 
image = tf.image.resize_image_with_crop_or_pad(image, height, width) image = tf.clip_by_value(image, 0.0, 1.0) if mode == tf.estimator.ModeKeys.TRAIN and distort: image = _flip(image) num_distort_cases = 4 # pylint: disable=unnecessary-lambda image = _apply_with_random_selector( image, lambda x, ordering: _distort_color(x, ordering), num_cases=num_distort_cases) if image_model_fn.startswith("resnet_v1"): # resnet_v1 uses vgg preprocessing image = image * 255. image = _mean_image_subtraction(image, [_R_MEAN, _G_MEAN, _B_MEAN]) elif image_model_fn.startswith("resnet_v2"): # resnet v2 uses inception preprocessing image = tf.subtract(image, 0.5) image = tf.multiply(image, 2.0) return image
apache-2.0
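The _smallest_size_at_least logic in the vqa_utils record above is just proportional scaling of whichever side is smaller. The same arithmetic in plain Python, useful as a quick sanity check of what the TensorFlow graph computes, could read:

def smallest_size_at_least(height, width, smallest_side):
    # Scale so the smaller dimension becomes smallest_side while preserving
    # the aspect ratio (mirrors the tf.cond above); int() truncates like tf.to_int32.
    scale = (smallest_side / float(width) if height > width
             else smallest_side / float(height))
    return int(height * scale), int(width * scale)

print(smallest_size_at_least(480, 640, 512))   # -> (512, 682)
print(smallest_size_at_least(800, 600, 512))   # -> (682, 512)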
seancug/python-example
fatiando-0.2/cookbook/seismic_srtomo_smooth.py
1
2303
""" Seismic: 2D straight-ray tomography using smoothness regularization Uses synthetic data and a model generated from an image file. """ import urllib from os import path import numpy from fatiando.mesher import SquareMesh from fatiando.seismic import ttime2d, srtomo from fatiando.inversion.regularization import Smoothness2D from fatiando.vis import mpl from fatiando import utils area = (0, 500000, 0, 500000) shape = (30, 30) model = SquareMesh(area, shape) # Fetch the image from the online docs urllib.urlretrieve( 'http://fatiando.readthedocs.org/en/latest/_static/logo.png', 'logo.png') model.img2prop('logo.png', 4000, 10000, 'vp') # Make some travel time data and add noise seed = 0 # Set the random seed so that points are the same everythime src_loc = utils.random_points(area, 80, seed=seed) rec_loc = utils.circular_points(area, 30, random=True, seed=seed) srcs, recs = utils.connect_points(src_loc, rec_loc) tts = ttime2d.straight(model, 'vp', srcs, recs) tts, error = utils.contaminate(tts, 0.01, percent=True, return_stddev=True) # Make the mesh mesh = SquareMesh(area, shape) # and run the inversion tomo = srtomo.SRTomo(tts, srcs, recs, mesh) + 10**6*Smoothness2D(mesh.shape) estimate = tomo.fit().estimate_ residuals = tomo.residuals() mesh.addprop('vp', estimate) # Calculate and print the standard deviation of the residuals # it should be close to the data error if the inversion was able to fit the data print "Assumed error: %g" % (error) print "Standard deviation of residuals: %g" % (numpy.std(residuals)) mpl.figure(figsize=(14, 5)) mpl.subplot(1, 2, 1) mpl.axis('scaled') mpl.title('Vp synthetic model of the Earth') mpl.squaremesh(model, prop='vp', cmap=mpl.cm.seismic) cb = mpl.colorbar() cb.set_label('Velocity') mpl.points(src_loc, '*y', label="Sources") mpl.points(rec_loc, '^r', label="Receivers") mpl.legend(loc='lower left', shadow=True, numpoints=1, prop={'size':10}) mpl.m2km() mpl.subplot(1, 2, 2) mpl.axis('scaled') mpl.title('Tomography result (smoothed)') mpl.squaremesh(mesh, prop='vp', vmin=4000, vmax=10000, cmap=mpl.cm.seismic) cb = mpl.colorbar() cb.set_label('Velocity') mpl.m2km() mpl.figure() mpl.grid() mpl.title('Residuals (data with %.4f s error)' % (error)) mpl.hist(residuals, color='gray', bins=10) mpl.xlabel("seconds") mpl.show()
gpl-2.0
alander/StarCluster
starcluster/cli.py
16
13164
# Copyright 2009-2014 Justin Riley # # This file is part of StarCluster. # # StarCluster is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # StarCluster is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with StarCluster. If not, see <http://www.gnu.org/licenses/>. """ StarCluster Command Line Interface: starcluster [global-opts] action [action-opts] [<action-args> ...] """ import os import sys import shlex import socket import optparse import platform from boto.exception import BotoServerError, EC2ResponseError, S3ResponseError from starcluster import config from starcluster import static from starcluster import logger from starcluster import commands from starcluster import exception from starcluster import completion from starcluster.logger import log, console from starcluster import __version__ __description__ = """ StarCluster - (http://star.mit.edu/cluster) (v. %s) Software Tools for Academics and Researchers (STAR) Please submit bug reports to [email protected] """ % __version__ class StarClusterCLI(object): """ StarCluster Command Line Interface """ def __init__(self): self._gparser = None self.subcmds_map = {} @property def gparser(self): if not self._gparser: self._gparser = self.create_global_parser() return self._gparser def print_header(self): print >> sys.stderr, __description__.replace('\n', '', 1) def parse_subcommands(self, gparser=None): """ Parse global arguments, find subcommand from list of subcommand objects, parse local subcommand arguments and return a tuple of global options, selected command object, command options, and command arguments. Call execute() on the command object to run. The command object has members 'gopts' and 'opts' set for global and command options respectively, you don't need to call execute with those but you could if you wanted to. """ gparser = gparser or self.gparser # parse global options. gopts, args = gparser.parse_args() if not args: gparser.print_help() raise SystemExit("\nError: you must specify an action.") # set debug level if specified if gopts.DEBUG: console.setLevel(logger.DEBUG) config.DEBUG_CONFIG = True # load StarClusterConfig into global options try: cfg = config.StarClusterConfig(gopts.CONFIG) cfg.load() except exception.ConfigNotFound, e: log.error(e.msg) e.display_options() sys.exit(1) except exception.ConfigError, e: log.error(e.msg) sys.exit(1) gopts.CONFIG = cfg # Parse command arguments and invoke command. 
subcmdname, subargs = args[0], args[1:] try: sc = self.subcmds_map[subcmdname] lparser = optparse.OptionParser(sc.__doc__.strip()) sc.gopts = gopts sc.parser = lparser sc.gparser = gparser sc.subcmds_map = self.subcmds_map sc.addopts(lparser) sc.opts, subsubargs = lparser.parse_args(subargs) except KeyError: raise SystemExit("Error: invalid command '%s'" % subcmdname) return gopts, sc, sc.opts, subsubargs def create_global_parser(self, subcmds=None, no_usage=False, add_help=True): if no_usage: gparser = optparse.OptionParser(usage=optparse.SUPPRESS_USAGE, add_help_option=add_help) else: gparser = optparse.OptionParser(__doc__.strip(), version=__version__, add_help_option=add_help) # Build map of name -> command and docstring. cmds_header = 'Available Commands:' gparser.usage += '\n\n%s\n' % cmds_header gparser.usage += '%s\n' % ('-' * len(cmds_header)) gparser.usage += "NOTE: Pass --help to any command for a list of " gparser.usage += 'its options and detailed usage information\n\n' subcmds = subcmds or commands.all_cmds for sc in subcmds: helptxt = sc.__doc__.splitlines()[3].strip() gparser.usage += '- %s: %s\n' % (', '.join(sc.names), helptxt) for n in sc.names: assert n not in self.subcmds_map self.subcmds_map[n] = sc gparser.add_option("-d", "--debug", dest="DEBUG", action="store_true", default=False, help="print debug messages (useful for " "diagnosing problems)") gparser.add_option("-c", "--config", dest="CONFIG", action="store", metavar="FILE", help="use alternate config file (default: %s)" % static.STARCLUSTER_CFG_FILE) gparser.add_option("-r", "--region", dest="REGION", action="store", help="specify a region to use (default: us-east-1)") gparser.disable_interspersed_args() return gparser def __write_module_version(self, modname, fp): """ Write module version information to a file """ try: mod = __import__(modname) fp.write("%s: %s\n" % (mod.__name__, mod.__version__)) except Exception, e: print "error getting version for '%s' module: %s" % (modname, e) def bug_found(self): """ Builds a crash-report when StarCluster encounters an unhandled exception. Report includes system info, python version, dependency versions, and a full debug log and stack-trace of the crash. """ dashes = '-' * 10 header = dashes + ' %s ' + dashes + '\n' crashfile = open(static.CRASH_FILE, 'w') argv = sys.argv[:] argv[0] = os.path.basename(argv[0]) argv = ' '.join(argv) crashfile.write(header % "SYSTEM INFO") crashfile.write("StarCluster: %s\n" % __version__) crashfile.write("Python: %s\n" % sys.version.replace('\n', ' ')) crashfile.write("Platform: %s\n" % platform.platform()) dependencies = ['boto', 'paramiko', 'Crypto'] for dep in dependencies: self.__write_module_version(dep, crashfile) crashfile.write("\n" + header % "CRASH DETAILS") crashfile.write('Command: %s\n\n' % argv) for line in logger.get_session_log(): crashfile.write(line) crashfile.close() print log.error("Oops! Looks like you've found a bug in StarCluster") log.error("Crash report written to: %s" % static.CRASH_FILE) log.error("Please remove any sensitive data from the crash report") log.error("and submit it to [email protected]") sys.exit(1) def get_global_opts(self): """ Parse and return global options. This method will silently return None if any errors are encountered during parsing. 
""" gparser = self.create_global_parser(no_usage=True, add_help=False) try: sys.stdout = open(os.devnull, 'w') sys.stderr = open(os.devnull, 'w') gopts, _ = gparser.parse_args() return gopts except SystemExit: pass finally: sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ def is_completion_active(self): return 'OPTPARSE_AUTO_COMPLETE' in os.environ def _init_completion(self): """ Restore original sys.argv from COMP_LINE in the case that starcluster is being called by Bash/ZSH for completion options. Bash/ZSH will simply call 'starcluster' with COMP_LINE environment variable set to the current (partial) argv for completion. StarCluster's Bash/ZSH completion code needs to read the global config option in case an alternate config is specified at the command line when completing options. StarCluster's completion code uses the config to generate completion options. Setting sys.argv to $COMP_LINE in this case allows the global option parser to be used to extract the global -c option (if specified) and load the proper config in the completion code. """ if 'COMP_LINE' in os.environ: newargv = shlex.split(os.environ.get('COMP_LINE')) for i, arg in enumerate(newargv): arg = os.path.expanduser(arg) newargv[i] = os.path.expandvars(arg) sys.argv = newargv def handle_completion(self): if self.is_completion_active(): gparser = self.create_global_parser(no_usage=True, add_help=False) # set sys.path to COMP_LINE if it exists self._init_completion() # fetch the global options gopts = self.get_global_opts() # try to load StarClusterConfig into global options if gopts: try: cfg = config.StarClusterConfig(gopts.CONFIG) cfg.load() except exception.ConfigError: cfg = None gopts.CONFIG = cfg scmap = {} for sc in commands.all_cmds: sc.gopts = gopts for n in sc.names: scmap[n] = sc listcter = completion.ListCompleter(scmap.keys()) subcter = completion.NoneCompleter() completion.autocomplete(gparser, listcter, None, subcter, subcommands=scmap) sys.exit(1) def main(self): """ StarCluster main """ # Handle Bash/ZSH completion if necessary self.handle_completion() # Show StarCluster header self.print_header() # Parse subcommand options and args gopts, sc, opts, args = self.parse_subcommands() if args and args[0] == 'help': # make 'help' subcommand act like --help option sc.parser.print_help() sys.exit(0) # run the subcommand and handle exceptions try: sc.execute(args) except (EC2ResponseError, S3ResponseError, BotoServerError), e: log.error("%s: %s" % (e.error_code, e.error_message), exc_info=True) sys.exit(1) except socket.error, e: log.exception("Connection error:") log.error("Check your internet connection?") sys.exit(1) except exception.ThreadPoolException, e: log.error(e.format_excs()) self.bug_found() except exception.ClusterDoesNotExist, e: cm = gopts.CONFIG.get_cluster_manager() cls = '' try: cls = cm.get_clusters(load_plugins=False, load_receipt=False) except: log.debug("Error fetching cluster list", exc_info=True) log.error(e.msg) if cls: taglist = ', '.join([c.cluster_tag for c in cls]) active_clusters = "(active clusters: %s)" % taglist log.error(active_clusters) sys.exit(1) except exception.BaseException, e: log.error(e.msg, extra={'__textwrap__': True}) log.debug(e.msg, exc_info=True) sys.exit(1) except SystemExit: # re-raise SystemExit to avoid the bug-catcher below raise except Exception: log.error("Unhandled exception occured", exc_info=True) self.bug_found() def warn_debug_file_moved(): old_file = os.path.join(static.TMP_DIR, 'starcluster-debug-%s.log' % static.CURRENT_USER) if 
os.path.exists(old_file): stars = '*' * 50 log.warn(stars) log.warn("The default log file location is now:") log.warn("") log.warn(static.DEBUG_FILE) log.warn("") log.warn("Please delete or move the old log file located at:") log.warn("") log.warn(old_file) log.warn(stars) def main(): try: static.create_sc_config_dirs() logger.configure_sc_logging() warn_debug_file_moved() StarClusterCLI().main() except KeyboardInterrupt: print "Interrupted, exiting." sys.exit(1) if __name__ == '__main__': main()
gpl-3.0
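The StarCluster CLI above parses options in two passes: a global optparse parser with interspersed arguments disabled, then a per-subcommand parser applied to the remaining arguments. The toy sketch below shows that two-level pattern on a hardcoded argument list; the option names and the "start" subcommand are made up for illustration and are not StarCluster's real options.

import optparse

# Global parser: stops at the first positional argument (the subcommand name).
gparser = optparse.OptionParser(add_help_option=False)
gparser.add_option("-d", "--debug", action="store_true", default=False)
gparser.disable_interspersed_args()

# One parser per subcommand, keyed by name.
cmd_parsers = {"start": optparse.OptionParser(add_help_option=False)}
cmd_parsers["start"].add_option("-s", "--size", type="int", default=1)

argv = ["-d", "start", "-s", "3", "mycluster"]   # stand-in for sys.argv[1:]
gopts, rest = gparser.parse_args(argv)
subcmd, subargs = rest[0], rest[1:]
opts, args = cmd_parsers[subcmd].parse_args(subargs)
print(gopts.debug, subcmd, opts.size, args)      # True start 3 ['mycluster']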
0x7F800000/gcc
gcc/ada/doc/share/conf.py
69
3588
# -*- coding: utf-8 -*- # # GNAT build configuration file import sys import os import time import re sys.path.append('.') import ada_pygments import latex_elements # Some configuration values for the various documentation handled by # this conf.py DOCS = { 'gnat_rm': { 'title': u'GNAT Reference Manual'}, 'gnat_ugn': { 'title': u'GNAT User\'s Guide for Native Platforms'}} # Then retrieve the source directory root_source_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) gnatvsn_spec = os.path.join(root_source_dir, '..', 'gnatvsn.ads') basever = os.path.join(root_source_dir, '..', '..', 'BASE-VER') texi_fsf = True # Set to False when FSF doc is switched to sphinx by default with open(gnatvsn_spec, 'rb') as fd: gnatvsn_content = fd.read() def get_copyright(): return u'2008-%s, Free Software Foundation' % time.strftime('%Y') def get_gnat_version(): m = re.search(r'Gnat_Static_Version_String : ' + r'constant String := "([^\(\)]+)\(.*\)?";', gnatvsn_content) if m: return m.group(1).strip() else: if texi_fsf and os.path.exists(basever): return '' try: with open(basever, 'rb') as fd: return fd.read() except: pass print 'cannot find GNAT version in gnatvsn.ads or in ' + basever sys.exit(1) def get_gnat_build_type(): m = re.search(r'Build_Type : constant Gnat_Build_Type := (.+);', gnatvsn_content) if m: return {'Gnatpro': 'PRO', 'FSF': 'FSF', 'GPL': 'GPL'}[m.group(1).strip()] else: print 'cannot compute GNAT build type' sys.exit(1) # First retrieve the name of the documentation we are building doc_name = os.environ.get('DOC_NAME', None) if doc_name is None: print 'DOC_NAME environment variable should be set' sys.exit(1) if doc_name not in DOCS: print '%s is not a valid documentation name' % doc_name sys.exit(1) # Exclude sources that are not part of the current documentation exclude_patterns = [] for d in os.listdir(root_source_dir): if d not in ('share', doc_name, doc_name + '.rst'): exclude_patterns.append(d) print 'ignoring %s' % d if doc_name == 'gnat_rm': exclude_patterns.append('share/gnat_project_manager.rst') print 'ignoring share/gnat_project_manager.rst' extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = doc_name # General information about the project. project = DOCS[doc_name]['title'] copyright = get_copyright() version = get_gnat_version() release = get_gnat_version() pygments_style = 'sphinx' tags.add(get_gnat_build_type()) html_theme = 'sphinxdoc' if os.path.isfile('adacore_transparent.png'): html_logo = 'adacore_transparent.png' if os.path.isfile('favicon.ico'): html_favicon = 'favicon.ico' html_static_path = ['_static'] latex_elements = { 'preamble': latex_elements.TOC_DEPTH + latex_elements.PAGE_BLANK + latex_elements.TOC_CMD + latex_elements.LATEX_HYPHEN + latex_elements.doc_settings(DOCS[doc_name]['title'], get_gnat_version()), 'tableofcontents': latex_elements.TOC} latex_documents = [ (master_doc, '%s.tex' % doc_name, project, u'AdaCore', 'manual')] texinfo_documents = [ (master_doc, doc_name, project, u'AdaCore', doc_name, doc_name, '')] def setup(app): app.add_lexer('ada', ada_pygments.AdaLexer()) app.add_lexer('gpr', ada_pygments.GNATProjectLexer())
gpl-2.0
jalavik/invenio
invenio/modules/search/receivers.py
16
3296
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2013, 2014, 2015 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Define custom action handlers.""" import os from flask import flash, g, current_app from logging import Formatter, getLogger, FileHandler from six import iteritems from werkzeug.local import LocalProxy def get_logger(): """Get search logger.""" logger = getattr(g, 'search_logger', None) if logger is None: handler = FileHandler( os.path.join(current_app.config['CFG_LOGDIR'], 'search.log'), delay=True ) logger = getLogger('invenio.search') formatter = Formatter('{asctime}#{action}#{p}#{f}#{colls}#{total}', datefmt='%Y%m%d%H%M%S', style='{') handler.setFormatter(formatter) logger.addHandler(handler) g.search_logger = logger return logger logger = LocalProxy(get_logger) def websearch_before_browse_handler(collection, **kwargs): """Flash message before browsing handler is called.""" # keys = ['p', 'p1', 'p2', 'p3', 'f', 'f1', 'f2', 'f3', 'rm', 'cc', 'ln', # 'jrec', 'rg', 'aas', 'action'] # kwargs = dict(filter(lambda (k, v): k in keys, iteritems(kwargs))) # if kwargs.get('action', '') == 'browse': # if msg and len(msg) > 0: # flash(_("Did you mean to browse in %{x_index_name} index?", # url), 'websearch-after-search-form') def after_search(app, **kwargs): """Log user query after search.""" from .models import UserQuery UserQuery.log() logger.info(extra=kwargs) def after_insert_user_query(): """Flash message after user query is logged.""" # of = request.values.get('of', 'hb') # if of.startswith("h") and (em == '' or EM_REPOSITORY["alert"] in em): # if not of in ['hcs', 'hcs2']: # # display alert/RSS teaser for non-summary formats: # display_email_alert_part = True # if current_user: # if current_user['email'] == 'guest': # if CFG_ACCESS_CONTROL_LEVEL_ACCOUNTS > 4: # display_email_alert_part = False # else: # if not current_user['precached_usealerts']: # display_email_alert_part = False # from flask import flash # flash(websearch_templates.tmpl_alert_rss_teaser_box_for_query( # id_query, # ln=ln, # display_email_alert_part=display_email_alert_part), # 'search-results-after') pass
gpl-2.0
thnee/ansible
lib/ansible/modules/network/aci/aci_interface_selector_to_switch_policy_leaf_profile.py
13
7713
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Bruno Calogero <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = r''' --- module: aci_interface_selector_to_switch_policy_leaf_profile short_description: Bind interface selector profiles to switch policy leaf profiles (infra:RsAccPortP) description: - Bind interface selector profiles to switch policy leaf profiles on Cisco ACI fabrics. version_added: '2.5' options: leaf_profile: description: - Name of the Leaf Profile to which we add a Selector. type: str aliases: [ leaf_profile_name ] interface_selector: description: - Name of Interface Profile Selector to be added and associated with the Leaf Profile. type: str aliases: [ name, interface_selector_name, interface_profile_name ] state: description: - Use C(present) or C(absent) for adding or removing. - Use C(query) for listing an object or multiple objects. type: str choices: [ absent, present, query ] default: present extends_documentation_fragment: aci notes: - This module requires an existing leaf profile, the module M(aci_switch_policy_leaf_profile) can be used for this. seealso: - module: aci_switch_policy_leaf_profile - name: APIC Management Information Model reference description: More information about the internal APIC class B(infra:RsAccPortP). link: https://developer.cisco.com/docs/apic-mim-ref/ author: - Bruno Calogero (@brunocalogero) ''' EXAMPLES = r''' - name: Associating an interface selector profile to a switch policy leaf profile aci_interface_selector_to_switch_policy_leaf_profile: host: apic username: admin password: SomeSecretPassword leaf_profile: sw_name interface_selector: interface_profile_name state: present delegate_to: localhost - name: Remove an interface selector profile associated with a switch policy leaf profile aci_interface_selector_to_switch_policy_leaf_profile: host: apic username: admin password: SomeSecretPassword leaf_profile: sw_name interface_selector: interface_profile_name state: absent delegate_to: localhost - name: Query an interface selector profile associated with a switch policy leaf profile aci_interface_selector_to_switch_policy_leaf_profile: host: apic username: admin password: SomeSecretPassword leaf_profile: sw_name interface_selector: interface_profile_name state: query delegate_to: localhost register: query_result ''' RETURN = r''' current: description: The existing configuration from the APIC after the module has finished returned: success type: list sample: [ { "fvTenant": { "attributes": { "descr": "Production environment", "dn": "uni/tn-production", "name": "production", "nameAlias": "", "ownerKey": "", "ownerTag": "" } } } ] error: description: The error information as returned from the APIC returned: failure type: dict sample: { "code": "122", "text": "unknown managed object class foo" } raw: description: The raw output returned by the APIC REST API (xml or json) returned: parse error type: str sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>' sent: description: The actual/minimal configuration pushed to the APIC returned: info type: list sample: { "fvTenant": { "attributes": { "descr": "Production environment" } } } previous: description: The original configuration 
from the APIC before the module has started returned: info type: list sample: [ { "fvTenant": { "attributes": { "descr": "Production", "dn": "uni/tn-production", "name": "production", "nameAlias": "", "ownerKey": "", "ownerTag": "" } } } ] proposed: description: The assembled configuration from the user-provided parameters returned: info type: dict sample: { "fvTenant": { "attributes": { "descr": "Production environment", "name": "production" } } } filter_string: description: The filter string used for the request returned: failure or debug type: str sample: ?rsp-prop-include=config-only method: description: The HTTP method used for the request to the APIC returned: failure or debug type: str sample: POST response: description: The HTTP response from the APIC returned: failure or debug type: str sample: OK (30 bytes) status: description: The HTTP status from the APIC returned: failure or debug type: int sample: 200 url: description: The HTTP url used for the request to the APIC returned: failure or debug type: str sample: https://10.11.12.13/api/mo/uni/tn-production.json ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec def main(): argument_spec = aci_argument_spec() argument_spec.update( leaf_profile=dict(type='str', aliases=['leaf_profile_name']), # Not required for querying all objects interface_selector=dict(type='str', aliases=['interface_profile_name', 'interface_selector_name', 'name']), # Not required for querying all objects state=dict(type='str', default='present', choices=['absent', 'present', 'query']) ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, required_if=[ ['state', 'absent', ['leaf_profile', 'interface_selector']], ['state', 'present', ['leaf_profile', 'interface_selector']] ], ) leaf_profile = module.params.get('leaf_profile') # WARNING: interface_selector accepts non existing interface_profile names and they appear on APIC gui with a state of "missing-target" interface_selector = module.params.get('interface_selector') state = module.params.get('state') # Defining the interface profile tDn for clarity interface_selector_tDn = 'uni/infra/accportprof-{0}'.format(interface_selector) aci = ACIModule(module) aci.construct_url( root_class=dict( aci_class='infraNodeP', aci_rn='infra/nprof-{0}'.format(leaf_profile), module_object=leaf_profile, target_filter={'name': leaf_profile}, ), subclass_1=dict( aci_class='infraRsAccPortP', aci_rn='rsaccPortP-[{0}]'.format(interface_selector_tDn), module_object=interface_selector, target_filter={'name': interface_selector}, ) ) aci.get_existing() if state == 'present': aci.payload( aci_class='infraRsAccPortP', class_config=dict(tDn=interface_selector_tDn), ) aci.get_diff(aci_class='infraRsAccPortP') aci.post_config() elif state == 'absent': aci.delete_config() aci.exit_json() if __name__ == "__main__": main()
gpl-3.0
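As a quick illustration of what the module above assembles for the infra:RsAccPortP binding, the sketch below reproduces the DN and payload layout outside of Ansible. It is only a hedged approximation: the profile names are the placeholder values from the EXAMPLES section, and the way the final URL is composed from the relative names is an assumption about how ACIModule.construct_url() joins them, not code taken from that library.

# Hypothetical, standalone sketch -- not part of the module above.
leaf_profile = 'sw_name'                        # placeholder leaf profile name
interface_selector = 'interface_profile_name'   # placeholder interface selector

# tDn of the interface profile, as built in main() above
interface_selector_tdn = 'uni/infra/accportprof-{0}'.format(interface_selector)

# Assumed DN composition: root RN 'infra/nprof-...' plus child RN 'rsaccPortP-[...]'
dn = 'uni/infra/nprof-{0}/rsaccPortP-[{1}]'.format(leaf_profile, interface_selector_tdn)

# Payload the module would diff against the existing config and POST for state=present
payload = {'infraRsAccPortP': {'attributes': {'tDn': interface_selector_tdn}}}

print(dn)       # uni/infra/nprof-sw_name/rsaccPortP-[uni/infra/accportprof-interface_profile_name]
print(payload)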
zincumyx/Mammoth
mammoth-src/build/contrib/hod/hodlib/Common/types.py
182
43056
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.

# $Id:types.py 6172 2007-05-22 20:26:54Z zim $
#
#------------------------------------------------------------------------------

"""Higher level data types and type related classes.

Supported Types (Verification and Display):

  address      - validates ip:port and host:port tcp addresses
  ip_address   - validates an IP address
  net_address  - validates an IP like address, ie netmask
  hostname     - validates a hostname with DNS
  eaddress     - validates a single email address or a comma separated list of email addresses
  http_version - validates a value is an http version (1.0/1.1)
  tcp_port     - validates a value to be a valid tcp port (2-65535)
  bool         - validates value is (0, 1, true, false) / converts true -> 1 and false -> 0
  directory    - validates a value is a directory / resolves path to absolute path
  file         - validates a value is a file / resolves path to absolute path
  float        - validates a value is a float, converts string to float
  pos_float    - validates a value is a float and >= 0, converts string to float
  pos_num      - same as pos_float
  neg_float    - validates a value is a float and < 0, converts string to float
  int          - validates a value is an integer, converts string to integer
  pos_int      - validates a value is an integer and >= 0, converts string to integer
  neg_int      - validates a value is an integer and < 0, converts string to integer
  freq         - frequency, positive integer
  size         - validates a size in bytes, kb, mb, gb, and tb (int > 0 postfixed with K, M, G, or T), also converts value to integer bytes
  range        - numeric range, x-y normalized to a tuple, if a single number is supplied a single element tuple is returned
  timestamp    - utc timestamp of the form YYYYMMDDHHMMSS
  user_account - UNIX user account name
  user_group   - UNIX group name
  string       - arbitrarily long string
  list         - comma separated list of strings of arbitrary length,
  keyval       - comma separated list of key=value pairs, key does not need to be unique.
uri - a uri """ import sys, os, socket, pwd, grp, stat, re, re, string, pprint, urlparse from tcp import tcpSocket, check_net_address, check_ip_address from util import check_timestamp types = { 'directory' : { 'db' : 'string', 'units' : None }, 'address' : { 'db' : 'string', 'units' : None }, 'ip_address' : { 'db' : 'string', 'units' : None }, 'net_address' : { 'db' : 'string', 'units' : None }, 'bool' : { 'db' : 'bool', 'units' : None }, 'int' : { 'db' : 'integer', 'units' : None }, 'float' : { 'db' : 'float', 'units' : None }, 'pos_int' : { 'db' : 'integer', 'units' : None }, 'neg_int' : { 'db' : 'integer', 'units' : None }, 'pos_num' : { 'db' : 'float', 'units' : None }, 'pos_float' : { 'db' : 'float', 'units' : None }, 'neg_float' : { 'db' : 'float', 'units' : None }, 'string' : { 'db' : 'string', 'units' : None }, 'list' : { 'db' : 'string', 'units' : None }, 'file' : { 'db' : 'string', 'units' : None }, 'size' : { 'db' : 'integer', 'units' : 'bytes' }, 'freq' : { 'db' : 'integer', 'units' : 'hz' }, 'eaddress' : { 'db' : 'string', 'units' : None }, 'tcp_port' : { 'db' : 'integer', 'units' : None }, 'http_version' : { 'db' : 'float', 'units' : None }, 'range' : { 'db' : 'string', 'units' : None }, 'hostname' : { 'db' : 'string', 'units' : None }, 'user_account' : { 'db' : 'string', 'units' : None }, 'user_group' : { 'db' : 'string', 'units' : None }, 'timestamp' : { 'db' : 'timestamp', 'units' : None }, 'keyval' : { 'db' : 'string', 'units' : None }, 'uri' : { 'db' : 'string', 'units' : None }, '' : { 'db' : 'string', 'units' : None }} dbTypes = { 'string' : { 'type' : 'varchar', 'store' : 'type_strings_0', 'table' : True }, 'integer' : { 'type' : 'bigint', 'store' : 'integers', 'table' : False }, 'float' : { 'type' : 'real', 'store' : 'floats', 'table' : False }, 'bool' : { 'type' : 'boolean', 'store' : 'bools', 'table' : False }, 'timestamp' : { 'type' : 'timestamp(0)', 'store' : 'timestamps', 'table' : False }} reSizeFormat = re.compile("^(\d+)(k|m|g|t|p|kb|mb|gb|tb|pb)$", flags=2) reDash = re.compile("\s*-\s*") sizeFactors = { 'b' : 1, 'bytes' : 1, 'k' : 1024, 'kb' : 1024, 'm' : 1048576, 'mb' : 1048576, 'g' : 1073741824, 'gb' : 1073741824, 't' : 1099511627776, 'tb' : 1099511627776, 'p' : 1125899906842624, 'pb' : 1125899906842624 } freqFactors = { 'hz' : 1, 'khz' : 1000, 'mhz' : 1000000, 'ghz' : 1000000000, 'thz' : 1000000000000, 'phz' : 1000000000000000 } sizeMap = [ { 'factor' : sizeFactors['b'], 'long' : 'byte', 'short' : 'byte' }, { 'factor' : sizeFactors['k'], 'long' : 'Kilobyte', 'short' : 'KB' }, { 'factor' : sizeFactors['m'], 'long' : 'Megabyte', 'short' : 'MB' }, { 'factor' : sizeFactors['g'], 'long' : 'Gigabyte', 'short' : 'GB' }, { 'factor' : sizeFactors['t'], 'long' : 'Terabyte', 'short' : 'TB' }, { 'factor' : sizeFactors['p'], 'long' : 'Petabyte', 'short' : 'PB' } ] freqMap = [ { 'factor' : freqFactors['hz'], 'long' : 'Hertz', 'short' : 'Hz' }, { 'factor' : freqFactors['khz'], 'long' : 'Kilohertz', 'short' : 'KHz' }, { 'factor' : freqFactors['mhz'], 'long' : 'Megahertz', 'short' : 'MHz' }, { 'factor' : freqFactors['ghz'], 'long' : 'Gigahertz', 'short' : 'GHz' }, { 'factor' : freqFactors['thz'], 'long' : 'Terahertz', 'short' : 'THz' }, { 'factor' : freqFactors['phz'], 'long' : 'Petahertz', 'short' : 'PHz' } ] reListString = r"(?<!\\)," reList = re.compile(reListString) reKeyVal = r"(?<!\\)=" reKeyVal = re.compile(reKeyVal) class typeToString: """Provides method for converting normalized types to strings.""" def __init__(self): self.toStringFunctions = {} 
self.__build_to_string_functions() def __call__(self, type, value): return self.toStringFunctions[type](value) def __build_to_string_functions(self): functions = {} for function in dir(self): functions[function] = 1 for type in types.keys(): # kinda bad, need to find out how to know the name of the class # I'm in. But it works. functionName = "_typeToString__tostring_%s" % type if functions.has_key(functionName): self.toStringFunctions[type] = getattr(self, functionName) else: if type == '': self.toStringFunctions[type] = self.__tostring_nothing else: error = "To string function %s for type %s does not exist." \ % (functionName, type) raise Exception(error) sys.exit(1) def __tostring(self, value): return str(value) def __tostring_directory(self, value): return self.__tostring(value) def __tostring_address(self, value): return "%s:%s" % (value[0], value[1]) def __tostring_ip_address(self, value): return self.__tostring(value) def __tostring_net_address(self, value): return self.__tostring(value) def __tostring_bool(self, value): if value == False: return 'false' elif value == True: return 'true' else: return str(value) def __tostring_int(self, value): return self.__tostring(value) def __tostring_float(self, value): return self.__tostring(value) def __tostring_pos_int(self, value): return self.__tostring(value) def __tostring_neg_int(self, value): return self.__tostring(value) def __tostring_freq(self, value): return self.__tostring(value) def __tostring_pos_float(self, value): return self.__tostring(value) def __tostring_pos_num(self, value): return self.__tostring(value) def __tostring_neg_float(self, value): return self.__tostring(value) def __tostring_string(self, value): return value def __tostring_keyval(self, value): string = '"' # to protect from shell escapes for key in value: # for item in value[key]: # string = "%s%s=%s," % (string, key, item) # Quotes still cannot protect Double-slashes. 
# Dealing with them separately val = re.sub(r"\\\\",r"\\\\\\\\",value[key]) string = "%s%s=%s," % (string, key, val) return string[:-1] + '"' def __tostring_list(self, value): string = '' for item in value: string = "%s%s," % (string, item) return string[:-1] def __tostring_file(self, value): return self.__tostring(value) def __tostring_size(self, value): return self.__tostring(value) def __tostring_eaddress(self, value): return self.__tostring(value) def __tostring_tcp_port(self, value): return self.__tostring(value) def __tostring_http_version(self, value): return self.__tostring(value) def __tostring_range(self, value): if len(value) < 2: return value[0] else: return "%s-%s" % (value[0], value[1]) def __tostring_timestamp(self, value): return self.__tostring(value) def __tostring_hostname(self, value): return self.__tostring(value) def __tostring_user_account(self, value): return self.__tostring(value) def __tostring_user_group(self, value): return self.__tostring(value) def __tostring_uri(self, value): return self.__tostring(value) def __tostring_nothing(self, value): return value class typeValidator: """Type validation class used to normalize values or validated single/large sets of values by type.""" def __init__(self, originalDir=None): self.verifyFunctions = {} self.__build_verify_functions() self.validateList = [] self.validatedInfo = [] self.__originalDir = originalDir def __getattr__(self, attrname): """validateList = [ { 'func' : <bound method configValidator>, 'name' : 'SA_COMMON.old_xml_dir', 'value': 'var/data/old' }, { 'func' : <bound method configValidator>, 'name' : 'SA_COMMON.log_level', 'value': '4' } ] validatedInfo = [ { # name supplied to add() 'name' : 'SA_COMMON.tmp_xml_dir', # is valid or not 'isValid' : 1 # normalized value 'normalized' : /var/data/tmp, # error string ? 'errorData' : 0 }, { 'name' : 'SA_COMMON.new_xml_dir', 'isValid' : 1 'normalized' : /var/data/new, 'errorData' : 0 } ]""" if attrname == "validateList": return self.validateList # list of items to be validated elif attrname == "validatedInfo": return self.validatedInfo # list of validation results else: raise AttributeError, attrname def __build_verify_functions(self): functions = {} for function in dir(self): functions[function] = 1 for type in types.keys(): # kinda bad, need to find out how to know the name of the class # I'm in. But it works. functionName = "_typeValidator__verify_%s" % type if functions.has_key(functionName): self.verifyFunctions[type] = getattr(self, functionName) else: if type == '': self.verifyFunctions[type] = self.__verify_nothing else: error = "Verify function %s for type %s does not exist." 
\ % (functionName, type) raise Exception(error) sys.exit(1) def __get_value_info(self): valueInfo = { 'isValid' : 0, 'normalized' : 0, 'errorData' : 0 } return valueInfo def __set_value_info(self, valueInfo, **valueData): try: valueInfo['normalized'] = valueData['normalized'] valueInfo['isValid'] = 1 except KeyError: valueInfo['isValid'] = 0 try: valueInfo['errorData'] = valueData['errorData'] except: pass # start of 'private' verification methods, each one should correspond to a # type string (see self.verify_config()) def __verify_directory(self, type, value): valueInfo = self.__get_value_info() if os.path.isdir(value): self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) else: self.__set_value_info(valueInfo) return valueInfo def __norm_directory(self, value): return self.__normalizedPath(value) def __verify_address(self, type, value): valueInfo = self.__get_value_info() try: socket = tcpSocket(value) if socket.verify(): self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) else: self.__set_value_info(valueInfo) except: self.__set_value_info(valueInfo) return valueInfo def __norm_address(self, value): return value.split(':') def __verify_ip_address(self, type, value): valueInfo = self.__get_value_info() if check_ip_address(value): self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) else: self.__set_value_info(valueInfo) return valueInfo def __verify_net_address(self, type, value): valueInfo = self.__get_value_info() if check_net_address(value): self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) else: self.__set_value_info(valueInfo) return valueInfo def __verify_bool(self, type, value): valueInfo = self.__get_value_info() value = str(value) if re.match("^false|0|f|no$", value, 2): self.__set_value_info(valueInfo, normalized=False) elif re.match("^true|1|t|yes$", value, 2): self.__set_value_info(valueInfo, normalized=True) else: self.__set_value_info(valueInfo) return valueInfo def __norm_bool(self, value): value = str(value) norm = "" if re.match("^false|0|f|no$", value, 2): norm = False elif re.match("^true|1|t|yes$", value, 2): norm = True else: raise Exception("invalid bool specified: %s" % value) return norm def __verify_int(self, type, value): valueInfo = self.__get_value_info() try: self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) except: self.__set_value_info(valueInfo) return valueInfo def __norm_int(self, value): return int(value) def __verify_float(self, type, value): valueInfo = self.__get_value_info() try: self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) except: self.__set_value_info(valueInfo) return valueInfo def __norm_float(self, value): return float(value) def __verify_pos_int(self, type, value): valueInfo = self.__get_value_info() try: value = self.normalize(type, value) except: self.__set_value_info(valueInfo) else: self.__set_value_info(valueInfo, normalized=value) return valueInfo def __norm_pos_int(self, value): value = int(value) if value < 0: raise Exception("value is not positive: %s" % value) return value def __verify_neg_int(self, type, value): valueInfo = self.__get_value_info() try: value = self.normalize(type, value) except: self.__set_value_info(valueInfo) else: self.__set_value_info(valueInfo, normalized=value) return valueInfo def __norm_neg_int(self, type, value): value = int(value) if value > 0: raise Exception("value is not negative: %s" % value) return value def __verify_freq(self, type, value): return 
self.__verify_pos_int(type, value) def __norm_freq(self, value): return self.__norm_pos_int(value) def __verify_pos_float(self, type, value): valueInfo = self.__get_value_info() try: value = self.normalize(type, value) except: self.__set_value_info(valueInfo) else: self.__set_value_info(valueInfo, normalized=value) return valueInfo def __norm_pos_float(self, value): value = float(value) if value < 0: raise Exception("value is not positive: %s" % value) return value def __verify_pos_num(self, type, value): return self.__verify_pos_float(value) def __norm_pos_num(self, value): return self.__norm_pos_float(value) def __verify_neg_float(self, type, value): valueInfo = self.__get_value_info() try: value = self.normalize(type, value) except: self.__set_value_info(valueInfo) else: self.__set_value_info(valueInfo, normalized=value) return valueInfo def __norm_neg_float(self, value): value = float(value) if value >= 0: raise Exception("value is not negative: %s" % value) return value def __verify_string(self, type, value): valueInfo = self.__get_value_info() self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) return valueInfo def __norm_string(self, value): return str(value) def __verify_keyval(self, type, value): valueInfo = self.__get_value_info() if reKeyVal.search(value): try: self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) except: self.__set_value_info(valueInfo, errorData = \ "invalid list of key-value pairs : [ %s ]" % value) else: msg = "No key value pairs found?" self.__set_value_info(valueInfo, errorData=msg) return valueInfo def __norm_keyval(self, value): list = self.__norm_list(value) keyValue = {} for item in list: (key, value) = reKeyVal.split(item) #if not keyValue.has_key(key): # keyValue[key] = [] #keyValue[key].append(value) keyValue[key] = value return keyValue def __verify_list(self, type, value): valueInfo = self.__get_value_info() self.__set_value_info(valueInfo, normalized=self.normalize(type,value)) return valueInfo def __norm_list(self, value): norm = [] if reList.search(value): norm = reList.split(value) else: norm = [value,] return norm def __verify_file(self, type, value): valueInfo = self.__get_value_info() if os.path.isfile(value): self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) else: self.__set_value_info(valueInfo) return valueInfo def __norm_file(self, value): return self.__normalizedPath(value) def __verify_size(self, type, value): valueInfo = self.__get_value_info() value = str(value) if reSizeFormat.match(value): numberPart = int(reSizeFormat.sub("\g<1>", value)) factorPart = reSizeFormat.sub("\g<2>", value) try: normalized = normalize_size(numberPart, factorPart) self.__set_value_info(valueInfo, normalized=normalized) except: self.__set_value_info(valueInfo) else: try: value = int(value) except: self.__set_value_info(valueInfo) else: if value >= 0: self.__set_value_info(valueInfo, normalized=value) else: self.__set_value_info(valueInfo) return valueInfo def __norm_size(self, file): norm = None if reSizeFormat.match(value): numberPart = int(reSizeFormat.sub("\g<1>", value)) factorPart = reSizeFormat.sub("\g<2>", value) norm = normalize_size(numberPart, factorPart) else: norm = int(value) return norm def __verify_eaddress(self, type, value): valueInfo = self.__get_value_info() emailList = reComma.split(value) for emailAddress in emailList: if reEmailAddress.match(emailAddress): emailParts = reEmailDelimit.split(emailAddress) try: socket.gethostbyname(emailParts[1]) 
self.__set_value_info(valueInfo, normalized=self.normalize( type, value)) except: errorString = "%s is invalid (domain lookup failed)" % \ emailAddress self.__set_value_info(valueInfo, errorData=errorString) else: errorString = "%s is invalid" % emailAddress self.__set_value_info(valueInfo, errorData=errorString) return valueInfo def __verify_tcp_port(self, type, value): valueInfo = self.__get_value_info() try: value = self.__norm_tcp_port(value) except: self.__set_value_info(valueInfo) else: if value in range(2, 65536): self.__set_value_info(valueInfo, normalized=value) else: self.__set_value_info(valueInfo) return valueInfo def __norm_tcp_port(self, value): return int(value) def __verify_http_version(self, type, value): valueInfo = self.__get_value_info() if value in ('1.0', '1.1'): self.__set_value_info(valueInfo, normalized=float(value)) else: self.__set_value_info(valueInfo) return valueInfo def __verify_range(self, type, value): valueInfo = self.__get_value_info() range = reDash.split(value) try: if len(range) > 1: start = int(range[0]) end = int(range[1]) else: start = int(range[0]) end = None except: self.__set_value_info(valueInfo) else: if end: if end - start != 0: self.__set_value_info(valueInfo, normalized=(start, end)) else: self.__set_value_info(valueInfo) else: self.__set_value_info(valueInfo, normalized=(start,)) return valueInfo def __norm_range(self, value): range = reDash.split(value) if len(range) > 1: start = int(range[0]) end = int(range[1]) else: start = int(range[0]) end = None return (start, end) def __verify_uri(self, type, value): valueInfo = self.__get_value_info() _norm = None try: uriComponents = urlparse.urlparse(value) if uriComponents[0] == '' or uriComponents[0] == 'file': # if scheme is '' or 'file' if not os.path.isfile(uriComponents[2]) and \ not os.path.isdir(uriComponents[2]): raise Exception("Invalid local URI") else: self.__set_value_info(valueInfo, normalized=self.normalize( type,value)) else: # other schemes # currently not checking anything. 
TODO self.__set_value_info(valueInfo, normalized=self.normalize( type,value)) except: errorString = "%s is an invalid uri" % value self.__set_value_info(valueInfo, errorData=errorString) return valueInfo def __norm_uri(self, value): uriComponents = list(urlparse.urlparse(value)) if uriComponents[0] == '': # if scheme is ''' return self.__normalizedPath(uriComponents[2]) elif uriComponents[0] == 'file': # if scheme is 'file' normalizedPath = self.__normalizedPath(uriComponents[2]) return urlparse.urlunsplit(uriComponents[0:1] + [normalizedPath] + uriComponents[3:]) # Not dealing with any other case right now return value def __verify_timestamp(self, type, value): valueInfo = self.__get_value_info() if check_timestamp(value): self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) else: self.__set_value_info(valueInfo) return valueInfo def __verify_hostname(self, type, value): valueInfo = self.__get_value_info() try: socket.gethostbyname(value) self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) except: errorString = "%s is invalid (domain lookup failed)" % value self.__set_value_info(valueInfo, errorData=errorString) return valueInfo def __verify_user_account(self, type, value): valueInfo = self.__get_value_info() try: pwd.getpwnam(value) except: errorString = "'%s' user account does not exist" % value self.__set_value_info(valueInfo, errorData=errorString) else: self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) return valueInfo def __verify_user_group(self, type, value): valueInfo = self.__get_value_info() try: grp.getgrnam(value) except: errorString = "'%s' group does not exist" % value self.__set_value_info(valueInfo, errorData=errorString) else: self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) return valueInfo def __verify_nothing(self, type, value): valueInfo = self.__get_value_info() self.__set_value_info(valueInfo, normalized=self.normalize(type, value)) return valueInfo #-------------------------------------------------------------------------- def normalize(self, type, value): try: normFunc = getattr(self, "_typeValidator__norm_%s" % type) return normFunc(value) except AttributeError, A: # this exception should occur only when we don't have corresponding normalize function return value def verify(self, type, value, allowNone=False): """Verifies a value based on its type. type - supported configValidator type value - data to be validated allowNone - don't freak out if None or '' is supplied returns a valueInfo dictionary: valueInfo = { 'isValid' : 1, 'normalized' : 5, 'errorData' : 0 } where: isValid - true or false (0/1) normalized - the normalized value errorData - if invalid an error string supported types: see top level""" result = None if allowNone: if value == '' or value == None: result = self.__verify_nothing(None, None) result['normalized'] = None else: result = self.verifyFunctions[type](type, value) else: result = self.verifyFunctions[type](type, value) return result def is_valid_type(self, type): """Returns true if type is valid.""" return types.has_key(type) def type_info(self, type): """Returns type info dictionary.""" dbInfo = dbTypes[types[type]['db']] typeInfo = types[type].copy() typeInfo['db'] = dbInfo return typeInfo def add(self, name, type, value): """Adds a value and type by name to the configValidate object to be verified using validate(). 
name - name used to key values and access the results of the validation type - configValidator type value - data to be verified""" self.validateList.append({ 'name' : name, 'type' : type, 'value': value }) def validate(self, allowNone=False): """Validates configValidate object populating validatedInfo with valueInfo dictionaries for each value added to the object.""" for valItem in self.validateList: valueInfo = self.verify(valItem['type'], valItem['value'], allowNone) if valueInfo: valueInfo['name'] = valItem['name'] self.validatedInfo.append(valueInfo) else: raise Exception("\nMissing a return value: valueInfo\n%s" % \ self.verifyFunctions[valItem['type']](valItem['value'])) def __normalizedPath(self, value): oldWd = os.getcwd() if self.__originalDir: os.chdir(self.__originalDir) normPath = os.path.realpath(value) os.chdir(oldWd) return normPath class display: def __init__(self): self.displayFunctions = {} self.__build_dispaly_functions() def __build_dispaly_functions(self): functions = {} for function in dir(self): functions[function] = 1 for type in types.keys(): # kinda bad, need to find out how to know the name of the class # I'm in. But it works. functionName = "_cisplay__display_%s" % type if functions.has_key(functionName): self.displayFunctions[type] = getattr(self, functionName) else: if type == '': self.displayFunctions[type] = self.__display_default else: error = "Display function %s for type %s does not exist." \ % (functionName, type) raise Exception(error) sys.exit(1) def __display_default(self, value, style): return value def __display_generic_number(self, value): displayNumber = '' splitNum = string.split(str(value), sep='.') numList = list(str(splitNum[0])) numList.reverse() length = len(numList) counter = 0 for char in numList: counter = counter + 1 if counter % 3 or counter == length: displayNumber = "%s%s" % (char, displayNumber) else: displayNumber = ",%s%s" % (char, displayNumber) if len(splitNum) > 1: displayNumber = "%s.%s" % (displayNumber, splitNum[1]) return displayNumber def __display_generic_mappable(self, map, value, style, plural=True): displayValue = '' length = len(str(value)) if length > 3: for factorSet in map: displayValue = float(value) / factorSet['factor'] if len(str(int(displayValue))) <= 3 or \ factorSet['factor'] == map[-1]['factor']: displayValue = "%10.2f" % displayValue if displayValue[-1] == '0': if displayValue > 1 and style != 'short' and plural: displayValue = "%s %ss" % (displayValue[:-1], factorSet[style]) else: displayValue = "%s %s" % (displayValue[:-1], factorSet[style]) else: if displayValue > 1 and style != 'short' and plural: displayValue = "%s %ss" % (displayValue, factorSet[style]) else: displayValue = "%s %s" % (displayValue, factorSet[style]) break return displayValue def __display_directory(self, value, style): return self.__display_default(value, style) def __display_address(self, value, style): return self.__display_default(value, style) def __display_ip_address(self, value, style): return self.__display_default(value, style) def __display_net_address(self, value, style): return self.__display_default(value, style) def __display_bool(self, value, style): displayValue = value if not isinstance(displayValue, bool): if re.match("^false|0|f|no$", value, 2): displayValue=False elif re.match("^true|1|t|yes$", value, 2): displayValue=True return displayValue def __display_int(self, value, style): return self.__display_generic_number(value) def __display_float(self, value, style): return self.__display_generic_number(value) def 
__display_pos_int(self, value, style): return self.__display_generic_number(value) def __display_neg_int(self, value, style): return self.__display_generic_number(value) def __display_pos_num(self, value, style): return self.__display_generic_number(value) def __display_pos_float(self, value, style): return self.__display_generic_number(value) def __display_neg_float(self, value, style): return self.__display_generic_number(value) def __display_string(self, value, style): return self.__display_default(value, style) def __display_list(self, value, style): value = value.rstrip() return value.rstrip(',') def __display_keyval(self, value, style): value = value.rstrip() return value.rstrip(',') def __display_file(self, value, style): return self.__display_default(value, style) def __display_size(self, value, style): return self.__display_generic_mappable(sizeMap, value, style) def __display_freq(self, value, style): return self.__display_generic_mappable(freqMap, value, style, False) def __display_eaddress(self, value, style): return self.__display_default(value, style) def __display_tcp_port(self, value, style): return self.__display_default(value, style) def __display_http_version(self, value, style): return self.__display_default(value, style) def __display_range(self, value, style): return self.__display_default(value, style) def __display_hostname(self, value, style): return self.__display_default(value, style) def __display_user_account(self, value, style): return self.__display_default(value, style) def __display_user_group(self, value, style): return self.__display_default(value, style) def __display_timestamp(self, value, style): return self.__display_default(value, style) def display(self, type, value, style='short'): displayValue = value if value != None: displayValue = self.displayFunctions[type](value, style) return displayValue typeValidatorInstance = typeValidator() def is_valid_type(type): """Returns true if type is valid.""" return typeValidatorInstance.is_valid_type(type) def type_info(type): """Returns type info dictionary.""" return typeValidatorInstance.type_info(type) def verify(type, value, allowNone=False): """Returns a normalized valueInfo dictionary.""" return typeValidatorInstance.verify(type, value, allowNone) def __normalize(map, val, factor): normFactor = string.lower(factor) normVal = float(val) return int(normVal * map[normFactor]) def normalize_size(size, factor): """ Normalize a size to bytes. size - number of B, KB, MB, GB, TB, or PB factor - size factor (case insensitive): b | bytes - bytes k | kb - kilobytes m | mb - megabytes g | gb - gigabytes t | tb - terabytes p | pb - petabytes """ return __normalize(sizeFactors, size, factor) def normalize_freq(freq, factor): """ Normalize a frequency to hertz. freq - number of Hz, Khz, Mhz, Ghz, Thz, or Phz factor - size factor (case insensitive): Hz - Hertz Mhz - Megahertz Ghz - Gigahertz Thz - Terahertz Phz - Petahertz """ return __normalize(freqFactors, freq, factor)
apache-2.0
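The typical way to drive the typeValidator class defined above is to queue up (name, type, value) triples with add() and then call validate(); the results accumulate in validatedInfo as dictionaries carrying name, isValid, normalized and errorData keys, as described in the docstring embedded in __getattr__. A minimal usage sketch, assuming the hodlib package is importable (the option names below are invented for illustration, and the code is Python 2 to match the module):

# Hypothetical usage sketch for hodlib.Common.types.typeValidator.
from hodlib.Common.types import typeValidator

validator = typeValidator(originalDir='/tmp')
validator.add('ringmaster.http-port', 'tcp_port', '8080')   # normalized to int 8080
validator.add('ringmaster.work-dirs', 'list', 'a,b,c')      # normalized to ['a', 'b', 'c']
validator.add('ringmaster.max-log', 'size', '2GB')          # normalized to 2147483648 bytes
validator.validate(allowNone=False)

for result in validator.validatedInfo:
    # each entry carries the name passed to add(), an isValid flag (0/1),
    # the normalized value and, when invalid, an error string in errorData
    print "%(name)s: isValid=%(isValid)s normalized=%(normalized)s" % result

For one-off checks, the module-level helper verify(type, value) wraps a shared typeValidator instance and returns the same kind of valueInfo dictionary.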
xiejianying/pjsip
pjsip-apps/src/pygui/call.py
26
3368
# $Id$ # # pjsua Python GUI Demo # # Copyright (C)2013 Teluu Inc. (http://www.teluu.com) # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # import sys if sys.version_info[0] >= 3: # Python 3 import tkinter as tk from tkinter import ttk from tkinter import messagebox as msgbox else: import Tkinter as tk import tkMessageBox as msgbox import ttk import random import pjsua2 as pj import application import endpoint as ep # Call class class Call(pj.Call): """ High level Python Call object, derived from pjsua2's Call object. """ def __init__(self, acc, peer_uri='', chat=None, call_id = pj.PJSUA_INVALID_ID): pj.Call.__init__(self, acc, call_id) self.acc = acc self.peerUri = peer_uri self.chat = chat self.connected = False self.onhold = False def onCallState(self, prm): ci = self.getInfo() self.connected = ci.state == pj.PJSIP_INV_STATE_CONFIRMED if self.chat: self.chat.updateCallState(self, ci) def onCallMediaState(self, prm): ci = self.getInfo() for mi in ci.media: if mi.type == pj.PJMEDIA_TYPE_AUDIO and \ (mi.status == pj.PJSUA_CALL_MEDIA_ACTIVE or \ mi.status == pj.PJSUA_CALL_MEDIA_REMOTE_HOLD): m = self.getMedia(mi.index) am = pj.AudioMedia.typecastFromMedia(m) # connect ports ep.Endpoint.instance.audDevManager().getCaptureDevMedia().startTransmit(am) am.startTransmit(ep.Endpoint.instance.audDevManager().getPlaybackDevMedia()) if mi.status == pj.PJSUA_CALL_MEDIA_REMOTE_HOLD and not self.onhold: self.chat.addMessage(None, "'%s' sets call onhold" % (self.peerUri)) self.onhold = True elif mi.status == pj.PJSUA_CALL_MEDIA_ACTIVE and self.onhold: self.chat.addMessage(None, "'%s' sets call active" % (self.peerUri)) self.onhold = False if self.chat: self.chat.updateCallMediaState(self, ci) def onInstantMessage(self, prm): # chat instance should have been initalized if not self.chat: return self.chat.addMessage(self.peerUri, prm.msgBody) self.chat.showWindow() def onInstantMessageStatus(self, prm): if prm.code/100 == 2: return # chat instance should have been initalized if not self.chat: return self.chat.addMessage(None, "Failed sending message to '%s' (%d): %s" % (self.peerUri, prm.code, prm.reason)) def onTypingIndication(self, prm): # chat instance should have been initalized if not self.chat: return self.chat.setTypingIndication(self.peerUri, prm.isTyping) def onDtmfDigit(self, prm): #msgbox.showinfo("pygui", 'Got DTMF:' + prm.digit) pass def onCallMediaTransportState(self, prm): #msgbox.showinfo("pygui", "Media transport state") pass if __name__ == '__main__': application.main()
gpl-2.0
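The Call class in the entry above follows the usual pjsua2 pattern of subclassing pj.Call and overriding the onCallState/onCallMediaState callbacks, with the demo's chat window attached. A hedged sketch of how such a call object is typically created for an outgoing call is shown below; the account is assumed to be an already-created and registered pygui Account, and the destination URI is a placeholder.

# Hypothetical sketch -- not taken from the pygui demo itself.
import pjsua2 as pj
import call   # the module shown above

def place_call(acc, chat, dst_uri='sip:[email protected]'):
    c = call.Call(acc, peer_uri=dst_uri, chat=chat)
    prm = pj.CallOpParam(True)    # True: start from the default call settings
    c.makeCall(dst_uri, prm)      # onCallState()/onCallMediaState() fire as the call progresses
    return c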
ygol/odoo
addons/stock/tests/test_stock_flow.py
23
87441
# -*- coding: utf-8 -*- from openerp.addons.stock.tests.common import TestStockCommon from openerp.tools import mute_logger, float_round class TestStockFlow(TestStockCommon): @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_picking_create_and_transfer_quantity(self): """ Basic stock operation on incoming and outgoing shipment. """ LotObj = self.env['stock.production.lot'] # ---------------------------------------------------------------------- # Create incoming shipment of product A, B, C, D # ---------------------------------------------------------------------- # Product A ( 1 Unit ) , Product C ( 10 Unit ) # Product B ( 1 Unit ) , Product D ( 10 Unit ) # Product D ( 5 Unit ) # ---------------------------------------------------------------------- picking_in = self.PickingObj.create({ 'partner_id': self.partner_delta_id, 'picking_type_id': self.picking_type_in}) self.MoveObj.create({ 'name': self.productA.name, 'product_id': self.productA.id, 'product_uom_qty': 1, 'product_uom': self.productA.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.productB.name, 'product_id': self.productB.id, 'product_uom_qty': 1, 'product_uom': self.productB.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.productC.name, 'product_id': self.productC.id, 'product_uom_qty': 10, 'product_uom': self.productC.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.productD.name, 'product_id': self.productD.id, 'product_uom_qty': 10, 'product_uom': self.productD.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.productD.name, 'product_id': self.productD.id, 'product_uom_qty': 5, 'product_uom': self.productD.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) # Check incoming shipment move lines state. for move in picking_in.move_lines: self.assertEqual(move.state, 'draft', 'Wrong state of move line.') # Confirm incoming shipment. picking_in.action_confirm() # Check incoming shipment move lines state. for move in picking_in.move_lines: self.assertEqual(move.state, 'assigned', 'Wrong state of move line.') # ---------------------------------------------------------------------- # Replace pack operation of incoming shipments. 
# ---------------------------------------------------------------------- picking_in.do_prepare_partial() self.StockPackObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_in.id)]).write({ 'product_qty': 4.0}) self.StockPackObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_in.id)]).write({ 'product_qty': 5.0}) self.StockPackObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_in.id)]).write({ 'product_qty': 5.0}) self.StockPackObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_in.id)]).write({ 'product_qty': 5.0}) lot2_productC = LotObj.create({'name': 'C Lot 2', 'product_id': self.productC.id}) self.StockPackObj.create({ 'product_id': self.productC.id, 'product_qty': 2, 'product_uom_id': self.productC.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': picking_in.id, 'lot_id': lot2_productC.id}) self.StockPackObj.create({ 'product_id': self.productD.id, 'product_qty': 2, 'product_uom_id': self.productD.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': picking_in.id}) # Check incoming shipment total quantity of pack operation packs = self.StockPackObj.search([('picking_id', '=', picking_in.id)]) total_qty = [pack.product_qty for pack in packs] self.assertEqual(sum(total_qty), 23, 'Wrong quantity in pack operation (%s found instead of 23)' % (sum(total_qty))) # Transfer Incoming Shipment. picking_in.do_transfer() # ---------------------------------------------------------------------- # Check state, quantity and total moves of incoming shipment. # ---------------------------------------------------------------------- # Check total no of move lines of incoming shipment. self.assertEqual(len(picking_in.move_lines), 6, 'Wrong number of move lines.') # Check incoming shipment state. self.assertEqual(picking_in.state, 'done', 'Incoming shipment state should be done.') # Check incoming shipment move lines state. for move in picking_in.move_lines: self.assertEqual(move.state, 'done', 'Wrong state of move line.') # Check product A done quantity must be 3 and 1 moves = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_in.id)]) a_done_qty = [move.product_uom_qty for move in moves] self.assertEqual(set(a_done_qty), set([1.0, 3.0]), 'Wrong move quantity for product A.') # Check product B done quantity must be 4 and 1 moves = self.MoveObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_in.id)]) b_done_qty = [move.product_uom_qty for move in moves] self.assertEqual(set(b_done_qty), set([4.0, 1.0]), 'Wrong move quantity for product B.') # Check product C done quantity must be 7 c_done_qty = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_in.id)], limit=1).product_uom_qty self.assertEqual(c_done_qty, 7.0, 'Wrong move quantity of product C (%s found instead of 7)' % (c_done_qty)) # Check product D done quantity must be 7 d_done_qty = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_in.id)], limit=1).product_uom_qty self.assertEqual(d_done_qty, 7.0, 'Wrong move quantity of product D (%s found instead of 7)' % (d_done_qty)) # ---------------------------------------------------------------------- # Check Back order of Incoming shipment. # ---------------------------------------------------------------------- # Check back order created or not. 
back_order_in = self.PickingObj.search([('backorder_id', '=', picking_in.id)]) self.assertEqual(len(back_order_in), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(back_order_in.move_lines), 3, 'Wrong number of move lines.') # Check back order should be created with 3 quantity of product C. moves = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', back_order_in.id)]) product_c_qty = [move.product_uom_qty for move in moves] self.assertEqual(sum(product_c_qty), 3.0, 'Wrong move quantity of product C (%s found instead of 3)' % (product_c_qty)) # Check back order should be created with 8 quantity of product D. moves = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_in.id)]) product_d_qty = [move.product_uom_qty for move in moves] self.assertEqual(sum(product_d_qty), 8.0, 'Wrong move quantity of product D (%s found instead of 8)' % (product_d_qty)) # ====================================================================== # Create Outgoing shipment with ... # product A ( 10 Unit ) , product B ( 5 Unit ) # product C ( 3 unit ) , product D ( 10 Unit ) # ====================================================================== picking_out = self.PickingObj.create({ 'partner_id': self.partner_agrolite_id, 'picking_type_id': self.picking_type_out}) self.MoveObj.create({ 'name': self.productA.name, 'product_id': self.productA.id, 'product_uom_qty': 10, 'product_uom': self.productA.uom_id.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.productB.name, 'product_id': self.productB.id, 'product_uom_qty': 5, 'product_uom': self.productB.uom_id.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.productC.name, 'product_id': self.productC.id, 'product_uom_qty': 3, 'product_uom': self.productC.uom_id.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.productD.name, 'product_id': self.productD.id, 'product_uom_qty': 10, 'product_uom': self.productD.uom_id.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) # Confirm outgoing shipment. 
picking_out.action_confirm() for move in picking_out.move_lines: self.assertEqual(move.state, 'confirmed', 'Wrong state of move line.') # Product assign to outgoing shipments picking_out.action_assign() for move in picking_out.move_lines: self.assertEqual(move.state, 'assigned', 'Wrong state of move line.') # Check availability for product A aval_a_qty = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(aval_a_qty, 4.0, 'Wrong move quantity availability of product A (%s found instead of 4)' % (aval_a_qty)) # Check availability for product B aval_b_qty = self.MoveObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(aval_b_qty, 5.0, 'Wrong move quantity availability of product B (%s found instead of 5)' % (aval_b_qty)) # Check availability for product C aval_c_qty = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(aval_c_qty, 3.0, 'Wrong move quantity availability of product C (%s found instead of 3)' % (aval_c_qty)) # Check availability for product D aval_d_qty = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(aval_d_qty, 7.0, 'Wrong move quantity availability of product D (%s found instead of 7)' % (aval_d_qty)) # ---------------------------------------------------------------------- # Replace pack operation of outgoing shipment. # ---------------------------------------------------------------------- picking_out.do_prepare_partial() self.StockPackObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_out.id)]).write({'product_qty': 2.0}) self.StockPackObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_out.id)]).write({'product_qty': 3.0}) self.StockPackObj.create({ 'product_id': self.productB.id, 'product_qty': 2, 'product_uom_id': self.productB.uom_id.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location, 'picking_id': picking_out.id}) self.StockPackObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_out.id)]).write({ 'product_qty': 2.0, 'lot_id': lot2_productC.id}) self.StockPackObj.create({ 'product_id': self.productC.id, 'product_qty': 3, 'product_uom_id': self.productC.uom_id.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location, 'picking_id': picking_out.id}) self.StockPackObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_out.id)]).write({'product_qty': 6.0}) # Transfer picking. picking_out.do_transfer() # ---------------------------------------------------------------------- # Check state, quantity and total moves of outgoing shipment. # ---------------------------------------------------------------------- # check outgoing shipment status. self.assertEqual(picking_out.state, 'done', 'Wrong state of outgoing shipment.') # check outgoing shipment total moves and and its state. self.assertEqual(len(picking_out.move_lines), 5, 'Wrong number of move lines') for move in picking_out.move_lines: self.assertEqual(move.state, 'done', 'Wrong state of move line.') back_order_out = self.PickingObj.search([('backorder_id', '=', picking_out.id)]) #------------------ # Check back order. 
# ----------------- self.assertEqual(len(back_order_out), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(back_order_out.move_lines), 2, 'Wrong number of move lines') # Check back order should be created with 8 quantity of product A. product_a_qty = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', back_order_out.id)], limit=1).product_uom_qty self.assertEqual(product_a_qty, 8.0, 'Wrong move quantity of product A (%s found instead of 8)' % (product_a_qty)) # Check back order should be created with 4 quantity of product D. product_d_qty = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_out.id)], limit=1).product_uom_qty self.assertEqual(product_d_qty, 4.0, 'Wrong move quantity of product D (%s found instead of 4)' % (product_d_qty)) #----------------------------------------------------------------------- # Check stock location quant quantity and quantity available # of product A, B, C, D #----------------------------------------------------------------------- # Check quants and available quantity for product A quants = self.StockQuantObj.search([('product_id', '=', self.productA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 2.0, 'Expecting 2.0 Unit , got %.4f Unit on location stock!' % (sum(total_qty))) self.assertEqual(self.productA.qty_available, 2.0, 'Wrong quantity available (%s found instead of 2.0)' % (self.productA.qty_available)) # Check quants and available quantity for product B quants = self.StockQuantObj.search([('product_id', '=', self.productB.id), ('location_id', '=', self.stock_location)]) self.assertFalse(quants, 'No quant should found as outgoing shipment took everything out of stock.') self.assertEqual(self.productB.qty_available, 0.0, 'Product B should have zero quantity available.') # Check quants and available quantity for product C quants = self.StockQuantObj.search([('product_id', '=', self.productC.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 2.0, 'Expecting 2.0 Unit, got %.4f Unit on location stock!' % (sum(total_qty))) self.assertEqual(self.productC.qty_available, 2.0, 'Wrong quantity available (%s found instead of 2.0)' % (self.productC.qty_available)) # Check quants and available quantity for product D quant = self.StockQuantObj.search([('product_id', '=', self.productD.id), ('location_id', '=', self.stock_location)], limit=1) self.assertEqual(quant.qty, 1.0, 'Expecting 1.0 Unit , got %.4f Unit on location stock!' % (quant.qty)) self.assertEqual(self.productD.qty_available, 1.0, 'Wrong quantity available (%s found instead of 1.0)' % (self.productD.qty_available)) #----------------------------------------------------------------------- # Back Order of Incoming shipment #----------------------------------------------------------------------- lot3_productC = LotObj.create({'name': 'Lot 3', 'product_id': self.productC.id}) lot4_productC = LotObj.create({'name': 'Lot 4', 'product_id': self.productC.id}) lot5_productC = LotObj.create({'name': 'Lot 5', 'product_id': self.productC.id}) lot6_productC = LotObj.create({'name': 'Lot 6', 'product_id': self.productC.id}) lot1_productD = LotObj.create({'name': 'Lot 1', 'product_id': self.productD.id}) lot2_productD = LotObj.create({'name': 'Lot 2', 'product_id': self.productD.id}) # Confirm back order of incoming shipment. 
back_order_in.action_confirm() self.assertEqual(back_order_in.state, 'assigned', 'Wrong state of incoming shipment back order.') for move in back_order_in.move_lines: self.assertEqual(move.state, 'assigned', 'Wrong state of move line.') # ---------------------------------------------------------------------- # Replace pack operation (Back order of Incoming shipment) # ---------------------------------------------------------------------- back_order_in.do_prepare_partial() packD = self.StockPackObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_in.id)]) self.assertEqual(len(packD), 1, 'Wrong number of pack operation.') packD.write({'product_qty': 4, 'lot_id': lot1_productD.id}) self.StockPackObj.create({ 'product_id': self.productD.id, 'product_qty': 4, 'product_uom_id': self.productD.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': back_order_in.id, 'lot_id': lot2_productD.id}) self.StockPackObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', back_order_in.id)], limit=1).write({'product_qty': 1, 'lot_id': lot3_productC.id}) self.StockPackObj.create({ 'product_id': self.productC.id, 'product_qty': 1, 'product_uom_id': self.productC.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': back_order_in.id, 'lot_id': lot4_productC.id}) self.StockPackObj.create({ 'product_id': self.productC.id, 'product_qty': 2, 'product_uom_id': self.productC.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': back_order_in.id, 'lot_id': lot5_productC.id}) self.StockPackObj.create({ 'product_id': self.productC.id, 'product_qty': 2, 'product_uom_id': self.productC.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': back_order_in.id, 'lot_id': lot6_productC.id}) self.StockPackObj.create({ 'product_id': self.productA.id, 'product_qty': 10, 'product_uom_id': self.productA.uom_id.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': back_order_in.id}) back_order_in.do_transfer() # ---------------------------------------------------------------------- # Check state, quantity and total moves (Back order of Incoming shipment). # ---------------------------------------------------------------------- # Check total no of move lines. self.assertEqual(len(back_order_in.move_lines), 6, 'Wrong number of move lines') # Check incoming shipment state must be 'Done'. self.assertEqual(back_order_in.state, 'done', 'Wrong state of picking.') # Check incoming shipment move lines state must be 'Done'. 
for move in back_order_in.move_lines: self.assertEqual(move.state, 'done', 'Wrong state of move lines.') # Check product A done quantity must be 10 movesA = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', back_order_in.id)]) self.assertEqual(movesA.product_uom_qty, 10, "Wrong move quantity of product A (%s found instead of 10)" % (movesA.product_uom_qty)) # Check product C done quantity must be 3.0, 1.0, 2.0 movesC = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', back_order_in.id)]) c_done_qty = [move.product_uom_qty for move in movesC] self.assertEqual(set(c_done_qty), set([3.0, 1.0, 2.0]), 'Wrong quantity of moves product C.') # Check product D done quantity must be 5.0 and 3.0 movesD = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_in.id)]) d_done_qty = [move.product_uom_qty for move in movesD] self.assertEqual(set(d_done_qty), set([3.0, 5.0]), 'Wrong quantity of moves product D.') # Check no back order is created. self.assertFalse(self.PickingObj.search([('backorder_id', '=', back_order_in.id)]), "Should not create any back order.") #----------------------------------------------------------------------- # Check stock location quant quantity and quantity available # of product A, B, C, D #----------------------------------------------------------------------- # Check quants and available quantity for product A. quants = self.StockQuantObj.search([('product_id', '=', self.productA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 12.0, 'Wrong total stock location quantity (%s found instead of 12)' % (sum(total_qty))) self.assertEqual(self.productA.qty_available, 12.0, 'Wrong quantity available (%s found instead of 12)' % (self.productA.qty_available)) # Check quants and available quantity for product B. quants = self.StockQuantObj.search([('product_id', '=', self.productB.id), ('location_id', '=', self.stock_location)]) self.assertFalse(quants, 'No quant should found as outgoing shipment took everything out of stock') self.assertEqual(self.productB.qty_available, 0.0, 'Total quantity in stock should be 0 as the backorder took everything out of stock') # Check quants and available quantity for product C. quants = self.StockQuantObj.search([('product_id', '=', self.productC.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 8.0, 'Wrong total stock location quantity (%s found instead of 8)' % (sum(total_qty))) self.assertEqual(self.productC.qty_available, 8.0, 'Wrong quantity available (%s found instead of 8)' % (self.productC.qty_available)) # Check quants and available quantity for product D. quants = self.StockQuantObj.search([('product_id', '=', self.productD.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 9.0, 'Wrong total stock location quantity (%s found instead of 9)' % (sum(total_qty))) self.assertEqual(self.productD.qty_available, 9.0, 'Wrong quantity available (%s found instead of 9)' % (self.productD.qty_available)) #----------------------------------------------------------------------- # Back order of Outgoing shipment # ---------------------------------------------------------------------- back_order_out.do_prepare_partial() back_order_out.do_transfer() # Check stock location quants and available quantity for product A. 
quants = self.StockQuantObj.search([('product_id', '=', self.productA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertGreaterEqual(float_round(sum(total_qty), precision_rounding=0.0001), 1, 'Total stock location quantity for product A should not be nagative.') def test_10_pickings_transfer_with_different_uom(self): """ Picking transfer with diffrent unit of meassure. """ # ---------------------------------------------------------------------- # Create incoming shipment of products DozA, SDozA, SDozARound, kgB, gB # ---------------------------------------------------------------------- # DozA ( 10 Dozen ) , SDozA ( 10.5 SuperDozen ) # SDozARound ( 10.5 10.5 SuperDozenRound ) , kgB ( 0.020 kg ) # gB ( 525.3 g ) # ---------------------------------------------------------------------- picking_in_A = self.PickingObj.create({ 'partner_id': self.partner_delta_id, 'picking_type_id': self.picking_type_in}) self.MoveObj.create({ 'name': self.DozA.name, 'product_id': self.DozA.id, 'product_uom_qty': 10, 'product_uom': self.DozA.uom_id.id, 'picking_id': picking_in_A.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.SDozA.name, 'product_id': self.SDozA.id, 'product_uom_qty': 10.5, 'product_uom': self.SDozA.uom_id.id, 'picking_id': picking_in_A.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.SDozARound.name, 'product_id': self.SDozARound.id, 'product_uom_qty': 10.5, 'product_uom': self.SDozARound.uom_id.id, 'picking_id': picking_in_A.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.kgB.name, 'product_id': self.kgB.id, 'product_uom_qty': 0.020, 'product_uom': self.kgB.uom_id.id, 'picking_id': picking_in_A.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.gB.name, 'product_id': self.gB.id, 'product_uom_qty': 525.3, 'product_uom': self.gB.uom_id.id, 'picking_id': picking_in_A.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) # Check incoming shipment move lines state. for move in picking_in_A.move_lines: self.assertEqual(move.state, 'draft', 'Move state must be draft.') # Confirm incoming shipment. picking_in_A.action_confirm() # Check incoming shipment move lines state. for move in picking_in_A.move_lines: self.assertEqual(move.state, 'assigned', 'Move state must be draft.') picking_in_A.do_prepare_partial() # ---------------------------------------------------- # Check pack operation quantity of incoming shipments. 
# ---------------------------------------------------- PackSdozAround = self.StockPackObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_in_A.id)], limit=1) self.assertEqual(PackSdozAround.product_qty, 11, 'Wrong quantity in pack operation (%s found instead of 11)' % (PackSdozAround.product_qty)) picking_in_A.do_transfer() #----------------------------------------------------------------------- # Check stock location quant quantity and quantity available #----------------------------------------------------------------------- # Check quants and available quantity for product DozA quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 10, 'Expecting 10 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty))) self.assertEqual(self.DozA.qty_available, 10, 'Wrong quantity available (%s found instead of 10)' % (self.DozA.qty_available)) # Check quants and available quantity for product SDozA quants = self.StockQuantObj.search([('product_id', '=', self.SDozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 10.5, 'Expecting 10.5 SDozen , got %.4f SDozen on location stock!' % (sum(total_qty))) self.assertEqual(self.SDozA.qty_available, 10.5, 'Wrong quantity available (%s found instead of 10.5)' % (self.SDozA.qty_available)) # Check quants and available quantity for product SDozARound quants = self.StockQuantObj.search([('product_id', '=', self.SDozARound.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 11, 'Expecting 11 SDozenRound , got %.4f SDozenRound on location stock!' % (sum(total_qty))) self.assertEqual(self.SDozARound.qty_available, 11, 'Wrong quantity available (%s found instead of 11)' % (self.SDozARound.qty_available)) # Check quants and available quantity for product gB quants = self.StockQuantObj.search([('product_id', '=', self.gB.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 525.3, 'Expecting 525.3 gram , got %.4f gram on location stock!' % (sum(total_qty))) self.assertEqual(self.gB.qty_available, 525.3, 'Wrong quantity available (%s found instead of 525.3' % (self.gB.qty_available)) # Check quants and available quantity for product kgB quants = self.StockQuantObj.search([('product_id', '=', self.kgB.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 0.020, 'Expecting 0.020 kg , got %.4f kg on location stock!' 
% (sum(total_qty))) self.assertEqual(self.kgB.qty_available, 0.020, 'Wrong quantity available (%s found instead of 0.020)' % (self.kgB.qty_available)) # ---------------------------------------------------------------------- # Create Incoming Shipment B # ---------------------------------------------------------------------- picking_in_B = self.PickingObj.create({ 'partner_id': self.partner_delta_id, 'picking_type_id': self.picking_type_in}) self.MoveObj.create({ 'name': self.DozA.name, 'product_id': self.DozA.id, 'product_uom_qty': 120, 'product_uom': self.uom_unit.id, 'picking_id': picking_in_B.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.SDozA.name, 'product_id': self.SDozA.id, 'product_uom_qty': 1512, 'product_uom': self.uom_unit.id, 'picking_id': picking_in_B.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.SDozARound.name, 'product_id': self.SDozARound.id, 'product_uom_qty': 1584, 'product_uom': self.uom_unit.id, 'picking_id': picking_in_B.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.kgB.name, 'product_id': self.kgB.id, 'product_uom_qty': 20.0, 'product_uom': self.uom_gm.id, 'picking_id': picking_in_B.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) self.MoveObj.create({ 'name': self.gB.name, 'product_id': self.gB.id, 'product_uom_qty': 0.525, 'product_uom': self.uom_kg.id, 'picking_id': picking_in_B.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) # Check incoming shipment move lines state. for move in picking_in_B.move_lines: self.assertEqual(move.state, 'draft', 'Wrong state of move line.') # Confirm incoming shipment. picking_in_B.action_confirm() # Check incoming shipment move lines state. for move in picking_in_B.move_lines: self.assertEqual(move.state, 'assigned', 'Wrong state of move line.') picking_in_B.do_prepare_partial() # ---------------------------------------------------------------------- # Check product quantity and unit of measure of pack operaation. # ---------------------------------------------------------------------- # Check pack operation quantity and unit of measure for product DozA. PackdozA = self.StockPackObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(PackdozA.product_qty, 120, 'Wrong quantity in pack operation (%s found instead of 120)' % (PackdozA.product_qty)) self.assertEqual(PackdozA.product_uom_id.id, self.uom_unit.id, 'Wrong uom in pack operation for product DozA.') # Check pack operation quantity and unit of measure for product SDozA. PackSdozA = self.StockPackObj.search([('product_id', '=', self.SDozA.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(PackSdozA.product_qty, 1512, 'Wrong quantity in pack operation (%s found instead of 1512)' % (PackSdozA.product_qty)) self.assertEqual(PackSdozA.product_uom_id.id, self.uom_unit.id, 'Wrong uom in pack operation for product SDozA.') # Check pack operation quantity and unit of measure for product SDozARound. 
PackSdozAround = self.StockPackObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(PackSdozAround.product_qty, 1584, 'Wrong quantity in pack operation (%s found instead of 1584)' % (PackSdozAround.product_qty)) self.assertEqual(PackSdozAround.product_uom_id.id, self.uom_unit.id, 'Wrong uom in pack operation for product SDozARound.') # Check pack operation quantity and unit of measure for product gB. packgB = self.StockPackObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(packgB.product_qty, 525, 'Wrong quantity in pack operation (%s found instead of 525)' % (packgB.product_qty)) self.assertEqual(packgB.product_uom_id.id, self.uom_gm.id, 'Wrong uom in pack operation for product gB.') # Check pack operation quantity and unit of measure for product kgB. packkgB = self.StockPackObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(packkgB.product_qty, 20.0, 'Wrong quantity in pack operation (%s found instead of 20)' % (packkgB.product_qty)) self.assertEqual(packkgB.product_uom_id.id, self.uom_gm.id, 'Wrong uom in pack operation for product kgB') # ---------------------------------------------------------------------- # Replace pack operation of incoming shipment. # ---------------------------------------------------------------------- self.StockPackObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_in_B.id)]).write({ 'product_qty': 0.020, 'product_uom_id': self.uom_kg.id}) self.StockPackObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id)]).write({ 'product_qty': 525.3, 'product_uom_id': self.uom_gm.id}) self.StockPackObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_in_B.id)]).write({ 'product_qty': 4, 'product_uom_id': self.uom_dozen.id}) self.StockPackObj.create({ 'product_id': self.DozA.id, 'product_qty': 48, 'product_uom_id': self.uom_unit.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location, 'picking_id': picking_in_B.id}) # Transfer product. # ----------------- picking_in_B.do_transfer() #----------------------------------------------------------------------- # Check incoming shipment #----------------------------------------------------------------------- # Check incoming shipment state. self.assertEqual(picking_in_B.state, 'done', 'Incoming shipment state should be done.') # Check incoming shipment move lines state. for move in picking_in_B.move_lines: self.assertEqual(move.state, 'done', 'Wrong state of move line.') # Check total done move lines for incoming shipment. self.assertEqual(len(picking_in_B.move_lines), 6, 'Wrong number of move lines') # Check product DozA done quantity. moves_DozA = self.MoveObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(moves_DozA.product_uom_qty, 96, 'Wrong move quantity (%s found instead of 96)' % (moves_DozA.product_uom_qty)) self.assertEqual(moves_DozA.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product DozA.') # Check product SDozA done quantity. 
moves_SDozA = self.MoveObj.search([('product_id', '=', self.SDozA.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(moves_SDozA.product_uom_qty, 1512, 'Wrong move quantity (%s found instead of 1512)' % (moves_SDozA.product_uom_qty)) self.assertEqual(moves_SDozA.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product SDozA.') # Check product SDozARound done quantity. moves_SDozARound = self.MoveObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(moves_SDozARound.product_uom_qty, 1584, 'Wrong move quantity (%s found instead of 1584)' % (moves_SDozARound.product_uom_qty)) self.assertEqual(moves_SDozARound.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product SDozARound.') # Check product kgB done quantity. moves_kgB = self.MoveObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_in_B.id)], limit=1) self.assertEqual(moves_kgB.product_uom_qty, 20, 'Wrong quantity in move (%s found instead of 20)' % (moves_kgB.product_uom_qty)) self.assertEqual(moves_kgB.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product kgB.') # Check two moves created for product gB with quantity (0.525 kg and 0.3 g) moves_gB_kg = self.MoveObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id), ('product_uom', '=', self.uom_kg.id)], limit=1) self.assertEqual(moves_gB_kg.product_uom_qty, 0.525, 'Wrong move quantity (%s found instead of 0.525)' % (moves_gB_kg.product_uom_qty)) self.assertEqual(moves_gB_kg.product_uom.id, self.uom_kg.id, 'Wrong uom in move for product gB.') moves_gB_g = self.MoveObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id), ('product_uom', '=', self.uom_gm.id)], limit=1) self.assertEqual(moves_gB_g.product_uom_qty, 0.3, 'Wrong move quantity (%s found instead of 0.3)' % (moves_gB_g.product_uom_qty)) self.assertEqual(moves_gB_g.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product gB.') # ---------------------------------------------------------------------- # Check Back order of Incoming shipment. # ---------------------------------------------------------------------- # Check back order created or not. bo_in_B = self.PickingObj.search([('backorder_id', '=', picking_in_B.id)]) self.assertEqual(len(bo_in_B), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(bo_in_B.move_lines), 1, 'Wrong number of move lines') # Check back order created with correct quantity and uom or not. moves_DozA = self.MoveObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', bo_in_B.id)], limit=1) self.assertEqual(moves_DozA.product_uom_qty, 24.0, 'Wrong move quantity (%s found instead of 0.525)' % (moves_DozA.product_uom_qty)) self.assertEqual(moves_DozA.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product DozA.') # ---------------------------------------------------------------------- # Check product stock location quantity and quantity available. # ---------------------------------------------------------------------- # Check quants and available quantity for product DozA quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 18, 'Expecting 18 Dozen , got %.4f Dozen on location stock!' 
% (sum(total_qty))) self.assertEqual(self.DozA.qty_available, 18, 'Wrong quantity available (%s found instead of 18)' % (self.DozA.qty_available)) # Check quants and available quantity for product SDozA quants = self.StockQuantObj.search([('product_id', '=', self.SDozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 21, 'Expecting 18 SDozen , got %.4f SDozen on location stock!' % (sum(total_qty))) self.assertEqual(self.SDozA.qty_available, 21, 'Wrong quantity available (%s found instead of 21)' % (self.SDozA.qty_available)) # Check quants and available quantity for product SDozARound quants = self.StockQuantObj.search([('product_id', '=', self.SDozARound.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 22, 'Expecting 22 SDozenRound , got %.4f SDozenRound on location stock!' % (sum(total_qty))) self.assertEqual(self.SDozARound.qty_available, 22, 'Wrong quantity available (%s found instead of 22)' % (self.SDozARound.qty_available)) # Check quants and available quantity for product gB. quants = self.StockQuantObj.search([('product_id', '=', self.gB.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 1050.6, 'Expecting 1050.6 Gram , got %.4f Gram on location stock!' % (sum(total_qty))) self.assertEqual(self.gB.qty_available, 1050.6, 'Wrong quantity available (%s found instead of 1050.6)' % (self.gB.qty_available)) # Check quants and available quantity for product kgB. quants = self.StockQuantObj.search([('product_id', '=', self.kgB.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 0.040, 'Expecting 0.040 kg , got %.4f kg on location stock!' % (sum(total_qty))) self.assertEqual(self.kgB.qty_available, 0.040, 'Wrong quantity available (%s found instead of 0.040)' % (self.kgB.qty_available)) # ---------------------------------------------------------------------- # Create outgoing shipment. # ---------------------------------------------------------------------- before_out_quantity = self.kgB.qty_available picking_out = self.PickingObj.create({ 'partner_id': self.partner_agrolite_id, 'picking_type_id': self.picking_type_out}) self.MoveObj.create({ 'name': self.kgB.name, 'product_id': self.kgB.id, 'product_uom_qty': 0.966, 'product_uom': self.uom_gm.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.kgB.name, 'product_id': self.kgB.id, 'product_uom_qty': 0.034, 'product_uom': self.uom_gm.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) picking_out.action_confirm() picking_out.action_assign() picking_out.do_prepare_partial() picking_out.do_transfer() # Check quantity difference after stock transfer. quantity_diff = before_out_quantity - self.kgB.qty_available self.assertEqual(float_round(quantity_diff, precision_rounding=0.0001), 0.001, 'Wrong quantity diffrence.') self.assertEqual(self.kgB.qty_available, 0.039, 'Wrong quantity available (%s found instead of 0.039)' % (self.kgB.qty_available)) # ====================================================================== # Outgoing shipments. # ====================================================================== # Create Outgoing shipment with ... 
# product DozA ( 54 Unit ) , SDozA ( 288 Unit ) # product SDozRound ( 360 unit ) , product gB ( 0.503 kg ) # product kgB ( 19 g ) # ====================================================================== picking_out = self.PickingObj.create({ 'partner_id': self.partner_agrolite_id, 'picking_type_id': self.picking_type_out}) self.MoveObj.create({ 'name': self.DozA.name, 'product_id': self.DozA.id, 'product_uom_qty': 54, 'product_uom': self.uom_unit.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.SDozA.name, 'product_id': self.SDozA.id, 'product_uom_qty': 288, 'product_uom': self.uom_unit.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.SDozARound.name, 'product_id': self.SDozARound.id, 'product_uom_qty': 360, 'product_uom': self.uom_unit.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.gB.name, 'product_id': self.gB.id, 'product_uom_qty': 0.503, 'product_uom': self.uom_kg.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) self.MoveObj.create({ 'name': self.kgB.name, 'product_id': self.kgB.id, 'product_uom_qty': 20, 'product_uom': self.uom_gm.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) # Confirm outgoing shipment. picking_out.action_confirm() for move in picking_out.move_lines: self.assertEqual(move.state, 'confirmed', 'Wrong state of move line.') # Assing product to outgoing shipments picking_out.action_assign() for move in picking_out.move_lines: self.assertEqual(move.state, 'assigned', 'Wrong state of move line.') # Check product A available quantity DozA_qty = self.MoveObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(DozA_qty, 4.5, 'Wrong move quantity availability (%s found instead of 4.5)' % (DozA_qty)) # Check product B available quantity SDozA_qty = self.MoveObj.search([('product_id', '=', self.SDozA.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(SDozA_qty, 2, 'Wrong move quantity availability (%s found instead of 2)' % (SDozA_qty)) # Check product C available quantity SDozARound_qty = self.MoveObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(SDozARound_qty, 3, 'Wrong move quantity availability (%s found instead of 3)' % (SDozARound_qty)) # Check product D available quantity gB_qty = self.MoveObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(gB_qty, 503, 'Wrong move quantity availability (%s found instead of 503)' % (gB_qty)) # Check product D available quantity kgB_qty = self.MoveObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_out.id)], limit=1).availability self.assertEqual(kgB_qty, 0.020, 'Wrong move quantity availability (%s found instead of 0.020)' % (kgB_qty)) picking_out.action_confirm() picking_out.action_assign() picking_out.do_prepare_partial() picking_out.do_transfer() # ---------------------------------------------------------------------- # Check product stock location quantity and quantity available. 
# ---------------------------------------------------------------------- # Check quants and available quantity for product DozA quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 13.5, 'Expecting 13.5 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty))) self.assertEqual(self.DozA.qty_available, 13.5, 'Wrong quantity available (%s found instead of 13.5)' % (self.DozA.qty_available)) # Check quants and available quantity for product SDozA quants = self.StockQuantObj.search([('product_id', '=', self.SDozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 19, 'Expecting 19 SDozen , got %.4f SDozen on location stock!' % (sum(total_qty))) self.assertEqual(self.SDozA.qty_available, 19, 'Wrong quantity available (%s found instead of 19)' % (self.SDozA.qty_available)) # Check quants and available quantity for product SDozARound quants = self.StockQuantObj.search([('product_id', '=', self.SDozARound.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 19, 'Expecting 19 SDozRound , got %.4f SDozRound on location stock!' % (sum(total_qty))) self.assertEqual(self.SDozARound.qty_available, 19, 'Wrong quantity available (%s found instead of 19)' % (self.SDozARound.qty_available)) # Check quants and available quantity for product gB. quants = self.StockQuantObj.search([('product_id', '=', self.gB.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(float_round(sum(total_qty), precision_rounding=0.0001), 547.6, 'Expecting 547.6 g , got %.4f g on location stock!' % (sum(total_qty))) self.assertEqual(self.gB.qty_available, 547.6, 'Wrong quantity available (%s found instead of 547.6)' % (self.gB.qty_available)) # Check quants and available quantity for product kgB. quants = self.StockQuantObj.search([('product_id', '=', self.kgB.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 0.019, 'Expecting 0.019 kg , got %.4f kg on location stock!' % (sum(total_qty))) self.assertEqual(self.kgB.qty_available, 0.019, 'Wrong quantity available (%s found instead of 0.019)' % (self.kgB.qty_available)) # ---------------------------------------------------------------------- # Receipt back order of incoming shipment. # ---------------------------------------------------------------------- bo_in_B.do_prepare_partial() bo_in_B.do_transfer() # Check quants and available quantity for product kgB. quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 15.5, 'Expecting 15.5 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty))) self.assertEqual(self.DozA.qty_available, 15.5, 'Wrong quantity available (%s found instead of 15.5)' % (self.DozA.qty_available)) # ----------------------------------------- # Create product in kg and receive in ton. 
# ----------------------------------------- productKG = self.ProductObj.create({'name': 'Product KG', 'uom_id': self.uom_kg.id, 'uom_po_id': self.uom_kg.id}) picking_in = self.PickingObj.create({ 'partner_id': self.partner_delta_id, 'picking_type_id': self.picking_type_in}) self.MoveObj.create({ 'name': productKG.name, 'product_id': productKG.id, 'product_uom_qty': 1.0, 'product_uom': self.uom_tone.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) # Check incoming shipment state. self.assertEqual(picking_in.state, 'draft', 'Incoming shipment state should be draft.') # Check incoming shipment move lines state. for move in picking_in.move_lines: self.assertEqual(move.state, 'draft', 'Wrong state of move line.') # Confirm incoming shipment. picking_in.action_confirm() # Check incoming shipment move lines state. for move in picking_in.move_lines: self.assertEqual(move.state, 'assigned', 'Wrong state of move line.') picking_in.do_prepare_partial() # Check pack operation quantity. packKG = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', picking_in.id)], limit=1) self.assertEqual(packKG.product_qty, 1000, 'Wrong product quantity in pack operation (%s found instead of 1000)' % (packKG.product_qty)) self.assertEqual(packKG.product_uom_id.id, self.uom_kg.id, 'Wrong product uom in pack operation.') # Transfer Incoming shipment. picking_in.do_transfer() #----------------------------------------------------------------------- # Check incoming shipment after transfer. #----------------------------------------------------------------------- # Check incoming shipment state. self.assertEqual(picking_in.state, 'done', 'Incoming shipment state should be done.') # Check incoming shipment move lines state. for move in picking_in.move_lines: self.assertEqual(move.state, 'done', 'Wrong state of move lines.') # Check total done move lines for incoming shipment. self.assertEqual(len(picking_in.move_lines), 1, 'Wrong number of move lines') # Check product DozA done quantity. move = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', picking_in.id)], limit=1) self.assertEqual(move.product_uom_qty, 1, 'Wrong product quantity in done move.') self.assertEqual(move.product_uom.id, self.uom_tone.id, 'Wrong unit of measure in done move.') self.assertEqual(productKG.qty_available, 1000, 'Wrong quantity available of product (%s found instead of 1000)' % (productKG.qty_available)) picking_out = self.PickingObj.create({ 'partner_id': self.partner_agrolite_id, 'picking_type_id': self.picking_type_out}) self.MoveObj.create({ 'name': productKG.name, 'product_id': productKG.id, 'product_uom_qty': 2.5, 'product_uom': self.uom_gm.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) picking_out.action_confirm() picking_out.action_assign() picking_out.do_prepare_partial() pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', picking_out.id)], limit=1) pack_opt.write({'product_qty': 0.5}) picking_out.do_transfer() quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] # Check total quantity stock location. self.assertEqual(sum(total_qty), 999.9995, 'Expecting 999.9995 kg , got %.4f kg on location stock!' % (sum(total_qty))) # Check Back order created or not. 
#--------------------------------- bo_out_1 = self.PickingObj.search([('backorder_id', '=', picking_out.id)]) self.assertEqual(len(bo_out_1), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(bo_out_1.move_lines), 1, 'Wrong number of move lines') moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_1.id)], limit=1) # Check back order created with correct quantity and uom or not. self.assertEqual(moves_KG.product_uom_qty, 2.0, 'Wrong move quantity (%s found instead of 2.0)' % (moves_KG.product_uom_qty)) self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.') bo_out_1.action_assign() bo_out_1.do_prepare_partial() pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_1.id)], limit=1) pack_opt.write({'product_qty': 0.5}) bo_out_1.do_transfer() quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] # Check total quantity stock location. self.assertEqual(sum(total_qty), 999.9990, 'Expecting 999.9990 kg , got %.4f kg on location stock!' % (sum(total_qty))) # Check Back order created or not. #--------------------------------- bo_out_2 = self.PickingObj.search([('backorder_id', '=', bo_out_1.id)]) self.assertEqual(len(bo_out_2), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(bo_out_2.move_lines), 1, 'Wrong number of move lines') # Check back order created with correct move quantity and uom or not. moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_2.id)], limit=1) self.assertEqual(moves_KG.product_uom_qty, 1.5, 'Wrong move quantity (%s found instead of 1.5)' % (moves_KG.product_uom_qty)) self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.') bo_out_2.action_assign() bo_out_2.do_prepare_partial() pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_2.id)], limit=1) pack_opt.write({'product_qty': 0.5}) bo_out_2.do_transfer() # Check total quantity stock location of product KG. quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 999.9985, 'Expecting 999.9985 kg , got %.4f kg on location stock!' % (sum(total_qty))) # Check Back order created or not. #--------------------------------- bo_out_3 = self.PickingObj.search([('backorder_id', '=', bo_out_2.id)]) self.assertEqual(len(bo_out_3), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(bo_out_3.move_lines), 1, 'Wrong number of move lines') # Check back order created with correct quantity and uom or not. 
moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_3.id)], limit=1) self.assertEqual(moves_KG.product_uom_qty, 1, 'Wrong move quantity (%s found instead of 1.0)' % (moves_KG.product_uom_qty)) self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.') bo_out_3.action_assign() bo_out_3.do_prepare_partial() pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_3.id)], limit=1) pack_opt.write({'product_qty': 0.5}) bo_out_3.do_transfer() quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 999.9980, 'Expecting 999.9980 kg , got %.4f kg on location stock!' % (sum(total_qty))) # Check Back order created or not. #--------------------------------- bo_out_4 = self.PickingObj.search([('backorder_id', '=', bo_out_3.id)]) self.assertEqual(len(bo_out_4), 1, 'Back order should be created.') # Check total move lines of back order. self.assertEqual(len(bo_out_4.move_lines), 1, 'Wrong number of move lines') # Check back order created with correct quantity and uom or not. moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_4.id)], limit=1) self.assertEqual(moves_KG.product_uom_qty, 0.5, 'Wrong move quantity (%s found instead of 0.5)' % (moves_KG.product_uom_qty)) self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.') bo_out_4.action_assign() bo_out_4.do_prepare_partial() pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_4.id)], limit=1) pack_opt.write({'product_qty': 0.5}) bo_out_4.do_transfer() quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 999.9975, 'Expecting 999.9975 kg , got %.4f kg on location stock!' % (sum(total_qty))) def test_20_create_inventory_with_different_uom(self): """Create inventory with different unit of measure.""" # ------------------------------------------------ # Test inventory with product A(Unit). # ------------------------------------------------ inventory = self.InvObj.create({'name': 'Test', 'product_id': self.UnitA.id, 'filter': 'product'}) inventory.prepare_inventory() self.assertFalse(inventory.line_ids, "Inventory line should not created.") inventory_line = self.InvLineObj.create({ 'inventory_id': inventory.id, 'product_id': self.UnitA.id, 'product_uom_id': self.uom_dozen.id, 'product_qty': 10, 'location_id': self.stock_location}) inventory.action_done() # Check quantity available of product UnitA. quants = self.StockQuantObj.search([('product_id', '=', self.UnitA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 120, 'Expecting 120 Units , got %.4f Units on location stock!' % (sum(total_qty))) self.assertEqual(self.UnitA.qty_available, 120, 'Expecting 120 Units , got %.4f Units of quantity available!' % (self.UnitA.qty_available)) # Create Inventory again for product UnitA. 
inventory = self.InvObj.create({'name': 'Test', 'product_id': self.UnitA.id, 'filter': 'product'}) inventory.prepare_inventory() self.assertEqual(len(inventory.line_ids), 1, "One inventory line should be created.") inventory_line = self.InvLineObj.search([('product_id', '=', self.UnitA.id), ('inventory_id', '=', inventory.id)], limit=1) self.assertEqual(inventory_line.product_qty, 120, "Wrong product quantity in inventory line.") # Modify the inventory line and set the quantity to 144 product on this new inventory. inventory_line.write({'product_qty': 144}) inventory.action_done() move = self.MoveObj.search([('product_id', '=', self.UnitA.id), ('inventory_id', '=', inventory.id)], limit=1) self.assertEqual(move.product_uom_qty, 24, "Wrong move quantity of product UnitA.") # Check quantity available of product UnitA. quants = self.StockQuantObj.search([('product_id', '=', self.UnitA.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 144, 'Expecting 144 Units , got %.4f Units on location stock!' % (sum(total_qty))) self.assertEqual(self.UnitA.qty_available, 144, 'Expecting 144 Units , got %.4f Units of quantity available!' % (self.UnitA.qty_available)) # ------------------------------------------------ # Test inventory with product KG. # ------------------------------------------------ productKG = self.ProductObj.create({'name': 'Product KG', 'uom_id': self.uom_kg.id, 'uom_po_id': self.uom_kg.id}) inventory = self.InvObj.create({'name': 'Inventory Product KG', 'product_id': productKG.id, 'filter': 'product'}) inventory.prepare_inventory() self.assertFalse(inventory.line_ids, "Inventory line should not created.") inventory_line = self.InvLineObj.create({ 'inventory_id': inventory.id, 'product_id': productKG.id, 'product_uom_id': self.uom_tone.id, 'product_qty': 5, 'location_id': self.stock_location}) inventory.action_done() quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 5000, 'Expecting 5000 kg , got %.4f kg on location stock!' % (sum(total_qty))) self.assertEqual(productKG.qty_available, 5000, 'Expecting 5000 kg , got %.4f kg of quantity available!' % (productKG.qty_available)) # Create Inventory again. inventory = self.InvObj.create({'name': 'Test', 'product_id': productKG.id, 'filter': 'product'}) inventory.prepare_inventory() self.assertEqual(len(inventory.line_ids), 1, "One inventory line should be created.") inventory_line = self.InvLineObj.search([('product_id', '=', productKG.id), ('inventory_id', '=', inventory.id)], limit=1) self.assertEqual(inventory_line.product_qty, 5000, "Wrong product quantity in inventory line.") # Modify the inventory line and set the quantity to 4000 product on this new inventory. inventory_line.write({'product_qty': 4000}) inventory.action_done() # Check inventory move quantity of product KG. move = self.MoveObj.search([('product_id', '=', productKG.id), ('inventory_id', '=', inventory.id)], limit=1) self.assertEqual(move.product_uom_qty, 1000, "Wrong move quantity of product KG.") # Check quantity available of product KG. quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)]) total_qty = [quant.qty for quant in quants] self.assertEqual(sum(total_qty), 4000, 'Expecting 4000 kg , got %.4f on location stock!' 
% (sum(total_qty))) self.assertEqual(productKG.qty_available, 4000, 'Expecting 4000 kg , got %.4f of quantity available!' % (productKG.qty_available)) #-------------------------------------------------------- # TEST PARTIAL INVENTORY WITH PACKS and LOTS #--------------------------------------------------------- packproduct = self.ProductObj.create({'name': 'Pack Product', 'uom_id': self.uom_unit.id, 'uom_po_id': self.uom_unit.id}) lotproduct = self.ProductObj.create({'name': 'Lot Product', 'uom_id': self.uom_unit.id, 'uom_po_id': self.uom_unit.id}) inventory = self.InvObj.create({'name': 'Test Partial and Pack', 'filter': 'partial', 'location_id': self.stock_location}) inventory.prepare_inventory() pack_obj = self.env['stock.quant.package'] lot_obj = self.env['stock.production.lot'] pack1 = pack_obj.create({'name': 'PACK00TEST1'}) pack2 = pack_obj.create({'name': 'PACK00TEST2'}) lot1 = lot_obj.create({'name': 'Lot001', 'product_id': lotproduct.id}) move = self.MoveObj.search([('product_id', '=', productKG.id), ('inventory_id', '=', inventory.id)], limit=1) self.assertEqual(len(move), 0, "Partial filter should not create a lines upon prepare") line_vals = [] line_vals += [{'location_id': self.stock_location, 'product_id': packproduct.id, 'product_qty': 10, 'product_uom_id': packproduct.uom_id.id}] line_vals += [{'location_id': self.stock_location, 'product_id': packproduct.id, 'product_qty': 20, 'product_uom_id': packproduct.uom_id.id, 'package_id': pack1.id}] line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 30, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': lot1.id}] line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 25, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': False}] inventory.write({'line_ids': [(0, 0, x) for x in line_vals]}) inventory.action_done() self.assertEqual(packproduct.qty_available, 30, "Wrong qty available for packproduct") self.assertEqual(lotproduct.qty_available, 55, "Wrong qty available for lotproduct") quants = self.StockQuantObj.search([('product_id', '=', packproduct.id), ('location_id', '=', self.stock_location), ('package_id', '=', pack1.id)]) total_qty = sum([quant.qty for quant in quants]) self.assertEqual(total_qty, 20, 'Expecting 20 units on package 1 of packproduct, but we got %.4f on location stock!' 
% (total_qty)) #Create an inventory that will put the lots without lot to 0 and check that taking without pack will not take it from the pack inventory2 = self.InvObj.create({'name': 'Test Partial Lot and Pack2', 'filter': 'partial', 'location_id': self.stock_location}) inventory2.prepare_inventory() line_vals = [] line_vals += [{'location_id': self.stock_location, 'product_id': packproduct.id, 'product_qty': 20, 'product_uom_id': packproduct.uom_id.id}] line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 0, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': False}] line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 10, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': lot1.id}] inventory2.write({'line_ids': [(0, 0, x) for x in line_vals]}) inventory2.action_done() self.assertEqual(packproduct.qty_available, 40, "Wrong qty available for packproduct") self.assertEqual(lotproduct.qty_available, 10, "Wrong qty available for lotproduct") quants = self.StockQuantObj.search([('product_id', '=', lotproduct.id), ('location_id', '=', self.stock_location), ('lot_id', '=', lot1.id)]) total_qty = sum([quant.qty for quant in quants]) self.assertEqual(total_qty, 10, 'Expecting 0 units lot of lotproduct, but we got %.4f on location stock!' % (total_qty)) quants = self.StockQuantObj.search([('product_id', '=', lotproduct.id), ('location_id', '=', self.stock_location), ('lot_id', '=', False)]) total_qty = sum([quant.qty for quant in quants]) self.assertEqual(total_qty, 0, 'Expecting 0 units lot of lotproduct, but we got %.4f on location stock!' % (total_qty)) def test_30_create_in_out_with_product_pack_lines(self): picking_in = self.PickingObj.create({ 'partner_id': self.partner_delta_id, 'picking_type_id': self.picking_type_in}) self.MoveObj.create({ 'name': self.productE.name, 'product_id': self.productE.id, 'product_uom_qty': 10, 'product_uom': self.productE.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) picking_in.action_confirm() picking_in.do_prepare_partial() pack_obj = self.env['stock.quant.package'] pack1 = pack_obj.create({'name': 'PACKINOUTTEST1'}) pack2 = pack_obj.create({'name': 'PACKINOUTTEST2'}) picking_in.pack_operation_ids[0].result_package_id = pack1 picking_in.pack_operation_ids[0].product_qty = 4 packop2 = picking_in.pack_operation_ids[0].copy() packop2.product_qty = 6 packop2.result_package_id = pack2 picking_in.do_transfer() self.assertEqual(sum([x.qty for x in picking_in.move_lines[0].quant_ids]), 10.0, 'Expecting 10 pieces in stock') #check the quants are in the package self.assertEqual(sum(x.qty for x in pack1.quant_ids), 4.0, 'Pack 1 should have 4 pieces') self.assertEqual(sum(x.qty for x in pack2.quant_ids), 6.0, 'Pack 2 should have 6 pieces') picking_out = self.PickingObj.create({ 'partner_id': self.partner_agrolite_id, 'picking_type_id': self.picking_type_out}) self.MoveObj.create({ 'name': self.productE.name, 'product_id': self.productE.id, 'product_uom_qty': 3, 'product_uom': self.productE.uom_id.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) picking_out.action_confirm() picking_out.action_assign() picking_out.do_prepare_partial() packout1 = picking_out.pack_operation_ids[0] packout2 = picking_out.pack_operation_ids[0].copy() packout1.product_qty = 2 packout1.package_id = pack1 packout2.package_id = pack2 packout2.product_qty = 1 
picking_out.do_transfer() #Check there are no negative quants neg_quants = self.env['stock.quant'].search([('product_id', '=', self.productE.id), ('qty', '<', 0.0)]) self.assertEqual(len(neg_quants), 0, 'There are negative quants!') self.assertEqual(len(picking_out.move_lines[0].linked_move_operation_ids), 2, 'We should have 2 links in the matching between the move and the operations') self.assertEqual(len(picking_out.move_lines[0].quant_ids), 2, 'We should have exactly 2 quants in the end') def test_40_create_in_out_with_product_pack_lines(self): picking_in = self.PickingObj.create({ 'partner_id': self.partner_delta_id, 'picking_type_id': self.picking_type_in}) self.MoveObj.create({ 'name': self.productE.name, 'product_id': self.productE.id, 'product_uom_qty': 200, 'product_uom': self.productE.uom_id.id, 'picking_id': picking_in.id, 'location_id': self.supplier_location, 'location_dest_id': self.stock_location}) picking_in.action_confirm() picking_in.do_prepare_partial() pack_obj = self.env['stock.quant.package'] pack1 = pack_obj.create({'name': 'PACKINOUTTEST1'}) pack2 = pack_obj.create({'name': 'PACKINOUTTEST2'}) picking_in.pack_operation_ids[0].result_package_id = pack1 picking_in.pack_operation_ids[0].product_qty = 120 packop2 = picking_in.pack_operation_ids[0].copy() packop2.product_qty = 80 packop2.result_package_id = pack2 picking_in.do_transfer() self.assertEqual(sum([x.qty for x in picking_in.move_lines[0].quant_ids]), 200.0, 'Expecting 200 pieces in stock') #check the quants are in the package self.assertEqual(sum(x.qty for x in pack1.quant_ids), 120, 'Pack 1 should have 120 pieces') self.assertEqual(sum(x.qty for x in pack2.quant_ids), 80, 'Pack 2 should have 80 pieces') picking_out = self.PickingObj.create({ 'partner_id': self.partner_agrolite_id, 'picking_type_id': self.picking_type_out}) self.MoveObj.create({ 'name': self.productE.name, 'product_id': self.productE.id, 'product_uom_qty': 200 , 'product_uom': self.productE.uom_id.id, 'picking_id': picking_out.id, 'location_id': self.stock_location, 'location_dest_id': self.customer_location}) picking_out.action_confirm() picking_out.action_assign() picking_out.do_prepare_partial() #Convert entire packs into taking out of packs packout0 = picking_out.pack_operation_ids[0] packout1 = picking_out.pack_operation_ids[1] packout0.write({'product_id': self.productE.id, 'product_qty' : 120.0, 'product_uom_id' : self.productE.uom_id.id, 'package_id': pack1.id, }) packout1.write({'product_id': self.productE.id, 'product_qty' : 80.0, 'product_uom_id' : self.productE.uom_id.id, 'package_id': pack2.id, }) picking_out.do_transfer() #Check there are no negative quants neg_quants = self.env['stock.quant'].search([('product_id', '=', self.productE.id), ('qty', '<', 0.0)]) self.assertEqual(len(neg_quants), 0, 'There are negative quants!') # We should also make sure that when matching stock moves with pack operations, it takes the correct self.assertEqual(len(picking_out.move_lines[0].linked_move_operation_ids), 2, 'We should only have 2 links beween the move and the 2 operations') self.assertEqual(len(picking_out.move_lines[0].quant_ids), 2, 'We should have exactly 2 quants in the end') # Do not forward port in 10.0 and beyond def test_inventory_adjustment_and_negative_quants_1(self): """Make sure negative quants from returns get wiped out with an inventory adjustment""" productA = self.env['product.product'].create({'name': 'Product A', 'type': 'product'}) stock_location = self.env.ref('stock.stock_location_stock') customer_location = 
self.env.ref('stock.stock_location_customers') location_loss = self.env.ref('stock.location_inventory') # Create a picking out and force availability picking_out = self.env['stock.picking'].create({ 'partner_id': self.env.ref('base.res_partner_2').id, 'picking_type_id': self.env.ref('stock.picking_type_out').id, 'location_id': stock_location.id, 'location_dest_id': customer_location.id, }) self.env['stock.move'].create({ 'name': productA.name, 'product_id': productA.id, 'product_uom_qty': 1, 'product_uom': productA.uom_id.id, 'picking_id': picking_out.id, 'location_id': stock_location.id, 'location_dest_id': customer_location.id, }) picking_out.action_confirm() picking_out.force_assign() picking_out.do_transfer() # Create return picking for all goods default_data = self.env['stock.return.picking']\ .with_context(active_ids=picking_out.ids, active_id=picking_out.ids[0])\ .default_get([ 'move_dest_exists', 'product_return_moves' ]) list_return_moves = default_data['product_return_moves'] default_data['product_return_moves'] = [(0, 0, return_move) for return_move in list_return_moves] return_wiz = self.env['stock.return.picking']\ .with_context(active_ids=picking_out.ids, active_id=picking_out.ids[0])\ .create(default_data) res = return_wiz._create_returns()[0] return_pick = self.env['stock.picking'].browse(res) return_pick.action_assign() return_pick.do_transfer() # Make an inventory adjustment to set the quantity to 0 inventory = self.env['stock.inventory'].create({ 'name': 'Starting for product_1', 'filter': 'product', 'location_id': stock_location.id, 'product_id': productA.id, }) inventory.prepare_inventory() self.assertEqual(len(inventory.line_ids), 1, "Wrong inventory lines generated.") self.assertEqual(inventory.line_ids.theoretical_qty, 0, "Theoretical quantity should be zero.") inventory.action_done() # The inventory adjustment should have created two moves self.assertEqual(len(inventory.move_ids), 2) quantity = inventory.move_ids.mapped('product_qty') self.assertEqual(quantity, [1, 1], "Moves created with wrong quantity.") location_ids = inventory.move_ids.mapped('location_id').ids self.assertEqual(set(location_ids), {stock_location.id, location_loss.id}) # There should be no quant in the stock location quants = self.env['stock.quant'].search([('product_id', '=', productA.id), ('location_id', '=', stock_location.id)]) self.assertEqual(len(quants), 0) # There should be one quant in the inventory loss location quant = self.env['stock.quant'].search([('product_id', '=', productA.id), ('location_id', '=', location_loss.id)]) self.assertEqual(len(quant), 1) self.assertEqual(quant.qty, 1) def test_inventory_adjustment_and_negative_quants_2(self): """Make sure negative quants get wiped out with an inventory adjustment""" productA = self.env['product.product'].create({'name': 'Product A', 'type': 'product'}) stock_location = self.env.ref('stock.stock_location_stock') customer_location = self.env.ref('stock.stock_location_customers') location_loss = self.env.ref('stock.location_inventory') # Create a picking out and force availability picking_out = self.env['stock.picking'].create({ 'partner_id': self.env.ref('base.res_partner_2').id, 'picking_type_id': self.env.ref('stock.picking_type_out').id, 'location_id': stock_location.id, 'location_dest_id': customer_location.id, }) self.env['stock.move'].create({ 'name': productA.name, 'product_id': productA.id, 'product_uom_qty': 1, 'product_uom': productA.uom_id.id, 'picking_id': picking_out.id, 'location_id': stock_location.id, 
'location_dest_id': customer_location.id, }) picking_out.action_confirm() picking_out.force_assign() picking_out.do_transfer() # Make an inventory adjustment to set the quantity to 0 inventory = self.env['stock.inventory'].create({ 'name': 'Starting for product_1', 'filter': 'product', 'location_id': stock_location.id, 'product_id': productA.id, }) inventory.prepare_inventory() self.assertEqual(len(inventory.line_ids), 1, "Wrong inventory lines generated.") self.assertEqual(inventory.line_ids.theoretical_qty, -1, "Theoretical quantity should be -1.") inventory.line_ids.product_qty = 0 # Put the quantity back to 0 inventory.action_done() # The inventory adjustment should have created one self.assertEqual(len(inventory.move_ids), 1) quantity = inventory.move_ids.mapped('product_qty') self.assertEqual(quantity, [1], "Moves created with wrong quantity.") location_ids = inventory.move_ids.mapped('location_id').ids self.assertEqual(set(location_ids), {location_loss.id}) # There should be no quant in the stock location quants = self.env['stock.quant'].search([('product_id', '=', productA.id), ('location_id', '=', stock_location.id)]) self.assertEqual(len(quants), 0) # There should be no quant in the inventory loss location quant = self.env['stock.quant'].search([('product_id', '=', productA.id), ('location_id', '=', location_loss.id)]) self.assertEqual(len(quant), 0)
agpl-3.0
mobo95/pyload
module/web/filters.py
35
1482
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
from os.path import abspath, commonprefix, join

quotechar = "::/"

try:
    from os.path import relpath
except:
    from posixpath import curdir, sep, pardir

    def relpath(path, start=curdir):
        """Return a relative version of a path"""
        if not path:
            raise ValueError("no path specified")
        start_list = abspath(start).split(sep)
        path_list = abspath(path).split(sep)
        # Work out how much of the filepath is shared by start and path.
        i = len(commonprefix([start_list, path_list]))
        rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return curdir
        return join(*rel_list)


def quotepath(path):
    try:
        return path.replace("../", quotechar)
    except AttributeError:
        return path
    except:
        return ""


def unquotepath(path):
    try:
        return path.replace(quotechar, "../")
    except AttributeError:
        return path
    except:
        return ""


def path_make_absolute(path):
    p = os.path.abspath(path)
    if p[-1] == os.path.sep:
        return p
    else:
        return p + os.path.sep


def path_make_relative(path):
    p = relpath(path)
    if p[-1] == os.path.sep:
        return p
    else:
        return p + os.path.sep


def truncate(value, n):
    if (n - len(value)) < 3:
        return value[:n] + "..."
    return value


def date(date, format):
    return date
gpl-3.0
amwelch/a10sdk-python
a10sdk/core/cgnv6/cgnv6_lsn_alg_pptp.py
2
4919
from a10sdk.common.A10BaseClass import A10BaseClass class SamplingEnable(A10BaseClass): """This class does not support CRUD Operations please use parent. :param counters1: {"enum": ["all", "calls-established", "mismatched-pns-call-id", "gre-sessions-created", "gre-sessions-freed", "no-gre-session-match", "smp-sessions-created", "smp-sessions-freed", "smp-session-creation-failure", "extension-creation-failure", "ha-sent", "ha-rcv", "ha-no-mem", "ha-conflict", "ha-overwrite", "ha-call-sent", "ha-call-rcv", "ha-smp-conflict", "ha-smp-in-del-q", "smp-app-type-mismatch", "quota-inc", "quota-dec", "quota-inc-not-found", "quota-dec-not-found"], "type": "string", "description": "'all': all; 'calls-established': Calls Established; 'mismatched-pns-call-id': Mismatched PNS Call ID; 'gre-sessions-created': GRE Sessions Created; 'gre-sessions-freed': GRE Sessions Freed; 'no-gre-session-match': No Matching GRE Session; 'smp-sessions-created': SMP Sessions Created; 'smp-sessions-freed': SMP Sessions Freed; 'smp-session-creation-failure': SMP Session Creation Failures; 'extension-creation-failure': Extension Creation Failures; 'ha-sent': HA Info Sent; 'ha-rcv': HA Info Received; 'ha-no-mem': HA Memory Allocation Failure; 'ha-conflict': HA Call ID Conflicts; 'ha-overwrite': HA Call ID Overwrites; 'ha-call-sent': HA Call Sent; 'ha-call-rcv': HA Call Received; 'ha-smp-conflict': HA SMP Conflicts; 'ha-smp-in-del-q': HA SMP Deleted; 'smp-app-type-mismatch': SMP ALG App Type Mismatch; 'quota-inc': Quota Incremented; 'quota-dec': Quota Decremented; 'quota-inc-not-found': Quota Not Found on Increment; 'quota-dec-not-found': Quota Not Found on Decrement; ", "format": "enum"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` """ def __init__(self, **kwargs): self.ERROR_MSG = "" self.b_key = "sampling-enable" self.DeviceProxy = "" self.counters1 = "" for keys, value in kwargs.items(): setattr(self,keys, value) class Pptp(A10BaseClass): """ :param pptp_value: {"optional": true, "enum": ["enable"], "type": "string", "description": "'enable': Enable PPTP ALG for LSN; ", "format": "enum"} :param sampling_enable: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "counters1": {"enum": ["all", "calls-established", "mismatched-pns-call-id", "gre-sessions-created", "gre-sessions-freed", "no-gre-session-match", "smp-sessions-created", "smp-sessions-freed", "smp-session-creation-failure", "extension-creation-failure", "ha-sent", "ha-rcv", "ha-no-mem", "ha-conflict", "ha-overwrite", "ha-call-sent", "ha-call-rcv", "ha-smp-conflict", "ha-smp-in-del-q", "smp-app-type-mismatch", "quota-inc", "quota-dec", "quota-inc-not-found", "quota-dec-not-found"], "type": "string", "description": "'all': all; 'calls-established': Calls Established; 'mismatched-pns-call-id': Mismatched PNS Call ID; 'gre-sessions-created': GRE Sessions Created; 'gre-sessions-freed': GRE Sessions Freed; 'no-gre-session-match': No Matching GRE Session; 'smp-sessions-created': SMP Sessions Created; 'smp-sessions-freed': SMP Sessions Freed; 'smp-session-creation-failure': SMP Session Creation Failures; 'extension-creation-failure': Extension Creation Failures; 'ha-sent': HA Info Sent; 'ha-rcv': HA Info Received; 'ha-no-mem': HA Memory Allocation Failure; 'ha-conflict': HA Call ID Conflicts; 'ha-overwrite': HA Call ID Overwrites; 'ha-call-sent': HA Call Sent; 'ha-call-rcv': HA Call Received; 'ha-smp-conflict': HA SMP Conflicts; 
'ha-smp-in-del-q': HA SMP Deleted; 'smp-app-type-mismatch': SMP ALG App Type Mismatch; 'quota-inc': Quota Incremented; 'quota-dec': Quota Decremented; 'quota-inc-not-found': Quota Not Found on Increment; 'quota-dec-not-found': Quota Not Found on Decrement; ", "format": "enum"}}}]} :param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` Class Description:: Change LSN PPTP ALG Settings. Class pptp supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/cgnv6/lsn/alg/pptp`. """ def __init__(self, **kwargs): self.ERROR_MSG = "" self.required=[] self.b_key = "pptp" self.a10_url="/axapi/v3/cgnv6/lsn/alg/pptp" self.DeviceProxy = "" self.pptp_value = "" self.sampling_enable = [] self.uuid = "" for keys, value in kwargs.items(): setattr(self,keys, value)
apache-2.0
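A minimal usage sketch for the Pptp wrapper in the row above. It relies only on the kwargs-to-attribute pattern visible in __init__; how a DeviceProxy session is built is not shown in that file, so the REST side is left as an assumption.

# Hedged sketch: build a Pptp config object and enable the ALG.
# The file above only shows that DeviceProxy is stored on the instance;
# wiring it to a real device session is assumed, not shown here.
pptp = Pptp(pptp_value="enable",
            sampling_enable=[SamplingEnable(counters1="all")])
# pptp.a10_url points at /axapi/v3/cgnv6/lsn/alg/pptp, so a REST layer
# (e.g. the DeviceProxy) would PUT/POST this object's fields to that URL.
print(pptp.b_key, pptp.a10_url, pptp.pptp_value)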
Dunkas12/BeepBoopBot
lib/youtube_dl/extractor/telegraaf.py
52
2785
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    remove_end,
)


class TelegraafIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?telegraaf\.nl/tv/(?:[^/]+/)+(?P<id>\d+)/[^/]+\.html'
    _TEST = {
        'url': 'http://www.telegraaf.nl/tv/nieuws/binnenland/24353229/__Tikibad_ontruimd_wegens_brand__.html',
        'info_dict': {
            'id': '24353229',
            'ext': 'mp4',
            'title': 'Tikibad ontruimd wegens brand',
            'description': 'md5:05ca046ff47b931f9b04855015e163a4',
            'thumbnail': r're:^https?://.*\.jpg$',
            'duration': 33,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)

        webpage = self._download_webpage(url, video_id)

        player_url = self._html_search_regex(
            r'<iframe[^>]+src="([^"]+")', webpage, 'player URL')
        player_page = self._download_webpage(
            player_url, video_id, note='Download player webpage')
        playlist_url = self._search_regex(
            r'playlist\s*:\s*"([^"]+)"', player_page, 'playlist URL')
        playlist_data = self._download_json(playlist_url, video_id)

        item = playlist_data['items'][0]
        formats = []
        locations = item['locations']
        for location in locations.get('adaptive', []):
            manifest_url = location['src']
            ext = determine_ext(manifest_url)
            if ext == 'm3u8':
                formats.extend(self._extract_m3u8_formats(
                    manifest_url, video_id, ext='mp4', m3u8_id='hls', fatal=False))
            elif ext == 'mpd':
                formats.extend(self._extract_mpd_formats(
                    manifest_url, video_id, mpd_id='dash', fatal=False))
            else:
                self.report_warning('Unknown adaptive format %s' % ext)
        for location in locations.get('progressive', []):
            formats.append({
                'url': location['sources'][0]['src'],
                'width': location.get('width'),
                'height': location.get('height'),
                'format_id': 'http-%s' % location['label'],
            })

        self._sort_formats(formats)

        title = remove_end(self._og_search_title(webpage), ' - VIDEO')
        description = self._og_search_description(webpage)
        duration = item.get('duration')
        thumbnail = item.get('poster')

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'formats': formats,
            'duration': duration,
            'thumbnail': thumbnail,
        }
gpl-3.0
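For context, a short sketch of driving this extractor through youtube-dl's public entry point; the URL is the one from _TEST above, and download=False mirrors the skip_download test param.

# Hedged sketch: resolve the _TEST URL above with youtube-dl's public API.
import youtube_dl

with youtube_dl.YoutubeDL({'quiet': True}) as ydl:
    # download=False returns the metadata dict built by _real_extract()
    info = ydl.extract_info(
        'http://www.telegraaf.nl/tv/nieuws/binnenland/24353229/'
        '__Tikibad_ontruimd_wegens_brand__.html',
        download=False)
    print(info['id'], info['title'], info.get('duration'))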
Mazecreator/tensorflow
tensorflow/python/training/basic_session_run_hooks_test.py
1
43043
# pylint: disable=g-bad-file-header # Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for basic_session_run_hooks.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import shutil import tempfile import threading import time from tensorflow.contrib.framework.python.framework import checkpoint_utils from tensorflow.contrib.framework.python.ops import variables from tensorflow.contrib.testing.python.framework import fake_summary_writer from tensorflow.python.client import session as session_lib from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import meta_graph from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops import variables as variables_lib import tensorflow.python.ops.nn_grad # pylint: disable=unused-import from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging from tensorflow.python.summary import summary as summary_lib from tensorflow.python.summary.writer import writer_cache from tensorflow.python.training import basic_session_run_hooks from tensorflow.python.training import monitored_session from tensorflow.python.training import session_run_hook from tensorflow.python.training import training_util class MockCheckpointSaverListener( basic_session_run_hooks.CheckpointSaverListener): def __init__(self): self.begin_count = 0 self.before_save_count = 0 self.after_save_count = 0 self.end_count = 0 def begin(self): self.begin_count += 1 def before_save(self, session, global_step): self.before_save_count += 1 def after_save(self, session, global_step): self.after_save_count += 1 def end(self, session, global_step): self.end_count += 1 def get_counts(self): return { 'begin': self.begin_count, 'before_save': self.before_save_count, 'after_save': self.after_save_count, 'end': self.end_count } class SecondOrStepTimerTest(test.TestCase): def test_raise_in_both_secs_and_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.SecondOrStepTimer(every_secs=2.0, every_steps=10) def test_raise_in_none_secs_and_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.SecondOrStepTimer() def test_every_secs(self): timer = basic_session_run_hooks.SecondOrStepTimer(every_secs=1.0) self.assertTrue(timer.should_trigger_for_step(1)) timer.update_last_triggered_step(1) self.assertFalse(timer.should_trigger_for_step(1)) self.assertFalse(timer.should_trigger_for_step(2)) time.sleep(1.0) self.assertFalse(timer.should_trigger_for_step(1)) self.assertTrue(timer.should_trigger_for_step(2)) def test_every_steps(self): timer = 
basic_session_run_hooks.SecondOrStepTimer(every_steps=3) self.assertTrue(timer.should_trigger_for_step(1)) timer.update_last_triggered_step(1) self.assertFalse(timer.should_trigger_for_step(1)) self.assertFalse(timer.should_trigger_for_step(2)) self.assertFalse(timer.should_trigger_for_step(3)) self.assertTrue(timer.should_trigger_for_step(4)) def test_update_last_triggered_step(self): timer = basic_session_run_hooks.SecondOrStepTimer(every_steps=1) elapsed_secs, elapsed_steps = timer.update_last_triggered_step(1) self.assertEqual(None, elapsed_secs) self.assertEqual(None, elapsed_steps) elapsed_secs, elapsed_steps = timer.update_last_triggered_step(5) self.assertLess(0, elapsed_secs) self.assertEqual(4, elapsed_steps) elapsed_secs, elapsed_steps = timer.update_last_triggered_step(7) self.assertLess(0, elapsed_secs) self.assertEqual(2, elapsed_steps) class StopAtStepTest(test.TestCase): def test_raise_in_both_last_step_and_num_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.StopAtStepHook(num_steps=10, last_step=20) def test_stop_based_on_last_step(self): h = basic_session_run_hooks.StopAtStepHook(last_step=10) with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() no_op = control_flow_ops.no_op() h.begin() with session_lib.Session() as sess: mon_sess = monitored_session._HookedSession(sess, [h]) sess.run(state_ops.assign(global_step, 5)) h.after_create_session(sess, None) mon_sess.run(no_op) self.assertFalse(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 9)) mon_sess.run(no_op) self.assertFalse(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 10)) mon_sess.run(no_op) self.assertTrue(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 11)) mon_sess._should_stop = False mon_sess.run(no_op) self.assertTrue(mon_sess.should_stop()) def test_stop_based_on_num_step(self): h = basic_session_run_hooks.StopAtStepHook(num_steps=10) with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() no_op = control_flow_ops.no_op() h.begin() with session_lib.Session() as sess: mon_sess = monitored_session._HookedSession(sess, [h]) sess.run(state_ops.assign(global_step, 5)) h.after_create_session(sess, None) mon_sess.run(no_op) self.assertFalse(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 13)) mon_sess.run(no_op) self.assertFalse(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 14)) mon_sess.run(no_op) self.assertFalse(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 15)) mon_sess.run(no_op) self.assertTrue(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 16)) mon_sess._should_stop = False mon_sess.run(no_op) self.assertTrue(mon_sess.should_stop()) def test_stop_based_with_multiple_steps(self): h = basic_session_run_hooks.StopAtStepHook(num_steps=10) with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() no_op = control_flow_ops.no_op() h.begin() with session_lib.Session() as sess: mon_sess = monitored_session._HookedSession(sess, [h]) sess.run(state_ops.assign(global_step, 5)) h.after_create_session(sess, None) mon_sess.run(no_op) self.assertFalse(mon_sess.should_stop()) sess.run(state_ops.assign(global_step, 15)) mon_sess.run(no_op) self.assertTrue(mon_sess.should_stop()) class LoggingTensorHookTest(test.TestCase): def setUp(self): # Mock out logging calls so we can verify whether correct tensors are being # monitored. 
self._actual_log = tf_logging.info self.logged_message = None def mock_log(*args, **kwargs): self.logged_message = args self._actual_log(*args, **kwargs) tf_logging.info = mock_log def tearDown(self): tf_logging.info = self._actual_log def test_illegal_args(self): with self.assertRaisesRegexp(ValueError, 'nvalid every_n_iter'): basic_session_run_hooks.LoggingTensorHook(tensors=['t'], every_n_iter=0) with self.assertRaisesRegexp(ValueError, 'nvalid every_n_iter'): basic_session_run_hooks.LoggingTensorHook(tensors=['t'], every_n_iter=-10) with self.assertRaisesRegexp(ValueError, 'xactly one of'): basic_session_run_hooks.LoggingTensorHook( tensors=['t'], every_n_iter=5, every_n_secs=5) with self.assertRaisesRegexp(ValueError, 'xactly one of'): basic_session_run_hooks.LoggingTensorHook(tensors=['t']) def test_print_at_end_only(self): with ops.Graph().as_default(), session_lib.Session() as sess: t = constant_op.constant(42.0, name='foo') train_op = constant_op.constant(3) hook = basic_session_run_hooks.LoggingTensorHook( tensors=[t.name], at_end=True) hook.begin() mon_sess = monitored_session._HookedSession(sess, [hook]) sess.run(variables_lib.global_variables_initializer()) self.logged_message = '' for _ in range(3): mon_sess.run(train_op) # assertNotRegexpMatches is not supported by python 3.1 and later self.assertEqual(str(self.logged_message).find(t.name), -1) hook.end(sess) self.assertRegexpMatches(str(self.logged_message), t.name) def _validate_print_every_n_steps(self, sess, at_end): t = constant_op.constant(42.0, name='foo') train_op = constant_op.constant(3) hook = basic_session_run_hooks.LoggingTensorHook( tensors=[t.name], every_n_iter=10, at_end=at_end) hook.begin() mon_sess = monitored_session._HookedSession(sess, [hook]) sess.run(variables_lib.global_variables_initializer()) mon_sess.run(train_op) self.assertRegexpMatches(str(self.logged_message), t.name) for _ in range(3): self.logged_message = '' for _ in range(9): mon_sess.run(train_op) # assertNotRegexpMatches is not supported by python 3.1 and later self.assertEqual(str(self.logged_message).find(t.name), -1) mon_sess.run(train_op) self.assertRegexpMatches(str(self.logged_message), t.name) # Add additional run to verify proper reset when called multiple times. self.logged_message = '' mon_sess.run(train_op) # assertNotRegexpMatches is not supported by python 3.1 and later self.assertEqual(str(self.logged_message).find(t.name), -1) self.logged_message = '' hook.end(sess) if at_end: self.assertRegexpMatches(str(self.logged_message), t.name) else: # assertNotRegexpMatches is not supported by python 3.1 and later self.assertEqual(str(self.logged_message).find(t.name), -1) def test_print_every_n_steps(self): with ops.Graph().as_default(), session_lib.Session() as sess: self._validate_print_every_n_steps(sess, at_end=False) # Verify proper reset. self._validate_print_every_n_steps(sess, at_end=False) def test_print_every_n_steps_and_end(self): with ops.Graph().as_default(), session_lib.Session() as sess: self._validate_print_every_n_steps(sess, at_end=True) # Verify proper reset. self._validate_print_every_n_steps(sess, at_end=True) def test_print_first_step(self): # if it runs every iteration, first iteration has None duration. 
with ops.Graph().as_default(), session_lib.Session() as sess: t = constant_op.constant(42.0, name='foo') train_op = constant_op.constant(3) hook = basic_session_run_hooks.LoggingTensorHook( tensors={'foo': t}, every_n_iter=1) hook.begin() mon_sess = monitored_session._HookedSession(sess, [hook]) sess.run(variables_lib.global_variables_initializer()) mon_sess.run(train_op) self.assertRegexpMatches(str(self.logged_message), 'foo') # in first run, elapsed time is None. self.assertEqual(str(self.logged_message).find('sec'), -1) def _validate_print_every_n_secs(self, sess, at_end): t = constant_op.constant(42.0, name='foo') train_op = constant_op.constant(3) hook = basic_session_run_hooks.LoggingTensorHook( tensors=[t.name], every_n_secs=1.0, at_end=at_end) hook.begin() mon_sess = monitored_session._HookedSession(sess, [hook]) sess.run(variables_lib.global_variables_initializer()) mon_sess.run(train_op) self.assertRegexpMatches(str(self.logged_message), t.name) # assertNotRegexpMatches is not supported by python 3.1 and later self.logged_message = '' mon_sess.run(train_op) self.assertEqual(str(self.logged_message).find(t.name), -1) time.sleep(1.0) self.logged_message = '' mon_sess.run(train_op) self.assertRegexpMatches(str(self.logged_message), t.name) self.logged_message = '' hook.end(sess) if at_end: self.assertRegexpMatches(str(self.logged_message), t.name) else: # assertNotRegexpMatches is not supported by python 3.1 and later self.assertEqual(str(self.logged_message).find(t.name), -1) def test_print_every_n_secs(self): with ops.Graph().as_default(), session_lib.Session() as sess: self._validate_print_every_n_secs(sess, at_end=False) # Verify proper reset. self._validate_print_every_n_secs(sess, at_end=False) def test_print_every_n_secs_and_end(self): with ops.Graph().as_default(), session_lib.Session() as sess: self._validate_print_every_n_secs(sess, at_end=True) # Verify proper reset. 
self._validate_print_every_n_secs(sess, at_end=True) def test_print_formatter(self): with ops.Graph().as_default(), session_lib.Session() as sess: t = constant_op.constant(42.0, name='foo') train_op = constant_op.constant(3) hook = basic_session_run_hooks.LoggingTensorHook( tensors=[t.name], every_n_iter=10, formatter=lambda items: 'qqq=%s' % items[t.name]) hook.begin() mon_sess = monitored_session._HookedSession(sess, [hook]) sess.run(variables_lib.global_variables_initializer()) mon_sess.run(train_op) self.assertEqual(self.logged_message[0], 'qqq=42.0') class CheckpointSaverHookTest(test.TestCase): def setUp(self): self.model_dir = tempfile.mkdtemp() self.graph = ops.Graph() with self.graph.as_default(): self.scaffold = monitored_session.Scaffold() self.global_step = variables.get_or_create_global_step() self.train_op = training_util._increment_global_step(1) def tearDown(self): shutil.rmtree(self.model_dir, ignore_errors=True) def test_saves_when_saver_and_scaffold_both_missing(self): with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=1) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) self.assertEqual(1, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) def test_raise_when_saver_and_scaffold_both_present(self): with self.assertRaises(ValueError): basic_session_run_hooks.CheckpointSaverHook( self.model_dir, saver=self.scaffold.saver, scaffold=self.scaffold) def test_raise_in_both_secs_and_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_secs=10, save_steps=20) def test_raise_in_none_secs_and_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.CheckpointSaverHook(self.model_dir) def test_save_secs_saves_in_first_step(self): with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_secs=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) self.assertEqual(1, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) def test_save_secs_calls_listeners_at_begin_and_end(self): with self.graph.as_default(): listener = MockCheckpointSaverListener() hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_secs=2, scaffold=self.scaffold, listeners=[listener]) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) # hook runs here mon_sess.run(self.train_op) # hook won't run here, so it does at end hook.end(sess) # hook runs here self.assertEqual({ 'begin': 1, 'before_save': 2, 'after_save': 2, 'end': 1 }, listener.get_counts()) def test_listener_with_monitored_session(self): with ops.Graph().as_default(): scaffold = monitored_session.Scaffold() global_step = variables.get_or_create_global_step() train_op = training_util._increment_global_step(1) listener = MockCheckpointSaverListener() hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=1, scaffold=scaffold, listeners=[listener]) with monitored_session.SingularMonitoredSession( hooks=[hook], scaffold=scaffold, checkpoint_dir=self.model_dir) as 
sess: sess.run(train_op) sess.run(train_op) global_step_val = sess.raw_session().run(global_step) listener_counts = listener.get_counts() self.assertEqual(2, global_step_val) self.assertEqual({ 'begin': 1, 'before_save': 2, 'after_save': 2, 'end': 1 }, listener_counts) def test_listener_with_default_saver(self): with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() train_op = training_util._increment_global_step(1) listener = MockCheckpointSaverListener() hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=1, listeners=[listener]) with monitored_session.SingularMonitoredSession( hooks=[hook], checkpoint_dir=self.model_dir) as sess: sess.run(train_op) sess.run(train_op) global_step_val = sess.raw_session().run(global_step) listener_counts = listener.get_counts() self.assertEqual(2, global_step_val) self.assertEqual({ 'begin': 1, 'before_save': 2, 'after_save': 2, 'end': 1 }, listener_counts) with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() with monitored_session.SingularMonitoredSession( checkpoint_dir=self.model_dir) as sess2: global_step_saved_val = sess2.run(global_step) self.assertEqual(2, global_step_saved_val) def test_two_listeners_with_default_saver(self): with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() train_op = training_util._increment_global_step(1) listener1 = MockCheckpointSaverListener() listener2 = MockCheckpointSaverListener() hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=1, listeners=[listener1, listener2]) with monitored_session.SingularMonitoredSession( hooks=[hook], checkpoint_dir=self.model_dir) as sess: sess.run(train_op) sess.run(train_op) global_step_val = sess.raw_session().run(global_step) listener1_counts = listener1.get_counts() listener2_counts = listener2.get_counts() self.assertEqual(2, global_step_val) self.assertEqual({ 'begin': 1, 'before_save': 2, 'after_save': 2, 'end': 1 }, listener1_counts) self.assertEqual(listener1_counts, listener2_counts) with ops.Graph().as_default(): global_step = variables.get_or_create_global_step() with monitored_session.SingularMonitoredSession( checkpoint_dir=self.model_dir) as sess2: global_step_saved_val = sess2.run(global_step) self.assertEqual(2, global_step_saved_val) @test.mock.patch.object(time, 'time') def test_save_secs_saves_periodically(self, mock_time): # Let's have a realistic start time current_time = 1484695987.209386 with self.graph.as_default(): mock_time.return_value = current_time hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_secs=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mock_time.return_value = current_time mon_sess.run(self.train_op) # Saved. mock_time.return_value = current_time + 0.5 mon_sess.run(self.train_op) # Not saved. self.assertEqual(1, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) # Simulate 2.5 seconds of sleep. mock_time.return_value = current_time + 2.5 mon_sess.run(self.train_op) # Saved. mock_time.return_value = current_time + 2.6 mon_sess.run(self.train_op) # Not saved. mock_time.return_value = current_time + 2.7 mon_sess.run(self.train_op) # Not saved. self.assertEqual(3, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) # Simulate 7.5 more seconds of sleep (10 seconds from start. 
mock_time.return_value = current_time + 10 mon_sess.run(self.train_op) # Saved. self.assertEqual(6, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) @test.mock.patch.object(time, 'time') def test_save_secs_calls_listeners_periodically(self, mock_time): # Let's have a realistic start time current_time = 1484695987.209386 with self.graph.as_default(): mock_time.return_value = current_time listener = MockCheckpointSaverListener() hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_secs=2, scaffold=self.scaffold, listeners=[listener]) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mock_time.return_value = current_time + 0.5 mon_sess.run(self.train_op) # hook runs here mock_time.return_value = current_time + 0.5 mon_sess.run(self.train_op) mock_time.return_value = current_time + 3.0 mon_sess.run(self.train_op) # hook runs here mock_time.return_value = current_time + 3.5 mon_sess.run(self.train_op) mock_time.return_value = current_time + 4.0 mon_sess.run(self.train_op) mock_time.return_value = current_time + 6.5 mon_sess.run(self.train_op) # hook runs here mock_time.return_value = current_time + 7.0 mon_sess.run(self.train_op) # hook won't run here, so it does at end mock_time.return_value = current_time + 7.5 hook.end(sess) # hook runs here self.assertEqual({ 'begin': 1, 'before_save': 4, 'after_save': 4, 'end': 1 }, listener.get_counts()) def test_save_steps_saves_in_first_step(self): with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) self.assertEqual(1, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) def test_save_steps_saves_periodically(self): with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) mon_sess.run(self.train_op) # Not saved self.assertEqual(1, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) mon_sess.run(self.train_op) # saved self.assertEqual(3, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) mon_sess.run(self.train_op) # Not saved self.assertEqual(3, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) mon_sess.run(self.train_op) # saved self.assertEqual(5, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) def test_save_saves_at_end(self): with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_secs=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) mon_sess.run(self.train_op) hook.end(sess) self.assertEqual(2, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) def test_summary_writer_defs(self): fake_summary_writer.FakeSummaryWriter.install() writer_cache.FileWriterCache.clear() summary_writer = writer_cache.FileWriterCache.get(self.model_dir) 
with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) summary_writer.assert_summaries( test_case=self, expected_logdir=self.model_dir, expected_added_meta_graphs=[ meta_graph.create_meta_graph_def( graph_def=self.graph.as_graph_def(add_shapes=True), saver_def=self.scaffold.saver.saver_def) ]) fake_summary_writer.FakeSummaryWriter.uninstall() class ResourceCheckpointSaverHookTest(test.TestCase): def setUp(self): self.model_dir = tempfile.mkdtemp() self.graph = ops.Graph() with self.graph.as_default(): self.scaffold = monitored_session.Scaffold() with variable_scope.variable_scope('foo', use_resource=True): self.global_step = training_util.get_or_create_global_step() self.train_op = training_util._increment_global_step(1) def test_save_steps_saves_periodically(self): with self.graph.as_default(): hook = basic_session_run_hooks.CheckpointSaverHook( self.model_dir, save_steps=2, scaffold=self.scaffold) hook.begin() self.scaffold.finalize() with session_lib.Session() as sess: sess.run(self.scaffold.init_op) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(self.train_op) mon_sess.run(self.train_op) # Not saved self.assertEqual(1, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) mon_sess.run(self.train_op) # saved self.assertEqual(3, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) mon_sess.run(self.train_op) # Not saved self.assertEqual(3, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) mon_sess.run(self.train_op) # saved self.assertEqual(5, checkpoint_utils.load_variable(self.model_dir, self.global_step.name)) class StepCounterHookTest(test.TestCase): def setUp(self): self.log_dir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.log_dir, ignore_errors=True) def test_step_counter_every_n_steps(self): with ops.Graph().as_default() as g, session_lib.Session() as sess: variables.get_or_create_global_step() train_op = training_util._increment_global_step(1) summary_writer = fake_summary_writer.FakeSummaryWriter(self.log_dir, g) hook = basic_session_run_hooks.StepCounterHook( summary_writer=summary_writer, every_n_steps=10) hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) for _ in range(30): time.sleep(0.01) mon_sess.run(train_op) hook.end(sess) summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_graph=g, expected_summaries={}) self.assertItemsEqual([11, 21], summary_writer.summaries.keys()) for step in [11, 21]: summary_value = summary_writer.summaries[step][0].value[0] self.assertEqual('global_step/sec', summary_value.tag) self.assertGreater(summary_value.simple_value, 0) def test_step_counter_every_n_secs(self): with ops.Graph().as_default() as g, session_lib.Session() as sess: variables.get_or_create_global_step() train_op = training_util._increment_global_step(1) summary_writer = fake_summary_writer.FakeSummaryWriter(self.log_dir, g) hook = basic_session_run_hooks.StepCounterHook( summary_writer=summary_writer, every_n_steps=None, every_n_secs=0.1) hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(train_op) time.sleep(0.2) 
mon_sess.run(train_op) time.sleep(0.2) mon_sess.run(train_op) hook.end(sess) summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_graph=g, expected_summaries={}) self.assertTrue(summary_writer.summaries, 'No summaries were created.') self.assertItemsEqual([2, 3], summary_writer.summaries.keys()) for summary in summary_writer.summaries.values(): summary_value = summary[0].value[0] self.assertEqual('global_step/sec', summary_value.tag) self.assertGreater(summary_value.simple_value, 0) def test_global_step_name(self): with ops.Graph().as_default() as g, session_lib.Session() as sess: with variable_scope.variable_scope('bar'): variable_scope.get_variable( 'foo', initializer=0, trainable=False, collections=[ ops.GraphKeys.GLOBAL_STEP, ops.GraphKeys.GLOBAL_VARIABLES ]) train_op = training_util._increment_global_step(1) summary_writer = fake_summary_writer.FakeSummaryWriter(self.log_dir, g) hook = basic_session_run_hooks.StepCounterHook( summary_writer=summary_writer, every_n_steps=1, every_n_secs=None) hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) mon_sess.run(train_op) mon_sess.run(train_op) hook.end(sess) summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_graph=g, expected_summaries={}) self.assertTrue(summary_writer.summaries, 'No summaries were created.') self.assertItemsEqual([2], summary_writer.summaries.keys()) summary_value = summary_writer.summaries[2][0].value[0] self.assertEqual('bar/foo/sec', summary_value.tag) class SummarySaverHookTest(test.TestCase): def setUp(self): test.TestCase.setUp(self) self.log_dir = 'log/dir' self.summary_writer = fake_summary_writer.FakeSummaryWriter(self.log_dir) var = variables_lib.Variable(0.0) tensor = state_ops.assign_add(var, 1.0) tensor2 = tensor * 2 self.summary_op = summary_lib.scalar('my_summary', tensor) self.summary_op2 = summary_lib.scalar('my_summary2', tensor2) variables.get_or_create_global_step() self.train_op = training_util._increment_global_step(1) def test_raise_when_scaffold_and_summary_op_both_missing(self): with self.assertRaises(ValueError): basic_session_run_hooks.SummarySaverHook() def test_raise_when_scaffold_and_summary_op_both_present(self): with self.assertRaises(ValueError): basic_session_run_hooks.SummarySaverHook( scaffold=monitored_session.Scaffold(), summary_op=self.summary_op) def test_raise_in_both_secs_and_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.SummarySaverHook( save_secs=10, save_steps=20, summary_writer=self.summary_writer) def test_raise_in_none_secs_and_steps(self): with self.assertRaises(ValueError): basic_session_run_hooks.SummarySaverHook( save_secs=None, save_steps=None, summary_writer=self.summary_writer) def test_save_steps(self): hook = basic_session_run_hooks.SummarySaverHook( save_steps=8, summary_writer=self.summary_writer, summary_op=self.summary_op) with self.test_session() as sess: hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) for _ in range(30): mon_sess.run(self.train_op) hook.end(sess) self.summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_summaries={ 1: { 'my_summary': 1.0 }, 9: { 'my_summary': 2.0 }, 17: { 'my_summary': 3.0 }, 25: { 'my_summary': 4.0 }, }) def test_multiple_summaries(self): hook = basic_session_run_hooks.SummarySaverHook( save_steps=8, summary_writer=self.summary_writer, 
summary_op=[self.summary_op, self.summary_op2]) with self.test_session() as sess: hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) for _ in range(10): mon_sess.run(self.train_op) hook.end(sess) self.summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_summaries={ 1: { 'my_summary': 1.0, 'my_summary2': 2.0 }, 9: { 'my_summary': 2.0, 'my_summary2': 4.0 }, }) def test_save_secs_saving_once_every_step(self): hook = basic_session_run_hooks.SummarySaverHook( save_secs=0.5, summary_writer=self.summary_writer, summary_op=self.summary_op) with self.test_session() as sess: hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) for _ in range(4): mon_sess.run(self.train_op) time.sleep(0.5) hook.end(sess) self.summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_summaries={ 1: { 'my_summary': 1.0 }, 2: { 'my_summary': 2.0 }, 3: { 'my_summary': 3.0 }, 4: { 'my_summary': 4.0 }, }) @test.mock.patch.object(time, 'time') def test_save_secs_saving_once_every_three_steps(self, mock_time): mock_time.return_value = 1484695987.209386 hook = basic_session_run_hooks.SummarySaverHook( save_secs=9., summary_writer=self.summary_writer, summary_op=self.summary_op) with self.test_session() as sess: hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) for _ in range(8): mon_sess.run(self.train_op) mock_time.return_value += 3.1 hook.end(sess) # 24.8 seconds passed (3.1*8), it saves every 9 seconds starting from first: self.summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_summaries={ 1: { 'my_summary': 1.0 }, 4: { 'my_summary': 2.0 }, 7: { 'my_summary': 3.0 }, }) class GlobalStepWaiterHookTest(test.TestCase): def test_not_wait_for_step_zero(self): with ops.Graph().as_default(): variables.get_or_create_global_step() hook = basic_session_run_hooks.GlobalStepWaiterHook(wait_until_step=0) hook.begin() with session_lib.Session() as sess: # Before run should return without waiting gstep increment. 
hook.before_run( session_run_hook.SessionRunContext( original_args=None, session=sess)) def test_wait_for_step(self): with ops.Graph().as_default(): gstep = variables.get_or_create_global_step() hook = basic_session_run_hooks.GlobalStepWaiterHook(wait_until_step=1000) hook.begin() with session_lib.Session() as sess: sess.run(variables_lib.global_variables_initializer()) waiter = threading.Thread( target=hook.before_run, args=(session_run_hook.SessionRunContext( original_args=None, session=sess),)) waiter.daemon = True waiter.start() time.sleep(1.0) self.assertTrue(waiter.is_alive()) sess.run(state_ops.assign(gstep, 500)) time.sleep(1.0) self.assertTrue(waiter.is_alive()) sess.run(state_ops.assign(gstep, 1100)) time.sleep(1.2) self.assertFalse(waiter.is_alive()) class FinalOpsHookTest(test.TestCase): def test_final_ops_is_scalar_tensor(self): with ops.Graph().as_default(): expected_value = 4 final_ops = constant_op.constant(expected_value) hook = basic_session_run_hooks.FinalOpsHook(final_ops) hook.begin() with session_lib.Session() as session: hook.end(session) self.assertEqual(expected_value, hook.final_ops_values) def test_final_ops_is_tensor(self): with ops.Graph().as_default(): expected_values = [1, 6, 3, 5, 2, 4] final_ops = constant_op.constant(expected_values) hook = basic_session_run_hooks.FinalOpsHook(final_ops) hook.begin() with session_lib.Session() as session: hook.end(session) self.assertListEqual(expected_values, hook.final_ops_values.tolist()) def test_final_ops_with_dictionary(self): with ops.Graph().as_default(): expected_values = [4, -3] final_ops = array_ops.placeholder(dtype=dtypes.float32) final_ops_feed_dict = {final_ops: expected_values} hook = basic_session_run_hooks.FinalOpsHook( final_ops, final_ops_feed_dict) hook.begin() with session_lib.Session() as session: hook.end(session) self.assertListEqual(expected_values, hook.final_ops_values.tolist()) class ResourceSummarySaverHookTest(test.TestCase): def setUp(self): test.TestCase.setUp(self) self.log_dir = 'log/dir' self.summary_writer = fake_summary_writer.FakeSummaryWriter(self.log_dir) var = variable_scope.get_variable('var', initializer=0.0, use_resource=True) tensor = state_ops.assign_add(var, 1.0) self.summary_op = summary_lib.scalar('my_summary', tensor) with variable_scope.variable_scope('foo', use_resource=True): variables.create_global_step() self.train_op = training_util._increment_global_step(1) def test_save_steps(self): hook = basic_session_run_hooks.SummarySaverHook( save_steps=8, summary_writer=self.summary_writer, summary_op=self.summary_op) with self.test_session() as sess: hook.begin() sess.run(variables_lib.global_variables_initializer()) mon_sess = monitored_session._HookedSession(sess, [hook]) for _ in range(30): mon_sess.run(self.train_op) hook.end(sess) self.summary_writer.assert_summaries( test_case=self, expected_logdir=self.log_dir, expected_summaries={ 1: { 'my_summary': 1.0 }, 9: { 'my_summary': 2.0 }, 17: { 'my_summary': 3.0 }, 25: { 'my_summary': 4.0 }, }) class FeedFnHookTest(test.TestCase): def test_feeding_placeholder(self): with ops.Graph().as_default(), session_lib.Session() as sess: x = array_ops.placeholder(dtype=dtypes.float32) y = x + 1 hook = basic_session_run_hooks.FeedFnHook( feed_fn=lambda: {x: 1.0}) hook.begin() mon_sess = monitored_session._HookedSession(sess, [hook]) self.assertEqual(mon_sess.run(y), 2) if __name__ == '__main__': test.main()
apache-2.0
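The tests above exercise the hook plumbing through internal modules; a rough sketch of the same hooks wired up through TensorFlow's public 1.x training API follows. The graph and paths are made up for illustration and are not part of the test file.

# Hedged sketch: StopAtStepHook, LoggingTensorHook and CheckpointSaverHook
# attached to a MonitoredTrainingSession (TF 1.x public API).
import tensorflow as tf

global_step = tf.train.get_or_create_global_step()
train_op = tf.assign_add(global_step, 1)   # stand-in for a real training op

hooks = [
    tf.train.StopAtStepHook(last_step=100),                        # stop condition
    tf.train.LoggingTensorHook({'step': global_step}, every_n_iter=10),
    tf.train.CheckpointSaverHook('/tmp/ckpt', save_steps=50),      # periodic saves
]

with tf.train.MonitoredTrainingSession(hooks=hooks) as sess:
    while not sess.should_stop():
        sess.run(train_op)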
Vaidyanath/tempest
tempest/services/messaging/json/messaging_client.py
7
6520
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import urllib
import uuid

from tempest.api_schema.response.messaging.v1 import queues as queues_schema
from tempest.common import service_client


class MessagingClientJSON(service_client.ServiceClient):

    def __init__(self, auth_provider, service, region,
                 endpoint_type=None, build_interval=None, build_timeout=None,
                 disable_ssl_certificate_validation=None, ca_certs=None,
                 trace_requests=None):
        dscv = disable_ssl_certificate_validation
        super(MessagingClientJSON, self).__init__(
            auth_provider, service, region,
            endpoint_type=endpoint_type,
            build_interval=build_interval,
            build_timeout=build_timeout,
            disable_ssl_certificate_validation=dscv,
            ca_certs=ca_certs,
            trace_requests=trace_requests)

        self.version = '1'
        self.uri_prefix = 'v{0}'.format(self.version)
        client_id = uuid.uuid4().hex
        self.headers = {'Client-ID': client_id}

    def list_queues(self):
        uri = '{0}/queues'.format(self.uri_prefix)
        resp, body = self.get(uri)

        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.list_queues, resp, body)
        return resp, body

    def create_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.put(uri, body=None)
        self.expected_success(201, resp.status)
        return resp, body

    def get_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.get(uri)
        self.expected_success(204, resp.status)
        return resp, body

    def head_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.head(uri)
        self.expected_success(204, resp.status)
        return resp, body

    def delete_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.delete(uri)
        self.expected_success(204, resp.status)
        return resp, body

    def get_queue_stats(self, queue_name):
        uri = '{0}/queues/{1}/stats'.format(self.uri_prefix, queue_name)
        resp, body = self.get(uri)
        body = json.loads(body)
        self.validate_response(queues_schema.queue_stats, resp, body)
        return resp, body

    def get_queue_metadata(self, queue_name):
        uri = '{0}/queues/{1}/metadata'.format(self.uri_prefix, queue_name)
        resp, body = self.get(uri)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return resp, body

    def set_queue_metadata(self, queue_name, rbody):
        uri = '{0}/queues/{1}/metadata'.format(self.uri_prefix, queue_name)
        resp, body = self.put(uri, body=json.dumps(rbody))
        self.expected_success(204, resp.status)
        return resp, body

    def post_messages(self, queue_name, rbody):
        uri = '{0}/queues/{1}/messages'.format(self.uri_prefix, queue_name)
        resp, body = self.post(uri, body=json.dumps(rbody),
                               extra_headers=True,
                               headers=self.headers)

        body = json.loads(body)
        self.validate_response(queues_schema.post_messages, resp, body)
        return resp, body

    def list_messages(self, queue_name):
        uri = '{0}/queues/{1}/messages?echo=True'.format(self.uri_prefix,
                                                         queue_name)
        resp, body = self.get(uri, extra_headers=True, headers=self.headers)

        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.list_messages, resp, body)
        return resp, body

    def get_single_message(self, message_uri):
        resp, body = self.get(message_uri, extra_headers=True,
                              headers=self.headers)
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.get_single_message, resp,
                                   body)
        return resp, body

    def get_multiple_messages(self, message_uri):
        resp, body = self.get(message_uri, extra_headers=True,
                              headers=self.headers)
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.get_multiple_messages, resp,
                                   body)
        return resp, body

    def delete_messages(self, message_uri):
        resp, body = self.delete(message_uri)
        self.expected_success(204, resp.status)
        return resp, body

    def post_claims(self, queue_name, rbody, url_params=False):
        uri = '{0}/queues/{1}/claims'.format(self.uri_prefix, queue_name)
        if url_params:
            uri += '?%s' % urllib.urlencode(url_params)

        resp, body = self.post(uri, body=json.dumps(rbody),
                               extra_headers=True,
                               headers=self.headers)

        body = json.loads(body)
        self.validate_response(queues_schema.claim_messages, resp, body)
        return resp, body

    def query_claim(self, claim_uri):
        resp, body = self.get(claim_uri)

        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.query_claim, resp, body)
        return resp, body

    def update_claim(self, claim_uri, rbody):
        resp, body = self.patch(claim_uri, body=json.dumps(rbody))
        self.expected_success(204, resp.status)
        return resp, body

    def release_claim(self, claim_uri):
        resp, body = self.delete(claim_uri)
        self.expected_success(204, resp.status)
        return resp, body
apache-2.0
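A hedged sketch of the call pattern this client supports; the auth_provider, service name and region are placeholders, since building a real Tempest auth provider is outside the file above.

# Hedged sketch: typical call sequence against the messaging (Zaqar) API.
# 'auth_provider' is assumed to be a configured tempest auth provider.
client = MessagingClientJSON(auth_provider, 'messaging', 'RegionOne')

client.create_queue('demo-queue')                       # expects 201
client.post_messages('demo-queue',
                     rbody=[{'ttl': 300, 'body': {'event': 'hello'}}])
resp, body = client.list_messages('demo-queue')         # echoes own messages
client.delete_queue('demo-queue')                       # expects 204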
googleads/googleads-adxseller-examples
python/v1.1/get_all_ad_units_for_custom_channel.py
2
2448
#!/usr/bin/python
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This example gets all ad units corresponding to a specified custom channel.

To get custom channels, run get_all_custom_channels.py.

Tags: customchannels.adunits.list
"""

__author__ = '[email protected] (Sérgio Gomes)'

import argparse
import sys

from apiclient import sample_tools
from oauth2client import client

# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
    'ad_client_id',
    help='The ID of the ad client with the specified custom channel')
argparser.add_argument(
    'custom_channel_id',
    help='The ID of the custom channel for which to get ad units')

MAX_PAGE_SIZE = 50


def main(argv):
  # Authenticate and construct service.
  service, flags = sample_tools.init(
      argv, 'adexchangeseller', 'v1.1', __doc__, __file__, parents=[argparser],
      scope='https://www.googleapis.com/auth/adexchange.seller.readonly')

  # Process flags and read their values.
  ad_client_id = flags.ad_client_id
  custom_channel_id = flags.custom_channel_id

  try:
    # Retrieve ad unit list in pages and display data as we receive it.
    request = service.customchannels().adunits().list(
        adClientId=ad_client_id, customChannelId=custom_channel_id,
        maxResults=MAX_PAGE_SIZE)

    while request is not None:
      result = request.execute()
      ad_units = result['items']
      for ad_unit in ad_units:
        print ('Ad unit with code "%s", name "%s" and status "%s" was found. '
               % (ad_unit['code'], ad_unit['name'], ad_unit['status']))
      request = service.adunits().list_next(request, result)

  except client.AccessTokenRefreshError:
    print ('The credentials have been revoked or expired, please re-run the '
           'application to re-authorize')


if __name__ == '__main__':
  main(sys.argv)
apache-2.0
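One caveat in the paging loop above: Google API list responses typically omit 'items' when a page is empty, so a slightly more defensive version of just that loop (same names as in main() above) might read:

# Hedged variant of the paging loop that tolerates pages without 'items'.
while request is not None:
  result = request.execute()
  for ad_unit in result.get('items', []):
    print ('Ad unit with code "%s", name "%s" and status "%s" was found. '
           % (ad_unit['code'], ad_unit['name'], ad_unit['status']))
  request = service.adunits().list_next(request, result)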
nosedjango/nosedjango
nosedjangotests/polls/migrations/0001_initial.py
1
1274
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models

try:
    from django.db import migrations
except ImportError:
    pass
else:
    class Migration(migrations.Migration):

        dependencies = [
            ('contenttypes', '0002_remove_content_type_name'),
        ]

        operations = [
            migrations.CreateModel(
                name='Choice',
                fields=[
                    ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                    ('object_id', models.PositiveIntegerField()),
                    ('choice', models.CharField(max_length=200)),
                    ('votes', models.IntegerField(default=0)),
                    ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
                ],
            ),
            migrations.CreateModel(
                name='Poll',
                fields=[
                    ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                    ('question', models.CharField(max_length=200)),
                    ('pub_date', models.DateTimeField(verbose_name=b'date published')),
                ],
            ),
        ]
lgpl-3.0
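The migration above implies roughly these models; the GenericForeignKey on Choice is an inference from the object_id/content_type pair, not something the migration states.

# Hedged sketch of the models this initial migration would be generated from.
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models


class Poll(models.Model):
    question = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')


class Choice(models.Model):
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')  # inferred
    choice = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)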
azatoth/scons
test/Java/JAVABOOTCLASSPATH.py
5
2718
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

"""
Verify that use of $JAVABOOTCLASSPATH sets the -bootclasspath option
on javac compilations.
"""

import os

import TestSCons

_python_ = TestSCons._python_

test = TestSCons.TestSCons()

where_javac, java_version = test.java_where_javac()
where_javah = test.java_where_javah()

test.write('SConstruct', """
env = Environment(tools = ['javac', 'javah'],
                  JAVAC = r'%(where_javac)s',
                  JAVABOOTCLASSPATH = ['dir1', 'dir2'])
j1 = env.Java(target = 'class', source = 'com/Example1.java')
j2 = env.Java(target = 'class', source = 'com/Example2.java')
""" % locals())

test.subdir('com')

test.write(['com', 'Example1.java'], """\
package com;

public class Example1
{
    public static void main(String[] args)
    {
    }
}
""")

test.write(['com', 'Example2.java'], """\
package com;

public class Example2
{
    public static void main(String[] args)
    {
    }
}
""")

# Setting -bootclasspath messes with the Java runtime environment, so
# we'll just take the easy way out and examine the -n output to see if
# the expected option shows up on the command line.

bootclasspath = os.pathsep.join(['dir1', 'dir2'])

expect = """\
%(where_javac)s -bootclasspath %(bootclasspath)s -d class -sourcepath com com/Example1.java
%(where_javac)s -bootclasspath %(bootclasspath)s -d class -sourcepath com com/Example2.java
""" % locals()

test.run(arguments = '-Q -n .', stdout = expect)

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
mit
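One detail the expected output above depends on: os.pathsep makes the -bootclasspath separator platform-dependent, which is why the test builds the string rather than hard-coding it.

# Illustration of the separator the test builds with os.pathsep.
import os

bootclasspath = os.pathsep.join(['dir1', 'dir2'])
# POSIX   -> 'dir1:dir2'
# Windows -> 'dir1;dir2'
print(bootclasspath)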
quantifiedcode-bot/checkmate
checkmate/contrib/plugins/git/commands/diff.py
4
2418
# -*- coding: utf-8 -*-

"""
This file is part of checkmate, a meta code checker written in Python.

Copyright (C) 2015 Andreas Dewes, QuantifiedCode UG

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""

from __future__ import unicode_literals

from checkmate.management.commands.base import BaseCommand
from ..models import GitSnapshot
from checkmate.lib.code import CodeEnvironment

import sys
import os
import random
import os.path
import json
import time
import pprint


class Command(BaseCommand):

    options = BaseCommand.options + [
        {
            'name' : '--branch',
            'action' : 'store',
            'dest' : 'branch',
            'type' : str,
            'default' : 'master',
            'help' : 'The branch for which to show the log.'
        },
    ]

    def run(self):

        if len(self.raw_args) < 2:
            sys.stderr.write("Usage: checkmate git diff [snapshot-pk-1] [snapshot-pk-2]\n")
            return -1

        snapshot_a_pk, snapshot_b_pk = self.raw_args[:2]

        try:
            snapshot_a = self.backend.get(GitSnapshot, {'pk' : snapshot_a_pk})
        except GitSnapshot.DoesNotExist:
            sys.stderr.write("Snapshot does not exist: %s\n" % snapshot_a_pk)
            return -1

        try:
            snapshot_b = self.backend.get(GitSnapshot, {'pk' : snapshot_b_pk})
        except GitSnapshot.DoesNotExist:
            sys.stderr.write("Snapshot does not exist: %s\n" % snapshot_b_pk)
            return -1

        code_environment = CodeEnvironment([], {}, {})

        diff = code_environment.diff_snapshots(snapshot_a, snapshot_b)

        for key, values in diff.issues.items():
            if not values:
                continue
            print key
            for value in values:
                print value.analyzer, value.description
agpl-3.0
daradurvs/ignite
modules/platforms/python/examples/type_hints.py
11
1618
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pyignite import Client
from pyignite.datatypes import CharObject, ShortObject

client = Client()
client.connect('127.0.0.1', 10800)

my_cache = client.get_or_create_cache('my cache')

my_cache.put('my key', 42)
# value ‘42’ takes 9 bytes of memory as a LongObject

my_cache.put('my key', 42, value_hint=ShortObject)
# value ‘42’ takes only 3 bytes as a ShortObject

my_cache.put('a', 1)
# ‘a’ is a key of type String

my_cache.put('a', 2, key_hint=CharObject)
# another key ‘a’ of type CharObject was created

value = my_cache.get('a')
print(value)
# 1

value = my_cache.get('a', key_hint=CharObject)
print(value)
# 2

# now let us delete both keys at once
my_cache.remove_keys([
    'a',                # a default type key
    ('a', CharObject),  # a key of type CharObject
])

my_cache.destroy()
client.close()
apache-2.0
caosmo/pip
tests/data/src/sample/setup.py
51
3713
from setuptools import setup, find_packages
import codecs
import os
import re

here = os.path.abspath(os.path.dirname(__file__))

# Read the version number from a source file.
# Why read it, and not import?
# see https://groups.google.com/d/topic/pypa-dev/0PkjVpcxTzQ/discussion
def find_version(*file_paths):
    # Open in Latin-1 so that we avoid encoding errors.
    # Use codecs.open for Python 2 compatibility
    with codecs.open(os.path.join(here, *file_paths), 'r', 'latin1') as f:
        version_file = f.read()

    # The version line must have the form
    # __version__ = 'ver'
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
                              version_file, re.M)
    if version_match:
        return version_match.group(1)
    raise RuntimeError("Unable to find version string.")


# Get the long description from the relevant file
with codecs.open('DESCRIPTION.rst', encoding='utf-8') as f:
    long_description = f.read()

setup(
    name="sample",
    version=find_version('sample', '__init__.py'),
    description="A sample Python project",
    long_description=long_description,

    # The project URL.
    url='https://github.com/pypa/sampleproject',

    # Author details
    author='The Python Packaging Authority',
    author_email='[email protected]',

    # Choose your license
    license='MIT',

    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',

        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',

        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',

        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
    ],

    # What does your project relate to?
    keywords='sample setuptools development',

    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages.
    packages=find_packages(exclude=["contrib", "docs", "tests*"]),

    # List run-time dependencies here. These will be installed by pip when your
    # project is installed.
    install_requires = ['peppercorn'],

    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    package_data={
        'sample': ['package_data.dat'],
    },

    # Although 'package_data' is the preferred approach, in some case you may
    # need to place data files outside of your packages.
    # see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
    data_files=[('my_data', ['data/data_file'])],

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    entry_points={
        'console_scripts': [
            'sample=sample:main',
        ],
    },
)
mit
2014c2g14/c2g14
exts/w2/static/Brython2.0.0-20140209-164925/Lib/unittest/test/test_discovery.py
785
13838
import os import re import sys import unittest class TestableTestProgram(unittest.TestProgram): module = '__main__' exit = True defaultTest = failfast = catchbreak = buffer = None verbosity = 1 progName = '' testRunner = testLoader = None def __init__(self): pass class TestDiscovery(unittest.TestCase): # Heavily mocked tests so I can avoid hitting the filesystem def test_get_name_from_path(self): loader = unittest.TestLoader() loader._top_level_dir = '/foo' name = loader._get_name_from_path('/foo/bar/baz.py') self.assertEqual(name, 'bar.baz') if not __debug__: # asserts are off return with self.assertRaises(AssertionError): loader._get_name_from_path('/bar/baz.py') def test_find_tests(self): loader = unittest.TestLoader() original_listdir = os.listdir def restore_listdir(): os.listdir = original_listdir original_isfile = os.path.isfile def restore_isfile(): os.path.isfile = original_isfile original_isdir = os.path.isdir def restore_isdir(): os.path.isdir = original_isdir path_lists = [['test1.py', 'test2.py', 'not_a_test.py', 'test_dir', 'test.foo', 'test-not-a-module.py', 'another_dir'], ['test3.py', 'test4.py', ]] os.listdir = lambda path: path_lists.pop(0) self.addCleanup(restore_listdir) def isdir(path): return path.endswith('dir') os.path.isdir = isdir self.addCleanup(restore_isdir) def isfile(path): # another_dir is not a package and so shouldn't be recursed into return not path.endswith('dir') and not 'another_dir' in path os.path.isfile = isfile self.addCleanup(restore_isfile) loader._get_module_from_name = lambda path: path + ' module' loader.loadTestsFromModule = lambda module: module + ' tests' top_level = os.path.abspath('/foo') loader._top_level_dir = top_level suite = list(loader._find_tests(top_level, 'test*.py')) expected = [name + ' module tests' for name in ('test1', 'test2')] expected.extend([('test_dir.%s' % name) + ' module tests' for name in ('test3', 'test4')]) self.assertEqual(suite, expected) def test_find_tests_with_package(self): loader = unittest.TestLoader() original_listdir = os.listdir def restore_listdir(): os.listdir = original_listdir original_isfile = os.path.isfile def restore_isfile(): os.path.isfile = original_isfile original_isdir = os.path.isdir def restore_isdir(): os.path.isdir = original_isdir directories = ['a_directory', 'test_directory', 'test_directory2'] path_lists = [directories, [], [], []] os.listdir = lambda path: path_lists.pop(0) self.addCleanup(restore_listdir) os.path.isdir = lambda path: True self.addCleanup(restore_isdir) os.path.isfile = lambda path: os.path.basename(path) not in directories self.addCleanup(restore_isfile) class Module(object): paths = [] load_tests_args = [] def __init__(self, path): self.path = path self.paths.append(path) if os.path.basename(path) == 'test_directory': def load_tests(loader, tests, pattern): self.load_tests_args.append((loader, tests, pattern)) return 'load_tests' self.load_tests = load_tests def __eq__(self, other): return self.path == other.path loader._get_module_from_name = lambda name: Module(name) def loadTestsFromModule(module, use_load_tests): if use_load_tests: raise self.failureException('use_load_tests should be False for packages') return module.path + ' module tests' loader.loadTestsFromModule = loadTestsFromModule loader._top_level_dir = '/foo' # this time no '.py' on the pattern so that it can match # a test package suite = list(loader._find_tests('/foo', 'test*')) # We should have loaded tests from the test_directory package by calling load_tests # and directly from the 
test_directory2 package self.assertEqual(suite, ['load_tests', 'test_directory2' + ' module tests']) self.assertEqual(Module.paths, ['test_directory', 'test_directory2']) # load_tests should have been called once with loader, tests and pattern self.assertEqual(Module.load_tests_args, [(loader, 'test_directory' + ' module tests', 'test*')]) def test_discover(self): loader = unittest.TestLoader() original_isfile = os.path.isfile original_isdir = os.path.isdir def restore_isfile(): os.path.isfile = original_isfile os.path.isfile = lambda path: False self.addCleanup(restore_isfile) orig_sys_path = sys.path[:] def restore_path(): sys.path[:] = orig_sys_path self.addCleanup(restore_path) full_path = os.path.abspath(os.path.normpath('/foo')) with self.assertRaises(ImportError): loader.discover('/foo/bar', top_level_dir='/foo') self.assertEqual(loader._top_level_dir, full_path) self.assertIn(full_path, sys.path) os.path.isfile = lambda path: True os.path.isdir = lambda path: True def restore_isdir(): os.path.isdir = original_isdir self.addCleanup(restore_isdir) _find_tests_args = [] def _find_tests(start_dir, pattern): _find_tests_args.append((start_dir, pattern)) return ['tests'] loader._find_tests = _find_tests loader.suiteClass = str suite = loader.discover('/foo/bar/baz', 'pattern', '/foo/bar') top_level_dir = os.path.abspath('/foo/bar') start_dir = os.path.abspath('/foo/bar/baz') self.assertEqual(suite, "['tests']") self.assertEqual(loader._top_level_dir, top_level_dir) self.assertEqual(_find_tests_args, [(start_dir, 'pattern')]) self.assertIn(top_level_dir, sys.path) def test_discover_with_modules_that_fail_to_import(self): loader = unittest.TestLoader() listdir = os.listdir os.listdir = lambda _: ['test_this_does_not_exist.py'] isfile = os.path.isfile os.path.isfile = lambda _: True orig_sys_path = sys.path[:] def restore(): os.path.isfile = isfile os.listdir = listdir sys.path[:] = orig_sys_path self.addCleanup(restore) suite = loader.discover('.') self.assertIn(os.getcwd(), sys.path) self.assertEqual(suite.countTestCases(), 1) test = list(list(suite)[0])[0] # extract test from suite with self.assertRaises(ImportError): test.test_this_does_not_exist() def test_command_line_handling_parseArgs(self): program = TestableTestProgram() args = [] def do_discovery(argv): args.extend(argv) program._do_discovery = do_discovery program.parseArgs(['something', 'discover']) self.assertEqual(args, []) program.parseArgs(['something', 'discover', 'foo', 'bar']) self.assertEqual(args, ['foo', 'bar']) def test_command_line_handling_discover_by_default(self): program = TestableTestProgram() program.module = None self.called = False def do_discovery(argv): self.called = True self.assertEqual(argv, []) program._do_discovery = do_discovery program.parseArgs(['something']) self.assertTrue(self.called) def test_command_line_handling_discover_by_default_with_options(self): program = TestableTestProgram() program.module = None args = ['something', '-v', '-b', '-v', '-c', '-f'] self.called = False def do_discovery(argv): self.called = True self.assertEqual(argv, args[1:]) program._do_discovery = do_discovery program.parseArgs(args) self.assertTrue(self.called) def test_command_line_handling_do_discovery_too_many_arguments(self): class Stop(Exception): pass def usageExit(): raise Stop program = TestableTestProgram() program.usageExit = usageExit with self.assertRaises(Stop): # too many args program._do_discovery(['one', 'two', 'three', 'four']) def test_command_line_handling_do_discovery_calls_loader(self): program = 
TestableTestProgram() class Loader(object): args = [] def discover(self, start_dir, pattern, top_level_dir): self.args.append((start_dir, pattern, top_level_dir)) return 'tests' program._do_discovery(['-v'], Loader=Loader) self.assertEqual(program.verbosity, 2) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['--verbose'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery([], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['fish', 'eggs'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['fish', 'eggs', 'ham'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', 'ham')]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-s', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-t', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', 'fish')]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-p', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'fish', None)]) self.assertFalse(program.failfast) self.assertFalse(program.catchbreak) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-p', 'eggs', '-s', 'fish', '-v', '-f', '-c'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', None)]) self.assertEqual(program.verbosity, 2) self.assertTrue(program.failfast) self.assertTrue(program.catchbreak) def test_detect_module_clash(self): class Module(object): __file__ = 'bar/foo.py' sys.modules['foo'] = Module full_path = os.path.abspath('foo') original_listdir = os.listdir original_isfile = os.path.isfile original_isdir = os.path.isdir def cleanup(): os.listdir = original_listdir os.path.isfile = original_isfile os.path.isdir = original_isdir del sys.modules['foo'] if full_path in sys.path: sys.path.remove(full_path) self.addCleanup(cleanup) def listdir(_): return ['foo.py'] def isfile(_): return True def isdir(_): return True os.listdir = listdir os.path.isfile = isfile os.path.isdir = isdir loader = unittest.TestLoader() mod_dir = os.path.abspath('bar') expected_dir = os.path.abspath('foo') msg = re.escape(r"'foo' module incorrectly imported from %r. Expected %r. " "Is this module globally installed?" 
% (mod_dir, expected_dir)) self.assertRaisesRegex( ImportError, '^%s$' % msg, loader.discover, start_dir='foo', pattern='foo.py' ) self.assertEqual(sys.path[0], full_path) def test_discovery_from_dotted_path(self): loader = unittest.TestLoader() tests = [self] expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__)) self.wasRun = False def _find_tests(start_dir, pattern): self.wasRun = True self.assertEqual(start_dir, expectedPath) return tests loader._find_tests = _find_tests suite = loader.discover('unittest.test') self.assertTrue(self.wasRun) self.assertEqual(suite._tests, tests) if __name__ == '__main__': unittest.main()
gpl-2.0
adhoc-dev/sale-workflow
sale_quotation_sourcing/model/procurement.py
34
1936
# -*- coding: utf-8 -*-
#
#    Author: Alexandre Fayolle, Leonardo Pistone
#    Copyright 2014-2015 Camptocamp SA
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import models, api, _


class ProcurementOrder(models.Model):
    _inherit = 'procurement.order'

    @api.multi
    def make_po(self):
        """ link the procurement to the PO line sourcing the SO line
        if the SO line is manually sourced.

        Otherwise, use the normal implementation.
        """
        res = {}
        to_propagate = self.browse()

        for procurement in self:
            curr_proc = procurement
            sale_line = False
            while curr_proc:
                if curr_proc.sale_line_id:
                    sale_line = curr_proc.sale_line_id
                    break
                curr_proc = curr_proc.move_dest_id.procurement_id

            if sale_line and sale_line.manually_sourced:
                po_line = sale_line.sourced_by
                res[procurement.id] = po_line.id
                procurement.purchase_line_id = po_line
                procurement.message_post(body=_('Manually sourced'))
            else:
                to_propagate |= procurement

        res.update(super(ProcurementOrder, to_propagate).make_po())
        return res
agpl-3.0
frouty/odoogoeen
addons/event/report/__init__.py
435
1079
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import report_event_registration

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
faizan-barmawer/openstack_ironic
ironic/drivers/modules/iboot.py
5
7453
# -*- coding: utf-8 -*- # # Copyright 2014 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Ironic iBoot PDU power manager. """ from oslo.utils import importutils from ironic.common import exception from ironic.common.i18n import _ from ironic.common.i18n import _LW from ironic.common import states from ironic.conductor import task_manager from ironic.drivers import base from ironic.openstack.common import log as logging iboot = importutils.try_import('iboot') LOG = logging.getLogger(__name__) REQUIRED_PROPERTIES = { 'iboot_address': _("IP address of the node. Required."), 'iboot_username': _("username. Required."), 'iboot_password': _("password. Required."), } OPTIONAL_PROPERTIES = { 'iboot_relay_id': _("iBoot PDU relay id; default is 1. Optional."), 'iboot_port': _("iBoot PDU port; default is 9100. Optional."), } COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy() COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES) def _parse_driver_info(node): info = node.driver_info or {} missing_info = [key for key in REQUIRED_PROPERTIES if not info.get(key)] if missing_info: raise exception.MissingParameterValue(_( "The following iBoot credentials were not supplied to iBoot PDU " "driver: %s.") % missing_info) address = info.get('iboot_address', None) username = info.get('iboot_username', None) password = info.get('iboot_password', None) relay_id = info.get('iboot_relay_id', 1) try: relay_id = int(relay_id) except ValueError: raise exception.InvalidParameterValue(_( "iBoot PDU relay id must be an integer.")) port = info.get('iboot_port', 9100) try: port = int(port) except ValueError: raise exception.InvalidParameterValue(_( "iBoot PDU port must be an integer.")) return { 'address': address, 'username': username, 'password': password, 'port': port, 'relay_id': relay_id, 'uuid': node.uuid, } def _get_connection(driver_info): # NOTE: python-iboot wants username and password as strings (not unicode) return iboot.iBootInterface(driver_info['address'], str(driver_info['username']), str(driver_info['password']), port=driver_info['port'], num_relays=driver_info['relay_id']) def _switch(driver_info, enabled): conn = _get_connection(driver_info) relay_id = driver_info['relay_id'] return conn.switch(relay_id, enabled) def _power_status(driver_info): conn = _get_connection(driver_info) relay_id = driver_info['relay_id'] try: response = conn.get_relays() status = response[relay_id - 1] except TypeError: msg = (_("Cannot get power status for node '%(node)s'. iBoot " "get_relays() returned '%(resp)s'.") % {'node': driver_info['uuid'], 'resp': response}) LOG.error(msg) raise exception.IBootOperationError(message=msg) except IndexError: LOG.warning(_LW("Cannot get power status for node '%(node)s' at relay " "'%(relay)s'. 
iBoot get_relays() returned " "'%(resp)s'."), {'node': driver_info['uuid'], 'relay': relay_id, 'resp': response}) return states.ERROR if status: return states.POWER_ON else: return states.POWER_OFF class IBootPower(base.PowerInterface): """iBoot PDU Power Driver for Ironic This PowerManager class provides a mechanism for controlling power state via an iBoot capable device. Requires installation of python-iboot: https://github.com/darkip/python-iboot """ def get_properties(self): return COMMON_PROPERTIES def validate(self, task): """Validate driver_info for iboot driver. :param task: a TaskManager instance containing the node to act on. :raises: InvalidParameterValue if iboot parameters are invalid. :raises: MissingParameterValue if required iboot parameters are missing. """ _parse_driver_info(task.node) def get_power_state(self, task): """Get the current power state of the task's node. :param task: a TaskManager instance containing the node to act on. :returns: one of ironic.common.states POWER_OFF, POWER_ON or ERROR. :raises: IBootOperationError on an error from iBoot. :raises: InvalidParameterValue if iboot parameters are invalid. :raises: MissingParameterValue if required iboot parameters are missing. """ driver_info = _parse_driver_info(task.node) return _power_status(driver_info) @task_manager.require_exclusive_lock def set_power_state(self, task, pstate): """Turn the power on or off. :param task: a TaskManager instance containing the node to act on. :param pstate: The desired power state, one of ironic.common.states POWER_ON, POWER_OFF. :raises: IBootOperationError on an error from iBoot. :raises: InvalidParameterValue if iboot parameters are invalid or if an invalid power state was specified. :raises: MissingParameterValue if required iboot parameters are missing. :raises: PowerStateFailure if the power couldn't be set to pstate. """ driver_info = _parse_driver_info(task.node) if pstate == states.POWER_ON: _switch(driver_info, True) elif pstate == states.POWER_OFF: _switch(driver_info, False) else: raise exception.InvalidParameterValue(_( "set_power_state called with invalid " "power state %s.") % pstate) state = _power_status(driver_info) if state != pstate: raise exception.PowerStateFailure(pstate=pstate) @task_manager.require_exclusive_lock def reboot(self, task): """Cycles the power to the task's node. :param task: a TaskManager instance containing the node to act on. :raises: IBootOperationError on an error from iBoot. :raises: InvalidParameterValue if iboot parameters are invalid. :raises: MissingParameterValue if required iboot parameters are missing. :raises: PowerStateFailure if the final state of the node is not POWER_ON. """ driver_info = _parse_driver_info(task.node) _switch(driver_info, False) _switch(driver_info, True) state = _power_status(driver_info) if state != states.POWER_ON: raise exception.PowerStateFailure(pstate=states.POWER_ON)
apache-2.0
katrid/django
django/core/management/commands/startproject.py
503
1359
from importlib import import_module

from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
from django.utils.crypto import get_random_string


class Command(TemplateCommand):
    help = ("Creates a Django project directory structure for the given "
            "project name in the current directory or optionally in the "
            "given directory.")
    missing_args_message = "You must provide a project name."

    def handle(self, **options):
        project_name, target = options.pop('name'), options.pop('directory')
        self.validate_name(project_name, "project")

        # Check that the project_name cannot be imported.
        try:
            import_module(project_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing "
                               "Python module and cannot be used as a "
                               "project name. Please try another name." %
                               project_name)

        # Create a random SECRET_KEY to put it in the main settings.
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        options['secret_key'] = get_random_string(50, chars)

        super(Command, self).handle('project', project_name, target, **options)
bsd-3-clause
ArielSaldana/sunset
yaml/test/gtest-1.8.0/googletest/scripts/common.py
1180
2919
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Shared utilities for writing scripts for Google Test/Mock."""

__author__ = '[email protected] (Zhanyong Wan)'


import os
import re


# Matches the line from 'svn info .' output that describes what SVN
# path the current local directory corresponds to.  For example, in
# a googletest SVN workspace's trunk/test directory, the output will be:
#
# URL: https://googletest.googlecode.com/svn/trunk/test
_SVN_INFO_URL_RE = re.compile(r'^URL: https://(\w+)\.googlecode\.com/svn(.*)')


def GetCommandOutput(command):
  """Runs the shell command and returns its stdout as a list of lines."""

  f = os.popen(command, 'r')
  lines = [line.strip() for line in f.readlines()]
  f.close()
  return lines


def GetSvnInfo():
  """Returns the project name and the current SVN workspace's root path."""

  for line in GetCommandOutput('svn info .'):
    m = _SVN_INFO_URL_RE.match(line)
    if m:
      project = m.group(1)  # googletest or googlemock
      rel_path = m.group(2)
      root = os.path.realpath(rel_path.count('/') * '../')
      return project, root

  return None, None


def GetSvnTrunk():
  """Returns the current SVN workspace's trunk root path."""

  _, root = GetSvnInfo()
  return root + '/trunk' if root else None


def IsInGTestSvn():
  project, _ = GetSvnInfo()
  return project == 'googletest'


def IsInGMockSvn():
  project, _ = GetSvnInfo()
  return project == 'googlemock'
mit
nmiculinic/psiml2017-facs
landmarks_from_camera.py
1
1865
import argparse

import numpy as np
import cv2
import time
import dataset
from PIL import Image, ImageDraw
from keras.models import load_model

args = argparse.ArgumentParser()
args.add_argument("model_path")
args = args.parse_args()

model = load_model(args.model_path)
print(model.inputs)
dim = int(model.input.shape[1])
# dim = 160


def draw_landmarks(image, landmarks, r=1, fill_color=(255,0,0,100)):
    draw = ImageDraw.Draw(image)
    for row in landmarks:
        x, y = row
        draw.ellipse((x-r, y-r, x+r, y+r), fill=fill_color)


cap = cv2.VideoCapture(0)
cap.set(3, 640)
cap.set(4, 480)

face_cascade = cv2.CascadeClassifier('./haarcascade_frontalface_alt.xml')

while(True):
    #_, img = cap.read()
    img = cv2.imread(r"C:\Users\admin\Desktop\random\fn059t2afunaff001.png",cv2.IMREAD_COLOR)
    print(img.shape)
    #img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    img_gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)

    faces = face_cascade.detectMultiScale(img_gray, 1.3, 5)
    for (x,y,w,h) in faces:
        img = Image.fromarray(img[y:y+h, x:x+w])

        dp = {
            'image': img,
            'landmarks': np.zeros((66,2))
        }
        dp = dataset.resize_mirror_datapoint(dp, dim, False)
        img = dp['image']

        landmark = np.array([[
            [0,0],
            [50,50],
            [100,50]
        ]])
        landmark = model.predict(np.array(img.convert("L"))[None, :, :, None] / 255.0)
        landmark = np.squeeze(landmark, axis=0)

        h, w = img.size
        img = img.resize((2*h, 2*w)).convert("L")
        landmark *= 2
        draw_landmarks(img, landmark, r=2)

        # Convert RGB to BGR
        img = np.array(img.convert("RGB"))
        img = img[:, :, ::-1].copy()

    cv2.imshow('frame', img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
mit
jayceyxc/hue
desktop/core/ext-py/Django-1.6.10/tests/empty/tests.py
58
1221
from __future__ import absolute_import

from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_app
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import six

from .models import Empty


class EmptyModelTests(TestCase):
    def test_empty(self):
        m = Empty()
        self.assertEqual(m.id, None)
        m.save()
        Empty.objects.create()
        self.assertEqual(len(Empty.objects.all()), 2)
        self.assertTrue(m.id is not None)
        existing = Empty(m.id)
        existing.save()


class NoModelTests(TestCase):
    """
    Test for #7198 to ensure that the proper error message is raised
    when attempting to load an app with no models.py file.

    Because the test runner won't currently load a test module with no
    models.py file, this TestCase instead lives in this module.

    It seemed like an appropriate home for it.
    """
    @override_settings(INSTALLED_APPS=("empty.no_models",))
    def test_no_models(self):
        with six.assertRaisesRegex(self, ImproperlyConfigured,
                'App with label no_models is missing a models.py module.'):
            get_app('no_models')
apache-2.0
lepricon49/headphones
lib/yaml/representer.py
359
17642
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', 'RepresenterError'] from error import * from nodes import * import datetime import sys, copy_reg, types class RepresenterError(YAMLError): pass class BaseRepresenter(object): yaml_representers = {} yaml_multi_representers = {} def __init__(self, default_style=None, default_flow_style=None): self.default_style = default_style self.default_flow_style = default_flow_style self.represented_objects = {} self.object_keeper = [] self.alias_key = None def represent(self, data): node = self.represent_data(data) self.serialize(node) self.represented_objects = {} self.object_keeper = [] self.alias_key = None def get_classobj_bases(self, cls): bases = [cls] for base in cls.__bases__: bases.extend(self.get_classobj_bases(base)) return bases def represent_data(self, data): if self.ignore_aliases(data): self.alias_key = None else: self.alias_key = id(data) if self.alias_key is not None: if self.alias_key in self.represented_objects: node = self.represented_objects[self.alias_key] #if node is None: # raise RepresenterError("recursive objects are not allowed: %r" % data) return node #self.represented_objects[alias_key] = None self.object_keeper.append(data) data_types = type(data).__mro__ if type(data) is types.InstanceType: data_types = self.get_classobj_bases(data.__class__)+list(data_types) if data_types[0] in self.yaml_representers: node = self.yaml_representers[data_types[0]](self, data) else: for data_type in data_types: if data_type in self.yaml_multi_representers: node = self.yaml_multi_representers[data_type](self, data) break else: if None in self.yaml_multi_representers: node = self.yaml_multi_representers[None](self, data) elif None in self.yaml_representers: node = self.yaml_representers[None](self, data) else: node = ScalarNode(None, unicode(data)) #if alias_key is not None: # self.represented_objects[alias_key] = node return node def add_representer(cls, data_type, representer): if not 'yaml_representers' in cls.__dict__: cls.yaml_representers = cls.yaml_representers.copy() cls.yaml_representers[data_type] = representer add_representer = classmethod(add_representer) def add_multi_representer(cls, data_type, representer): if not 'yaml_multi_representers' in cls.__dict__: cls.yaml_multi_representers = cls.yaml_multi_representers.copy() cls.yaml_multi_representers[data_type] = representer add_multi_representer = classmethod(add_multi_representer) def represent_scalar(self, tag, value, style=None): if style is None: style = self.default_style node = ScalarNode(tag, value, style=style) if self.alias_key is not None: self.represented_objects[self.alias_key] = node return node def represent_sequence(self, tag, sequence, flow_style=None): value = [] node = SequenceNode(tag, value, flow_style=flow_style) if self.alias_key is not None: self.represented_objects[self.alias_key] = node best_style = True for item in sequence: node_item = self.represent_data(item) if not (isinstance(node_item, ScalarNode) and not node_item.style): best_style = False value.append(node_item) if flow_style is None: if self.default_flow_style is not None: node.flow_style = self.default_flow_style else: node.flow_style = best_style return node def represent_mapping(self, tag, mapping, flow_style=None): value = [] node = MappingNode(tag, value, flow_style=flow_style) if self.alias_key is not None: self.represented_objects[self.alias_key] = node best_style = True if hasattr(mapping, 'items'): mapping = mapping.items() mapping.sort() for item_key, item_value in 
mapping: node_key = self.represent_data(item_key) node_value = self.represent_data(item_value) if not (isinstance(node_key, ScalarNode) and not node_key.style): best_style = False if not (isinstance(node_value, ScalarNode) and not node_value.style): best_style = False value.append((node_key, node_value)) if flow_style is None: if self.default_flow_style is not None: node.flow_style = self.default_flow_style else: node.flow_style = best_style return node def ignore_aliases(self, data): return False class SafeRepresenter(BaseRepresenter): def ignore_aliases(self, data): if data in [None, ()]: return True if isinstance(data, (str, unicode, bool, int, float)): return True def represent_none(self, data): return self.represent_scalar(u'tag:yaml.org,2002:null', u'null') def represent_str(self, data): tag = None style = None try: data = unicode(data, 'ascii') tag = u'tag:yaml.org,2002:str' except UnicodeDecodeError: try: data = unicode(data, 'utf-8') tag = u'tag:yaml.org,2002:str' except UnicodeDecodeError: data = data.encode('base64') tag = u'tag:yaml.org,2002:binary' style = '|' return self.represent_scalar(tag, data, style=style) def represent_unicode(self, data): return self.represent_scalar(u'tag:yaml.org,2002:str', data) def represent_bool(self, data): if data: value = u'true' else: value = u'false' return self.represent_scalar(u'tag:yaml.org,2002:bool', value) def represent_int(self, data): return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) def represent_long(self, data): return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) inf_value = 1e300 while repr(inf_value) != repr(inf_value*inf_value): inf_value *= inf_value def represent_float(self, data): if data != data or (data == 0.0 and data == 1.0): value = u'.nan' elif data == self.inf_value: value = u'.inf' elif data == -self.inf_value: value = u'-.inf' else: value = unicode(repr(data)).lower() # Note that in some cases `repr(data)` represents a float number # without the decimal parts. For instance: # >>> repr(1e17) # '1e17' # Unfortunately, this is not a valid float representation according # to the definition of the `!!float` tag. We fix this by adding # '.0' before the 'e' symbol. if u'.' 
not in value and u'e' in value: value = value.replace(u'e', u'.0e', 1) return self.represent_scalar(u'tag:yaml.org,2002:float', value) def represent_list(self, data): #pairs = (len(data) > 0 and isinstance(data, list)) #if pairs: # for item in data: # if not isinstance(item, tuple) or len(item) != 2: # pairs = False # break #if not pairs: return self.represent_sequence(u'tag:yaml.org,2002:seq', data) #value = [] #for item_key, item_value in data: # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', # [(item_key, item_value)])) #return SequenceNode(u'tag:yaml.org,2002:pairs', value) def represent_dict(self, data): return self.represent_mapping(u'tag:yaml.org,2002:map', data) def represent_set(self, data): value = {} for key in data: value[key] = None return self.represent_mapping(u'tag:yaml.org,2002:set', value) def represent_date(self, data): value = unicode(data.isoformat()) return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) def represent_datetime(self, data): value = unicode(data.isoformat(' ')) return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) def represent_yaml_object(self, tag, data, cls, flow_style=None): if hasattr(data, '__getstate__'): state = data.__getstate__() else: state = data.__dict__.copy() return self.represent_mapping(tag, state, flow_style=flow_style) def represent_undefined(self, data): raise RepresenterError("cannot represent an object: %s" % data) SafeRepresenter.add_representer(type(None), SafeRepresenter.represent_none) SafeRepresenter.add_representer(str, SafeRepresenter.represent_str) SafeRepresenter.add_representer(unicode, SafeRepresenter.represent_unicode) SafeRepresenter.add_representer(bool, SafeRepresenter.represent_bool) SafeRepresenter.add_representer(int, SafeRepresenter.represent_int) SafeRepresenter.add_representer(long, SafeRepresenter.represent_long) SafeRepresenter.add_representer(float, SafeRepresenter.represent_float) SafeRepresenter.add_representer(list, SafeRepresenter.represent_list) SafeRepresenter.add_representer(tuple, SafeRepresenter.represent_list) SafeRepresenter.add_representer(dict, SafeRepresenter.represent_dict) SafeRepresenter.add_representer(set, SafeRepresenter.represent_set) SafeRepresenter.add_representer(datetime.date, SafeRepresenter.represent_date) SafeRepresenter.add_representer(datetime.datetime, SafeRepresenter.represent_datetime) SafeRepresenter.add_representer(None, SafeRepresenter.represent_undefined) class Representer(SafeRepresenter): def represent_str(self, data): tag = None style = None try: data = unicode(data, 'ascii') tag = u'tag:yaml.org,2002:str' except UnicodeDecodeError: try: data = unicode(data, 'utf-8') tag = u'tag:yaml.org,2002:python/str' except UnicodeDecodeError: data = data.encode('base64') tag = u'tag:yaml.org,2002:binary' style = '|' return self.represent_scalar(tag, data, style=style) def represent_unicode(self, data): tag = None try: data.encode('ascii') tag = u'tag:yaml.org,2002:python/unicode' except UnicodeEncodeError: tag = u'tag:yaml.org,2002:str' return self.represent_scalar(tag, data) def represent_long(self, data): tag = u'tag:yaml.org,2002:int' if int(data) is not data: tag = u'tag:yaml.org,2002:python/long' return self.represent_scalar(tag, unicode(data)) def represent_complex(self, data): if data.imag == 0.0: data = u'%r' % data.real elif data.real == 0.0: data = u'%rj' % data.imag elif data.imag > 0: data = u'%r+%rj' % (data.real, data.imag) else: data = u'%r%rj' % (data.real, data.imag) return 
self.represent_scalar(u'tag:yaml.org,2002:python/complex', data) def represent_tuple(self, data): return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data) def represent_name(self, data): name = u'%s.%s' % (data.__module__, data.__name__) return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'') def represent_module(self, data): return self.represent_scalar( u'tag:yaml.org,2002:python/module:'+data.__name__, u'') def represent_instance(self, data): # For instances of classic classes, we use __getinitargs__ and # __getstate__ to serialize the data. # If data.__getinitargs__ exists, the object must be reconstructed by # calling cls(**args), where args is a tuple returned by # __getinitargs__. Otherwise, the cls.__init__ method should never be # called and the class instance is created by instantiating a trivial # class and assigning to the instance's __class__ variable. # If data.__getstate__ exists, it returns the state of the object. # Otherwise, the state of the object is data.__dict__. # We produce either a !!python/object or !!python/object/new node. # If data.__getinitargs__ does not exist and state is a dictionary, we # produce a !!python/object node . Otherwise we produce a # !!python/object/new node. cls = data.__class__ class_name = u'%s.%s' % (cls.__module__, cls.__name__) args = None state = None if hasattr(data, '__getinitargs__'): args = list(data.__getinitargs__()) if hasattr(data, '__getstate__'): state = data.__getstate__() else: state = data.__dict__ if args is None and isinstance(state, dict): return self.represent_mapping( u'tag:yaml.org,2002:python/object:'+class_name, state) if isinstance(state, dict) and not state: return self.represent_sequence( u'tag:yaml.org,2002:python/object/new:'+class_name, args) value = {} if args: value['args'] = args value['state'] = state return self.represent_mapping( u'tag:yaml.org,2002:python/object/new:'+class_name, value) def represent_object(self, data): # We use __reduce__ API to save the data. data.__reduce__ returns # a tuple of length 2-5: # (function, args, state, listitems, dictitems) # For reconstructing, we calls function(*args), then set its state, # listitems, and dictitems if they are not None. # A special case is when function.__name__ == '__newobj__'. In this # case we create the object with args[0].__new__(*args). # Another special case is when __reduce__ returns a string - we don't # support it. # We produce a !!python/object, !!python/object/new or # !!python/object/apply node. 
cls = type(data) if cls in copy_reg.dispatch_table: reduce = copy_reg.dispatch_table[cls](data) elif hasattr(data, '__reduce_ex__'): reduce = data.__reduce_ex__(2) elif hasattr(data, '__reduce__'): reduce = data.__reduce__() else: raise RepresenterError("cannot represent object: %r" % data) reduce = (list(reduce)+[None]*5)[:5] function, args, state, listitems, dictitems = reduce args = list(args) if state is None: state = {} if listitems is not None: listitems = list(listitems) if dictitems is not None: dictitems = dict(dictitems) if function.__name__ == '__newobj__': function = args[0] args = args[1:] tag = u'tag:yaml.org,2002:python/object/new:' newobj = True else: tag = u'tag:yaml.org,2002:python/object/apply:' newobj = False function_name = u'%s.%s' % (function.__module__, function.__name__) if not args and not listitems and not dictitems \ and isinstance(state, dict) and newobj: return self.represent_mapping( u'tag:yaml.org,2002:python/object:'+function_name, state) if not listitems and not dictitems \ and isinstance(state, dict) and not state: return self.represent_sequence(tag+function_name, args) value = {} if args: value['args'] = args if state or not isinstance(state, dict): value['state'] = state if listitems: value['listitems'] = listitems if dictitems: value['dictitems'] = dictitems return self.represent_mapping(tag+function_name, value) Representer.add_representer(str, Representer.represent_str) Representer.add_representer(unicode, Representer.represent_unicode) Representer.add_representer(long, Representer.represent_long) Representer.add_representer(complex, Representer.represent_complex) Representer.add_representer(tuple, Representer.represent_tuple) Representer.add_representer(type, Representer.represent_name) Representer.add_representer(types.ClassType, Representer.represent_name) Representer.add_representer(types.FunctionType, Representer.represent_name) Representer.add_representer(types.BuiltinFunctionType, Representer.represent_name) Representer.add_representer(types.ModuleType, Representer.represent_module) Representer.add_multi_representer(types.InstanceType, Representer.represent_instance) Representer.add_multi_representer(object, Representer.represent_object)
gpl-3.0
alphaKAI/mal
python/step8_macros.py
42
4847
import sys, traceback import mal_readline import mal_types as types import reader, printer from env import Env import core # read def READ(str): return reader.read_str(str) # eval def is_pair(x): return types._sequential_Q(x) and len(x) > 0 def quasiquote(ast): if not is_pair(ast): return types._list(types._symbol("quote"), ast) elif ast[0] == 'unquote': return ast[1] elif is_pair(ast[0]) and ast[0][0] == 'splice-unquote': return types._list(types._symbol("concat"), ast[0][1], quasiquote(ast[1:])) else: return types._list(types._symbol("cons"), quasiquote(ast[0]), quasiquote(ast[1:])) def is_macro_call(ast, env): return (types._list_Q(ast) and types._symbol_Q(ast[0]) and env.find(ast[0]) and hasattr(env.get(ast[0]), '_ismacro_')) def macroexpand(ast, env): while is_macro_call(ast, env): mac = env.get(ast[0]) ast = macroexpand(mac(*ast[1:]), env) return ast def eval_ast(ast, env): if types._symbol_Q(ast): return env.get(ast) elif types._list_Q(ast): return types._list(*map(lambda a: EVAL(a, env), ast)) elif types._vector_Q(ast): return types._vector(*map(lambda a: EVAL(a, env), ast)) elif types._hash_map_Q(ast): keyvals = [] for k in ast.keys(): keyvals.append(EVAL(k, env)) keyvals.append(EVAL(ast[k], env)) return types._hash_map(*keyvals) else: return ast # primitive value, return unchanged def EVAL(ast, env): while True: #print("EVAL %s" % printer._pr_str(ast)) if not types._list_Q(ast): return eval_ast(ast, env) # apply list ast = macroexpand(ast, env) if not types._list_Q(ast): return ast if len(ast) == 0: return ast a0 = ast[0] if "def!" == a0: a1, a2 = ast[1], ast[2] res = EVAL(a2, env) return env.set(a1, res) elif "let*" == a0: a1, a2 = ast[1], ast[2] let_env = Env(env) for i in range(0, len(a1), 2): let_env.set(a1[i], EVAL(a1[i+1], let_env)) ast = a2 env = let_env # Continue loop (TCO) elif "quote" == a0: return ast[1] elif "quasiquote" == a0: ast = quasiquote(ast[1]); # Continue loop (TCO) elif 'defmacro!' == a0: func = EVAL(ast[2], env) func._ismacro_ = True return env.set(ast[1], func) elif 'macroexpand' == a0: return macroexpand(ast[1], env) elif "do" == a0: eval_ast(ast[1:-1], env) ast = ast[-1] # Continue loop (TCO) elif "if" == a0: a1, a2 = ast[1], ast[2] cond = EVAL(a1, env) if cond is None or cond is False: if len(ast) > 3: ast = ast[3] else: ast = None else: ast = a2 # Continue loop (TCO) elif "fn*" == a0: a1, a2 = ast[1], ast[2] return types._function(EVAL, Env, a2, env, a1) else: el = eval_ast(ast, env) f = el[0] if hasattr(f, '__ast__'): ast = f.__ast__ env = f.__gen_env__(el[1:]) else: return f(*el[1:]) # print def PRINT(exp): return printer._pr_str(exp) # repl repl_env = Env() def REP(str): return PRINT(EVAL(READ(str), repl_env)) # core.py: defined using python for k, v in core.ns.items(): repl_env.set(types._symbol(k), v) repl_env.set(types._symbol('eval'), lambda ast: EVAL(ast, repl_env)) repl_env.set(types._symbol('*ARGV*'), types._list(*sys.argv[2:])) # core.mal: defined using the language itself REP("(def! not (fn* (a) (if a false true)))") REP("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))") REP("(defmacro! cond (fn* (& xs) (if (> (count xs) 0) (list 'if (first xs) (if (> (count xs) 1) (nth xs 1) (throw \"odd number of forms to cond\")) (cons 'cond (rest (rest xs)))))))") REP("(defmacro! or (fn* (& xs) (if (empty? 
xs) nil (if (= 1 (count xs)) (first xs) `(let* (or_FIXME ~(first xs)) (if or_FIXME or_FIXME (or ~@(rest xs))))))))") if len(sys.argv) >= 2: REP('(load-file "' + sys.argv[1] + '")') sys.exit(0) # repl loop while True: try: line = mal_readline.readline("user> ") if line == None: break if line == "": continue print(REP(line)) except reader.Blank: continue except Exception as e: print("".join(traceback.format_exception(*sys.exc_info())))
mpl-2.0
akhmadMizkat/odoo
openerp/addons/base/tests/test_ir_filters.py
36
12520
# -*- coding: utf-8 -*- import functools from openerp import exceptions from openerp.tests import common def noid(d): """ Removes values that are not relevant for the test comparisons """ d.pop('id', None) d.pop('action_id', None) return d class FiltersCase(common.TransactionCase): def build(self, model, *args): Model = self.registry(model) for vars in args: Model.create(self.cr, common.ADMIN_USER_ID, vars, {}) class TestGetFilters(FiltersCase): def setUp(self): super(TestGetFilters, self).setUp() self.USER = self.registry('res.users').name_search(self.cr, self.uid, 'demo')[0] self.USER_ID = self.USER[0] def test_own_filters(self): self.build( 'ir.filters', dict(name='a', user_id=self.USER_ID, model_id='ir.filters'), dict(name='b', user_id=self.USER_ID, model_id='ir.filters'), dict(name='c', user_id=self.USER_ID, model_id='ir.filters'), dict(name='d', user_id=self.USER_ID, model_id='ir.filters')) filters = self.registry('ir.filters').get_filters( self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', is_default=False, user_id=self.USER, domain='[]', context='{}', sort='[]'), dict(name='b', is_default=False, user_id=self.USER, domain='[]', context='{}', sort='[]'), dict(name='c', is_default=False, user_id=self.USER, domain='[]', context='{}', sort='[]'), dict(name='d', is_default=False, user_id=self.USER, domain='[]', context='{}', sort='[]'), ]) def test_global_filters(self): self.build( 'ir.filters', dict(name='a', user_id=False, model_id='ir.filters'), dict(name='b', user_id=False, model_id='ir.filters'), dict(name='c', user_id=False, model_id='ir.filters'), dict(name='d', user_id=False, model_id='ir.filters'), ) filters = self.registry('ir.filters').get_filters( self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'), dict(name='b', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'), dict(name='c', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'), dict(name='d', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'), ]) def test_no_third_party_filters(self): self.build( 'ir.filters', dict(name='a', user_id=False, model_id='ir.filters'), dict(name='b', user_id=common.ADMIN_USER_ID, model_id='ir.filters'), dict(name='c', user_id=self.USER_ID, model_id='ir.filters'), dict(name='d', user_id=common.ADMIN_USER_ID, model_id='ir.filters') ) filters = self.registry('ir.filters').get_filters( self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'), dict(name='c', is_default=False, user_id=self.USER, domain='[]', context='{}', sort='[]'), ]) class TestOwnDefaults(FiltersCase): def setUp(self): super(TestOwnDefaults, self).setUp() self.USER = self.registry('res.users').name_search(self.cr, self.uid, 'demo')[0] self.USER_ID = self.USER[0] def test_new_no_filter(self): """ When creating a @is_default filter with no existing filter, that new filter gets the default flag """ Filters = self.registry('ir.filters') Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'a', 'model_id': 'ir.filters', 'user_id': self.USER_ID, 'is_default': True, }) filters = Filters.get_filters(self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', user_id=self.USER, is_default=True, domain='[]', context='{}', sort='[]') ]) def test_new_filter_not_default(self): 
""" When creating a @is_default filter with existing non-default filters, the new filter gets the flag """ self.build( 'ir.filters', dict(name='a', user_id=self.USER_ID, model_id='ir.filters'), dict(name='b', user_id=self.USER_ID, model_id='ir.filters'), ) Filters = self.registry('ir.filters') Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'c', 'model_id': 'ir.filters', 'user_id': self.USER_ID, 'is_default': True, }) filters = Filters.get_filters(self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', user_id=self.USER, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='b', user_id=self.USER, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='c', user_id=self.USER, is_default=True, domain='[]', context='{}', sort='[]'), ]) def test_new_filter_existing_default(self): """ When creating a @is_default filter where an existing filter is already @is_default, the flag should be *moved* from the old to the new filter """ self.build( 'ir.filters', dict(name='a', user_id=self.USER_ID, model_id='ir.filters'), dict(name='b', is_default=True, user_id=self.USER_ID, model_id='ir.filters'), ) Filters = self.registry('ir.filters') Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'c', 'model_id': 'ir.filters', 'user_id': self.USER_ID, 'is_default': True, }) filters = Filters.get_filters(self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', user_id=self.USER, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='b', user_id=self.USER, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='c', user_id=self.USER, is_default=True, domain='[]', context='{}', sort='[]'), ]) def test_update_filter_set_default(self): """ When updating an existing filter to @is_default, if an other filter already has the flag the flag should be moved """ self.build( 'ir.filters', dict(name='a', user_id=self.USER_ID, model_id='ir.filters'), dict(name='b', is_default=True, user_id=self.USER_ID, model_id='ir.filters'), ) Filters = self.registry('ir.filters') Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'a', 'model_id': 'ir.filters', 'user_id': self.USER_ID, 'is_default': True, }) filters = Filters.get_filters(self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', user_id=self.USER, is_default=True, domain='[]', context='{}', sort='[]'), dict(name='b', user_id=self.USER, is_default=False, domain='[]', context='{}', sort='[]'), ]) class TestGlobalDefaults(FiltersCase): def setUp(self): super(TestGlobalDefaults, self).setUp() self.USER = self.registry('res.users').name_search(self.cr, self.uid, 'demo')[0] self.USER_ID = self.USER[0] def test_new_filter_not_default(self): """ When creating a @is_default filter with existing non-default filters, the new filter gets the flag """ self.build( 'ir.filters', dict(name='a', user_id=False, model_id='ir.filters'), dict(name='b', user_id=False, model_id='ir.filters'), ) Filters = self.registry('ir.filters') Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'c', 'model_id': 'ir.filters', 'user_id': False, 'is_default': True, }) filters = Filters.get_filters(self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', user_id=False, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='b', user_id=False, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='c', user_id=False, is_default=True, domain='[]', 
context='{}', sort='[]'), ]) def test_new_filter_existing_default(self): """ When creating a @is_default filter where an existing filter is already @is_default, an error should be generated """ self.build( 'ir.filters', dict(name='a', user_id=False, model_id='ir.filters'), dict(name='b', is_default=True, user_id=False, model_id='ir.filters'), ) Filters = self.registry('ir.filters') with self.assertRaises(exceptions.Warning): Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'c', 'model_id': 'ir.filters', 'user_id': False, 'is_default': True, }) def test_update_filter_set_default(self): """ When updating an existing filter to @is_default, if an other filter already has the flag an error should be generated """ self.build( 'ir.filters', dict(name='a', user_id=False, model_id='ir.filters'), dict(name='b', is_default=True, user_id=False, model_id='ir.filters'), ) Filters = self.registry('ir.filters') with self.assertRaises(exceptions.Warning): Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'a', 'model_id': 'ir.filters', 'user_id': False, 'is_default': True, }) def test_update_default_filter(self): """ Replacing the current default global filter should not generate any error """ self.build( 'ir.filters', dict(name='a', user_id=False, model_id='ir.filters'), dict(name='b', is_default=True, user_id=False, model_id='ir.filters'), ) Filters = self.registry('ir.filters') context_value = "{'some_key': True}" Filters.create_or_replace(self.cr, self.USER_ID, { 'name': 'b', 'model_id': 'ir.filters', 'user_id': False, 'context': context_value, 'is_default': True, }) filters = Filters.get_filters(self.cr, self.USER_ID, 'ir.filters') self.assertItemsEqual(map(noid, filters), [ dict(name='a', user_id=False, is_default=False, domain='[]', context='{}', sort='[]'), dict(name='b', user_id=False, is_default=True, domain='[]', context=context_value, sort='[]'), ]) from openerp.tests.common import TransactionCase class TestReadGroup(TransactionCase): """Test function read_group with groupby on a many2one field to a model (in test, "user_id" to "res.users") which is ordered by an inherited not stored field (in test, "name" inherited from "res.partners"). """ def setUp(self): super(TestReadGroup, self).setUp() self.ir_filters_model = self.env['ir.filters'] self.res_partner_model = self.env['res.partner'] self.res_users_model = self.env['res.users'] def test_read_group_1(self): self.assertEqual(self.res_users_model._order, "name, login", "Model res.users must be ordered by name, login") self.assertFalse(self.res_users_model._fields['name'].store, "Field name is not stored in res.users") filter_a = self.ir_filters_model.create(dict(name="Filter_A", model_id="ir.filters")) filter_b = self.ir_filters_model.create(dict(name="Filter_B", model_id="ir.filters")) filter_b.write(dict(user_id=False)) res = self.ir_filters_model.read_group([], ['name', 'user_id'], ['user_id']) self.assertTrue(any(val['user_id'] == False for val in res), "At least one group must contain val['user_id'] == False.")
gpl-3.0
LyonsLab/coge
bin/last_wrapper/Bio/Motif/Parsers/MEME.py
3
12014
# Copyright 2008 by Bartek Wilczynski # Adapted from Bio.MEME.Parser by Jason A. Hackney. All rights reserved. # This code is part of the Biopython distribution and governed by its # license. Please see the LICENSE file that should have been included # as part of this package. from Bio.Alphabet import IUPAC from Bio import Seq import re from math import sqrt import sys from Bio.Motif import Motif def read(handle): """Parses the text output of the MEME program into MEME.Record object. Example: >>> f = open("meme.output.txt") >>> from Bio.Motif.Parsers import MEME >>> record = MEME.read(f) >>> for motif in record.motifs: ... for instance in motif.instances: ... print instance.motif_name, instance.sequence_name, instance.strand, instance.pvalue """ record = MEMERecord() __read_version(record, handle) __read_datafile(record, handle) __read_alphabet(record, handle) __read_sequence_names(record, handle) __read_command(record, handle) for line in handle: if line.startswith('MOTIF 1'): break else: raise ValueError('Unexpected end of stream') while True: motif = __create_motif(line) motif.alphabet = record.alphabet record.motifs.append(motif) __read_motif_name(motif, handle) __read_motif_sequences(motif, handle, 'revcomp' in record.command) __skip_unused_lines(handle) try: line = handle.next() except StopIteration: raise ValueError('Unexpected end of stream: Expected to find new motif, or the summary of motifs') if line.startswith("SUMMARY OF MOTIFS"): break if not line.startswith('MOTIF'): raise ValueError("Line does not start with 'MOTIF':\n%s" % line) return record class MEMEMotif (Motif): """A subclass of Motif used in parsing MEME (and MAST) output. This sublcass defines functions and data specific to MEME motifs. This includes the evalue for a motif and the PSSM of the motif. Methods: add_instance_from_values (name = 'default', pvalue = 1, sequence = 'ATA', start = 0, strand = +): create a new instance of the motif with the specified values. add_to_pssm (position): add a new position to the pssm. The position should be a list of nucleotide/amino acid frequencies add_to_logodds (position): add a new position to the log odds matrix. The position should be a tuple of log odds values for the nucleotide/amino acid at that position. compare_motifs (other_motif): returns the maximum correlation between this motif and other_motif """ def __init__ (self): Motif.__init__(self) self.evalue = 0.0 def _numoccurrences (self, number): if type(number) == int: self.num_occurrences = number else: number = int(number) self.num_occurrences = number def get_instance_by_name (self,name): for i in self.instances: if i.sequence_name == name: return i return None def add_instance_from_values (self, name = 'default', pvalue = 1, sequence = 'ATA', start = 0, strand = '+'): inst = MEMEInstance(sequence,self.alphabet) inst._pvalue(pvalue) inst._seqname(name) inst._start(start) inst._strand(strand) if self.length: inst._length(self.length) else: inst._length(len(sequence)) if self.name: inst._motifname(self.name) self.add_instance(inst) def _evalue (self, evalue): if type(evalue) == float: self.evalue = evalue else: evalue = float(evalue) self.evalue = evalue class MEMEInstance(Seq.Seq): """A class describing the instances of a MEME motif, and the data thereof. 
""" def __init__ (self,*args,**kwds): Seq.Seq.__init__(self,*args,**kwds) self.sequence_name = "" self.start = 0 self.pvalue = 1.0 self.strand = 0 self.length = 0 self.motif_name = "" def _seqname (self, name): self.sequence_name = name def _motifname (self, name): self.motif_name = name def _start (self,start): start = int(start) self.start = start def _pvalue (self,pval): pval = float(pval) self.pvalue = pval def _score (self, score): score = float(score) self.score = score def _strand (self, strand): self.strand = strand def _length (self, length): self.length = length class MEMERecord(object): """A class for holding the results of a MEME run. A MEMERecord is an object that holds the results from running MEME. It implements no methods of its own. """ def __init__ (self): """__init__ (self)""" self.motifs = [] self.version = "" self.datafile = "" self.command = "" self.alphabet = None self.sequence_names = [] def get_motif_by_name (self, name): for m in self.motifs: if m.name == name: return m # Everything below is private def __read_version(record, handle): for line in handle: if line.startswith('MEME version'): break else: raise ValueError("Improper input file. File should contain a line starting MEME version.") line = line.strip() ls = line.split() record.version = ls[2] def __read_datafile(record, handle): for line in handle: if line.startswith('TRAINING SET'): break else: raise ValueError("Unexpected end of stream: 'TRAINING SET' not found.") try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with '****'") if not line.startswith('****'): raise ValueError("Line does not start with '****':\n%s" % line) try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with 'DATAFILE'") if not line.startswith('DATAFILE'): raise ValueError("Line does not start with 'DATAFILE':\n%s" % line) line = line.strip() line = line.replace('DATAFILE= ','') record.datafile = line def __read_alphabet(record, handle): try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with 'ALPHABET'") if not line.startswith('ALPHABET'): raise ValueError("Line does not start with 'ALPHABET':\n%s" % line) line = line.strip() line = line.replace('ALPHABET= ','') if line == 'ACGT': al = IUPAC.unambiguous_dna else: al = IUPAC.protein record.alphabet = al def __read_sequence_names(record, handle): try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with 'Sequence name'") if not line.startswith('Sequence name'): raise ValueError("Line does not start with 'Sequence name':\n%s" % line) try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with '----'") if not line.startswith('----'): raise ValueError("Line does not start with '----':\n%s" % line) for line in handle: if line.startswith('***'): break line = line.strip() ls = line.split() record.sequence_names.append(ls[0]) if len(ls) == 6: record.sequence_names.append(ls[3]) else: raise ValueError("Unexpected end of stream: Expected to find line starting with '***'") def __read_command(record, handle): for line in handle: if line.startswith('command:'): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with 'command'") line = line.strip() line = line.replace('command: ','') record.command = line def 
__create_motif(line): line = line[5:].strip() ls = line.split() motif = MEMEMotif() motif.length = int(ls[3]) motif._numoccurrences(ls[6]) motif._evalue(ls[12]) return motif def __read_motif_name(motif, handle): for line in handle: if 'sorted by position p-value' in line: break else: raise ValueError('Unexpected end of stream: Failed to find motif name') line = line.strip() ls = line.split() name = " ".join(ls[0:2]) motif.name=name def __read_motif_sequences(motif, handle, rv): try: line = handle.next() except StopIteration: raise ValueError('Unexpected end of stream: Failed to find motif sequences') if not line.startswith('---'): raise ValueError("Line does not start with '---':\n%s" % line) try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with 'Sequence name'") if not line.startswith('Sequence name'): raise ValueError("Line does not start with 'Sequence name':\n%s" % line) try: line = handle.next() except StopIteration: raise ValueError('Unexpected end of stream: Failed to find motif sequences') if not line.startswith('---'): raise ValueError("Line does not start with '---':\n%s" % line) for line in handle: if line.startswith('---'): break line = line.strip() ls = line.split() if rv: #seq = Seq.Seq(ls[5], record.alphabet) motif.add_instance_from_values(name = ls[0], sequence = ls[5], start = ls[2], pvalue = ls[3], strand = ls[1]) else: #seq = Seq.Seq(ls[4], record.alphabet) motif.add_instance_from_values(name = ls[0], sequence = ls[4], start = ls[1], pvalue = ls[2]) else: raise ValueError('Unexpected end of stream') def __skip_unused_lines(handle): for line in handle: if line.startswith('log-odds matrix'): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with 'log-odds matrix'") for line in handle: if line.startswith('---'): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with '---'") for line in handle: if line.startswith('letter-probability matrix'): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with 'letter-probability matrix'") for line in handle: if line.startswith('---'): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with '---'") for line in handle: if line.startswith('Time'): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with 'Time'") try: line = handle.next() except StopIteration: raise ValueError('Unexpected end of stream: Expected to find blank line') if line.strip(): raise ValueError("Expected blank line, but got:\n%s" % line) try: line = handle.next() except StopIteration: raise ValueError("Unexpected end of stream: Expected to find line starting with '***'") if not line.startswith('***'): raise ValueError("Line does not start with '***':\n%s" % line) for line in handle: if line.strip(): break else: raise ValueError("Unexpected end of stream: Expected to find line starting with '***'") if not line.startswith('***'): raise ValueError("Line does not start with '***':\n%s" % line)
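Beyond parsing a full MEME output file with read(), the motif classes above can be driven directly. The following is a hedged sketch that only uses methods defined in this module; the motif name, sequence and statistics are invented, and it assumes the old Bio.Motif API (Python 2) that this parser targets:

# Hedged sketch: building a MEMEMotif by hand instead of via read().
# All values below are illustrative.
from Bio.Alphabet import IUPAC
from Bio.Motif.Parsers import MEME

motif = MEME.MEMEMotif()
motif.alphabet = IUPAC.unambiguous_dna
motif.length = 3
motif.name = "MOTIF 1"
motif._evalue("1.3e-10")
motif.add_instance_from_values(name="seq_1", pvalue="2.5e-06",
                               sequence="ATA", start="17", strand="+")

inst = motif.get_instance_by_name("seq_1")
print inst.motif_name, inst.start, inst.pvalue  # Python 2, as in the docstring above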
bsd-2-clause
bauman/laikaboss
laikaboss/modules/explode_upx.py
20
3720
# Copyright 2015 Lockheed Martin Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from laikaboss.objectmodel import ModuleObject, ExternalVars, QuitScanException, \ GlobalScanTimeoutError, GlobalModuleTimeoutError from laikaboss.si_module import SI_MODULE from laikaboss import config import tempfile import pexpect import os class EXPLODE_UPX(SI_MODULE): '''a module that decompresses upx compressed executables''' def __init__(self,): ''' __init__ function merely needs to set its module_name and nothing more''' self.module_name = "EXPLODE_UPX" self.TEMP_DIR = '/tmp/laikaboss_tmp' if hasattr(config, 'tempdir'): self.TEMP_DIR = config.tempdir.rstrip('/') if not os.path.isdir(self.TEMP_DIR): os.mkdir(self.TEMP_DIR) os.chmod(self.TEMP_DIR, 0777) def _run(self, scanObject, result, depth, args): ''' The core of your laika module. This is how your code will be invoked Requires: Package Dependencies Only Assumes: scanObject.buffer is a upx compressed executable Ensures: 1. No propagating errors 2. Decompressed buffer is returned as a new buffer to scanned Error Handling: 1. If upx decompress fails, output file will not be created attempt to open the decompressed file will throw file not exists exception silently passed Module Execution: 1. Dump the scanObject.buffer into a named temp file 2. Call upx decompresser outputting to the <input_filename>_output 3. Open the decompressed buffer file and read it into a buffer 4. Close and delete the decompressed buffer file 5. If length of the decompressed buffer is > the compressed buffer (decompression worked): True: Add the buffer to the result object False: Do nothing (future perhaps add failed to decompress metadata?) 6. Return ''' moduleResult = [] try: with tempfile.NamedTemporaryFile(dir=self.TEMP_DIR) as temp_file_input: temp_file_input_name = temp_file_input.name temp_file_input.write(scanObject.buffer) temp_file_input.flush() temp_file_output_name = temp_file_input_name+"_output" strCMD = "upx -d "+temp_file_input_name+" -o "+temp_file_output_name outputString = pexpect.run(strCMD) f = open(temp_file_output_name) #if strCMD failed, this will throw a file not exists exception newbuf = f.read() f.close() os.remove(temp_file_output_name) if len(newbuf) > len(scanObject.buffer): moduleResult.append(ModuleObject(buffer=newbuf, externalVars=ExternalVars(filename="e_upx"))) except (QuitScanException, GlobalScanTimeoutError, GlobalModuleTimeoutError): raise except: pass return moduleResult
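The module above boils down to a temp-file round trip through the upx command-line tool. A standalone sketch of that same round trip outside the Laika framework is shown below; it assumes the upx binary is installed and on PATH, and the helper name is made up:

# Hedged sketch of the decompress step only: write the buffer to a temp file,
# run `upx -d`, read the output file back.  upx creates no output on failure.
import os
import subprocess
import tempfile

def upx_decompress(buf, tmp_dir='/tmp'):
    with tempfile.NamedTemporaryFile(dir=tmp_dir) as packed:
        packed.write(buf)
        packed.flush()
        out_name = packed.name + '_output'
        subprocess.call(['upx', '-d', packed.name, '-o', out_name])
        try:
            with open(out_name, 'rb') as f:
                return f.read()
        except IOError:
            return None  # decompression failed, no output file was written
        finally:
            if os.path.exists(out_name):
                os.remove(out_name)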
apache-2.0
WoLpH/CouchPotatoServer
libs/tmdb3/tmdb_auth.py
10
4257
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: tmdb_auth.py # Python Library # Author: Raymond Wagner # Purpose: Provide authentication and session services for # calls against the TMDB v3 API #----------------------- from datetime import datetime as _pydatetime, \ tzinfo as _pytzinfo import re class datetime( _pydatetime ): """Customized datetime class with ISO format parsing.""" _reiso = re.compile('(?P<year>[0-9]{4})' '-(?P<month>[0-9]{1,2})' '-(?P<day>[0-9]{1,2})' '.' '(?P<hour>[0-9]{2})' ':(?P<min>[0-9]{2})' '(:(?P<sec>[0-9]{2}))?' '(?P<tz>Z|' '(?P<tzdirec>[-+])' '(?P<tzhour>[0-9]{1,2})' '(:)?' '(?P<tzmin>[0-9]{2})?' ')?') class _tzinfo( _pytzinfo): def __init__(self, direc='+', hr=0, min=0): if direc == '-': hr = -1*int(hr) self._offset = timedelta(hours=int(hr), minutes=int(min)) def utcoffset(self, dt): return self._offset def tzname(self, dt): return '' def dst(self, dt): return timedelta(0) @classmethod def fromIso(cls, isotime, sep='T'): match = cls._reiso.match(isotime) if match is None: raise TypeError("time data '%s' does not match ISO 8601 format" \ % isotime) dt = [int(a) for a in match.groups()[:5]] if match.group('sec') is not None: dt.append(int(match.group('sec'))) else: dt.append(0) if match.group('tz'): if match.group('tz') == 'Z': tz = cls._tzinfo() elif match.group('tzmin'): tz = cls._tzinfo(*match.group('tzdirec','tzhour','tzmin')) else: tz = cls._tzinfo(*match.group('tzdirec','tzhour')) dt.append(0) dt.append(tz) return cls(*dt) from request import Request from tmdb_exceptions import * syssession = None def set_session(sessionid): global syssession syssession = Session(sessionid) def get_session(sessionid=None): global syssession if sessionid: return Session(sessionid) elif syssession is not None: return syssession else: return Session.new() class Session( object ): @classmethod def new(cls): return cls(None) def __init__(self, sessionid): self.sessionid = sessionid @property def sessionid(self): if self._sessionid is None: if self._authtoken is None: raise TMDBError("No Auth Token to produce Session for") # TODO: check authtokenexpiration against current time req = Request('authentication/session/new', \ request_token=self._authtoken) req.lifetime = 0 dat = req.readJSON() if not dat['success']: raise TMDBError("Session generation failed") self._sessionid = dat['session_id'] return self._sessionid @sessionid.setter def sessionid(self, value): self._sessionid = value self._authtoken = None self._authtokenexpiration = None if value is None: self.authenticated = False else: self.authenticated = True @property def authtoken(self): if self.authenticated: raise TMDBError("Session is already authenticated") if self._authtoken is None: req = Request('authentication/token/new') req.lifetime = 0 dat = req.readJSON() if not dat['success']: raise TMDBError("Auth Token request failed") self._authtoken = dat['request_token'] self._authtokenexpiration = datetime.fromIso(dat['expires_at']) return self._authtoken @property def callbackurl(self): return "http://www.themoviedb.org/authenticate/"+self._authtoken
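For reference, the call order this module supports looks roughly like the sketch below. It only uses names defined above, but it needs a valid TMDB API key configured elsewhere in the tmdb3 package plus network access, so treat it as an illustration of the flow rather than production code:

# Hedged sketch of the three-step TMDB authentication flow (Python 2 prints,
# matching the module).
from tmdb3.tmdb_auth import get_session, set_session

session = get_session()        # Session.new(): no session id yet
token = session.authtoken      # hits authentication/token/new
print session.callbackurl      # user approves the request token at this URL
# ... once the user has approved the token in a browser ...
sid = session.sessionid        # exchanges the token via authentication/session/new
set_session(sid)               # install it as the module-wide default session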
gpl-3.0
JakeLowey/HackRPI2
django/conf/locale/pt_BR/formats.py
86
1536
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \d\e F \d\e Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0 # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d/%m/%Y', '%d/%m/%y', '%Y-%m-%d', # '25/10/2006', '25/10/06', '2006-10-25'
    # '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006'
    # '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M',    # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%d/%m/%Y %H:%M:%S',    # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M',       # '25/10/2006 14:30'
    '%d/%m/%Y',             # '25/10/2006'
    '%d/%m/%y %H:%M:%S',    # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M',       # '25/10/06 14:30'
    '%d/%m/%y',             # '25/10/06'
    '%Y-%m-%d %H:%M:%S',    # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M',       # '2006-10-25 14:30'
    '%Y-%m-%d',             # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
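The *_INPUT_FORMATS tuples above are plain strftime/strptime patterns, so their behaviour can be checked with the standard library alone. A small hedged sketch (the sample date string is invented) that mirrors how Django tries each format in turn:

from datetime import datetime

value = '25/10/2006'
for fmt in ('%d/%m/%Y', '%d/%m/%y', '%Y-%m-%d'):
    try:
        print(datetime.strptime(value, fmt))  # -> 2006-10-25 00:00:00
        break
    except ValueError:
        continue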
mit
kbrebanov/ansible-modules-extras
system/lvol.py
2
16881
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, Jeroen Hoekx <[email protected]>, Alexander Bulimov <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- author: - "Jeroen Hoekx (@jhoekx)" - "Alexander Bulimov (@abulimov)" module: lvol short_description: Configure LVM logical volumes description: - This module creates, removes or resizes logical volumes. version_added: "1.1" options: vg: description: - The volume group this logical volume is part of. required: true lv: description: - The name of the logical volume. required: true size: description: - The size of the logical volume, according to lvcreate(8) --size, by default in megabytes or optionally with one of [bBsSkKmMgGtTpPeE] units; or according to lvcreate(8) --extents as a percentage of [VG|PVS|FREE]; Float values must begin with a digit. Resizing using percentage values was not supported prior to 2.1. state: choices: [ "present", "absent" ] default: present description: - Control if the logical volume exists. If C(present) and the volume does not already exist then the C(size) option is required. required: false active: version_added: "2.2" choices: [ "yes", "no" ] default: "yes" description: - Whether the volume is activate and visible to the host. required: false force: version_added: "1.5" choices: [ "yes", "no" ] default: "no" description: - Shrink or remove operations of volumes requires this switch. Ensures that that filesystems get never corrupted/destroyed by mistake. required: false opts: version_added: "2.0" description: - Free-form options to be passed to the lvcreate command snapshot: version_added: "2.1" description: - The name of the snapshot volume required: false pvs: version_added: "2.2" description: - Comma separated list of physical volumes e.g. /dev/sda,/dev/sdb required: false shrink: version_added: "2.2" description: - shrink if current size is higher than size requested required: false default: yes notes: - Filesystems on top of the volume are not resized. ''' EXAMPLES = ''' # Create a logical volume of 512m. - lvol: vg: firefly lv: test size: 512 # Create a logical volume of 512m with disks /dev/sda and /dev/sdb - lvol: vg: firefly lv: test size: 512 pvs: /dev/sda,/dev/sdb # Create cache pool logical volume - lvol: vg: firefly lv: lvcache size: 512m opts: --type cache-pool # Create a logical volume of 512g. - lvol: vg: firefly lv: test size: 512g # Create a logical volume the size of all remaining space in the volume group - lvol: vg: firefly lv: test size: 100%FREE # Create a logical volume with special options - lvol: vg: firefly lv: test size: 512g opts: -r 16 # Extend the logical volume to 1024m. 
- lvol: vg: firefly lv: test size: 1024 # Extend the logical volume to consume all remaining space in the volume group - lvol: vg: firefly lv: test size: +100%FREE # Extend the logical volume to take all remaining space of the PVs - lvol: vg: firefly lv: test size: 100%PVS # Resize the logical volume to % of VG - lvol: vg: firefly lv: test size: 80%VG force: yes # Reduce the logical volume to 512m - lvol: vg: firefly lv: test size: 512 force: yes # Set the logical volume to 512m and do not try to shrink if size is lower than current one - lvol: vg: firefly lv: test size: 512 shrink: no # Remove the logical volume. - lvol: vg: firefly lv: test state: absent force: yes # Create a snapshot volume of the test logical volume. - lvol: vg: firefly lv: test snapshot: snap1 size: 100m # Deactivate a logical volume - lvol: vg: firefly lv: test active: false # Create a deactivated logical volume - lvol: vg: firefly lv: test size: 512g active: false ''' import re decimal_point = re.compile(r"(\d+)") def mkversion(major, minor, patch): return (1000 * 1000 * int(major)) + (1000 * int(minor)) + int(patch) def parse_lvs(data): lvs = [] for line in data.splitlines(): parts = line.strip().split(';') lvs.append({ 'name': parts[0].replace('[','').replace(']',''), 'size': int(decimal_point.match(parts[1]).group(1)), 'active': (parts[2][4] == 'a') }) return lvs def parse_vgs(data): vgs = [] for line in data.splitlines(): parts = line.strip().split(';') vgs.append({ 'name': parts[0], 'size': int(decimal_point.match(parts[1]).group(1)), 'free': int(decimal_point.match(parts[2]).group(1)), 'ext_size': int(decimal_point.match(parts[3]).group(1)) }) return vgs def get_lvm_version(module): ver_cmd = module.get_bin_path("lvm", required=True) rc, out, err = module.run_command("%s version" % (ver_cmd)) if rc != 0: return None m = re.search("LVM version:\s+(\d+)\.(\d+)\.(\d+).*(\d{4}-\d{2}-\d{2})", out) if not m: return None return mkversion(m.group(1), m.group(2), m.group(3)) def main(): module = AnsibleModule( argument_spec=dict( vg=dict(required=True), lv=dict(required=True), size=dict(type='str'), opts=dict(type='str'), state=dict(choices=["absent", "present"], default='present'), force=dict(type='bool', default='no'), shrink=dict(type='bool', default='yes'), active=dict(type='bool', default='yes'), snapshot=dict(type='str', default=None), pvs=dict(type='str') ), supports_check_mode=True, ) # Determine if the "--yes" option should be used version_found = get_lvm_version(module) if version_found == None: module.fail_json(msg="Failed to get LVM version number") version_yesopt = mkversion(2, 2, 99) # First LVM with the "--yes" option if version_found >= version_yesopt: yesopt = "--yes" else: yesopt = "" vg = module.params['vg'] lv = module.params['lv'] size = module.params['size'] opts = module.params['opts'] state = module.params['state'] force = module.boolean(module.params['force']) shrink = module.boolean(module.params['shrink']) active = module.boolean(module.params['active']) size_opt = 'L' size_unit = 'm' snapshot = module.params['snapshot'] pvs = module.params['pvs'] if pvs is None: pvs = "" else: pvs = pvs.replace(",", " ") if opts is None: opts = "" # Add --test option when running in check-mode if module.check_mode: test_opt = ' --test' else: test_opt = '' if size: # LVCREATE(8) -l --extents option with percentage if '%' in size: size_parts = size.split('%', 1) size_percent = int(size_parts[0]) if size_percent > 100: module.fail_json(msg="Size percentage cannot be larger than 100%") size_whole = size_parts[1] 
if size_whole == 'ORIGIN': module.fail_json(msg="Snapshot Volumes are not supported") elif size_whole not in ['VG', 'PVS', 'FREE']: module.fail_json(msg="Specify extents as a percentage of VG|PVS|FREE") size_opt = 'l' size_unit = '' if not '%' in size: # LVCREATE(8) -L --size option unit if size[-1].lower() in 'bskmgtpe': size_unit = size[-1].lower() size = size[0:-1] try: float(size) if not size[0].isdigit(): raise ValueError() except ValueError: module.fail_json(msg="Bad size specification of '%s'" % size) # when no unit, megabytes by default if size_opt == 'l': unit = 'm' else: unit = size_unit # Get information on volume group requested vgs_cmd = module.get_bin_path("vgs", required=True) rc, current_vgs, err = module.run_command( "%s --noheadings -o vg_name,size,free,vg_extent_size --units %s --separator ';' %s" % (vgs_cmd, unit, vg)) if rc != 0: if state == 'absent': module.exit_json(changed=False, stdout="Volume group %s does not exist." % vg) else: module.fail_json(msg="Volume group %s does not exist." % vg, rc=rc, err=err) vgs = parse_vgs(current_vgs) this_vg = vgs[0] # Get information on logical volume requested lvs_cmd = module.get_bin_path("lvs", required=True) rc, current_lvs, err = module.run_command( "%s -a --noheadings --nosuffix -o lv_name,size,lv_attr --units %s --separator ';' %s" % (lvs_cmd, unit, vg)) if rc != 0: if state == 'absent': module.exit_json(changed=False, stdout="Volume group %s does not exist." % vg) else: module.fail_json(msg="Volume group %s does not exist." % vg, rc=rc, err=err) changed = False lvs = parse_lvs(current_lvs) if snapshot is None: check_lv = lv else: check_lv = snapshot for test_lv in lvs: if test_lv['name'] == check_lv: this_lv = test_lv break else: this_lv = None if state == 'present' and not size: if this_lv is None: module.fail_json(msg="No size given.") msg = '' if this_lv is None: if state == 'present': ### create LV lvcreate_cmd = module.get_bin_path("lvcreate", required=True) if snapshot is not None: cmd = "%s %s %s -%s %s%s -s -n %s %s %s/%s" % (lvcreate_cmd, test_opt, yesopt, size_opt, size, size_unit, snapshot, opts, vg, lv) else: cmd = "%s %s %s -n %s -%s %s%s %s %s %s" % (lvcreate_cmd, test_opt, yesopt, lv, size_opt, size, size_unit, opts, vg, pvs) rc, _, err = module.run_command(cmd) if rc == 0: changed = True else: module.fail_json(msg="Creating logical volume '%s' failed" % lv, rc=rc, err=err) else: if state == 'absent': ### remove LV if not force: module.fail_json(msg="Sorry, no removal of logical volume %s without force=yes." % (this_lv['name'])) lvremove_cmd = module.get_bin_path("lvremove", required=True) rc, _, err = module.run_command("%s %s --force %s/%s" % (lvremove_cmd, test_opt, vg, this_lv['name'])) if rc == 0: module.exit_json(changed=True) else: module.fail_json(msg="Failed to remove logical volume %s" % (lv), rc=rc, err=err) elif not size: pass elif size_opt == 'l': ### Resize LV based on % value tool = None size_free = this_vg['free'] if size_whole == 'VG' or size_whole == 'PVS': size_requested = size_percent * this_vg['size'] / 100 else: # size_whole == 'FREE': size_requested = size_percent * this_vg['free'] / 100 if '+' in size: size_requested += this_lv['size'] if this_lv['size'] < size_requested: if (size_free > 0) and (('+' not in size) or (size_free >= (size_requested - this_lv['size']))): tool = module.get_bin_path("lvextend", required=True) else: module.fail_json(msg="Logical Volume %s could not be extended. 
Not enough free space left (%s%s required / %s%s available)" % (this_lv['name'], (size_requested - this_lv['size']), unit, size_free, unit)) elif shrink and this_lv['size'] > size_requested + this_vg['ext_size']: # more than an extent too large if size_requested == 0: module.fail_json(msg="Sorry, no shrinking of %s to 0 permitted." % (this_lv['name'])) elif not force: module.fail_json(msg="Sorry, no shrinking of %s without force=yes" % (this_lv['name'])) else: tool = module.get_bin_path("lvreduce", required=True) tool = '%s %s' % (tool, '--force') if tool: cmd = "%s %s -%s %s%s %s/%s %s" % (tool, test_opt, size_opt, size, size_unit, vg, this_lv['name'], pvs) rc, out, err = module.run_command(cmd) if "Reached maximum COW size" in out: module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err, out=out) elif rc == 0: changed = True msg="Volume %s resized to %s%s" % (this_lv['name'], size_requested, unit) elif "matches existing size" in err: module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size']) elif "not larger than existing size" in err: module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size'], msg="Original size is larger than requested size", err=err) else: module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err) else: ### resize LV based on absolute values tool = None if int(size) > this_lv['size']: tool = module.get_bin_path("lvextend", required=True) elif shrink and int(size) < this_lv['size']: if int(size) == 0: module.fail_json(msg="Sorry, no shrinking of %s to 0 permitted." % (this_lv['name'])) if not force: module.fail_json(msg="Sorry, no shrinking of %s without force=yes." % (this_lv['name'])) else: tool = module.get_bin_path("lvreduce", required=True) tool = '%s %s' % (tool, '--force') if tool: cmd = "%s %s -%s %s%s %s/%s %s" % (tool, test_opt, size_opt, size, size_unit, vg, this_lv['name'], pvs) rc, out, err = module.run_command(cmd) if "Reached maximum COW size" in out: module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err, out=out) elif rc == 0: changed = True elif "matches existing size" in err: module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size']) elif "not larger than existing size" in err: module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size'], msg="Original size is larger than requested size", err=err) else: module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err) if this_lv is not None: if active: lvchange_cmd = module.get_bin_path("lvchange", required=True) rc, _, err = module.run_command("%s -ay %s/%s" % (lvchange_cmd, vg, this_lv['name'])) if rc == 0: module.exit_json(changed=((not this_lv['active']) or changed), vg=vg, lv=this_lv['name'], size=this_lv['size']) else: module.fail_json(msg="Failed to activate logical volume %s" % (lv), rc=rc, err=err) else: lvchange_cmd = module.get_bin_path("lvchange", required=True) rc, _, err = module.run_command("%s -an %s/%s" % (lvchange_cmd, vg, this_lv['name'])) if rc == 0: module.exit_json(changed=(this_lv['active'] or changed), vg=vg, lv=this_lv['name'], size=this_lv['size']) else: module.fail_json(msg="Failed to deactivate logical volume %s" % (lv), rc=rc, err=err) module.exit_json(changed=changed, msg=msg) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
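The parse_vgs() and parse_lvs() helpers above consume the semicolon-separated, no-heading output of the vgs/lvs commands built in main(). A hedged sketch with invented sample output, assuming the two functions are in scope:

sample_vgs = "  firefly;10240.00m;2048.00m;4.00m\n"
sample_lvs = "  test;512.00;-wi-a-----\n"

print(parse_vgs(sample_vgs))
# -> [{'name': 'firefly', 'size': 10240, 'free': 2048, 'ext_size': 4}]
print(parse_lvs(sample_lvs))
# -> [{'name': 'test', 'size': 512, 'active': True}]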
gpl-3.0
pshriwise/dag_slicer
dagmc_slicer/dagmc_slice_tool.py
1
6991
import numpy as np from matplotlib.path import Path from matplotlib.patches import PathPatch import matplotlib.pyplot as plt import numpy as np from dagmc_slicer.Dag_Slicer import Dag_Slicer from matplotlib.widgets import CheckButtons, RadioButtons from matplotlib.colors import rgb2hex class dagmc_slicer(Dag_Slicer): #wrapper for the super init def __init__(self, filename = "", axis = 0, coordinate = 0, by_group = False): super(dagmc_slicer, self).__init__( filename, axis, coordinate, by_group ) self.shown = False self.color_seed = 56 def clear_slice(self): #clear old arrays so there isn't junk data in the way # self.slice_x_pnts = np.array([]) # self.slice_y_pnts = np.array([]) # self.path_coding = np.array([], dtype='int') # self.group_names = np.array([], dtype='str') # self.group_ids = np.array([],dtype='int') self.shown = False def create_slice(self): #clear out old info self.clear_slice() #clear old arrays so there isn't junk data in the way # self.slice_x_pnts = np.array([]) # self.slice_y_pnts = np.array([]) # self.path_coding = np.array([], dtype='int') # self.group_names = np.array([], dtype='str') if self.roam: print("\033[93m"+self.roam_warning+"\033[0m") #run the super function to create the slice if str(self.filename) is not "": a = super(dagmc_slicer, self).create_slice() def rename_group(self, id, new_name): super(dagmc_slicer, self).rename_group(id, new_name) def write_file(self, new_filename): if new_filename == self.filename: continue_result = self.continue_query("Continuing will overwrite the current file. Continue?") if not continue_result: print("Ok. Doing nothing.") return super(dagmc_slicer, self).write_file(new_filename) def continue_query(self, question): reply = str(raw_input(question + ' (y/n) :')).lower().strip() if reply == 'y': return True if reply == 'n': return False else: print("Please reply with either y or n.") self.continue_query(question) def show_slice(self, colors=None): if 0 == len(self.slice_x_pnts): a = self.create_slice() #now setup the plot object all_paths = [] for i in range(len(self.slice_x_pnts)): new_list = [ np.transpose(np.vstack((self.slice_x_pnts[i],self.slice_y_pnts[i]))), self.path_coding[i]] all_paths.append(new_list) if colors == None: colors = [] np.random.seed(self.color_seed) for i in range(len(all_paths)): colors.append(np.random.rand(3,).tolist()) elif len(colors) != len(all_paths): raise ValueError("{} colors are required, {} colors have been specified".format( len(colors), len(all_paths))) #create the patches for this plot patches = [] for i, (coord, code) in enumerate(all_paths): path = Path(coord, code) patches.append(PathPatch(path, color=colors[i], ec='black', lw=1)) #create a new figure fig, ax = plt.subplots() self.figure = fig self.plt_ax = ax #add the patches to the plot for patch in patches: ax.add_patch(patch) self.color_map = {} self.legend_map = {} if 0 != len(self.group_names): labels = ["Group " + str(group_id) + ": " + group_name for group_id,group_name in zip(self.group_ids,self.group_names)] for gid,patch in zip(self.group_ids,patches): self.color_map[gid] = patch.get_facecolor() leg = ax.legend(patches, labels, prop={'size':14}, loc=2, bbox_to_anchor=(1.05,1.), borderaxespad=0.) 
#create mapping of artist to legend entry for legpatch, patch in zip(leg.get_patches(), patches): legpatch.set_picker(True) self.legend_map[legpatch] = patch #setup the check boxex cax = plt.axes([0.025, 0.5, 0.12, 0.12]) self.check = CheckButtons( cax, ('Visible','Filled'),(True,True) ) self.check.visible = False self.check.on_clicked(self.visiblefunc) #plot axis settings ax.autoscale_view() ax.set_aspect('equal') cid = self.figure.canvas.mpl_connect('pick_event', self.onpick) plt.show() self.shown = True def make_legend(self): legend_items = [] for leg_patch,gid,gname in zip(self.legend_map.keys(),self.group_ids,self.group_names): lb = widgets.Text(gname,description="Group " + str(gid)) bg_color = rgb2hex(self.color_map[gid][:-1]) cb = widgets.Box(background_color=bg_color,height=32,width=80) cb.margin = 10 lb.margin = 10 i = widgets.HBox(children=[cb,lb]) legend_items.append(i) self.new_filename = widgets.Text(description='Filename') exp_but = widgets.Button(description="Export Model") exp_but.on_click(self.write_file) leg = widgets.Box(children=legend_items) leg.children += (exp_but,self.new_filename,) #one more outer hozo box for the figure box = widgets.HBox(children=[leg]) display(box) def export(self): self.write_file(self.new_filename.value) def onpick(self,event): self.picked = event.artist #Reset all legend items to black then highlight current selection [a.set_edgecolor('black') for a in self.legend_map.keys()] event.artist.set_edgecolor('orange') #Get the patch item through the legend map and update the checkbox settings origpatch = self.legend_map[event.artist] [l.set_visible( origpatch.get_visible() ) for l in self.check.lines[0]] [l.set_visible( origpatch.get_fill() ) for l in self.check.lines[1]] #Redraw the plot self.figure.canvas.draw() def visiblefunc(self,label): #Check the current visibility/fill of the patch based #on the state of the check boxes vis = self.check.lines[0][0].get_visible() filled = self.check.lines[1][0].get_visible() #Reflect the changes to the patch in the legend item self.picked.set_alpha( 1.0 if vis else 0.6 ) self.picked.set_fill(filled) #Make changes to the original patch origpatch = self.legend_map[self.picked] origpatch.set_visible(vis) origpatch.set_fill(filled) #Redraw the plot self.figure.canvas.draw()
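A minimal hedged usage sketch of the class above; the file name, axis and coordinate are placeholders, and a real run needs a DAGMC .h5m model plus the compiled Dag_Slicer extension:

from dagmc_slicer.dagmc_slice_tool import dagmc_slicer

slicer = dagmc_slicer('model.h5m', axis=2, coordinate=0.0, by_group=True)
slicer.create_slice()
slicer.show_slice()   # one matplotlib patch per geometry group, with a legend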
bsd-2-clause
shakamunyi/neutron-dvr
neutron/tests/unit/openvswitch/test_ovs_defaults.py
16
1470
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo.config import cfg

from neutron.plugins.openvswitch.common import config  # noqa
from neutron.tests import base


class ConfigurationTest(base.BaseTestCase):

    def test_defaults(self):
        self.assertEqual('br-int', cfg.CONF.OVS.integration_bridge)
        self.assertFalse(cfg.CONF.OVS.enable_tunneling)
        self.assertEqual('br-tun', cfg.CONF.OVS.tunnel_bridge)
        self.assertEqual(2, cfg.CONF.AGENT.polling_interval)
        self.assertEqual('sudo', cfg.CONF.AGENT.root_helper)
        self.assertEqual('local', cfg.CONF.OVS.tenant_network_type)
        self.assertEqual(0, len(cfg.CONF.OVS.bridge_mappings))
        self.assertEqual(0, len(cfg.CONF.OVS.network_vlan_ranges))
        self.assertEqual(0, len(cfg.CONF.OVS.tunnel_id_ranges))
        self.assertFalse(cfg.CONF.AGENT.l2_population)
        self.assertFalse(cfg.CONF.AGENT.arp_responder)
apache-2.0
CapOM/ChromiumGStreamerBackend
third_party/google_input_tools/third_party/closure_library/closure/bin/scopify.py
329
6785
#!/usr/bin/python # # Copyright 2010 The Closure Library Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Automatically converts codebases over to goog.scope. Usage: cd path/to/my/dir; ../../../../javascript/closure/bin/scopify.py Scans every file in this directory, recursively. Looks for existing goog.scope calls, and goog.require'd symbols. If it makes sense to generate a goog.scope call for the file, then we will do so, and try to auto-generate some aliases based on the goog.require'd symbols. Known Issues: When a file is goog.scope'd, the file contents will be indented +2. This may put some lines over 80 chars. These will need to be fixed manually. We will only try to create aliases for capitalized names. We do not check to see if those names will conflict with any existing locals. This creates merge conflicts for every line of every outstanding change. If you intend to run this on your codebase, make sure your team members know. Better yet, send them this script so that they can scopify their outstanding changes and "accept theirs". When an alias is "captured", it can no longer be stubbed out for testing. Run your tests. """ __author__ = '[email protected] (Nick Santos)' import os.path import re import sys REQUIRES_RE = re.compile(r"goog.require\('([^']*)'\)") # Edit this manually if you want something to "always" be aliased. # TODO(nicksantos): Add a flag for this. DEFAULT_ALIASES = {} def Transform(lines): """Converts the contents of a file into javascript that uses goog.scope. Arguments: lines: A list of strings, corresponding to each line of the file. Returns: A new list of strings, or None if the file was not modified. """ requires = [] # Do an initial scan to be sure that this file can be processed. for line in lines: # Skip this file if it has already been scopified. if line.find('goog.scope') != -1: return None # If there are any global vars or functions, then we also have # to skip the whole file. We might be able to deal with this # more elegantly. if line.find('var ') == 0 or line.find('function ') == 0: return None for match in REQUIRES_RE.finditer(line): requires.append(match.group(1)) if len(requires) == 0: return None # Backwards-sort the requires, so that when one is a substring of another, # we match the longer one first. for val in DEFAULT_ALIASES.values(): if requires.count(val) == 0: requires.append(val) requires.sort() requires.reverse() # Generate a map of requires to their aliases aliases_to_globals = DEFAULT_ALIASES.copy() for req in requires: index = req.rfind('.') if index == -1: alias = req else: alias = req[(index + 1):] # Don't scopify lowercase namespaces, because they may conflict with # local variables. if alias[0].isupper(): aliases_to_globals[alias] = req aliases_to_matchers = {} globals_to_aliases = {} for alias, symbol in aliases_to_globals.items(): globals_to_aliases[symbol] = alias aliases_to_matchers[alias] = re.compile('\\b%s\\b' % symbol) # Insert a goog.scope that aliases all required symbols. 
result = [] START = 0 SEEN_REQUIRES = 1 IN_SCOPE = 2 mode = START aliases_used = set() insertion_index = None num_blank_lines = 0 for line in lines: if mode == START: result.append(line) if re.search(REQUIRES_RE, line): mode = SEEN_REQUIRES elif mode == SEEN_REQUIRES: if (line and not re.search(REQUIRES_RE, line) and not line.isspace()): # There should be two blank lines before goog.scope result += ['\n'] * 2 result.append('goog.scope(function() {\n') insertion_index = len(result) result += ['\n'] * num_blank_lines mode = IN_SCOPE elif line.isspace(): # Keep track of the number of blank lines before each block of code so # that we can move them after the goog.scope line if necessary. num_blank_lines += 1 else: # Print the blank lines we saw before this code block result += ['\n'] * num_blank_lines num_blank_lines = 0 result.append(line) if mode == IN_SCOPE: for symbol in requires: if not symbol in globals_to_aliases: continue alias = globals_to_aliases[symbol] matcher = aliases_to_matchers[alias] for match in matcher.finditer(line): # Check to make sure we're not in a string. # We do this by being as conservative as possible: # if there are any quote or double quote characters # before the symbol on this line, then bail out. before_symbol = line[:match.start(0)] if before_symbol.count('"') > 0 or before_symbol.count("'") > 0: continue line = line.replace(match.group(0), alias) aliases_used.add(alias) if line.isspace(): # Truncate all-whitespace lines result.append('\n') else: result.append(line) if len(aliases_used): aliases_used = [alias for alias in aliases_used] aliases_used.sort() aliases_used.reverse() for alias in aliases_used: symbol = aliases_to_globals[alias] result.insert(insertion_index, 'var %s = %s;\n' % (alias, symbol)) result.append('}); // goog.scope\n') return result else: return None def TransformFileAt(path): """Converts a file into javascript that uses goog.scope. Arguments: path: A path to a file. """ f = open(path) lines = Transform(f.readlines()) if lines: f = open(path, 'w') for l in lines: f.write(l) f.close() if __name__ == '__main__': args = sys.argv[1:] if not len(args): args = '.' for file_name in args: if os.path.isdir(file_name): for root, dirs, files in os.walk(file_name): for name in files: if name.endswith('.js') and \ not os.path.islink(os.path.join(root, name)): TransformFileAt(os.path.join(root, name)) else: if file_name.endswith('.js') and \ not os.path.islink(file_name): TransformFileAt(file_name)
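As a worked illustration of Transform() above, the sketch below feeds it a tiny invented file body; per the rules in the code, an alias is only generated for the capitalized trailing name part (TagName), and the require block is followed by a generated goog.scope wrapper:

src = [
    "goog.provide('my.app');\n",
    "goog.require('goog.dom.TagName');\n",
    "\n",
    "my.app.tag = goog.dom.TagName.DIV;\n",
]
out = Transform(src)
if out is not None:
    print(''.join(out))   # includes "var TagName = goog.dom.TagName;" inside goog.scope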
bsd-3-clause
t3wz/mtasa-blue
vendor/google-breakpad/src/tools/gyp/pylib/gyp/__init__.py
574
21473
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import copy import gyp.input import optparse import os.path import re import shlex import sys import traceback from gyp.common import GypError # Default debug modes for GYP debug = {} # List of "official" debug modes, but you can use anything you like. DEBUG_GENERAL = 'general' DEBUG_VARIABLES = 'variables' DEBUG_INCLUDES = 'includes' def DebugOutput(mode, message, *args): if 'all' in gyp.debug or mode in gyp.debug: ctx = ('unknown', 0, 'unknown') try: f = traceback.extract_stack(limit=2) if f: ctx = f[0][:3] except: pass if args: message %= args print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message) def FindBuildFiles(): extension = '.gyp' files = os.listdir(os.getcwd()) build_files = [] for file in files: if file.endswith(extension): build_files.append(file) return build_files def Load(build_files, format, default_variables={}, includes=[], depth='.', params=None, check=False, circular_check=True): """ Loads one or more specified build files. default_variables and includes will be copied before use. Returns the generator for the specified format and the data returned by loading the specified build files. """ if params is None: params = {} flavor = None if '-' in format: format, params['flavor'] = format.split('-', 1) default_variables = copy.copy(default_variables) # Default variables provided by this program and its modules should be # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, # avoiding collisions with user and automatic variables. default_variables['GENERATOR'] = format # Format can be a custom python file, or by default the name of a module # within gyp.generator. if format.endswith('.py'): generator_name = os.path.splitext(format)[0] path, generator_name = os.path.split(generator_name) # Make sure the path to the custom generator is in sys.path # Don't worry about removing it once we are done. Keeping the path # to each generator that is used in sys.path is likely harmless and # arguably a good idea. path = os.path.abspath(path) if path not in sys.path: sys.path.insert(0, path) else: generator_name = 'gyp.generator.' + format # These parameters are passed in order (as opposed to by key) # because ActivePython cannot handle key parameters to __import__. generator = __import__(generator_name, globals(), locals(), generator_name) for (key, val) in generator.generator_default_variables.items(): default_variables.setdefault(key, val) # Give the generator the opportunity to set additional variables based on # the params it will receive in the output phase. if getattr(generator, 'CalculateVariables', None): generator.CalculateVariables(default_variables, params) # Give the generator the opportunity to set generator_input_info based on # the params it will receive in the output phase. if getattr(generator, 'CalculateGeneratorInputInfo', None): generator.CalculateGeneratorInputInfo(params) # Fetch the generator specific info that gets fed to input, we use getattr # so we can default things and the generators only have to provide what # they need. 
generator_input_info = { 'non_configuration_keys': getattr(generator, 'generator_additional_non_configuration_keys', []), 'path_sections': getattr(generator, 'generator_additional_path_sections', []), 'extra_sources_for_rules': getattr(generator, 'generator_extra_sources_for_rules', []), 'generator_supports_multiple_toolsets': getattr(generator, 'generator_supports_multiple_toolsets', False), 'generator_wants_static_library_dependencies_adjusted': getattr(generator, 'generator_wants_static_library_dependencies_adjusted', True), 'generator_wants_sorted_dependencies': getattr(generator, 'generator_wants_sorted_dependencies', False), 'generator_filelist_paths': getattr(generator, 'generator_filelist_paths', None), } # Process the input specific to this generator. result = gyp.input.Load(build_files, default_variables, includes[:], depth, generator_input_info, check, circular_check, params['parallel'], params['root_targets']) return [generator] + result def NameValueListToDict(name_value_list): """ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is. """ result = { } for item in name_value_list: tokens = item.split('=', 1) if len(tokens) == 2: # If we can make it an int, use that, otherwise, use the string. try: token_value = int(tokens[1]) except ValueError: token_value = tokens[1] # Set the variable to the supplied value. result[tokens[0]] = token_value else: # No value supplied, treat it as a boolean and set it. result[tokens[0]] = True return result def ShlexEnv(env_name): flags = os.environ.get(env_name, []) if flags: flags = shlex.split(flags) return flags def FormatOpt(opt, value): if opt.startswith('--'): return '%s=%s' % (opt, value) return opt + value def RegenerateAppendFlag(flag, values, predicate, env_name, options): """Regenerate a list of command line flags, for an option of action='append'. The |env_name|, if given, is checked in the environment and used to generate an initial list of options, then the options that were specified on the command line (given in |values|) are appended. This matches the handling of environment variables and command line flags where command line flags override the environment, while not requiring the environment to be set when the flags are used again. """ flags = [] if options.use_environment and env_name: for flag_value in ShlexEnv(env_name): value = FormatOpt(flag, predicate(flag_value)) if value in flags: flags.remove(value) flags.append(value) if values: for flag_value in values: flags.append(FormatOpt(flag, predicate(flag_value))) return flags def RegenerateFlags(options): """Given a parsed options object, and taking the environment variables into account, returns a list of flags that should regenerate an equivalent options object (even in the absence of the environment variables.) Any path options will be normalized relative to depth. The format flag is not included, as it is assumed the calling generator will set that as appropriate. """ def FixPath(path): path = gyp.common.FixIfRelativePath(path, options.depth) if not path: return os.path.curdir return path def Noop(value): return value # We always want to ignore the environment when regenerating, to avoid # duplicate or changed flags in the environment at the time of regeneration. 
flags = ['--ignore-environment'] for name, metadata in options._regeneration_metadata.iteritems(): opt = metadata['opt'] value = getattr(options, name) value_predicate = metadata['type'] == 'path' and FixPath or Noop action = metadata['action'] env_name = metadata['env_name'] if action == 'append': flags.extend(RegenerateAppendFlag(opt, value, value_predicate, env_name, options)) elif action in ('store', None): # None is a synonym for 'store'. if value: flags.append(FormatOpt(opt, value_predicate(value))) elif options.use_environment and env_name and os.environ.get(env_name): flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) elif action in ('store_true', 'store_false'): if ((action == 'store_true' and value) or (action == 'store_false' and not value)): flags.append(opt) elif options.use_environment and env_name: print >>sys.stderr, ('Warning: environment regeneration unimplemented ' 'for %s flag %r env_name %r' % (action, opt, env_name)) else: print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' 'flag %r' % (action, opt)) return flags class RegeneratableOptionParser(optparse.OptionParser): def __init__(self): self.__regeneratable_options = {} optparse.OptionParser.__init__(self) def add_option(self, *args, **kw): """Add an option to the parser. This accepts the same arguments as OptionParser.add_option, plus the following: regenerate: can be set to False to prevent this option from being included in regeneration. env_name: name of environment variable that additional values for this option come from. type: adds type='path', to tell the regenerator that the values of this option need to be made relative to options.depth """ env_name = kw.pop('env_name', None) if 'dest' in kw and kw.pop('regenerate', True): dest = kw['dest'] # The path type is needed for regenerating, for optparse we can just treat # it as a string. type = kw.get('type') if type == 'path': kw['type'] = 'string' self.__regeneratable_options[dest] = { 'action': kw.get('action'), 'type': type, 'env_name': env_name, 'opt': args[0], } optparse.OptionParser.add_option(self, *args, **kw) def parse_args(self, *args): values, args = optparse.OptionParser.parse_args(self, *args) values._regeneration_metadata = self.__regeneratable_options return values, args def gyp_main(args): my_name = os.path.basename(sys.argv[0]) parser = RegeneratableOptionParser() usage = 'usage: %s [options ...] [build_file ...]' parser.set_usage(usage.replace('%s', '%prog')) parser.add_option('--build', dest='configs', action='append', help='configuration for build after project generation') parser.add_option('--check', dest='check', action='store_true', help='check format of gyp files') parser.add_option('--config-dir', dest='config_dir', action='store', env_name='GYP_CONFIG_DIR', default=None, help='The location for configuration files like ' 'include.gypi.') parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', action='append', default=[], help='turn on a debugging ' 'mode for debugging GYP. 
Supported modes are "variables", ' '"includes" and "general" or "all" for all of them.') parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', env_name='GYP_DEFINES', help='sets variable VAR to value VAL') parser.add_option('--depth', dest='depth', metavar='PATH', type='path', help='set DEPTH gyp variable to a relative path to PATH') parser.add_option('-f', '--format', dest='formats', action='append', env_name='GYP_GENERATORS', regenerate=False, help='output formats to generate') parser.add_option('-G', dest='generator_flags', action='append', default=[], metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', help='sets generator flag FLAG to VAL') parser.add_option('--generator-output', dest='generator_output', action='store', default=None, metavar='DIR', type='path', env_name='GYP_GENERATOR_OUTPUT', help='puts generated build files under DIR') parser.add_option('--ignore-environment', dest='use_environment', action='store_false', default=True, regenerate=False, help='do not read options from environment variables') parser.add_option('-I', '--include', dest='includes', action='append', metavar='INCLUDE', type='path', help='files to include in all loaded .gyp files') # --no-circular-check disables the check for circular relationships between # .gyp files. These relationships should not exist, but they've only been # observed to be harmful with the Xcode generator. Chromium's .gyp files # currently have some circular relationships on non-Mac platforms, so this # option allows the strict behavior to be used on Macs and the lenient # behavior to be used elsewhere. # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. parser.add_option('--no-circular-check', dest='circular_check', action='store_false', default=True, regenerate=False, help="don't check for circular relationships between files") parser.add_option('--no-parallel', action='store_true', default=False, help='Disable multiprocessing') parser.add_option('-S', '--suffix', dest='suffix', default='', help='suffix to add to generated files') parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', default=None, metavar='DIR', type='path', help='directory to use as the root of the source tree') parser.add_option('-R', '--root-target', dest='root_targets', action='append', metavar='TARGET', help='include only TARGET and its deep dependencies') options, build_files_arg = parser.parse_args(args) build_files = build_files_arg # Set up the configuration directory (defaults to ~/.gyp) if not options.config_dir: home = None home_dot_gyp = None if options.use_environment: home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None) if home_dot_gyp: home_dot_gyp = os.path.expanduser(home_dot_gyp) if not home_dot_gyp: home_vars = ['HOME'] if sys.platform in ('cygwin', 'win32'): home_vars.append('USERPROFILE') for home_var in home_vars: home = os.getenv(home_var) if home != None: home_dot_gyp = os.path.join(home, '.gyp') if not os.path.exists(home_dot_gyp): home_dot_gyp = None else: break else: home_dot_gyp = os.path.expanduser(options.config_dir) if home_dot_gyp and not os.path.exists(home_dot_gyp): home_dot_gyp = None if not options.formats: # If no format was given on the command line, then check the env variable. 
generate_formats = [] if options.use_environment: generate_formats = os.environ.get('GYP_GENERATORS', []) if generate_formats: generate_formats = re.split('[\s,]', generate_formats) if generate_formats: options.formats = generate_formats else: # Nothing in the variable, default based on platform. if sys.platform == 'darwin': options.formats = ['xcode'] elif sys.platform in ('win32', 'cygwin'): options.formats = ['msvs'] else: options.formats = ['make'] if not options.generator_output and options.use_environment: g_o = os.environ.get('GYP_GENERATOR_OUTPUT') if g_o: options.generator_output = g_o options.parallel = not options.no_parallel for mode in options.debug: gyp.debug[mode] = 1 # Do an extra check to avoid work when we're not debugging. if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, 'running with these options:') for option, value in sorted(options.__dict__.items()): if option[0] == '_': continue if isinstance(value, basestring): DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) else: DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) if not build_files: build_files = FindBuildFiles() if not build_files: raise GypError((usage + '\n\n%s: error: no build_file') % (my_name, my_name)) # TODO(mark): Chromium-specific hack! # For Chromium, the gyp "depth" variable should always be a relative path # to Chromium's top-level "src" directory. If no depth variable was set # on the command line, try to find a "src" directory by looking at the # absolute path to each build file's directory. The first "src" component # found will be treated as though it were the path used for --depth. if not options.depth: for build_file in build_files: build_file_dir = os.path.abspath(os.path.dirname(build_file)) build_file_dir_components = build_file_dir.split(os.path.sep) components_len = len(build_file_dir_components) for index in xrange(components_len - 1, -1, -1): if build_file_dir_components[index] == 'src': options.depth = os.path.sep.join(build_file_dir_components) break del build_file_dir_components[index] # If the inner loop found something, break without advancing to another # build file. if options.depth: break if not options.depth: raise GypError('Could not automatically locate src directory. This is' 'a temporary Chromium feature that will be removed. Use' '--depth as a workaround.') # If toplevel-dir is not set, we assume that depth is the root of our source # tree. if not options.toplevel_dir: options.toplevel_dir = options.depth # -D on the command line sets variable defaults - D isn't just for define, # it's for default. Perhaps there should be a way to force (-F?) a # variable's value so that it can't be overridden by anything else. cmdline_default_variables = {} defines = [] if options.use_environment: defines += ShlexEnv('GYP_DEFINES') if options.defines: defines += options.defines cmdline_default_variables = NameValueListToDict(defines) if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables) # Set up includes. includes = [] # If ~/.gyp/include.gypi exists, it'll be forcibly included into every # .gyp file that's loaded, before anything else is included. if home_dot_gyp != None: default_include = os.path.join(home_dot_gyp, 'include.gypi') if os.path.exists(default_include): print 'Using overrides found in ' + default_include includes.append(default_include) # Command-line --include files come after the default include. 
if options.includes: includes.extend(options.includes) # Generator flags should be prefixed with the target generator since they # are global across all generator runs. gen_flags = [] if options.use_environment: gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS') if options.generator_flags: gen_flags += options.generator_flags generator_flags = NameValueListToDict(gen_flags) if DEBUG_GENERAL in gyp.debug.keys(): DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) # Generate all requested formats (use a set in case we got one format request # twice) for format in set(options.formats): params = {'options': options, 'build_files': build_files, 'generator_flags': generator_flags, 'cwd': os.getcwd(), 'build_files_arg': build_files_arg, 'gyp_binary': sys.argv[0], 'home_dot_gyp': home_dot_gyp, 'parallel': options.parallel, 'root_targets': options.root_targets} # Start with the default variables from the command line. [generator, flat_list, targets, data] = Load(build_files, format, cmdline_default_variables, includes, options.depth, params, options.check, options.circular_check) # TODO(mark): Pass |data| for now because the generator needs a list of # build files that came in. In the future, maybe it should just accept # a list, and not the whole data dict. # NOTE: flat_list is the flattened dependency graph specifying the order # that targets may be built. Build systems that operate serially or that # need to have dependencies defined before dependents reference them should # generate targets in the order specified in flat_list. generator.GenerateOutput(flat_list, targets, data, params) if options.configs: valid_configs = targets[flat_list[0]]['configurations'].keys() for conf in options.configs: if conf not in valid_configs: raise GypError('Invalid config specified via --build: %s' % conf) generator.PerformBuild(data, options.configs, params) # Done return 0 def main(args): try: return gyp_main(args) except GypError, e: sys.stderr.write("gyp: %s\n" % e) return 1 # NOTE: setuptools generated console_scripts calls function with no arguments def script_main(): return main(sys.argv[1:]) if __name__ == '__main__': sys.exit(script_main())
gpl-3.0
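A minimal sketch of the 'VAR=VAL' parsing step described in the gyp record above: folding a list such as the entries taken from -D options or GYP_DEFINES into a defaults dictionary. The function name below is hypothetical; gyp's own NameValueListToDict() may perform additional value coercion.

def name_value_list_to_dict(pairs):
    # Split each 'NAME=VALUE' entry at the first '=' and collect into a dict.
    result = {}
    for pair in pairs:
        name, _, value = pair.partition('=')
        result[name] = value
    return result

print(name_value_list_to_dict(['OS=linux', 'component=shared_library']))
# {'OS': 'linux', 'component': 'shared_library'}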
mpkato/openliveq
openliveq/clickthrough.py
1
1360
from sqlalchemy import Column, Integer, String, Float, Index
from .db import Base

class Clickthrough(Base):
    __tablename__ = 'clickthroughs'
    __table_args__ = (
        Index('clickthroughs_query_id_index', "query_id"),
        Index('clickthroughs_question_id_index', "question_id"),
    )
    ORDERED_ATTRS = [
        "query_id", "question_id", "rank", "ctr",
        "male", "female",
        "a00", "a10", "a20", "a30", "a40", "a50", "a60"]

    query_id = Column(String(8), primary_key=True)
    question_id = Column(String(12), primary_key=True)
    rank = Column(Integer)
    ctr = Column(Float)
    male = Column(Float)
    female = Column(Float)
    a00 = Column(Float)
    a10 = Column(Float)
    a20 = Column(Float)
    a30 = Column(Float)
    a40 = Column(Float)
    a50 = Column(Float)
    a60 = Column(Float)

    @classmethod
    def readline(cls, line):
        ls = [l.strip() for l in line.split("\t")]
        if len(ls) != 13:
            raise RuntimeError("Invalid format for %s: %s" % (cls.__name__, line))
        args = {attr: ls[i] for i, attr in enumerate(cls.ORDERED_ATTRS)}
        result = Clickthrough(**args)

        # conversion
        result.rank = int(result.rank)
        for attr in cls.ORDERED_ATTRS[3:]:
            setattr(result, attr, float(getattr(result, attr)))
        return result
mit
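A short usage sketch for the record above, assuming Clickthrough is importable from the module and that no database session is needed just to parse a line; the sample values are made up for illustration.

line = "\t".join([
    "q001", "1234567890", "1", "0.25",                 # query_id, question_id, rank, ctr
    "0.6", "0.4",                                      # male, female
    "0.1", "0.1", "0.2", "0.2", "0.2", "0.1", "0.1",   # a00 .. a60
])
ct = Clickthrough.readline(line)
print(ct.query_id, ct.rank, ct.ctr)   # q001 1 0.25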
fkfk/linux_gt-i9000-gb
tools/perf/scripts/python/failed-syscalls-by-pid.py
944
1869
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *

usage = "perf script -s failed-syscalls-by-pid.py [comm]\n";

for_comm = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    for_comm = sys.argv[1]

syscalls = autodict()

def trace_begin():
    pass

def trace_end():
    print_error_totals()

def raw_syscalls__sys_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, ret):
    if for_comm is not None:
        if common_comm != for_comm:
            return
    if ret < 0:
        try:
            syscalls[common_comm][common_pid][id][ret] += 1
        except TypeError:
            syscalls[common_comm][common_pid][id][ret] = 1

def print_error_totals():
    if for_comm is not None:
        print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall errors:\n\n",

    print "%-30s %10s\n" % ("comm [pid]", "count"),
    print "%-30s %10s\n" % ("------------------------------", \
        "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            for id in id_keys:
                print " syscall: %-16d\n" % (id),
                ret_keys = syscalls[comm][pid][id].keys()
                for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
                    print " err = %-20d %10d\n" % (ret, val),
gpl-2.0
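The script above relies on perf's autodict to grow nested dictionary levels on demand; below is a self-contained sketch of the same counting pattern using only the standard library (the sample events are made up for illustration).

from collections import defaultdict

# counts[comm][pid][syscall_id][errno] -> occurrences
counts = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(int))))

events = [("bash", 101, 2, -2), ("bash", 101, 2, -2), ("cat", 202, 0, -13)]
for comm, pid, sc_id, ret in events:
    if ret < 0:                      # only failed syscalls are counted
        counts[comm][pid][sc_id][ret] += 1

for comm, pids in counts.items():
    for pid, ids in pids.items():
        for sc_id, rets in ids.items():
            for ret, n in rets.items():
                print("%s [%d] syscall %d err=%d count=%d" % (comm, pid, sc_id, ret, n))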
studybuffalo/rdrhc_calendar
run.py
1
1459
"""Downloads, extracts, and uploads schedules for AHS CZ pharmacists. Last Update: 2021-May-01 Copyright (c) Notices 2021 Joshua R. Torrance <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. SHOULD YOU REQUIRE ANY EXCEPTIONS TO THIS LICENSE, PLEASE CONTACT THE COPYRIGHT HOLDERS. """ import logging import logging.config import pathlib import sentry_sdk from modules.config import assemble_app_configuration_details, LOGGING_DICT from modules.manager import run_program # Collect all the application configuration values APP_CONFIG = assemble_app_configuration_details( str(pathlib.Path(__file__).parent.absolute()) ) # Setup Sentry & Logging logging.config.dictConfig(LOGGING_DICT) LOG = logging.getLogger(__name__) sentry_sdk.init(APP_CONFIG['sentry_dsn']) run_program(APP_CONFIG)
gpl-3.0
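A minimal, self-contained illustration of the logging.config.dictConfig() call run.py makes; the dictionary shape here is only an assumption, since the real LOGGING_DICT lives in modules.config.

import logging
import logging.config

# Hypothetical configuration dict; the project's actual LOGGING_DICT may differ.
EXAMPLE_LOGGING_DICT = {
    'version': 1,
    'formatters': {'simple': {'format': '%(levelname)s %(name)s: %(message)s'}},
    'handlers': {'console': {'class': 'logging.StreamHandler', 'formatter': 'simple'}},
    'root': {'handlers': ['console'], 'level': 'INFO'},
}

logging.config.dictConfig(EXAMPLE_LOGGING_DICT)
logging.getLogger(__name__).info('logging configured')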
Jgarcia-IAS/SAT
openerp/addons/stock_account/stock.py
21
14048
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class stock_location_path(osv.osv): _inherit = "stock.location.path" _columns = { 'invoice_state': fields.selection([ ("invoiced", "Invoiced"), ("2binvoiced", "To Be Invoiced"), ("none", "Not Applicable")], "Invoice Status",), } _defaults = { 'invoice_state': '', } def _prepare_push_apply(self, cr, uid, rule, move, context=None): res = super(stock_location_path, self)._prepare_push_apply(cr, uid, rule, move, context=context) res['invoice_state'] = rule.invoice_state or 'none' return res #---------------------------------------------------------- # Procurement Rule #---------------------------------------------------------- class procurement_rule(osv.osv): _inherit = 'procurement.rule' _columns = { 'invoice_state': fields.selection([ ("invoiced", "Invoiced"), ("2binvoiced", "To Be Invoiced"), ("none", "Not Applicable")], "Invoice Status",), } _defaults = { 'invoice_state': '', } #---------------------------------------------------------- # Procurement Order #---------------------------------------------------------- class procurement_order(osv.osv): _inherit = "procurement.order" _columns = { 'invoice_state': fields.selection([("invoiced", "Invoiced"), ("2binvoiced", "To Be Invoiced"), ("none", "Not Applicable") ], "Invoice Control"), } def _run_move_create(self, cr, uid, procurement, context=None): res = super(procurement_order, self)._run_move_create(cr, uid, procurement, context=context) res.update({'invoice_state': procurement.rule_id.invoice_state or procurement.invoice_state or 'none'}) return res _defaults = { 'invoice_state': '' } #---------------------------------------------------------- # Move #---------------------------------------------------------- class stock_move(osv.osv): _inherit = "stock.move" _columns = { 'invoice_state': fields.selection([("invoiced", "Invoiced"), ("2binvoiced", "To Be Invoiced"), ("none", "Not Applicable")], "Invoice Control", select=True, required=True, track_visibility='onchange', states={'draft': [('readonly', False)]}), } _defaults = { 'invoice_state': lambda *args, **argv: 'none' } def _get_master_data(self, cr, uid, move, company, context=None): ''' returns a tuple (browse_record(res.partner), ID(res.users), ID(res.currency)''' currency = company.currency_id.id partner = move.picking_id and move.picking_id.partner_id if partner: code = self.get_code_from_locs(cr, uid, move, context=context) if partner.property_product_pricelist and code == 'outgoing': currency = partner.property_product_pricelist.currency_id.id return partner, uid, currency def _create_invoice_line_from_vals(self, cr, 
uid, move, invoice_line_vals, context=None): return self.pool.get('account.invoice.line').create(cr, uid, invoice_line_vals, context=context) def _get_price_unit_invoice(self, cr, uid, move_line, type, context=None): """ Gets price unit for invoice @param move_line: Stock move lines @param type: Type of invoice @return: The price unit for the move line """ if context is None: context = {} if type in ('in_invoice', 'in_refund'): return move_line.price_unit else: # If partner given, search price in its sale pricelist if move_line.partner_id and move_line.partner_id.property_product_pricelist: pricelist_obj = self.pool.get("product.pricelist") pricelist = move_line.partner_id.property_product_pricelist.id price = pricelist_obj.price_get(cr, uid, [pricelist], move_line.product_id.id, move_line.product_uom_qty, move_line.partner_id.id, { 'uom': move_line.product_uom.id, 'date': move_line.date, })[pricelist] if price: return price return move_line.product_id.list_price def _get_invoice_line_vals(self, cr, uid, move, partner, inv_type, context=None): fp_obj = self.pool.get('account.fiscal.position') # Get account_id if inv_type in ('out_invoice', 'out_refund'): account_id = move.product_id.property_account_income.id if not account_id: account_id = move.product_id.categ_id.property_account_income_categ.id else: account_id = move.product_id.property_account_expense.id if not account_id: account_id = move.product_id.categ_id.property_account_expense_categ.id fiscal_position = partner.property_account_position account_id = fp_obj.map_account(cr, uid, fiscal_position, account_id) # set UoS if it's a sale and the picking doesn't have one uos_id = move.product_uom.id quantity = move.product_uom_qty if move.product_uos: uos_id = move.product_uos.id quantity = move.product_uos_qty return { 'name': move.name, 'account_id': account_id, 'product_id': move.product_id.id, 'uos_id': uos_id, 'quantity': quantity, 'price_unit': self._get_price_unit_invoice(cr, uid, move, inv_type), 'discount': 0.0, 'account_analytic_id': False, } #---------------------------------------------------------- # Picking #---------------------------------------------------------- class stock_picking(osv.osv): _inherit = 'stock.picking' def __get_invoice_state(self, cr, uid, ids, name, arg, context=None): result = {} for pick in self.browse(cr, uid, ids, context=context): result[pick.id] = 'none' for move in pick.move_lines: if move.invoice_state == 'invoiced': result[pick.id] = 'invoiced' elif move.invoice_state == '2binvoiced': result[pick.id] = '2binvoiced' break return result def __get_picking_move(self, cr, uid, ids, context={}): res = [] for move in self.pool.get('stock.move').browse(cr, uid, ids, context=context): if move.picking_id: res.append(move.picking_id.id) return res def _set_inv_state(self, cr, uid, picking_id, name, value, arg, context=None): pick = self.browse(cr, uid, picking_id, context=context) moves = [x.id for x in pick.move_lines] move_obj= self.pool.get("stock.move") move_obj.write(cr, uid, moves, {'invoice_state': value}, context=context) _columns = { 'invoice_state': fields.function(__get_invoice_state, type='selection', selection=[ ("invoiced", "Invoiced"), ("2binvoiced", "To Be Invoiced"), ("none", "Not Applicable") ], string="Invoice Control", required=True, fnct_inv = _set_inv_state, store={ 'stock.picking': (lambda self, cr, uid, ids, c={}: ids, ['state'], 10), 'stock.move': (__get_picking_move, ['picking_id', 'invoice_state'], 10), }, ), } _defaults = { 'invoice_state': lambda *args, **argv: 'none' } def 
_create_invoice_from_picking(self, cr, uid, picking, vals, context=None): ''' This function simply creates the invoice from the given values. It is overriden in delivery module to add the delivery costs. ''' invoice_obj = self.pool.get('account.invoice') return invoice_obj.create(cr, uid, vals, context=context) def _get_partner_to_invoice(self, cr, uid, picking, context=None): """ Gets the partner that will be invoiced Note that this function is inherited in the sale and purchase modules @param picking: object of the picking for which we are selecting the partner to invoice @return: object of the partner to invoice """ return picking.partner_id and picking.partner_id.id def action_invoice_create(self, cr, uid, ids, journal_id, group=False, type='out_invoice', context=None): """ Creates invoice based on the invoice state selected for picking. @param journal_id: Id of journal @param group: Whether to create a group invoice or not @param type: Type invoice to be created @return: Ids of created invoices for the pickings """ context = context or {} todo = {} for picking in self.browse(cr, uid, ids, context=context): partner = self._get_partner_to_invoice(cr, uid, picking, context) #grouping is based on the invoiced partner if group: key = partner else: key = picking.id for move in picking.move_lines: if move.invoice_state == '2binvoiced': if (move.state != 'cancel') and not move.scrapped: todo.setdefault(key, []) todo[key].append(move) invoices = [] for moves in todo.values(): invoices += self._invoice_create_line(cr, uid, moves, journal_id, type, context=context) return invoices def _get_invoice_vals(self, cr, uid, key, inv_type, journal_id, move, context=None): if context is None: context = {} partner, currency_id, company_id, user_id = key if inv_type in ('out_invoice', 'out_refund'): account_id = partner.property_account_receivable.id payment_term = partner.property_payment_term.id or False else: account_id = partner.property_account_payable.id payment_term = partner.property_supplier_payment_term.id or False return { 'origin': move.picking_id.name, 'date_invoice': context.get('date_inv', False), 'user_id': user_id, 'partner_id': partner.id, 'account_id': account_id, 'payment_term': payment_term, 'type': inv_type, 'fiscal_position': partner.property_account_position.id, 'company_id': company_id, 'currency_id': currency_id, 'journal_id': journal_id, } def _invoice_create_line(self, cr, uid, moves, journal_id, inv_type='out_invoice', context=None): invoice_obj = self.pool.get('account.invoice') move_obj = self.pool.get('stock.move') invoices = {} for move in moves: company = move.company_id origin = move.picking_id.name partner, user_id, currency_id = move_obj._get_master_data(cr, uid, move, company, context=context) key = (partner, currency_id, company.id, user_id) invoice_vals = self._get_invoice_vals(cr, uid, key, inv_type, journal_id, move, context=context) if key not in invoices: # Get account and payment terms invoice_id = self._create_invoice_from_picking(cr, uid, move.picking_id, invoice_vals, context=context) invoices[key] = invoice_id else: invoice = invoice_obj.browse(cr, uid, invoices[key], context=context) if not invoice.origin or invoice_vals['origin'] not in invoice.origin.split(', '): invoice_origin = filter(None, [invoice.origin, invoice_vals['origin']]) invoice.write({'origin': ', '.join(invoice_origin)}) invoice_line_vals = move_obj._get_invoice_line_vals(cr, uid, move, partner, inv_type, context=context) invoice_line_vals['invoice_id'] = invoices[key] 
invoice_line_vals['origin'] = origin move_obj._create_invoice_line_from_vals(cr, uid, move, invoice_line_vals, context=context) move_obj.write(cr, uid, move.id, {'invoice_state': 'invoiced'}, context=context) invoice_obj.button_compute(cr, uid, invoices.values(), context=context, set_total=(inv_type in ('in_invoice', 'in_refund'))) return invoices.values() def _prepare_values_extra_move(self, cr, uid, op, product, remaining_qty, context=None): """ Need to pass invoice_state of picking when an extra move is created which is not a copy of a previous """ res = super(stock_picking, self)._prepare_values_extra_move(cr, uid, op, product, remaining_qty, context=context) res.update({'invoice_state': op.picking_id.invoice_state}) if op.linked_move_operation_ids: res.update({'price_unit': op.linked_move_operation_ids[-1].move_id.price_unit}) return res
agpl-3.0
luogangyi/bcec-nova
nova/virt/xenapi/image/utils.py
16
3394
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import shutil
import tarfile

from nova.image import glance


class GlanceImage(object):
    def __init__(self, context, image_href_or_id):
        self._context = context
        self._image_service, self._image_id = glance.get_remote_image_service(
            context, image_href_or_id)
        self._cached_meta = None

    @property
    def meta(self):
        if self._cached_meta is None:
            self._cached_meta = self._image_service.show(
                self._context, self._image_id)
        return self._cached_meta

    def download_to(self, fileobj):
        return self._image_service.download(
            self._context, self._image_id, fileobj)

    def is_raw_tgz(self):
        return ['raw', 'tgz'] == [
            self.meta.get(key) for key in ('disk_format', 'container_format')]

    def data(self):
        return self._image_service.download(self._context, self._image_id)


class RawImage(object):
    def __init__(self, glance_image):
        self.glance_image = glance_image

    def get_size(self):
        return int(self.glance_image.meta['size'])

    def stream_to(self, fileobj):
        return self.glance_image.download_to(fileobj)


class IterableToFileAdapter(object):
    """A degenerate file-like so that an iterable could be read like a file.

    As Glance client returns an iterable, but tarfile requires a file like,
    this is the adapter between the two. This allows tarfile to access
    the glance stream.
    """

    def __init__(self, iterable):
        self.iterator = iterable.__iter__()
        self.remaining_data = ''

    def read(self, size):
        chunk = self.remaining_data
        try:
            while not chunk:
                chunk = self.iterator.next()
        except StopIteration:
            return ''
        return_value = chunk[0:size]
        self.remaining_data = chunk[size:]
        return return_value


class RawTGZImage(object):
    def __init__(self, glance_image):
        self.glance_image = glance_image
        self._tar_info = None
        self._tar_file = None

    def _as_file(self):
        return IterableToFileAdapter(self.glance_image.data())

    def _as_tarfile(self):
        return tarfile.open(mode='r|gz', fileobj=self._as_file())

    def get_size(self):
        if self._tar_file is None:
            self._tar_file = self._as_tarfile()
            self._tar_info = self._tar_file.next()
        return self._tar_info.size

    def stream_to(self, target_file):
        if self._tar_file is None:
            self._tar_file = self._as_tarfile()
            self._tar_info = self._tar_file.next()
        source_file = self._tar_file.extractfile(self._tar_info)
        shutil.copyfileobj(source_file, target_file)
        self._tar_file.close()
apache-2.0
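A standalone sketch (Python 3, illustrative class name) of the adapter idea in IterableToFileAdapter above: exposing a chunk iterator through a file-like read(size) so that tarfile can consume it. Like the original, each read returns at most one buffered chunk.

class ChunkReader(object):
    def __init__(self, iterable):
        self.iterator = iter(iterable)
        self.remaining = b''

    def read(self, size):
        chunk = self.remaining
        try:
            # Pull chunks until something non-empty arrives.
            while not chunk:
                chunk = next(self.iterator)
        except StopIteration:
            return b''
        self.remaining = chunk[size:]
        return chunk[:size]

reader = ChunkReader([b'hello ', b'world'])
print(reader.read(4))   # b'hell'
print(reader.read(4))   # b'o ' (rest of the first chunk, not a full 4 bytes)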
xfumihiro/powerline
powerline/lint/__init__.py
14
20654
# vim:fileencoding=utf-8:noet from __future__ import (unicode_literals, division, absolute_import, print_function) import os import logging from collections import defaultdict from itertools import chain from functools import partial from powerline import generate_config_finder, get_config_paths, load_config from powerline.segments.vim import vim_modes from powerline.lib.dict import mergedicts_copy from powerline.lib.config import ConfigLoader from powerline.lib.unicode import unicode from powerline.lib.path import join from powerline.lint.markedjson import load from powerline.lint.markedjson.error import echoerr, EchoErr, MarkedError from powerline.lint.checks import (check_matcher_func, check_ext, check_config, check_top_theme, check_color, check_translated_group_name, check_group, check_segment_module, check_exinclude_function, type_keys, check_segment_function, check_args, get_one_segment_function, check_highlight_groups, check_highlight_group, check_full_segment_data, get_all_possible_functions, check_segment_data_key, register_common_name, highlight_group_spec) from powerline.lint.spec import Spec from powerline.lint.context import Context def open_file(path): return open(path, 'rb') def generate_json_config_loader(lhadproblem): def load_json_config(config_file_path, load=load, open_file=open_file): with open_file(config_file_path) as config_file_fp: r, hadproblem = load(config_file_fp) if hadproblem: lhadproblem[0] = True return r return load_json_config function_name_re = '^(\w+\.)*[a-zA-Z_]\w*$' divider_spec = Spec().printable().len( 'le', 3, (lambda value: 'Divider {0!r} is too large!'.format(value))).copy ext_theme_spec = Spec().type(unicode).func(lambda *args: check_config('themes', *args)).copy top_theme_spec = Spec().type(unicode).func(check_top_theme).copy ext_spec = Spec( colorscheme=Spec().type(unicode).func( (lambda *args: check_config('colorschemes', *args)) ), theme=ext_theme_spec(), top_theme=top_theme_spec().optional(), ).copy gen_components_spec = (lambda *components: Spec().list(Spec().type(unicode).oneof(set(components)))) main_spec = (Spec( common=Spec( default_top_theme=top_theme_spec().optional(), term_truecolor=Spec().type(bool).optional(), term_escape_style=Spec().type(unicode).oneof(set(('auto', 'xterm', 'fbterm'))).optional(), # Python is capable of loading from zip archives. 
Thus checking path # only for existence of the path, not for it being a directory paths=Spec().list( (lambda value, *args: (True, True, not os.path.exists(os.path.expanduser(value.value)))), (lambda value: 'path does not exist: {0}'.format(value)) ).optional(), log_file=Spec().type(unicode).func( ( lambda value, *args: ( True, True, not os.path.isdir(os.path.dirname(os.path.expanduser(value))) ) ), (lambda value: 'directory does not exist: {0}'.format(os.path.dirname(value))) ).optional(), log_level=Spec().re('^[A-Z]+$').func( (lambda value, *args: (True, True, not hasattr(logging, value))), (lambda value: 'unknown debugging level {0}'.format(value)) ).optional(), log_format=Spec().type(unicode).optional(), interval=Spec().either(Spec().cmp('gt', 0.0), Spec().type(type(None))).optional(), reload_config=Spec().type(bool).optional(), watcher=Spec().type(unicode).oneof(set(('auto', 'inotify', 'stat'))).optional(), ).context_message('Error while loading common configuration (key {key})'), ext=Spec( vim=ext_spec().update( components=gen_components_spec('statusline', 'tabline').optional(), local_themes=Spec( __tabline__=ext_theme_spec(), ).unknown_spec( Spec().re(function_name_re).func(partial(check_matcher_func, 'vim')), ext_theme_spec() ), ).optional(), ipython=ext_spec().update( local_themes=Spec( in2=ext_theme_spec(), out=ext_theme_spec(), rewrite=ext_theme_spec(), ), ).optional(), shell=ext_spec().update( components=gen_components_spec('tmux', 'prompt').optional(), local_themes=Spec( continuation=ext_theme_spec(), select=ext_theme_spec(), ), ).optional(), ).unknown_spec( check_ext, ext_spec(), ).context_message('Error while loading extensions configuration (key {key})'), ).context_message('Error while loading main configuration')) term_color_spec = Spec().unsigned().cmp('le', 255).copy true_color_spec = Spec().re( '^[0-9a-fA-F]{6}$', (lambda value: '"{0}" is not a six-digit hexadecimal unsigned integer written as a string'.format(value)) ).copy colors_spec = (Spec( colors=Spec().unknown_spec( Spec().ident(), Spec().either( Spec().tuple(term_color_spec(), true_color_spec()), term_color_spec() ) ).context_message('Error while checking colors (key {key})'), gradients=Spec().unknown_spec( Spec().ident(), Spec().tuple( Spec().len('gt', 1).list(term_color_spec()), Spec().len('gt', 1).list(true_color_spec()).optional(), ) ).context_message('Error while checking gradients (key {key})'), ).context_message('Error while loading colors configuration')) color_spec = Spec().type(unicode).func(check_color).copy name_spec = Spec().type(unicode).len('gt', 0).optional().copy group_name_spec = Spec().ident().copy group_spec = Spec().either(Spec( fg=color_spec(), bg=color_spec(), attrs=Spec().list(Spec().type(unicode).oneof(set(('bold', 'italic', 'underline')))), ), group_name_spec().func(check_group)).copy groups_spec = Spec().unknown_spec( group_name_spec(), group_spec(), ).context_message('Error while loading groups (key {key})').copy colorscheme_spec = (Spec( name=name_spec(), groups=groups_spec(), ).context_message('Error while loading coloscheme')) mode_translations_value_spec = Spec( colors=Spec().unknown_spec( color_spec(), color_spec(), ).optional(), groups=Spec().unknown_spec( group_name_spec().func(check_translated_group_name), group_spec(), ).optional(), ).copy top_colorscheme_spec = (Spec( name=name_spec(), groups=groups_spec(), mode_translations=Spec().unknown_spec( Spec().type(unicode), mode_translations_value_spec(), ).optional().context_message('Error while loading mode translations (key 
{key})').optional(), ).context_message('Error while loading top-level coloscheme')) vim_mode_spec = Spec().oneof(set(list(vim_modes) + ['nc', 'tab_nc', 'buf_nc'])).copy vim_colorscheme_spec = (Spec( name=name_spec(), groups=groups_spec(), mode_translations=Spec().unknown_spec( vim_mode_spec(), mode_translations_value_spec(), ).optional().context_message('Error while loading mode translations (key {key})'), ).context_message('Error while loading vim colorscheme')) shell_mode_spec = Spec().re('^(?:[\w\-]+|\.safe)$').copy shell_colorscheme_spec = (Spec( name=name_spec(), groups=groups_spec(), mode_translations=Spec().unknown_spec( shell_mode_spec(), mode_translations_value_spec(), ).optional().context_message('Error while loading mode translations (key {key})'), ).context_message('Error while loading shell colorscheme')) args_spec = Spec( pl=Spec().error('pl object must be set by powerline').optional(), segment_info=Spec().error('Segment info dictionary must be set by powerline').optional(), ).unknown_spec(Spec(), Spec()).optional().copy segment_module_spec = Spec().type(unicode).func(check_segment_module).optional().copy exinclude_spec = Spec().re(function_name_re).func(check_exinclude_function).copy segment_spec_base = Spec( name=Spec().re('^[a-zA-Z_]\w*$').optional(), function=Spec().re(function_name_re).func(check_segment_function).optional(), exclude_modes=Spec().list(vim_mode_spec()).optional(), include_modes=Spec().list(vim_mode_spec()).optional(), exclude_function=exinclude_spec().optional(), include_function=exinclude_spec().optional(), draw_hard_divider=Spec().type(bool).optional(), draw_soft_divider=Spec().type(bool).optional(), draw_inner_divider=Spec().type(bool).optional(), display=Spec().type(bool).optional(), module=segment_module_spec(), priority=Spec().type(int, float, type(None)).optional(), after=Spec().printable().optional(), before=Spec().printable().optional(), width=Spec().either(Spec().unsigned(), Spec().cmp('eq', 'auto')).optional(), align=Spec().oneof(set('lr')).optional(), args=args_spec().func(lambda *args, **kwargs: check_args(get_one_segment_function, *args, **kwargs)), contents=Spec().printable().optional(), highlight_groups=Spec().list( highlight_group_spec().re( '^(?:(?!:divider$).)+$', (lambda value: 'it is recommended that only divider highlight group names end with ":divider"') ) ).func(check_highlight_groups).optional(), divider_highlight_group=highlight_group_spec().func(check_highlight_group).re( ':divider$', (lambda value: 'it is recommended that divider highlight group names end with ":divider"') ).optional(), ).func(check_full_segment_data).copy subsegment_spec = segment_spec_base().update( type=Spec().oneof(set((key for key in type_keys if key != 'segment_list'))).optional(), ) segment_spec = segment_spec_base().update( type=Spec().oneof(type_keys).optional(), segments=Spec().optional().list(subsegment_spec), ) segments_spec = Spec().optional().list(segment_spec).copy segdict_spec = Spec( left=segments_spec().context_message('Error while loading segments from left side (key {key})'), right=segments_spec().context_message('Error while loading segments from right side (key {key})'), ).func( (lambda value, *args: (True, True, not (('left' in value) or ('right' in value)))), (lambda value: 'segments dictionary must contain either left, right or both keys') ).context_message('Error while loading segments (key {key})').copy divside_spec = Spec( hard=divider_spec(), soft=divider_spec(), ).copy segment_data_value_spec = Spec( 
after=Spec().printable().optional(), before=Spec().printable().optional(), display=Spec().type(bool).optional(), args=args_spec().func(lambda *args, **kwargs: check_args(get_all_possible_functions, *args, **kwargs)), contents=Spec().printable().optional(), ).copy dividers_spec = Spec( left=divside_spec(), right=divside_spec(), ).copy spaces_spec = Spec().unsigned().cmp( 'le', 2, (lambda value: 'Are you sure you need such a big ({0}) number of spaces?'.format(value)) ).copy common_theme_spec = Spec( default_module=segment_module_spec().optional(), cursor_space=Spec().type(int, float).cmp('le', 100).cmp('gt', 0).optional(), cursor_columns=Spec().type(int).cmp('gt', 0).optional(), ).context_message('Error while loading theme').copy top_theme_spec = common_theme_spec().update( dividers=dividers_spec(), spaces=spaces_spec(), use_non_breaking_spaces=Spec().type(bool).optional(), segment_data=Spec().unknown_spec( Spec().func(check_segment_data_key), segment_data_value_spec(), ).optional().context_message('Error while loading segment data (key {key})'), ) main_theme_spec = common_theme_spec().update( dividers=dividers_spec().optional(), spaces=spaces_spec().optional(), segment_data=Spec().unknown_spec( Spec().func(check_segment_data_key), segment_data_value_spec(), ).optional().context_message('Error while loading segment data (key {key})'), ) theme_spec = common_theme_spec().update( dividers=dividers_spec().optional(), spaces=spaces_spec().optional(), segment_data=Spec().unknown_spec( Spec().func(check_segment_data_key), segment_data_value_spec(), ).optional().context_message('Error while loading segment data (key {key})'), segments=segdict_spec().update(above=Spec().list(segdict_spec()).optional()), ) def register_common_names(): register_common_name('player', 'powerline.segments.common.players', '_player') def load_json_file(path): with open_file(path) as F: try: config, hadproblem = load(F) except MarkedError as e: return True, None, str(e) else: return hadproblem, config, None def updated_with_config(d): hadproblem, config, error = load_json_file(d['path']) d.update( hadproblem=hadproblem, config=config, error=error, ) return d def find_all_ext_config_files(search_paths, subdir): for config_root in search_paths: top_config_subpath = join(config_root, subdir) if not os.path.isdir(top_config_subpath): if os.path.exists(top_config_subpath): yield { 'error': 'Path {0} is not a directory'.format(top_config_subpath), 'path': top_config_subpath, } continue for ext_name in os.listdir(top_config_subpath): ext_path = os.path.join(top_config_subpath, ext_name) if not os.path.isdir(ext_path): if ext_name.endswith('.json') and os.path.isfile(ext_path): yield updated_with_config({ 'error': False, 'path': ext_path, 'name': ext_name[:-5], 'ext': None, 'type': 'top_' + subdir, }) else: yield { 'error': 'Path {0} is not a directory or configuration file'.format(ext_path), 'path': ext_path, } continue for config_file_name in os.listdir(ext_path): config_file_path = os.path.join(ext_path, config_file_name) if config_file_name.endswith('.json') and os.path.isfile(config_file_path): yield updated_with_config({ 'error': False, 'path': config_file_path, 'name': config_file_name[:-5], 'ext': ext_name, 'type': subdir, }) else: yield { 'error': 'Path {0} is not a configuration file'.format(config_file_path), 'path': config_file_path, } def dict2(d): return defaultdict(dict, ((k, dict(v)) for k, v in d.items())) def check(paths=None, debug=False, echoerr=echoerr, require_ext=None): '''Check configuration sanity :param 
list paths: Paths from which configuration should be loaded. :param bool debug: Determines whether some information useful for debugging linter should be output. :param function echoerr: Function that will be used to echo the error(s). Should accept four optional keyword parameters: ``problem`` and ``problem_mark``, and ``context`` and ``context_mark``. :param str require_ext: Require configuration for some extension to be present. :return: ``False`` if user configuration seems to be completely sane and ``True`` if some problems were found. ''' hadproblem = False register_common_names() search_paths = paths or get_config_paths() find_config_files = generate_config_finder(lambda: search_paths) logger = logging.getLogger('powerline-lint') logger.setLevel(logging.DEBUG if debug else logging.ERROR) logger.addHandler(logging.StreamHandler()) ee = EchoErr(echoerr, logger) if require_ext: used_main_spec = main_spec.copy() try: used_main_spec['ext'][require_ext].required() except KeyError: used_main_spec['ext'][require_ext] = ext_spec() else: used_main_spec = main_spec lhadproblem = [False] load_json_config = generate_json_config_loader(lhadproblem) config_loader = ConfigLoader(run_once=True, load=load_json_config) lists = { 'colorschemes': set(), 'themes': set(), 'exts': set(), } found_dir = { 'themes': False, 'colorschemes': False, } config_paths = defaultdict(lambda: defaultdict(dict)) loaded_configs = defaultdict(lambda: defaultdict(dict)) for d in chain( find_all_ext_config_files(search_paths, 'colorschemes'), find_all_ext_config_files(search_paths, 'themes'), ): if d['error']: hadproblem = True ee(problem=d['error']) continue if d['hadproblem']: hadproblem = True if d['ext']: found_dir[d['type']] = True lists['exts'].add(d['ext']) if d['name'] == '__main__': pass elif d['name'].startswith('__') or d['name'].endswith('__'): hadproblem = True ee(problem='File name is not supposed to start or end with “__”: {0}'.format( d['path'])) else: lists[d['type']].add(d['name']) config_paths[d['type']][d['ext']][d['name']] = d['path'] loaded_configs[d['type']][d['ext']][d['name']] = d['config'] else: config_paths[d['type']][d['name']] = d['path'] loaded_configs[d['type']][d['name']] = d['config'] for typ in ('themes', 'colorschemes'): if not found_dir[typ]: hadproblem = True ee(problem='Subdirectory {0} was not found in paths {1}'.format(typ, ', '.join(search_paths))) diff = set(config_paths['colorschemes']) - set(config_paths['themes']) if diff: hadproblem = True for ext in diff: typ = 'colorschemes' if ext in config_paths['themes'] else 'themes' if not config_paths['top_' + typ] or typ == 'themes': ee(problem='{0} extension {1} not present in {2}'.format( ext, 'configuration' if ( ext in loaded_configs['themes'] and ext in loaded_configs['colorschemes'] ) else 'directory', typ, )) try: main_config = load_config('config', find_config_files, config_loader) except IOError: main_config = {} ee(problem='Configuration file not found: config.json') hadproblem = True except MarkedError as e: main_config = {} ee(problem=str(e)) hadproblem = True else: if used_main_spec.match( main_config, data={'configs': config_paths, 'lists': lists}, context=Context(main_config), echoerr=ee )[1]: hadproblem = True import_paths = [os.path.expanduser(path) for path in main_config.get('common', {}).get('paths', [])] try: colors_config = load_config('colors', find_config_files, config_loader) except IOError: colors_config = {} ee(problem='Configuration file not found: colors.json') hadproblem = True except MarkedError as e: 
colors_config = {} ee(problem=str(e)) hadproblem = True else: if colors_spec.match(colors_config, context=Context(colors_config), echoerr=ee)[1]: hadproblem = True if lhadproblem[0]: hadproblem = True top_colorscheme_configs = dict(loaded_configs['top_colorschemes']) data = { 'ext': None, 'top_colorscheme_configs': top_colorscheme_configs, 'ext_colorscheme_configs': {}, 'colors_config': colors_config } for colorscheme, config in loaded_configs['top_colorschemes'].items(): data['colorscheme'] = colorscheme if top_colorscheme_spec.match(config, context=Context(config), data=data, echoerr=ee)[1]: hadproblem = True ext_colorscheme_configs = dict2(loaded_configs['colorschemes']) for ext, econfigs in ext_colorscheme_configs.items(): data = { 'ext': ext, 'top_colorscheme_configs': top_colorscheme_configs, 'ext_colorscheme_configs': ext_colorscheme_configs, 'colors_config': colors_config, } for colorscheme, config in econfigs.items(): data['colorscheme'] = colorscheme if ext == 'vim': spec = vim_colorscheme_spec elif ext == 'shell': spec = shell_colorscheme_spec else: spec = colorscheme_spec if spec.match(config, context=Context(config), data=data, echoerr=ee)[1]: hadproblem = True colorscheme_configs = {} for ext in lists['exts']: colorscheme_configs[ext] = {} for colorscheme in lists['colorschemes']: econfigs = ext_colorscheme_configs[ext] ecconfigs = econfigs.get(colorscheme) mconfigs = ( top_colorscheme_configs.get(colorscheme), econfigs.get('__main__'), ecconfigs, ) if not (mconfigs[0] or mconfigs[2]): continue config = None for mconfig in mconfigs: if not mconfig: continue if config: config = mergedicts_copy(config, mconfig) else: config = mconfig colorscheme_configs[ext][colorscheme] = config theme_configs = dict2(loaded_configs['themes']) top_theme_configs = dict(loaded_configs['top_themes']) for ext, configs in theme_configs.items(): data = { 'ext': ext, 'colorscheme_configs': colorscheme_configs, 'import_paths': import_paths, 'main_config': main_config, 'top_themes': top_theme_configs, 'ext_theme_configs': configs, 'colors_config': colors_config } for theme, config in configs.items(): data['theme'] = theme if theme == '__main__': data['theme_type'] = 'main' spec = main_theme_spec else: data['theme_type'] = 'regular' spec = theme_spec if spec.match(config, context=Context(config), data=data, echoerr=ee)[1]: hadproblem = True for top_theme, config in top_theme_configs.items(): data = { 'ext': None, 'colorscheme_configs': colorscheme_configs, 'import_paths': import_paths, 'main_config': main_config, 'theme_configs': theme_configs, 'ext_theme_configs': None, 'colors_config': colors_config } data['theme_type'] = 'top' data['theme'] = top_theme if top_theme_spec.match(config, context=Context(config), data=data, echoerr=ee)[1]: hadproblem = True return hadproblem
mit
urbaniak/gunicorn
examples/websocket/gevent_websocket.py
29
15600
import collections import errno import re from hashlib import md5, sha1 import base64 from base64 import b64encode, b64decode import socket import struct import logging from socket import error as SocketError import gevent from gunicorn.workers.async import ALREADY_HANDLED logger = logging.getLogger(__name__) WS_KEY = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" class WebSocketWSGI(object): def __init__(self, handler): self.handler = handler def verify_client(self, ws): pass def _get_key_value(self, key_value): if not key_value: return key_number = int(re.sub("\\D", "", key_value)) spaces = re.subn(" ", "", key_value)[1] if key_number % spaces != 0: return part = key_number / spaces return part def __call__(self, environ, start_response): if not (environ.get('HTTP_CONNECTION').find('Upgrade') != -1 and environ['HTTP_UPGRADE'].lower() == 'websocket'): # need to check a few more things here for true compliance start_response('400 Bad Request', [('Connection','close')]) return [] sock = environ['gunicorn.socket'] version = environ.get('HTTP_SEC_WEBSOCKET_VERSION') ws = WebSocket(sock, environ, version) handshake_reply = ("HTTP/1.1 101 Switching Protocols\r\n" "Upgrade: websocket\r\n" "Connection: Upgrade\r\n") key = environ.get('HTTP_SEC_WEBSOCKET_KEY') if key: ws_key = base64.b64decode(key) if len(ws_key) != 16: start_response('400 Bad Request', [('Connection','close')]) return [] protocols = [] subprotocols = environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL') ws_protocols = [] if subprotocols: for s in subprotocols.split(','): s = s.strip() if s in protocols: ws_protocols.append(s) if ws_protocols: handshake_reply += 'Sec-WebSocket-Protocol: %s\r\n' % ', '.join(ws_protocols) exts = [] extensions = environ.get('HTTP_SEC_WEBSOCKET_EXTENSIONS') ws_extensions = [] if extensions: for ext in extensions.split(','): ext = ext.strip() if ext in exts: ws_extensions.append(ext) if ws_extensions: handshake_reply += 'Sec-WebSocket-Extensions: %s\r\n' % ', '.join(ws_extensions) handshake_reply += ( "Sec-WebSocket-Origin: %s\r\n" "Sec-WebSocket-Location: ws://%s%s\r\n" "Sec-WebSocket-Version: %s\r\n" "Sec-WebSocket-Accept: %s\r\n\r\n" % ( environ.get('HTTP_ORIGIN'), environ.get('HTTP_HOST'), ws.path, version, base64.b64encode(sha1(key + WS_KEY).digest()) )) else: handshake_reply += ( "WebSocket-Origin: %s\r\n" "WebSocket-Location: ws://%s%s\r\n\r\n" % ( environ.get('HTTP_ORIGIN'), environ.get('HTTP_HOST'), ws.path)) sock.sendall(handshake_reply) try: self.handler(ws) except socket.error, e: if e[0] != errno.EPIPE: raise # use this undocumented feature of grainbows to ensure that it # doesn't barf on the fact that we didn't call start_response return ALREADY_HANDLED class WebSocket(object): """A websocket object that handles the details of serialization/deserialization to the socket. The primary way to interact with a :class:`WebSocket` object is to call :meth:`send` and :meth:`wait` in order to pass messages back and forth with the browser. Also available are the following properties: path The path value of the request. This is the same as the WSGI PATH_INFO variable, but more convenient. protocol The value of the Websocket-Protocol header. origin The value of the 'Origin' header. environ The full WSGI environment for this request. 
""" def __init__(self, sock, environ, version=76): """ :param socket: The eventlet socket :type socket: :class:`eventlet.greenio.GreenSocket` :param environ: The wsgi environment :param version: The WebSocket spec version to follow (default is 76) """ self.socket = sock self.origin = environ.get('HTTP_ORIGIN') self.protocol = environ.get('HTTP_WEBSOCKET_PROTOCOL') self.path = environ.get('PATH_INFO') self.environ = environ self.version = version self.websocket_closed = False self._buf = "" self._msgs = collections.deque() #self._sendlock = semaphore.Semaphore() @staticmethod def encode_hybi(buf, opcode, base64=False): """ Encode a HyBi style WebSocket frame. Optional opcode: 0x0 - continuation 0x1 - text frame (base64 encode buf) 0x2 - binary frame (use raw buf) 0x8 - connection close 0x9 - ping 0xA - pong """ if base64: buf = b64encode(buf) b1 = 0x80 | (opcode & 0x0f) # FIN + opcode payload_len = len(buf) if payload_len <= 125: header = struct.pack('>BB', b1, payload_len) elif payload_len > 125 and payload_len < 65536: header = struct.pack('>BBH', b1, 126, payload_len) elif payload_len >= 65536: header = struct.pack('>BBQ', b1, 127, payload_len) #print("Encoded: %s" % repr(header + buf)) return header + buf, len(header), 0 @staticmethod def decode_hybi(buf, base64=False): """ Decode HyBi style WebSocket packets. Returns: {'fin' : 0_or_1, 'opcode' : number, 'mask' : 32_bit_number, 'hlen' : header_bytes_number, 'length' : payload_bytes_number, 'payload' : decoded_buffer, 'left' : bytes_left_number, 'close_code' : number, 'close_reason' : string} """ f = {'fin' : 0, 'opcode' : 0, 'mask' : 0, 'hlen' : 2, 'length' : 0, 'payload' : None, 'left' : 0, 'close_code' : None, 'close_reason' : None} blen = len(buf) f['left'] = blen if blen < f['hlen']: return f # Incomplete frame header b1, b2 = struct.unpack_from(">BB", buf) f['opcode'] = b1 & 0x0f f['fin'] = (b1 & 0x80) >> 7 has_mask = (b2 & 0x80) >> 7 f['length'] = b2 & 0x7f if f['length'] == 126: f['hlen'] = 4 if blen < f['hlen']: return f # Incomplete frame header (f['length'],) = struct.unpack_from('>xxH', buf) elif f['length'] == 127: f['hlen'] = 10 if blen < f['hlen']: return f # Incomplete frame header (f['length'],) = struct.unpack_from('>xxQ', buf) full_len = f['hlen'] + has_mask * 4 + f['length'] if blen < full_len: # Incomplete frame return f # Incomplete frame header # Number of bytes that are part of the next frame(s) f['left'] = blen - full_len # Process 1 frame if has_mask: # unmask payload f['mask'] = buf[f['hlen']:f['hlen']+4] b = c = '' if f['length'] >= 4: data = struct.unpack('<I', buf[f['hlen']:f['hlen']+4])[0] of1 = f['hlen']+4 b = '' for i in xrange(0, int(f['length']/4)): mask = struct.unpack('<I', buf[of1+4*i:of1+4*(i+1)])[0] b += struct.pack('I', data ^ mask) if f['length'] % 4: l = f['length'] % 4 of1 = f['hlen'] of2 = full_len - l c = '' for i in range(0, l): mask = struct.unpack('B', buf[of1 + i])[0] data = struct.unpack('B', buf[of2 + i])[0] c += chr(data ^ mask) f['payload'] = b + c else: print("Unmasked frame: %s" % repr(buf)) f['payload'] = buf[(f['hlen'] + has_mask * 4):full_len] if base64 and f['opcode'] in [1, 2]: try: f['payload'] = b64decode(f['payload']) except: print("Exception while b64decoding buffer: %s" % repr(buf)) raise if f['opcode'] == 0x08: if f['length'] >= 2: f['close_code'] = struct.unpack_from(">H", f['payload']) if f['length'] > 3: f['close_reason'] = f['payload'][2:] return f @staticmethod def _pack_message(message): """Pack the message inside ``00`` and ``FF`` As per the dataframing section 
(5.3) for the websocket spec """ if isinstance(message, unicode): message = message.encode('utf-8') elif not isinstance(message, str): message = str(message) packed = "\x00%s\xFF" % message return packed def _parse_messages(self): """ Parses for messages in the buffer *buf*. It is assumed that the buffer contains the start character for a message, but that it may contain only part of the rest of the message. Returns an array of messages, and the buffer remainder that didn't contain any full messages.""" msgs = [] end_idx = 0 buf = self._buf while buf: if self.version in ['7', '8', '13']: frame = self.decode_hybi(buf, base64=False) #print("Received buf: %s, frame: %s" % (repr(buf), frame)) if frame['payload'] == None: break else: if frame['opcode'] == 0x8: # connection close self.websocket_closed = True break #elif frame['opcode'] == 0x1: else: msgs.append(frame['payload']); #msgs.append(frame['payload'].decode('utf-8', 'replace')); #buf = buf[-frame['left']:] if frame['left']: buf = buf[-frame['left']:] else: buf = '' else: frame_type = ord(buf[0]) if frame_type == 0: # Normal message. end_idx = buf.find("\xFF") if end_idx == -1: #pragma NO COVER break msgs.append(buf[1:end_idx].decode('utf-8', 'replace')) buf = buf[end_idx+1:] elif frame_type == 255: # Closing handshake. assert ord(buf[1]) == 0, "Unexpected closing handshake: %r" % buf self.websocket_closed = True break else: raise ValueError("Don't understand how to parse this type of message: %r" % buf) self._buf = buf return msgs def send(self, message): """Send a message to the browser. *message* should be convertable to a string; unicode objects should be encodable as utf-8. Raises socket.error with errno of 32 (broken pipe) if the socket has already been closed by the client.""" if self.version in ['7', '8', '13']: packed, lenhead, lentail = self.encode_hybi(message, opcode=0x01, base64=False) else: packed = self._pack_message(message) # if two greenthreads are trying to send at the same time # on the same socket, sendlock prevents interleaving and corruption #self._sendlock.acquire() try: self.socket.sendall(packed) finally: pass #self._sendlock.release() def wait(self): """Waits for and deserializes messages. Returns a single message; the oldest not yet processed. If the client has already closed the connection, returns None. This is different from normal socket behavior because the empty string is a valid websocket message.""" while not self._msgs: # Websocket might be closed already. if self.websocket_closed: return None # no parsed messages, must mean buf needs more data delta = self.socket.recv(8096) if delta == '': return None self._buf += delta msgs = self._parse_messages() self._msgs.extend(msgs) return self._msgs.popleft() def _send_closing_frame(self, ignore_send_errors=False): """Sends the closing frame to the client, if required.""" if self.version in ['7', '8', '13'] and not self.websocket_closed: msg = '' #if code != None: # msg = struct.pack(">H%ds" % (len(reason)), code) buf, h, t = self.encode_hybi(msg, opcode=0x08, base64=False) self.socket.sendall(buf) self.websocket_closed = True elif self.version == 76 and not self.websocket_closed: try: self.socket.sendall("\xff\x00") except SocketError: # Sometimes, like when the remote side cuts off the connection, # we don't care about this. 
if not ignore_send_errors: #pragma NO COVER raise self.websocket_closed = True def close(self): """Forcibly close the websocket; generally it is preferable to return from the handler method.""" self._send_closing_frame() self.socket.shutdown(True) self.socket.close() # demo app import os import random def handle(ws): """ This is the websocket handler function. Note that we can dispatch based on path in here, too.""" if ws.path == '/echo': while True: m = ws.wait() if m is None: break ws.send(m) elif ws.path == '/data': for i in xrange(10000): ws.send("0 %s %s\n" % (i, random.random())) gevent.sleep(0.1) wsapp = WebSocketWSGI(handle) def app(environ, start_response): """ This resolves to the web page or the websocket depending on the path.""" if environ['PATH_INFO'] == '/' or environ['PATH_INFO'] == "": data = open(os.path.join( os.path.dirname(__file__), 'websocket.html')).read() data = data % environ start_response('200 OK', [('Content-Type', 'text/html'), ('Content-Length', len(data))]) return [data] else: return wsapp(environ, start_response)
mit
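A standalone sketch of the frame header layout encode_hybi() in the record above produces for a small unmasked text frame (FIN=1, opcode 0x1, payload of 125 bytes or less).

import struct

payload = b'hello'
b1 = 0x80 | 0x01                          # FIN bit plus text-frame opcode
header = struct.pack('>BB', b1, len(payload))
frame = header + payload
print(repr(frame))                        # b'\x81\x05hello'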
stevecassidy/annotationrdf
docs/conf.py
1
8481
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

import annotationrdf

# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Annotation to RDF'
copyright = u'2014, Steve Cassidy'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = annotationrdf.__version__
# The full version, including alpha/beta/rc tags.
release = annotationrdf.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False


# -- Options for HTML output -------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'annotationrdfdoc'


# -- Options for LaTeX output ------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'annotationrdf.tex',
     u'Annotation to RDF Documentation',
     u'Steve Cassidy', 'manual'),
]

# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output ------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'annotationrdf',
     u'Annotation to RDF Documentation',
     [u'Steve Cassidy'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output ----------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'annotationrdf',
     u'Annotation to RDF Documentation',
     u'Steve Cassidy',
     'annotationrdf',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
bsd-3-clause
mgedmin/ansible-modules-core
cloud/amazon/ec2_eip.py
16
9966
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

DOCUMENTATION = '''
---
module: ec2_eip
short_description: associate an EC2 elastic IP with an instance.
description:
    - This module associates AWS EC2 elastic IP addresses with instances
version_added: "1.4"
options:
  instance_id:
    description:
      - The EC2 instance id
    required: false
  public_ip:
    description:
      - The elastic IP address to associate with the instance.
      - If absent, allocate a new address
    required: false
  state:
    description:
      - If present, associate the IP with the instance.
      - If absent, disassociate the IP with the instance.
    required: false
    choices: ['present', 'absent']
    default: present
  region:
    description:
      - the EC2 region to use
    required: false
    default: null
    aliases: [ ec2_region ]
  in_vpc:
    description:
      - allocate an EIP inside a VPC or not
    required: false
    default: false
    version_added: "1.4"
  reuse_existing_ip_allowed:
    description:
      - Reuse an EIP that is not associated to an instance (when available),
        instead of allocating a new one.
    required: false
    default: false
    version_added: "1.6"
extends_documentation_fragment: aws
author: "Lorin Hochstein (@lorin) <[email protected]>"
notes:
   - This module will return C(public_ip) on success, which will contain the
     public IP address associated with the instance.
   - There may be a delay between the time the Elastic IP is assigned and when
     the cloud instance is reachable via the new address. Use wait_for and
     pause to delay further playbook execution until the instance is reachable,
     if necessary.
'''

EXAMPLES = '''
- name: associate an elastic IP with an instance
  ec2_eip: instance_id=i-1212f003 ip=93.184.216.119

- name: disassociate an elastic IP from an instance
  ec2_eip: instance_id=i-1212f003 ip=93.184.216.119 state=absent

- name: allocate a new elastic IP and associate it with an instance
  ec2_eip: instance_id=i-1212f003

- name: allocate a new elastic IP without associating it to anything
  action: ec2_eip
  register: eip

- name: output the IP
  debug: msg="Allocated IP is {{ eip.public_ip }}"

- name: another way of allocating an elastic IP without associating it to anything
  ec2_eip: state='present'

- name: provision new instances with ec2
  ec2: keypair=mykey instance_type=c1.medium image=emi-40603AD1 wait=yes group=webserver count=3
  register: ec2

- name: associate new elastic IPs with each of the instances
  ec2_eip: "instance_id={{ item }}"
  with_items: ec2.instance_ids

- name: allocate a new elastic IP inside a VPC in us-west-2
  ec2_eip: region=us-west-2 in_vpc=yes
  register: eip

- name: output the IP
  debug: msg="Allocated IP inside a VPC is {{ eip.public_ip }}"
'''

try:
    import boto.ec2
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False


class EIPException(Exception):
    pass


def associate_ip_and_instance(ec2, address, instance_id, check_mode):
    if address_is_associated_with_instance(ec2, address, instance_id):
        return {'changed': False}

    # If we're in check mode, nothing else to do
    if not check_mode:
        if address.domain == 'vpc':
            res = ec2.associate_address(instance_id,
                                        allocation_id=address.allocation_id)
        else:
            res = ec2.associate_address(instance_id,
                                        public_ip=address.public_ip)
        if not res:
            raise EIPException('association failed')

    return {'changed': True}


def disassociate_ip_and_instance(ec2, address, instance_id, check_mode):
    if not address_is_associated_with_instance(ec2, address, instance_id):
        return {'changed': False}

    # If we're in check mode, nothing else to do
    if not check_mode:
        if address.domain == 'vpc':
            res = ec2.disassociate_address(
                association_id=address.association_id)
        else:
            res = ec2.disassociate_address(public_ip=address.public_ip)

        if not res:
            raise EIPException('disassociation failed')

    return {'changed': True}


def _find_address_by_ip(ec2, public_ip):
    try:
        return ec2.get_all_addresses([public_ip])[0]
    except boto.exception.EC2ResponseError as e:
        if "Address '{}' not found.".format(public_ip) not in e.message:
            raise


def _find_address_by_instance_id(ec2, instance_id):
    addresses = ec2.get_all_addresses(None, {'instance-id': instance_id})
    if addresses:
        return addresses[0]


def find_address(ec2, public_ip, instance_id):
    """ Find an existing Elastic IP address """
    if public_ip:
        return _find_address_by_ip(ec2, public_ip)
    elif instance_id:
        return _find_address_by_instance_id(ec2, instance_id)


def address_is_associated_with_instance(ec2, address, instance_id):
    """ Check if the elastic IP is currently associated with the instance """
    if address:
        return address and address.instance_id == instance_id
    return False


def allocate_address(ec2, domain, reuse_existing_ip_allowed):
    """ Allocate a new elastic IP address (when needed) and return it """
    if reuse_existing_ip_allowed:
        domain_filter = {'domain': domain or 'standard'}
        all_addresses = ec2.get_all_addresses(filters=domain_filter)

        unassociated_addresses = [a for a in all_addresses
                                  if not a.instance_id]
        if unassociated_addresses:
            return unassociated_addresses[0]

    return ec2.allocate_address(domain=domain)


def release_address(ec2, address, check_mode):
    """ Release a previously allocated elastic IP address """

    # If we're in check mode, nothing else to do
    if not check_mode:
        if not address.release():
            raise EIPException('release failed')

    return {'changed': True}


def find_instance(ec2, instance_id):
    """ Attempt to find the EC2 instance and return it """

    reservations = ec2.get_all_reservations(instance_ids=[instance_id])
    if len(reservations) == 1:
        instances = reservations[0].instances
        if len(instances) == 1:
            return instances[0]

    raise EIPException("could not find instance " + instance_id)


def ensure_present(ec2, domain, address, instance_id,
                   reuse_existing_ip_allowed, check_mode):
    changed = False

    # Return the EIP object since we've been given a public IP
    if not address:
        if check_mode:
            return {'changed': True}

        address = allocate_address(ec2, domain, reuse_existing_ip_allowed)
        changed = True

    if instance_id:
        # Allocate an IP for instance since no public_ip was provided
        instance = find_instance(ec2, instance_id)
        if instance.vpc_id:
            domain = 'vpc'

        # Associate address object (provided or allocated) with instance
        assoc_result = associate_ip_and_instance(ec2, address, instance_id,
                                                 check_mode)
        changed = changed or assoc_result['changed']

    return {'changed': changed, 'public_ip': address.public_ip}


def ensure_absent(ec2, domain, address, instance_id, check_mode):
    if not address:
        return {'changed': False}

    # disassociating address from instance
    if instance_id:
        return disassociate_ip_and_instance(ec2, address, instance_id,
                                            check_mode)
    # releasing address
    else:
        return release_address(ec2, address, check_mode)


def main():
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        instance_id=dict(required=False),
        public_ip=dict(required=False, aliases=['ip']),
        state=dict(required=False, default='present',
                   choices=['present', 'absent']),
        in_vpc=dict(required=False, type='bool', default=False),
        reuse_existing_ip_allowed=dict(required=False, type='bool',
                                       default=False),
        wait_timeout=dict(default=300),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    ec2 = ec2_connect(module)

    instance_id = module.params.get('instance_id')
    public_ip = module.params.get('public_ip')
    state = module.params.get('state')
    in_vpc = module.params.get('in_vpc')
    domain = 'vpc' if in_vpc else None
    reuse_existing_ip_allowed = module.params.get('reuse_existing_ip_allowed')

    try:
        address = find_address(ec2, public_ip, instance_id)

        if state == 'present':
            result = ensure_present(ec2, domain, address, instance_id,
                                    reuse_existing_ip_allowed,
                                    module.check_mode)
        else:
            result = ensure_absent(ec2, domain, address, instance_id,
                                   module.check_mode)
    except (boto.exception.EC2ResponseError, EIPException) as e:
        module.fail_json(msg=str(e))

    module.exit_json(**result)


# import module snippets
from ansible.module_utils.basic import *  # noqa
from ansible.module_utils.ec2 import *  # noqa

if __name__ == '__main__':
    main()
gpl-3.0
TheoChevalier/bedrock
bedrock/press/tests.py
10
12312
# -*- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import datetime from django.core import mail from django.test.client import RequestFactory from bedrock.base.urlresolvers import reverse from mock import Mock, patch from nose.tools import eq_, ok_ from bedrock.press import forms as press_forms, views as press_views from bedrock.press.forms import (PressInquiryForm, SpeakerRequestForm) from bedrock.mozorg.tests import TestCase class TestPressInquiry(TestCase): def setUp(self): self.factory = RequestFactory() self.view = press_views.PressInquiryView.as_view() with self.activate('en-US'): self.url = reverse('press.press-inquiry') self.data = { 'jobtitle': 'Senior Inquiry Person', 'name': 'IceCat FireBadger', 'user_email': '[email protected]', 'media_org': 'Big Money', 'inquiry': 'Want to know private stuff', 'deadline': datetime.date.today() + datetime.timedelta(days=1) } def tearDown(self): mail.outbox = [] def test_view_post_valid_data(self): """ A valid POST should 302 redirect. """ request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True response = self.view(request) eq_(response.status_code, 302) eq_(response['Location'], '/en-US/press/press-inquiry/?success=True') def test_view_post_missing_data(self): """ POST with missing data should return 200 and contain form errors in the template. """ self.data.update(name='') # remove required name request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True response = self.view(request) eq_(response.status_code, 200) self.assertIn('Please enter your name.', response.content) def test_view_post_honeypot(self): """ POST with honeypot text box filled should return 200 and contain general form error message. """ self.data['office_fax'] = 'spammer' request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True response = self.view(request) eq_(response.status_code, 200) self.assertIn('An error has occurred', response.content) def test_form_valid_data(self): """ Form should be valid. """ form = PressInquiryForm(self.data) # make sure form is valid ok_(form.is_valid()) def test_form_missing_data(self): """ With incorrect data (missing email), form should not be valid and should have user_email in the errors hash. """ self.data.update(user_email='') # remove required user_email form = PressInquiryForm(self.data) # make sure form is invalid ok_(not form.is_valid()) # make sure user_email errors are in form self.assertIn('user_email', form.errors) def test_form_honeypot(self): """ Form with honeypot text box filled should not be valid. """ self.data['office_fax'] = 'spammer' form = PressInquiryForm(self.data) eq_(False, form.is_valid()) @patch('bedrock.press.views.render_to_string', return_value='rendered') @patch('bedrock.press.views.EmailMessage') def test_email(self, mock_email_message, mock_render_to_string): """ Make sure email is sent with expected values. 
""" mock_send = mock_email_message.return_value.send # create POST request request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True # submit POST request self.view(request) # make sure email was sent mock_send.assert_called_once_with() # make sure email values are correct mock_email_message.assert_called_once_with( press_views.PRESS_INQUIRY_EMAIL_SUBJECT, 'rendered', press_views.PRESS_INQUIRY_EMAIL_FROM, press_views.PRESS_INQUIRY_EMAIL_TO) class TestSpeakerRequest(TestCase): def setUp(self): self.factory = RequestFactory() self.view = press_views.SpeakerRequestView.as_view() with self.activate('en-US'): self.url = reverse('press.speaker-request') self.data = { 'sr_event_name': 'Test Event', 'sr_event_url': 'www.mozilla.org', 'sr_event_date': datetime.date.today() + datetime.timedelta(days=1), 'sr_event_time': '12:00 PM', 'sr_contact_name': 'The Dude', 'sr_contact_email': '[email protected]', } def tearDown(self): mail.outbox = [] def test_view_post_valid_data(self): """ A valid POST should 302 redirect. """ request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True response = self.view(request) eq_(response.status_code, 302) eq_(response['Location'], '/en-US/press/speakerrequest/?success=True') def test_view_post_missing_data(self): """ POST with missing data should return 200 and contain form errors in the template. """ self.data.update(sr_event_url='') # remove required url request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True response = self.view(request) eq_(response.status_code, 200) self.assertIn('Please enter a URL', response.content) def test_view_post_honeypot(self): """ POST with honeypot text box filled should return 200 and contain general form error message. """ self.data['office_fax'] = 'spammer' request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True response = self.view(request) eq_(response.status_code, 200) self.assertIn('An error has occurred', response.content) def test_form_valid_data(self): """ Form should be valid. """ form = SpeakerRequestForm(self.data) # make sure form is valid ok_(form.is_valid()) def test_form_missing_data(self): """ With incorrect data (missing url), form should not be valid and should have url in the errors hash. """ self.data.update(sr_event_url='') # remove required url form = SpeakerRequestForm(self.data) # make sure form is invalid ok_(not form.is_valid()) # make sure url errors are in form self.assertIn('sr_event_url', form.errors) def test_form_honeypot(self): """ Form with honeypot text box filled should not be valid. """ self.data['office_fax'] = 'spammer' form = SpeakerRequestForm(self.data) eq_(False, form.is_valid()) def test_form_valid_attachement(self): """ Form should be valid when attachment under/at size limit. """ # attachment within size limit mock_attachment = Mock( _size=press_forms.SPEAKER_REQUEST_FILE_SIZE_LIMIT) form = SpeakerRequestForm( self.data, { 'sr_attachment': mock_attachment}) # make sure form is valid ok_(form.is_valid()) def test_form_invalid_attachement(self): """ Form should be invalid and contain attachment errors when attachment over size limit. 
""" # attachment within size limit mock_attachment = Mock( _size=(press_forms.SPEAKER_REQUEST_FILE_SIZE_LIMIT + 1)) form = SpeakerRequestForm( self.data, { 'sr_attachment': mock_attachment}) # make sure form is not valid ok_(not form.is_valid()) # make sure attachment errors are in form self.assertIn('sr_attachment', form.errors) @patch('bedrock.press.views.render_to_string', return_value='rendered') @patch('bedrock.press.views.EmailMessage') def test_email(self, mock_email_message, mock_render_to_string): """ Make sure email is sent with expected values. """ mock_send = mock_email_message.return_value.send # create POST request request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True # submit POST request self.view(request) # make sure email was sent mock_send.assert_called_once_with() # make sure email values are correct mock_email_message.assert_called_once_with( press_views.SPEAKER_REQUEST_EMAIL_SUBJECT, 'rendered', press_views.SPEAKER_REQUEST_EMAIL_FROM, press_views.SPEAKER_REQUEST_EMAIL_TO) @patch('bedrock.press.views.render_to_string', return_value='rendered') @patch('bedrock.press.views.EmailMessage') def test_email_with_attachement( self, mock_email_message, mock_render_to_string): """ Make sure email is sent with attachment. """ mock_attachment = Mock( content_type='text/plain', _size=(press_forms.SPEAKER_REQUEST_FILE_SIZE_LIMIT)) # make sure name attribute is treated as string mock_attachment.name = 'img.jpg' # create POST request request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True # add mock attachment to files dict request.FILES['sr_attachment'] = mock_attachment # submit POST request self.view(request) # make sure attachment was attached mock_email_message.return_value.attach.assert_called_once_with( 'img.jpg', mock_attachment.read.return_value, 'text/plain') mock_attachment.read.assert_called_once_with() # make sure email was sent mock_email_message.return_value.send.assert_called_once_with() # make sure email values are correct mock_email_message.assert_called_once_with( press_views.SPEAKER_REQUEST_EMAIL_SUBJECT, 'rendered', press_views.SPEAKER_REQUEST_EMAIL_FROM, press_views.SPEAKER_REQUEST_EMAIL_TO) def test_emails_not_escaped(self): """ Strings in the fraud report form should not be HTML escaped when inserted into the email, which is just text. E.g. if they entered J'adore le ''Renard de feu'' the email should not contain J&#39;adore le &#39;&#39;Renard de feu&#39;&#39; Tags are still stripped, though. """ STRING1 = u"<blink>J'adore Citröns</blink> & <Piñatas> so there" EXPECTED1 = u"J'adore Citröns & so there" STRING2 = u"J'adore Piñatas & <fromage> so here" EXPECTED2 = u"J'adore Piñatas & so here" STRING3 = u"J'adore <coffee>el café</coffee> también" EXPECTED3 = u"J'adore el café también" self.data.update(sr_contact_title=STRING1, sr_event_theme=STRING2, sr_event_format=STRING3) request = self.factory.post(self.url, self.data) # make sure CSRF doesn't hold us up request._dont_enforce_csrf_checks = True self.view(request) eq_(len(mail.outbox), 1) m = mail.outbox[0] self.assertIn(EXPECTED1, m.body) self.assertIn(EXPECTED2, m.body) self.assertIn(EXPECTED3, m.body)
mpl-2.0
hoosteeno/kuma
vendor/packages/translate/convert/test_po2txt.py
26
2729
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from translate.convert import po2txt, test_convert
from translate.misc import wStringIO


class TestPO2Txt:

    def po2txt(self, posource, txttemplate=None):
        """helper that converts po source to txt source without requiring files"""
        inputfile = wStringIO.StringIO(posource)
        print(inputfile.getvalue())
        outputfile = wStringIO.StringIO()
        if txttemplate:
            templatefile = wStringIO.StringIO(txttemplate)
        else:
            templatefile = None
        assert po2txt.converttxt(inputfile, outputfile, templatefile)
        print(outputfile.getvalue())
        return outputfile.getvalue()

    def test_basic(self):
        """test basic conversion"""
        txttemplate = "Heading\n\nBody text"
        posource = 'msgid "Heading"\nmsgstr "Opskrif"\n\nmsgid "Body text"\nmsgstr "Lyfteks"\n'
        assert self.po2txt(posource, txttemplate) == "Opskrif\n\nLyfteks"

    def test_nonascii(self):
        """test conversion with non-ascii text"""
        txttemplate = "Heading\n\nFile content"
        posource = 'msgid "Heading"\nmsgstr "Opskrif"\n\nmsgid "File content"\nmsgstr "Lêerinhoud"\n'
        assert self.po2txt(posource, txttemplate) == "Opskrif\n\nLêerinhoud"

    def test_blank_handling(self):
        """check that we discard blank messages"""
        txttemplate = "Heading\n\nBody text"
        posource = 'msgid "Heading"\nmsgstr "Opskrif"\n\nmsgid "Body text"\nmsgstr ""\n'
        assert self.po2txt(posource) == "Opskrif\n\nBody text"
        assert self.po2txt(posource, txttemplate) == "Opskrif\n\nBody text"

    def test_fuzzy_handling(self):
        """check that we handle fuzzy message correctly"""
        txttemplate = "Heading\n\nBody text"
        posource = '#, fuzzy\nmsgid "Heading"\nmsgstr "Opskrif"\n\nmsgid "Body text"\nmsgstr "Lyfteks"\n'
        assert self.po2txt(posource) == "Heading\n\nLyfteks"
        assert self.po2txt(posource, txttemplate) == "Heading\n\nLyfteks"


class TestPO2TxtCommand(test_convert.TestConvertCommand, TestPO2Txt):
    """Tests running actual po2txt commands on files"""
    convertmodule = po2txt
    defaultoptions = {"progress": "none"}

    def test_help(self):
        """tests getting help"""
        options = test_convert.TestConvertCommand.test_help(self)
        options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE")
        options = self.help_check(options, "--threshold=PERCENT")
        options = self.help_check(options, "--fuzzy")
        options = self.help_check(options, "--nofuzzy")
        options = self.help_check(options, "--encoding")
        options = self.help_check(options, "-w WRAP, --wrap=WRAP", last=True)
mpl-2.0
dgarros/ansible
lib/ansible/modules/network/cloudengine/ce_info_center_debug.py
39
21824
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0'} DOCUMENTATION = ''' --- module: ce_info_center_debug version_added: "2.4" short_description: Manages information center debug configuration on HUAWEI CloudEngine switches. description: - Manages information center debug configurations on HUAWEI CloudEngine switches. author: - wangdezhuang (@CloudEngine-Ansible) options: state: description: - Specify desired state of the resource. required: false default: present choices: ['present','absent'] debug_time_stamp: description: - Timestamp type of debugging information. required: false default: null choices: ['date_boot', 'date_second', 'date_tenthsecond', 'date_millisecond', 'shortdate_second', 'shortdate_tenthsecond', 'shortdate_millisecond', 'formatdate_second', 'formatdate_tenthsecond', 'formatdate_millisecond'] module_name: description: - Module name of the rule. The value is a string of 1 to 31 case-insensitive characters. The default value is default. Please use lower-case letter, such as [aaa, acl, arp, bfd]. required: false default: null channel_id: description: - Number of a channel. The value is an integer ranging from 0 to 9. The default value is 0. required: false default: null debug_enable: description: - Whether a device is enabled to output debugging information. required: false default: no_use choices: ['no_use','true','false'] debug_level: description: - Debug level permitted to output. 
required: false default: null choices: ['emergencies', 'alert', 'critical', 'error', 'warning', 'notification', 'informational', 'debugging'] ''' EXAMPLES = ''' - name: CloudEngine info center debug test hosts: cloudengine connection: local gather_facts: no vars: cli: host: "{{ inventory_hostname }}" port: "{{ ansible_ssh_port }}" username: "{{ username }}" password: "{{ password }}" transport: cli tasks: - name: "Config debug time stamp" ce_info_center_debug: state: present debug_time_stamp: date_boot provider: "{{ cli }}" - name: "Undo debug time stamp" ce_info_center_debug: state: absent debug_time_stamp: date_boot provider: "{{ cli }}" - name: "Config debug module log level" ce_info_center_debug: state: present module_name: aaa channel_id: 1 debug_enable: true debug_level: error provider: "{{ cli }}" - name: "Undo debug module log level" ce_info_center_debug: state: absent module_name: aaa channel_id: 1 debug_enable: true debug_level: error provider: "{{ cli }}" ''' RETURN = ''' changed: description: check to see if a change was made on the device returned: always type: boolean sample: true proposed: description: k/v pairs of parameters passed into module returned: always type: dict sample: {"state": "present", "debug_time_stamp": "date_boot"} existing: description: k/v pairs of existing aaa server returned: always type: dict sample: {"debugTimeStamp": "DATE_MILLISECOND"} end_state: description: k/v pairs of aaa params after module execution returned: always type: dict sample: {"debugTimeStamp": "DATE_BOOT"} updates: description: command sent to the device returned: always type: list sample: ["info-center timestamp debugging boot"] ''' from xml.etree import ElementTree from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec # get info center debug global CE_GET_DEBUG_GLOBAL_HEADER = """ <filter type="subtree"> <syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <globalParam> """ CE_GET_DEBUG_GLOBAL_TAIL = """ </globalParam> </syslog> </filter> """ # merge info center debug global CE_MERGE_DEBUG_GLOBAL_HEADER = """ <config> <syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <globalParam operation="merge"> """ CE_MERGE_DEBUG_GLOBAL_TAIL = """ </globalParam> </syslog> </config> """ # get info center debug source CE_GET_DEBUG_SOURCE_HEADER = """ <filter type="subtree"> <syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <icSources> <icSource> """ CE_GET_DEBUG_SOURCE_TAIL = """ </icSource> </icSources> </syslog> </filter> """ # merge info center debug source CE_MERGE_DEBUG_SOURCE_HEADER = """ <config> <syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <icSources> <icSource operation="merge"> """ CE_MERGE_DEBUG_SOURCE_TAIL = """ </icSource> </icSources> </syslog> </config> """ # delete info center debug source CE_DELETE_DEBUG_SOURCE_HEADER = """ <config> <syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <icSources> <icSource operation="delete"> """ CE_DELETE_DEBUG_SOURCE_TAIL = """ </icSource> </icSources> </syslog> </config> """ TIME_STAMP_DICT = {"date_boot": "boot", "date_second": "date precision-time second", "date_tenthsecond": "date precision-time tenth-second", "date_millisecond": "date precision-time millisecond", "shortdate_second": "short-date precision-time second", "shortdate_tenthsecond": 
"short-date precision-time tenth-second", "shortdate_millisecond": "short-date precision-time millisecond", "formatdate_second": "format-date precision-time second", "formatdate_tenthsecond": "format-date precision-time tenth-second", "formatdate_millisecond": "format-date precision-time millisecond"} CHANNEL_DEFAULT_DBG_STATE = {"0": "true", "1": "true", "2": "false", "3": "false", "4": "false", "5": "false", "6": "false", "7": "false", "8": "false", "9": "false"} CHANNEL_DEFAULT_DBG_LEVEL = {"0": "debugging", "1": "debugging", "2": "debugging", "3": "debugging", "4": "debugging", "5": "debugging", "6": "debugging", "7": "debugging", "8": "debugging", "9": "debugging"} class InfoCenterDebug(object): """ Manages info center debug configuration """ def __init__(self, **kwargs): """ Init function """ # argument spec argument_spec = kwargs["argument_spec"] self.spec = argument_spec self.module = AnsibleModule(argument_spec=self.spec, supports_check_mode=True) # module args self.state = self.module.params['state'] self.debug_time_stamp = self.module.params['debug_time_stamp'] or None self.module_name = self.module.params['module_name'] or None self.channel_id = self.module.params['channel_id'] or None self.debug_enable = self.module.params['debug_enable'] self.debug_level = self.module.params['debug_level'] or None # cur config self.cur_global_cfg = dict() self.cur_source_cfg = dict() # state self.changed = False self.updates_cmd = list() self.results = dict() self.proposed = dict() self.existing = dict() self.end_state = dict() def check_global_args(self): """ Check global args """ need_cfg = False find_flag = False self.cur_global_cfg["global_cfg"] = [] if self.debug_time_stamp: conf_str = CE_GET_DEBUG_GLOBAL_HEADER conf_str += "<debugTimeStamp></debugTimeStamp>" conf_str += CE_GET_DEBUG_GLOBAL_TAIL xml_str = get_nc_config(self.module, conf_str) if "<data/>" in xml_str: find_flag = False else: xml_str = xml_str.replace('\r', '').replace('\n', '').\ replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\ replace('xmlns="http://www.huawei.com/netconf/vrp"', "") root = ElementTree.fromstring(xml_str) global_cfg = root.findall("data/syslog/globalParam") if global_cfg: for tmp in global_cfg: tmp_dict = dict() for site in tmp: if site.tag in ["debugTimeStamp"]: tmp_dict[site.tag] = site.text self.cur_global_cfg["global_cfg"].append(tmp_dict) if self.cur_global_cfg["global_cfg"]: for tmp in self.cur_global_cfg["global_cfg"]: find_flag = True if tmp.get("debugTimeStamp").lower() != self.debug_time_stamp: find_flag = False if find_flag: break else: find_flag = False if self.state == "present": need_cfg = bool(not find_flag) else: need_cfg = bool(find_flag) self.cur_global_cfg["need_cfg"] = need_cfg def check_source_args(self): """ Check source args """ need_cfg = False find_flag = False self.cur_source_cfg["source_cfg"] = [] if self.module_name: if len(self.module_name) < 1 or len(self.module_name) > 31: self.module.fail_json( msg='Error: The module_name is out of [1 - 31].') if not self.channel_id: self.module.fail_json( msg='Error: Please input channel_id at the same time.') if self.channel_id: if self.channel_id.isdigit(): if int(self.channel_id) < 0 or int(self.channel_id) > 9: self.module.fail_json( msg='Error: The value of channel_id is out of [0 - 9].') else: self.module.fail_json( msg='Error: The channel_id is not digit.') conf_str = CE_GET_DEBUG_SOURCE_HEADER if self.module_name != "default": conf_str += "<moduleName>%s</moduleName>" % self.module_name.upper() else: conf_str += 
"<moduleName>default</moduleName>" if self.channel_id: conf_str += "<icChannelId></icChannelId>" if self.debug_enable != 'no_use': conf_str += "<dbgEnFlg></dbgEnFlg>" if self.debug_level: conf_str += "<dbgEnLevel></dbgEnLevel>" conf_str += CE_GET_DEBUG_SOURCE_TAIL xml_str = get_nc_config(self.module, conf_str) if "<data/>" in xml_str: find_flag = False else: xml_str = xml_str.replace('\r', '').replace('\n', '').\ replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\ replace('xmlns="http://www.huawei.com/netconf/vrp"', "") root = ElementTree.fromstring(xml_str) source_cfg = root.findall("data/syslog/icSources/icSource") if source_cfg: for tmp in source_cfg: tmp_dict = dict() for site in tmp: if site.tag in ["moduleName", "icChannelId", "dbgEnFlg", "dbgEnLevel"]: tmp_dict[site.tag] = site.text self.cur_source_cfg["source_cfg"].append(tmp_dict) if self.cur_source_cfg["source_cfg"]: for tmp in self.cur_source_cfg["source_cfg"]: find_flag = True if self.module_name and tmp.get("moduleName").lower() != self.module_name.lower(): find_flag = False if self.channel_id and tmp.get("icChannelId") != self.channel_id: find_flag = False if self.debug_enable != 'no_use' and tmp.get("dbgEnFlg") != self.debug_enable: find_flag = False if self.debug_level and tmp.get("dbgEnLevel") != self.debug_level: find_flag = False if find_flag: break else: find_flag = False if self.state == "present": need_cfg = bool(not find_flag) else: need_cfg = bool(find_flag) self.cur_source_cfg["need_cfg"] = need_cfg def get_proposed(self): """ Get proposed """ self.proposed["state"] = self.state if self.debug_time_stamp: self.proposed["debug_time_stamp"] = self.debug_time_stamp if self.module_name: self.proposed["module_name"] = self.module_name if self.channel_id: self.proposed["channel_id"] = self.channel_id if self.debug_enable != 'no_use': self.proposed["debug_enable"] = self.debug_enable if self.debug_level: self.proposed["debug_level"] = self.debug_level def get_existing(self): """ Get existing """ if self.cur_global_cfg["global_cfg"]: self.existing["global_cfg"] = self.cur_global_cfg["global_cfg"] if self.cur_source_cfg["source_cfg"]: self.existing["source_cfg"] = self.cur_source_cfg["source_cfg"] def get_end_state(self): """ Get end state """ self.check_global_args() if self.cur_global_cfg["global_cfg"]: self.end_state["global_cfg"] = self.cur_global_cfg["global_cfg"] self.check_source_args() if self.cur_source_cfg["source_cfg"]: self.end_state["source_cfg"] = self.cur_source_cfg["source_cfg"] def merge_debug_global(self): """ Merge debug global """ conf_str = CE_MERGE_DEBUG_GLOBAL_HEADER if self.debug_time_stamp: conf_str += "<debugTimeStamp>%s</debugTimeStamp>" % self.debug_time_stamp.upper() conf_str += CE_MERGE_DEBUG_GLOBAL_TAIL recv_xml = set_nc_config(self.module, conf_str) if "<ok/>" not in recv_xml: self.module.fail_json(msg='Error: Merge debug global failed.') if self.debug_time_stamp: cmd = "info-center timestamp debugging " + TIME_STAMP_DICT.get(self.debug_time_stamp) self.updates_cmd.append(cmd) self.changed = True def delete_debug_global(self): """ Delete debug global """ conf_str = CE_MERGE_DEBUG_GLOBAL_HEADER if self.debug_time_stamp: conf_str += "<debugTimeStamp>DATE_MILLISECOND</debugTimeStamp>" conf_str += CE_MERGE_DEBUG_GLOBAL_TAIL recv_xml = set_nc_config(self.module, conf_str) if "<ok/>" not in recv_xml: self.module.fail_json(msg='Error: delete debug global failed.') if self.debug_time_stamp: cmd = "undo info-center timestamp debugging" self.updates_cmd.append(cmd) self.changed = True def 
merge_debug_source(self): """ Merge debug source """ conf_str = CE_MERGE_DEBUG_SOURCE_HEADER if self.module_name: conf_str += "<moduleName>%s</moduleName>" % self.module_name if self.channel_id: conf_str += "<icChannelId>%s</icChannelId>" % self.channel_id if self.debug_enable != 'no_use': conf_str += "<dbgEnFlg>%s</dbgEnFlg>" % self.debug_enable if self.debug_level: conf_str += "<dbgEnLevel>%s</dbgEnLevel>" % self.debug_level conf_str += CE_MERGE_DEBUG_SOURCE_TAIL recv_xml = set_nc_config(self.module, conf_str) if "<ok/>" not in recv_xml: self.module.fail_json(msg='Error: Merge debug source failed.') cmd = "info-center source" if self.module_name: cmd += " %s" % self.module_name if self.channel_id: cmd += " channel %s" % self.channel_id if self.debug_enable != 'no_use': if self.debug_enable == "true": cmd += " debug state on" else: cmd += " debug state off" if self.debug_level: cmd += " level %s" % self.debug_level self.updates_cmd.append(cmd) self.changed = True def delete_debug_source(self): """ Delete debug source """ if self.debug_enable == 'no_use' and not self.debug_level: conf_str = CE_DELETE_DEBUG_SOURCE_HEADER if self.module_name: conf_str += "<moduleName>%s</moduleName>" % self.module_name if self.channel_id: conf_str += "<icChannelId>%s</icChannelId>" % self.channel_id conf_str += CE_DELETE_DEBUG_SOURCE_TAIL else: conf_str = CE_MERGE_DEBUG_SOURCE_HEADER if self.module_name: conf_str += "<moduleName>%s</moduleName>" % self.module_name if self.channel_id: conf_str += "<icChannelId>%s</icChannelId>" % self.channel_id if self.debug_enable != 'no_use': conf_str += "<dbgEnFlg>%s</dbgEnFlg>" % CHANNEL_DEFAULT_DBG_STATE.get(self.channel_id) if self.debug_level: conf_str += "<dbgEnLevel>%s</dbgEnLevel>" % CHANNEL_DEFAULT_DBG_LEVEL.get(self.channel_id) conf_str += CE_MERGE_DEBUG_SOURCE_TAIL recv_xml = set_nc_config(self.module, conf_str) if "<ok/>" not in recv_xml: self.module.fail_json(msg='Error: Delete debug source failed.') cmd = "undo info-center source" if self.module_name: cmd += " %s" % self.module_name if self.channel_id: cmd += " channel %s" % self.channel_id if self.debug_enable != 'no_use': cmd += " debug state" if self.debug_level: cmd += " level" self.updates_cmd.append(cmd) self.changed = True def work(self): """ work function """ self.check_global_args() self.check_source_args() self.get_proposed() self.get_existing() if self.state == "present": if self.cur_global_cfg["need_cfg"]: self.merge_debug_global() if self.cur_source_cfg["need_cfg"]: self.merge_debug_source() else: if self.cur_global_cfg["need_cfg"]: self.delete_debug_global() if self.cur_source_cfg["need_cfg"]: self.delete_debug_source() self.get_end_state() self.results['changed'] = self.changed self.results['proposed'] = self.proposed self.results['existing'] = self.existing self.results['end_state'] = self.end_state self.results['updates'] = self.updates_cmd self.module.exit_json(**self.results) def main(): """ Module main """ argument_spec = dict( state=dict(choices=['present', 'absent'], default='present'), debug_time_stamp=dict(choices=['date_boot', 'date_second', 'date_tenthsecond', 'date_millisecond', 'shortdate_second', 'shortdate_tenthsecond', 'shortdate_millisecond', 'formatdate_second', 'formatdate_tenthsecond', 'formatdate_millisecond']), module_name=dict(type='str'), channel_id=dict(type='str'), debug_enable=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']), debug_level=dict(choices=['emergencies', 'alert', 'critical', 'error', 'warning', 'notification', 'informational', 
'debugging']) ) argument_spec.update(ce_argument_spec) module = InfoCenterDebug(argument_spec=argument_spec) module.work() if __name__ == '__main__': main()
gpl-3.0
vitmod/enigma2
lib/python/Components/ServiceScan.py
6
7598
from enigma import eComponentScan, iDVBFrontend from Components.NimManager import nimmanager as nimmgr from Components.Converter.ChannelNumbers import channelnumbers class ServiceScan: Idle = 1 Running = 2 Done = 3 Error = 4 Errors = { 0: _("error starting scanning"), 1: _("error while scanning"), 2: _("no resource manager"), 3: _("no channel list") } def scanStatusChanged(self): if self.state == self.Running: self.progressbar.setValue(self.scan.getProgress()) self.lcd_summary.updateProgress(self.scan.getProgress()) if self.scan.isDone(): errcode = self.scan.getError() if errcode == 0: self.state = self.Done self.servicelist.listAll() else: self.state = self.Error self.errorcode = errcode self.network.setText("") self.transponder.setText("") else: result = self.foundServices + self.scan.getNumServices() percentage = self.scan.getProgress() if percentage > 99: percentage = 99 #TRANSLATORS: The stb is performing a channel scan, progress percentage is printed in '%d' (and '%%' will show a single '%' symbol) message = ngettext("Scanning - %d%% completed", "Scanning - %d%% completed", percentage) % percentage message += ", " #TRANSLATORS: Intermediate scanning result, '%d' channel(s) have been found so far message += ngettext("%d channel found", "%d channels found", result) % result self.text.setText(message) transponder = self.scan.getCurrentTransponder() network = "" tp_text = "" if transponder: tp_type = transponder.getSystem() if tp_type == iDVBFrontend.feSatellite: network = _("Satellite") tp = transponder.getDVBS() orb_pos = tp.orbital_position try: sat_name = str(nimmgr.getSatDescription(orb_pos)) except KeyError: sat_name = "" if orb_pos > 1800: # west orb_pos = 3600 - orb_pos h = _("W") else: h = _("E") if ("%d.%d" % (orb_pos/10, orb_pos%10)) in sat_name: network = sat_name else: network = "%s %d.%d %s" % (sat_name, orb_pos / 10, orb_pos % 10, h) tp_text = { tp.System_DVB_S : "DVB-S", tp.System_DVB_S2 : "DVB-S2" }.get(tp.system, "") if tp_text == "DVB-S2": tp_text = "%s %s" % ( tp_text, { tp.Modulation_Auto : "Auto", tp.Modulation_QPSK : "QPSK", tp.Modulation_8PSK : "8PSK", tp.Modulation_QAM16 : "QAM16" }.get(tp.modulation, "")) tp_text = "%s %d%c / %d / %s" % ( tp_text, tp.frequency/1000, { tp.Polarisation_Horizontal : 'H', tp.Polarisation_Vertical : 'V', tp.Polarisation_CircularLeft : 'L', tp.Polarisation_CircularRight : 'R' }.get(tp.polarisation, ' '), tp.symbol_rate/1000, { tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3", tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8", tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5", tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec, "")) elif tp_type == iDVBFrontend.feCable: network = _("Cable") tp = transponder.getDVBC() tp_text = "DVB-C %s %d / %d / %s" %( { tp.Modulation_Auto : "AUTO", tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32", tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128", tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""), tp.frequency, tp.symbol_rate/1000, { tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3", tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8", tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5", tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec_inner, "")) elif tp_type == iDVBFrontend.feTerrestrial: network = _("Terrestrial") tp = transponder.getDVBT() channel = channelnumbers.getChannelNumber(tp.frequency, self.scanList[self.run]["feid"]) if channel: channel = _("CH") + "%s " % channel freqMHz = "%0.1f MHz" % 
(tp.frequency/1000000.) tp_text = "%s %s %s %s" %( { tp.System_DVB_T_T2 : "DVB-T/T2", tp.System_DVB_T : "DVB-T", tp.System_DVB_T2 : "DVB-T2" }.get(tp.system, ""), { tp.Modulation_QPSK : "QPSK", tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM64 : "QAM64", tp.Modulation_Auto : "AUTO", tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""), "%s%s" % (channel, freqMHz.replace(".0","")), { tp.Bandwidth_8MHz : "Bw 8MHz", tp.Bandwidth_7MHz : "Bw 7MHz", tp.Bandwidth_6MHz : "Bw 6MHz", tp.Bandwidth_Auto : "Bw Auto", tp.Bandwidth_5MHz : "Bw 5MHz", tp.Bandwidth_1_712MHz : "Bw 1.712MHz", tp.Bandwidth_10MHz : "Bw 10MHz" }.get(tp.bandwidth, "")) else: print "unknown transponder type in scanStatusChanged" self.network.setText(network) self.transponder.setText(tp_text) if self.state == self.Done: result = self.foundServices + self.scan.getNumServices() self.text.setText(ngettext("Scanning completed, %d channel found", "Scanning completed, %d channels found", result) % result) if self.state == self.Error: self.text.setText(_("ERROR - failed to scan (%s)!") % (self.Errors[self.errorcode]) ) if self.state == self.Done or self.state == self.Error: if self.run != len(self.scanList) - 1: self.foundServices += self.scan.getNumServices() self.execEnd() self.run += 1 self.execBegin() def __init__(self, progressbar, text, servicelist, passNumber, scanList, network, transponder, frontendInfo, lcd_summary): self.foundServices = 0 self.progressbar = progressbar self.text = text self.servicelist = servicelist self.passNumber = passNumber self.scanList = scanList self.frontendInfo = frontendInfo self.transponder = transponder self.network = network self.run = 0 self.lcd_summary = lcd_summary def doRun(self): self.scan = eComponentScan() self.frontendInfo.frontend_source = lambda : self.scan.getFrontend() self.feid = self.scanList[self.run]["feid"] self.flags = self.scanList[self.run]["flags"] self.networkid = 0 if self.scanList[self.run].has_key("networkid"): self.networkid = self.scanList[self.run]["networkid"] self.state = self.Idle self.scanStatusChanged() for x in self.scanList[self.run]["transponders"]: self.scan.addInitial(x) def updatePass(self): size = len(self.scanList) if size > 1: self.passNumber.setText(_("pass") + " " + str(self.run + 1) + "/" + str(size) + " (" + _("Tuner") + " " + str(self.scanList[self.run]["feid"]) + ")") def execBegin(self): self.doRun() self.updatePass() self.scan.statusChanged.get().append(self.scanStatusChanged) self.scan.newService.get().append(self.newService) self.servicelist.clear() self.state = self.Running err = self.scan.start(self.feid, self.flags, self.networkid) self.frontendInfo.updateFrontendData() if err: self.state = self.Error self.errorcode = 0 self.scanStatusChanged() def execEnd(self): self.scan.statusChanged.get().remove(self.scanStatusChanged) self.scan.newService.get().remove(self.newService) if not self.isDone(): print "*** warning *** scan was not finished!" del self.scan def isDone(self): return self.state == self.Done or self.state == self.Error def newService(self): newServiceName = self.scan.getLastServiceName() newServiceRef = self.scan.getLastServiceRef() self.servicelist.addItem((newServiceName, newServiceRef)) self.lcd_summary.updateService(newServiceName) def destroy(self): pass
gpl-2.0