Columns: language (string, 6 classes) · original_string (string, 25 to 887k characters) · text (string, 25 to 887k characters)
Python
def test(self):
    """Test thread exit during breakpoint handling."""
    self.build(dictionary=self.getBuildFlags())
    exe = self.getBuildArtifact("a.out")
    self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)

    # This should create a breakpoint in the main thread.
    lldbutil.run_break_set_by_file_and_line(
        self, "main.cpp", self.breakpoint, num_expected_locations=1)

    # Run the program.
    self.runCmd("run", RUN_SUCCEEDED)

    # The stop reason of the thread should be breakpoint.
    self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
                substrs=['stopped', 'stop reason = breakpoint'])

    # Get the target process
    target = self.dbg.GetSelectedTarget()
    process = target.GetProcess()

    # The exit probably occurred during breakpoint handling, but it isn't
    # guaranteed. The main thing we're testing here is that the debugger
    # handles this cleanly in some way.

    # Get the number of threads
    num_threads = process.GetNumThreads()

    # Make sure we see at least five threads
    self.assertTrue(
        num_threads >= 5,
        'Number of expected threads and actual threads do not match.')

    # Run to completion
    self.runCmd("continue")

    # At this point, the inferior process should have exited.
    self.assertEqual(
        process.GetState(), lldb.eStateExited, PROCESS_EXITED)
Python
def use_llvm_tool(self, name, search_env=None, required=False, quiet=False,
                  search_paths=None, use_installed=False):
    """Find the executable program 'name', optionally using the specified
    environment variable as an override before searching the build directory
    and then optionally the configuration's PATH."""
    # If the override is specified in the environment, use it without
    # validation.
    tool = None
    if search_env:
        tool = self.config.environment.get(search_env)

    if not tool:
        if search_paths is None:
            search_paths = [self.config.llvm_tools_dir]
        # Use the specified search paths.
        path = os.pathsep.join(search_paths)
        tool = lit.util.which(name, path)

    if not tool and use_installed:
        # Otherwise look in the path, if enabled.
        tool = lit.util.which(name, self.config.environment['PATH'])

    if required and not tool:
        message = "couldn't find '{}' program".format(name)
        if search_env:
            message = message + \
                ', try setting {} in your environment'.format(search_env)
        self.lit_config.fatal(message)

    if tool:
        tool = os.path.normpath(tool)
        if not self.lit_config.quiet and not quiet:
            self.lit_config.note('using {}: {}'.format(name, tool))
    return tool
Python
def use_clang(self, additional_tool_dirs=[], additional_flags=[],
              required=True, use_installed=False):
    """Configure the test suite to be able to invoke clang.

    Sets up some environment variables important to clang, locates a
    just-built or optionally an installed clang, and adds a set of standard
    substitutions useful to any test suite that makes use of clang.
    """
    # Clear some environment variables that might affect Clang.
    #
    # This first set of vars are read by Clang, but shouldn't affect tests
    # that aren't specifically looking for these features, or are required
    # simply to run the tests at all.
    #
    # FIXME: Should we have a tool that enforces this?
    # safe_env_vars = (
    #     'TMPDIR', 'TEMP', 'TMP', 'USERPROFILE', 'PWD',
    #     'MACOSX_DEPLOYMENT_TARGET', 'IPHONEOS_DEPLOYMENT_TARGET',
    #     'VCINSTALLDIR', 'VC100COMNTOOLS', 'VC90COMNTOOLS',
    #     'VC80COMNTOOLS')
    possibly_dangerous_env_vars = [
        'COMPILER_PATH', 'RC_DEBUG_OPTIONS',
        'CINDEXTEST_PREAMBLE_FILE', 'LIBRARY_PATH',
        'CPATH', 'C_INCLUDE_PATH', 'CPLUS_INCLUDE_PATH',
        'OBJC_INCLUDE_PATH', 'OBJCPLUS_INCLUDE_PATH',
        'LIBCLANG_TIMING', 'LIBCLANG_OBJTRACKING',
        'LIBCLANG_LOGGING', 'LIBCLANG_BGPRIO_INDEX',
        'LIBCLANG_BGPRIO_EDIT', 'LIBCLANG_NOTHREADS',
        'LIBCLANG_RESOURCE_USAGE',
        'LIBCLANG_CODE_COMPLETION_LOGGING',
    ]
    # Clang/Win32 may refer to %INCLUDE%. vsvarsall.bat sets it.
    if platform.system() != 'Windows':
        possibly_dangerous_env_vars.append('INCLUDE')

    self.clear_environment(possibly_dangerous_env_vars)

    # Tweak the PATH to include the tools dir and the scripts dir.
    # Put Clang first to avoid LLVM overriding out-of-tree clang builds.
    exe_dir_props = [self.config.name.lower() + '_tools_dir',
                     'clang_tools_dir', 'llvm_tools_dir']
    paths = [getattr(self.config, pp) for pp in exe_dir_props
             if getattr(self.config, pp, None)]
    paths = additional_tool_dirs + paths
    self.with_environment('PATH', paths, append_path=True)

    lib_dir_props = [
        self.config.name.lower() + '_libs_dir',
        'clang_libs_dir',
        'llvm_shlib_dir',
        'llvm_libs_dir',
    ]
    lib_paths = [getattr(self.config, pp) for pp in lib_dir_props
                 if getattr(self.config, pp, None)]
    self.with_environment('LD_LIBRARY_PATH', lib_paths, append_path=True)

    shl = getattr(self.config, 'llvm_shlib_dir', None)
    pext = getattr(self.config, 'llvm_plugin_ext', None)
    if shl:
        self.config.substitutions.append(('%llvmshlibdir', shl))
    if pext:
        self.config.substitutions.append(('%pluginext', pext))

    # Discover the 'clang' and 'clangcc' to use.
    self.config.clang = self.use_llvm_tool(
        'clang', search_env='CLANG', required=required,
        search_paths=paths, use_installed=use_installed)
    if self.config.clang:
        self.config.available_features.add('clang')
        builtin_include_dir = self.get_clang_builtin_include_dir(
            self.config.clang)
        tool_substitutions = [
            ToolSubst('%clang', command=self.config.clang,
                      extra_args=additional_flags),
            ToolSubst('%clang_analyze_cc1', command='%clang_cc1',
                      extra_args=['-analyze', '%analyze',
                                  '-setup-static-analyzer'] + additional_flags),
            ToolSubst('%clang_cc1', command=self.config.clang,
                      extra_args=['-cc1', '-internal-isystem',
                                  builtin_include_dir,
                                  '-nostdsysteminc'] + additional_flags),
            ToolSubst('%clang_cpp', command=self.config.clang,
                      extra_args=['--driver-mode=cpp'] + additional_flags),
            ToolSubst('%clang_cl', command=self.config.clang,
                      extra_args=['--driver-mode=cl'] + additional_flags),
            ToolSubst('%clangxx', command=self.config.clang,
                      extra_args=['--driver-mode=g++'] + additional_flags),
        ]
        self.add_tool_substitutions(tool_substitutions)
        self.config.substitutions.append(
            ('%resource_dir', builtin_include_dir))

    self.config.substitutions.append(
        ('%itanium_abi_triple',
         self.make_itanium_abi_triple(self.config.target_triple)))
    self.config.substitutions.append(
        ('%ms_abi_triple',
         self.make_msabi_triple(self.config.target_triple)))

    # The host triple might not be set, at least if we're compiling clang
    # from an already installed llvm.
    if (self.config.host_triple and
            self.config.host_triple != '@LLVM_HOST_TRIPLE@'):
        self.config.substitutions.append(
            ('%target_itanium_abi_host_triple',
             '--target=' + self.make_itanium_abi_triple(
                 self.config.host_triple)))
    else:
        self.config.substitutions.append(
            ('%target_itanium_abi_host_triple', ''))

    # FIXME: Find nicer way to prohibit this.
    def prefer(this, to):
        return '''\"*** Do not use '%s' in tests, use '%s'. ***\"''' % (
            to, this)
    self.config.substitutions.append(
        (' clang ', prefer('%clang', 'clang')))
    self.config.substitutions.append(
        (r' clang\+\+ ', prefer('%clangxx', 'clang++')))
    self.config.substitutions.append(
        (' clang-cc ', prefer('%clang_cc1', 'clang-cc')))
    self.config.substitutions.append(
        (' clang-cl ', prefer('%clang_cl', 'clang-cl')))
    self.config.substitutions.append(
        (' clang -cc1 -analyze ',
         prefer('%clang_analyze_cc1', 'clang -cc1 -analyze')))
    self.config.substitutions.append(
        (' clang -cc1 ', prefer('%clang_cc1', 'clang -cc1')))
    self.config.substitutions.append(
        (' %clang-cc1 ',
         '''\"*** invalid substitution, use '%clang_cc1'. ***\"'''))
    self.config.substitutions.append(
        (' %clang-cpp ',
         '''\"*** invalid substitution, use '%clang_cpp'. ***\"'''))
    self.config.substitutions.append(
        (' %clang-cl ',
         '''\"*** invalid substitution, use '%clang_cl'. ***\"'''))
Python
def use_lld(self, additional_tool_dirs=[], required=True,
            use_installed=False):
    """Configure the test suite to be able to invoke lld.

    Sets up some environment variables important to lld, locates a
    just-built or optionally an installed lld, and adds a set of standard
    substitutions useful to any test suite that makes use of lld.
    """
    # Tweak the PATH to include the tools dir and the scripts dir.
    exe_dir_props = [self.config.name.lower() + '_tools_dir',
                     'lld_tools_dir', 'llvm_tools_dir']
    paths = [getattr(self.config, pp) for pp in exe_dir_props
             if getattr(self.config, pp, None)]
    paths = additional_tool_dirs + paths
    self.with_environment('PATH', paths, append_path=True)

    lib_dir_props = [self.config.name.lower() + '_libs_dir',
                     'lld_libs_dir', 'llvm_libs_dir']
    lib_paths = [getattr(self.config, pp) for pp in lib_dir_props
                 if getattr(self.config, pp, None)]
    self.with_environment('LD_LIBRARY_PATH', lib_paths, append_path=True)

    # Discover the LLD executables to use.
    ld_lld = self.use_llvm_tool('ld.lld', required=required,
                                search_paths=paths,
                                use_installed=use_installed)
    lld_link = self.use_llvm_tool('lld-link', required=required,
                                  search_paths=paths,
                                  use_installed=use_installed)
    ld64_lld = self.use_llvm_tool('ld64.lld', required=required,
                                  search_paths=paths,
                                  use_installed=use_installed)
    wasm_ld = self.use_llvm_tool('wasm-ld', required=required,
                                 search_paths=paths,
                                 use_installed=use_installed)

    was_found = ld_lld and lld_link and ld64_lld and wasm_ld
    tool_substitutions = []
    if ld_lld:
        tool_substitutions.append(ToolSubst(r'ld\.lld', command=ld_lld))
        self.config.available_features.add('ld.lld')
    if lld_link:
        tool_substitutions.append(ToolSubst('lld-link', command=lld_link))
        self.config.available_features.add('lld-link')
    if ld64_lld:
        tool_substitutions.append(ToolSubst(r'ld64\.lld', command=ld64_lld))
        self.config.available_features.add('ld64.lld')
    if wasm_ld:
        tool_substitutions.append(ToolSubst('wasm-ld', command=wasm_ld))
        self.config.available_features.add('wasm-ld')
    self.add_tool_substitutions(tool_substitutions)
    return was_found
Python
def boilerplate(attr: EncodingAttr):
    """Returns boilerplate main method.

    This method sets up a boilerplate main method that calls the generated
    sparse kernel. For convenience, this part is purely done as string input.
    """
    return f"""
func @main(%c: tensor<3x2xf64>) -> tensor<3x2xf64>
  attributes {{ llvm.emit_c_interface }} {{
  %0 = constant dense<[ [ 1.1, 0.0, 0.0, 1.4 ],
                        [ 0.0, 0.0, 0.0, 0.0 ],
                        [ 0.0, 0.0, 3.3, 0.0 ]]> : tensor<3x4xf64>
  %a = sparse_tensor.convert %0 : tensor<3x4xf64> to tensor<3x4xf64, {attr}>
  %b = constant dense<[ [ 1.0, 2.0 ],
                        [ 4.0, 3.0 ],
                        [ 5.0, 6.0 ],
                        [ 8.0, 7.0 ]]> : tensor<4x2xf64>
  %1 = call @spMxM(%a, %b, %c) : (tensor<3x4xf64, {attr}>,
                                  tensor<4x2xf64>,
                                  tensor<3x2xf64>) -> tensor<3x2xf64>
  return %1 : tensor<3x2xf64>
}}
"""
Python
def run_to_source_breakpoint(test, bkpt_pattern, source_spec,
                             launch_info=None, exe_name="a.out",
                             bkpt_module=None,
                             in_cwd=True,
                             only_one_thread=True,
                             extra_images=None):
    """Start up a target, using exe_name as the executable, and run it to
       a breakpoint set by source regex bkpt_pattern.

       The rest of the behavior is the same as run_to_name_breakpoint.
    """
    target = run_to_breakpoint_make_target(test, exe_name, in_cwd)
    # Set the breakpoints
    breakpoint = target.BreakpointCreateBySourceRegex(
        bkpt_pattern, source_spec, bkpt_module)
    test.assertTrue(
        breakpoint.GetNumLocations() > 0,
        'No locations found for source breakpoint: "%s", file: "%s", dir: "%s"'
        % (bkpt_pattern, source_spec.GetFilename(),
           source_spec.GetDirectory()))
    return run_to_breakpoint_do_run(test, target, breakpoint, launch_info,
                                    only_one_thread, extra_images)
Python
def _memoizeExpensiveOperation(extractCacheKey):
    """
    Allows memoizing a very expensive operation.

    We pickle the cache key to make sure we store an immutable representation
    of it. If we stored an object and the object was referenced elsewhere, it
    could be changed from under our feet, which would break the cache.
    """
    def decorator(function):
        cache = {}
        def f(*args, **kwargs):
            cacheKey = pickle.dumps(extractCacheKey(*args, **kwargs))
            if cacheKey not in cache:
                cache[cacheKey] = function(*args, **kwargs)
            return cache[cacheKey]
        return f
    return decorator
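A minimal usage sketch (the decorated function and its key extractor below are hypothetical, not part of the original sources): the wrapped function runs once per distinct pickled key, and later calls reuse the cached result.

import pickle  # used by the decorator above

# Hypothetical example: memoize an "expensive" probe keyed on its flags.
@_memoizeExpensiveOperation(lambda flags: tuple(flags))
def probeCompiler(flags):
    print('probing with', flags)  # stands in for expensive work
    return ' '.join(flags)

probeCompiler(['-std=c++17'])  # computed and cached
probeCompiler(['-std=c++17'])  # served from the cache, no recomputation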
Python
def _executeScriptInternal(test, commands):
    """
    Returns (stdout, stderr, exitCode, timeoutInfo)

    TODO: This really should be easier to access from Lit itself
    """
    parsedCommands = libcxx.test.format.parseScript(test, preamble=commands)

    litConfig = lit.LitConfig.LitConfig(
        progname='lit',
        path=[],
        quiet=False,
        useValgrind=False,
        valgrindLeakCheck=False,
        valgrindArgs=[],
        noExecute=False,
        debug=False,
        isWindows=platform.system() == 'Windows',
        params={})
    _, tmpBase = libcxx.test.format._getTempPaths(test)
    execDir = os.path.dirname(test.getExecPath())
    for d in (execDir, os.path.dirname(tmpBase)):
        if not os.path.exists(d):
            os.makedirs(d)
    res = lit.TestRunner.executeScriptInternal(test, litConfig, tmpBase,
                                               parsedCommands, execDir)
    if isinstance(res, lit.Test.Result):  # Handle failure to parse the Lit test
        res = ('', res.output, 127, None)
    (out, err, exitCode, timeoutInfo) = res

    # TODO: As a temporary workaround until https://reviews.llvm.org/D81892 lands,
    #       manually split any stderr output that is included in stdout. It
    #       shouldn't be there, but the Lit internal shell conflates stderr and
    #       stdout.
    conflatedErrorOutput = re.search("(# command stderr:.+$)", out, flags=re.DOTALL)
    if conflatedErrorOutput:
        conflatedErrorOutput = conflatedErrorOutput.group(0)
        out = out[:-len(conflatedErrorOutput)]
        err += conflatedErrorOutput

    return (out, err, exitCode, timeoutInfo)
Python
def sourceBuilds(config, source):
    """
    Return whether the program in the given string builds successfully.

    This is done by compiling and linking a program that consists of the given
    source with the %{cxx} substitution, and seeing whether that succeeds.
    """
    with _makeConfigTest(config) as test:
        with open(test.getSourcePath(), 'w') as sourceFile:
            sourceFile.write(source)
        out, err, exitCode, timeoutInfo = _executeScriptInternal(test, ['%{build}'])
        _executeScriptInternal(test, ['rm %t.exe'])
        return exitCode == 0
Python
def programOutput(config, program, args=None, testPrefix=''):
    """
    Compiles a program for the test target, runs it on the test target and
    returns the output.

    If the program fails to compile or run, None is returned instead. Note
    that execution of the program is done through the %{exec} substitution,
    which means that the program may be run on a remote host depending on
    what %{exec} does.
    """
    if args is None:
        args = []
    with _makeConfigTest(config, testPrefix=testPrefix) as test:
        with open(test.getSourcePath(), 'w') as source:
            source.write(program)
        try:
            _, _, exitCode, _ = _executeScriptInternal(test, ['%{build}'])
            if exitCode != 0:
                return None

            out, err, exitCode, _ = _executeScriptInternal(
                test, ["%{{run}} {}".format(' '.join(args))])
            if exitCode != 0:
                return None

            actualOut = re.search("# command output:\n(.+)\n$", out, flags=re.DOTALL)
            actualOut = actualOut.group(1) if actualOut else ""
            return actualOut
        finally:
            _executeScriptInternal(test, ['rm %t.exe'])
Python
def hasAnyLocale(config, locales):
    """
    Return whether the runtime execution environment supports a given locale.
    Different systems may use different names for a locale, so this function
    checks whether any of the passed locale names is supported by setlocale()
    and returns true if one of them works.

    This is done by executing a program that tries to set the given locale
    using %{exec} -- this means that the command may be executed on a remote
    host depending on the %{exec} substitution.
    """
    program = """
    #include <locale.h>
    #include <stdio.h>
    int main(int argc, char** argv) {
      // For debugging purposes print which locales are (not) supported.
      for (int i = 1; i < argc; i++) {
        if (::setlocale(LC_ALL, argv[i]) != NULL) {
          printf("%s is supported.\\n", argv[i]);
          return 0;
        }
        printf("%s is not supported.\\n", argv[i]);
      }
      return 1;
    }
    """
    return programOutput(config, program,
                         args=[pipes.quote(l) for l in locales],
                         testPrefix="check_locale_" + locales[0]) is not None
Python
def compilerMacros(config, flags=''):
    """
    Return a dictionary of predefined compiler macros.

    The keys are strings representing macros, and the values are strings
    representing what each macro is defined to.

    If the optional `flags` argument (a string) is provided, these flags will
    be added to the compiler invocation when generating the macros.
    """
    with _makeConfigTest(config) as test:
        with open(test.getSourcePath(), 'w') as sourceFile:
            # Make sure files like <__config> are included, since they can
            # define additional macros.
            sourceFile.write("#include <cstddef>")
        unparsedOutput, err, exitCode, timeoutInfo = _executeScriptInternal(test, [
            "%{{cxx}} %s -dM -E %{{flags}} %{{compile_flags}} {}".format(flags)
        ])
        parsedMacros = dict()
        defines = (l.strip() for l in unparsedOutput.split('\n')
                   if l.startswith('#define '))
        for line in defines:
            line = line[len('#define '):]
            macro, _, value = line.partition(' ')
            parsedMacros[macro] = value
        return parsedMacros
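For intuition, here is a standalone sketch of just the parsing step above, run on a made-up sample of `-dM -E` style output (the sample text is illustrative, not real compiler output):

# Illustrative only: parse "#define NAME VALUE" lines into a dict, as the
# loop in compilerMacros does with the preprocessor's output.
unparsedOutput = "#define __cplusplus 201703L\n#define _LIBCPP_VERSION 13000\n#define NDEBUG\n"
parsedMacros = {}
for line in (l.strip() for l in unparsedOutput.split('\n') if l.startswith('#define ')):
    line = line[len('#define '):]
    macro, _, value = line.partition(' ')
    parsedMacros[macro] = value
print(parsedMacros)  # {'__cplusplus': '201703L', '_LIBCPP_VERSION': '13000', 'NDEBUG': ''}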
Python
def applyTo(self, config):
    """
    Applies the action to the given configuration.

    This should modify the configuration object in place, and return nothing.

    If applying the action to the configuration would yield an invalid
    configuration, and it is possible to diagnose it here, this method should
    produce an error. For example, it should be an error to modify a
    substitution in a way that we know for sure is invalid (e.g. adding a
    compiler flag when we know the compiler doesn't support it). Failure to
    do so early may lead to difficult-to-diagnose issues down the road.
    """
    pass
Python
def pretty(self, config, litParams):
    """
    Returns a short and human-readable string describing what this action
    does.

    This is used for logging purposes when running the test suite, so it
    should be kept concise.
    """
    pass
Python
def pretty(self, config):
    """
    Returns the Feature's name.
    """
    return self._getName(config)
Python
def _getValue(self, config, litParams):
    """
    Return the value of the parameter given the configuration objects.
    """
    param = getattr(config, self.name, None)
    param = litParams.get(self.name, param)
    if param is None and self._default is None:
        raise ValueError("Parameter {} doesn't have a default value, but it "
                         "was not specified in the Lit parameters or in the "
                         "Lit config".format(self.name))
    getDefault = lambda: self._default(config) if callable(self._default) else self._default

    if param is not None:
        (pretty, value) = (param, self._parse(param))
    else:
        value = getDefault()
        pretty = '{} (default)'.format(value)

    if self._choices and value not in self._choices:
        raise ValueError("Got value '{}' for parameter '{}', which is not in "
                         "the provided set of possible choices: {}".format(
                             value, self.name, self._choices))
    return (pretty, value)
Python
def pretty(self, config, litParams):
    """
    Return a pretty representation of the parameter's name and value.
    """
    (prettyParameterValue, _) = self._getValue(config, litParams)
    return "{}={}".format(self.name, prettyParameterValue)
Python
def mmt4d(lhs=TensorDef(TV.LhsType, S.M, S.K, S.M0, S.K0),
          rhs=TensorDef(TV.RhsType, S.N, S.K, S.K0, S.N0),
          accum=TensorDef(TV.AccumType, S.M, S.N, S.M0, S.N0, output=True)):
    """Performs a matrix-matrix-transpose multiplication of two 4D inputs.

    Differences from linalg.matmul:
    * The right hand side is transposed, whence the 't' in 'mmt'.
    * The input and output tensors have a 4D shape instead of a 2D shape. They
      are interpreted as 2D matrices with one level of 2D tile subdivision,
      whence the 2+2=4 dimensions. The inner tile dimensions are identified
      with '0' suffixes below, for instance the LHS matrix shape (M, K, M0, K0)
      reads as: MxK tiles, each of shape M0xK0.
    """
    domain(D.m, D.n, D.m0, D.n0, D.k, D.k0)
    implements(ContractionOpInterface)
    accum[D.m, D.n, D.m0, D.n0] += cast(
        TV.AccumType, lhs[D.m, D.k, D.m0, D.k0]) * cast(
            TV.AccumType, rhs[D.n, D.k, D.k0, D.n0])
Python
def conv_2d_nchw(
        I=TensorDef(T1, S.N, S.C, S.IH, S.IW),
        K=TensorDef(T2, S.F, S.C, S.KH, S.KW),
        O=TensorDef(U, S.N, S.F, S.OH, S.OW, S.C, output=True),
        strides=AttributeDef(S.SH, S.SW),
        dilations=AttributeDef(S.DH, S.DW)):
    """Performs 2-D convolution.

    Numeric casting is performed on the operands to the inner multiply,
    promoting them to the same data type as the accumulator/output.
    """
    domain(D.n, D.f, D.oh, D.ow, D.c, D.kh, D.kw)
    O[D.n, D.f, D.oh, D.ow] += cast(
        U, I[D.n, D.c, D.oh * S.SH + D.kh * S.DH,
             D.ow * S.SW + D.kw * S.DW, ]) * cast(U, K[D.f, D.c, D.kh, D.kw])
Python
def conv_2d_nhwc_hwcf(
        I=TensorDef(T1, S.N, S.IH, S.IW, S.C),
        K=TensorDef(T2, S.KH, S.KW, S.C, S.F),
        O=TensorDef(U, S.N, S.OH, S.OW, S.F, output=True),
        strides=AttributeDef(S.SH, S.SW),
        dilations=AttributeDef(S.DH, S.DW)):
    """Performs 2-D convolution.

    Numeric casting is performed on the operands to the inner multiply,
    promoting them to the same data type as the accumulator/output.
    """
    domain(D.n, D.oh, D.ow, D.f, D.kh, D.kw, D.c)
    O[D.n, D.oh, D.ow, D.f] += cast(
        U, I[D.n, D.oh * S.SH + D.kh * S.DH,
             D.ow * S.SW + D.kw * S.DW, D.c]) * cast(U, K[D.kh, D.kw, D.c, D.f])
Python
def conv_2d_nhwc_hwcf_q(
        I=TensorDef(T1, S.N, S.IH, S.IW, S.C),
        K=TensorDef(T2, S.KH, S.KW, S.C, S.F),
        IZp=ScalarDef(I32),
        KZp=ScalarDef(I32),
        O=TensorDef(U, S.N, S.OH, S.OW, S.F, output=True),
        strides=AttributeDef(S.SH, S.SW),
        dilations=AttributeDef(S.DH, S.DW)):
    """Performs 2-D convolution with zero point offsets.

    Numeric casting is performed on the operands to the inner multiply,
    promoting them to the same data type as the accumulator/output. This
    includes the zero point offsets common to quantized operations.
    """
    domain(D.n, D.oh, D.ow, D.f, D.kh, D.kw, D.c)
    O[D.n, D.oh, D.ow, D.f] += (cast(
        U, I[D.n, D.oh * S.SH + D.kh * S.DH,
             D.ow * S.SW + D.kw * S.DW, D.c]) - cast(U, IZp)) * (
                 cast(U, K[D.kh, D.kw, D.c, D.f]) - cast(U, KZp))
Python
def depthwise_conv2D_nhwc(
        I=TensorDef(T1, S.N, S.IH, S.IW, S.IC),
        K=TensorDef(T2, S.KH, S.KW, S.IC, S.CM),
        O=TensorDef(U, S.N, S.OH, S.OW, S.IC, S.CM, output=True),
        strides=AttributeDef(S.SH, S.SW),
        dilations=AttributeDef(S.DH, S.DW)):
    """Performs depth-wise 2-D convolution.

    Numeric casting is performed on the operands to the inner multiply,
    promoting them to the same data type as the accumulator/output.
    """
    domain(D.n, D.oh, D.ow, D.kh, D.kw, D.ic, D.cm)
    O[D.n, D.oh, D.ow, D.ic, D.cm] += cast(
        U, I[D.n, D.oh * S.SH + D.kh * S.DH,
             D.ow * S.SW + D.kw * S.DW, D.ic]) * cast(U, K[D.kh, D.kw, D.ic, D.cm])
Python
def depthwise_conv2D_nhwc_q(
        I=TensorDef(T1, S.N, S.IH, S.IW, S.IC),
        K=TensorDef(T2, S.KH, S.KW, S.IC, S.CM),
        IZp=ScalarDef(I32),
        KZp=ScalarDef(I32),
        O=TensorDef(U, S.N, S.OH, S.OW, S.IC, S.CM, output=True),
        strides=AttributeDef(S.SH, S.SW),
        dilations=AttributeDef(S.DH, S.DW)):
    """Performs depth-wise 2-D convolution.

    Numeric casting is performed on the operands to the inner multiply,
    promoting them to the same data type as the accumulator/output.
    """
    domain(D.n, D.oh, D.ow, D.kh, D.kw, D.ic, D.cm)
    O[D.n, D.oh, D.ow, D.ic, D.cm] += (
        (cast(U, I[D.n, D.oh * S.SH + D.kh * S.DH,
                   D.ow * S.SW + D.kw * S.DW, D.ic]) - cast(U, IZp)) *
        (cast(U, K[D.kh, D.kw, D.ic, D.cm]) - cast(U, KZp)))
Python
def pooling_nchw_max(
        I=TensorDef(T1, S.N, S.C, S.H, S.W),
        K=TensorDef(T2, S.KH, S.KW, index_dims=[D.kh, D.kw]),
        O=TensorDef(U, S.N, S.C, S.OH, S.OW, output=True),
        strides=AttributeDef(S.SH, S.SW),
        dilations=AttributeDef(S.DH, S.DW)):
    """Performs max pooling.

    Numeric casting is performed on the input operand, promoting it to the
    same data type as the accumulator/output.
    """
    domain(D.n, D.c, D.oh, D.ow, D.kh, D.kw)
    O[D.n, D.c, D.oh, D.ow] = ReduceFn.max(D.kh, D.kw)(
        cast(U, I[D.n, D.c, D.oh * S.SH + D.kh * S.DH,
                  D.ow * S.SW + D.kw * S.DW, ]))
Python
def test(self):
    """Tests dereferencing lvalue/rvalue references via LLDB's builtin type system."""
    self.build()
    lldbutil.run_to_source_breakpoint(self, "// break here",
                                      lldb.SBFileSpec("main.cpp"))

    # Take an lvalue reference and call `Dereference` on the SBValue.
    # The result should be `TTT` (and *not* for example the underlying type
    # 'int').
    lref_val = self.expect_var_path("l_ref", type="TTT &")
    self.assertEqual(lref_val.Dereference().GetType().GetName(), "TTT")

    # Same as above for rvalue references.
    rref_val = self.expect_var_path("r_ref", type="TTT &&")
    self.assertEqual(rref_val.Dereference().GetType().GetName(), "TTT")
Python
def convenience_registers_with_process_attach(self, test_16bit_regs):
    """Test convenience registers after a 'process attach'."""
    exe = self.getBuildArtifact("a.out")

    # Spawn a new process
    pid = self.spawnSubprocess(exe, ['wait_for_attach']).pid

    if self.TraceOn():
        print("pid of spawned process: %d" % pid)

    self.runCmd("process attach -p %d" % pid)

    # Check that "register read eax" works.
    self.runCmd("register read eax")

    if self.getArchitecture() in ['amd64', 'x86_64']:
        self.expect("expr -- ($rax & 0xffffffff) == $eax",
                    substrs=['true'])

    if test_16bit_regs:
        self.expect("expr -- $ax == (($ah << 8) | $al)",
                    substrs=['true'])
Python
def make_static_lib_name(self, name):
    """Return the full filename for the specified library name"""
    if self.target_info.is_windows() and not self.target_info.is_mingw():
        assert name == 'c++'  # Only allow libc++ to use this function for now.
        return 'lib' + name + '.lib'
    else:
        return 'lib' + name + '.a'
Python
def hash_file(self, rel_path, is_text=None):
    """Read file at given relative path and return content hash as hex string
    (hash is SHA-1 hash of content).

    If is_text is None, determine whether file is text like Git does (it's
    treated as text if there's no NUL byte in first 8000 bytes). If file is
    text, the line endings are normalized to LF by stripping out any CR
    characters in the input. This is done to avoid hash differences between
    line endings on Windows (CR LF) and Linux/macOS (LF), especially with
    "automatic" line ending conversion when using Git or Subversion.
    """
    with self.open(rel_path) as file:
        chunk = file.read(self.hash_chunk_size)
        if is_text is None:
            is_text = chunk.find(b'\x00', 0, self.IS_TEXT_BYTES) == -1
        hash_obj = self.hash_class()
        while chunk:
            if is_text:
                chunk = chunk.replace(b'\r', b'')
            hash_obj.update(chunk)
            chunk = file.read(self.hash_chunk_size)
    return hash_obj.hexdigest()
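A standalone illustration (independent of the class above) of why CR bytes are stripped before hashing: the CRLF and LF encodings of the same text then produce identical SHA-1 digests.

import hashlib

# Same text, two line-ending conventions; normalizing CR away before hashing
# makes both hash to the same digest.
text_lf = b'line one\nline two\n'
text_crlf = b'line one\r\nline two\r\n'
normalized_sha1 = lambda data: hashlib.sha1(data.replace(b'\r', b'')).hexdigest()
print(normalized_sha1(text_lf) == normalized_sha1(text_crlf))  # True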
Python
def make_key(self, rel_path, file_hash):
    """Convert relative path and file hash to destination key, for example, a
    "rel_path" of 'images/logo.png' would become something like
    'images/logo_deadbeef12345678.png'. The number of characters in the hash
    part of the destination key is specified by the "hash_length" initializer
    argument.
    """
    rel_file, ext = os.path.splitext(rel_path)
    key = '{}_{:.{}}{}'.format(rel_file, file_hash, self.hash_length, ext)
    return key
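A quick standalone illustration of the same format expression, assuming a hash_length of 16 (the path and hash values below are made up):

import os

rel_file, ext = os.path.splitext('images/logo.png')
hash_length = 16  # stands in for self.hash_length
print('{}_{:.{}}{}'.format(rel_file, 'deadbeef1234567890abcdef', hash_length, ext))
# -> images/logo_deadbeef12345678.png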
Python
def walk_files(self):
    """Generate list of relative paths starting at the source root and walking
    the directory tree recursively. Relative paths in the yielded values are
    canonicalized to always use '/' (forward slash) as a path separator,
    regardless of running platform.
    """
    if isinstance(self.root, bytes):
        # Mainly because os.walk() doesn't handle Unicode chars in walked
        # paths on Windows if a bytes path is specified (easy on Python 2.x
        # with "str")
        walk_root = self.root.decode(sys.getfilesystemencoding())
    else:
        walk_root = self.root

    # Ensure that errors while walking are raised as hard errors, unless
    # ignore_walk_errors is True or it's an error listing the root dir
    # (on Python 2.x on Windows, error.filename includes the '*.*')
    def onerror(error):
        if (not self.ignore_walk_errors or error.filename == walk_root or
                error.filename == os.path.join(walk_root, '*.*')):
            raise error
        else:
            logger.debug('ignoring error scanning source tree: %s', error)

    walker = self.os_walk(walk_root, onerror=onerror,
                          followlinks=self.follow_symlinks)
    for root, dirs, files in walker:
        if not self.dot_names:
            dirs[:] = [d for d in dirs if not d.startswith('.')]
        for file in files:
            if not self.dot_names and file.startswith('.'):
                continue
            rel_path = os.path.relpath(os.path.join(root, file), walk_root)
            rel_path = rel_path.replace('\\', '/')
            if self.include and not any(fnmatch.fnmatch(rel_path, i)
                                        for i in self.include):
                continue
            if self.exclude and any(fnmatch.fnmatch(rel_path, e)
                                    for e in self.exclude):
                continue
            yield rel_path
Python
def build_key_map(self):
    """Walk directory tree starting at source root and build a dict that maps
    relative path to key including content-based hash.

    The relative paths (keys of the returned dict) are "canonical", meaning
    '\\' is converted to '/' on Windows, so that users of the mapping can
    always look up keys using 'dir/file.ext' style paths, regardless of
    operating system.
    """
    if self.cache_key_map and self._key_map is not None:
        return self._key_map
    keys_by_path = {}
    for rel_path in self.walk_files():
        file_hash = self.hash_file(rel_path)
        key = self.make_key(rel_path, file_hash)
        keys_by_path[rel_path] = key
    if self.cache_key_map:
        self._key_map = keys_by_path
    return keys_by_path
Python
def upload(self, key, source, rel_path):
    """Upload single file from source instance and relative path to
    destination at "key".
    """
    raise NotImplementedError
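A minimal sketch of what a concrete override might look like, assuming a local-directory destination with a `root` attribute and a source object exposing the same `open(rel_path)` method used by `hash_file` above; the class name and layout are hypothetical and not the package's actual FileDestination.

import os
import shutil

class LocalDirDestination:  # hypothetical subclass, for illustration only
    def __init__(self, root):
        self.root = root

    def upload(self, key, source, rel_path):
        # Copy the source file's bytes to <root>/<key>, creating intermediate
        # directories as needed; a remote destination would push bytes instead.
        dest_path = os.path.join(self.root, key)
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)
        with source.open(rel_path) as src, open(dest_path, 'wb') as dst:
            shutil.copyfileobj(src, dst)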
Python
def upload(source, destination, force=False, dry_run=False,
           continue_on_errors=False):
    """Upload missing files from source to destination (an instance of a
    Destination subclass). Return a Result namedtuple, which includes the
    source key map, set of destination keys, and upload statistics.

    If "source" is a string, FileSource(source) is used as the source
    instance. Otherwise "source" must be a FileSource instance.

    The contents of each source file are hashed by the source and included
    in the destination key. This is so that if a file changes, it's
    uploaded again under a new filename to break caching. For example,
    'images/logo.png' will become something like
    'images/logo_deadbeef12345678.png'.

    If force is True, upload even if files are there already. If dry_run is
    True, log what would be uploaded instead of actually uploading.

    If continue_on_errors is True, it will continue uploading other files
    even if some uploads fail (the default is to raise DestinationError on
    first error).
    """
    if isinstance(source, (str, bytes)):
        source = FileSource(source)
    if isinstance(destination, (str, bytes)):
        destination = FileDestination(destination)

    try:
        source_key_map = source.build_key_map()
    except Exception as error:
        raise SourceError('ERROR scanning source tree', error)

    try:
        destination_keys = set(destination.walk_keys())
    except Exception as error:
        raise DestinationError('ERROR listing keys at {}'.format(destination),
                               error)

    options = []
    if force:
        options.append('force')
    if dry_run:
        options.append('dry_run')
    if continue_on_errors:
        options.append('continue_on_errors')
    logger.info('starting upload from %s (%d files) to %s (%d existing keys)%s',
                source, len(source_key_map), destination, len(destination_keys),
                ', options: ' + ', '.join(options) if options else '')

    num_scanned = 0
    num_uploaded = 0
    num_errors = 0
    for rel_path, key in sorted(source_key_map.items()):
        num_scanned += 1

        if not force and key in destination_keys:
            logger.debug('already uploaded %s, skipping', key)
            continue

        if key in destination_keys:
            verb = 'would force upload' if dry_run else 'force uploading'
        else:
            verb = 'would upload' if dry_run else 'uploading'
        logger.warning('%s %s to %s', verb, rel_path, key)
        if not dry_run:
            try:
                destination.upload(key, source, rel_path)
                num_uploaded += 1
            except Exception as error:
                if not continue_on_errors:
                    raise DestinationError('ERROR uploading to {}'.format(key),
                                           error, key=key)
                logger.error('ERROR uploading to %s: %s', key, error)
                num_errors += 1
        else:
            num_uploaded += 1

    logger.info('finished upload: uploaded %d, skipped %d, errors with %d',
                num_uploaded, len(source_key_map) - num_uploaded, num_errors)

    result = Result(source_key_map, destination_keys,
                    num_scanned, num_uploaded, num_errors)
    return result
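A minimal usage sketch of upload(). The import path and the 'static' and '/var/www/cdn' paths are placeholders; plain strings are accepted for source and destination because they are wrapped in FileSource/FileDestination as shown above.

from cdnupload import upload

# dry run first: logs what would be uploaded without touching the destination
result = upload('static', '/var/www/cdn', dry_run=True)
print(result.num_errors)          # Result fields as built in the return statement above

# real upload, carrying on past individual failures
result = upload('static', '/var/www/cdn', continue_on_errors=True)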
Python
def delete(source, destination, force=False, dry_run=False, continue_on_errors=False): """Delete files from destination (an instance of a Destination subclass) that are no longer present in source tree. Return a Result namedtuple, which includes the source key map, set of destination keys, and deletion statistics. If "source" is a string, FileSource(source) is used as the source instance. Otherwise "source" must be a FileSource instance. This function does a sanity check to ensure you're not deleting ALL keys at the destination by accident (for example, specifying an empty directory for the source tree). If it would delete all destination keys, it raises DeleteAllKeysError. To override and delete all anyway, specify force=True. If dry_run is True, log what would be deleted instead of actually deleting. If continue_on_errors is True, it will continue deleting other files even if some deletes fail (the default is to raise DestinationError on first error). """ if isinstance(source, (str, bytes)): source = FileSource(source) if isinstance(destination, (str, bytes)): destination = FileDestination(destination) try: source_key_map = source.build_key_map() except Exception as error: raise SourceError('ERROR scanning source tree', error) source_keys = set(source_key_map.values()) try: destination_keys = set(destination.walk_keys()) except Exception as error: raise DestinationError('ERROR listing keys at {}'.format(destination), error) options = [] if dry_run: options.append('dry_run') if continue_on_errors: options.append('continue_on_errors') logger.info('starting delete from %s (%d files) to %s (%d existing keys)%s', source, len(source_key_map), destination, len(destination_keys), ', options: ' + ', '.join(options) if options else '') if not force: num_to_delete = sum(1 for k in destination_keys if k not in source_keys) if num_to_delete >= len(destination_keys): raise DeleteAllKeysError( "ERROR - would delete all {} destination keys, " "you probably didn't intend this! (use -f/--force or " "force=True to override)".format(len(destination_keys))) num_scanned = 0 num_deleted = 0 num_errors = 0 for key in sorted(destination_keys): num_scanned += 1 if key in source_keys: logger.debug('still using %s, skipping', key) continue verb = 'would delete' if dry_run else 'deleting' logger.warning('%s %s', verb, key) if not dry_run: try: destination.delete(key) num_deleted += 1 except Exception as error: if not continue_on_errors: raise DestinationError('ERROR deleting {}'.format(key), error, key=key) logger.error('ERROR deleting %s: %s', key, error) num_errors += 1 else: num_deleted += 1 logger.info('finished delete: deleted %d, errors with %d', num_deleted, num_errors) result = Result(source_key_map, destination_keys, num_scanned, num_deleted, num_errors) return result
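A matching sketch for delete(), typically run after a successful upload so that only keys no longer referenced by the source tree are removed. The import path and directory paths are placeholders, assuming these names are exported by the module.

from cdnupload import upload, delete, DeleteAllKeysError

upload('static', '/var/www/cdn')
try:
    delete('static', '/var/www/cdn', dry_run=True)   # preview what would be removed
except DeleteAllKeysError:
    # the sanity check described above: refuses to wipe every destination key
    raise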
Python
def main(args=None):
    """Command line endpoint for uploading/deleting. If args not specified,
    the sys.argv command line arguments are used. Run "cdnupload.py -h" for
    detailed help on the arguments.
    """
    if args is None:
        args = sys.argv[1:]

    description = """
cdnupload {version} -- (c) Ben Hoyt 2017 -- github.com/benhoyt/cdnupload

Upload static files from given source directory to destination directory
or Amazon S3 bucket, with content-based hash in filenames for versioning.
""".format(version=__version__)

    parser = argparse.ArgumentParser(
        description=description,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument('source',
                        help='source directory')
    parser.add_argument('destination',
                        help='destination directory (or s3://bucket/path)')
    parser.add_argument('dest_args', nargs='*', default=[],
                        help='optional Destination() keyword args, for example: '
                             '"max-age=3600"')
    parser.add_argument('-a', '--action', default='upload',
                        choices=['upload', 'delete', 'dest-help'],
                        help='action to perform (upload, delete, or show help '
                             'for given Destination class), default %(default)s')
    parser.add_argument('-d', '--dry-run', action='store_true',
                        help='show what script would upload or delete instead of '
                             'actually doing it')
    parser.add_argument('-e', '--exclude', action='append', metavar='PATTERN',
                        help='exclude source file if its relative path '
                             'matches, for example *.txt or __pycache__/* '
                             '(may be specified multiple times)')
    parser.add_argument('-f', '--force', action='store_true',
                        help='force upload even if destination file already exists, '
                             'or force delete even if it would delete all keys at '
                             'destination')
    parser.add_argument('-i', '--include', action='append', metavar='PATTERN',
                        help='only include source file if its relative path '
                             'matches, for example *.png or images/* (may be '
                             'specified multiple times)')
    parser.add_argument('-k', '--key-map', metavar='FILENAME',
                        help='write source key map to given file as JSON '
                             '(but only after successful upload or delete)')
    parser.add_argument('-l', '--log-level', default='default',
                        choices=[k for k, v in LOG_LEVELS],
                        help='set logging level')
    parser.add_argument('-v', '--version', action='version', version=__version__)

    less_common = parser.add_argument_group('less commonly-used arguments')
    less_common.add_argument('--continue-on-errors', action='store_true',
                             help='continue after upload or delete errors')
    less_common.add_argument('--dot-names', action='store_true',
                             help="include source files and directories starting "
                                  "with '.'")
    less_common.add_argument('--follow-symlinks', action='store_true',
                             help='follow symbolic links when walking source tree')
    less_common.add_argument('--hash-length', default=DEFAULT_HASH_LENGTH,
                             type=int, metavar='N',
                             help='number of hex chars of hash to use for '
                                  'destination key (default %(default)d)')
    less_common.add_argument('--ignore-walk-errors', action='store_true',
                             help='ignore errors when walking source tree, '
                                  'except for error on root directory')
    less_common.add_argument('--license',
                             help="deprecated (cdnupload now has a simple MIT license)")

    # parse the explicit args list if one was given (defaults to sys.argv[1:] above)
    args = parser.parse_args(args)

    logging.basicConfig(level=logging.WARNING, format='%(message)s')
    log_level = next(v for k, v in LOG_LEVELS if k == args.log_level)
    logger.setLevel(log_level)

    match = re.match(r'(\w+):', args.destination)
    if match:
        scheme = match.group(1)
        if scheme == 's3':
            destination_class = S3Destination
        else:
            module_name = 'cdnupload_' + scheme
            try:
                module = __import__(module_name)
            except ImportError as error:
                parser.error("can't import handler for scheme {!r}: {}".format(
                    scheme, error))
            if not hasattr(module, 'Destination'):
                parser.error('{} module has no Destination class'.format(
                    module_name))
            destination_class = getattr(module, 'Destination')
    else:
        destination_class = FileDestination

    if args.action == 'dest-help':
        import inspect
        import textwrap

        arg_spec = inspect.getargspec(destination_class.__init__)
        args_str = inspect.formatargspec(*arg_spec)
        if args_str.startswith('(self, '):
            args_str = '(' + args_str[7:]
        args_wrapped = textwrap.fill(
            args_str, width=79, initial_indent=' ' * 4,
            subsequent_indent=' ' * 5, break_long_words=False,
            break_on_hyphens=False)
        print('{}'.format(destination_class.__name__))
        print(args_wrapped)
        print()
        print(inspect.getdoc(destination_class))
        return 0

    source = FileSource(
        args.source,
        dot_names=args.dot_names,
        include=args.include,
        exclude=args.exclude,
        ignore_walk_errors=args.ignore_walk_errors,
        follow_symlinks=args.follow_symlinks,
        hash_length=args.hash_length,
    )

    dest_kwargs = {}
    for arg in args.dest_args:
        name, sep, value = arg.partition('=')
        if not sep:
            value = True
        name = name.replace('-', '_')
        existing = dest_kwargs.get(name)
        if existing is not None:
            if isinstance(existing, list):
                existing.append(value)
            else:
                dest_kwargs[name] = [existing, value]
        else:
            dest_kwargs[name] = value

    try:
        destination = destination_class(args.destination, **dest_kwargs)
    except Exception as error:
        logger.error('ERROR creating %s instance: %s',
                     destination_class.__name__, error)
        return 1

    action_args = dict(
        source=source,
        destination=destination,
        force=args.force,
        dry_run=args.dry_run,
        continue_on_errors=args.continue_on_errors,
    )
    try:
        if args.action == 'upload':
            result = upload(**action_args)
        elif args.action == 'delete':
            result = delete(**action_args)
        else:
            # unreachable: argparse restricts --action to the choices above
            assert False, 'unexpected action {!r}'.format(args.action)
        num_errors = result.num_errors
    except Error as error:
        logger.error('%s', error)
        num_errors = 1

    if num_errors == 0 and args.key_map:
        try:
            logger.info('writing key map JSON to {}'.format(args.key_map))
            with open(args.key_map, 'w') as f:
                json.dump(result.source_key_map, f, sort_keys=True, indent=4)
        except Exception as error:
            logger.error('ERROR writing key map file: {}'.format(error))
            num_errors += 1

    return 1 if num_errors else 0
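Because main() accepts an explicit argument list, the command-line interface can also be driven programmatically. A sketch using flags defined above; the import path, bucket name, and file paths are placeholders.

from cdnupload import main

exit_code = main([
    'static', 's3://my-bucket/assets',    # source and destination
    '--exclude', '*.map',
    '--key-map', 'keys.json',
    '--dry-run',
])
print('exit code:', exit_code)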
Python
def print_weight(self, weight: int) -> None:
    """
    Print a numerical value on the scoreboard.

    Note that this should be re-sent frequently, because the scoreboard can
    interrupt the weight display.
    """
    self._port.write((str(weight).zfill(7)[::-1] + '=').encode('ascii'))
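A worked example of the framing above: the weight is zero-padded to seven digits, reversed, and terminated with '=' before being written to the serial port.

weight = 1500
frame = (str(weight).zfill(7)[::-1] + '=').encode('ascii')
# str(1500).zfill(7) -> '0001500'
# reversed           -> '0051000'
# frame             == b'0051000='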
Python
def pyvboxmanage(configuration_files, setting, verbose, quiet, dry_run):
    """
    PyVBoxManage is a wrapper tool around VBoxManage that facilitates the orchestration of
    VBoxManage commands from a simple YAML configuration file that matches the input opts/args
    for VBoxManage.

    This makes it possible to implement common sequences of VBoxManage commands such as spinning
    up a new dev/test instance with different hardware just by using a single command line with
    a configuration file.

    Variables, output redirection, exit-triggers and returncode-exceptions are available to
    enable flexible setups.

    Documentation is available at https://pyvboxmanage.readthedocs.io
    """
    ctx = click.get_current_context()
    ctx.ensure_object(dict)

    if quiet:
        logger.init(name=NAME, level='critical')
    elif verbose:
        logger.init(name=NAME, level='debug')
    else:
        logger.init(name=NAME, level='info')
    logger.debug('{} v{}'.format(NAME, VERSION))

    variable_settings = {}
    for s in setting:
        if '=' not in s:
            raise PyVBoxManageException('--setting values must be in the format var_name="Some value"', s)
        variable_settings[s.partition('=')[0].strip()] = s.partition('=')[-1].strip()

    PyVBoxManage(configuration_files=configuration_files, variable_settings=variable_settings, dry_run=dry_run).main()
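The --setting parsing above splits each value on the first '=' and strips whitespace on both sides; a quick illustration with a made-up variable name:

s = 'vm_name = devbox01'
name, _, value = s.partition('=')
# name.strip() == 'vm_name', value.strip() == 'devbox01'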
Python
def five_year_check(stockTicker):
    """Figure out if a stock has risen or been created within the last five years.

    Args:
        stockTicker(str): Symbol of the stock we're querying

    Returns:
        True if the stock's current price is higher than it was five years ago, or the stock IPO'd within the last five years
        False otherwise
    """
    instrument = r.get_instruments_by_symbols(stockTicker)
    list_date = instrument[0].get("list_date")
    # roughly five years; pandas Timedelta does not accept ambiguous 'Y' (year) units
    if ((pd.Timestamp("now") - pd.to_datetime(list_date)) < pd.Timedelta(days=5 * 365)):
        return True
    fiveyear = r.get_historicals(stockTicker, span='5year', bounds='regular')
    closingPrices = []
    for item in fiveyear:
        closingPrices.append(float(item['close_price']))
    recent_price = closingPrices[-1]
    oldest_price = closingPrices[0]
    return (recent_price > oldest_price)
Python
def golden_cross(stockTicker, n1, n2, days, direction=""):
    """Determine if a golden/death cross has occurred for a specified stock in the
    last X trading days

    Args:
        stockTicker(str): Symbol of the stock we're querying
        n1(int): Specifies the short-term indicator as an X-day moving average.
        n2(int): Specifies the long-term indicator as an X-day moving average.
                 (n1 should be smaller than n2 to produce meaningful results, e.g. n1=50, n2=200)
        days(int): Specifies the maximum number of days that the cross can occur by
        direction(str): "above" if we are searching for an upwards cross, "below" if we are searching for a downwards cross. Optional, used for printing purposes

    Returns:
        1 if the short-term indicator crosses above the long-term one
        0 if there is no cross between the indicators
        -1 if the short-term indicator crosses below the long-term one
        False if direction == "above" and five_year_check(stockTicker) returns False, meaning that we're considering whether to buy the stock but it hasn't risen overall in the last five years, suggesting it contains fundamental issues
    """
    if(direction == "above" and not five_year_check(stockTicker)):
        return False

    history = r.get_historicals(stockTicker, span='year', bounds='regular')
    closingPrices = []
    dates = []
    for item in history:
        closingPrices.append(float(item['close_price']))
        dates.append(item['begins_at'])

    price = pd.Series(closingPrices)
    dates = pd.Series(dates)
    dates = pd.to_datetime(dates)
    sma1 = ta.volatility.bollinger_mavg(price, n=int(n1), fillna=False)
    sma2 = ta.volatility.bollinger_mavg(price, n=int(n2), fillna=False)
    series = [price.rename("Price"), sma1.rename("Indicator1"),
              sma2.rename("Indicator2"), dates.rename("Dates")]
    df = pd.concat(series, axis=1)
    cross = get_last_crossing(df, days, symbol=stockTicker, direction=direction)
    # if(cross):
    #     show_plot(price, sma1, sma2, dates, symbol=stockTicker, label1=str(n1)+" day SMA", label2=str(n2)+" day SMA")
    return cross
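Example call matching how scan_stocks() below uses this function (a 50/200-day cross within the last 10 trading days); the ticker is just an example.

cross = golden_cross('MSFT', n1=50, n2=200, days=10, direction="above")
if cross == 1:
    print("golden cross detected - candidate buy")
elif cross is False:
    print("skipped: stock has not risen over the last five years")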
Python
def sell_holdings(symbol, holdings_data): """ Place an order to sell all holdings of a stock. Args: symbol(str): Symbol of the stock we want to sell holdings_data(dict): dict obtained from get_modified_holdings() method """ shares_owned = int(float(holdings_data[symbol].get("quantity"))) r.order_sell_market(symbol, shares_owned) print("####### Selling " + str(shares_owned) + " shares of " + symbol + " #######")
Python
def buy_holdings(potential_buys, profile_data, holdings_data):
    """ Places orders to buy holdings of stocks. This method will try to order
        an appropriate amount of shares such that your holdings of the stock will
        roughly match the average for the rest of your portfolio. If the share
        price is too high considering the rest of your holdings and the amount of
        buying power in your account, it will not order any shares.

    Args:
        potential_buys(list): List of strings, the strings are the symbols of stocks we want to buy
        profile_data(dict): dict obtained from r.build_user_profile() method
        holdings_data(dict): dict obtained from r.build_holdings() or get_modified_holdings() method
    """
    cash = float(profile_data.get('cash'))
    portfolio_value = float(profile_data.get('equity')) - cash
    ideal_position_size = (safe_division(portfolio_value, len(holdings_data)) + cash / len(potential_buys)) / (2 * len(potential_buys))
    prices = r.get_latest_price(potential_buys)
    for i in range(0, len(potential_buys)):
        stock_price = float(prices[i])
        if(ideal_position_size < stock_price < ideal_position_size * 1.5):
            num_shares = int(ideal_position_size * 1.5 / stock_price)
        elif (stock_price < ideal_position_size):
            num_shares = int(ideal_position_size / stock_price)
        else:
            print("####### Tried buying shares of " + potential_buys[i] + ", but not enough buying power to do so #######")
            break
        print("####### Buying " + str(num_shares) + " shares of " + potential_buys[i] + " #######")
        r.order_buy_market(potential_buys[i], num_shares)
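A quick worked example of the position-sizing formula above, with made-up numbers (safe_division is assumed to be a plain division that guards against a zero denominator):

cash = 1000.0
portfolio_value = 9000.0          # equity minus cash
num_holdings = 9                  # len(holdings_data)
num_buys = 2                      # len(potential_buys)

ideal_position_size = (portfolio_value / num_holdings + cash / num_buys) / (2 * num_buys)
# (1000.0 + 500.0) / 4 == 375.0 dollars per new position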
Python
def scan_stocks(): """ The main method. Sells stocks in your portfolio if their 50 day moving average crosses below the 200 day, and buys stocks in your watchlist if the opposite happens. ############################################################################################### WARNING: Comment out the sell_holdings and buy_holdings lines if you don't actually want to execute the trade. ############################################################################################### If you sell a stock, this updates tradehistory.txt with information about the position, how much you've earned/lost, etc. """ print("----- Starting scan... -----\n") register_matplotlib_converters() watchlist_symbols = get_watchlist_symbols() portfolio_symbols = get_portfolio_symbols() holdings_data = get_modified_holdings() potential_buys = [] sells = [] print("Current Portfolio: " + str(portfolio_symbols) + "\n") print("Current Watchlist: " + str(watchlist_symbols) + "\n") print("----- Scanning portfolio for stocks to sell -----\n") for symbol in portfolio_symbols: cross = golden_cross(symbol, n1=50, n2=200, days=30, direction="below") if(cross == -1): sell_holdings(symbol, holdings_data) sells.append(symbol) profile_data = r.build_user_profile() print("\n----- Scanning watchlist for stocks to buy -----\n") for symbol in watchlist_symbols: if(symbol not in portfolio_symbols): cross = golden_cross(symbol, n1=50, n2=200, days=10, direction="above") if(cross == 1): potential_buys.append(symbol) if(len(potential_buys) > 0): buy_holdings(potential_buys, profile_data, holdings_data) if(len(sells) > 0): update_trade_history(sells, holdings_data, "tradehistory.txt") print("----- Scan over -----\n")
Python
def process_image(imagename, resultname='temp.sift'): """ process an image and save the results in a .key ascii file""" print "working on ", imagename # if dense == False: if imagename[-3:] != 'pgm': # create a pgm file, image is resized, if it is too big. # sift returns an error if more than 8000 features are found size = (MAXSIZE, MAXSIZE) im = Image.open(imagename).convert('L') im.thumbnail(size, Image.ANTIALIAS) im.save('tmp.pgm') imagename = 'tmp.pgm' # check if linux or windows if os.name == "posix": cmmd = "./sift < " + imagename + " > " + resultname else: cmmd = "siftWin32 < " + imagename + " > " + resultname # run extraction command returnvalue = subprocess.call(cmmd, shell=True) if returnvalue == 127: # removing empty resultfile created by output redirection os.remove(resultname) raise IOError("SIFT executable not found") if returnvalue == 2: # removing empty resultfile created by output redirection os.remove(resultname) raise IOError("image " + imagename + " not found") if os.path.getsize(resultname) == 0: raise IOError("extracting SIFT features failed " + resultname)
Python
def read_features_from_file(filename='temp.sift', dense=False):
    """ read feature properties and return in matrix form"""

    if not exists(filename) or os.path.getsize(filename) == 0:
        raise IOError("wrong file path or file empty: " + filename)
    if dense == True:
        with open(filename, 'rb') as f:
            locs, descriptors = cPickle.load(f)
    else:
        f = open(filename, 'r')
        header = f.readline().split()

        num = int(header[0])  # the number of features
        featlength = int(header[1])  # the length of the descriptor
        if featlength != 128:  # should be 128 in this case
            raise RuntimeError(
                'Keypoint descriptor length invalid (should be 128).')

        locs = zeros((num, 4))
        descriptors = zeros((num, featlength))

        # parse the .key file
        e = f.read().split()  # split the rest into individual elements
        pos = 0
        for point in range(num):
            # row, col, scale, orientation of each feature
            for i in range(4):
                locs[point, i] = float(e[pos + i])
            pos += 4

            # the descriptor values of each feature
            for i in range(featlength):
                descriptors[point, i] = int(e[pos + i])
            # print descriptors[point]
            pos += 128

            # normalize each input vector to unit length
            descriptors[point] = descriptors[point] / \
                linalg.norm(descriptors[point])  # doubt!!
            # print descriptors[point]

        f.close()

    return locs, descriptors
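End-to-end sketch of the two functions above (Python 2 style, to match the surrounding code); 'book.jpg' is a placeholder image and the sift binary must be present as described in process_image().

process_image('book.jpg', 'book.sift')            # runs the external sift binary
locs, descriptors = read_features_from_file('book.sift')
# locs: one row per keypoint (row, col, scale, orientation)
# descriptors: one unit-length 128-dimensional vector per keypoint
print locs.shape, descriptors.shape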
Python
def service_server_status(): """ Indicate the service has started up correctly and is ready to accept requests. **Method** GET **Url** /status Returns: * **200 OK** When the service is ready to receive requests. """ try: if car_pooling is not None: return Response(status_code=status.HTTP_200_OK) except Exception: return
Python
async def service_car_load(car_load: list, request: Request): """ Load the list of available cars in the service and remove all previous data (existing journeys and cars). This method may be called more than once during the life cycle of the service. **Method** PUT **Url** /cars **Body** _required_ The list of cars to load. **Content Type** `application/json` Sample: [ { "id": 1, "seats": 4 }, { "id": 2, "seats": 6 } ] Returns: * **200 OK** When the list is registered correctly. * **400 Bad Request** When there is a failure in the request format, expected headers, or the payload can't be unmarshalled. """ try: if request.headers["content-type"] != 'application/json': # bad content type return Response(status_code=status.HTTP_400_BAD_REQUEST) if car_load is None: # null request return Response(status_code=status.HTTP_400_BAD_REQUEST) if len(car_load) >= 0: # at least one car: bad request for car in car_load: if len(car) >= 2: # at least two items car_id = car['id'] # must be 'id' car_seats = car['seats'] # must be 'seats' if car_pooling.add(int(car_id), int(car_seats)) is None: return Response(status_code=status.HTTP_400_BAD_REQUEST) else: return Response(status_code=status.HTTP_400_BAD_REQUEST) return Response(status_code=status.HTTP_200_OK) else: return Response(status_code=status.HTTP_400_BAD_REQUEST) except Exception: return Response(status_code=status.HTTP_400_BAD_REQUEST)
Python
async def service_journey_request(journey_request: dict, request: Request): """ A group of people requests to perform a journey. **Method** POST **Url** /journey **Body** _required_ The group of people that wants to perform the journey **Content Type** `application/json` Sample: { "id": 1, "people": 4 } Returns: * **200 OK** or **202 Accepted** When the group is registered correctly * **400 Bad Request** When there is a failure in the request format or the payload can't be unmarshalled. """ try: if request.headers["content-type"] != 'application/json': return Response(status_code=status.HTTP_400_BAD_REQUEST) if journey_request is None: return Response(status_code=status.HTTP_400_BAD_REQUEST) if len(journey_request) >= 2: journey_id = journey_request['id'] journey_passengers = journey_request['people'] allocated_car = car_pooling.journey(journey_id, journey_passengers) if allocated_car == CarPooling.BAD_REQUEST: return Response(status_code=status.HTTP_400_BAD_REQUEST) return Response(status_code=status.HTTP_202_ACCEPTED) else: return Response(status_code=status.HTTP_400_BAD_REQUEST) except Exception: return Response(status_code=status.HTTP_400_BAD_REQUEST)
Python
async def service_dropoff_request(*, ID: str = Form(...), request: Request): """ A group of people requests to be dropped off. Whether they traveled or not. **Method** POST **Url** /dropoff **Body** _required_ A form with the group ID, such that `ID=X` **Content Type** `application/x-www-form-urlencoded` Returns: * **200 OK** or **204 No Content** When the group is unregistered correctly. * **404 Not Found** When the group is not to be found. * **400 Bad Request** When there is a failure in the request format or the payload can't be unmarshalled. """ try: if request.headers["content-type"] != 'application/x-www-form-urlencoded': return Response(status_code=status.HTTP_400_BAD_REQUEST) if ID is None: return Response(status_code=status.HTTP_400_BAD_REQUEST) journey_id = int(ID) group_id = car_pooling.drop_off(journey_id) if group_id is None: # drop-off group not found return Response(status_code=status.HTTP_404_NOT_FOUND) else: # group unregistered correctly return Response(status_code=status.HTTP_204_NO_CONTENT) except Exception: return Response(status_code=status.HTTP_400_BAD_REQUEST)
Python
async def service_location_request(*, ID: str = Form(...), request: Request): """ Given a group ID such that `ID=X`, return the car the group is traveling with, or no car if they are still waiting to be served (i.e.- journey requested) **Method** POST **Url** /location **Body** _required_ A url encoded form with the group ID such that `ID=X` **Content Type** `application/x-www-form-urlencoded` **Accept** `application/json` Returns: * **200 OK** With the car as the payload when the group is assigned to a car. * **204 No Content** When the group is waiting to be assigned to a car. * **404 Not Found** When the group is not to be found. * **400 Bad Request** When there is a failure in the request format or the payload can't be unmarshalled. """ try: if request.headers["content-type"] != 'application/x-www-form-urlencoded': return Response(status_code=status.HTTP_400_BAD_REQUEST) if ID is None: return Response(status_code=status.HTTP_400_BAD_REQUEST) journey_id = int(ID) car_id = car_pooling.journey_location.is_allocated(journey_id) if car_id is None: # car ID not found return Response(status_code=status.HTTP_404_NOT_FOUND) elif car_pooling.journey_request.is_waiting(journey_id): # car ID waiting return Response(status_code=status.HTTP_204_NO_CONTENT) else: # car ID located return JSONResponse(content={'car_id': car_id}, status_code=status.HTTP_200_OK) except Exception: return Response(status_code=status.HTTP_400_BAD_REQUEST)
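A client-side sketch of the whole flow defined by the endpoints above, using the requests library. The base URL and port are assumptions; the service's actual binding is configured elsewhere.

import requests

BASE = 'http://localhost:9091'   # hypothetical host/port

# load cars (application/json, as required by the PUT /cars handler)
requests.put(BASE + '/cars', json=[{'id': 1, 'seats': 4}, {'id': 2, 'seats': 6}])

# request a journey for a group of four people
requests.post(BASE + '/journey', json={'id': 1, 'people': 4})

# check where the group is (form-encoded, as required by the handlers)
loc = requests.post(BASE + '/location', data={'ID': 1})
# 200 with {'car_id': ...} if assigned, 204 while waiting, 404 if unknown

# drop the group off
requests.post(BASE + '/dropoff', data={'ID': 1})
# 204 when the group is unregistered, 404 if the group is not found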
Python
def on_channel_open(self, channel): """Overwrite the on_channel_open method. Run all of the normal on_channel_open commands, then create the HEARTBEAT_MESSAGES and DEVICE_MESSAGES objects. """ super().on_channel_open(channel) self.HEARTBEAT_MESSAGES = HeartbeatMessage(self._connection, self._channel) self.DEVICE_MESSAGES = DeviceMessage(self._channel)
Python
def stop(self): """Overwrite the stop method. Stop the HEARTBEAT_MESSAGES and DEVICE_MESSAGES objects, then stop the rest of the items. """ self.HEARTBEAT_MESSAGES.set_stopping(True) self.DEVICE_MESSAGES.set_stopping(True) self.HEARTBEAT_MESSAGES.stop_consuming() self.DEVICE_MESSAGES.stop_consuming() super().stop()
Python
def stop_consuming(self): """Overwrite the stop_consuming method. Close the database session. """ self._session.close() super().stop_consuming()
Python
def schedule_device_check(self): """If we are not closing our connection to RabbitMQ, schedule another device check.""" if self._stopping: return self._connection.ioloop.call_later( self.DEVICE_CONNECTION_INTERVAL, self.device_check )
Python
def on_new_device(self, device_id): """Check if device has been configured or not when a new device connects.""" device = self._session.query(Device).filter_by(device_id=device_id).first() # device has not been added to the db if not device: # device has been seen before if device_id in NEW_DEVICES: NEW_DEVICES[device_id].on_message_received() # device has not been seen before else: self.LOGGER.info(f"{device_id} connected. Has not been configured yet.") NEW_DEVICES[device_id] = DeviceRep(device_id) return "new" # device has been added to the db but was currently a NEW_DEVICE if device_id in NEW_DEVICES: self.LOGGER.info(f"{device_id} now connected and configured.") device_object = NEW_DEVICES.pop(device_id) device_object.on_message_received() CONNECTED_DEVICES[device_id] = device_object # device has been added to the db but has not been seen yet else: self.LOGGER.info(f"{device_id} connected.") CONNECTED_DEVICES[device_id] = DeviceRep(device_id) device.connected = True self._session.commit() return "connected"
Python
def on_message(self, unused_channel, basic_deliver, properties, body):
    """Invoked by pika when a message is delivered from RabbitMQ.

    The channel is passed for your convenience. The basic_deliver object that
    is passed in carries the exchange, routing key, delivery tag and a
    redelivered flag for the message. The properties passed in is an instance
    of BasicProperties with the message properties and the body is the message
    that was sent.

    :param pika.channel.Channel unused_channel: The channel object
    :param pika.spec.Basic.Deliver basic_deliver: The basic_deliver method
    :param pika.spec.BasicProperties properties: The message properties
    :param str|unicode body: The message body
    """
    payload = json.loads(body)
    command = payload["command"]
    device_id = payload["id"]
    return_message = "done"

    LOGGER.debug(f"Received request {command} for {device_id}")

    if command == "device_status":
        if device_id in NEW_DEVICES:
            return_message = "new"
        elif device_id in CONNECTED_DEVICES:
            return_message = "connected"
        else:
            return_message = "disconnected"
    else:
        return_message = "unknown command"

    self.acknowledge_message(basic_deliver.delivery_tag)

    reply_properties = pika.BasicProperties()
    self._channel.basic_publish(
        exchange="",
        routing_key=properties.reply_to,
        properties=reply_properties,
        body=return_message,
    )
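A hedged sketch of the client side of this RPC: only the payload shape ({"command", "id"}) and the plain-text reply come from the handler above; the broker host and the request queue name are assumptions.

import json

import pika


def request_device_status(device_id, host="localhost", request_queue="device_messages"):
    """Publish a device_status request with a reply_to queue and wait for the answer."""
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=host))
    channel = connection.channel()

    # exclusive, broker-named queue that the reply will be routed to
    result = channel.queue_declare(queue="", exclusive=True)
    callback_queue = result.method.queue

    reply = {}

    def on_reply(ch, method, properties, body):
        reply["status"] = body.decode()
        ch.stop_consuming()

    channel.basic_consume(queue=callback_queue, on_message_callback=on_reply, auto_ack=True)
    channel.basic_publish(
        exchange="",
        routing_key=request_queue,  # assumed queue name for the device message consumer
        properties=pika.BasicProperties(reply_to=callback_queue),
        body=json.dumps({"command": "device_status", "id": device_id}),
    )
    channel.start_consuming()
    connection.close()
    return reply.get("status")  # "new", "connected", or "disconnected"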
Python
def register_teardown_request(app):
    """Register the teardown context function."""

    def teardown_request(_):
        """The teardown app context function that is called for every request."""
        session = get_session()
        session.remove()

    app.teardown_appcontext(teardown_request)
Python
def teardown_request(_):
    """The teardown app context function that is called for every request."""
    session = get_session()
    session.remove()
Python
def _create(cls, model_class, *args, **kwargs):
    """Override the _create classmethod.

    Does not actually change from the default, but for some reason it needs to
    be specified otherwise SubFactory elements do not get the primary key
    created correctly.
    """
    obj = model_class(*args, **kwargs)
    obj.save()
    return obj
Python
def delete(user_id):
    """Delete a User by ID."""
    dbsession = get_session()
    item = User.get_by_id(user_id, session=dbsession)
    if item is None:
        abort(404, message="User not found.")
    item.delete(session=dbsession)
Python
def create(cls, session=None, **kwargs):
    """Create a new record and save it to the database."""
    if session is None:
        session = get_session()
    instance = cls(**kwargs)
    return instance.save(session)
Python
def update(self, session=None, commit=True, **kwargs):
    """Update specific fields of a record."""
    if session is None:
        session = get_session()
    for attr, value in kwargs.items():
        setattr(self, attr, value)
    return self.save(session) if commit else self
Python
def delete(self, session=None, commit=True):
    """Remove the record from the database."""
    if session is None:
        session = get_session()
    session.delete(self)
    return commit and session.commit()
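A small usage sketch for the create/update/delete helpers above; the model class and the field names are placeholders standing in for any model that mixes these methods in.

def crud_example(session, model_cls):
    """Illustrative round trip through the CRUD mixin methods."""
    instance = model_cls.create(session=session, name="example")  # INSERT and commit
    instance.update(session=session, name="renamed")  # UPDATE and commit
    instance.delete(session=session)  # DELETE and commit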
Python
def reference_col(tablename, nullable=False, pk_name="id", **kwargs):
    """Column that adds a foreign key reference to another table's primary key.

    Usage: ::

        category_id = reference_col('category')
        category = relationship('Category', backref='categories')
    """
    return Column(ForeignKey(f"{tablename}.{pk_name}"), nullable=nullable, **kwargs)
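A slightly fuller sketch of the pattern from the docstring above, assuming standard SQLAlchemy declarative imports (1.4+); the table and class names are illustrative only.

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Category(Base):
    __tablename__ = "category"
    id = Column(Integer, primary_key=True)
    name = Column(String(80), nullable=False)


class Item(Base):
    __tablename__ = "item"
    id = Column(Integer, primary_key=True)
    category_id = reference_col("category")  # foreign key to category.id, NOT NULL
    category = relationship("Category", backref="items")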
Python
def login_with_creds(self, username, password):
    """Send the login data to the login url."""
    return self.post(
        url_for("public.home"), data=dict(username=username, password=password)
    )
Python
def flaskclient(app):
    """Create a flask test client for tests. Alternative to testapp that supports logging a user in."""
    app.test_client_class = HtmlTestClient
    with app.test_client() as client:
        yield client
Python
def tables(dbsession):
    """Create all tables for testing. Delete when done."""
    create_all_tables()
    yield
    dbsession.close()
    drop_all_tables()
Python
def user(tables):
    """A user for the tests."""
    user = UserFactory(password="myprecious")
    user.save()
    return user
Python
def admin_user(tables):
    """An admin user for the tests."""
    user = User(username="admin", email="[email protected]", password="admin")
    user.is_admin = True
    user.save()
    return user
Python
def auth_headers(admin_user, flaskclient, tables):
    """Log in the admin user and get an access_token."""
    data = {"username": admin_user.username, "password": "admin"}
    url = url_for("api_auth.AuthLogin")
    rep = flaskclient.post(
        url,
        json=data,
        headers={"content-type": "application/json"},
    )
    tokens = rep.get_json()

    return {
        "content-type": "application/json",
        "authorization": f"Bearer { tokens['access_token'] }",
    }
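A hedged example of a test consuming the auth_headers fixture above; the route endpoint name is hypothetical and only illustrates passing the JWT headers along.

from flask import url_for


def test_list_users_with_auth(flaskclient, auth_headers, tables):
    """Call a protected endpoint with the JWT auth headers."""
    url = url_for("api_user.Users")  # hypothetical endpoint name
    rep = flaskclient.get(url, headers=auth_headers)
    assert rep.status_code == 200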
Python
def send_create():
    """Send a 'create' message via RabbitMQ to all connected devices."""
    click.echo("sending create message to all devices")
    send_create_message()
Python
def create_revision(message):
    """Create a database migration using alembic."""
    config = get_config()
    alembic_cnf = AlConfig(config.PROJECT_ROOT + "/migrations/alembic.ini")
    alembic_cnf.set_main_option("script_location", config.PROJECT_ROOT + "/migrations")

    al_command.revision(alembic_cnf, message=message, autogenerate=True)
Python
def database_upgrade(revision):
    """Upgrade database to given revision."""
    config = get_config()
    alembic_cnf = AlConfig(config.PROJECT_ROOT + "/migrations/alembic.ini")
    alembic_cnf.set_main_option("script_location", config.PROJECT_ROOT + "/migrations")

    al_command.upgrade(alembic_cnf, revision)
Python
def database_base_seed(dbsession, tables):
    """Enter the base configuration data into the database."""
    system = SystemSetup()
    dbsession.add(system)

    user = User(
        username="admin",
        email="[email protected]",
        password="admin",
        active=True,
        is_admin=True,
    )
    dbsession.add(user)

    dbsession.commit()
Python
def configure_logging(config):
    """Configure logging for the entire app."""
    logger = logging.getLogger("fm")
    logfile_path = config.LOG_FILE
    log_level = config.LOG_LEVEL

    logger.setLevel(log_level)
    logger.propagate = False

    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )

    file_handler = RotatingFileHandler(
        logfile_path, mode="a", maxBytes=1024 * 1024, backupCount=10
    )
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    if log_level == logging.DEBUG:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)

    install_mp_handler(logger=logger)

    return logger
Python
def main():  # sourcery skip: extract-method
    """Main starting point for program."""
    config = get_config()
    logger = configure_logging(config)

    # pika_test()

    presence_controller = Process(target=presence_service)
    # presence_controller.start()

    device_controller = Process(target=run_device)
    # device_controller.start()

    try:
        # presence_controller.join()
        # device_controller.join()
        print("skip")
    except KeyboardInterrupt:
        logger.warning("Keyboard interrupt in main")
        time.sleep(1)
        presence_controller.terminate()
        device_controller.terminate()
        presence_controller.join()
        device_controller.join()
Python
def first_setup():
    """First time setup. Load required data."""
    click.echo("First time setup")

    session = get_session()
    try:
        system = session.query(SystemSetup).one()
    except NoResultFound:
        system = SystemSetup()
        session.add(system)

    if system.first_setup_complete:
        click.echo("Setup has already been run")
        if not click.confirm("Do you want to run first time setup again?"):
            session.close()
            return

    system.first_setup_complete = True
    system.first_setup_time = datetime.now()
    session.commit()
    session.close()

    if click.confirm("Do you want to change the device name?"):
        name = click.prompt("Please enter a new device name")
        set_device_name(name)

    if click.confirm("Do you want to set hardware information?"):
        hardware_version = click.prompt(
            "Enter the hardware version", default="pi3_0001"
        )
        set_hardware_info(hardware_version)

    if click.confirm("Do you want to set the software information?"):
        software_version = click.prompt("Enter the software version")
        set_software_info(software_version)

    if click.confirm("Do you want to set details for the interfaces?"):
        interfaces = get_interfaces()
        x = 1
        interface_details = []
        for interface in interfaces:
            click.echo(f"{x}. {interface}")
            x = x + 1
            interface_details.append(get_interface_details(interface))
        set_interfaces(interface_details)

    click.echo("First time setup is complete")
Python
def execute_tool(description, *args):
    """Execute a checking tool with its arguments."""
    # Add the virtual environment to the path for subprocess calls
    my_env = os.environ.copy()
    my_env["PATH"] = os.pathsep.join([pipenv_path, my_env["PATH"]])
    command_line = list(args)
    click.echo(f"{description}: {' '.join(command_line)}")
    rv = call(command_line, env=my_env)
    return rv
Python
def lint(fix_imports, check):
    """Lint and check code style with black, flake8 and isort."""
    skip = [
        "requirements",
        "migrations",
        "__pycache__",
        "fm_database.egg-info",
        "build",
    ]
    root_files = glob("*.py")
    root_directories = [
        name for name in next(os.walk("."))[1] if not name.startswith(".")
    ]
    files_and_directories = [
        arg for arg in root_files + root_directories if arg not in skip
    ]

    # Get the virtual environment to the path for subprocess calls
    pipenv_path = run(["pipenv", "--venv"], check=True, stdout=PIPE)
    pipenv_path = pipenv_path.stdout.decode().replace("\n", "")
    pipenv_path = os.path.join(pipenv_path, "bin")
    my_env = os.environ.copy()
    my_env["PATH"] = os.pathsep.join([pipenv_path, my_env["PATH"]])

    def execute_tool(description, *args):
        """Execute a checking tool with its arguments."""
        command_line = list(args) + files_and_directories
        click.echo(f"{description}: {' '.join(command_line)}")
        rv = call(command_line, env=my_env)
        if rv != 0:
            sys.exit(rv)

    isort_args = ["--profile", "black"]
    black_args = ["--diff"]
    mypy_args = ["--warn-unused-ignores", "--show-error-codes"]
    pylint_args = ["--load-plugins", ""]
    if check:
        isort_args.append("--check")
        black_args.append("--check")
        # mypy_args.append("--check")
    if fix_imports:
        execute_tool("Fixing import order", "isort", *isort_args)
    execute_tool("Formatting style", "black", *black_args)
    execute_tool("Checking code style", "flake8")
    execute_tool("Checking for code errors", "pylint", *pylint_args)
    execute_tool("Checking static types", "mypy", *mypy_args)
Python
def execute_tool(description, *args):
    """Execute a checking tool with its arguments."""
    command_line = list(args) + files_and_directories
    click.echo(f"{description}: {' '.join(command_line)}")
    rv = call(command_line, env=my_env)
    if rv != 0:
        sys.exit(rv)
Python
def after_celery_logging(logger, *args, **kwargs):
    """Called after the celery logging."""
    config = get_config()
    celery_logger = logger
    logfile_path = config.CELERY_LOG_FILE
    log_level = config.CELERY_MAIN_PROCESS_LOG_LEVEL

    celery_logger.setLevel(log_level)

    celery_formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )

    file_handler = RotatingFileHandler(
        logfile_path, mode="a", maxBytes=1024 * 1024, backupCount=10
    )
    file_handler.setFormatter(celery_formatter)
    celery_logger.addHandler(file_handler)
Python
def after_celery_task_logging(logger, *args, **kwargs):
    """Called after a celery task for logging."""
    config = get_config()
    logfile_path = config.CELERY_LOG_FILE
    log_level = config.LOG_LEVEL

    logger.setLevel(log_level)

    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )

    file_handler = RotatingFileHandler(
        logfile_path, mode="a", maxBytes=1024 * 1024, backupCount=10
    )
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    # if log_level == logging.DEBUG:
    #     console_handler = logging.StreamHandler()
    #     console_handler.setFormatter(formatter)
    #     logger.addHandler(console_handler)
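These two functions read like handlers for Celery's logging setup signals; a hedged sketch of wiring them up, where the signal choice is an assumption based on their names and signatures.

from celery.signals import after_setup_logger, after_setup_task_logger

# assumed wiring: run the handlers once Celery finishes configuring its loggers
after_setup_logger.connect(after_celery_logging)
after_setup_task_logger.connect(after_celery_task_logging)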
Python
def monkeysession(request):
    """Create a MonkeyPatch object that can be scoped to a session.

    https://github.com/pytest-dev/pytest/issues/363#issuecomment-289830794
    """
    mpatch = MonkeyPatch()
    yield mpatch
    mpatch.undo()
Python
def presence_service():
    """Presence service that periodically broadcasts a beacon so the device can be discovered."""
    logger = logging.getLogger("fm.presence")

    session = get_session()
    interface = session.query(Interface.interface).filter_by(is_for_fm=True).scalar()
    session.close()
    config = get_config()
    presence_port = config.PRESENCE_PORT

    if interface is None:
        logger.warning(
            "Interface from database is None. Has initial configuration been run? Setting interface to 'eth0'"
        )
        interface = "eth0"

    logger.debug("Interface and port are: %s %s", interface, presence_port)

    broadcast_address = get_ip_of_interface(interface, broadcast=True)

    logger.debug("Presence broadcast address is: %s", broadcast_address)

    # Create UDP socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    # Ask operating system to let us do broadcasts from socket
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    # Bind UDP socket to local port so we can receive pings
    sock.bind((broadcast_address, int(presence_port)))

    logger.info("Presence service is active")

    try:
        # send a ping every 5 seconds for the first 1440 beacons (about two hours)
        for _ in range(int(86400 / 60)):
            # logger.debug("Sending ping via: %s:%s", broadcast_address, presence_port)
            sock.sendto(b"!", 0, (broadcast_address, int(presence_port)))
            time.sleep(5)
        # then send once every minute until stopped
        while True:
            # Broadcast our beacon
            # logger.debug("Sending ping via: %s:%s", broadcast_address, presence_port)
            sock.sendto(b"!", 0, (broadcast_address, int(presence_port)))
            time.sleep(60)
    except KeyboardInterrupt:
        logger.info("Stopping presence service")
        sock.close()
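A minimal listener sketch that complements the beacon above: it binds to the same broadcast port and reports the sender of each ping. The port number is an assumption standing in for config.PRESENCE_PORT.

import socket


def listen_for_presence(port=5554):
    """Print the address of every presence beacon received on the broadcast port."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(("", port))  # listen on all interfaces; the port value is assumed
    try:
        while True:
            data, addr = sock.recvfrom(16)
            if data == b"!":
                print(f"presence ping from {addr[0]}")
    except KeyboardInterrupt:
        sock.close()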
Python
def hostapd_file(interface, wifi_ssid, wifi_password):
    """Create a hostapd configuration file."""
    contents = (
        "# This is the name of the WiFi interface\n"
        f"interface={interface}\n"
        "\n"
        "# Use the nl80211 driver with the brcmfmac driver\n"
        "driver=nl80211\n"
        "\n"
        "# This is the name of the network\n"
        f"ssid={wifi_ssid}\n"
        "\n"
        "# Use the 2.4Ghz band\n"
        "hw_mode=g\n"
        "\n"
        "# Use channel 6\n"
        "channel=6\n"
        "\n"
        "# Enable 802.11n\n"
        "ieee80211n=1\n"
        "\n"
        "# Enable WMM\n"
        "wmm_enabled=1\n"
        "\n"
        "# Enable 40Mhz channels with 20ns guard interval\n"
        "ht_capab=[HT40][SHORT-GI-20]\n"
        "\n"
        "# Accept all MAC addresses\n"
        "macaddr_acl=0\n"
        "\n"
        "# Use WPA authentication\n"
        "auth_algs=1\n"
        "\n"
        "# Require clients to know the network name\n"
        "ignore_broadcast_ssid=0\n"
        "\n"
        "# Use WPA2\n"
        "wpa=2\n"
        "\n"
        "# Use a pre-shared key\n"
        "wpa_key_mgmt=WPA-PSK\n"
        "\n"
        "# The network passphrase\n"
        f"wpa_passphrase={wifi_password}\n"
        "\n"
        "# Use AES, instead of TKIP\n"
        "rsn_pairwise=CCMP\n"
    )

    with open("/tmp/hostapd_temp", "w") as f:
        f.write(contents)

    command = ["sudo", "mv", "/tmp/hostapd_temp", "/etc/hostapd/hostapd.conf"]
    subprocess.check_call(command)
Python
def interface_file(wlan0_dhcp=True, wlan1_dhcp=True):
    """Create a network interfaces file."""
    # templates for dhcp and static
    wlan_dhcp = (
        "allow-hotplug {interface}\n"
        "iface {interface} inet dhcp\n"
        " wpa-conf /etc/wpa_supplicant/wpa_supplicant.conf\n"
        "\n"
    )
    wlan_static = (
        "allow-hotplug {interface}\n"
        "iface {interface} inet static\n"
        " pre-up iptables-restore < /etc/iptables.ipv4.nat \n"
        " address 10.10.1.1\n"
        " netmask 255.255.255.0\n"
        " network 10.10.1.0\n"
        " broadcast 10.10.1.255\n"
        "\n"
    )

    joined_contents = []

    contents = (
        "# Include files from /etc/network/interfaces.d:\n"
        "source-directory /etc/network/interfaces.d\n"
        "\n"
        "auto lo\n"
        "iface lo inet loopback\n"
        "\n"
        "auto eth0\n"
        "iface eth0 inet dhcp\n"
        "\n"
    )
    joined_contents.append(contents)

    # build the string for wlan0. Either dhcp or static
    if wlan0_dhcp:
        joined_contents.append(wlan_dhcp.format(interface="wlan0"))
    else:
        joined_contents.append(wlan_static.format(interface="wlan0"))

    # build the string for wlan1. Either dhcp or static
    if wlan1_dhcp:
        joined_contents.append(wlan_dhcp.format(interface="wlan1"))
    else:
        joined_contents.append(wlan_static.format(interface="wlan1"))

    # compile all the joined_contents into one string and write it to file
    with open("/tmp/interface_temp", "w") as f:
        f.write("".join(joined_contents))

    command = ["sudo", "mv", "/tmp/interface_temp", "/etc/network/interfaces"]
    subprocess.check_call(command)
Python
def dnsmasq_file(interface):
    """Create a dnsmasq configuration file."""
    contents = (
        "interface={interface} # Use interface {interface}\n"
        "listen-address=10.10.1.1 # Explicitly specify the address to listen on\n"
        "bind-interfaces # Bind to the interface\n"
        "server=8.8.8.8 # Forward DNS requests to Google DNS\n"
        "domain-needed # Don't forward short names\n"
        "bogus-priv # Never forward addresses in non-routed space\n"
        "dhcp-range=10.10.1.50,10.10.1.150,12h"
    ).format(interface=interface)

    with open("/tmp/dnsmasq_temp", "w") as f:
        f.write(contents)

    command = ["sudo", "mv", "/tmp/dnsmasq_temp", "/etc/dnsmasq.conf"]
    subprocess.check_call(command)
Python
def dhcpcd_file(interface=None):
    """Create a dhcpcd configuration file."""
    top_part = (
        "# Inform the DHCP server of our hostname for DDNS.\n"
        "hostname\n"
        "\n"
        "# Use the hardware address of the interface for the Client ID.\n"
        "clientid\n"
        "\n"
        "# Persist interface configuration when dhcpcd exits.\n"
        "persistent\n"
        "\n"
        "option rapid_commit\n"
        "\n"
        "# A list of options to request from the DHCP server.\n"
        "option domain_name_servers, domain_name, domain_search, host_name\n"
        "option classless_static_routes\n"
        "# Most distributions have NTP support.\n"
        "option ntp_servers\n"
        "\n"
        "# A ServerID is required by RFC2131.\n"
        "require dhcp_server_identifier\n"
        "\n"
        "# Generate Stable Private IPv6 Addresses instead of hardware based ones\n"
        "slaac private\n"
        "\n"
        "# A hook script is provided to lookup the hostname if not set by the DHCP\n"
        "# server, but it should not be run by default.\n"
        "nohook lookup-hostname\n"
        "\n"
    )

    contents = []
    contents.append(top_part)

    if interface:
        contents.append(f"denyinterfaces {interface}\n")

    # compile all the contents into one string and write it to file
    with open("/tmp/dhcpcd_temp", "w") as f:
        f.write("".join(contents))

    command = ["sudo", "mv", "/tmp/dhcpcd_temp", "/etc/dhcpcd.conf"]
    subprocess.check_call(command)
Python
def wpa_supplicant_file(networks):
    """Create a wpa_supplicant file.

    networks is a list of networks with a 'ssid' and 'password' entry
    """
    network_ssid = (
        "network={{\n"
        ' ssid="{ssid}"\n'
        ' psk="{password}"\n'
        " key_mgmt=WPA-PSK\n"
        "}}\n"
        "\n"
    )

    header = (
        "ctrl_interface=DIR=/var/run/wpa_supplicant GROUP=netdev\n"
        "update_config=1\n"
        "country=CA\n"
        "\n"
    )

    contents = []
    contents.append(header)

    # go through all the networks
    for network in networks:
        contents.append(
            network_ssid.format(ssid=network["ssid"], password=network["password"])
        )

    # compile all the contents into one string and write it to file
    with open("/tmp/wpa_supplicant_temp", "w") as f:
        f.write("".join(contents))

    command = [
        "sudo",
        "mv",
        "/tmp/wpa_supplicant_temp",
        "/etc/wpa_supplicant/wpa_supplicant.conf",
    ]
    subprocess.check_call(command)
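An example call shape for wpa_supplicant_file, following its docstring: each entry needs an 'ssid' and a 'password'. The values below are placeholders, and the call rewrites /etc/wpa_supplicant/wpa_supplicant.conf via sudo.

example_networks = [
    {"ssid": "HomeNetwork", "password": "home-passphrase"},
    {"ssid": "ShopNetwork", "password": "shop-passphrase"},
]
# wpa_supplicant_file(example_networks)  # requires sudo; placeholder credentials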
Python
def post(login_args):
    """Login and return JWT Auth."""
    username = login_args["username"]
    password = login_args["password"]

    user = User.query.filter_by(username=username).first()
    if user is None or not user.check_password(password):
        abort(400, message="User not found or bad password.")

    access_token = create_access_token(identity=user, fresh=True)
    refresh_token = create_refresh_token(identity=user)

    local_timezone = datetime.utcnow().astimezone().tzinfo
    access_decoded = decode_token(access_token)
    access_expires = datetime.fromtimestamp(
        access_decoded["exp"], local_timezone
    ).isoformat()
    refresh_decoded = decode_token(refresh_token)
    refresh_expires = datetime.fromtimestamp(
        refresh_decoded["exp"], local_timezone
    ).isoformat()

    return {
        "access_token": access_token,
        "access_expires": access_expires,
        "refresh_token": refresh_token,
        "refresh_expires": refresh_expires,
    }
Python
def post():
    """Return a new access token using the refresh token."""
    current_user_id = get_jwt_identity()
    current_user = User.query.filter_by(id=current_user_id).one_or_none()
    access_token = create_access_token(identity=current_user)

    local_timezone = datetime.utcnow().astimezone().tzinfo
    access_decoded = decode_token(access_token)
    access_expires = datetime.fromtimestamp(
        access_decoded["exp"], local_timezone
    ).isoformat()

    return {
        "access_token": access_token,
        "access_expires": access_expires,
    }
Python
def user_loader_callback(_jwt_header, jwt_data):
    """Load the user from the given JWT.

    A callback function that loads a user from the database whenever a
    protected route is accessed. This returns a User or else None.
    """
    identity = jwt_data["sub"]
    return User.query.filter_by(id=identity).one_or_none()
Python
def user_identity_lookup(user: User):
    """Return the user identity.

    A callback function that takes whatever object is passed in as the
    identity when creating JWTs and converts it to a JSON serializable format.
    """
    return user.id
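The two callbacks above match flask-jwt-extended's loader hooks; a hedged sketch of registering them on the extension object, where the jwt instance name and where it lives are assumptions.

from flask_jwt_extended import JWTManager

jwt = JWTManager()  # normally created once and attached to the Flask app
jwt.user_identity_loader(user_identity_lookup)
jwt.user_lookup_loader(user_loader_callback)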
Python
def refresh_interfaces():
    """Refresh all interfaces. Update with current information."""
    session = get_session()
    ap_present = False
    interfaces = get_interfaces()

    # update all interfaces.active to be False by default
    session.query(Interface).update({Interface.is_active: False})

    for my_interface in interfaces:
        try:
            interface = session.query(Interface).filter_by(interface=my_interface).one()
            interface.is_active = True
            # see if there is an interface that is configured for an ap
            if interface.state == "ap":
                ap_present = True
        # must be a new interface so lets add it
        except NoResultFound:
            new_interface = Interface(my_interface)
            new_interface.is_active = True
            new_interface.is_for_fm = False
            new_interface.state = "dhcp"
            session.add(new_interface)

    session.commit()
    session.close()

    if ap_present:
        set_ap_mode()
    else:
        set_wpa_mode()
Python
def scan_wifi(interface=None):
    """Scan the interface for the available wifi networks.

    Returns a list of strings that are the found networks.
    """
    # if no interface is given, try to find an interface in the database
    # that has the state set to 'dhcp' and is not 'eth'
    if interface is None:
        session = get_session()
        interfaces = session.query(Interface).all()
        for x in interfaces:
            if not x.interface.startswith("eth"):
                if x.state == "dhcp":
                    interface = x.interface
        session.close()

    # exit if still no interface
    if interface is None:
        logger.warning("No interface available to scan wifi networks")
        return []

    # scan the interface for networks
    command = ["sudo", "iwlist", interface, "scan"]
    output = subprocess.check_output(command, universal_newlines=True)

    index = output.find('ESSID:"')
    ssid = []
    while index > 0:
        stop = output.find('"\n', index)
        ssid.append(output[(index + 7) : stop])
        output = output[(stop + 2) :]
        index = output.find('ESSID:"')

    return ssid
Python
def add_wifi_network(wifi_name, wifi_password, interface=None):
    """Add a given wifi to the list of available wifi networks."""
    session = get_session()
    if interface is None:
        interfaces = session.query(Interface).all()
        for x in interfaces:
            # find the first available wlan interface that is in dhcp mode
            if x.interface != "eth0" and x.state == "dhcp":
                interface = x.interface
                break

    if interface is None:
        logger.error("No interface available to add new wifi network")
        return None

    # have an interface. now create a Wifi entry
    new_wifi = Wifi()
    new_wifi.wifi_name = wifi_name
    new_wifi.wifi_password = wifi_password
    new_wifi.wifi_mode = "dhcp"
    new_wifi.interface = interface

    session.add(new_wifi)
    session.commit()
    session.close()

    return new_wifi
Python
def wifi_info():
    """
    Get a list of wifi details for all wlan interfaces.

    For each interface, a dictionary of details is added to the list.
    Keys of the dictionary are:
        interface: the interface
        if ap:
            clients: the number of clients currently connected
            ssid: the ssid of the ap
            password: the password of the ap
        if dhcp:
            state: either the SSID currently connected to or False
            state_boolean: boolean value for state. True or False
            if state:
                address: the IPV4 address
            ssid: the ssid of the dhcp interface
            password: the password of the dhcp interface
    """
    logger.debug("getting wifi information")
    wlan_interfaces = get_interfaces(only_wlan=True)

    wifi = []
    session = get_session()
    for w_interface in wlan_interfaces:
        try:
            info = {}
            interface = session.query(Interface).filter_by(interface=w_interface).one()
            info["interface"] = interface
            if interface.state == "ap":
                info["clients"] = wifi_ap_clients(interface.interface)
                info["ssid"] = interface.credentials[0].wifi_name
                info["password"] = interface.credentials[0].wifi_password
            else:
                info["state"] = wifi_dhcp_info(interface.interface)
                if info["state"] is False:
                    info["state_boolean"] = False
                else:
                    info["state_boolean"] = True
                    if w_interface in netifaces.interfaces():
                        address = netifaces.ifaddresses(w_interface)
                        info["address"] = address[netifaces.AF_INET][0]["addr"]
                if interface.credentials:
                    info["ssid"] = interface.credentials[0].wifi_name
                    info["password"] = interface.credentials[0].wifi_password
            wifi.append(info)
        except NoResultFound:
            pass

    session.close()
    return wifi
Python
def wifi_ap_clients(interface):
    """Return the number of ap clients connected to the given interface."""
    logger.debug("getting wifi clients")
    command = ["iw", "dev", interface, "station", "dump"]
    client_info = subprocess.check_output(command, universal_newlines=True)
    client_count = client_info.count("Station")

    return client_count
Python
def wifi_dhcp_info(interface):
    """Return the SSID the given interface is connected to, or False if not connected."""
    command = ["iw", interface, "link"]
    output = subprocess.check_output(command, universal_newlines=True)

    if output.startswith("Not connected."):
        return False

    start_index = output.find("SSID: ")
    end_index = output.find("\n", start_index)
    ssid = output[(start_index + 6) : end_index]

    return ssid
Python
def session_scope():
    """Provide a transactional scope for a session around a series of operations.

    Example usage:

        with session_scope() as session:
            # do session related work.
            # make sure to commit the session if needed.
    """
    try:
        yield db_session
    except Exception as ex:  # noqa B902
        db_session.rollback()
        raise ex
    finally:
        db_session.close()
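A usage sketch for session_scope, following its docstring; the record argument stands in for any mapped model instance.

def add_record_example(record):
    """Add a record inside a transactional session scope."""
    with session_scope() as session:
        session.add(record)
        session.commit()  # the context manager itself only rolls back and closes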
Python
def grainbin_update(info):
    """Celery task for grainbin update messages."""
    session = get_session()

    # the device_id is found from the name which is in the form of DEVICE_ID.BIN_NUMBER
    # get all the characters up until the first '.'
    grainbin_name: str = info["name"]
    device_id = grainbin_name.split(".")[0]
    bus_number = info["bus_number"]

    LOGGER.debug(f"Received grainbin update from {grainbin_name}")

    grainbin = (
        session.query(Grainbin)
        .filter_by(device_id=device_id, bus_number=bus_number)
        .one_or_none()
    )

    if grainbin is None:
        LOGGER.info(f"Adding new grainbin to the database. {grainbin_name}")
        grainbin = Grainbin(device_id=device_id, bus_number=bus_number)
        grainbin.bus_number_string = info["bus_number_string"]
        grainbin.save(session=session)

    grainbin.total_updates += 1
    grainbin.average_temp = info["average_temp"]

    sensor_data: list = info["sensor_data"]

    for sensor in sensor_data:
        new_grainbin_update = GrainbinUpdate(grainbin.id)
        new_grainbin_update.timestamp = info["created_at"]
        new_grainbin_update.update_index = grainbin.total_updates
        new_grainbin_update.sensor_name = sensor["sensor_name"]
        new_grainbin_update.temperature = sensor["temperature"]
        new_grainbin_update.temphigh = sensor["temphigh"]
        new_grainbin_update.templow = sensor["templow"]
        LOGGER.debug(f"New update saved for device. {new_grainbin_update}")
        session.add(new_grainbin_update)

    session.commit()

    return True
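The shape of the info payload that grainbin_update expects, inferred directly from the fields the task reads; all values are illustrative placeholders.

example_info = {
    "name": "1000000012345678.01",  # DEVICE_ID.BIN_NUMBER
    "bus_number": 1,
    "bus_number_string": "bus_1",
    "average_temp": 21.5,
    "created_at": "2021-06-01T12:00:00",
    "sensor_data": [
        {
            "sensor_name": "28-000006b4e9d2",
            "temperature": 21.5,
            "temphigh": 25,
            "templow": 18,
        }
    ],
}
# grainbin_update(example_info)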