Dataset columns:

    Column       Type     Range / values
    -----------  -------  ------------------
    Unnamed: 0   int64    0 - 10k
    function     string   lengths 79 - 138k
    label        string   20 classes
    info         string   lengths 42 - 261
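Each record below consists of a row index, a Python function in which one exception type has been masked as __HOLE__, the ground-truth label for that mask, and a provenance path. As an illustration only, here is a minimal sketch of how a record's fields fit together; the Record container and fill_hole helper are hypothetical names for this sketch, not part of any dataset tooling:

import collections

# Hypothetical container mirroring the four columns described above.
Record = collections.namedtuple("Record", ["index", "function", "label", "info"])

def fill_hole(record):
    """Substitute the ground-truth exception class back into the
    masked __HOLE__ slot of the function source."""
    return record.function.replace("__HOLE__", record.label)

# Usage, with the first record below (row 3,600):
example = Record(
    index=3600,
    function=("def mkdirp(path):\n"
              "    try:\n"
              "        os.makedirs(path)\n"
              "    except __HOLE__:\n"
              "        pass"),
    label="OSError",
    info="dataset/ETHPy150Open CenterForOpenScience/osf.io/scripts/analytics/utils.py/mkdirp",
)
print(fill_hole(example))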
3,600
def mkdirp(path):
    try:
        os.makedirs(path)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open CenterForOpenScience/osf.io/scripts/analytics/utils.py/mkdirp
3,601
def identify_file(self, filename):
    """Identify the type of @param filename.
    Call self.handle_matches instead of returning a value.
    """
    self.current_file = filename
    self.matchtype = "signature"
    try:
        t0 = time.clock()
        f = open(filename, 'rb')
        size = os.stat(filename)[6]
        self.current_filesize = size
        if self.current_filesize == 0:
            sys.stderr.write("FIDO: Zero byte file (empty): Path is: {0}\n".format(filename))
        bofbuffer, eofbuffer = self.get_buffers(f, size, seekable=True)
        matches = self.match_formats(bofbuffer, eofbuffer)
        # from here is also repeated in walk_zip
        # we should make this uniform in a next version!
        #
        # filesize is made conditional because files with 0 bytes
        # are falsely characterised being 'rtf' (due to wacky sig)
        # in these cases we try to match the extension instead
        if len(matches) > 0 and self.current_filesize > 0:
            self.handle_matches(filename, matches, time.clock() - t0, self.matchtype)
        elif len(matches) == 0 or self.current_filesize == 0:
            matches = self.match_extensions(filename)
            self.handle_matches(filename, matches, time.clock() - t0, "extension")
        # till here matey!
        if self.zip:
            self.identify_contents(filename, type=self.container_type(matches))
    except __HOLE__:
        #print >> sys.stderr, "FIDO: Error in identify_file: Path is {0}".format(filename)
        sys.stderr.write("FIDO: Error in identify_file: Path is {0}\n".format(filename))
IOError
dataset/ETHPy150Open openpreserve/fido/fido/fido.py/Fido.identify_file
3,602
def walk_zip(self, filename, fileobj=None):
    """Identify the type of each item in the zip @param fileobj.
    If fileobj is not provided, open @param filename.
    Call self.handle_matches instead of returning a value.
    """
    # IN 2.7+: with zipfile.ZipFile((fileobj if fileobj != None else filename), 'r') as stream:
    import zipfile, tempfile
    try:
        zipstream = None
        zipstream = zipfile.ZipFile((fileobj if fileobj != None else filename), 'r')
        for item in zipstream.infolist():
            if item.file_size == 0:
                continue  #TODO: Find a better test for isdir
            t0 = time.clock()
            # with zipstream.open(item) as f:
            f = None
            try:
                f = zipstream.open(item)
                item_name = filename + '!' + item.filename
                self.current_file = item_name
                self.current_filesize = item.file_size
                if self.current_filesize == 0:
                    sys.stderr.write("FIDO: Zero byte file (empty): Path is: {0}\n".format(item_name))
                bofbuffer, eofbuffer = self.get_buffers(f, item.file_size)
            finally:
                if f != None:
                    f.close()
            matches = self.match_formats(bofbuffer, eofbuffer)
            if len(matches) > 0 and self.current_filesize > 0:
                self.handle_matches(item_name, matches, time.clock() - t0, "signature")
            elif len(matches) == 0 or self.current_filesize == 0:
                matches = self.match_extensions(item_name)
                self.handle_matches(item_name, matches, time.clock() - t0, "extension")
            if self.container_type(matches):
                target = tempfile.SpooledTemporaryFile(prefix='Fido')
                #with zipstream.open(item) as source:
                try:
                    source = zipstream.open(item)
                    self.copy_stream(source, target)
                    #target.seek(0)
                    self.identify_contents(item_name, target, self.container_type(matches))
                finally:
                    source.close()
    except __HOLE__:
        sys.stderr.write("FIDO: ZipError {0}\n".format(filename))
    except zipfile.BadZipfile:
        sys.stderr.write("FIDO: ZipError {0}\n".format(filename))
    finally:
        if zipstream != None:
            zipstream.close()
IOError
dataset/ETHPy150Open openpreserve/fido/fido/fido.py/Fido.walk_zip
3,603
def main(arglist=None):
    # The argparse package was introduced in 2.7
    t0 = time.clock()
    from argparselocal import ArgumentParser, RawTextHelpFormatter
    if arglist == None:
        arglist = sys.argv[1:]
    if len(arglist) == False:
        arglist.append("-h")

    parser = ArgumentParser(description=defaults['description'], epilog=defaults['epilog'], fromfile_prefix_chars='@', formatter_class=RawTextHelpFormatter)
    parser.add_argument('-v', default=False, action='store_true', help='show version information')
    parser.add_argument('-q', default=False, action='store_true', help='run (more) quietly')
    parser.add_argument('-recurse', default=False, action='store_true', help='recurse into subdirectories')
    parser.add_argument('-zip', default=False, action='store_true', help='recurse into zip and tar files')
    parser.add_argument('-nocontainer', default=False, action='store_true', help='disable deep scan of container documents, increases speed but may reduce accuracy with big files')
    parser.add_argument('-pronom_only', default=False, action='store_true', help='disables loading of format extensions file, only PRONOM signatures are loaded, may reduce accuracy of results')

    group = parser.add_mutually_exclusive_group()
    group.add_argument('-input', default=False, help='file containing a list of files to check, one per line. - means stdin')
    group.add_argument('files', nargs='*', default=[], metavar='FILE', help='files to check. If the file is -, then read content from stdin. In this case, python must be invoked with -u or it may convert the line terminators.')

    parser.add_argument('-filename', default=None, help='filename if file contents passed through STDIN')
    parser.add_argument('-useformats', metavar='INCLUDEPUIDS', default=None, help='comma separated string of formats to use in identification')
    parser.add_argument('-nouseformats', metavar='EXCLUDEPUIDS', default=None, help='comma separated string of formats not to use in identification')
    parser.add_argument('-matchprintf', metavar='FORMATSTRING', default=None, help='format string (Python style) to use on match. See nomatchprintf, README.txt.')
    parser.add_argument('-nomatchprintf', metavar='FORMATSTRING', default=None, help='format string (Python style) to use if no match. See README.txt')
    parser.add_argument('-bufsize', type=int, default=None, help='size (in bytes) of the buffer to match against (default='+str(defaults['bufsize'])+' bytes)')
    parser.add_argument('-container_bufsize', type=int, default=None, help='size (in bytes) of the buffer to match against (default='+str(defaults['container_bufsize'])+' bytes)')
    parser.add_argument('-loadformats', default=None, metavar='XML1,...,XMLn', help='comma separated string of XML format files to add.')
    parser.add_argument('-confdir', default=None, help='configuration directory to load_fido_xml, for example, the format specifications from.')

    # what is this doing here only once?
    #mydir = os.path.abspath(os.path.dirname(__file__))

    # PROCESS ARGUMENTS
    args = parser.parse_args(arglist)
    # print args
    # sys.exit()

    # process confdir
    # load versions.xml
    # and stick it in defaults
    if args.confdir:
        versionsFile = os.path.join(os.path.abspath(args.confdir), defaults['versions_file'])
    else:
        versionsFile = os.path.join(os.path.abspath(defaults['conf_dir']), defaults['versions_file'])
    try:
        versions = VET.parse(versionsFile)
    except Exception, e:
        sys.stderr.write("An error occured loading versions.xml:\n{0}".format(e))
        sys.exit()
    defaults['xml_pronomSignature'] = versions.find("pronomSignature").text
    # defaults['xml_pronomContainerSignature'] = versions.find("pronomContainerSignature").text
    defaults['containersignature_file'] = versions.find("pronomContainerSignature").text
    defaults['xml_fidoExtensionSignature'] = versions.find("fidoExtensionSignature").text
    defaults['format_files'] = []
    defaults['format_files'].append(defaults['xml_pronomSignature'])

    if args.pronom_only:
        versionHeader = "FIDO v{0} ({1}, {2})\n".format(version, defaults['xml_pronomSignature'], defaults['containersignature_file'])
    else:
        versionHeader = "FIDO v{0} ({1}, {2}, {3})\n".format(version, defaults['xml_pronomSignature'], defaults['containersignature_file'], defaults['xml_fidoExtensionSignature'])
        defaults['format_files'].append(defaults['xml_fidoExtensionSignature'])

    if args.v:
        sys.stdout.write(versionHeader)
        sys.exit(0)

    if args.matchprintf != None:
        args.matchprintf = args.matchprintf.decode('string_escape')
    if args.nomatchprintf != None:
        args.nomatchprintf = args.nomatchprintf.decode('string_escape')

    fido = Fido(quiet=args.q, bufsize=args.bufsize,
                container_bufsize=args.container_bufsize,
                printmatch=args.matchprintf,
                printnomatch=args.nomatchprintf, zip=args.zip,
                nocontainer = args.nocontainer, conf_dir=args.confdir)

    #TODO: Allow conf options to be dis-included
    if args.loadformats:
        for file in args.loadformats.split(','):
            fido.load_fido_xml(file)

    #TODO: remove from maps
    if args.useformats:
        args.useformats = args.useformats.split(',')
        fido.formats = [f for f in fido.formats if f.find('puid').text in args.useformats]
    elif args.nouseformats:
        args.nouseformats = args.nouseformats.split(',')
        fido.formats = [f for f in fido.formats if f.find('puid').text not in args.nouseformats]

    # Set up to use stdin, or open input files:
    if args.input == '-':
        args.files = sys.stdin
    elif args.input:
        args.files = open(args.input, 'r')

    # RUN
    try:
        if not args.q:
            sys.stderr.write(versionHeader)
            sys.stderr.flush()
        if (not args.input) and len(args.files) == 1 and args.files[0] == '-':
            if fido.zip == True:
                raise RuntimeError("Multiple content read from stdin not yet supported.")
                sys.exit(1)
                fido.identify_multi_object_stream(sys.stdin)
            else:
                fido.identify_stream(sys.stdin, args.filename)
        else:
            for file in list_files(args.files, args.recurse):
                fido.identify_file(file)
    except __HOLE__:
        msg = "FIDO: Interrupt while identifying file {0}"
        sys.stderr.write(msg.format(fido.current_file))
        sys.exit(1)

    if not args.q:
        sys.stdout.flush()
        fido.print_summary(time.clock() - t0)
        sys.stderr.flush()
KeyboardInterrupt
dataset/ETHPy150Open openpreserve/fido/fido/fido.py/main
3,604
def prepare_impl(t, cwd, ver, wafdir):
    Options.tooldir = [t]
    Options.launch_dir = cwd

    # some command-line options can be processed immediately
    if '--version' in sys.argv:
        opt_obj = Options.Handler()
        opt_obj.curdir = cwd
        opt_obj.parse_args()
        sys.exit(0)

    # now find the wscript file
    msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE

    # in theory projects can be configured in an autotool-like manner:
    # mkdir build && cd build && ../waf configure && ../waf
    build_dir_override = None
    candidate = None

    lst = os.listdir(cwd)
    search_for_candidate = True
    if WSCRIPT_FILE in lst:
        candidate = cwd
    elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
        # autotool-like configuration
        calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
        if WSCRIPT_FILE in os.listdir(calldir):
            candidate = calldir
            search_for_candidate = False
        else:
            error('arg[0] directory does not contain a wscript file')
            sys.exit(1)
        build_dir_override = cwd

    # climb up to find a script if it is not found
    while search_for_candidate:
        if len(cwd) <= 3:
            break  # stop at / or c:
        dirlst = os.listdir(cwd)
        if WSCRIPT_FILE in dirlst:
            candidate = cwd
        if 'configure' in sys.argv and candidate:
            break
        if Options.lockfile in dirlst:
            env = Environment.Environment()
            try:
                env.load(os.path.join(cwd, Options.lockfile))
            except:
                error('could not load %r' % Options.lockfile)
            try:
                os.stat(env['cwd'])
            except:
                candidate = cwd
            else:
                candidate = env['cwd']
            break
        cwd = os.path.dirname(cwd)  # climb up

    if not candidate:
        # check if the user only wanted to display the help
        if '-h' in sys.argv or '--help' in sys.argv:
            warn('No wscript file found: the help message may be incomplete')
            opt_obj = Options.Handler()
            opt_obj.curdir = cwd
            opt_obj.parse_args()
        else:
            error(msg1)
        sys.exit(0)

    # We have found wscript, but there is no guarantee that it is valid
    try:
        os.chdir(candidate)
    except OSError:
        raise Utils.WafError("the folder %r is unreadable" % candidate)

    # define the main module containing the functions init, shutdown, ..
    Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))

    if build_dir_override:
        d = getattr(Utils.g_module, BLDDIR, None)
        if d:
            # test if user has set the blddir in wscript.
            msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
            warn(msg)
        Utils.g_module.blddir = build_dir_override

    # bind a few methods and classes by default
    def set_def(obj, name=''):
        n = name or obj.__name__
        if not n in Utils.g_module.__dict__:
            setattr(Utils.g_module, n, obj)

    for k in [dist, distclean, distcheck, clean, install, uninstall]:
        set_def(k)

    set_def(Configure.ConfigurationContext, 'configure_context')

    for k in ['build', 'clean', 'install', 'uninstall']:
        set_def(Build.BuildContext, k + '_context')

    # now parse the options from the user wscript file
    opt_obj = Options.Handler(Utils.g_module)
    opt_obj.curdir = candidate
    try:
        f = Utils.g_module.set_options
    except __HOLE__:
        pass
    else:
        opt_obj.sub_options([''])
    opt_obj.parse_args()

    if not 'init' in Utils.g_module.__dict__:
        Utils.g_module.init = Utils.nada
    if not 'shutdown' in Utils.g_module.__dict__:
        Utils.g_module.shutdown = Utils.nada

    main()
AttributeError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/prepare_impl
3,605
def prepare(t, cwd, ver, wafdir):
    if WAFVERSION != ver:
        msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
        print('\033[91mError: %s\033[0m' % msg)
        sys.exit(1)

    #"""
    try:
        prepare_impl(t, cwd, ver, wafdir)
    except Utils.WafError, e:
        error(str(e))
        sys.exit(1)
    except __HOLE__:
        Utils.pprint('RED', 'Interrupted')
        sys.exit(68)
    """
    import cProfile, pstats
    cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
        {'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir}, 'profi.txt')
    p = pstats.Stats('profi.txt')
    p.sort_stats('time').print_stats(45)
    #"""
KeyboardInterrupt
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/prepare
3,606
def main():
    global commands
    commands = Options.arg_line[:]

    while commands:
        x = commands.pop(0)

        ini = datetime.datetime.now()
        if x == 'configure':
            fun = configure
        elif x == 'build':
            fun = build
        else:
            fun = getattr(Utils.g_module, x, None)

        if not fun:
            raise Utils.WscriptError('No such command %r' % x)

        ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()

        if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
            # compatibility TODO remove in waf 1.6
            try:
                fun(ctx)
            except __HOLE__:
                fun()
        else:
            fun(ctx)

        ela = ''
        if not Options.options.progress_bar:
            ela = ' (%s)' % Utils.get_elapsed_time(ini)

        if x != 'init' and x != 'shutdown':
            info('%r finished successfully%s' % (x, ela))

        if not commands and x != 'shutdown':
            commands.append('shutdown')
TypeError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/main
3,607
def configure(conf):
    src = getattr(Options.options, SRCDIR, None)
    if not src: src = getattr(Utils.g_module, SRCDIR, None)
    if not src: src = getattr(Utils.g_module, 'top', None)
    if not src:
        src = '.'
        incomplete_src = 1
    src = os.path.abspath(src)

    bld = getattr(Options.options, BLDDIR, None)
    if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
    if not bld: bld = getattr(Utils.g_module, 'out', None)
    if not bld:
        bld = 'build'
        incomplete_bld = 1
    if bld == '.':
        raise Utils.WafError('Setting blddir="." may cause distclean problems')
    bld = os.path.abspath(bld)

    try: os.makedirs(bld)
    except __HOLE__: pass

    # It is not possible to compile specific targets in the configuration
    # this may cause configuration errors if autoconfig is set
    targets = Options.options.compile_targets
    Options.options.compile_targets = None
    Options.is_install = False

    conf.srcdir = src
    conf.blddir = bld
    conf.post_init()

    if 'incomplete_src' in vars():
        conf.check_message_1('Setting srcdir to')
        conf.check_message_2(src)
    if 'incomplete_bld' in vars():
        conf.check_message_1('Setting blddir to')
        conf.check_message_2(bld)

    # calling to main wscript's configure()
    conf.sub_config([''])

    conf.store()

    # this will write a configure lock so that subsequent builds will
    # consider the current path as the root directory (see prepare_impl).
    # to remove: use 'waf distclean'
    env = Environment.Environment()
    env[BLDDIR] = bld
    env[SRCDIR] = src
    env['argv'] = sys.argv
    env['commands'] = Options.commands
    env['options'] = Options.options.__dict__

    # conf.hash & conf.files hold wscript files paths and hash
    # (used only by Configure.autoconfig)
    env['hash'] = conf.hash
    env['files'] = conf.files
    env['environ'] = dict(conf.environ)
    env['cwd'] = os.path.split(Utils.g_module.root_path)[0]

    if Utils.g_module.root_path != src:
        # in case the source dir is somewhere else
        env.store(os.path.join(src, Options.lockfile))

    env.store(Options.lockfile)

    Options.options.compile_targets = targets
OSError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/configure
3,608
def clean(bld):
    '''removes the build files'''
    try:
        proj = Environment.Environment(Options.lockfile)
    except __HOLE__:
        raise Utils.WafError('Nothing to clean (project not configured)')

    bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
    bld.load_envs()

    bld.is_install = 0  # False

    # read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
    bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

    try:
        bld.clean()
    finally:
        bld.save()
IOError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/clean
3,609
def check_configured(bld):
    if not Configure.autoconfig:
        return bld

    conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
    bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)

    def reconf(proj):
        back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)

        Options.commands = proj['commands']
        Options.options.__dict__ = proj['options']
        conf = conf_cls()
        conf.environ = proj['environ']
        configure(conf)

        (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back

    try:
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        conf = conf_cls()
        configure(conf)
    else:
        try:
            bld = bld_cls()
            bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
            bld.load_envs()
        except Utils.WafError:
            reconf(proj)
            return bld_cls()

    try:
        proj = Environment.Environment(Options.lockfile)
    except __HOLE__:
        raise Utils.WafError('Auto-config: project does not configure (bug)')

    h = 0
    try:
        for file in proj['files']:
            if file.endswith('configure'):
                h = hash((h, Utils.readf(file)))
            else:
                mod = Utils.load_module(file)
                h = hash((h, mod.waf_hash_val))
    except (OSError, IOError):
        warn('Reconfiguring the project: a file is unavailable')
        reconf(proj)
    else:
        if (h != proj['hash']):
            warn('Reconfiguring the project: the configuration has changed')
            reconf(proj)

    return bld_cls()
IOError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/check_configured
3,610
def build_impl(bld):
    # compile the project and/or install the files
    try:
        proj = Environment.Environment(Options.lockfile)
    except __HOLE__:
        raise Utils.WafError("Project not configured (run 'waf configure' first)")

    bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
    bld.load_envs()

    info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
    bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

    # execute something immediately before the build starts
    bld.pre_build()

    try:
        bld.compile()
    finally:
        if Options.options.progress_bar:
            print('')
        info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())

    # execute something immediately after a successful build
    bld.post_build()

    bld.install()
IOError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/build_impl
3,611
def distclean(ctx=None):
    '''removes the build directory'''
    global commands
    lst = os.listdir('.')
    for f in lst:
        if f == Options.lockfile:
            try:
                proj = Environment.Environment(f)
            except:
                Logs.warn('could not read %r' % f)
                continue

            try:
                shutil.rmtree(proj[BLDDIR])
            except IOError:
                pass
            except OSError, e:
                if e.errno != errno.ENOENT:
                    Logs.warn('project %r cannot be removed' % proj[BLDDIR])

            try:
                os.remove(f)
            except __HOLE__, e:
                if e.errno != errno.ENOENT:
                    Logs.warn('file %r cannot be removed' % f)

        # remove the local waf cache
        if not commands and f.startswith('.waf'):
            shutil.rmtree(f, ignore_errors=True)

# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
OSError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/distclean
3,612
def dist(appname='', version=''):
    '''makes a tarball for redistributing the sources'''
    # return return (distdirname, tarballname)
    import tarfile

    if not appname: appname = Utils.g_module.APPNAME
    if not version: version = Utils.g_module.VERSION

    tmp_folder = appname + '-' + version
    if g_gz in ['gz', 'bz2']:
        arch_name = tmp_folder + '.tar.' + g_gz
    else:
        arch_name = tmp_folder + '.' + 'zip'

    # remove the previous dir
    try:
        shutil.rmtree(tmp_folder)
    except (OSError, __HOLE__):
        pass

    # remove the previous archive
    try:
        os.remove(arch_name)
    except (OSError, IOError):
        pass

    # copy the files into the temporary folder
    blddir = getattr(Utils.g_module, BLDDIR, None)
    if not blddir:
        blddir = getattr(Utils.g_module, 'out', None)
    copytree('.', tmp_folder, blddir)

    # undocumented hook for additional cleanup
    dist_hook = getattr(Utils.g_module, 'dist_hook', None)
    if dist_hook:
        back = os.getcwd()
        os.chdir(tmp_folder)
        try:
            dist_hook()
        finally:
            # go back to the root directory
            os.chdir(back)

    if g_gz in ['gz', 'bz2']:
        tar = tarfile.open(arch_name, 'w:' + g_gz)
        tar.add(tmp_folder)
        tar.close()
    else:
        Utils.zip_folder(tmp_folder, arch_name, tmp_folder)

    try: from hashlib import sha1 as sha
    except ImportError: from sha import sha
    try:
        digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
    except:
        digest = ''

    info('New archive created: %s%s' % (arch_name, digest))

    if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
    return arch_name

# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
IOError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Scripting.py/dist
3,613
def match_preview(self, index, *args):
    if not self.p.cwdidx:
        return
    if not self.p.fileops.recursive and index.parent() != self.p.cwdidx:
        return
    target = helpers.splitpath_os(self.p.get_path(index))
    if self.p.cwd in target[0] and target in self.p.targets:
        idx = self.p.targets.index(target)
        try:
            # If preview differs from its original name, show the preview.
            if target[1] + target[2] != self.p.previews[idx][1]:
                for i in ["utf-8", "latin1"]:
                    try:
                        return self.p.previews[idx][1].decode(i)
                    except __HOLE__:
                        pass
                return self.p.previews[idx][1]
            # Otherwise show "\1" to indicate that nothing changed.
            else:
                return "\\1"
        except IndexError:
            return "err"
UnicodeDecodeError
dataset/ETHPy150Open mikar/demimove/demimove/gui.py/DirModel.match_preview
3,614
def create_historytab(self):
    historyfile = os.path.join(self.fileops.configdir, "history.txt")
    try:
        with codecs.open(historyfile, encoding="utf-8") as f:
            data = f.read()
    except __HOLE__:
        historyfile = os.path.join(self.basedir, "data/history.txt")
        with codecs.open(historyfile, encoding="utf-8") as f:
            data = f.read()

    self.historymodel = history.HistoryTreeModel(data, self)
    self.historytree.setModel(self.historymodel)
IOError
dataset/ETHPy150Open mikar/demimove/demimove/gui.py/DemiMoveGUI.create_historytab
3,615
def main(): "Main entry point for demimove-ui." startdir = os.getcwd() configfile = None try: args = docopt(__doc__, version="0.2") # args["-v"] = 3 # Force debug logging fileop = fileops.FileOps(verbosity=args["-v"], quiet=args["--quiet"]) if args["<path>"]: startdir = args["<path>"] if args["--config"]: configfile = args["--config"] except __HOLE__: fileop = fileops.FileOps() log.error("Please install docopt to use the CLI.") app = QtGui.QApplication(sys.argv) app.setApplicationName("demimove-ui") gui = DemiMoveGUI(startdir, fileop, configfile) gui.show() sys.exit(app.exec_())
NameError
dataset/ETHPy150Open mikar/demimove/demimove/gui.py/main
3,616
def upgrade_connection(self):
    """
    Validate and 'upgrade' the HTTP request to a WebSocket request.

    If an upgrade succeeded then then handler will have `start_response`
    with a status of `101`, the environ will also be updated with
    `wsgi.websocket` and `wsgi.websocket_version` keys.

    :param environ: The WSGI environ dict.
    :param start_response: The callable used to start the response.
    :param stream: File like object that will be read from/written to by
        the underlying WebSocket object, if created.
    :return: The WSGI response iterator is something went awry.
    """

    self.logger.debug("Attempting to upgrade connection")

    version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION")

    if version not in self.SUPPORTED_VERSIONS:
        msg = "Unsupported WebSocket Version: {0}".format(version)

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [
            ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))
        ])

        return [msg]

    key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip()

    if not key:
        # 5.2.1 (3)
        msg = "Sec-WebSocket-Key header is missing/empty"

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [])

        return [msg]

    try:
        key_len = len(base64.b64decode(key))
    except __HOLE__:
        msg = "Invalid key: {0}".format(key)

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [])

        return [msg]

    if key_len != 16:
        # 5.2.1 (3)
        msg = "Invalid key: {0}".format(key)

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [])

        return [msg]

    # Check for WebSocket Protocols
    requested_protocols = self.environ.get(
        'HTTP_SEC_WEBSOCKET_PROTOCOL', '')
    protocol = None

    if hasattr(self.application, 'app_protocol'):
        allowed_protocol = self.application.app_protocol(
            self.environ['PATH_INFO'])

        if allowed_protocol and allowed_protocol in requested_protocols:
            protocol = allowed_protocol
            self.logger.debug("Protocol allowed: {0}".format(protocol))

    self.websocket = WebSocket(self.environ, Stream(self), self)
    self.environ.update({
        'wsgi.websocket_version': version,
        'wsgi.websocket': self.websocket
    })

    headers = [
        ("Upgrade", "websocket"),
        ("Connection", "Upgrade"),
        ("Sec-WebSocket-Accept", base64.b64encode(
            hashlib.sha1(key + self.GUID).digest())),
    ]

    if protocol:
        headers.append(("Sec-WebSocket-Protocol", protocol))

    self.logger.debug("WebSocket request accepted, switching protocols")
    self.start_response("101 Switching Protocols", headers)
TypeError
dataset/ETHPy150Open jgelens/gevent-websocket/geventwebsocket/handler.py/WebSocketHandler.upgrade_connection
3,617
def write_to_screen(self, cli, screen, mouse_handlers, write_position):
    """
    Write window to screen. This renders the user control, the margins and
    copies everything over to the absolute position at the given screen.
    """
    # Calculate margin sizes.
    left_margin_widths = [self._get_margin_width(cli, m) for m in self.left_margins]
    right_margin_widths = [self._get_margin_width(cli, m) for m in self.right_margins]
    total_margin_width = sum(left_margin_widths + right_margin_widths)

    # Render UserControl.
    ui_content = self.content.create_content(
        cli, write_position.width - total_margin_width, write_position.height)
    assert isinstance(ui_content, UIContent)

    # Scroll content.
    wrap_lines = self.wrap_lines(cli)
    scroll_func = self._scroll_when_linewrapping if wrap_lines else self._scroll_without_linewrapping

    scroll_func(
        ui_content, write_position.width - total_margin_width,
        write_position.height, cli)

    # Write body
    digraph = cli.vi_state.waiting_for_digraph

    visible_line_to_row_col, rowcol_to_yx = self._copy_body(
        ui_content, screen, write_position,
        sum(left_margin_widths), write_position.width - total_margin_width,
        self.vertical_scroll, self.horizontal_scroll,
        has_focus=self.content.has_focus(cli),
        wrap_lines=wrap_lines,
        vertical_scroll_2=self.vertical_scroll_2,
        always_hide_cursor=self.always_hide_cursor(cli),
        cursor_char='?' if digraph else False)

    # Remember render info. (Set before generating the margins. They need this.)
    x_offset = write_position.xpos + sum(left_margin_widths)
    y_offset = write_position.ypos

    self.render_info = WindowRenderInfo(
        ui_content=ui_content,
        horizontal_scroll=self.horizontal_scroll,
        vertical_scroll=self.vertical_scroll,
        window_width=write_position.width - total_margin_width,
        window_height=write_position.height,
        configured_scroll_offsets=self.scroll_offsets,
        visible_line_to_row_col=visible_line_to_row_col,
        rowcol_to_yx=rowcol_to_yx,
        x_offset=x_offset,
        y_offset=y_offset,
        wrap_lines=wrap_lines)

    # Set mouse handlers.
    def mouse_handler(cli, mouse_event):
        """ Wrapper around the mouse_handler of the `UIControl` that turns
        screen coordinates into line coordinates. """
        # Find row/col position first.
        yx_to_rowcol = dict((v, k) for k, v in rowcol_to_yx.items())
        y = mouse_event.position.y
        x = mouse_event.position.x

        # If clicked below the content area, look for a position in the
        # last line instead.
        max_y = write_position.ypos + len(visible_line_to_row_col) - 1
        y = min(max_y, y)

        while x >= 0:
            try:
                row, col = yx_to_rowcol[y, x]
            except __HOLE__:
                # Try again. (When clicking on the right side of double
                # width characters, or on the right side of the input.)
                x -= 1
            else:
                # Found position, call handler of UIControl.
                result = self.content.mouse_handler(
                    cli, MouseEvent(position=Point(x=col, y=row),
                                    event_type=mouse_event.event_type))
                break
        else:
            # nobreak.
            # (No x/y coordinate found for the content. This happens in
            # case of a FillControl, that only specifies a background, but
            # doesn't have a content. Report (0,0) instead.)
            result = self.content.mouse_handler(
                cli, MouseEvent(position=Point(x=0, y=0),
                                event_type=mouse_event.event_type))

        # If it returns NotImplemented, handle it here.
        if result == NotImplemented:
            return self._mouse_handler(cli, mouse_event)

        return result

    mouse_handlers.set_mouse_handler_for_range(
        x_min=write_position.xpos + sum(left_margin_widths),
        x_max=write_position.xpos + write_position.width - total_margin_width,
        y_min=write_position.ypos,
        y_max=write_position.ypos + write_position.height,
        handler=mouse_handler)

    # Render and copy margins.
    move_x = 0

    def render_margin(m, width):
        " Render margin. Return `Screen`. "
        # Retrieve margin tokens.
        tokens = m.create_margin(cli, self.render_info, width, write_position.height)

        # Turn it into a UIContent object.
        # already rendered those tokens using this size.)
        return TokenListControl.static(tokens).create_content(
            cli, width + 1, write_position.height)

    for m, width in zip(self.left_margins, left_margin_widths):
        # Create screen for margin.
        margin_screen = render_margin(m, width)

        # Copy and shift X.
        self._copy_margin(margin_screen, screen, write_position, move_x, width)
        move_x += width

    move_x = write_position.width - sum(right_margin_widths)

    for m, width in zip(self.right_margins, right_margin_widths):
        # Create screen for margin.
        margin_screen = render_margin(m, width)

        # Copy and shift X.
        self._copy_margin(margin_screen, screen, write_position, move_x, width)
        move_x += width
KeyError
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/layout/containers.py/Window.write_to_screen
3,618
@classmethod
def _copy_body(cls, ui_content, new_screen, write_position, move_x,
               width, vertical_scroll=0, horizontal_scroll=0,
               has_focus=False, wrap_lines=False, vertical_scroll_2=0,
               always_hide_cursor=False, cursor_char=None):
    """
    Copy the UIContent into the output screen.

    :param cursor_char: When '?', put a question mark underneath the
        cursor. Used when going into Vi digraph mode.
    """
    xpos = write_position.xpos + move_x
    ypos = write_position.ypos
    line_count = ui_content.line_count
    new_buffer = new_screen.data_buffer
    empty_char = _CHAR_CACHE['', Token]

    # Map visible line number to (row, col) of input.
    # 'col' will always be zero if line wrapping is off.
    visible_line_to_row_col = {}
    rowcol_to_yx = {}  # Maps (row, col) from the input to (y, x) screen coordinates.

    # Fill background with default_char first.
    default_char = ui_content.default_char

    if default_char:
        for y in range(ypos, ypos + write_position.height):
            new_buffer_row = new_buffer[y]
            for x in range(xpos, xpos + width):
                new_buffer_row[x] = default_char

    # Copy content.
    def copy():
        y = - vertical_scroll_2
        lineno = vertical_scroll

        while y < write_position.height and lineno < line_count:
            # Take the next line and copy it in the real screen.
            line = ui_content.get_line(lineno)

            col = 0
            x = -horizontal_scroll

            visible_line_to_row_col[y] = (lineno, horizontal_scroll)

            for token, text in line:
                new_buffer_row = new_buffer[y + ypos]

                for c in text:
                    char = _CHAR_CACHE[c, token]
                    char_width = char.width

                    # Wrap when the line width is exceeded.
                    if wrap_lines and x + char_width > width:
                        visible_line_to_row_col[y + 1] = (
                            lineno, visible_line_to_row_col[y][1] + x)
                        y += 1
                        x = -horizontal_scroll  # This would be equal to zero.
                                                # (horizontal_scroll=0 when wrap_lines.)
                        new_buffer_row = new_buffer[y + ypos]

                        if y >= write_position.height:
                            return y  # Break out of all for loops.

                    # Set character in screen and shift 'x'.
                    if x >= 0 and y >= 0 and x < write_position.width:
                        new_buffer_row[x + xpos] = char

                        # When we print a multi width character, make sure
                        # to erase the neighbous positions in the screen.
                        # (The empty string if different from everything,
                        # so next redraw this cell will repaint anyway.)
                        if char_width > 1:
                            for i in range(1, char_width):
                                new_buffer_row[x + xpos + i] = empty_char

                        # If this is a zero width characters, then it's
                        # probably part of a decomposed unicode character.
                        # See: https://en.wikipedia.org/wiki/Unicode_equivalence
                        # Merge it in the previous cell.
                        elif char_width == 0 and x - 1 >= 0:
                            prev_char = new_buffer_row[x + xpos - 1]
                            char2 = _CHAR_CACHE[prev_char.char + c, prev_char.token]
                            new_buffer_row[x + xpos - 1] = char2

                        # Keep track of write position for each character.
                        rowcol_to_yx[lineno, col] = (y + ypos, x + xpos)

                    col += 1
                    x += char_width

            lineno += 1
            y += 1
        return y

    y = copy()

    def cursor_pos_to_screen_pos(row, col):
        " Translate row/col from UIContent to real Screen coordinates. "
        try:
            y, x = rowcol_to_yx[row, col]
        except __HOLE__:
            # Normally this should never happen. (It is a bug, if it happens.)
            # But to be sure, return (0, 0)
            return Point(y=0, x=0)

            # raise ValueError(
            #     'Invalid position. row=%r col=%r, vertical_scroll=%r, '
            #     'horizontal_scroll=%r, height=%r' %
            #     (row, col, vertical_scroll, horizontal_scroll, write_position.height))
        else:
            return Point(y=y, x=x)

    # Set cursor and menu positions.
    if has_focus and ui_content.cursor_position:
        new_screen.cursor_position = cursor_pos_to_screen_pos(
            ui_content.cursor_position.y, ui_content.cursor_position.x)

        if always_hide_cursor:
            new_screen.show_cursor = False
        else:
            new_screen.show_cursor = ui_content.show_cursor

        if cursor_char:
            cpos = new_screen.cursor_position
            new_screen.data_buffer[cpos.y][cpos.x] = \
                _CHAR_CACHE[cursor_char, new_screen.data_buffer[cpos.y][cpos.x].token]

    if not new_screen.menu_position and ui_content.menu_position:
        new_screen.menu_position = cursor_pos_to_screen_pos(
            ui_content.menu_position.y, ui_content.menu_position.x)

    # Update output screne height.
    new_screen.height = max(new_screen.height, ypos + write_position.height)

    return visible_line_to_row_col, rowcol_to_yx
KeyError
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/layout/containers.py/Window._copy_body
3,619
def test_adds_error(self):
    try:
        raise ValueError()
    except __HOLE__:
        exc = sys.exc_info()
    plugin = self._make_one()
    plugin.addError(case.Test(FakeTestCase()), exc)
    line = plugin.tracker._test_cases['FakeTestCase'][0]
    self.assertFalse(line.ok)
ValueError
dataset/ETHPy150Open mblayman/tappy/tap/tests/test_nose_plugin.py/TestNosePlugin.test_adds_error
3,620
def test_adds_skip(self):
    # Since Python versions earlier than 2.7 don't support skipping tests,
    # this test has to hack around that limitation.
    try:
        plugin = self._make_one()
        plugin.addError(case.Test(
            FakeTestCase()), (unittest.SkipTest, 'a reason', None))
        line = plugin.tracker._test_cases['FakeTestCase'][0]
        self.assertEqual(line.directive.text, 'SKIP a reason')
    except __HOLE__:
        self.assertTrue(
            True, 'Pass because this Python does not support SkipTest.')
AttributeError
dataset/ETHPy150Open mblayman/tappy/tap/tests/test_nose_plugin.py/TestNosePlugin.test_adds_skip
3,621
def test_adds_failure(self):
    try:
        raise ValueError()
    except __HOLE__:
        exc = sys.exc_info()
    plugin = self._make_one()
    plugin.addFailure(case.Test(FakeTestCase()), exc)
    line = plugin.tracker._test_cases['FakeTestCase'][0]
    self.assertFalse(line.ok)
ValueError
dataset/ETHPy150Open mblayman/tappy/tap/tests/test_nose_plugin.py/TestNosePlugin.test_adds_failure
3,622
def get_env_variable(var_name):
    """ Get the environment variable or return exception """
    try:
        return os.environ[var_name]
    except __HOLE__:
        error_msg = "Set the %s environment variable" % var_name
        raise ImproperlyConfigured(error_msg)
KeyError
dataset/ETHPy150Open erikr/happinesspackets/happinesspackets/settings/base.py/get_env_variable
3,623
def runTests(self):
    """Runs all tests.
    """
    _SetupLogging("LOGTOSTDERR" in os.environ)

    sys.stderr.write("Running %s\n" % self.progName)
    sys.stderr.flush()

    # Ensure assertions will be evaluated
    if not __debug__:
        raise Exception("Not running in debug mode, assertions would not be"
                        " evaluated")

    # Check again, this time with a real assertion
    try:
        assert False
    except __HOLE__:
        pass
    else:
        raise Exception("Assertion not evaluated")

    return unittest.TestProgram.runTests(self)

# pylint: disable=R0904
AssertionError
dataset/ETHPy150Open ganeti/ganeti/test/py/testutils/__init__.py/GanetiTestProgram.runTests
3,624
def patch_object(*args, **kwargs):
    """Unified patch_object for various versions of Python Mock.

    Different Python Mock versions provide incompatible versions of patching
    an object. More recent versions use _patch_object, older ones used
    patch_object.
    This function unifies the different variations.
    """
    import mock

    try:
        # pylint: disable=W0212
        return mock._patch_object(*args, **kwargs)
    except __HOLE__:
        # pylint: disable=E1101
        return mock.patch_object(*args, **kwargs)
AttributeError
dataset/ETHPy150Open ganeti/ganeti/test/py/testutils/__init__.py/patch_object
3,625
def pop(self, key, default=None):
    try:
        v = self[key]
    except __HOLE__:
        if default is None:
            raise
        else:
            return default
    else:
        self.expire_cookie(key)
        return v
KeyError
dataset/ETHPy150Open aht/suas/suas/session.py/CookieSession.pop
3,626
@classmethod
def load(klass, request, response):
    """
    Load the session cookies from the request, returning a new instance
    with the response.
    """
    try:
        id = request.cookies['SID'][1:-SIG_LEN-1]
    except KeyError:
        raise NoSIDError
    else:
        c = SignedCookie(SECRET_KEY + id)
        c.load(request.environ['HTTP_COOKIE'])
    try:
        user = User.get_by_key_name(c['user'].value)
    except __HOLE__:
        user = None
    session = klass(user, response)
    session.cookies = c
    return session
KeyError
dataset/ETHPy150Open aht/suas/suas/session.py/CookieSession.load
3,627
def initialize(self, request, response):
    super(RequestHandler, self).initialize(request, response)
    try:
        self.session = CookieSession.load(request, response)
    except NoSIDError:
        self.session = CookieSession(None, self.response)
    except BadSignatureError:
        self.session = CookieSession(None, self.response)
    else:
        self.session.flash_msg = self.session.pop('flash_msg', '')
        now = timegm( gmtime() )
        try:
            atime = int( self.session['atime'] )
        except __HOLE__:
            self.session.end()
            return
        if now - atime > SESSION_TTL:
            self.session.end()
            return
        if now - atime > SID_TTL:
            self.session.regen()
ValueError
dataset/ETHPy150Open aht/suas/suas/session.py/RequestHandler.initialize
3,628
def __call__(self, f, prewrapper_func=None):
    """
    This method is called on execution of the decorator to get the wrapper
    defined inside. The C{prewrapper_func} keyword argument is available
    for subclasses. If defined, the output of the prewrapper function is
    returned instead of the wrapper function. The prewrapper function
    should have one argument: C{wrapper} which is the original function
    that would have been returned.
    """
    def require_obj_permissions_wrapper(obj, *args, **kw):
        """
        Wrapper for the called method that checks the permissions
        before calling the method.
        """
        # check for the permission
        try:
            if self.pop_user_kw:
                user = kw.pop(self.user_kw)
            else:
                user = kw[self.user_kw]
        except __HOLE__:
            raise PermissionSignatureError(self.user_kw)

        missing, temp = ExpedientPermission.objects.get_missing_for_target(
            user, self.perm_names, obj)

        if missing:
            raise PermissionDenied(missing.name, obj, user)

        # All is good. Call the function
        return f(obj, *args, **kw)

    # call the prewrapper if it is defined
    if prewrapper_func:
        return prewrapper_func(require_obj_permissions_wrapper)
    else:
        return require_obj_permissions_wrapper
KeyError
dataset/ETHPy150Open fp7-ofelia/ocf/optin_manager/src/python/openflow/common/permissions/decorators.py/require_obj_permissions.__call__
3,629
def get_message(self, correlation_id):
    try:
        while correlation_id not in self.replies:
            self.consumer.channel.connection.client.drain_events(
                timeout=self.timeout
            )

        body, message = self.replies.pop(correlation_id)
        self.provider.handle_message(body, message)

    except socket.timeout:
        timeout_error = RpcTimeout(self.timeout)
        event = self.provider._reply_events.pop(correlation_id)
        event.send_exception(timeout_error)

        # timeout is implemented using socket timeout, so when it
        # fires the connection is closed, causing the reply queue
        # to be deleted
        self._setup_consumer()

    except ConnectionError as exc:
        for event in self.provider._reply_events.values():
            rpc_connection_error = RpcConnectionError(
                'Disconnected while waiting for reply: %s', exc)
            event.send_exception(rpc_connection_error)
        self.provider._reply_events.clear()
        # In case this was a temporary error, attempt to reconnect. If
        # we fail, the connection error will bubble.
        self._setup_consumer()

    except __HOLE__ as exc:
        event = self.provider._reply_events.pop(correlation_id)
        event.send_exception(exc)
        # exception may have killed the connection
        self._setup_consumer()
KeyboardInterrupt
dataset/ETHPy150Open onefinestay/nameko/nameko/standalone/rpc.py/PollingQueueConsumer.get_message
3,630
def do_urlencode(value):
    """Escape strings for use in URLs (uses UTF-8 encoding). It accepts both
    dictionaries and regular strings as well as pairwise iterables.

    .. versionadded:: 2.7
    """
    itemiter = None
    if isinstance(value, dict):
        itemiter = iteritems(value)
    elif not isinstance(value, string_types):
        try:
            itemiter = iter(value)
        except __HOLE__:
            pass
    if itemiter is None:
        return unicode_urlencode(value)
    return u'&'.join(unicode_urlencode(k) + '=' +
                     unicode_urlencode(v, for_qs=True)
                     for k, v in itemiter)
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_urlencode
3,631
@environmentfilter
def do_first(environment, seq):
    """Return the first item of a sequence."""
    try:
        return next(iter(seq))
    except __HOLE__:
        return environment.undefined('No first item, sequence was empty.')
StopIteration
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_first
3,632
@environmentfilter
def do_last(environment, seq):
    """Return the last item of a sequence."""
    try:
        return next(iter(reversed(seq)))
    except __HOLE__:
        return environment.undefined('No last item, sequence was empty.')
StopIteration
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_last
3,633
@environmentfilter
def do_random(environment, seq):
    """Return a random item from the sequence."""
    try:
        return choice(seq)
    except __HOLE__:
        return environment.undefined('No random item, sequence was empty.')
IndexError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_random
3,634
def do_int(value, default=0, base=10):
    """Convert the value into an integer. If the
    conversion doesn't work it will return ``0``. You can
    override this default using the first parameter. You
    can also override the default base (10) in the second
    parameter, which handles input with prefixes such as
    0b, 0o and 0x for bases 2, 8 and 16 respectively.
    """
    try:
        return int(value, base)
    except (TypeError, ValueError):
        # this quirk is necessary so that "42.23"|int gives 42.
        try:
            return int(float(value))
        except (__HOLE__, ValueError):
            return default
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_int
3,635
def do_float(value, default=0.0):
    """Convert the value into a floating point number. If the
    conversion doesn't work it will return ``0.0``. You can
    override this default using the first parameter.
    """
    try:
        return float(value)
    except (TypeError, __HOLE__):
        return default
ValueError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_float
3,636
def do_reverse(value):
    """Reverse the object or return an iterator that iterates over it the other
    way round.
    """
    if isinstance(value, string_types):
        return value[::-1]
    try:
        return reversed(value)
    except TypeError:
        try:
            rv = list(value)
            rv.reverse()
            return rv
        except __HOLE__:
            raise FilterArgumentError('argument must be iterable')
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_reverse
3,637
@environmentfilter
def do_attr(environment, obj, name):
    """Get an attribute of an object.  ``foo|attr("bar")`` works like
    ``foo.bar`` just that always an attribute is returned and items are not
    looked up.

    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
    """
    try:
        name = str(name)
    except UnicodeError:
        pass
    else:
        try:
            value = getattr(obj, name)
        except __HOLE__:
            pass
        else:
            if environment.sandboxed and not \
               environment.is_safe_attribute(obj, name, value):
                return environment.unsafe_undefined(obj, name)
            return value
    return environment.undefined(obj=obj, name=name)
AttributeError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/jinja2/filters.py/do_attr
3,638
def plotmca(self, mca, title=None, set_title=True, as_mca2=False,
            fullrange=False, init=False, **kws):
    if as_mca2:
        self.mca2 = mca
        kws['new'] = False
    else:
        self.mca = mca
        self.panel.conf.show_grid = False

    if init:
        self.xview_range = (min(self.mca.energy), max(self.mca.energy))
    else:
        self.xview_range = self.panel.axes.get_axes().get_xlim()

    atitles = []
    if self.mca is not None:
        if hasattr(self.mca, 'title'):
            atitles.append(self.mca.title)
        if hasattr(self.mca, 'filename'):
            atitles.append(" File={:s}".format(self.mca.filename))
        if hasattr(self.mca, 'npixels'):
            atitles.append(" {:.0f} Pixels".format(self.mca.npixels))
        if hasattr(self.mca, 'real_time'):
            try:
                rtime_str = " RealTime={:.2f} sec".format(self.mca.real_time)
            except ValueError:
                rtime_str = " RealTime= %s sec".format(str(self.mca.real_time))
            atitles.append(rtime_str)

        try:
            self.plot(self.mca.energy, self.mca.counts,
                      mca=self.mca, **kws)
        except __HOLE__:
            pass
    if as_mca2:
        if hasattr(self.mca2, 'title'):
            atitles.append(" BG={:s}".format(self.mca2.title))
        elif hasattr(self.mca2, 'filename'):
            atitles.append(" BG_File={:s}".format(self.mca2.filename))
        if hasattr(self.mca, 'real_time'):
            atitles.append(" BG_RealTime={:.2f} sec".format(self.mca2.real_time))

        self.oplot(self.mca2.energy, self.mca2.counts,
                   mca=self.mca2, **kws)
    if title is None:
        title = ' '.join(atitles)
    if set_title:
        self.SetTitle(title)
ValueError
dataset/ETHPy150Open xraypy/xraylarch/plugins/wx/xrfdisplay.py/XRFDisplayFrame.plotmca
3,639
def latest_version(*names, **kwargs):
    '''
    Return the latest version of the named package available for upgrade or
    installation. If more than one package name is specified, a dict of
    name/version pairs is returned.

    If the latest version of a given package is already installed, an empty
    string will be returned for that package.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.latest_version <package name>
        salt '*' pkg.latest_version <package1> <package2> <package3> ...
    '''
    refresh = salt.utils.is_true(kwargs.pop('refresh', False))

    if len(names) == 0:
        return ''

    # Refresh before looking for the latest version available
    if refresh:
        refresh_db()

    ret = {}
    # Initialize the dict with empty strings
    for name in names:
        ret[name] = ''
    cmd = ['pacman', '-Sp', '--needed', '--print-format', '%n %v']
    cmd.extend(names)

    if 'root' in kwargs:
        cmd.extend(('-r', kwargs['root']))

    out = __salt__['cmd.run_stdout'](cmd,
                                     output_loglevel='trace',
                                     python_shell=False)
    for line in salt.utils.itertools.split(out, '\n'):
        try:
            name, version_num = line.split()
            # Only add to return dict if package is in the list of packages
            # passed, otherwise dependencies will make their way into the
            # return data.
            if name in names:
                ret[name] = version_num
        except (ValueError, __HOLE__):
            pass

    pkgs = {}
    for name in names:
        if not ret[name]:
            if not pkgs:
                pkgs = list_pkgs()
            if name in pkgs:
                ret[name] = pkgs[name]

    # Return a string if only one package name passed
    if len(names) == 1:
        return ret[names[0]]
    return ret

# available_version is being deprecated
IndexError
dataset/ETHPy150Open saltstack/salt/salt/modules/pacman.py/latest_version
3,640
def list_pkgs(versions_as_list=False, **kwargs):
    '''
    List the packages currently installed as a dict::

        {'<package_name>': '<version>'}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_pkgs
    '''
    versions_as_list = salt.utils.is_true(versions_as_list)
    # not yet implemented or not applicable
    if any([salt.utils.is_true(kwargs.get(x))
            for x in ('removed', 'purge_desired')]):
        return {}

    if 'pkg.list_pkgs' in __context__:
        if versions_as_list:
            return __context__['pkg.list_pkgs']
        else:
            ret = copy.deepcopy(__context__['pkg.list_pkgs'])
            __salt__['pkg_resource.stringify'](ret)
            return ret

    cmd = ['pacman', '-Q']

    if 'root' in kwargs:
        cmd.extend(('-r', kwargs['root']))

    ret = {}
    out = __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False)
    for line in salt.utils.itertools.split(out, '\n'):
        if not line:
            continue
        try:
            name, version_num = line.split()[0:2]
        except __HOLE__:
            log.error('Problem parsing pacman -Q: Unexpected formatting in '
                      'line: \'{0}\''.format(line))
        else:
            __salt__['pkg_resource.add_pkg'](ret, name, version_num)

    __salt__['pkg_resource.sort_pkglist'](ret)
    __context__['pkg.list_pkgs'] = copy.deepcopy(ret)
    if not versions_as_list:
        __salt__['pkg_resource.stringify'](ret)
    return ret
ValueError
dataset/ETHPy150Open saltstack/salt/salt/modules/pacman.py/list_pkgs
3,641
def collect_file_tests(lines, lines_to_execute):
    makecase = lambda t: IntegrationTestCase(t, correct, line_nr, column,
                                             start, line)
    start = None
    correct = None
    test_type = None
    for line_nr, line in enumerate(lines, 1):
        if correct is not None:
            r = re.match('^(\d+)\s*(.*)$', correct)
            if r:
                column = int(r.group(1))
                correct = r.group(2)
                start += r.regs[2][0]  # second group, start index
            else:
                column = len(line) - 1  # -1 for the \n
            if test_type == '!':
                yield makecase(TEST_ASSIGNMENTS)
            elif test_type == '<':
                yield makecase(TEST_USAGES)
            elif correct.startswith('['):
                yield makecase(TEST_COMPLETIONS)
            else:
                yield makecase(TEST_DEFINITIONS)
            correct = None
        else:
            try:
                r = re.search(r'(?:^|(?<=\s))#([?!<])\s*([^\n]*)', line)
                # test_type is ? for completion and ! for goto_assignments
                test_type = r.group(1)
                correct = r.group(2)
                # Quick hack to make everything work (not quite a bloody unicorn hack though).
                if correct == '':
                    correct = ' '
                start = r.start()
            except __HOLE__:
                correct = None
            else:
                # skip the test, if this is not specified test
                if lines_to_execute and line_nr not in lines_to_execute:
                    correct = None
AttributeError
dataset/ETHPy150Open davidhalter/jedi/test/run.py/collect_file_tests
3,642
def collect_dir_tests(base_dir, test_files, check_thirdparty=False):
    for f_name in os.listdir(base_dir):
        files_to_execute = [a for a in test_files.items() if f_name.startswith(a[0])]
        lines_to_execute = reduce(lambda x, y: x + y[1], files_to_execute, [])
        if f_name.endswith(".py") and (not test_files or files_to_execute):
            skip = None
            if check_thirdparty:
                lib = f_name.replace('_.py', '')
                try:
                    # there is always an underline at the end.
                    # It looks like: completion/thirdparty/pylab_.py
                    __import__(lib)
                except __HOLE__:
                    skip = 'Thirdparty-Library %s not found.' % lib

            path = os.path.join(base_dir, f_name)

            if is_py3:
                source = open(path, encoding='utf-8').read()
            else:
                source = unicode(open(path).read(), 'UTF-8')

            for case in collect_file_tests(StringIO(source),
                                           lines_to_execute):
                case.path = path
                case.source = source
                if skip:
                    case.skip = skip
                yield case
ImportError
dataset/ETHPy150Open davidhalter/jedi/test/run.py/collect_dir_tests
3,643
def create_directory(directory_path, ensure_writability = False):
    """
    Creates a directory (with subdirs) if it doesn't exist.

    @param directory_path: the path of the directory and subdirectories to be created.
    """
    if ensure_writability:
        if not os.access(os.path.dirname(directory_path), os.W_OK):
            return False

    try:
        os.makedirs(directory_path)
        return True
    except __HOLE__, e:
        if e.errno != errno.EEXIST:
            raise
    return False
OSError
dataset/ETHPy150Open victor-gil-sepulveda/pyProCT/pyproct/tools/scriptTools.py/create_directory
3,644
def vararg_callback(option, opt_str, value, parser):
    """
    Parses a list of float numbers. To be used with 'optparse'.
    Got from: http://docs.python.org/2/library/optparse.html

    @param option: ...
    @param opt_str: ...
    @param value: ...
    @param parser: ...
    """
    assert value is None
    value = []

    def floatable(my_str):
        try:
            float(my_str)
            return True
        except __HOLE__:
            return False

    for arg in parser.rargs:
        # stop on --foo like options
        if arg[:2] == "--" and len(arg) > 2:
            break
        # stop on -a, but not on -3 or -3.0
        if arg[:1] == "-" and len(arg) > 1 and not floatable(arg):
            break
        value.append(float(arg))

    del parser.rargs[:len(value)]
    setattr(parser.values, option.dest, value)
ValueError
dataset/ETHPy150Open victor-gil-sepulveda/pyProCT/pyproct/tools/scriptTools.py/vararg_callback
3,645
def run(self):
    '''Thread main loop'''
    try:
        self.instance._doc_text = None
        self.instance._help_text = None
        self.instance._execute()
        # used for uper class to generate event after execution
        self.instance._after_execute()
    except __HOLE__:
        pass

##############################################################################
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/gui/wx/ipshell_nonblocking.py/_CodeExecutor.run
3,646
def do_execute(self, line):
    """ Tell the thread to process the 'line' command """

    self._line_to_execute = line

    if self._threading:
        #we launch the ipython line execution in a thread to make it
        #interruptible with include it in self namespace to be able
        #to call ce.raise_exc(KeyboardInterrupt)
        self.ce = _CodeExecutor(self)
        self.ce.start()
    else:
        try:
            self._doc_text = None
            self._help_text = None
            self._execute()
            # used for uper class to generate event after execution
            self._after_execute()
        except __HOLE__:
            pass

#----------------------- IPython management section ----------------------
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/gui/wx/ipshell_nonblocking.py/NonBlockingIPShell.do_execute
3,647
def _execute(self):
    '''
    Executes the current line provided by the shell object.
    '''

    orig_stdout = sys.stdout
    sys.stdout = Term.cout
    #self.sys_displayhook_ori = sys.displayhook
    #sys.displayhook = self.displayhook

    try:
        line = self._IP.raw_input(None, self._iter_more)
        if self._IP.autoindent:
            self._IP.readline_startup_hook(None)

    except __HOLE__:
        self._IP.write('\nKeyboardInterrupt\n')
        self._IP.resetbuffer()
        # keep cache in sync with the prompt counter:
        self._IP.outputcache.prompt_count -= 1

        if self._IP.autoindent:
            self._IP.indent_current_nsp = 0
        self._iter_more = 0
    except:
        self._IP.showtraceback()
    else:
        self._IP.write(str(self._IP.outputcache.prompt_out).strip())
        self._iter_more = self._IP.push_line(line)
        if (self._IP.SyntaxTB.last_syntax_error and \
                self._IP.autoedit_syntax):
            self._IP.edit_syntax_error()
    if self._iter_more:
        self._prompt = str(self._IP.outputcache.prompt2).strip()
        if self._IP.autoindent:
            self._IP.readline_startup_hook(self._IP.pre_readline)
    else:
        self._prompt = str(self._IP.outputcache.prompt1).strip()
        self._IP.indent_current_nsp = 0  #we set indentation to 0

    sys.stdout = orig_stdout
    #sys.displayhook = self.sys_displayhook_ori
KeyboardInterrupt
dataset/ETHPy150Open ipython/ipython-py3k/IPython/deathrow/gui/wx/ipshell_nonblocking.py/NonBlockingIPShell._execute
3,648
@bitness.setter
def bitness(self, bits):
    try:
        self.segment_t.bitness = self.BITS_TO_BITNESS[bits]
    except __HOLE__:
        raise exceptions.InvalidBitness("Got {}. Expecting 16, 32 or 64.".format(bits))
KeyError
dataset/ETHPy150Open tmr232/Sark/sark/code/segment.py/Segment.bitness
3,649
def assertTemplateDoesNotExist(self, name):
    try:
        self.assertTemplateExists(name)
    except __HOLE__:
        return
    raise AssertionError('Template exists: %s' % name)
AssertionError
dataset/ETHPy150Open disqus/django-mailviews/mailviews/tests/tests.py/EmailMessageViewTestCase.assertTemplateDoesNotExist
3,650
def main():
    try:
        projectfolders.create_folders(args.project[0], cur_dir)  #Creates all of the project folders we need
        projectfiles.create_files(args.project[0], cur_dir)  #Creates all of the project files we need
    except __HOLE__ as e:
        print(e.strerror)
IOError
dataset/ETHPy150Open Aaronontheweb/scaffold-py/scaffold/__main__.py/main
3,651
def get_cluster_info(host, port): """ return dict with info about nodes in cluster and current version { 'nodes': [ 'IP:port', 'IP:port', ], 'version': '1.4.4' } """ client = Telnet(host, int(port)) client.write(b'version\n') res = client.read_until(b'\r\n').strip() version_list = res.split(b' ') if len(version_list) != 2 or version_list[0] != b'VERSION': raise WrongProtocolData('version', res) version = version_list[1] if StrictVersion(smart_text(version)) >= StrictVersion('1.4.14'): cmd = b'config get cluster\n' else: cmd = b'get AmazonElastiCache:cluster\n' client.write(cmd) res = client.read_until(b'\n\r\nEND\r\n') client.close() ls = list(filter(None, re.compile(br'\r?\n').split(res))) if len(ls) != 4: raise WrongProtocolData(cmd, res) try: version = int(ls[1]) except ValueError: raise WrongProtocolData(cmd, res) nodes = [] try: for node in ls[2].split(b' '): host, ip, port = node.split(b'|') nodes.append('{}:{}'.format(smart_text(ip or host), smart_text(port))) except __HOLE__: raise WrongProtocolData(cmd, res) return { 'version': version, 'nodes': nodes }
ValueError
dataset/ETHPy150Open gusdan/django-elasticache/django_elasticache/cluster_utils.py/get_cluster_info
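A hedged usage sketch for the discovery helper above; the hostname below is a made-up ElastiCache configuration endpoint, and the call only succeeds against a reachable memcached node that speaks this protocol:

    # hypothetical configuration endpoint; any memcached >= 1.4.14 node works
    info = get_cluster_info('mycluster.cfg.use1.cache.amazonaws.com', 11211)
    print(info['version'])   # cluster config version number, e.g. 12
    print(info['nodes'])     # ['10.0.0.1:11211', '10.0.0.2:11211', ...]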
3,652
def check_type(self, interface, typename, type): print "Checking type %s" % typename v = type() for n in dir(v): if n[0] == '_': continue try: value = getattr(v, n) except __HOLE__, errstr: if str(errstr) == "unknown union level": print "ERROR: Unknown union level in %s.%s" % (typename, n) self.errcount += 1 continue print str(errstr)[1:21] if str(errstr)[0:21] == "Can not convert C Type": print "ERROR: Unknown C type for %s.%s" % (typename, n) self.errcount += 1 continue else: print "ERROR: Failed to instantiate %s.%s" % (typename, n) self.errcount += 1 continue except Exception: print "ERROR: Failed to instantiate %s.%s" % (typename, n) self.errcount += 1 continue # now try setting the value back try: print "Setting %s.%s" % (typename, n) setattr(v, n, value) except Exception, e: if isinstance(e, AttributeError) and str(e).endswith("is read-only"): # readonly, ignore continue else: print "ERROR: Failed to set %s.%s: %r: %s" % (typename, n, e.__class__, e) self.errcount += 1 continue # and try a comparison try: if value != getattr(v, n): print "ERROR: Comparison failed for %s.%s: %r != %r" % (typename, n, value, getattr(v, n)) continue except Exception, e: print "ERROR: compare exception for %s.%s: %r: %s" % (typename, n, e.__class__, e) continue
TypeError
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/tests/dcerpc/testrpc.py/RpcTests.check_type
3,653
def _extract_nexe(program_path, processed_images, command): """ Given a `command`, search through the listed tar images (`processed_images`) and extract the nexe matching `command` to the target `program_path` on the host file system. :param program_path: Location (including filename) which specifies the destination of the extracted nexe. :param processed_images: Output of :func:`_process_images`. :param command: The name of a nexe, such as `python` or `myapp.nexe`. """ with open(program_path, 'w') as program_fp: for zvm_image, _, _ in processed_images: try: tf = tarfile.open(zvm_image) nexe_fp = tf.extractfile(command) # once we've found the nexe the user wants to run, # we're done program_fp.write(nexe_fp.read()) return program_path except __HOLE__: # program not found in this image, # go to the next and keep searching pass finally: tf.close()
KeyError
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/_extract_nexe
3,654
def add_image_args(self, zvm_image): if not zvm_image: return img_cache = {} for img in zvm_image: (imgpath, imgmp, imgacc) = (img.split(',') + [None] * 3)[:3] dev_name = img_cache.get(imgpath) if not dev_name: dev_name = self.create_manifest_channel(imgpath) img_cache[imgpath] = dev_name self.nvram_fstab.append((dev_name, imgmp or '/', imgacc or 'ro')) nexe = None try: tar = tarfile.open(name=imgpath) nexe = tar.extractfile(self.program) tmpnexe_fn = os.path.join(self.tmpdir, 'boot.%d' % self.node_id) tmpnexe_fd = open(tmpnexe_fn, 'wb') read_iter = iter(lambda: nexe.read(65535), b'') for chunk in read_iter: tmpnexe_fd.write(chunk) tmpnexe_fd.close() self.program = tmpnexe_fn except (__HOLE__, tarfile.ReadError): pass
KeyError
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/ZvShell.add_image_args
3,655
def parse_return_code(report): rc = report.split('\n', 5)[2] try: rc = int(rc) except __HOLE__: rc = int(rc.replace('user return code = ', '')) return rc
ValueError
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/parse_return_code
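A small sketch of the report layout this function appears to assume: the third line carries the return code, either as a bare integer or with a 'user return code = ' prefix. The report text here is invented for illustration:

    report = ("first report line\n"
              "second report line\n"
              "user return code = 3\n"
              "rest of the report\n")
    assert parse_return_code(report) == 3   # prefix stripped, then int()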
3,656
def run(self): try: self.process = Popen(self.command, stdin=PIPE, stdout=PIPE) self.spawn(True, self.stdin_reader) err_reader = self.spawn(True, self.stderr_reader) rep_reader = self.spawn(True, self.report_reader) writer = self.spawn(True, self.stdout_write) self.process.wait() rep_reader.join() self.rc = parse_return_code(self.report) if self.process.returncode == 0: writer.join() err_reader.join() except (__HOLE__, Exception): pass finally: if self.process: self.process.wait() if self.process.returncode > 0: self.print_error(self.process.returncode) rc = self.rc if self.getrc: rc = self.process.returncode else: rc |= self.process.returncode << 4 sys.exit(rc)
KeyboardInterrupt
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/ZvRunner.run
3,657
def stdin_reader(self): if sys.stdin.isatty(): try: for l in sys.stdin: self.process.stdin.write(l) except __HOLE__: pass else: try: for l in iter(lambda: sys.stdin.read(65535), b''): self.process.stdin.write(l) except IOError: pass self.process.stdin.close()
IOError
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/ZvRunner.stdin_reader
3,658
def stderr_reader(self): err = open(self.stderr) try: for l in iter(lambda: err.read(65535), b''): sys.stderr.write(l) except __HOLE__: pass err.close()
IOError
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/ZvRunner.stderr_reader
3,659
def spawn(argv, master_read=pty_read, stdin_read=pty_read): """Create a spawned process. Based on pty.spawn code.""" # TODO(larsbutler): This type check won't work with python3 # See http://packages.python.org/six/#six.string_types # for a possible solution. if isinstance(argv, (basestring)): argv = (argv,) pid, master_fd = pty.fork() if pid == pty.CHILD: os.execlp(argv[0], *argv) try: mode = tty.tcgetattr(pty.STDIN_FILENO) tty.setraw(pty.STDIN_FILENO) restore = 1 except tty.error: # This is the same as termios.error restore = 0 # get pseudo-terminal window size buf = array.array('h', [0, 0, 0, 0]) fcntl.ioctl(pty.STDOUT_FILENO, termios.TIOCGWINSZ, buf, True) # pass window size settings to forked one fcntl.ioctl(master_fd, termios.TIOCSWINSZ, buf) try: pty_copy(master_fd, master_read, stdin_read) except (__HOLE__, OSError): if restore: tty.tcsetattr(pty.STDIN_FILENO, tty.TCSAFLUSH, mode) os.close(master_fd)
IOError
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/spawn
3,660
def _run_gdb(self): # user wants to debug the program zvsh_args = DebugArgs() zvsh_args.parse(self.cmd_line[1:]) self.args = zvsh_args.args self.zvsh = ZvShell(self.config, self.args.zvm_save_dir) # a month until the debug session times out self.zvsh.config['manifest']['Timeout'] = 60 * 60 * 24 * 30 manifest_file = self.zvsh.add_arguments(self.args) zvm_run = [DEBUG_EXECUTABLE, DEBUG_OPTIONS, manifest_file] command_line = [GDB, '--command=%s' % self.zvsh.add_debug_script()] command_line.extend(self.args.gdb_args) command_line.append('--args') command_line.extend(zvm_run) print (' '.join(command_line)) try: spawn(command_line) except (__HOLE__, Exception): pass finally: self.zvsh.cleanup()
KeyboardInterrupt
dataset/ETHPy150Open zerovm/zerovm-cli/zvshlib/zvsh.py/Shell._run_gdb
3,661
def get_version(): """Get the version info from the gemini package without importing it""" import ast with open(os.path.join(os.path.abspath('../'), "gemini", "version.py"), "r") as init_file: module = ast.parse(init_file.read()) version = (ast.literal_eval(node.value) for node in ast.walk(module) if isinstance(node, ast.Assign) and node.targets[0].id == "__version__") try: return next(version) except __HOLE__: raise ValueError("version could not be located")
StopIteration
dataset/ETHPy150Open arq5x/gemini/docs/conf.py/get_version
3,662
def form(url, method="post", multipart=False, hidden_fields=None, **attrs): """An open tag for a form that will submit to ``url``. You must close the form yourself by calling ``end_form()`` or outputting </form>. Options: ``method`` The method to use when submitting the form, usually either "GET" or "POST". If "PUT", "DELETE", or another verb is used, a hidden input with name _method is added to simulate the verb over POST. ``multipart`` If set to True, the enctype is set to "multipart/form-data". You must set it to true when uploading files, or the browser will submit the filename rather than the file. ``hidden_fields`` Additional hidden fields to add to the beginning of the form. It may be a dict or an iterable of key-value tuples. This is implemented by calling the object's ``.items()`` method if it has one, or just iterating the object. (This will successfully get multiple values for the same key in WebOb MultiDict objects.) Because input tags must be placed in a block tag rather than directly inside the form, all hidden fields will be put in a '<div style="display:none">'. The style prevents the <div> from being displayed or affecting the layout. """ fields = [] attrs["action"] = url if multipart: attrs["enctype"] = "multipart/form-data" if method.lower() in ['post', 'get']: attrs['method'] = method else: attrs['method'] = "post" field = hidden("_method", method, id=None) fields.append(field) if hidden_fields is not None: try: it = hidden_fields.items() except __HOLE__: it = hidden_fields for name, value in it: field = hidden(name, value, id=None) fields.append(field) if fields: div = HTML.tag("div", style="display:none", _nl=True, *fields) else: div = None return HTML.tag("form", div, _closed=False, **attrs)
AttributeError
dataset/ETHPy150Open mikeorr/WebHelpers2/webhelpers2/html/tags.py/form
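A short sketch of how the helper above behaves, assuming the webhelpers2 module layout suggested by the repo path (`hidden` and `HTML.tag` come from the same module, so this only runs in that context):

    from webhelpers2.html.tags import form, end_form

    tag = form("/users", method="PUT", hidden_fields={"csrf_token": "abc123"})
    # -> <form action="/users" method="post"> wrapping a display:none div
    #    holding <input type="hidden" name="_method" value="PUT" /> and
    #    the csrf_token field
    closing = end_form()   # literal </form>, closing the tag yourself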
3,663
def _add_nodes(self, states, container): # to be able to process children recursively as well as the state dict of a machine states = states.values() if isinstance(states, dict) else states for state in states: if state.name in self.seen: continue elif hasattr(state, 'children') and len(state.children) > 0: self.seen.append(state.name) sub = container.add_subgraph(name="cluster_" + state._name, label=state.name, rank='same') self._add_nodes(state.children, sub) else: try: shape = self.style_attributes['node']['default']['shape'] except __HOLE__: shape = 'circle' self.seen.append(state.name) container.add_node(n=state.name, shape=shape)
KeyError
dataset/ETHPy150Open tyarkoni/transitions/transitions/extensions/diagrams.py/AGraph._add_nodes
3,664
def set_edge_state(self, edge_from, edge_to, state='default'): """ Mark a node as active by changing the attributes """ assert hasattr(self, 'graph') edge = self.graph.get_edge(edge_from, edge_to) # Reset all the edges for e in self.graph.edges_iter(): self.set_edge_style(e, 'default') try: self.set_edge_style(edge, state) except __HOLE__: self.set_edge_style(edge, 'default')
KeyError
dataset/ETHPy150Open tyarkoni/transitions/transitions/extensions/diagrams.py/GraphMachine.set_edge_state
3,665
def set_node_state(self, node_name=None, state='default', reset=False): assert hasattr(self, 'graph') if node_name is None: node_name = self.state if reset: for n in self.graph.nodes_iter(): self.set_node_style(n, 'default') if self.graph.has_node(node_name): node = self.graph.get_node(node_name) func = self.set_node_style else: path = node_name.split(NestedState.separator) node = self.graph while len(path) > 0: node = node.get_subgraph('cluster_'+ path.pop(0)) func = self.set_graph_style try: func(node, state) except __HOLE__: func(node, 'default')
KeyError
dataset/ETHPy150Open tyarkoni/transitions/transitions/extensions/diagrams.py/GraphMachine.set_node_state
3,666
def createPaths(self, prefix = './'): """creates the log directory path and the physical directories on disk """ dt = datetime.datetime.now() self.path = "{0}_{1}_{2:04d}{3:02d}{4:02d}_{5:02d}{6:02d}{7:02d}".format( prefix, self.name, dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) #self.path = "%s_%s_%04d%02d%02d_%02d%02d%02d" % \ #(prefix, self.name, #dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) try: self.path = os.path.abspath(self.path) #convert to proper absolute path if not os.path.exists(self.path): os.makedirs(self.path) except __HOLE__ as ex: console.terse("Error: creating server log directory '{0}'\n".format(ex)) return False console.concise(" Created Server {0} Log Directory = '{1}'\n".format(self.name, self.path)) self.logPath = os.path.join(self.path, "{0}.txt".format('log')) self.logPath = os.path.abspath(self.logPath) #convert to proper absolute path return True
OSError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/serving.py/Server.createPaths
3,667
def reopenLog(self): """closes if open then reopens """ self.closeLog() #innocuous to call close() on unopened file try: self.logFile = open(self.logPath, 'a+') except __HOLE__ as ex: console.terse("Error: creating server log file '{0}'\n".format(ex)) self.logFile = None return False console.concise(" Created Server Log file {0}\n".format(self.logPath)) return True
IOError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/serving.py/Server.reopenLog
3,668
def log(self, msg): """Called by runner """ stamp = self.store.stamp #self.stamp is last time run so don't use try: self.logFile.write("%0.4f\t%s\n" % (float(stamp), msg)) except __HOLE__ as ex: #if stamp is not a number then type error console.terse("{0}\n".format(ex)) except ValueError as ex: #if self.logFile already closed then ValueError console.terse("{0}\n".format(ex))
TypeError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/serving.py/Server.log
3,669
def _align_nums(nums): """ Given an array of numerator coefficient arrays [[a_1, a_2,..., a_n],..., [b_1, b_2,..., b_m]], this function pads shorter numerator arrays with zeros so that all numerators have the same length. Such alignment is necessary for functions like 'tf2ss', which needs the alignment when dealing with SIMO transfer functions. """ try: # The statement can throw a ValueError if one # of the numerators is a single digit and another # is array-like e.g. if nums = [5, [1, 2, 3]] nums = asarray(nums) if not np.issubdtype(nums.dtype, np.number): raise ValueError("dtype of numerator is non-numeric") return nums except __HOLE__: nums = list(nums) maxwidth = len(max(nums, key=lambda num: atleast_1d(num).size)) for index, num in enumerate(nums): num = atleast_1d(num).tolist() nums[index] = [0] * (maxwidth - len(num)) + num return atleast_1d(nums)
ValueError
dataset/ETHPy150Open scipy/scipy/scipy/signal/filter_design.py/_align_nums
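A quick illustration of the padding behavior, using the mixed scalar/array input the inline comment mentions; the fallback path left-pads the short numerator:

    # [5, [1, 2, 3]] cannot become a rectangular numeric array directly,
    # so the except branch pads the scalar out to the widest row:
    aligned = _align_nums([5, [1, 2, 3]])
    # aligned == array([[0, 0, 5],
    #                   [1, 2, 3]])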
3,670
def lp2lp(b, a, wo=1.0): """ Transform a lowpass filter prototype to a different frequency. Return an analog low-pass filter with cutoff frequency `wo` from an analog low-pass filter prototype with unity cutoff frequency, in transfer function ('ba') representation. """ a, b = map(atleast_1d, (a, b)) try: wo = float(wo) except __HOLE__: wo = float(wo[0]) d = len(a) n = len(b) M = max((d, n)) pwo = pow(wo, numpy.arange(M - 1, -1, -1)) start1 = max((n - d, 0)) start2 = max((d - n, 0)) b = b * pwo[start1] / pwo[start2:] a = a * pwo[start1] / pwo[start1:] return normalize(b, a)
TypeError
dataset/ETHPy150Open scipy/scipy/scipy/signal/filter_design.py/lp2lp
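For reference, the transform above is the standard lowpass-to-lowpass substitution s -> s/w0, i.e. H_new(s) = H(s/w0); scaling each polynomial coefficient by the matching power of w0 is algebraically the same operation. A tiny check, assuming scipy.signal's public API:

    from scipy.signal import butter, lp2lp

    b, a = butter(3, 1, analog=True)   # prototype with cutoff 1 rad/s
    b10, a10 = lp2lp(b, a, wo=10.0)    # same filter shape, cutoff at 10 rad/s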
3,671
def lp2hp(b, a, wo=1.0): """ Transform a lowpass filter prototype to a highpass filter. Return an analog high-pass filter with cutoff frequency `wo` from an analog low-pass filter prototype with unity cutoff frequency, in transfer function ('ba') representation. """ a, b = map(atleast_1d, (a, b)) try: wo = float(wo) except __HOLE__: wo = float(wo[0]) d = len(a) n = len(b) if wo != 1: pwo = pow(wo, numpy.arange(max((d, n)))) else: pwo = numpy.ones(max((d, n)), b.dtype.char) if d >= n: outa = a[::-1] * pwo outb = resize(b, (d,)) outb[n:] = 0.0 outb[:n] = b[::-1] * pwo[:n] else: outb = b[::-1] * pwo outa = resize(a, (n,)) outa[d:] = 0.0 outa[:d] = a[::-1] * pwo[:d] return normalize(outb, outa)
TypeError
dataset/ETHPy150Open scipy/scipy/scipy/signal/filter_design.py/lp2hp
3,672
def iirdesign(wp, ws, gpass, gstop, analog=False, ftype='ellip', output='ba'): """Complete IIR digital and analog filter design. Given passband and stopband frequencies and gains, construct an analog or digital IIR filter of minimum order for a given basic type. Return the output in numerator, denominator ('ba'), pole-zero ('zpk') or second order sections ('sos') form. Parameters ---------- wp, ws : float Passband and stopband edge frequencies. For digital filters, these are normalized from 0 to 1, where 1 is the Nyquist frequency, pi radians/sample. (`wp` and `ws` are thus in half-cycles / sample.) For example: - Lowpass: wp = 0.2, ws = 0.3 - Highpass: wp = 0.3, ws = 0.2 - Bandpass: wp = [0.2, 0.5], ws = [0.1, 0.6] - Bandstop: wp = [0.1, 0.6], ws = [0.2, 0.5] For analog filters, `wp` and `ws` are angular frequencies (e.g. rad/s). gpass : float The maximum loss in the passband (dB). gstop : float The minimum attenuation in the stopband (dB). analog : bool, optional When True, return an analog filter, otherwise a digital filter is returned. ftype : str, optional The type of IIR filter to design: - Butterworth : 'butter' - Chebyshev I : 'cheby1' - Chebyshev II : 'cheby2' - Cauer/elliptic: 'ellip' - Bessel/Thomson: 'bessel' output : {'ba', 'zpk', 'sos'}, optional Type of output: numerator/denominator ('ba'), pole-zero ('zpk'), or second-order sections ('sos'). Default is 'ba'. Returns ------- b, a : ndarray, ndarray Numerator (`b`) and denominator (`a`) polynomials of the IIR filter. Only returned if ``output='ba'``. z, p, k : ndarray, ndarray, float Zeros, poles, and system gain of the IIR filter transfer function. Only returned if ``output='zpk'``. sos : ndarray Second-order sections representation of the IIR filter. Only returned if ``output=='sos'``. See Also -------- butter : Filter design using order and critical points cheby1, cheby2, ellip, bessel buttord : Find order and critical points from passband and stopband spec cheb1ord, cheb2ord, ellipord iirfilter : General filter design using order and critical frequencies Notes ----- The ``'sos'`` output parameter was added in 0.16.0. """ try: ordfunc = filter_dict[ftype][1] except KeyError: raise ValueError("Invalid IIR filter type: %s" % ftype) except __HOLE__: raise ValueError(("%s does not have order selection. Use " "iirfilter function.") % ftype) wp = atleast_1d(wp) ws = atleast_1d(ws) band_type = 2 * (len(wp) - 1) band_type += 1 if wp[0] >= ws[0]: band_type += 1 btype = {1: 'lowpass', 2: 'highpass', 3: 'bandstop', 4: 'bandpass'}[band_type] N, Wn = ordfunc(wp, ws, gpass, gstop, analog=analog) return iirfilter(N, Wn, rp=gpass, rs=gstop, analog=analog, btype=btype, ftype=ftype, output=output)
IndexError
dataset/ETHPy150Open scipy/scipy/scipy/signal/filter_design.py/iirdesign
3,673
def iirfilter(N, Wn, rp=None, rs=None, btype='band', analog=False, ftype='butter', output='ba'): """ IIR digital and analog filter design given order and critical points. Design an Nth-order digital or analog filter and return the filter coefficients. Parameters ---------- N : int The order of the filter. Wn : array_like A scalar or length-2 sequence giving the critical frequencies. For digital filters, `Wn` is normalized from 0 to 1, where 1 is the Nyquist frequency, pi radians/sample. (`Wn` is thus in half-cycles / sample.) For analog filters, `Wn` is an angular frequency (e.g. rad/s). rp : float, optional For Chebyshev and elliptic filters, provides the maximum ripple in the passband. (dB) rs : float, optional For Chebyshev and elliptic filters, provides the minimum attenuation in the stop band. (dB) btype : {'bandpass', 'lowpass', 'highpass', 'bandstop'}, optional The type of filter. Default is 'bandpass'. analog : bool, optional When True, return an analog filter, otherwise a digital filter is returned. ftype : str, optional The type of IIR filter to design: - Butterworth : 'butter' - Chebyshev I : 'cheby1' - Chebyshev II : 'cheby2' - Cauer/elliptic: 'ellip' - Bessel/Thomson: 'bessel' output : {'ba', 'zpk', 'sos'}, optional Type of output: numerator/denominator ('ba'), pole-zero ('zpk'), or second-order sections ('sos'). Default is 'ba'. Returns ------- b, a : ndarray, ndarray Numerator (`b`) and denominator (`a`) polynomials of the IIR filter. Only returned if ``output='ba'``. z, p, k : ndarray, ndarray, float Zeros, poles, and system gain of the IIR filter transfer function. Only returned if ``output='zpk'``. sos : ndarray Second-order sections representation of the IIR filter. Only returned if ``output=='sos'``. See Also -------- butter : Filter design using order and critical points cheby1, cheby2, ellip, bessel buttord : Find order and critical points from passband and stopband spec cheb1ord, cheb2ord, ellipord iirdesign : General filter design using passband and stopband spec Notes ----- The ``'sos'`` output parameter was added in 0.16.0. Examples -------- Generate a 17th-order Chebyshev II bandpass filter and plot the frequency response: >>> from scipy import signal >>> import matplotlib.pyplot as plt >>> b, a = signal.iirfilter(17, [50, 200], rs=60, btype='band', ... analog=True, ftype='cheby2') >>> w, h = signal.freqs(b, a, 1000) >>> fig = plt.figure() >>> ax = fig.add_subplot(111) >>> ax.semilogx(w, 20 * np.log10(abs(h))) >>> ax.set_title('Chebyshev Type II bandpass frequency response') >>> ax.set_xlabel('Frequency [radians / second]') >>> ax.set_ylabel('Amplitude [dB]') >>> ax.axis((10, 1000, -100, 10)) >>> ax.grid(which='both', axis='both') >>> plt.show() """ ftype, btype, output = [x.lower() for x in (ftype, btype, output)] Wn = asarray(Wn) try: btype = band_dict[btype] except KeyError: raise ValueError("'%s' is an invalid bandtype for filter." % btype) try: typefunc = filter_dict[ftype][0] except KeyError: raise ValueError("'%s' is not a valid basic IIR filter." % ftype) if output not in ['ba', 'zpk', 'sos']: raise ValueError("'%s' is not a valid output form." % output) if rp is not None and rp < 0: raise ValueError("passband ripple (rp) must be positive") if rs is not None and rs < 0: raise ValueError("stopband attenuation (rs) must be positive") # Get analog lowpass prototype if typefunc == buttap: z, p, k = typefunc(N) elif typefunc == besselap: z, p, k = typefunc(N, norm=bessel_norms[ftype]) elif typefunc == cheb1ap: if rp is None: raise ValueError("passband ripple (rp) must be provided to " "design a Chebyshev I filter.") z, p, k = typefunc(N, rp) elif typefunc == cheb2ap: if rs is None: raise ValueError("stopband attenuation (rs) must be provided to " "design a Chebyshev II filter.") z, p, k = typefunc(N, rs) elif typefunc == ellipap: if rs is None or rp is None: raise ValueError("Both rp and rs must be provided to design an " "elliptic filter.") z, p, k = typefunc(N, rp, rs) else: raise NotImplementedError("'%s' not implemented in iirfilter." % ftype) # Pre-warp frequencies for digital filter design if not analog: if numpy.any(Wn < 0) or numpy.any(Wn > 1): raise ValueError("Digital filter critical frequencies " "must be 0 <= Wn <= 1") fs = 2.0 warped = 2 * fs * tan(pi * Wn / fs) else: warped = Wn # transform to lowpass, bandpass, highpass, or bandstop if btype in ('lowpass', 'highpass'): if numpy.size(Wn) != 1: raise ValueError('Must specify a single critical frequency Wn') if btype == 'lowpass': z, p, k = _zpklp2lp(z, p, k, wo=warped) elif btype == 'highpass': z, p, k = _zpklp2hp(z, p, k, wo=warped) elif btype in ('bandpass', 'bandstop'): try: bw = warped[1] - warped[0] wo = sqrt(warped[0] * warped[1]) except __HOLE__: raise ValueError('Wn must specify start and stop frequencies') if btype == 'bandpass': z, p, k = _zpklp2bp(z, p, k, wo=wo, bw=bw) elif btype == 'bandstop': z, p, k = _zpklp2bs(z, p, k, wo=wo, bw=bw) else: raise NotImplementedError("'%s' not implemented in iirfilter." % btype) # Find discrete equivalent if necessary if not analog: z, p, k = _zpkbilinear(z, p, k, fs=fs) # Transform to proper out type (pole-zero, state-space, numer-denom) if output == 'zpk': return z, p, k elif output == 'ba': return zpk2tf(z, p, k) elif output == 'sos': return zpk2sos(z, p, k)
IndexError
dataset/ETHPy150Open scipy/scipy/scipy/signal/filter_design.py/iirfilter
3,674
def mkdir_p(path): try: os.makedirs(path) except __HOLE__ as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise
OSError
dataset/ETHPy150Open Akagi201/learning-python/flask/flask-boost/test/application/utils/helpers.py/mkdir_p
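Usage is idempotent by design; a repeated call on an existing tree is a no-op rather than an error:

    mkdir_p('/tmp/demo/a/b/c')   # creates the whole chain, like `mkdir -p`
    mkdir_p('/tmp/demo/a/b/c')   # EEXIST on a directory: silently ignored
    # any other failure (EACCES, ENOSPC, ...) is still re-raised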
3,675
@staticmethod def sync_repository_last_pull_time(row): """ Last pull synchronization date/time for this repository """ try: repository_id = row["sync_repository.id"] except __HOLE__: return "-" table = current.s3db.sync_task query = (table.repository_id == repository_id) task = current.db(query).select(orderby=~table.last_pull, limitby=(0,1)).first() if task and task.last_pull: return S3DateTime.datetime_represent(task.last_pull, utc=True) else: return current.T("never") # -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/sync.py/SyncDataModel.sync_repository_last_pull_time
3,676
@staticmethod def sync_repository_last_push_time(row): """ Last push synchronization date/time for this repository """ try: repository_id = row["sync_repository.id"] except __HOLE__: return "-" table = current.s3db.sync_task query = (table.repository_id == repository_id) task = current.db(query).select(orderby=~table.last_push, limitby=(0,1)).first() if task and task.last_push: return S3DateTime.datetime_represent(task.last_push, utc=True) else: return current.T("never") # -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/sync.py/SyncDataModel.sync_repository_last_push_time
3,677
def get_specific_reference_set_list(self, mendeley_discipline, provider, interaction): lookup_dict = dict(provider=provider, interaction=interaction, year=self.year, genre=self.genre, host=self.host, mendeley_discipline=mendeley_discipline) lookup_key = ReferenceSetList.build_lookup_key(**lookup_dict) try: percentile_list = reference_set_lists[lookup_key] return {"percentile_list": percentile_list, "mendeley_discipline": mendeley_discipline} except __HOLE__: return None
KeyError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/reference_set.py/ProductLevelReferenceSet.get_specific_reference_set_list
3,678
def process_profile(self, profile): logger.info(u"build_refsets: on {url_slug}".format(url_slug=profile.url_slug)) for product in profile.products_not_removed: if product.biblio.display_title == "no title": # logger.info("no good biblio for tiid {tiid}".format( # tiid=product.tiid)) continue year = product.year try: year = year.replace("'", "").replace('"', '') if int(year[0:4]) < 2000: year = "pre2000" except (__HOLE__, AttributeError): year = "unknown" self.record_product( year=year, genre=product.genre, host=product.host, mendeley_discipline=product.mendeley_discipline) for metric in product.metrics: raw_value = metric.most_recent_snap.raw_value # only add to histogram if it is a number, not a string or mendeley dict etc if not isinstance(raw_value, (int, long, float)): continue self.record_metric( year=year, genre=product.genre, host=product.host, mendeley_discipline=product.mendeley_discipline, provider=metric.provider, interaction=metric.interaction, raw_value=raw_value)
ValueError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/reference_set.py/RefsetBuilder.process_profile
3,679
def launchServer(pipeline, options): # first follow up on the previously reported total number of # stages in the pipeline with how many have already finished: pipeline.printNumberProcessedStages() # expensive, so only create for pipelines that will actually run pipeline.shutdown_ev = Event() # for ideological reasons this should live in a method, but pipeline init is # rather baroque anyway, and arguably launchServer/pipelineDaemon ought to be # a single method with cleaned-up initialization #executors_local = pipeline.options.local or (pipeline.options.queue_type is None) executors_local = pipeline.options.queue_type in [None, 'pbs'] if executors_local: # measured once -- we assume that server memory usage will be # roughly constant at this point pipeline.memAvail = pipeline.options.mem - (float(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) / 10**6) # 2^20? else: pipeline.memAvail = pipeline.options.mem # is the server going to be verbose or not? if options.verbose: def verboseprint(*args): # Print each argument separately so caller doesn't need to # stuff everything to be printed into a single string for arg in args: print(arg,) print() else: verboseprint = lambda *a: None # getIpAddress is similar to socket.gethostbyname(...) # but uses a hack to attempt to avoid returning localhost (127....) network_address = Pyro4.socketutil.getIpAddress(socket.gethostname(), workaround127 = True, ipVersion = 4) daemon = Pyro4.core.Daemon(host=network_address) pipelineURI = daemon.register(pipeline) if options.use_ns: # in the future we might want to launch a nameserver here # instead of relying on a separate executable running ns = Pyro4.locateNS() ns.register("pipeline", pipelineURI) else: # If not using Pyro NameServer, must write uri to file for reading by client. uf = open(options.urifile, 'w') uf.write(pipelineURI.asString()) uf.close() pipeline.setVerbosity(options.verbose) shutdown_time = pe.WAIT_TIMEOUT + pipeline.options.latency_tolerance try: # start Pyro server t = Process(target=daemon.requestLoop) # t.daemon = True # this isn't allowed t.start() # at this point requests made to the Pyro daemon will touch process `t`'s copy # of the pipeline, so modifying `pipeline` won't have any effect. The exception is # communication through its multiprocessing.Event, which we use below to wait # for termination. #FIXME does this leak the memory used by the old pipeline? #if so, avoid doing this or at least `del` the old graph ... verboseprint("Daemon is running at: %s" % daemon.locationStr) logger.info("Daemon is running at: %s", daemon.locationStr) verboseprint("The pipeline's uri is: %s" % str(pipelineURI)) logger.info("The pipeline's uri is: %s", str(pipelineURI)) # handle SIGTERM (sent by SciNet 15-30s before hard kill) by setting # the shutdown event (we shouldn't actually see a SIGTERM on PBS # since PBS submission logic gives us a lifetime related to our walltime # request ...) def handler(sig, _stack): pipeline.shutdown_ev.set() signal.signal(signal.SIGTERM, handler) # spawn a loop to manage executors in a separate process # (here we use a proxy to make calls to manageExecutors because (a) # processes don't share memory, (b) so that its logic is # not interleaved with calls from executors. We could instead use a `select` # for both Pyro and non-Pyro socket events; see the Pyro documentation) p = Pyro4.Proxy(pipelineURI) def loop(): try: logger.debug("Auxiliary loop started") logger.debug("memory limit: %dG; available after server overhead: %.4fG" % (pipeline.options.mem, pipeline.memAvail)) while p.continueLoop(): p.manageExecutors() pipeline.shutdown_ev.wait(LOOP_INTERVAL) except: logger.exception("Server loop encountered a problem. Shutting down.") finally: logger.info("Server loop going to shut down ...") p.set_shutdown_ev() h = Process(target=loop) h.daemon = True h.start() try: jid = os.environ["PBS_JOBID"] output = check_output(['qstat', '-f', jid]) time_left = int(re.search('Walltime.Remaining = (\d*)', output).group(1)) logger.debug("Time remaining: %d s" % time_left) time_to_live = time_left - shutdown_time except: logger.info("I couldn't determine your remaining walltime from qstat.") time_to_live = None flag = pipeline.shutdown_ev.wait(time_to_live) if not flag: logger.info("Time's up!") pipeline.shutdown_ev.set() # FIXME if we terminate abnormally, we should _actually_ kill child executors (if running locally) except __HOLE__: logger.exception("Caught keyboard interrupt, killing executors and shutting down server.") print("\nKeyboardInterrupt caught: cleaning up, shutting down executors.\n") sys.stdout.flush() except: logger.exception("Exception running server in daemon.requestLoop. Server shutting down.") print("%s" % sys.exc_info()) else: # allow time for all clients to contact the server and be told to shut down # (we could instead add a way for the server to notify its registered clients): # otherwise they will crash when they try to contact the (shutdown) server. # It's not important that clients shut down properly (if they see a server crash, they # will cancel their running jobs, but they're done by the time the server exits) # TODO this only makes sense if we are actually shutting down nicely, # and not because we're out of walltime, in which case this doesn't help # (client jobs will die anyway) #print("Sleeping %d s to allow time for clients to shutdown..." % pe.SHUTDOWN_TIME) #time.sleep(pe.SHUTDOWN_TIME) # trying to access variables from `p` in the `finally` clause (in order # to print a shutdown message) hangs for some reason, so do it here instead p.printShutdownMessage() finally: # brutal, but awkward to do with our system of `Event`s # could send a signal to `t` instead: t.terminate()
KeyboardInterrupt
dataset/ETHPy150Open Mouse-Imaging-Centre/pydpiper/pydpiper/pipeline.py/launchServer
3,680
def parse(feedparser, args): """parse a feed using feedparser""" entries = [] args = irc3.utils.Config(args) max_date = datetime.datetime.now() - datetime.timedelta(days=2) for filename in args['filenames']: try: with open(filename + '.updated') as fd: updated = fd.read().strip() except (OSError, IOError): updated = '0' feed = feedparser.parse(filename) for e in feed.entries: if e.updated <= updated: # skip already sent entries continue try: updated_parsed = e.updated_parsed except __HOLE__: continue if datetime.datetime(*updated_parsed[:7]) < max_date: # skip entries older than 2 days continue e['filename'] = filename e['feed'] = args entries.append((e.updated, e)) if entries: entries = sorted(entries, key=itemgetter(0)) with open(filename + '.updated', 'w') as fd: fd.write(str(entries[-1][0])) return entries
AttributeError
dataset/ETHPy150Open gawel/irc3/irc3/plugins/feeds.py/parse
3,681
def imports(self): """show some warnings if needed""" try: import feedparser self.feedparser = feedparser except ImportError: # pragma: no cover self.bot.log.critical('feedparser is not installed') self.feedparser = None try: import requests except __HOLE__: # pragma: no cover self.bot.log.critical('requests is not installed') self.session = None else: self.session = requests.Session() self.session.headers.update(self.headers)
ImportError
dataset/ETHPy150Open gawel/irc3/irc3/plugins/feeds.py/Feeds.imports
3,682
def _cmake_version(cmake_path): command = os.path.join(cmake_path, "cmake") try: simple_exe.output = "" simple_exe('%s --version' % command) version_match = re.search('cmake version ([0-9.]+)', simple_exe.output) del simple_exe.output if version_match: return Version(version_match.group(1)) except __HOLE__: # WindowsError pass return None
OSError
dataset/ETHPy150Open biicode/client/setups/cmake.py/_cmake_version
3,683
def check_output(command, timeout=None, ignore=None, **kwargs): """This is a version of subprocess.check_output that adds a timeout parameter to kill the subprocess if it does not return within the specified time.""" # pylint: disable=too-many-branches if ignore is None: ignore = [] elif isinstance(ignore, int): ignore = [ignore] elif not isinstance(ignore, list) and ignore != 'all': message = 'Invalid value for ignore parameter: "{}"; must be an int or a list' raise ValueError(message.format(ignore)) if 'stdout' in kwargs: raise ValueError('stdout argument not allowed, it will be overridden.') def callback(pid): try: check_output_logger.debug('{} timed out; sending SIGKILL'.format(pid)) os.killpg(pid, signal.SIGKILL) except __HOLE__: pass # process may have already terminated. process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=preexec_function, **kwargs) if timeout: timer = threading.Timer(timeout, callback, [process.pid, ]) timer.start() try: output, error = process.communicate() finally: if timeout: timer.cancel() retcode = process.poll() if retcode: if retcode == -9: # killed, assume due to timeout callback raise TimeoutError(command, output='\n'.join([output, error])) elif ignore != 'all' and retcode not in ignore: raise CalledProcessErrorWithStderr(retcode, command, output=output, error=error) return output, error
OSError
dataset/ETHPy150Open ARM-software/workload-automation/wlauto/utils/misc.py/check_output
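Two hedged call sketches for the wrapper above; the commands are arbitrary, and `TimeoutError` here is the module's own class rather than the builtin:

    # SIGKILL the child's process group if it runs past 5 seconds
    out, err = check_output(['sleep', '1'], timeout=5)

    # tolerate a specific non-zero exit code instead of raising
    out, err = check_output(['grep', 'needle', 'haystack.txt'], ignore=1)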
3,684
def diff_tokens(before_token, after_token): """ Creates a diff of two tokens. If the two tokens are the same it just returns the token (whitespace tokens are considered the same irrespective of type/number of whitespace characters in the token). If the tokens are numeric, the difference between the two values is returned. Otherwise, a string in the form [before -> after] is returned. """ if before_token.isspace() and after_token.isspace(): return after_token elif before_token.isdigit() and after_token.isdigit(): try: diff = int(after_token) - int(before_token) return str(diff) except __HOLE__: return "[%s -> %s]" % (before_token, after_token) elif before_token == after_token: return after_token else: return "[%s -> %s]" % (before_token, after_token)
ValueError
dataset/ETHPy150Open ARM-software/workload-automation/wlauto/utils/misc.py/diff_tokens
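The three branches in practice:

    diff_tokens("  ", "\t")      # -> '\t'   (whitespace counts as equal)
    diff_tokens("10", "25")      # -> '15'   (numeric tokens are differenced)
    diff_tokens("idle", "busy")  # -> '[idle -> busy]'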
3,685
def reverse_lookup(lat, lon): if(lat is None or lon is None): return None key = get_key() try: params = {'format': 'json', 'key': key, 'lat': lat, 'lon': lon} headers = {"Accept-Language": constants.accepted_language} r = requests.get( 'http://open.mapquestapi.com/nominatim/v1/reverse.php?%s' % urllib.urlencode(params), headers=headers ) return r.json() except requests.exceptions.RequestException as e: if(constants.debug is True): print e return None except __HOLE__ as e: if(constants.debug is True): print r.text print e return None
ValueError
dataset/ETHPy150Open jmathai/elodie/elodie/geolocation.py/reverse_lookup
3,686
def lookup(name): if(name is None or len(name) == 0): return None key = get_key() try: params = {'format': 'json', 'key': key, 'location': name} if(constants.debug is True): print 'http://open.mapquestapi.com/geocoding/v1/address?%s' % urllib.urlencode(params) # noqa r = requests.get( 'http://open.mapquestapi.com/geocoding/v1/address?%s' % urllib.urlencode(params) ) return r.json() except requests.exceptions.RequestException as e: if(constants.debug is True): print e return None except __HOLE__ as e: if(constants.debug is True): print r.text print e return None
ValueError
dataset/ETHPy150Open jmathai/elodie/elodie/geolocation.py/lookup
3,687
def from_pyfile(self, filename, silent=False): """Updates the values in the config from a Python file. This function behaves as if the file was imported as module with the :meth:`from_object` function. :param filename: the filename of the config. This can either be an absolute filename or a filename relative to the root path. :param silent: set to ``True`` if you want silent failure for missing files. .. versionadded:: 0.7 `silent` parameter. """ filename = os.path.join(self.root_path, filename) d = types.ModuleType('config') d.__file__ = filename try: with open(filename) as config_file: exec(compile(config_file.read(), filename, 'exec'), d.__dict__) except __HOLE__ as e: if silent and e.errno in (errno.ENOENT, errno.EISDIR): return False e.strerror = 'Unable to load configuration file (%s)' % e.strerror raise self.from_object(d) return True
IOError
dataset/ETHPy150Open pallets/flask/flask/config.py/Config.from_pyfile
3,688
def from_json(self, filename, silent=False): """Updates the values in the config from a JSON file. This function behaves as if the JSON object was a dictionary and passed to the :meth:`from_mapping` function. :param filename: the filename of the JSON file. This can either be an absolute filename or a filename relative to the root path. :param silent: set to ``True`` if you want silent failure for missing files. .. versionadded:: 1.0 """ filename = os.path.join(self.root_path, filename) try: with open(filename) as json_file: obj = json.loads(json_file.read()) except __HOLE__ as e: if silent and e.errno in (errno.ENOENT, errno.EISDIR): return False e.strerror = 'Unable to load configuration file (%s)' % e.strerror raise return self.from_mapping(obj)
IOError
dataset/ETHPy150Open pallets/flask/flask/config.py/Config.from_json
3,689
def handle(self): digest = self.request.get("d") # expects client-side hashing if not digest or len(digest) != 64: return {"success" : False, "reason" : "format"} try: digest.decode("hex") except __HOLE__: return {"success" : False, "reason" : "format"} ret = self.store_digest(digest) if not ret["success"]: del ret["args"] return ret doc_dict = self.doc.to_dict() pay_address = doc_dict.get('payment_address') if not pay_address: return {"success" : False, "reason" : "cant generate pay address"} ret["pay_address"] = pay_address ret["price"] = MIN_SATOSHIS_PAYMENT return ret
TypeError
dataset/ETHPy150Open maraoz/proofofexistence/api.py/ExternalRegisterHandler.handle
3,690
def test_method(self): """ :py:obj:`Context` can be instantiated with one of :py:obj:`SSLv2_METHOD`, :py:obj:`SSLv3_METHOD`, :py:obj:`SSLv23_METHOD`, :py:obj:`TLSv1_METHOD`, :py:obj:`TLSv1_1_METHOD`, or :py:obj:`TLSv1_2_METHOD`. """ methods = [ SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD] for meth in methods: Context(meth) maybe = [SSLv2_METHOD, TLSv1_1_METHOD, TLSv1_2_METHOD] for meth in maybe: try: Context(meth) except (Error, __HOLE__): # Some versions of OpenSSL have SSLv2 / TLSv1.1 / TLSv1.2, some # don't. Difficult to say in advance. pass self.assertRaises(TypeError, Context, "") self.assertRaises(ValueError, Context, 10)
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pyopenssl/OpenSSL/test/test_ssl.py/ContextTests.test_method
3,691
def defaults(): """ Parse `/etc/blueprintignore` and `~/.blueprintignore` to build the default `Rules` object. """ r = None # Check for a fresh cache of the complete blueprintignore(5) rules. if _mtime('/etc/blueprintignore') < _mtime(CACHE) \ and _mtime(os.path.expanduser('~/.blueprintignore')) < _mtime(CACHE) \ and _mtime(__file__) < _mtime(CACHE): try: r = Rules(json.load(open(CACHE))) logging.info('using cached blueprintignore(5) rules') return r except (OSError, __HOLE__): pass # Cache things that are ignored by default first. r = Rules({ 'file': IGNORE.items(), 'package': [('apt', package, False) for package in _apt()] + [('yum', package, False) for package in _yum()], 'service': [('sysvinit', 'skeleton', False)], 'source': [('/', False), ('/usr/local', True)], }) # Cache the patterns stored in the blueprintignore files. logging.info('parsing blueprintignore(5) rules') try: for pathname in ['/etc/blueprintignore', os.path.expanduser('~/.blueprintignore')]: r.parse(open(pathname), negate=True) except IOError: pass # Store the cache to disk. f = _cache_open(CACHE, 'w') json.dump(r, f, indent=2, sort_keys=True) f.close() return r
ValueError
dataset/ETHPy150Open devstructure/blueprint/blueprint/rules.py/defaults
3,692
def _apt(): """ Return the set of packages that should never appear in a blueprint because they're already guaranteed (to some degree) to be there. """ CACHE = '/tmp/blueprint-apt-exclusions' # Read from a cached copy. try: return set([line.rstrip() for line in open(CACHE)]) except IOError: pass logging.info('searching for APT packages to exclude') # Start with the root packages for the various Ubuntu installations. s = set(['grub-pc', 'installation-report', 'language-pack-en', 'language-pack-gnome-en', 'linux-generic-pae', 'linux-server', 'os-prober', 'ubuntu-desktop', 'ubuntu-minimal', 'ubuntu-standard', 'wireless-crda']) # Find the essential and required packages. Every server's got 'em, no # one wants to muddle their blueprint with 'em. for field in ('Essential', 'Priority'): try: p = subprocess.Popen(['dpkg-query', '-f=${{Package}} ${{{0}}}\n'.format(field), '-W'], close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except __HOLE__: _cache_open(CACHE, 'w').close() return s for line in p.stdout: try: package, property = line.rstrip().split() if property in ('yes', 'important', 'required', 'standard'): s.add(package) except ValueError: pass # Walk the dependency tree all the way to the leaves. s = deps.apt(s) # Write to a cache. logging.info('caching excluded APT packages') f = _cache_open(CACHE, 'w') for package in sorted(s): f.write('{0}\n'.format(package)) f.close() return s
OSError
dataset/ETHPy150Open devstructure/blueprint/blueprint/rules.py/_apt
3,693
def _yum(): """ Return the set of packages that should never appear in a blueprint because they're already guaranteed (to some degree) to be there. """ CACHE = '/tmp/blueprint-yum-exclusions' # Read from a cached copy. try: return set([line.rstrip() for line in open(CACHE)]) except IOError: pass logging.info('searching for Yum packages to exclude') # Start with a few groups that install common packages. s = set(['gpg-pubkey']) pattern = re.compile(r'^ (\S+)') try: p = subprocess.Popen(['yum', 'groupinfo', 'core','base', 'gnome-desktop'], close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except __HOLE__: _cache_open(CACHE, 'w').close() return s for line in p.stdout: match = pattern.match(line) if match is not None: s.add(match.group(1)) # Walk the dependency tree all the way to the leaves. s = deps.yum(s) # Write to a cache. logging.info('caching excluded Yum packages') f = _cache_open(CACHE, 'w') for package in sorted(s): f.write('{0}\n'.format(package)) f.close() return s
OSError
dataset/ETHPy150Open devstructure/blueprint/blueprint/rules.py/_yum
3,694
def _mtime(pathname): try: return os.stat(pathname).st_mtime except __HOLE__: return 0
OSError
dataset/ETHPy150Open devstructure/blueprint/blueprint/rules.py/_mtime
3,695
def parse(self, f, negate=False): """ Parse rules from the given file-like object. This is used both for `blueprintignore`(5) and for `blueprint-rules`(1). """ for pattern in f: pattern = pattern.rstrip() # Comments and blank lines. if '' == pattern or '#' == pattern[0]: continue # Negated lines. if '!' == pattern[0]: pattern = pattern[1:] ignored = negate else: ignored = not negate # Normalize file resources, which don't need the : and type # qualifier, into the same format as others, like packages. if ':' == pattern[0]: try: restype, pattern = pattern[1:].split(':', 2) except __HOLE__: continue else: restype = 'file' # Ignore a package and its dependencies or unignore a single # package. Empirically, the best balance of power and # granularity comes from this arrangement. Take # build-essential's mutual dependence with dpkg-dev as an # example of why. if 'package' == restype: try: manager, package = pattern.split('/') except ValueError: logging.warning('invalid package rule "{0}"'. format(pattern)) continue self['package'].append((manager, package, ignored)) if not ignored: for dep in getattr(deps, manager, lambda(arg): [])(package): self['package'].append((manager, dep, ignored)) elif 'service' == restype: try: manager, service = pattern.split('/') except ValueError: logging.warning('invalid service rule "{0}"'. format(pattern)) continue self['service'].append((manager, service, ignored)) # Ignore or unignore a file, glob, or directory tree. else: self[restype].append((pattern, ignored)) return self
ValueError
dataset/ETHPy150Open devstructure/blueprint/blueprint/rules.py/Rules.parse
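A hedged sketch of feeding rules to the parser, mirroring how `defaults()` above constructs a `Rules` and calls `parse`; the rule lines are invented examples of the blueprintignore(5) syntax this method handles:

    from StringIO import StringIO  # Python 2, matching the code above

    r = Rules({'file': [], 'package': [], 'service': [], 'source': []})
    r.parse(StringIO('\n'.join([
        '/var/log',                # ignore a file or directory tree
        ':package:apt/nginx',      # ignore nginx plus its dependencies
        '!:service:sysvinit/ssh',  # '!' flips the sense: keep this service
    ])))
    # pass negate=True, as defaults() does, for blueprintignore files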
3,696
def get(self, key, default=None): try: return self.__getitem__(key) except __HOLE__: return default
KeyError
dataset/ETHPy150Open cooper-software/siteglass/siteglass/config.py/Config.get
3,697
def get_version(self): if self._incremented_version: return self._version if not self.has_version(): return None path = self.get('global.cache_bust.versioning.filename', './version') if os.path.exists(path): version = open(path, 'r').read() try: version = int(version) except __HOLE__: version = 0 else: version = 0 version += 1 open(path, 'w').write(str(version)) self._version = version self._incremented_version = True return self._version
ValueError
dataset/ETHPy150Open cooper-software/siteglass/siteglass/config.py/Config.get_version
3,698
def unicode_from_html(content): """Attempts to decode an HTML string into unicode. If unsuccessful, the original content is returned. """ encodings = get_encodings_from_content(content) for encoding in encodings: try: return unicode(content, encoding) except (UnicodeError, __HOLE__): pass return content
TypeError
dataset/ETHPy150Open bububa/pyTOP/pyTOP/packages/requests/utils.py/unicode_from_html
3,699
def get_unicode_from_response(r): """Returns the requested content back in unicode. :param r: Response object to get unicode content from. Tried: 1. charset from content-type 2. every encodings from ``<meta ... charset=XXX>`` 3. fall back and replace all unicode characters """ tried_encodings = [] # Try charset from content-type encoding = get_encoding_from_headers(r.headers) if encoding: try: return unicode(r.content, encoding) except UnicodeError: tried_encodings.append(encoding) # Fall back: try: return unicode(r.content, encoding, errors='replace') except __HOLE__: return r.content
TypeError
dataset/ETHPy150Open bububa/pyTOP/pyTOP/packages/requests/utils.py/get_unicode_from_response
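A closing sketch for the decoder above, assuming the same requests-based stack the module wraps; the URL is illustrative:

    import requests

    r = requests.get('http://example.com/legacy-page')
    text = get_unicode_from_response(r)
    # tries the Content-Type charset first, then falls back to decoding
    # with errors='replace' so callers always get unicode back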