Column      Type    Values / range
Unnamed: 0  int64   0 - 10k
function    string  lengths 79 - 138k
label       string  20 classes
info        string  lengths 42 - 261
2,500
def no_jump_into_finally_block(output):
    try:
        try:
            output.append(3)
            x = 1
        finally:
            output.append(6)
    except __HOLE__ as e:
        output.append('finally' in str(e))
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_into_finally_block
2,501
def no_jump_out_of_finally_block(output):
    try:
        try:
            output.append(3)
        finally:
            output.append(5)
            output.append(6)
    except __HOLE__ as e:
        output.append('finally' in str(e))
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_out_of_finally_block
2,502
def no_jump_to_non_integers(output):
    try:
        output.append(2)
    except __HOLE__ as e:
        output.append('integer' in str(e))
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_to_non_integers
2,503
def no_jump_without_trace_function():
    try:
        previous_frame = sys._getframe().f_back
        previous_frame.f_lineno = previous_frame.f_lineno
    except __HOLE__ as e:
        # This is the exception we wanted; make sure the error message
        # talks about trace functions.
        if 'trace' not in str(e):
            raise
    else:
        # Something's wrong - the expected exception wasn't raised.
        raise RuntimeError("Trace-function-less jump failed to fail")
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_without_trace_function
2,504
def now_playing(self, cardinal, user, channel, msg):
    # Before we do anything, let's make sure we'll be able to query Last.fm
    if self.api_key is None:
        cardinal.sendMsg(
            channel,
            "Last.fm plugin is not configured. Please set API key."
        )
        self.logger.error(
            "Attempt to get now playing failed, API key not set"
        )
        return

    if not self.conn:
        cardinal.sendMsg(
            channel,
            "Unable to access local Last.fm database."
        )
        self.logger.error(
            "Attempt to get now playing failed, no database connection"
        )
        return

    # Open the cursor for the query to find a saved Last.fm username
    c = self.conn.cursor()

    message = msg.split()

    # If using natural syntax, remove Cardinal's name
    if message[0] != '.np' and message[0] != '.nowplaying':
        message.pop(0)

    # If they supplied user parameter, use that for the query instead
    if len(message) >= 2:
        nick = message[1]
        c.execute("SELECT username FROM users WHERE nick=?", (nick,))
    else:
        nick = user.group(1)
        vhost = user.group(3)
        c.execute(
            "SELECT username FROM users WHERE nick=? OR vhost=?",
            (nick, vhost)
        )
    result = c.fetchone()

    # Use the returned username, or the entered/user's nick otherwise
    if not result:
        try:
            username = message[1]
        except __HOLE__:
            username = user.group(1)
    else:
        username = result[0]

    try:
        uh = urllib2.urlopen(
            "http://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks"
            "&user=%s&api_key=%s&limit=1&format=json" %
            (username, self.api_key)
        )
        content = json.load(uh)
    except Exception:
        cardinal.sendMsg(channel, "Unable to connect to Last.fm.")
        self.logger.exception("Failed to connect to Last.fm")
        return

    if 'error' in content and content['error'] == 10:
        cardinal.sendMsg(
            channel,
            "Last.fm plugin is not configured. Please set API key."
        )
        self.logger.error(
            "Attempt to get now playing failed, API key incorrect"
        )
        return
    elif 'error' in content and content['error'] == 6:
        cardinal.sendMsg(
            channel,
            "Your Last.fm username is incorrect. No user exists by the "
            "username %s." % str(username)
        )
        return

    try:
        song = content['recenttracks']['track'][0]['name']
        artist = content['recenttracks']['track'][0]['artist']['#text']

        cardinal.sendMsg(
            channel,
            "%s is now listening to: %s by %s" %
            (str(username), str(song), str(artist))
        )
    except IndexError:
        cardinal.sendMsg(
            channel,
            "Unable to find any tracks played. "
            "(Is your Last.fm username correct?)"
        )
IndexError
dataset/ETHPy150Open JohnMaguire/Cardinal/plugins/lastfm/plugin.py/LastfmPlugin.now_playing
2,505
def compare(self, cardinal, user, channel, msg):
    # Before we do anything, let's make sure we'll be able to query Last.fm
    if self.api_key is None:
        cardinal.sendMsg(
            channel,
            "Last.fm plugin is not configured correctly. "
            "Please set API key."
        )
        self.logger.error(
            "Attempt to compare users failed, API key not set"
        )
        return

    if not self.conn:
        cardinal.sendMsg(
            channel,
            "Unable to access local Last.fm database."
        )
        self.logger.error(
            "Attempt to compare users failed, no database connection"
        )
        return

    # Open the cursor for the query to find a saved Last.fm username
    c = self.conn.cursor()

    # If they supplied user parameter, use that for the query instead
    message = msg.split()
    if message[0] != '.compare':
        message.pop(0)

    if len(message) < 2:
        cardinal.sendMsg(channel, "Syntax: .compare <username> [username]")

    nick = message[1]
    c.execute("SELECT username FROM users WHERE nick=?", (nick,))
    result = c.fetchone()
    if not result:
        username1 = nick
    else:
        username1 = result[0]

    if len(message) >= 3:
        nick = message[2]
        c.execute("SELECT username FROM users WHERE nick=?", (nick,))
    else:
        nick = user.group(1)
        vhost = user.group(3)
        c.execute(
            "SELECT username FROM users WHERE nick=? OR vhost=?",
            (nick, vhost)
        )
    result = c.fetchone()

    # Use the returned username, or the entered/user's nick otherwise
    if not result:
        username2 = nick
    else:
        username2 = result[0]

    try:
        uh = urllib2.urlopen(
            "http://ws.audioscrobbler.com/2.0/?method=tasteometer.compare"
            "&type1=user&type2=user&value1=%s&value2=%s&api_key=%s"
            "&format=json" % (username1, username2, self.api_key))
        content = json.load(uh)
    except Exception:
        cardinal.sendMsg(channel, "Unable to connect to Last.fm.")
        self.logger.exception("Failed to connect to Last.fm")
        return

    if 'error' in content and content['error'] == 10:
        cardinal.sendMsg(
            channel,
            "Last.fm plugin is not configured. Please set API key."
        )
        self.logger.error(
            "Attempt to compare users failed, API key incorrect"
        )
        return
    elif 'error' in content and content['error'] == 7:
        cardinal.sendMsg(
            channel,
            "One of the Last.fm usernames was invalid. Please try again."
        )
        return

    try:
        result = content['comparison']['result']
        score = int(float(result['score']) * 100)
        artists = []

        if 'artist' not in result['artists']:
            # Return early to avoid error on looping through artists
            cardinal.sendMsg(
                channel,
                "According to Last.fm's Tasteometer, %s and %s share none "
                "of the same music." % (str(username1), str(username2))
            )
            return

        # Account for Last.fm giving a string instead of a list if only
        # one artist is shared
        if not isinstance(result['artists']['artist'], list):
            artists.append(str(result['artists']['artist']['name']))
        else:
            # Loop through all artists to grab artist names
            for i in range(len(result['artists']['artist'])):
                artists.append(str(result['artists']['artist'][i]['name']))

        cardinal.sendMsg(
            channel,
            "According to Last.fm's Tasteometer, %s and %s's music "
            "preferences are %d%% compatible! Some artists they have in "
            "common include: %s" %
            (str(username1), str(username2), score, ', '.join(artists))
        )
    except __HOLE__:
        cardinal.sendMsg(channel, "An unknown error has occurred.")
        self.logger.exception("An unknown error occurred comparing users")
KeyError
dataset/ETHPy150Open JohnMaguire/Cardinal/plugins/lastfm/plugin.py/LastfmPlugin.compare
2,506
def _check_imports():
    """ Dynamically remove optimizers we don't have """

    optlist = ['ALPSO', 'CONMIN', 'FSQP', 'IPOPT', 'NLPQLP',
               'NSGA2', 'PSQP', 'SLSQP', 'SNOPT', 'NLPY_AUGLAG', 'NOMAD']

    for optimizer in optlist[:]:
        try:
            exec('from pyoptsparse import %s' % optimizer)
        except __HOLE__:
            optlist.remove(optimizer)

    return optlist
ImportError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/drivers/pyoptsparse_driver.py/_check_imports
2,507
def run(self, problem):
    """pyOpt execution. Note that pyOpt controls the execution, and the
    individual optimizers (i.e., SNOPT) control the iteration.

    Args
    ----
    problem : `Problem`
        Our parent `Problem`.
    """

    self.pyopt_solution = None
    rel = problem.root._probdata.relevance

    # Metadata Setup
    self.metadata = create_local_meta(None, self.options['optimizer'])
    self.iter_count = 0
    update_local_meta(self.metadata, (self.iter_count,))

    # Initial Run
    with problem.root._dircontext:
        problem.root.solve_nonlinear(metadata=self.metadata)

    opt_prob = Optimization(self.options['title'], self._objfunc)

    # Add all parameters
    param_meta = self.get_desvar_metadata()
    self.indep_list = indep_list = list(param_meta)
    param_vals = self.get_desvars()

    for name, meta in iteritems(param_meta):
        opt_prob.addVarGroup(name, meta['size'], type='c',
                             value=param_vals[name],
                             lower=meta['lower'], upper=meta['upper'])

    opt_prob.finalizeDesignVariables()

    # Figure out parameter subsparsity for paramcomp index connections.
    # sub_param_conns is empty unless there are some index conns.
    # full_param_conns gets filled with the connections to the entire
    # parameter so that those params can be filtered out of the sparse
    # set if the full path is also relevant
    sub_param_conns = {}
    full_param_conns = {}
    for name in indep_list:
        pathname = problem.root.unknowns.metadata(name)['pathname']

        sub_param_conns[name] = {}
        full_param_conns[name] = set()

        for target, info in iteritems(problem.root.connections):
            src, indices = info
            if src == pathname:
                if indices is not None:
                    # Need to map the connection indices onto the desvar
                    # indices if both are declared.
                    dv_idx = param_meta[name].get('indices')
                    indices = set(indices)
                    if dv_idx is not None:
                        indices.intersection_update(dv_idx)
                        ldv_idx = list(dv_idx)
                        mapped_idx = [ldv_idx.index(item) for item in indices]
                        sub_param_conns[name][target] = mapped_idx
                    else:
                        sub_param_conns[name][target] = indices
                else:
                    full_param_conns[name].add(target)

    # Add all objectives
    objs = self.get_objectives()
    self.quantities = list(objs)
    self.sparsity = OrderedDict()
    self.sub_sparsity = OrderedDict()
    for name in objs:
        opt_prob.addObj(name)
        self.sparsity[name] = self.indep_list

    # Calculate and save gradient for any linear constraints.
    lcons = self.get_constraints(lintype='linear').keys()
    if len(lcons) > 0:
        self.lin_jacs = problem.calc_gradient(indep_list, lcons,
                                              return_format='dict')
        #print("Linear Gradient")
        #print(self.lin_jacs)

    # Add all equality constraints
    econs = self.get_constraints(ctype='eq', lintype='nonlinear')
    con_meta = self.get_constraint_metadata()
    self.quantities += list(econs)

    for name in self.get_constraints(ctype='eq'):
        meta = con_meta[name]
        size = meta['size']
        lower = upper = meta['equals']

        # Sparsify Jacobian via relevance
        rels = rel.relevant[name]
        wrt = rels.intersection(indep_list)
        self.sparsity[name] = wrt

        if meta['linear']:
            opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                 linear=True, wrt=wrt,
                                 jac=self.lin_jacs[name])
        else:
            jac = self._build_sparse(name, wrt, size, param_vals,
                                     sub_param_conns, full_param_conns, rels)
            opt_prob.addConGroup(name, size, lower=lower, upper=upper,
                                 wrt=wrt, jac=jac)

    # Add all inequality constraints
    incons = self.get_constraints(ctype='ineq', lintype='nonlinear')
    self.quantities += list(incons)

    for name in self.get_constraints(ctype='ineq'):
        meta = con_meta[name]
        size = meta['size']

        # Bounds - double sided is supported
        lower = meta['lower']
        upper = meta['upper']

        # Sparsify Jacobian via relevance
        rels = rel.relevant[name]
        wrt = rels.intersection(indep_list)
        self.sparsity[name] = wrt

        if meta['linear']:
            opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                 linear=True, wrt=wrt,
                                 jac=self.lin_jacs[name])
        else:
            jac = self._build_sparse(name, wrt, size, param_vals,
                                     sub_param_conns, full_param_conns, rels)
            opt_prob.addConGroup(name, size, upper=upper, lower=lower,
                                 wrt=wrt, jac=jac)

    # Instantiate the requested optimizer
    optimizer = self.options['optimizer']
    try:
        exec('from pyoptsparse import %s' % optimizer)
    except __HOLE__:
        msg = "Optimizer %s is not available in this installation." % \
            optimizer
        raise ImportError(msg)

    optname = vars()[optimizer]
    opt = optname()

    #Set optimization options
    for option, value in self.opt_settings.items():
        opt.setOption(option, value)

    self._problem = problem

    # Execute the optimization problem
    if self.options['gradient method'] == 'pyopt_fd':
        # Use pyOpt's internal finite difference
        fd_step = problem.root.fd_options['step_size']
        sol = opt(opt_prob, sens='FD', sensStep=fd_step,
                  storeHistory=self.hist_file)

    elif self.options['gradient method'] == 'snopt_fd':
        if self.options['optimizer'] == 'SNOPT':
            # Use SNOPT's internal finite difference
            fd_step = problem.root.fd_options['step_size']
            sol = opt(opt_prob, sens=None, sensStep=fd_step,
                      storeHistory=self.hist_file)
        else:
            msg = "SNOPT's internal finite difference can only be used with SNOPT"
            raise Exception(msg)
    else:
        # Use OpenMDAO's differentiator for the gradient
        sol = opt(opt_prob, sens=self._gradfunc,
                  storeHistory=self.hist_file)

    self._problem = None

    # Print results
    if self.options['print_results']:
        print(sol)

    # Pull optimal parameters back into framework and re-run, so that
    # framework is left in the right final state
    dv_dict = sol.getDVs()
    for name in indep_list:
        val = dv_dict[name]
        self.set_desvar(name, val)

    with self.root._dircontext:
        self.root.solve_nonlinear(metadata=self.metadata)

    # Save the most recent solution.
    self.pyopt_solution = sol
    try:
        exit_status = sol.optInform['value']
        self.exit_flag = 1
        if exit_status > 2:  # bad
            self.exit_flag = 0
    except KeyError:
        #nothing is here, so something bad happened!
        self.exit_flag = 0
ImportError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/drivers/pyoptsparse_driver.py/pyOptSparseDriver.run
2,508
def parse(self, field, data):
    try:
        return FieldParser.parse(self, field, data)
    except __HOLE__:
        return InvalidValue(data)
ValueError
dataset/ETHPy150Open olemb/dbfread/examples/print_invalid_values.py/MyFieldParser.parse
2,509
def mkdir_p(path):
    # Python doesn't have an analog to `mkdir -p` < Python 3.2.
    try:
        os.makedirs(path)
    except __HOLE__ as e:
        if e.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
OSError
dataset/ETHPy150Open zulip/zulip/zulip_tools.py/mkdir_p
2,510
def get_deployment_lock(error_rerun_script):
    start_time = time.time()
    got_lock = False
    while time.time() - start_time < 300:
        try:
            os.mkdir(LOCK_DIR)
            got_lock = True
            break
        except __HOLE__:
            print(WARNING + "Another deployment in progress; waiting for lock... (If no deployment is running, rmdir %s)"
                  % (LOCK_DIR,) + ENDC)
            sys.stdout.flush()
            time.sleep(3)

    if not got_lock:
        print(FAIL + "Deployment already in progress. Please run\n" +
              " %s\n" % (error_rerun_script,) +
              "manually when the previous deployment finishes, or run\n" +
              " rmdir %s\n" % (LOCK_DIR,) +
              "if the previous deployment crashed." + ENDC)
        sys.exit(1)
OSError
dataset/ETHPy150Open zulip/zulip/zulip_tools.py/get_deployment_lock
2,511
def cleanup(self):
    # This code sometimes runs when the rest of this module
    # has already been deleted, so it can't use any globals
    # or import anything.
    if self.__tempfiles:
        for file in self.__tempfiles:
            try:
                self.__unlink(file)
            except __HOLE__:
                pass
        del self.__tempfiles[:]
    if self.tempcache:
        self.tempcache.clear()
OSError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/URLopener.cleanup
2,512
def retrieve(self, url, filename=None, reporthook=None, data=None):
    """retrieve(url) returns (filename, headers) for a local object
    or (tempfilename, headers) for a remote object."""
    url = unwrap(toBytes(url))
    if self.tempcache and url in self.tempcache:
        return self.tempcache[url]
    type, url1 = splittype(url)
    if filename is None and (not type or type == 'file'):
        try:
            fp = self.open_local_file(url1)
            hdrs = fp.info()
            fp.close()
            return url2pathname(splithost(url1)[1]), hdrs
        except __HOLE__, msg:
            pass
    fp = self.open(url, data)
    headers = fp.info()
    if filename:
        tfp = open(filename, 'wb')
    else:
        import tempfile
        garbage, path = splittype(url)
        garbage, path = splithost(path or "")
        path, garbage = splitquery(path or "")
        path, garbage = splitattr(path or "")
        suffix = os.path.splitext(path)[1]
        (fd, filename) = tempfile.mkstemp(suffix)
        self.__tempfiles.append(filename)
        tfp = os.fdopen(fd, 'wb')
    result = filename, headers
    if self.tempcache is not None:
        self.tempcache[url] = result
    bs = 1024*8
    size = -1
    read = 0
    blocknum = 0
    if reporthook:
        if "content-length" in headers:
            size = int(headers["Content-Length"])
        reporthook(blocknum, bs, size)
    while 1:
        block = fp.read(bs)
        if block == "":
            break
        read += len(block)
        tfp.write(block)
        blocknum += 1
        if reporthook:
            reporthook(blocknum, bs, size)
    fp.close()
    tfp.close()
    del fp
    del tfp

    # raise exception if actual size does not match content-length header
    if size >= 0 and read < size:
        raise ContentTooShortError("retrieval incomplete: got only %i out "
                                   "of %i bytes" % (read, size), result)

    return result

# Each method named open_<type> knows how to open that type of URL
IOError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/URLopener.retrieve
2,513
def open_local_file(self, url):
    """Use local file."""
    import mimetypes, mimetools, email.Utils
    try:
        from cStringIO import StringIO
    except __HOLE__:
        from StringIO import StringIO
    host, file = splithost(url)
    localname = url2pathname(file)
    try:
        stats = os.stat(localname)
    except OSError, e:
        raise IOError(e.errno, e.strerror, e.filename)
    size = stats.st_size
    modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
    mtype = mimetypes.guess_type(url)[0]
    headers = mimetools.Message(StringIO(
        'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
        (mtype or 'text/plain', size, modified)))
    if not host:
        urlfile = file
        if file[:1] == '/':
            urlfile = 'file://' + file
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    host, port = splitport(host)
    if not port \
       and socket.gethostbyname(host) in (localhost(), thishost()):
        urlfile = file
        if file[:1] == '/':
            urlfile = 'file://' + file
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    raise IOError, ('local file error', 'not on local host')
ImportError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/URLopener.open_local_file
2,514
def open_ftp(self, url):
    """Use FTP protocol."""
    if not isinstance(url, str):
        raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
    import mimetypes, mimetools
    try:
        from cStringIO import StringIO
    except __HOLE__:
        from StringIO import StringIO
    host, path = splithost(url)
    if not host:
        raise IOError, ('ftp error', 'no host given')
    host, port = splitport(host)
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = unquote(user or '')
    passwd = unquote(passwd or '')
    host = socket.gethostbyname(host)
    if not port:
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    path, attrs = splitattr(path)
    path = unquote(path)
    dirs = path.split('/')
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    if dirs and not dirs[0]:
        dirs[0] = '/'
    key = user, host, port, '/'.join(dirs)
    # XXX thread unsafe!
    if len(self.ftpcache) > MAXFTPCACHE:
        # Prune the cache, rather arbitrarily
        for k in self.ftpcache.keys():
            if k != key:
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if not key in self.ftpcache:
            self.ftpcache[key] = \
                ftpwrapper(user, passwd, host, port, dirs)
        if not file:
            type = 'D'
        else:
            type = 'I'
        for attr in attrs:
            attr, value = splitvalue(attr)
            if attr.lower() == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        mtype = mimetypes.guess_type("ftp:" + url)[0]
        headers = ""
        if mtype:
            headers += "Content-Type: %s\n" % mtype
        if retrlen is not None and retrlen >= 0:
            headers += "Content-Length: %d\n" % retrlen
        headers = mimetools.Message(StringIO(headers))
        return addinfourl(fp, headers, "ftp:" + url)
    except ftperrors(), msg:
        raise IOError, ('ftp error', msg), sys.exc_info()[2]
ImportError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/URLopener.open_ftp
2,515
def open_data(self, url, data=None):
    """Use "data" URL."""
    if not isinstance(url, str):
        raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
    # ignore POSTed data
    #
    # syntax of data URLs:
    # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
    # mediatype := [ type "/" subtype ] *( ";" parameter )
    # data      := *urlchar
    # parameter := attribute "=" value
    import mimetools
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    try:
        [type, data] = url.split(',', 1)
    except __HOLE__:
        raise IOError, ('data error', 'bad data URL')
    if not type:
        type = 'text/plain;charset=US-ASCII'
    semi = type.rfind(';')
    if semi >= 0 and '=' not in type[semi:]:
        encoding = type[semi+1:]
        type = type[:semi]
    else:
        encoding = ''
    msg = []
    msg.append('Date: %s'%time.strftime('%a, %d %b %Y %T GMT',
                                        time.gmtime(time.time())))
    msg.append('Content-type: %s' % type)
    if encoding == 'base64':
        import base64
        data = base64.decodestring(data)
    else:
        data = unquote(data)
    msg.append('Content-Length: %d' % len(data))
    msg.append('')
    msg.append(data)
    msg = '\n'.join(msg)
    f = StringIO(msg)
    headers = mimetools.Message(f, 0)
    #f.fileno = None     # needed for addinfourl
    return addinfourl(f, headers, url)
ValueError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/URLopener.open_data
2,516
def prompt_user_passwd(self, host, realm):
    """Override this in a GUI environment!"""
    import getpass
    try:
        user = raw_input("Enter username for %s at %s: " % (realm, host))
        passwd = getpass.getpass("Enter password for %s in %s at %s: " %
            (user, realm, host))
        return user, passwd
    except __HOLE__:
        print
        return None, None

# Utility functions
KeyboardInterrupt
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/FancyURLopener.prompt_user_passwd
2,517
def noheaders():
    """Return an empty mimetools.Message object."""
    global _noheaders
    if _noheaders is None:
        import mimetools
        try:
            from cStringIO import StringIO
        except __HOLE__:
            from StringIO import StringIO
        _noheaders = mimetools.Message(StringIO(), 0)
        _noheaders.fp.close()   # Recycle file descriptor
    return _noheaders

# Utility classes
ImportError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/noheaders
2,518
def splitnport(host, defport=-1):
    """Split host and port, returning numeric port.
    Return given default port if no ':' found; defaults to -1.
    Return numerical port if a valid number are found after ':'.
    Return None if ':' but not a valid number."""
    global _nportprog
    if _nportprog is None:
        import re
        _nportprog = re.compile('^(.*):(.*)$')

    match = _nportprog.match(host)
    if match:
        host, port = match.group(1, 2)
        try:
            if not port:
                raise ValueError, "no digits"
            nport = int(port)
        except __HOLE__:
            nport = None
        return host, nport
    return host, defport
ValueError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/splitnport
2,519
def unquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    res = s.split('%')
    for i in xrange(1, len(res)):
        item = res[i]
        try:
            res[i] = _hextochr[item[:2]] + item[2:]
        except KeyError:
            res[i] = '%' + item
        except __HOLE__:
            res[i] = unichr(int(item[:2], 16)) + item[2:]
    return "".join(res)
UnicodeDecodeError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/unquote
2,520
def quote(s, safe = '/'):
    """quote('abc def') -> 'abc%20def'

    Each part of a URL, e.g. the path info, the query, etc., has a
    different set of reserved characters that must be quoted.

    RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
    the following reserved characters.

    reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
                  "$" | ","

    Each of these characters is reserved in some component of a URL,
    but not necessarily in all of them.

    By default, the quote function is intended for quoting the path
    section of a URL.  Thus, it will not encode '/'.  This character
    is reserved, but in typical usage the quote function is being
    called on a path where the existing slash characters are used as
    reserved characters.
    """
    cachekey = (safe, always_safe)
    try:
        safe_map = _safemaps[cachekey]
    except __HOLE__:
        safe += always_safe
        safe_map = {}
        for i in range(256):
            c = chr(i)
            safe_map[c] = (c in safe) and c or ('%%%02X' % i)
        _safemaps[cachekey] = safe_map
    res = map(safe_map.__getitem__, s)
    return ''.join(res)
KeyError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/quote
2,521
def urlencode(query,doseq=0):
    """Encode a sequence of two-element tuples or dictionary into a URL query string.

    If any values in the query arg are sequences and doseq is true, each
    sequence element is converted to a separate parameter.

    If the query arg is a sequence of two-element tuples, the order of the
    parameters in the output will match the order of parameters in the
    input.
    """

    if hasattr(query,"items"):
        # mapping objects
        query = query.items()
    else:
        # it's a bother at times that strings and string-like objects are
        # sequences...
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # zero-length sequences of all types will get here and succeed,
            # but that's a minor nit - since the original implementation
            # allowed empty dicts that type of behavior probably should be
            # preserved for consistency
        except TypeError:
            ty,va,tb = sys.exc_info()
            raise TypeError, "not a valid non-string sequence or mapping object", tb

    l = []
    if not doseq:
        # preserve old behavior
        for k, v in query:
            k = quote_plus(str(k))
            v = quote_plus(str(v))
            l.append(k + '=' + v)
    else:
        for k, v in query:
            k = quote_plus(str(k))
            if isinstance(v, str):
                v = quote_plus(v)
                l.append(k + '=' + v)
            elif _is_unicode(v):
                # is there a reasonable way to convert to ASCII?
                # encode generates a string, but "replace" or "ignore"
                # lose information and "strict" can raise UnicodeError
                v = quote_plus(v.encode("ASCII","replace"))
                l.append(k + '=' + v)
            else:
                try:
                    # is this a sufficient test for sequence-ness?
                    x = len(v)
                except __HOLE__:
                    # not a sequence
                    v = quote_plus(str(v))
                    l.append(k + '=' + v)
                else:
                    # loop over the sequence
                    for elt in v:
                        l.append(k + '=' + quote_plus(str(elt)))
    return '&'.join(l)

# Proxy handling
TypeError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/urlencode
2,522
def getproxies_internetconfig():
    """Return a dictionary of scheme -> proxy server URL mappings.

    By convention the mac uses Internet Config to store
    proxies.  An HTTP proxy, for instance, is stored under
    the HttpProxy key.

    """
    try:
        import ic
    except __HOLE__:
        return {}

    try:
        config = ic.IC()
    except ic.error:
        return {}
    proxies = {}
    # HTTP:
    if 'UseHTTPProxy' in config and config['UseHTTPProxy']:
        try:
            value = config['HTTPProxyHost']
        except ic.error:
            pass
        else:
            proxies['http'] = 'http://%s' % value
    # FTP: XXXX To be done.
    # Gopher: XXXX To be done.
    return proxies
ImportError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/getproxies_internetconfig
2,523
def getproxies_registry():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Win32 uses the registry to store proxies.

    """
    proxies = {}
    try:
        import _winreg
    except __HOLE__:
        # Std module, so should be around - but you never know!
        return proxies

    try:
        internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
            r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
        proxyEnable = _winreg.QueryValueEx(internetSettings,
                                           'ProxyEnable')[0]
        if proxyEnable:
            # Returned as Unicode but problems if not converted to ASCII
            proxyServer = str(_winreg.QueryValueEx(internetSettings,
                                                   'ProxyServer')[0])
            if '=' in proxyServer:
                # Per-protocol settings
                for p in proxyServer.split(';'):
                    protocol, address = p.split('=', 1)
                    # See if address has a type:// prefix
                    import re
                    if not re.match('^([^/:]+)://', address):
                        address = '%s://%s' % (protocol, address)
                    proxies[protocol] = address
            else:
                # Use one setting for all protocols
                if proxyServer[:5] == 'http:':
                    proxies['http'] = proxyServer
                else:
                    proxies['http'] = 'http://%s' % proxyServer
                    proxies['ftp'] = 'ftp://%s' % proxyServer
        internetSettings.Close()
    except (WindowsError, ValueError, TypeError):
        # Either registry key not found etc, or the value in an
        # unexpected format.
        # proxies already set up to be empty so nothing to do
        pass
    return proxies
ImportError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/getproxies_registry
2,524
def proxy_bypass(host):
    try:
        import _winreg
        import re
    except __HOLE__:
        # Std modules, so should be around - but you never know!
        return 0
    try:
        internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
            r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
        proxyEnable = _winreg.QueryValueEx(internetSettings,
                                           'ProxyEnable')[0]
        proxyOverride = str(_winreg.QueryValueEx(internetSettings,
                                                 'ProxyOverride')[0])
        # ^^^^ Returned as Unicode but problems if not converted to ASCII
    except WindowsError:
        return 0
    if not proxyEnable or not proxyOverride:
        return 0
    # try to make a host list from name and IP address.
    rawHost, port = splitport(host)
    host = [rawHost]
    try:
        addr = socket.gethostbyname(rawHost)
        if addr != rawHost:
            host.append(addr)
    except socket.error:
        pass
    try:
        fqdn = socket.getfqdn(rawHost)
        if fqdn != rawHost:
            host.append(fqdn)
    except socket.error:
        pass
    # make a check value list from the registry entry: replace the
    # '<local>' string by the localhost entry and the corresponding
    # canonical entry.
    proxyOverride = proxyOverride.split(';')
    i = 0
    while i < len(proxyOverride):
        if proxyOverride[i] == '<local>':
            proxyOverride[i:i+1] = ['localhost',
                                    '127.0.0.1',
                                    socket.gethostname(),
                                    socket.gethostbyname(
                                        socket.gethostname())]
        i += 1
    # print proxyOverride
    # now check if we match one of the registry values.
    for test in proxyOverride:
        test = test.replace(".", r"\.")     # mask dots
        test = test.replace("*", r".*")     # change glob sequence
        test = test.replace("?", r".")      # change glob char
        for val in host:
            # print "%s <--> %s" %( test, val )
            if re.match(test, val, re.I):
                return 1
    return 0
ImportError
dataset/ETHPy150Open ofermend/medicare-demo/socialite/jython/Lib/urllib.py/proxy_bypass
2,525
def error(self, obj, name, value):
    """Returns an informative and descriptive error string."""

    wtype = "value"
    wvalue = value
    info = "an array-like object"

    # pylint: disable=E1101
    if self.shape and hasattr(value, 'shape') and value.shape:
        if self.shape != value.shape:
            info += " of shape %s" % str(self.shape)
            wtype = "shape"
            wvalue = str(value.shape)

    vtype = type(value)
    msg = "Variable '%s' must be %s, but a %s of %s (%s) was specified." % \
        (name, info, wtype, wvalue, vtype)
    try:
        obj.raise_exception(msg, ValueError)
    except __HOLE__:
        raise ValueError(msg)
AttributeError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/datatypes/array.py/Array.error
2,526
def _validate_with_metadata(self, obj, name, value, src_units):
    """Perform validation and unit conversion using metadata from
    the source trait.
    """

    # pylint: disable=E1101
    dst_units = self.units

    try:
        pq = PhysicalQuantity(1.0, src_units)
    except __HOLE__:
        raise NameError("while setting value of %s: undefined unit '%s'" %
                        (src_units, name))
    try:
        pq.convert_to_unit(dst_units)
    except NameError:
        raise NameError("undefined unit '%s' for variable '%s'" %
                        (dst_units, name))
    except TypeError:
        msg = "%s: units '%s' are incompatible " % (name, src_units) + \
              "with assigning units of '%s'" % (dst_units)
        raise TypeError(msg)

    value *= pq.value
    return value
NameError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/datatypes/array.py/Array._validate_with_metadata
2,527
def main():
    args = parse_args()
    warnings.simplefilter('always')
    resolver_kwargs = {}
    if args.order is not None:
        resolver_kwargs['order'] = args.order.split(',')
    if args.options is not None:
        resolver_kwargs['options'] = json.loads(args.options)
    resolver = get_resolver(**resolver_kwargs)
    resolver.load_locations(location_file=args.location_file)

    # Variables for statistics.
    city_found = county_found = state_found = country_found = 0
    has_place = has_coordinates = has_geo = has_profile_location = 0
    resolution_method_counts = collections.defaultdict(int)
    skipped_tweets = resolved_tweets = total_tweets = 0

    for i, input_line in enumerate(args.input_file):
        # Show warnings from the input file, not the Python source code.
        def showwarning(message, category, filename, lineno,
                        file=sys.stderr, line=None):
            sys.stderr.write(warnings.formatwarning(
                message, category, args.input_file.name, i+1, line=''))
        warnings.showwarning = showwarning

        try:
            tweet = json.loads(input_line)
        except __HOLE__:
            warnings.warn('Invalid JSON object')
            skipped_tweets += 1
            continue

        # Collect statistics on the tweet.
        if tweet.get('place'):
            has_place += 1
        if tweet.get('coordinates'):
            has_coordinates += 1
        if tweet.get('geo'):
            has_geo += 1
        if tweet.get('user', {}).get('location', ''):
            has_profile_location += 1

        # Perform the actual resolution.
        resolution = resolver.resolve_tweet(tweet)
        if resolution:
            location = resolution[1]
            tweet['location'] = location
            # More statistics.
            resolution_method_counts[location.resolution_method] += 1
            if location.city:
                city_found += 1
            elif location.county:
                county_found += 1
            elif location.state:
                state_found += 1
            elif location.country:
                country_found += 1
            resolved_tweets += 1
        print >> args.output_file, json.dumps(tweet, cls=LocationEncoder)
        total_tweets += 1

    if args.statistics:
        print >> sys.stderr, 'Skipped %d tweets.' % skipped_tweets
        print >> sys.stderr, ('Tweets with "place" key: %d; '
                              '"coordinates" key: %d; '
                              '"geo" key: %d.' % (
                                  has_place, has_coordinates, has_geo))
        print >> sys.stderr, ('Resolved %d tweets to a city, '
                              '%d to a county, %d to a state, '
                              'and %d to a country.' % (
                                  city_found, county_found,
                                  state_found, country_found))
        print >> sys.stderr, ('Tweet resolution methods: %s.' % (
            ', '.join('%d by %s' % (v, k)
                      for (k, v) in resolution_method_counts.iteritems())))
        print >> sys.stderr, 'Resolved locations for %d of %d tweets.' % (
            resolved_tweets, total_tweets)
ValueError
dataset/ETHPy150Open Kitware/minerva/server/libs/carmen/cli.py/main
2,528
def __main_loop(self):
    """The main program loop, reads JSON requests from stdin,
    writes JSON responses to stdout
    """
    while(True):
        # reset result and log
        self.result = False
        self.log = []

        # get the request string, strip the ending "\n"
        self.__request = self.__reader.readline().rstrip()

        # store the start time
        self._start_time = time.time()

        # when pdns is exiting it sends an empty line
        if self.__request == '':
            return

        # deserialize input
        try:
            obj = json.loads(self.__request)
        except ValueError:
            self.log.append('error: cannot parse input "{}"'
                            .format(self.__request))
            self.__write_response()
            return

        # get method name
        method_name = 'do_{}'.format(obj['method'])
        try:
            # get method
            method = getattr(self, method_name)
        except __HOLE__:
            self.result = False
            self.log.append('warning: method "{}" is not implemented'
                            .format(method_name, self.__request))
            self.__write_response()
            continue

        # DEBUG ONLY
        #method(obj['parameters'])
        #self.__write_response()
        #continue

        # execute method
        try:
            method(obj['parameters'])
        except Exception:
            self.result = False
            self.log.append('error: method "{}" failed to execute'
                            .format(method_name, self.__request))
            self.__write_response()
            continue

        # write response
        self.__write_response()
AttributeError
dataset/ETHPy150Open polaris-gslb/polaris-gslb/polaris_pdns/core/remotebackend.py/RemoteBackend.__main_loop
2,529
def __get(self, name, obj, default=None):
    try:
        attr = getattr(self, name)
    except __HOLE__:
        return default
    if callable(attr):
        return attr(obj)
    return attr
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/sitemaps/__init__.py/Sitemap.__get
2,530
def _sync_from_artifact_store(jobstep):
    """Checks and creates new artifacts from the artifact store."""
    url = '{base}/buckets/{jobstep_id}/artifacts/'.format(
        base=current_app.config.get('ARTIFACTS_SERVER'),
        jobstep_id=jobstep.id.hex,
    )
    job = jobstep.job
    try:
        res = requests.get(url, timeout=ARTIFACTS_REQUEST_TIMEOUT_SECS)
        res.raise_for_status()
        artifacts = res.json()
        for artifact in artifacts:
            # Artifact name is guaranteed to be unique in an artifact store bucket.
            artifact_name = artifact['name']
            artifact_path = artifact['relativePath']

            if artifact_name in LOGSOURCE_WHITELIST:
                _, created = get_or_create(LogSource, where={
                    'name': artifact_name,
                    'job': job,
                    'step': jobstep,
                }, defaults={
                    'project': job.project,
                    'date_created': job.date_started,
                    'in_artifact_store': True,
                })
                if created:
                    try:
                        db.session.commit()
                    except IntegrityError as err:
                        db.session.rollback()
                        current_app.logger.error(
                            'DB Error while inserting LogSource %s',
                            artifact_name, exc_info=True)

                # If this artifact is a logsource, don't add it to the list of
                # test artifacts.
                continue

            art, created = get_or_create(Artifact, where={
                # Don't conflict with same artifacts uploaded by other means (Jenkins/Mesos)
                'name': ARTIFACTSTORE_PREFIX + artifact_path,
                'step_id': jobstep.id,
                'job_id': jobstep.job_id,
                'project_id': jobstep.project_id,
            })
            if created:
                art.file.storage = 'changes.storage.artifactstore.ArtifactStoreFileStorage'
                filename = 'buckets/{jobstep_id}/artifacts/{artifact_name}'.format(
                    jobstep_id=jobstep.id.hex,
                    artifact_name=artifact_name,
                )
                art.file.save(None, filename)
                try:
                    db.session.add(art)
                    db.session.commit()
                except IntegrityError, err:
                    db.session.rollback()
                    current_app.logger.error(
                        'DB Error while inserting artifact %s: %s',
                        filename, err)
    except (ConnectionError, __HOLE__, SSLError, Timeout) as err:
        if isinstance(err, HTTPError) and err.response is not None and err.response.status_code == 404:
            # While not all plans use the Artifact Store, 404s are normal and expected.
            # No sense in reporting them.
            pass
        else:
            # Log to sentry - unable to contact artifacts store
            current_app.logger.warning('Error fetching url %s: %s',
                                       url, err, exc_info=True)
    except Exception, err:
        current_app.logger.error('Error updating artifacts for jobstep %s: %s',
                                 jobstep, err, exc_info=True)
        raise err
HTTPError
dataset/ETHPy150Open dropbox/changes/changes/jobs/sync_job_step.py/_sync_from_artifact_store
2,531
def verify(self, mhash, S):
    """Verify that a certain PKCS#1 PSS signature is authentic.

    This function checks if the party holding the private half of the
    given RSA key has really signed the message.

    This function is called ``RSASSA-PSS-VERIFY``, and is specified
    in section 8.1.2 of RFC3447.

    :Parameters:
     mhash : hash object
            The hash that was carried out over the message. This is
            an object belonging to the `Crypto.Hash` module.
     S : string
            The signature that needs to be validated.

    :Return: True if verification is correct. False otherwise.
    """

    # TODO: Verify the key is RSA

    # Set defaults for salt length and mask generation function
    if self._saltLen == None:
        sLen = mhash.digest_size
    else:
        sLen = self._saltLen
    if self._mgfunc:
        mgf = self._mgfunc
    else:
        mgf = lambda x,y: MGF1(x,y,mhash)

    modBits = Crypto.Util.number.size(self._key.n)

    # See 8.1.2 in RFC3447
    k = ceil_div(modBits,8) # Convert from bits to bytes
    # Step 1
    if len(S) != k:
        return False
    # Step 2a (O2SIP), 2b (RSAVP1), and partially 2c (I2OSP)
    # Note that signature must be smaller than the module
    # but RSA.py won't complain about it.
    # TODO: Fix RSA object; don't do it here.
    em = self._key.encrypt(S, 0)[0]
    # Step 2c
    emLen = ceil_div(modBits-1,8)
    em = bchr(0x00)*(emLen-len(em)) + em
    # Step 3
    try:
        result = EMSA_PSS_VERIFY(mhash, em, modBits-1, mgf, sLen)
    except __HOLE__:
        return False
    # Step 4
    return result
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/Signature/PKCS1_PSS.py/PSS_SigScheme.verify
2,532
def ApplyPluginsToBatch(self, hunt_urn, plugins, batch, batch_index):
    exceptions_by_plugin = {}
    for plugin_def, plugin in plugins:
        logging.debug("Processing hunt %s with %s, batch %d", hunt_urn,
                      plugin_def.plugin_name, batch_index)

        try:
            plugin.ProcessResponses(batch)

            stats.STATS.IncrementCounter("hunt_results_ran_through_plugin",
                                         delta=len(batch),
                                         fields=[plugin_def.plugin_name])

            plugin_status = output_plugin.OutputPluginBatchProcessingStatus(
                plugin_descriptor=plugin_def,
                status="SUCCESS",
                batch_index=batch_index,
                batch_size=len(batch))
        except Exception as e:  # pylint: disable=broad-except
            stats.STATS.IncrementCounter("hunt_output_plugin_errors",
                                         fields=[plugin_def.plugin_name])

            plugin_status = output_plugin.OutputPluginBatchProcessingStatus(
                plugin_descriptor=plugin_def,
                status="ERROR",
                summary=utils.SmartStr(e),
                batch_index=batch_index,
                batch_size=len(batch))

            logging.exception("Error processing hunt results: hunt %s, "
                              "plugin %s, batch %d", hunt_urn,
                              plugin_def.plugin_name, batch_index)
            self.Log("Error processing hunt results (hunt %s, "
                     "plugin %s, batch %d): %s" %
                     (hunt_urn, plugin_def.plugin_name, batch_index, e))
            exceptions_by_plugin[plugin_def] = e

        # TODO(user): Change to use StaticAdd once all active hunts are
        # migrated.
        try:
            aff4.FACTORY.Open(self.StatusCollectionUrn(hunt_urn),
                              "PluginStatusCollection", mode="w",
                              token=self.token).Add(plugin_status)
        except IOError:
            collects.PackedVersionedCollection.AddToCollection(
                self.StatusCollectionUrn(hunt_urn),
                [plugin_status], sync=False, token=self.token)

        if plugin_status.status == plugin_status.Status.ERROR:
            try:
                aff4.FACTORY.Open(self.ErrorsCollectionUrn(hunt_urn),
                                  "PluginStatusCollection", mode="w",
                                  token=self.token).Add(plugin_status)
            except __HOLE__:
                collects.PackedVersionedCollection.AddToCollection(
                    self.ErrorsCollectionUrn(hunt_urn),
                    [plugin_status], sync=False, token=self.token)

    return exceptions_by_plugin
IOError
dataset/ETHPy150Open google/grr/grr/lib/hunts/standard.py/ProcessHuntResultsCronFlow.ApplyPluginsToBatch
2,533
def delete_user(email, profile="splunk"):
    '''
    Delete a splunk user by email

    CLI Example:

        salt myminion splunk_user.delete '[email protected]'
    '''
    client = _get_splunk(profile)
    user = list_users(profile).get(email)
    if user:
        try:
            client.users.delete(user.name)
        except (AuthenticationError, __HOLE__) as e:
            log.info('Exception: {0}'.format(str(e)))
            return False
    else:
        return False
    return user.name not in client.users
HTTPError
dataset/ETHPy150Open saltstack/salt/salt/modules/splunk.py/delete_user
2,534
def __init__(self, *args, **kwargs):
    try:
        del kwargs['name']
        del kwargs['expected_type']
    except __HOLE__:
        pass
    super().__init__(*args, name='dart_sdk_path',
                     expected_type=str, **kwargs)
KeyError
dataset/ETHPy150Open guillermooo/dart-sublime-bundle/lib/sdk.py/DartSdkPathSetting.__init__
2,535
def __init__(self, *args, **kwargs):
    try:
        del kwargs['name']
        del kwargs['expected_type']
    except __HOLE__:
        pass
    super().__init__(*args, name='dart_dartium_path',
                     expected_type=str, **kwargs)
KeyError
dataset/ETHPy150Open guillermooo/dart-sublime-bundle/lib/sdk.py/DartiumPathSetting.__init__
2,536
def add(self, repo_url=None, ppa=None):
    """
    This function used to add apt repositories and or ppa's
    If repo_url is provided adds repo file to /etc/apt/sources.list.d/
    If ppa is provided add apt-repository using add-apt-repository
    command.
    """

    if repo_url is not None:
        repo_file_path = ("/etc/apt/sources.list.d/"
                          + EEVariables().ee_repo_file)
        try:
            if not os.path.isfile(repo_file_path):
                with open(repo_file_path,
                          encoding='utf-8', mode='a') as repofile:
                    repofile.write(repo_url)
                    repofile.write('\n')
                    repofile.close()
            elif repo_url not in open(repo_file_path,
                                      encoding='utf-8').read():
                with open(repo_file_path,
                          encoding='utf-8', mode='a') as repofile:
                    repofile.write(repo_url)
                    repofile.write('\n')
                    repofile.close()
            return True
        except __HOLE__ as e:
            Log.debug(self, "{0}".format(e))
            Log.error(self, "File I/O error.")
        except Exception as e:
            Log.debug(self, "{0}".format(e))
            Log.error(self, "Unable to add repo")
    if ppa is not None:
        EEShellExec.cmd_exec(self, "add-apt-repository -y '{ppa_name}'"
                             .format(ppa_name=ppa))
IOError
dataset/ETHPy150Open EasyEngine/easyengine/ee/core/apt_repo.py/EERepo.add
2,537
def remove(self, ppa=None, repo_url=None):
    """
    This function used to remove ppa's
    If ppa is provided adds repo file to /etc/apt/sources.list.d/
    command.
    """
    if ppa:
        EEShellExec.cmd_exec(self, "add-apt-repository -y "
                             "--remove '{ppa_name}'"
                             .format(ppa_name=ppa))
    elif repo_url:
        repo_file_path = ("/etc/apt/sources.list.d/"
                          + EEVariables().ee_repo_file)
        try:
            repofile = open(repo_file_path, "w+")
            repofile.write(repofile.read().replace(repo_url, ""))
            repofile.close()
        except __HOLE__ as e:
            Log.debug(self, "{0}".format(e))
            Log.error(self, "File I/O error.")
        except Exception as e:
            Log.debug(self, "{0}".format(e))
            Log.error(self, "Unable to remove repo")
IOError
dataset/ETHPy150Open EasyEngine/easyengine/ee/core/apt_repo.py/EERepo.remove
2,538
def undeployed(name,
               url='http://localhost:8080/manager',
               timeout=180):
    '''
    Enforce that the WAR will be un-deployed from the server

    name
        the context path to deploy
    url : http://localhost:8080/manager
        the URL of the server manager webapp
    timeout : 180
        timeout for HTTP request to the tomcat manager

    Example:

    .. code-block:: yaml

        jenkins:
          tomcat.undeployed:
            - name: /ran
            - require:
              - service: application-service
    '''

    # Prepare
    ret = {'name': name,
           'result': True,
           'changes': {},
           'comment': ''}

    if not __salt__['tomcat.status'](url, timeout):
        ret['comment'] = 'Tomcat Manager does not response'
        ret['result'] = False
        return ret

    try:
        version = __salt__['tomcat.ls'](url, timeout)[name]['version']
        ret['changes'] = {'undeploy': version}
    except __HOLE__:
        return ret

    # Test
    if __opts__['test']:
        ret['result'] = None
        return ret

    undeploy = __salt__['tomcat.undeploy'](name, url, timeout=timeout)

    if undeploy.startswith('FAIL'):
        ret['result'] = False
        ret['comment'] = undeploy
        return ret

    return ret
KeyError
dataset/ETHPy150Open saltstack/salt/salt/states/tomcat.py/undeployed
2,539
def _colorize(text, colorize=True):
    if not colorize or not sys.stdout.isatty():
        return text
    try:
        from pygments import highlight
        from pygments.formatters import TerminalFormatter
        from pygments.lexers import PythonLexer
        return highlight(text, PythonLexer(), TerminalFormatter())
    except __HOLE__:
        return text
ImportError
dataset/ETHPy150Open scrapy/scrapy/scrapy/utils/display.py/_colorize
2,540
def main():
    usagestr = "usage: %prog [-h] [options] [args]"
    parser = optparse.OptionParser(usage = usagestr)
    parser.set_defaults(numnodes = 5)

    parser.add_option("-n", "--numnodes", dest = "numnodes", type = int,
                      help = "number of nodes")

    def usage(msg = None, err = 0):
        sys.stdout.write("\n")
        if msg:
            sys.stdout.write(msg + "\n\n")
        parser.print_help()
        sys.exit(err)

    # parse command line options
    (options, args) = parser.parse_args()

    if options.numnodes < 1:
        usage("invalid number of nodes: %s" % options.numnodes)

    for a in args:
        sys.stderr.write("ignoring command line argument: '%s'\n" % a)

    start = datetime.datetime.now()

    # IP subnet
    prefix = ipaddr.IPv4Prefix("10.83.0.0/16")
    # session with some EMANE initialization
    cfg = {'verbose': 'false'}
    session = pycore.Session(cfg = cfg, persistent = True)
    session.master = True
    session.location.setrefgeo(47.57917,-122.13232,2.00000)
    session.location.refscale = 150.0
    session.cfg['emane_models'] = "RfPipe, Ieee80211abg, Bypass"
    session.emane.loadmodels()
    if 'server' in globals():
        server.addsession(session)

    # EMANE WLAN
    print "creating EMANE WLAN wlan1"
    wlan = session.addobj(cls = pycore.nodes.EmaneNode, name = "wlan1")
    wlan.setposition(x=80,y=50)
    names = EmaneIeee80211abgModel.getnames()
    values = list(EmaneIeee80211abgModel.getdefaultvalues())
    # TODO: change any of the EMANE 802.11 parameter values here
    for i in range(0, len(names)):
        print "EMANE 80211 \"%s\" = \"%s\"" % (names[i], values[i])
    try:
        values[ names.index('pathlossmode') ] = '2ray'
    except __HOLE__:
        values[ names.index('propagationmodel') ] = '2ray'
    session.emane.setconfig(wlan.objid, EmaneIeee80211abgModel._name, values)

    services_str = "zebra|OSPFv3MDR|vtysh|IPForward"

    print "creating %d nodes with addresses from %s" % \
          (options.numnodes, prefix)
    for i in xrange(1, options.numnodes + 1):
        tmp = session.addobj(cls = pycore.nodes.CoreNode, name = "n%d" % i,
                             objid=i)
        tmp.newnetif(wlan, ["%s/%s" % (prefix.addr(i), prefix.prefixlen)])
        tmp.cmd([SYSCTL_BIN, "net.ipv4.icmp_echo_ignore_broadcasts=0"])
        tmp.setposition(x=150*i,y=150)
        session.services.addservicestonode(tmp, "", services_str,
                                           verbose=False)
        n.append(tmp)

    # this starts EMANE, etc.
    session.node_count = str(options.numnodes + 1)
    session.instantiate()

    # start a shell on node 1
    n[1].term("bash")

    print "elapsed time: %s" % (datetime.datetime.now() - start)
ValueError
dataset/ETHPy150Open coreemu/core/daemon/examples/netns/emane80211.py/main
2,541
def test_connect_in_mocking(self):
    """Ensure that the connect() method works properly in mocking.
    """
    try:
        import mongomock
    except __HOLE__:
        raise SkipTest('you need mongomock installed to run this testcase')

    connect('mongoenginetest', host='mongomock://localhost')
    conn = get_connection()
    self.assertTrue(isinstance(conn, mongomock.MongoClient))

    connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
    conn = get_connection('testdb2')
    self.assertTrue(isinstance(conn, mongomock.MongoClient))

    connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
    conn = get_connection('testdb3')
    self.assertTrue(isinstance(conn, mongomock.MongoClient))

    connect('mongoenginetest4', is_mock=True, alias='testdb4')
    conn = get_connection('testdb4')
    self.assertTrue(isinstance(conn, mongomock.MongoClient))

    connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
    conn = get_connection('testdb5')
    self.assertTrue(isinstance(conn, mongomock.MongoClient))

    connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
    conn = get_connection('testdb6')
    self.assertTrue(isinstance(conn, mongomock.MongoClient))

    connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
    conn = get_connection('testdb7')
    self.assertTrue(isinstance(conn, mongomock.MongoClient))
ImportError
dataset/ETHPy150Open MongoEngine/mongoengine/tests/test_connection.py/ConnectionTest.test_connect_in_mocking
2,542
def __init__(self, rradir, rrdcache=None):
    if rrdtool is None:
        raise errors.InitError(
            "The python module 'rrdtool' is not installed")

    if not os.path.exists(rradir):
        try:
            os.makedirs(rradir)
        except __HOLE__, ex:
            raise errors.InitError("Cannot create %r: %s" % (rradir, ex))
    elif not os.path.isdir(rradir):
        raise errors.InitError("%r is not a directory!" % rradir)
    elif not os.access(rradir, os.R_OK | os.W_OK | os.X_OK):
        raise errors.InitError(
            "%r is not readable and/or writeable!" % rradir)

    def opened_ok(result):
        log.info("Connected to rrdcached on %s", rrdcache)

    def opened_fail(result):
        log.error("Failed to connect to rrdcached: %s" % result)

    self._rradir = rradir
    self._rrdapi = RRDTwistedAPI()

    if rrdcache:
        d = self._rrdapi.open(rrdcache)
        d.addCallback(opened_ok)
        d.addErrback(opened_fail)
    else:
        log.info("No rrdcached, updates will be direct.")
OSError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/trend.py/TrendMaster.__init__
2,543
def __init__(self, conf, rradir, start=None, rrdapi=None, private=False):
    self._step = util.Interval(conf.get("repeat", "1m")).seconds
    self._ds_list = {'_state': {'type': "GAUGE", 'min': None, 'max': None}}
    # _ds_order is the order in the actual file and is set in validate()
    self._ds_order = None
    self._start = start

    if rrdapi is None:
        self._rrdapi = RRDTwistedAPI()
    else:
        self._rrdapi = rrdapi

    def parse_limit(new, old, key):
        limit = old.get(key, None)
        if limit is not None:
            try:
                limit = float(limit)
            except ValueError:
                raise errors.ConfigError(old,
                                         "Invalid %s: %s" % (key, limit))
        new[key] = limit

    def parse_ds(ds_name, ds_conf):
        if 'trend' not in ds_conf or 'type' not in ds_conf['trend']:
            return

        ds_conf['trend'].expand()
        new = { 'type': ds_conf['trend.type'].upper() }
        if new['type'] not in self.TYPES:
            raise errors.ConfigError(ds_conf['trend'],
                                     "Invalid type: %s" % new['type'])

        parse_limit(new, ds_conf['trend'], 'min')
        parse_limit(new, ds_conf['trend'], 'max')
        self._ds_list[ds_name] = new

    parse_ds('_result', conf)
    if conf['query.type'] == "compound":
        for subname, subconf in conf['query'].iteritems():
            if not isinstance(subconf, coil.struct.Struct):
                continue
            parse_ds(subname, subconf)
    else:
        parse_ds('query', conf['query'])

    # Default to a 1 minute step when repeat is useless
    if self._step == 0:
        self._step = 60

    rras = conf.get('trend.rra', None)
    clean = re.compile('^[^\d]*').sub
    self._rras = self.RRAS.copy()

    if isinstance(rras, coil.struct.Struct):
        for interval, period in rras.iteritems():
            interval = clean('', interval)

            try:
                interval = int(util.Interval(interval))
            except util.IntervalError:
                raise errors.ConfigError(conf,
                                         "Invalid RRA interval: %r" % interval)

            try:
                period = int(util.Interval(period))
            except util.IntervalError:
                raise errors.ConfigError(conf,
                                         "Invalid RRA period: %r" % period)

            if not period:
                del self._rras[interval]
            else:
                self._rras[interval] = period
    elif rras is not None:
        raise errors.ConfigError(conf,
                                 "trend.rra must be a struct, got: %r" % rras)

    self._rradir = os.path.abspath(os.path.join(rradir, conf['host']))
    self._rrafile = os.path.join(self._rradir,
                                 "%s.rrd" % conf['description'])

    if not os.path.exists(self._rradir):
        try:
            os.makedirs(self._rradir)
        except __HOLE__, ex:
            raise errors.InitError("Cannot create directory %s: %s" %
                                   (self._rradir, ex))

    coil_file = os.path.join(self._rradir,
                             "%s.coil" % conf['description'])

    # If the config is marked as private then we must make sure
    # it is not world readable. This impacts both access on the
    # local host and other tools such as Railroad.
    if private:
        mode = 0640
    else:
        mode = 0644

    try:
        coil_fd = os.open(
            coil_file, os.O_WRONLY|os.O_TRUNC|os.O_CREAT, mode)
        # Force a chmod/chown just in case the file already existed
        os.fchown(coil_fd, os.getuid(), os.getgid())
        os.fchmod(coil_fd, mode)
        os.write(coil_fd, '%s\n' % conf)
        os.close(coil_fd)
    except OSError, ex:
        raise errors.InitError("Cannot write to %s: %s" %
                               (coil_file, ex))

    if os.path.exists(self._rrafile):
        try:
            self.validate()
        except MismatchError:
            self.replace()
    else:
        self.create()

    log.debug("Loaded trending config: %s", self._ds_list)
OSError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/trend.py/Trend.__init__
2,544
@urlmatch(netloc=r'(.*\.)?api\.weixin\.qq\.com$')
def wechat_api_mock(url, request):
    path = url.path.replace('/cgi-bin/component/', '').replace('/', '_')
    res_file = os.path.join(_FIXTURE_PATH, '%s.json' % path)
    content = {
        'errcode': 99999,
        'errmsg': 'can not find fixture %s' % res_file,
    }
    headers = {
        'Content-Type': 'application/json'
    }
    try:
        with open(res_file, 'rb') as f:
            content = json.loads(f.read().decode('utf-8'))
    except (__HOLE__, ValueError):
        pass
    return response(200, content, headers, request=request)
IOError
dataset/ETHPy150Open jxtech/wechatpy/tests/test_component_api.py/wechat_api_mock
2,545
@contextmanager
def patch_os_environment(remove=None, **values):
    """
    Context manager for patching the operating system environment.
    """
    old_values = {}
    remove = remove or []
    for key in remove:
        old_values[key] = os.environ.pop(key)
    for key, value in values.iteritems():
        old_values[key] = os.getenv(key)
        os.environ[key] = value
    try:
        yield
    finally:
        for old_key, old_value in old_values.iteritems():
            if old_value is None:
                # Value was not present when we entered, so del it out if it's
                # still present.
                try:
                    del os.environ[key]
                except __HOLE__:
                    pass
            else:
                # Restore the old value.
                os.environ[old_key] = old_value
KeyError
dataset/ETHPy150Open quantopian/zipline/zipline/testing/core.py/patch_os_environment
2,546
def _update_metrics(self, slug, game):
    metrics = MetricsSession.get_metrics(slug)
    inverse_mapping = get_inverse_mapping_table(game)
    for session in metrics:
        try:
            s = _Session(session['timestamp'])
            fileDict = {}
            for entry in session['entries']:
                try:
                    (filename, size, mimetype, status) = \
                        (entry['file'], int(entry['size']),
                         entry['type'], entry['status'])
                except __HOLE__:
                    break
                try:
                    asset_name = inverse_mapping[os.path.basename(filename)]
                except KeyError:
                    asset_name = filename
                _, ext = os.path.splitext(asset_name)
                ext = ext[1:] if ext else 'unknown'

                # Add the request to the session.
                s.add_request(size)

                # Add the request to the by_file metrics.
                if filename not in fileDict:
                    fileDict[filename] = _File(asset_name, filename,
                                               size, mimetype, status)
                    s.add_file(size)

            s.humanize()
            timestamp = s.timestamp
            self._session_overviews.append((timestamp, s))
        except KeyError as e:
            LOG.error("Potentially corrupted file found. "
                      "Can't extract metrics data: %s", str(e))
TypeError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/controllers/localv1/metrics.py/MetricsController._update_metrics
2,547
@classmethod
def as_csv(cls, slug, timestamp):
    timestamp_format = '%Y-%m-%d_%H-%M-%S'
    try:
        filename = '%s-%s.csv' % (slug,
                                  time.strftime(timestamp_format,
                                                time.gmtime(float(timestamp))))
    except __HOLE__:
        abort(404, 'Invalid timestamp: %s' % timestamp)
    response.content_type = 'text/csv'
    response.content_disposition = 'attachment; filename=%s' % filename
    data = MetricsSession.get_data_as_csv(slug, timestamp)
    if not data:
        abort(404, 'Session does not exist: %s' % timestamp)
    return data
ValueError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/controllers/localv1/metrics.py/MetricsController.as_csv
2,548
@classmethod
@jsonify
def as_json(cls, slug, timestamp):
    timestamp_format = '%Y-%m-%d_%H-%M-%S'
    try:
        filename = '%s-%s.json' % (slug,
                                   time.strftime(timestamp_format,
                                                 time.gmtime(float(timestamp))))
    except __HOLE__:
        abort(404, 'Invalid timestamp: %s' % timestamp)
    response.content_disposition = 'attachment; filename=%s' % filename
    data = MetricsSession.get_data_as_json(slug, timestamp)
    if not data:
        abort(404, 'Session does not exist: %s' % timestamp)
    return data
ValueError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/controllers/localv1/metrics.py/MetricsController.as_json
2,549
def serve_forever(self):
    """
    Run the DAAP server. Start by advertising the server via Bonjour. Then
    serve requests until CTRL + C is received.
    """
    # Verify that the provider has a server.
    if self.provider.server is None:
        raise ValueError(
            "Cannot start server because the provider has no server to "
            "publish.")

    # Verify that the provider has a database to advertise.
    if not self.provider.server.databases:
        raise ValueError(
            "Cannot start server because the provider has no databases to "
            "publish.")

    # Create WSGI server and run it.
    self.server = WSGIServer((self.ip, self.port), application=self.app)

    # Register Bonjour.
    if self.bonjour:
        self.bonjour.publish(self)

    # Start server until finished
    try:
        self.server.serve_forever()
    except __HOLE__:
        pass
    finally:
        # Unregister Bonjour
        if self.bonjour:
            self.bonjour.unpublish(self)
KeyboardInterrupt
dataset/ETHPy150Open basilfx/flask-daapserver/daapserver/__init__.py/DaapServer.serve_forever
2,550
def __contains__(self, shape):
    try:
        return shape.in_sphere(self)
    except __HOLE__:
        raise TypeError(
            "No 'in_sphere' method supplied by %s" % type(shape)
        )
AttributeError
dataset/ETHPy150Open PythonProgramming/Beginning-Game-Development-with-Python-and-Pygame/gameobjects/sphere.py/Sphere.__contains__
2,551
def intersects(self, shape):
    try:
        return shape.intersects_sphere(self)
    except __HOLE__:
        raise TypeError(
            "No 'intersects_sphere' method supplied by %s" % type(shape)
        )
AttributeError
dataset/ETHPy150Open PythonProgramming/Beginning-Game-Development-with-Python-and-Pygame/gameobjects/sphere.py/Sphere.intersects
2,552
def parse_bytesio(bytesio):
    """Parse the shebang from a file opened for reading binary."""
    if bytesio.read(2) != b'#!':
        return ()
    first_line = bytesio.readline()
    try:
        first_line = first_line.decode('US-ASCII')
    except __HOLE__:
        return ()

    # Require only printable ascii
    for c in first_line:
        if c not in printable:
            return ()

    # shlex.split is horribly broken in py26 on text strings
    cmd = tuple(shlex.split(five.n(first_line)))
    if cmd[0] == '/usr/bin/env':
        cmd = cmd[1:]
    return cmd
UnicodeDecodeError
dataset/ETHPy150Open pre-commit/pre-commit/pre_commit/parse_shebang.py/parse_bytesio
2,553
def _extract_error_json(body):
    """Return error_message from the HTTP response body."""
    error_json = {}
    try:
        body_json = json.loads(body)
        if 'error_message' in body_json:
            raw_msg = body_json['error_message']
            error_json = json.loads(raw_msg)
        else:
            error_body = body_json['errors'][0]
            raw_msg = error_body['title']
            error_json = {'faultstring': error_body['title'],
                          'debuginfo': error_body['detail']}
    except __HOLE__:
        return {}
    return error_json
ValueError
dataset/ETHPy150Open openstack/python-magnumclient/magnumclient/common/httpclient.py/_extract_error_json
2,554
def json_request(self, method, url, **kwargs):
    kwargs.setdefault('headers', {})
    kwargs['headers'].setdefault('Content-Type', 'application/json')
    kwargs['headers'].setdefault('Accept', 'application/json')

    if 'body' in kwargs:
        kwargs['body'] = json.dumps(kwargs['body'])

    resp, body_iter = self._http_request(url, method, **kwargs)
    content_type = resp.getheader('content-type', None)

    if resp.status == 204 or resp.status == 205 or content_type is None:
        return resp, list()

    if 'application/json' in content_type:
        body = ''.join([chunk for chunk in body_iter])
        try:
            body = json.loads(body)
        except __HOLE__:
            LOG.error('Could not decode response body as JSON')
    else:
        body = None

    return resp, body
ValueError
dataset/ETHPy150Open openstack/python-magnumclient/magnumclient/common/httpclient.py/HTTPClient.json_request
2,555
def json_request(self, method, url, **kwargs):
    kwargs.setdefault('headers', {})
    kwargs['headers'].setdefault('Content-Type', 'application/json')
    kwargs['headers'].setdefault('Accept', 'application/json')

    if 'body' in kwargs:
        kwargs['data'] = json.dumps(kwargs.pop('body'))

    resp = self._http_request(url, method, **kwargs)
    body = resp.content
    content_type = resp.headers.get('content-type', None)
    status = resp.status_code
    if status == 204 or status == 205 or content_type is None:
        return resp, list()
    if 'application/json' in content_type:
        try:
            body = resp.json()
        except __HOLE__:
            LOG.error('Could not decode response body as JSON')
    else:
        body = None
    return resp, body
ValueError
dataset/ETHPy150Open openstack/python-magnumclient/magnumclient/common/httpclient.py/SessionClient.json_request
2,556
def description(self, test):
    try:
        # Wrapped _ErrorHolder objects have their own description
        return trim_docstring(test.description)
    except __HOLE__:
        # Fall back to the docstring on the method itself.
        if test._testMethodDoc:
            return trim_docstring(test._testMethodDoc)
        else:
            return 'No description'
AttributeError
dataset/ETHPy150Open pybee/cricket/cricket/pipes.py/PipedTestResult.description
2,557
def __init__(self, driver, timeout, poll_frequency=POLL_FREQUENCY, ignored_exceptions=None):
    """Constructor, takes a WebDriver instance and timeout in seconds.

       :Args:
        - driver - Instance of WebDriver (Ie, Firefox, Chrome or Remote)
        - timeout - Number of seconds before timing out
        - poll_frequency - sleep interval between calls
          By default, it is 0.5 second.
        - ignored_exceptions - iterable structure of exception classes ignored during calls.
          By default, it contains NoSuchElementException only.

       Example:
        from selenium.webdriver.support.ui import WebDriverWait \n
        element = WebDriverWait(driver, 10).until(lambda x: x.find_element_by_id("someId")) \n
        is_disappeared = WebDriverWait(driver, 30, 1, (ElementNotVisibleException)).\ \n
                    until_not(lambda x: x.find_element_by_id("someId").is_displayed())
    """
    self._driver = driver
    self._timeout = timeout
    self._poll = poll_frequency
    # avoid the divide by zero
    if self._poll == 0:
        self._poll = POLL_FREQUENCY
    exceptions = list(IGNORED_EXCEPTIONS)
    if ignored_exceptions is not None:
        try:
            exceptions.extend(iter(ignored_exceptions))
        except __HOLE__:
            # ignored_exceptions is not iterable
            exceptions.append(ignored_exceptions)
    self._ignored_exceptions = tuple(exceptions)
TypeError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/selenium/webdriver/support/wait.py/WebDriverWait.__init__
2,558
@classmethod
def get_first_root_node(cls):
    """
    :returns:

        The first root node in the tree or ``None`` if it is empty.
    """
    try:
        return cls.get_root_nodes()[0]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open tabo/django-treebeard/treebeard/models.py/Node.get_first_root_node
2,559
@classmethod
def get_last_root_node(cls):
    """
    :returns:

        The last root node in the tree or ``None`` if it is empty.
    """
    try:
        return cls.get_root_nodes().reverse()[0]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open tabo/django-treebeard/treebeard/models.py/Node.get_last_root_node
2,560
def get_first_child(self):
    """
    :returns:

        The leftmost node's child, or None if it has no children.
    """
    try:
        return self.get_children()[0]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open tabo/django-treebeard/treebeard/models.py/Node.get_first_child
2,561
def get_last_child(self):
    """
    :returns:

        The rightmost node's child, or None if it has no children.
    """
    try:
        return self.get_children().reverse()[0]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open tabo/django-treebeard/treebeard/models.py/Node.get_last_child
2,562
def __new__(cls, name, bases, attrs):
    try:
        PrefixedSettings
    except __HOLE__:
        # Creating the PrefixedSettings class. Continue.
        pass
    else:
        if PrefixedSettings in bases:
            prefix = get_prefix(attrs, name)
            attrs = prefix_attributes(prefix, attrs)
    return super(PrefixedSettingsBase, cls).__new__(cls, name, bases, attrs)
NameError
dataset/ETHPy150Open matthewwithanm/django-classbasedsettings/cbsettings/settings.py/PrefixedSettingsBase.__new__
2,563
def __new__(cls, name, bases, attrs):
    try:
        AppSettings
    except __HOLE__:
        # Creating the AppSettings class. Continue.
        pass
    else:
        if AppSettings in bases:
            prefix = get_prefix(attrs, name, use_app_name=True)
            attrs = prefix_attributes(prefix, attrs)
    return super(AppSettingsBase, cls).__new__(cls, name, bases, attrs)
NameError
dataset/ETHPy150Open matthewwithanm/django-classbasedsettings/cbsettings/settings.py/AppSettingsBase.__new__
2,564
def load(self, bytes, base_url=None):
    """
    Takes a bytestring and returns a document.
    """
    try:
        data = json.loads(bytes.decode('utf-8'))
    except __HOLE__ as exc:
        raise ParseError('Malformed JSON. %s' % exc)

    doc = _parse_document(data, base_url)
    if not isinstance(doc, Document):
        raise ParseError('Top level node must be a document.')

    return doc
ValueError
dataset/ETHPy150Open core-api/python-client/coreapi/codecs/hal.py/HALCodec.load
2,565
def serve(self):
    while self._event.is_set():
        try:
            self.handle_request()
        except __HOLE__:
            # When server is being closed, while loop can run once
            # after setting self._event = False depending on how it
            # is scheduled.
            pass
TypeError
dataset/ETHPy150Open pydy/pydy/pydy/viz/server.py/StoppableHTTPServer.serve
2,566
def __init__(self):  # pragma: no cover
    libraw = util.find_library('raw')
    try:
        if libraw is not None:
            super(LibRaw, self).__init__(libraw)
        else:
            raise ImportError
    except (ImportError, AttributeError, __HOLE__, IOError):
        raise ImportError('Cannot find LibRaw on your system!')

    try:
        structs = {
            16: structs_16,
            17: structs_17,
        }[self.version_number[1]]
    except KeyError:
        raise ImportError(
            'Unsupported Libraw version: %s.%s.%s.' % self.version_number
        )

    libraw_data_t = structs.libraw_data_t
    libraw_decoder_info_t = structs.libraw_decoder_info_t
    libraw_processed_image_t = structs.libraw_processed_image_t

    # Define arg types
    self.libraw_init.argtypes = [c_int]
    # enum LibRaw_progress
    self.libraw_strprogress.argtypes = [c_int]
    self.libraw_unpack_function_name.argtypes = [POINTER(libraw_data_t)]
    self.libraw_subtract_black.argtypes = [POINTER(libraw_data_t)]
    self.libraw_open_file.argtypes = [POINTER(libraw_data_t), c_char_p]
    self.libraw_open_file_ex.argtypes = [
        POINTER(libraw_data_t), c_char_p, c_int64
    ]
    self.libraw_open_buffer.argtypes = [
        POINTER(libraw_data_t), c_void_p, c_int64
    ]
    self.libraw_unpack.argtypes = [POINTER(libraw_data_t)]
    self.libraw_unpack_thumb.argtypes = [POINTER(libraw_data_t)]
    self.libraw_recycle_datastream.argtypes = [POINTER(libraw_data_t)]
    self.libraw_recycle.argtypes = [POINTER(libraw_data_t)]
    self.libraw_close.argtypes = [POINTER(libraw_data_t)]
    self.libraw_set_memerror_handler.argtypes = [
        POINTER(libraw_data_t), memory_callback, c_void_p,
    ]
    self.libraw_set_dataerror_handler.argtypes = [
        POINTER(libraw_data_t), data_callback, c_void_p,
    ]
    self.libraw_set_progress_handler.argtypes = [
        POINTER(libraw_data_t), progress_callback, c_void_p,
    ]
    self.libraw_adjust_sizes_info_only.argtypes = [
        POINTER(libraw_data_t)
    ]
    self.libraw_dcraw_ppm_tiff_writer.argtypes = [
        POINTER(libraw_data_t), c_char_p
    ]
    self.libraw_dcraw_thumb_writer.argtypes = [
        POINTER(libraw_data_t), c_char_p
    ]
    self.libraw_dcraw_process.argtypes = [POINTER(libraw_data_t)]
    self.libraw_dcraw_make_mem_image.argtypes = [
        POINTER(libraw_data_t), POINTER(c_int)
    ]
    self.libraw_dcraw_make_mem_thumb.argtypes = [
        POINTER(libraw_data_t), POINTER(c_int)
    ]
    self.libraw_dcraw_clear_mem.argtypes = [
        POINTER(libraw_processed_image_t)
    ]
    self.libraw_raw2image.argtypes = [POINTER(libraw_data_t)]
    self.libraw_free_image.argtypes = [POINTER(libraw_data_t)]
    self.libraw_get_decoder_info.argtypes = [
        POINTER(libraw_data_t), POINTER(libraw_decoder_info_t)
    ]
    self.libraw_COLOR.argtypes = [
        POINTER(libraw_data_t), c_int, c_int
    ]

    # Define return types
    self.libraw_init.restype = POINTER(libraw_data_t)
    self.libraw_version.restype = c_char_p
    self.libraw_strprogress.restype = c_char_p
    self.libraw_versionNumber.restype = c_int
    self.libraw_cameraCount.restype = c_int
    self.libraw_cameraList.restype = POINTER(
        c_char_p * self.libraw_cameraCount()
    )
    self.libraw_unpack_function_name.restype = c_char_p
    self.libraw_subtract_black.restype = POINTER(libraw_data_t)
    self.libraw_open_file.restype = c_error
    self.libraw_open_file_ex.restype = c_error
    self.libraw_open_buffer.restype = c_error
    self.libraw_unpack.restype = c_error
    self.libraw_unpack_thumb.restype = c_error
    self.libraw_adjust_sizes_info_only.restype = c_error
    self.libraw_dcraw_ppm_tiff_writer.restype = c_error
    self.libraw_dcraw_thumb_writer.restype = c_error
    self.libraw_dcraw_process.restype = c_error
    self.libraw_dcraw_make_mem_image.restype = POINTER(
        libraw_processed_image_t)
    self.libraw_dcraw_make_mem_thumb.restype = POINTER(
        libraw_processed_image_t)
    self.libraw_raw2image.restype = c_error
    self.libraw_get_decoder_info.restype = c_error
    self.libraw_COLOR.restype = c_int

    # Some special Windows-only garbage:
    try:
        self.libraw_open_wfile.argtypes = [
            POINTER(libraw_data_t), c_wchar_p
        ]
        self.libraw_open_wfile_ex.argtypes = [
            POINTER(libraw_data_t), c_wchar_p, c_int64
        ]
        self.libraw_open_wfile.restype = c_error
        self.libraw_open_wfile_ex.restype = c_error
    except AttributeError:
        pass
OSError
dataset/ETHPy150Open photoshell/rawkit/libraw/bindings.py/LibRaw.__init__
2,567
@login_required
@permission_required("waitinglist.manage_cohorts")
def cohort_member_add(request, pk):
    cohort = Cohort.objects.get(pk=pk)

    if "invite_next" in request.POST:
        try:
            N = int(request.POST["invite_next"])
        except __HOLE__:
            return redirect("waitinglist_cohort_detail", cohort.id)
        # people who are NOT invited or on the site already
        waiting_list = WaitingListEntry.objects.exclude(
            email__in=SignupCode.objects.values("email")
        ).exclude(
            email__in=User.objects.values("email")
        )
        emails = waiting_list.values_list("email", flat=True)[:N]
    else:
        email = request.POST["email"].strip()
        if email:
            emails = [email]
        else:
            emails = []

    for email in emails:
        if not SignupCode.objects.filter(email=email).exists():
            signup_code = SignupCode.create(email=email, max_uses=1, expiry=730)
            signup_code.save()
            SignupCodeCohort.objects.create(signup_code=signup_code, cohort=cohort)

    return redirect("waitinglist_cohort_detail", cohort.id)
ValueError
dataset/ETHPy150Open pinax/django-waitinglist/waitinglist/views.py/cohort_member_add
2,568
def _update_workflow_nodes_json(workflow, json_nodes, id_map, user):
    """Ideally would get objects from form validation instead."""
    nodes = []

    for json_node in json_nodes:
        node = get_or_create_node(workflow, json_node, save=False)

        if node.node_type == 'subworkflow':
            try:
                node.sub_workflow = Workflow.objects.get(id=int(json_node['sub_workflow']))
            except __HOLE__:
                # sub_workflow is None
                node.sub_workflow = None
            except Workflow.DoesNotExist:
                raise StructuredException(code="INVALID_REQUEST_ERROR",
                                          message=_('Error saving workflow'),
                                          data={'errors': 'Chosen subworkflow does not exist.'},
                                          error_code=400)
        elif node.node_type == 'fork' and json_node['node_type'] == 'decision':
            node.save()  # Need to save in case database throws error when performing delete.
            node = node.convert_to_decision()
        node.save()

        id_map[str(json_node['id'])] = node.id

        for key in json_node:
            if key == 'data':
                if isinstance(json_node[key], basestring):
                    node.data = json_node[key]
                else:
                    node.data = json.dumps(json_node[key])
            elif key not in ('node_ptr', 'child_nodes', 'workflow', 'id', 'sub_workflow'):
                setattr(node, key, format_field_value(key, json_node[key]))

        node.workflow = workflow
        node.save()

        # Keep track of nodes in order of received list
        # so that we may iterate over them again in the same order
        # when we handle links
        nodes.append(node)

    # Delete unused nodes from workflow
    old_nodes = Node.objects.filter(workflow=workflow).exclude(id__in=map(lambda x: x.id, nodes))
    for node in old_nodes:
        node.get_full_node().delete()

    return nodes
TypeError
dataset/ETHPy150Open cloudera/hue/apps/oozie/src/oozie/views/api.py/_update_workflow_nodes_json
2,569
def extract_panel_definitions_from_model_class(model, exclude=None):
    if hasattr(model, 'panels'):
        return model.panels

    panels = []

    _exclude = []
    if exclude:
        _exclude.extend(exclude)

    fields = fields_for_model(model, exclude=_exclude, formfield_callback=formfield_for_dbfield)

    for field_name, field in fields.items():
        try:
            panel_class = field.widget.get_panel()
        except __HOLE__:
            panel_class = FieldPanel

        panel = panel_class(field_name)
        panels.append(panel)

    return panels
AttributeError
dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtailadmin/edit_handlers.py/extract_panel_definitions_from_model_class
2,570
def classes(self):
    """
    Additional CSS classnames to add to whatever kind of object this is at output.
    Subclasses of EditHandler should override this, invoking super(B, self).classes()
    to append more classes specific to the situation.
    """
    classes = []

    try:
        classes.append(self.classname)
    except __HOLE__:
        pass

    return classes
AttributeError
dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtailadmin/edit_handlers.py/EditHandler.classes
2,571
@cached_classmethod
def target_models(cls):
    if cls.page_type:
        target_models = []

        for page_type in cls.page_type:
            try:
                target_models.append(resolve_model_string(page_type))
            except LookupError:
                raise ImproperlyConfigured(
                    "{0}.page_type must be of the form 'app_label.model_name', given {1!r}".format(
                        cls.__name__, page_type
                    )
                )
            except __HOLE__:
                raise ImproperlyConfigured(
                    "{0}.page_type refers to model {1!r} that has not been installed".format(
                        cls.__name__, page_type
                    )
                )

        return target_models
    else:
        return [cls.model._meta.get_field(cls.field_name).rel.to]
ValueError
dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtailadmin/edit_handlers.py/BasePageChooserPanel.target_models
2,572
def mount(location, access='rw', root=None):
    '''
    Mount an image

    CLI Example:

    .. code-block:: bash

        salt '*' guest.mount /srv/images/fedora.qcow
    '''
    if root is None:
        root = os.path.join(
            tempfile.gettempdir(),
            'guest',
            location.lstrip(os.sep).replace('/', '.')
        )
        log.debug('Using root {0}'.format(root))
    if not os.path.isdir(root):
        try:
            os.makedirs(root)
        except __HOLE__:
            # Somehow the path already exists
            pass
    while True:
        if os.listdir(root):
            # Stuff is in there, don't use it
            hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5'))
            rand = hash_type(os.urandom(32)).hexdigest()
            root = os.path.join(
                tempfile.gettempdir(),
                'guest',
                location.lstrip(os.sep).replace('/', '.') + rand
            )
            log.debug('Establishing new root as {0}'.format(root))
        else:
            break
    cmd = 'guestmount -i -a {0} --{1} {2}'.format(location, access, root)
    __salt__['cmd.run'](cmd, python_shell=False)
    return root
OSError
dataset/ETHPy150Open saltstack/salt/salt/modules/guestfs.py/mount
2,573
def run(self):
    """
    fetch artefacts
    """
    source_path = self.workflow.source.path
    sources_file_path = os.path.join(source_path, 'sources')
    artefacts = ""
    try:
        with open(sources_file_path, 'r') as f:
            artefacts = f.read()
        self.log.info('sources file:\n%s', artefacts)
    except __HOLE__ as ex:
        if ex.errno == 2:
            self.log.info("no sources file")
        else:
            raise
    else:
        cur_dir = os.getcwd()
        os.chdir(source_path)
        subprocess.check_call(self.command.split())
        os.chdir(cur_dir)
    return artefacts
IOError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/pre_pyrpkg_fetch_artefacts.py/DistgitFetchArtefactsPlugin.run
2,574
def unicode_strings(buf, n=4):
    reg = b"((?:[%s]\x00){4,})" % (ASCII_BYTE)
    ascii_re = re.compile(reg)
    for match in ascii_re.finditer(buf):
        try:
            if isinstance(match.group(), array.array):
                yield match.group().tostring().decode("utf-16")
            else:
                yield match.group().decode("utf-16")
        except __HOLE__:
            pass
UnicodeDecodeError
dataset/ETHPy150Open williballenthin/INDXParse/get_file_info.py/unicode_strings
2,575
def create_safe_datetime(fn):
    try:
        return fn()
    except __HOLE__:
        return datetime.datetime(1970, 1, 1, 0, 0, 0)
ValueError
dataset/ETHPy150Open williballenthin/INDXParse/get_file_info.py/create_safe_datetime
2,576
def main():
    parser = argparse.ArgumentParser(description='Inspect '
                                     'a given MFT file record.')
    parser.add_argument('-a', action="store", metavar="cache_size", type=int,
                        dest="cache_size", default=1024,
                        help="Size of cache.")
    parser.add_argument('-p', action="store", metavar="prefix", nargs=1,
                        dest="prefix", default="\\.",
                        help="Prefix paths with `prefix` rather than \\.\\")
    parser.add_argument('-v', action="store_true", dest="verbose",
                        help="Print debugging information")
    parser.add_argument('mft', action="store",
                        help="Path to MFT")
    parser.add_argument('record_or_path', action="store",
                        help="MFT record or file path to inspect")
    results = parser.parse_args()

    if results.verbose:
        logging.basicConfig(level=logging.DEBUG)

    with Mmap(results.mft) as buf:
        record_cache = Cache(results.cache_size)
        path_cache = Cache(results.cache_size)

        enum = MFTEnumerator(buf,
                             record_cache=record_cache,
                             path_cache=path_cache)

        should_use_inode = False
        try:
            record_num = int(results.record_or_path)
            should_use_inode = True
        except __HOLE__:
            should_use_inode = False

        if should_use_inode:
            record = enum.get_record(record_num)
            path = results.prefix + enum.get_path(record)
            print_indx_info(record, path)
        else:
            path = results.record_or_path
            record = enum.get_record_by_path(path)
            print_indx_info(record, results.prefix + path)
ValueError
dataset/ETHPy150Open williballenthin/INDXParse/get_file_info.py/main
2,577
def zip_longest(*args, **kwds):  # noqa
    fillvalue = kwds.get("fillvalue")

    def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
        yield counter()  # yields the fillvalue, or raises IndexError

    fillers = itertools.repeat(fillvalue)
    iters = [itertools.chain(it, sentinel(), fillers) for it in args]
    try:
        for tup in itertools.izip(*iters):
            yield tup
    except __HOLE__:
        pass


############## itertools.chain.from_iterable ################################
IndexError
dataset/ETHPy150Open aparo/pyes/pyes/utils/compat.py/zip_longest
2,578
def handle_noargs(self, **options):
    self.set_options(**options)
    found_files = SortedDict()
    manifest = ConfiguredStaticFilesManifest()
    manifest.clear()

    ignore_patterns = getattr(settings, 'ECSTATIC_MANIFEST_EXCLUDES', [])

    for finder in finders.get_finders():
        for path, storage in finder.list(ignore_patterns):
            # Prefix the relative path if the source storage contains it
            if getattr(storage, 'prefix', None):
                prefixed_path = os.path.join(storage.prefix, path)
            else:
                prefixed_path = path

            if prefixed_path not in found_files:
                found_files[prefixed_path] = path

    for path in found_files.values() + settings.ECSTATIC_MANIFEST_EXTRAS:
        try:
            generate_url = self.storage.generate_url
        except __HOLE__:
            raise AttributeError('%s doesn\'t define a generate_url method.'
                                 ' Did you remember to extend StaticManifestMixin?'
                                 % self.storage)
        hashed_name = generate_url(path)
        manifest.add(path, hashed_name)

    manifest.flush()
AttributeError
dataset/ETHPy150Open hzdg/django-ecstatic/ecstatic/management/commands/createstaticmanifest.py/Command.handle_noargs
2,579
def __init__(self, xfm, reference):
    self.xfm = xfm
    self.reference = None

    if isstr(reference):
        import nibabel
        try:
            self.reference = nibabel.load(reference)
            self.shape = self.reference.shape[:3][::-1]
        except __HOLE__:
            self.reference = reference
    elif isinstance(reference, tuple):
        self.shape = reference
    else:
        self.reference = reference
        self.shape = self.reference.shape[:3][::-1]
IOError
dataset/ETHPy150Open gallantlab/pycortex/cortex/xfm.py/Transform.__init__
2,580
def __repr__(self):
    try:
        path, fname = os.path.split(self.reference.get_filename())
        return "<Transform into %s space>" % fname
    except __HOLE__:
        return "<Reference free affine transform>"
AttributeError
dataset/ETHPy150Open gallantlab/pycortex/cortex/xfm.py/Transform.__repr__
2,581
@classmethod
def from_fsl(cls, xfm, func_nii, anat_nii):
    """Converts an fsl transform to a pycortex transform.

    Converts a transform computed using FSL's FLIRT to a transform ("xfm") object
    in pycortex. The transform must have been computed FROM the nifti volume
    specified in `func_nii` TO the volume specified in `anat_nii` (See Notes below).

    Parameters
    ----------
    xfm : array
        4x4 transformation matrix, loaded from an FSL .mat file, for a transform
        computed FROM the func_nii volume TO the anat_nii volume. Alternatively,
        a string file name for the FSL .mat file.
    anat_nii : str or nibabel.Nifti1Image
        nibabel image object (or path to nibabel-readable image) for anatomical
        volume from which cortical surface was created
    func_nii : str or nibabel.Nifti1Image
        nibabel image object (or string path to nibabel-readable image) for
        (functional) data volume to be projected onto cortical surface

    Returns
    -------
    xfm : cortex.xfm.Transform object
        A pycortex COORD transform.

    Notes
    -----
    The transform is assumed to be computed FROM the functional data TO the
    anatomical data. In FSL speak, that means that the arguments to flirt
    should have been:
    flirt -in <func_nii> -ref <anat_nii> ...
    """
    ## -- Adapted from dipy.external.fsl.flirt2aff -- ##
    import nibabel
    import numpy.linalg as npl

    inv = npl.inv
    # Load transform from text file, if string is provided
    if isinstance(xfm, (str, unicode)):
        with open(xfm, 'r') as fid:
            L = fid.readlines()
        xfm = np.array([[np.float(s) for s in ll.split() if s] for ll in L])

    # Internally, pycortex computes the OPPOSITE transform: from anatomical volume to functional volume.
    # Thus, assign anat to "infile" (starting point for transform)
    infile = anat_nii
    # Assign func to "reffile" (end point for transform)
    reffile = func_nii
    # and invert the usual direction (change from func>anat to anat>func)
    xfm = inv(xfm)

    try:
        inIm = nibabel.load(infile)
    except __HOLE__:
        inIm = infile
    refIm = nibabel.load(reffile)
    in_hdr = inIm.get_header()
    ref_hdr = refIm.get_header()
    # get_zooms gets the positive voxel sizes as returned in the header
    inspace = np.diag(in_hdr.get_zooms()[:3] + (1,))
    refspace = np.diag(ref_hdr.get_zooms()[:3] + (1,))
    # Since FSL does not use the full transform info in the nifti header,
    # determine whether the transform indicates that the X axis should be
    # flipped; if so, flip the X axis (for both infile and reffile)
    if npl.det(in_hdr.get_best_affine()) >= 0:
        inspace = np.dot(inspace, _x_flipper(in_hdr.get_data_shape()[0]))
    if npl.det(ref_hdr.get_best_affine()) >= 0:
        refspace = np.dot(refspace, _x_flipper(ref_hdr.get_data_shape()[0]))

    inAffine = inIm.get_affine()
    coord = np.dot(inv(refspace), np.dot(xfm, np.dot(inspace, inv(inAffine))))
    return cls(coord, refIm)
AttributeError
dataset/ETHPy150Open gallantlab/pycortex/cortex/xfm.py/Transform.from_fsl
2,582
def to_fsl(self, anat_nii, direction='func>anat'):
    """Converts a pycortex transform to an FSL transform.

    Uses the stored "reference" file provided when the transform was created
    (usually a functional data or statistical volume) and the supplied anatomical
    file to create an FSL transform. By default, returns the transform FROM the
    reference volume (usually the functional data volume) to the anatomical
    volume (`anat_nii` input).

    Parameters
    ----------
    anat_nii : str or nibabel.Nifti1Image
        nibabel image object (or path to nibabel-readable image) for anatomical
        volume from which cortical surface was created
    direction : str, optional {'func>anat', 'anat>func'}
        Direction of transform to return. Defaults to 'func>anat'

    Notes
    -----
    This function will only work for "coord" transform objects, (those
    retrieved with cortex.db.get_xfm(xfmtype='coord',...)). It will fail
    hard for "magnet" transforms!
    """
    import nibabel
    import numpy.linalg as npl

    inv = npl.inv

    ## -- Internal notes -- ##
    # pycortex transforms are internally stored as anatomical space -> functional data space
    # transforms. Thus the anatomical file is the "infile" in FSL-speak.
    infile = anat_nii

    try:
        inIm = nibabel.load(infile)
    except __HOLE__:
        inIm = infile
    in_hdr = inIm.get_header()
    ref_hdr = self.reference.get_header()
    # get_zooms gets the positive voxel sizes as returned in the header
    inspace = np.diag(in_hdr.get_zooms()[:3] + (1,))
    refspace = np.diag(ref_hdr.get_zooms()[:3] + (1,))
    # Since FSL does not use the full transform info in the nifti header,
    # determine whether the transform indicates that the X axis should be
    # flipped; if so, flip the X axis (for both infile and reffile)
    if npl.det(in_hdr.get_best_affine()) >= 0:
        print("Determinant is > 0: FLIPPING!")
        inspace = np.dot(inspace, _x_flipper(in_hdr.get_data_shape()[0]))
    if npl.det(ref_hdr.get_best_affine()) >= 0:
        print("Determinant is > 0: FLIPPING!")
        refspace = np.dot(refspace, _x_flipper(ref_hdr.get_data_shape()[0]))

    inAffine = inIm.get_affine()
    fslx = np.dot(refspace, np.dot(self.xfm, np.dot(inAffine, inv(inspace))))
    if direction == 'func>anat':
        return inv(fslx)
    elif direction == 'anat>func':
        return fslx
AttributeError
dataset/ETHPy150Open gallantlab/pycortex/cortex/xfm.py/Transform.to_fsl
2,583
def isstr(obj):
    """Check for stringy-ness in python 2.7 or 3"""
    try:
        return isinstance(obj, basestring)
    except __HOLE__:
        return isinstance(obj, str)
NameError
dataset/ETHPy150Open gallantlab/pycortex/cortex/xfm.py/isstr
2,584
def createException(service_exception, provider_nsa):
    # nsiconnection.ServiceException (binding) -> error.NSIError
    try:
        exception_type = error.lookup(service_exception.errorId)
        variables = [ (tvp.type, tvp.value) for tvp in service_exception.variables ] if service_exception.variables else None
        ex = exception_type(service_exception.text, service_exception.nsaId or provider_nsa, service_exception.connectionId, variables)
    except __HOLE__ as e:
        log.msg('Error looking up error id: %s. Message: %s' % (service_exception.errorId, str(e)), system=LOG_SYSTEM)
        ex = error.InternalServerError(service_exception.text)

    return ex
AssertionError
dataset/ETHPy150Open NORDUnet/opennsa/opennsa/protocols/nsi2/helper.py/createException
2,585
def parseLabel(label_part):
    if not '=' in label_part:
        raise error.PayloadError('No = in urn label part (%s)' % label_part)

    label_short_type, label_value = label_part.split('=')
    try:
        label_type = LABEL_MAP[label_short_type]
    except __HOLE__:
        raise error.PayloadError('Label type %s not recognized')

    return nsa.Label(label_type, label_value)
KeyError
dataset/ETHPy150Open NORDUnet/opennsa/opennsa/protocols/nsi2/helper.py/parseLabel
2,586
def get_actual_status_img(request, output_format='png', width=600, height=600):
    if reportlab is None:
        messages.error(request, _("Module") + " reportlab " + _("is not available"))
        try:
            redirect_to = request.META['HTTP_REFERER']
        except __HOLE__:
            redirect_to = '/'
        return HttpResponseRedirect(redirect_to)
    status_list = Status.objects.all()

    drawing = Drawing(width, height)
    pie = Pie3d()
    pie.x = 100
    pie.y = 100
    pie.width = width / 2
    pie.height = height / 2
    pie.labels = [ s.name for s in status_list ]
    pie.data = [ s.task_set.count() for s in status_list ]
    pie.slices[3].fontColor = colors.red

    pie.slices[0].fillColor = colors.darkcyan
    pie.slices[1].fillColor = colors.blueviolet
    pie.slices[2].fillColor = colors.blue
    pie.slices[3].fillColor = colors.cyan
    pie.slices[4].fillColor = colors.aquamarine
    pie.slices[5].fillColor = colors.cadetblue
    pie.slices[6].fillColor = colors.lightcoral

    drawing.add(pie)
    image = drawing.asString(output_format)
    response = HttpResponse(image, mimetype='image/%s' % output_format.lower())
    return response
KeyError
dataset/ETHPy150Open lukaszb/django-projector/projector/views/reports.py/get_actual_status_img
2,587
def handle_accept(self):
    if _debug: TCPServerDirector._debug("handle_accept")

    try:
        client, addr = self.accept()
    except socket.error:
        TCPServerDirector._warning('accept() threw an exception')
        return
    except __HOLE__:
        TCPServerDirector._warning('accept() threw EWOULDBLOCK')
        return
    if _debug: TCPServerDirector._debug(" - connection %r, %r", client, addr)

    # create a server
    server = self.actorClass(self, client, addr)

    # add it to our pool
    self.servers[addr] = server

    # return it to the dispatcher
    return server
TypeError
dataset/ETHPy150Open JoelBender/bacpypes/py27/bacpypes/tcp.py/TCPServerDirector.handle_accept
2,588
def remove_actor(self, actor):
    if _debug: TCPServerDirector._debug("remove_actor %r", actor)

    try:
        del self.servers[actor.peer]
    except __HOLE__:
        TCPServerDirector._warning("remove_actor: %r not an actor", actor)

    # tell the ASE the server has gone away
    if self.serviceElement:
        self.sap_request(delPeer=actor.peer)
KeyError
dataset/ETHPy150Open JoelBender/bacpypes/py27/bacpypes/tcp.py/TCPServerDirector.remove_actor
2,589
def _is_compute_port(self, port):
    try:
        if (port['device_id'] and uuidutils.is_uuid_like(port['device_id'])
                and port['device_owner'].startswith(
                    constants.DEVICE_OWNER_COMPUTE_PREFIX)):
            return True
    except (__HOLE__, AttributeError):
        pass
    return False
KeyError
dataset/ETHPy150Open openstack/neutron/neutron/notifiers/nova.py/Notifier._is_compute_port
2,590
def send_events(self, batched_events):
    LOG.debug("Sending events: %s", batched_events)
    try:
        response = self.nclient.server_external_events.create(
            batched_events)
    except nova_exceptions.NotFound:
        LOG.debug("Nova returned NotFound for event: %s",
                  batched_events)
    except Exception:
        LOG.exception(_LE("Failed to notify nova on events: %s"),
                      batched_events)
    else:
        if not isinstance(response, list):
            LOG.error(_LE("Error response returned from nova: %s"),
                      response)
            return
        response_error = False
        for event in response:
            try:
                code = event['code']
            except __HOLE__:
                response_error = True
                continue
            if code != 200:
                LOG.warning(_LW("Nova event: %s returned with failed "
                                "status"), event)
            else:
                LOG.info(_LI("Nova event response: %s"), event)
        if response_error:
            LOG.error(_LE("Error response returned from nova: %s"),
                      response)
KeyError
dataset/ETHPy150Open openstack/neutron/neutron/notifiers/nova.py/Notifier.send_events
2,591
def run_upgrade_script(app, script_name):
    try:
        cur_script = __import__('datamigrations.%s' % script_name)
    except __HOLE__:
        # return if the script doesn't exist
        return

    print "Running %s.upgrade" % script_name
    script = getattr(cur_script, script_name)
    script.upgrade(app)
ImportError
dataset/ETHPy150Open jkossen/imposter/datamigrations/__init__.py/run_upgrade_script
2,592
def run_downgrade_script(app, script_name):
    try:
        cur_script = __import__('datamigrations.%s' % script_name)
    except __HOLE__:
        # return if the script doesn't exist
        return

    print "Running %s.downgrade" % script_name
    script = getattr(cur_script, script_name)
    script.downgrade(app)
ImportError
dataset/ETHPy150Open jkossen/imposter/datamigrations/__init__.py/run_downgrade_script
2,593
def setUp(self):
    """
    Patch the L{ls} module's time function so the results of L{lsLine} are
    deterministic.
    """
    self.now = 123456789
    def fakeTime():
        return self.now
    self.patch(ls, 'time', fakeTime)

    # Make sure that the timezone ends up the same after these tests as
    # it was before.
    if 'TZ' in os.environ:
        self.addCleanup(operator.setitem, os.environ, 'TZ', os.environ['TZ'])
        self.addCleanup(time.tzset)
    else:
        def cleanup():
            # os.environ.pop is broken! Don't use it! Ever! Or die!
            try:
                del os.environ['TZ']
            except __HOLE__:
                pass
            time.tzset()
        self.addCleanup(cleanup)
KeyError
dataset/ETHPy150Open twisted/twisted/twisted/conch/test/test_cftp.py/ListingTests.setUp
2,594
def __init__(self, data, subject, xfmname, mask=None, **kwargs):
    """Three possible variables: raw, volume, movie, vertex.
    Enumerated with size:

    raw volume movie:   (t, z, y, x, c)
    raw volume image:   (z, y, x, c)
    reg volume movie:   (t, z, y, x)
    reg volume image:   (z, y, x)
    raw linear movie:   (t, v, c)
    reg linear movie:   (t, v)
    raw linear image:   (v, c)
    reg linear image:   (v,)
    """
    if isinstance(data, str):
        import nibabel
        nib = nibabel.load(data)
        data = nib.get_data().T
    self.data = data
    try:
        basestring
    except __HOLE__:
        subject = subject if isinstance(subject, str) else subject.decode('utf-8')
        xfmname = xfmname if isinstance(xfmname, str) else xfmname.decode('utf-8')
    self.subject = subject
    self.xfmname = xfmname
    self.attrs = kwargs
    self._check_size(mask)
    self.masked = Masker(self)
    #self.add_numpy_methods()
NameError
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset.py/VolumeData.__init__
2,595
def __init__(self, data, subject, **kwargs):
    """Vertex Data possibilities

    raw linear movie:   (t, v, c)
    reg linear movie:   (t, v)
    raw linear image:   (v, c)
    reg linear image:   (v,)

    where t is the number of time points, c is colors (i.e. RGB), and v is the
    number of vertices (either in both hemispheres or one hemisphere)
    """
    try:
        basestring
    except __HOLE__:
        subject = subject if isinstance(subject, str) else subject.decode('utf-8')
    self.subject = subject
    self.attrs = kwargs

    left, right = surfs.getSurf(self.subject, "fiducial")
    self.llen = len(left[0])
    self.rlen = len(right[0])
    self._set_data(data)
NameError
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset.py/VertexData.__init__
2,596
def _hdf_write(h5, data, name="data", group="/datasets"):
    import tables
    atom = tables.Atom.from_dtype(data.dtype)
    filt = tables.filters.Filters(complevel=9, complib='blosc', shuffle=True)
    create = False
    try:
        ds = h5.getNode("%s/%s"%(group, name))
        ds[:] = data
    except tables.NoSuchNodeError:
        create = True
    except __HOLE__:
        h5.removeNode("%s/%s"%(group, name))
        create = True
    if create:
        ds = h5.createCArray(group, name, atom, data.shape, filters=filt, createparents=True)
        ds[:] = data

    return ds
ValueError
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset.py/_hdf_write
2,597
def _real_extract(self, url):
    track_id = self._match_id(url)

    data = {'ax': 1, 'ts': time.time()}
    data_encoded = compat_urllib_parse.urlencode(data)
    complete_url = url + "?" + data_encoded
    request = compat_urllib_request.Request(complete_url)
    response, urlh = self._download_webpage_handle(
        request, track_id, 'Downloading webpage with the url')
    cookie = urlh.headers.get('Set-Cookie', '')

    html_tracks = self._html_search_regex(
        r'(?ms)<script type="application/json" id="displayList-data">\s*(.*?)\s*</script>',
        response, 'tracks')
    try:
        track_list = json.loads(html_tracks)
        track = track_list['tracks'][0]
    except __HOLE__:
        raise ExtractorError('Hypemachine contained invalid JSON.')

    key = track['key']
    track_id = track['id']
    artist = track['artist']
    title = track['song']

    serve_url = "http://hypem.com/serve/source/%s/%s" % (track_id, key)
    request = compat_urllib_request.Request(
        serve_url, '', {'Content-Type': 'application/json'})
    request.add_header('cookie', cookie)
    song_data = self._download_json(request, track_id, 'Downloading metadata')
    final_url = song_data["url"]

    return {
        'id': track_id,
        'url': final_url,
        'ext': 'mp3',
        'title': title,
        'uploader': artist,
    }
ValueError
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/extractor/hypem.py/HypemIE._real_extract
2,598
def _from_class_value(self, value, value_type):
    type_factory = self._type_factory
    collation = self._collation
    bytes_to_unicode = self._bytes_to_unicode
    allow_tz = self._allow_tz

    if issubclass(value_type, bool):
        return type_factory.BitN
    elif issubclass(value_type, six.integer_types):
        if value is None:
            return type_factory.IntN(8)
        if -2 ** 31 <= value <= 2 ** 31 - 1:
            return type_factory.IntN(4)
        elif -2 ** 63 <= value <= 2 ** 63 - 1:
            return type_factory.IntN(8)
        elif -10 ** 38 + 1 <= value <= 10 ** 38 - 1:
            return type_factory.Decimal(0, 38)
        else:
            raise DataError('Numeric value out of range')
    elif issubclass(value_type, float):
        return type_factory.FloatN(8)
    elif issubclass(value_type, Binary):
        if value:
            if len(value) <= 8000:
                return type_factory.VarBinary(8000)
            else:
                return type_factory.long_binary_type()
        else:
            return type_factory.long_binary_type()
    elif issubclass(value_type, six.binary_type):
        if bytes_to_unicode:
            return type_factory.long_string_type(collation=collation)
        else:
            return type_factory.long_varchar_type(collation=collation)
    elif issubclass(value_type, six.string_types):
        return type_factory.long_string_type(collation=collation)
    elif issubclass(value_type, datetime):
        if value and value.tzinfo and allow_tz:
            return type_factory.datetime_with_tz(precision=6)
        else:
            return type_factory.datetime(precision=6)
    elif issubclass(value_type, date):
        return type_factory.date()
    elif issubclass(value_type, time):
        return type_factory.time(precision=6)
    elif issubclass(value_type, Decimal):
        if value is None:
            return type_factory.Decimal()
        else:
            return type_factory.Decimal.from_value(value)
    elif issubclass(value_type, uuid.UUID):
        return type_factory.UniqueIdentifier.instance
    elif issubclass(value_type, TableValuedParam):
        columns = value.columns
        rows = value.rows
        if columns is None:
            # trying to auto detect columns using data from first row
            if rows is None:
                # rows are not present too, this means
                # entire tvp has value of NULL
                pass
            else:
                try:
                    rows = iter(rows)
                except TypeError:
                    raise DataError('rows should be iterable')

                try:
                    row = next(rows)
                except __HOLE__:
                    # no rows
                    raise DataError("Cannot infer columns from rows for TVP because there are no rows")
                else:
                    # put row back
                    rows = itertools.chain([row], rows)

                    # use first row to infer types of columns
                    columns = []
                    try:
                        cell_iter = iter(row)
                    except TypeError:
                        raise DataError('Each row in table should be an iterable')
                    for cell in cell_iter:
                        if isinstance(cell, TableValuedParam):
                            raise DataError('TVP type cannot have nested TVP types')
                        col_type = self.from_value(cell)
                        col = Column(type=col_type)
                        columns.append(col)

        return Table(typ_schema=value.typ_schema, typ_name=value.typ_name, columns=columns, rows=rows)
    else:
        raise DataError('Cannot infer TDS type from Python value: {!r}'.format(value))
StopIteration
dataset/ETHPy150Open denisenkom/pytds/pytds/tds_types.py/TdsTypeInferrer._from_class_value
2,599
def test_fast_dot():
    # Check fast dot blas wrapper function
    if fast_dot is np.dot:
        return

    rng = np.random.RandomState(42)
    A = rng.random_sample([2, 10])
    B = rng.random_sample([2, 10])

    try:
        linalg.get_blas_funcs(['gemm'])[0]
        has_blas = True
    except (__HOLE__, ValueError):
        has_blas = False

    if has_blas:
        # Test _fast_dot for invalid input.

        # Maltyped data.
        for dt1, dt2 in [['f8', 'f4'], ['i4', 'i4']]:
            assert_raises(ValueError, _fast_dot, A.astype(dt1),
                          B.astype(dt2).T)

        # Malformed data.

        # ndim == 0
        E = np.empty(0)
        assert_raises(ValueError, _fast_dot, E, E)

        # ndim == 1
        assert_raises(ValueError, _fast_dot, A, A[0])

        # ndim > 2
        assert_raises(ValueError, _fast_dot, A.T, np.array([A, A]))

        # min(shape) == 1
        assert_raises(ValueError, _fast_dot, A, A[0, :][None, :])

        # test for matrix mismatch error
        assert_raises(ValueError, _fast_dot, A, A)

    # Test cov-like use case + dtypes.
    for dtype in ['f8', 'f4']:
        A = A.astype(dtype)
        B = B.astype(dtype)

        # col < row
        C = np.dot(A.T, A)
        C_ = fast_dot(A.T, A)
        assert_almost_equal(C, C_, decimal=5)

        C = np.dot(A.T, B)
        C_ = fast_dot(A.T, B)
        assert_almost_equal(C, C_, decimal=5)

        C = np.dot(A, B.T)
        C_ = fast_dot(A, B.T)
        assert_almost_equal(C, C_, decimal=5)

    # Test square matrix * rectangular use case.
    A = rng.random_sample([2, 2])
    for dtype in ['f8', 'f4']:
        A = A.astype(dtype)
        B = B.astype(dtype)

        C = np.dot(A, B)
        C_ = fast_dot(A, B)
        assert_almost_equal(C, C_, decimal=5)

        C = np.dot(A.T, B)
        C_ = fast_dot(A.T, B)
        assert_almost_equal(C, C_, decimal=5)

    if has_blas:
        for x in [np.array([[d] * 10] * 2) for d in [np.inf, np.nan]]:
            assert_raises(ValueError, _fast_dot, x, x.T)
AttributeError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/utils/tests/test_extmath.py/test_fast_dot