Columns:
  Unnamed: 0   int64, values 0 to 10k
  function     string, lengths 79 to 138k
  label        string, 20 classes
  info         string, lengths 42 to 261
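Each row below pairs a row index with a Python function in which one exception name has been masked out as __HOLE__, the exception class to recover (label), and the path of the originating file in the ETHPy150Open corpus (info). As a minimal sketch (not part of the dataset itself), rows with this schema could be iterated as shown here, assuming the table has been exported as a CSV file; the filename is a hypothetical placeholder.

import pandas as pd

# Hypothetical filename; adjust to wherever the exported table actually lives.
df = pd.read_csv("exceptions.csv")   # columns: Unnamed: 0, function, label, info
for _, row in df.iterrows():
    masked_code = row["function"]    # source code with the exception name replaced by __HOLE__
    target = row["label"]            # exception class to recover, e.g. "KeyError"
    origin = row["info"]             # path of the originating file in the ETHPy150Open corpus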
2,200
def deep_getattr(obj, pathname):
    """Returns a tuple of the form (value, restofpath), if restofpath is None,
    value is the actual desired value, else it's the closest containing object
    in this process, and restofpath will contain the string that would resolve
    the desired object within the containing object.  pathname is a dotted path
    string and obj is the starting containing object.
    """
    parts = pathname.split('.')
    for i, name in enumerate(parts):
        try:
            obj = getattr(obj, name)
        except __HOLE__:
            return (obj, '.'.join(parts[i:]))
    return (obj, None)
AttributeError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/index.py/deep_getattr
2,201
def setUp(self):
    try:
        import jirafs_list_table  # noqa
    except __HOLE__:
        raise SkipTest(
            "Push command macropatch tests require the "
            "jira-list-table package to be installed."
        )
    super(TestPushCommandWithMacropatch, self).setUp()
    run_command_method_with_kwargs(
        'plugins',
        folder=self.ticketfolder,
        args=Mock(
            enable='list_table',
        )
    )
ImportError
dataset/ETHPy150Open coddingtonbear/jirafs/tests/commands/test_push.py/TestPushCommandWithMacropatch.setUp
2,202
@login_manager.user_loader
def load_user(userid):
    try:
        return models.user.User.query.get(int(userid))
    except (TypeError, __HOLE__):
        pass
ValueError
dataset/ETHPy150Open omab/python-social-auth/examples/flask_example/__init__.py/load_user
2,203
@app.context_processor
def inject_user():
    try:
        return {'user': g.user}
    except __HOLE__:
        return {'user': None}
AttributeError
dataset/ETHPy150Open omab/python-social-auth/examples/flask_example/__init__.py/inject_user
2,204
def __init__(self, parent): parent.title = "Volume Probe" parent.categories = ["Wizards"] parent.dependencies = [] parent.contributors = ["Alex Yarmarkovich"] # replace with "Firstname Lastname (Org)" parent.helpText = """ """ parent.helpText = string.Template(""" This module helps organize layouts and volume compositing to help compare images Please refer to <a href=\"$a/Documentation/$b.$c/Modules/VolumeProbe\"> the documentation</a>. """).substitute({ 'a':parent.slicerWikiUrl, 'b':slicer.app.majorVersion, 'c':slicer.app.minorVersion }) parent.acknowledgementText = """ This file was originally developed by Alex Yarmarkovich. It was partially funded by NIH grant 9999999 """ # replace with organization, grant and thanks. self.parent = parent # Add this test to the SelfTest module's list for discovery when the module # is created. Since this module may be discovered before SelfTests itself, # create the list if it doesn't already exist. try: slicer.selfTests except __HOLE__: slicer.selfTests = {} slicer.selfTests['VolumeProbe'] = self.runTest
AttributeError
dataset/ETHPy150Open acil-bwh/SlicerCIP/Scripted/VolumeProbe/VolumeProbe.py/VolumeProbe.__init__
2,205
def onReload(self,moduleName="VolumeProbe"): """Generic reload method for any scripted module. ModuleWizard will subsitute correct default moduleName. """ import imp, sys, os, slicer widgetName = moduleName + "Widget" # reload the source code # - set source file path # - load the module to the global space filePath = eval('slicer.modules.%s.path' % moduleName.lower()) p = os.path.dirname(filePath) if not sys.path.__contains__(p): sys.path.insert(0,p) fp = open(filePath, "r") globals()[moduleName] = imp.load_module( moduleName, fp, filePath, ('.py', 'r', imp.PY_SOURCE)) fp.close() # rebuild the widget # - find and hide the existing widget # - create a new widget in the existing parent parent = slicer.util.findChildren(name='%s Reload' % moduleName)[0].parent().parent() for child in parent.children(): try: child.hide() except __HOLE__: pass # Remove spacer items item = parent.layout().itemAt(0) while item: parent.layout().removeItem(item) item = parent.layout().itemAt(0) # create new widget inside existing parent globals()[widgetName.lower()] = eval( 'globals()["%s"].%s(parent)' % (moduleName, widgetName)) globals()[widgetName.lower()].setup()
AttributeError
dataset/ETHPy150Open acil-bwh/SlicerCIP/Scripted/VolumeProbe/VolumeProbe.py/VolumeProbeWidget.onReload
2,206
def __getitem__(self, key):
    try:
        return dict.__getitem__(self, key)
    except __HOLE__:
        return self.__missing__(key)
KeyError
dataset/ETHPy150Open YelpArchive/python-gearman/gearman/compat.py/defaultdict.__getitem__
2,207
def get_psd(self, data, NFFT, FS):
    """By calling 'psd' within axes, it both calculates and plots the spectrum"""
    try:
        Pxx, freqs = self.axes.psd(data, NFFT = NFFT, Fs = FS)
        self.need_refresh = True
    except __HOLE__ as err_re:
        print("Warning:", err_re)
        Pxx, freqs = None, None
    return Pxx, freqs
RuntimeError
dataset/ETHPy150Open ericgibert/supersid/supersid/tksidviewer.py/tkSidViewer.get_psd
2,208
def refresh_psd(self, z=None):
    """redraw the graphic PSD plot if needed i.e.new data have been given to get_psd"""
    if self.need_refresh:
        try:
            self.canvas.draw()
            self.need_refresh = False
        except __HOLE__ as err_idx:
            print("Warning:", err_idx)
    self.tk_root.after(2000, self.refresh_psd)
IndexError
dataset/ETHPy150Open ericgibert/supersid/supersid/tksidviewer.py/tkSidViewer.refresh_psd
2,209
def test_b_before_a():
    os.environ['CONF'] = sample('b.yaml')
    sys.argv = sys.argv[0:1] + ['--conf-mysql', 'host: localhost', '--conf', sample('a.yaml')]
    try:
        waiter()
    except __HOLE__:
        pass
SystemExit
dataset/ETHPy150Open EverythingMe/click-config/click_config/test/test_inotify.py/test_b_before_a
2,210
def test_a_before_b():
    os.environ['CONF'] = sample('a.yaml')
    sys.argv = sys.argv[0:1] + ['--conf-mysql', 'host: localhost', '--conf', sample('b.yaml'), '--expected-port', '777']
    try:
        passer()
    except __HOLE__:
        pass
SystemExit
dataset/ETHPy150Open EverythingMe/click-config/click_config/test/test_inotify.py/test_a_before_b
2,211
def test_overrides():
    os.environ['CONF'] = sample('a.yaml')
    sys.argv = sys.argv[0:1] + ['--conf-mysql', 'host: localhost\nport: 888', '--conf', sample('b.yaml'), '--expected-port', '888']
    try:
        passer()
    except __HOLE__:
        pass
SystemExit
dataset/ETHPy150Open EverythingMe/click-config/click_config/test/test_inotify.py/test_overrides
2,212
def to_html(self, image_file, image_url, center=True): """ Return the HTML code that should be uesd to display this graph (including a client-side image map). :param image_url: The URL of the image file for this graph; this should be generated separately with the `write()` method. """ # If dotversion >1.8.10, then we can generate the image and # the cmapx with a single call to dot. Otherwise, we need to # run dot twice. if get_dot_version() > [1,8,10]: cmapx = self._run_dot('-Tgif', '-o%s' % image_file, '-Tcmapx') if cmapx is None: return '' # failed to render else: if not self.write(image_file): return '' # failed to render cmapx = self.render('cmapx') or '' # Decode the cmapx (dot uses utf-8) try: cmapx = cmapx.decode('utf-8') except __HOLE__: log.debug('%s: unable to decode cmapx from dot; graph will ' 'not have clickable regions' % image_file) cmapx = '' title = plaintext_to_html(self.title or '') caption = plaintext_to_html(self.caption or '') if title or caption: css_class = 'graph-with-title' else: css_class = 'graph-without-title' if len(title)+len(caption) > 80: title_align = 'left' table_width = ' width="600"' else: title_align = 'center' table_width = '' if center: s = '<center>' if title or caption: s += ('<table border="0" cellpadding="0" cellspacing="0" ' 'class="graph"%s>\n <tr><td align="center">\n' % table_width) s += (' %s\n <img src="%s" alt=%r usemap="#%s" ' 'ismap="ismap" class="%s" />\n' % (cmapx.strip(), image_url, title, self.uid, css_class)) if title or caption: s += ' </td></tr>\n <tr><td align=%r>\n' % title_align if title: s += '<span class="graph-title">%s</span>' % title if title and caption: s += ' -- ' if caption: s += '<span class="graph-caption">%s</span>' % caption s += '\n </td></tr>\n</table><br />' if center: s += '</center>' return s
UnicodeDecodeError
dataset/ETHPy150Open QuantSoftware/QuantSoftwareToolkit/Legacy/epydoc-3.0.1/epydoc/docwriter/dotgraph.py/DotGraph.to_html
2,213
def _run_dot(self, *options):
    try:
        result, err = run_subprocess((DOT_COMMAND,)+options, self.to_dotfile())
        if err: log.warning("Graphviz dot warning(s):\n%s" % err)
    except __HOLE__, e:
        log.warning("Unable to render Graphviz dot graph:\n%s" % e)
        #log.debug(self.to_dotfile())
        return None
    return result
OSError
dataset/ETHPy150Open QuantSoftware/QuantSoftwareToolkit/Legacy/epydoc-3.0.1/epydoc/docwriter/dotgraph.py/DotGraph._run_dot
2,214
def uml_class_tree_graph(class_doc, linker, context=None, **options): """ Return a `DotGraph` that graphically displays the class hierarchy for the given class, using UML notation. Options: - max_attributes - max_operations - show_private_vars - show_magic_vars - link_attributes """ nodes = {} # ClassDoc -> DotGraphUmlClassNode exclude = options.get('exclude', ()) # Create nodes for class_doc and all its bases. for cls in class_doc.mro(): if cls.pyval is object: continue # don't include `object`. if cls in exclude: break # stop if we get to an excluded class. if cls == class_doc: color = SELECTED_BG else: color = BASECLASS_BG nodes[cls] = DotGraphUmlClassNode(cls, linker, context, show_inherited_vars=False, collapsed=False, bgcolor=color) # Create nodes for all class_doc's subclasses. queue = [class_doc] for cls in queue: if (isinstance(cls, ClassDoc) and cls.subclasses not in (None, UNKNOWN)): for subcls in cls.subclasses: subcls_name = subcls.canonical_name[-1] if subcls not in nodes and subcls not in exclude: queue.append(subcls) nodes[subcls] = DotGraphUmlClassNode( subcls, linker, context, collapsed=True, bgcolor=SUBCLASS_BG) # Only show variables in the class where they're defined for # *class_doc*. mro = class_doc.mro() for name, var in class_doc.variables.items(): i = mro.index(var.container) for base in mro[i+1:]: if base.pyval is object: continue # don't include `object`. overridden_var = base.variables.get(name) if overridden_var and overridden_var.container == base: try: if isinstance(overridden_var.value, RoutineDoc): nodes[base].operations.remove(overridden_var) else: nodes[base].attributes.remove(overridden_var) except __HOLE__: pass # var is filtered (eg private or magic) # Keep track of which nodes are part of the inheritance graph # (since link_attributes might add new nodes) inheritance_nodes = set(nodes.values()) # Turn attributes into links. if options.get('link_attributes', True): for node in nodes.values(): node.link_attributes(nodes) # Make sure that none of the new attribute edges break the # rank ordering assigned by inheritance. for edge in node.edges: if edge.end in inheritance_nodes: edge['constraint'] = 'False' # Construct the graph. graph = DotGraph('UML class diagram for %s' % class_doc.canonical_name, body='ranksep=.2\n;nodesep=.3\n') graph.nodes = nodes.values() # Add inheritance edges. for node in inheritance_nodes: for base in node.class_doc.bases: if base in nodes: graph.edges.append(DotGraphEdge(nodes[base], node, dir='back', arrowtail='empty', headport='body', tailport='body', color=INH_LINK_COLOR, weight=100, style='bold')) # And we're done! return graph ######################################################################
ValueError
dataset/ETHPy150Open QuantSoftware/QuantSoftwareToolkit/Legacy/epydoc-3.0.1/epydoc/docwriter/dotgraph.py/uml_class_tree_graph
2,215
def get_dot_version():
    global _dot_version
    if _dot_version is None:
        try:
            out, err = run_subprocess([DOT_COMMAND, '-V'])
            version_info = err or out
            m = _DOT_VERSION_RE.match(version_info)
            if m:
                _dot_version = [int(x) for x in m.group(1).split('.')]
            else:
                _dot_version = (0,)
        except __HOLE__, e:
            _dot_version = (0,)
        log.info('Detected dot version %s' % _dot_version)
    return _dot_version

######################################################################
#{ Helper Functions
######################################################################
OSError
dataset/ETHPy150Open QuantSoftware/QuantSoftwareToolkit/Legacy/epydoc-3.0.1/epydoc/docwriter/dotgraph.py/get_dot_version
2,216
def write_csv(rows, delim):
    writer = csv.writer(click.get_text_stream('stdout'), delimiter=delim)
    try:
        [writer.writerow(row) for row in rows]
    except (OSError, __HOLE__):
        sys.stderr.close()
IOError
dataset/ETHPy150Open learntextvis/textkit/textkit/utils.py/write_csv
2,217
def output(line):
    try:
        click.echo(line)
    except (__HOLE__, IOError):
        sys.stderr.close()
OSError
dataset/ETHPy150Open learntextvis/textkit/textkit/utils.py/output
2,218
def error_reason(self, workername, response):
    "extracts error message from response"
    for r in response:
        try:
            return r[workername].get('error', 'Unknown reason')
        except __HOLE__:
            pass
    logger.error("Failed to extract error reason from '%s'", response)
    return 'Unknown reason'
KeyError
dataset/ETHPy150Open mher/flower/flower/api/control.py/ControlHandler.error_reason
2,219
@classmethod def init_table(cls, key, method): cls.method = None if method == 'table' else method.lower() cls.key = key if cls.method: try: __import__('M2Crypto') except __HOLE__: logger.error( 'M2Crypto is required to use encryption other than default method') sys.exit(1) if method: cls.encrypt_table, cls.decrypt_table = get_table(key) else: try: # make an Encryptor to test if the settings if OK Crypto() except Exception as e: logger.error(e) sys.exit(1)
ImportError
dataset/ETHPy150Open mrknow/filmkodi/plugin.video.mrknow/lib/test/test.py/Crypto.init_table
2,220
def ContentGenerator(csv_file, batch_size, create_csv_reader=csv.reader, create_csv_writer=csv.writer): """Retrieves CSV data up to a batch size at a time. Args: csv_file: A file-like object for reading CSV data. batch_size: Maximum number of CSV rows to yield on each iteration. create_csv_reader, create_csv_writer: Used for dependency injection. Yields: Tuple (entity_count, csv_content) where: entity_count: Number of entities contained in the csv_content. Will be less than or equal to the batch_size and greater than 0. csv_content: String containing the CSV content containing the next entity_count entities. """ try: csv.field_size_limit(800000) except __HOLE__: pass reader = create_csv_reader(csv_file, skipinitialspace=True) exhausted = False while not exhausted: rows_written = 0 content = StringIO.StringIO() writer = create_csv_writer(content) try: for i in xrange(batch_size): row = reader.next() writer.writerow(row) rows_written += 1 except StopIteration: exhausted = True if rows_written > 0: yield rows_written, content.getvalue()
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/bulkload_client.py/ContentGenerator
2,221
def PostEntities(host_port, uri, cookie, kind, content): """Posts Entity records to a remote endpoint over HTTP. Args: host_port: String containing the "host:port" pair; the port is optional. uri: Relative URI to access on the remote host (e.g., '/bulkload'). cookie: String containing the Cookie header to use, if any. kind: Kind of the Entity records being posted. content: String containing the CSV data for the entities. Raises: BadServerStatusError if the server was contactable but returns an error. PostError If an error occurred while connecting to the server or reading or writing data. """ logging.debug('Connecting to %s', host_port) try: body = urllib.urlencode({ constants.KIND_PARAM: kind, constants.CSV_PARAM: content, }) headers = { 'Content-Type': 'application/x-www-form-urlencoded', 'Content-Length': len(body), 'Cookie': cookie, } logging.debug('Posting %d bytes to http://%s%s', len(body), host_port, uri) connection = httplib.HTTPConnection(host_port) try: connection.request('POST', uri, body, headers) response = connection.getresponse() status = response.status reason = response.reason content = response.read() logging.debug('Received response code %d: %s', status, reason) if status != httplib.OK: raise BadServerStatusError('Received code %d: %s\n%s' % ( status, reason, content)) finally: connection.close() except (__HOLE__, httplib.HTTPException, socket.error), e: logging.debug('Encountered exception accessing HTTP server: %s', e) raise PostError(e)
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/bulkload_client.py/PostEntities
2,222
def postOptions(self): # Mandatory parameters # Validate image if self['image'] is None: raise usage.UsageError( "image parameter must be provided" ) # Validate mountpoint if self['mountpoint'] is None: raise usage.UsageError("mountpoint is a mandatory parameter") else: try: FilePath(self['mountpoint']) except __HOLE__: raise usage.UsageError("mountpoint has to be an absolute path") # Validate app per node if self['apps-per-node'] is None: raise usage.UsageError("apps-per-node is a mandatory parameter") else: try: self['apps-per-node'] = int(self['apps-per-node']) except ValueError: raise usage.UsageError("apps-per-node has to be an integer") # Validate control node if self['control-node'] is None: raise usage.UsageError("control-node is a mandatory parameter") else: try: IPAddress(self['control-node']) except ValueError: raise usage.UsageError("control-node has to be an IP address") # Validate certificate directory if self['cert-directory'] is None: raise usage.UsageError("cert-directory is a mandatory parameter") # Validate optional parameters # Note that we don't check if those parameters are None, because # all of them have default value and can't be none. If they are, # and exception will be raised try: self['max-size'] = int(self['max-size']) except ValueError: raise usage.UsageError( "The max-size timeout must be an integer.") try: self['wait'] = timedelta(seconds=int(self['wait'])) except ValueError: raise usage.UsageError("The wait timeout must be an integer.")
ValueError
dataset/ETHPy150Open ClusterHQ/flocker/benchmark/cluster_containers_setup.py/ContainerOptions.postOptions
2,223
def _conditional_import_module(self, module_name):
    """Import a module and return a reference to it or None on failure."""
    try:
        exec('import '+module_name)
    except __HOLE__ as error:
        if self._warn_on_extension_import:
            warnings.warn('Did a C extension fail to compile? %s' % error)
    return locals().get(module_name)
ImportError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_hashlib.py/HashLibTestCase._conditional_import_module
2,224
def test_get_builtin_constructor(self):
    get_builtin_constructor = hashlib.__dict__[
            '__get_builtin_constructor']
    self.assertRaises(ValueError, get_builtin_constructor, 'test')
    try:
        import _md5
    except __HOLE__:
        pass
    # This forces an ImportError for "import _md5" statements
    sys.modules['_md5'] = None
    try:
        self.assertRaises(ValueError, get_builtin_constructor, 'md5')
    finally:
        if '_md5' in locals():
            sys.modules['_md5'] = _md5
        else:
            del sys.modules['_md5']
    self.assertRaises(TypeError, get_builtin_constructor, 3)
ImportError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_hashlib.py/HashLibTestCase.test_get_builtin_constructor
2,225
def handle(self, *args, **options): if options['devices']: devices = Device.objects.filter(is_active=True) self.stdout.write("Devices list:\n") for device in devices: self.stdout.write("(#%s) %s\n" % (device.id, device.name)) self.stdout.write("\n") else: collapse_key = options['collapse_key'] try: id = args[0] message = args[1] except __HOLE__: raise CommandError( "Invalid params. You have to put all params: " "python manage.py gcm_messenger <device_id> <msg>") try: device = Device.objects.get(pk=int(id), is_active=True) except Device.DoesNotExist: raise CommandError( 'Unknown device (id=%s). Check list: ' 'python manage.py gcm_messenger --devices' % id) else: result = device.send_message( {'message': message}, collapse_key=collapse_key) self.stdout.write("[OK] device #%s (%s): %s\n" % (id, device.name, result))
IndexError
dataset/ETHPy150Open bogdal/django-gcm/gcm/management/commands/gcm_messenger.py/Command.handle
2,226
def serve(self):
    self.trans.listen()
    while not self.closed:
        try:
            client = self.trans.accept()
            t = threading.Thread(target=self.handle, args=(client,))
            t.setDaemon(self.daemon)
            t.start()
        except __HOLE__:
            raise
        except Exception as x:
            logger.exception(x)
KeyboardInterrupt
dataset/ETHPy150Open eleme/thriftpy/thriftpy/server.py/TThreadedServer.serve
2,227
def Run(self): """Utility main loop. """ while True: self._AddPendingToQueue() # Collect all active daemon names daemons = self._GetActiveDaemonNames(self._queue) if not daemons: break # Collection daemon status data data = self._CollectDaemonStatus(self._lu, daemons) # Use data delay = self.MAX_DELAY for diskie in self._queue: if not diskie.active: continue try: try: all_daemon_data = data[diskie.node_name] except __HOLE__: result = diskie.SetDaemonData(False, None) else: result = \ diskie.SetDaemonData(True, all_daemon_data[diskie.GetDaemonName()]) if not result: # Daemon not yet ready, retry soon delay = min(3.0, delay) continue if diskie.CheckFinished(): # Transfer finished diskie.Finalize() continue # Normal case: check again in 5 seconds delay = min(5.0, delay) if not diskie.CheckListening(): # Not yet listening, retry soon delay = min(1.0, delay) continue if not diskie.CheckConnected(): # Not yet connected, retry soon delay = min(1.0, delay) continue except _ImportExportError, err: logging.exception("%s failed", diskie.MODE_TEXT) diskie.Finalize(error=str(err)) if not compat.any(diskie.active for diskie in self._queue): break # Wait a bit delay = min(self.MAX_DELAY, max(self.MIN_DELAY, delay)) logging.debug("Waiting for %ss", delay) time.sleep(delay)
KeyError
dataset/ETHPy150Open ganeti/ganeti/lib/masterd/instance.py/ImportExportLoop.Run
2,228
def CheckRemoteExportHandshake(cds, handshake): """Checks the handshake of a remote import/export. @type cds: string @param cds: Cluster domain secret @type handshake: sequence @param handshake: Handshake sent by remote peer """ try: (version, hmac_digest, hmac_salt) = handshake except (__HOLE__, ValueError), err: return "Invalid data: %s" % err if not utils.VerifySha1Hmac(cds, _GetImportExportHandshakeMessage(version), hmac_digest, salt=hmac_salt): return "Hash didn't match, clusters don't share the same domain secret" if version != constants.RIE_VERSION: return ("Clusters don't have the same remote import/export protocol" " (local=%s, remote=%s)" % (constants.RIE_VERSION, version)) return None
TypeError
dataset/ETHPy150Open ganeti/ganeti/lib/masterd/instance.py/CheckRemoteExportHandshake
2,229
def CheckRemoteExportDiskInfo(cds, disk_index, disk_info): """Verifies received disk information for an export. @type cds: string @param cds: Cluster domain secret @type disk_index: number @param disk_index: Index of disk (included in hash) @type disk_info: sequence @param disk_info: Disk information sent by remote peer """ try: (host, port, magic, hmac_digest, hmac_salt) = disk_info except (__HOLE__, ValueError), err: raise errors.GenericError("Invalid data: %s" % err) if not (host and port and magic): raise errors.GenericError("Missing destination host, port or magic") msg = _GetRieDiskInfoMessage(disk_index, host, port, magic) if not utils.VerifySha1Hmac(cds, msg, hmac_digest, salt=hmac_salt): raise errors.GenericError("HMAC is wrong") if netutils.IP6Address.IsValid(host) or netutils.IP4Address.IsValid(host): destination = host else: destination = netutils.Hostname.GetNormalizedName(host) return (destination, utils.ValidateServiceName(port), magic)
TypeError
dataset/ETHPy150Open ganeti/ganeti/lib/masterd/instance.py/CheckRemoteExportDiskInfo
2,230
def tile(A, reps): """Construct an array by repeating A the number of times given by reps. Args: A (cupy.ndarray): Array to transform. reps (int or tuple): The number of repeats. Returns: cupy.ndarray: Transformed array with repeats. .. seealso:: :func:`numpy.tile` """ try: tup = tuple(reps) except __HOLE__: tup = (reps,) d = len(tup) if tup.count(1) == len(tup) and isinstance(A, cupy.ndarray): # Fixes the problem that the function does not make a copy if A is a # array and the repetitions are 1 in all dimensions return cupy.array(A, copy=True, ndmin=d) else: # Note that no copy of zero-sized arrays is made. However since they # have no data there is no risk of an inadvertent overwrite. c = cupy.array(A, copy=False, ndmin=d) if d < c.ndim: tup = (1,) * (c.ndim - d) + tup shape_out = tuple(s * t for s, t in zip(c.shape, tup)) if c.size == 0: return cupy.empty(shape_out, dtype=c.dtype) c_shape = [] ret_shape = [] for dim_in, nrep in zip(c.shape, tup): if nrep == 1: c_shape.append(dim_in) ret_shape.append(dim_in) elif dim_in == 1: c_shape.append(dim_in) ret_shape.append(nrep) else: c_shape.append(1) c_shape.append(dim_in) ret_shape.append(nrep) ret_shape.append(dim_in) ret = cupy.empty(ret_shape, dtype=c.dtype) if ret.size: ret[...] = c.reshape(c_shape) return ret.reshape(shape_out)
TypeError
dataset/ETHPy150Open pfnet/chainer/cupy/manipulation/tiling.py/tile
2,231
def simpleFunction1():
    try:
        raise TypeError, (3,x,x,x)
    except __HOLE__:
        pass
TypeError
dataset/ETHPy150Open kayhayen/Nuitka/tests/basics/Referencing_2.py/simpleFunction1
2,232
def simpleFunction6():
    def nested_args_function((a,b), c):
        return a, b, c
    try:
        nested_args_function((1,), 3)
    except __HOLE__:
        pass
ValueError
dataset/ETHPy150Open kayhayen/Nuitka/tests/basics/Referencing_2.py/simpleFunction6
2,233
def simpleFunction7():
    def nested_args_function((a,b), c):
        return a, b, c
    try:
        nested_args_function((1, 2, 3), 3)
    except __HOLE__:
        pass

# These need stderr to be wrapped.
ValueError
dataset/ETHPy150Open kayhayen/Nuitka/tests/basics/Referencing_2.py/simpleFunction7
2,234
def get_exemplar_from_file(cldr_file_path):
    try:
        return exemplar_from_file_cache[cldr_file_path]
    except __HOLE__:
        pass
    data_file = path.join(CLDR_DIR, cldr_file_path)
    try:
        root = ElementTree.parse(data_file).getroot()
    except IOError:
        exemplar_from_file_cache[cldr_file_path] = None
        return None
    for tag in root.iter('exemplarCharacters'):
        if 'type' in tag.attrib:
            continue
        exemplar_from_file_cache[cldr_file_path] = exemplar_string_to_list(
            tag.text)
        return exemplar_from_file_cache[cldr_file_path]
    return None
KeyError
dataset/ETHPy150Open googlei18n/nototools/nototools/generate_website_data.py/get_exemplar_from_file
2,235
def get_language_name_from_file(language, cldr_file_path):
    cache_key = (language, cldr_file_path)
    try:
        return language_name_from_file_cache[cache_key]
    except KeyError:
        pass
    data_file = path.join(CLDR_DIR, cldr_file_path)
    try:
        root = ElementTree.parse(data_file).getroot()
    except __HOLE__:
        language_name_from_file_cache[cache_key] = None
        return None
    parent = root.find('.//languages')
    if parent is None:
        return None
    for tag in parent:
        assert tag.tag == 'language'
        if tag.get('type').replace('_', '-') == language:
            language_name_from_file_cache[cache_key] = tag.text
            return language_name_from_file_cache[cache_key]
    return None
IOError
dataset/ETHPy150Open googlei18n/nototools/nototools/generate_website_data.py/get_language_name_from_file
2,236
def get_native_language_name(lang_scr): """Get the name of a language in its own locale.""" try: return extra_locale_data.NATIVE_NAMES[lang_scr] except __HOLE__: pass if '-' in lang_scr: language = lang_scr.split('-')[0] else: language = lang_scr locl = lang_scr while locl != 'root': for directory in ['common', 'seed']: file_path = path.join( directory, 'main', locl.replace('-', '_')+'.xml') for name_to_find in [lang_scr, language]: native_name = get_language_name_from_file( name_to_find, file_path) if native_name: return native_name locl = find_parent_locale(locl) return None
KeyError
dataset/ETHPy150Open googlei18n/nototools/nototools/generate_website_data.py/get_native_language_name
2,237
def get_english_language_name(lang_scr):
    try:
        return english_language_name[lang_scr]
    except __HOLE__:
        lang, script = lang_scr.split('-')
        name = '%s (%s script)' % (
            english_language_name[lang],
            english_script_name[script])
        print "Constructing name '%s' for %s." % (name, lang_scr)
        return name
KeyError
dataset/ETHPy150Open googlei18n/nototools/nototools/generate_website_data.py/get_english_language_name
2,238
def create_langs_object(): langs = {} for lang_scr in sorted(set(written_in_scripts) | all_used_lang_scrs): lang_object = {} if '-' in lang_scr: language, script = lang_scr.split('-') else: language = lang_scr try: script = find_likely_script(language) except __HOLE__: print "no likely script for %s" % language continue lang_object['name'] = get_english_language_name(lang_scr) native_name = get_native_language_name(lang_scr) if native_name is not None: lang_object['nameNative'] = native_name lang_object['rtl'] = is_script_rtl(script) if script == 'Kana': script = 'Jpan' if script not in supported_scripts: # Scripts we don't have fonts for yet print('No font supports the %s script (%s) needed for the %s language.' % (english_script_name[script], script, lang_object['name'])) assert script in { 'Bass', # Bassa Vah 'Lina', # Linear A 'Mani', # Manichaean 'Merc', # Meroitic Cursive 'Mroo', # Mro 'Narb', # Old North Arabian 'Orya', # Oriya 'Plrd', # Miao 'Sora', # Sora Sompeng 'Thaa', # Thaana 'Tibt', # Tibetan } lang_object['families'] = [] else: sample_text = get_sample_text(language, script) lang_object['sample'] = sample_text if script in {'Latn', 'Grek', 'Cyrl'}: query_script = '' else: query_script = script # FIXME(roozbeh): Figure out if the language is actually supported # by the font + Noto LGC. If it's not, don't claim support. fonts = [font for font in all_fonts if font.script == query_script] # For certain languages of Pakistan, add Nastaliq font if lang_scr in {'bal', 'hnd', 'hno', 'ks-Arab', 'lah', 'pa-Arab', 'skr', 'ur'}: fonts += [font for font in all_fonts if font.script == 'Aran'] family_keys = set([font.key for font in fonts]) lang_object['families'] = sorted(family_keys) for family in family_keys: family_to_langs[family].add(lang_scr) langs[lang_scr] = lang_object return langs
KeyError
dataset/ETHPy150Open googlei18n/nototools/nototools/generate_website_data.py/create_langs_object
2,239
def _get_open_fds():
    fds = set()
    for fd in range(3,resource.getrlimit(resource.RLIMIT_NOFILE)[0]):
        try:
            flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        except __HOLE__:
            continue
        fds.add(fd)
    return fds
IOError
dataset/ETHPy150Open natduca/quickopen/src/test_runner.py/_get_open_fds
2,240
def writeRecord(self,dn,entry): """ dn string-representation of distinguished name entry dictionary holding the LDAP entry {attr:data} """ # Write line dn: first self._output_file.write( '%s<dsml:entry dn="%s">\n' % ( self._indent*2,replace_char(dn) ) ) objectclasses = entry.get('objectclass',entry.get('objectClass',[])) self._output_file.write('%s<dsml:objectclass>\n' % (self._indent*3)) for oc in objectclasses: self._output_file.write('%s<dsml:oc-value>%s</dsml:oc-value>\n' % (self._indent*4,oc)) self._output_file.write('%s</dsml:objectclass>\n' % (self._indent*3)) attr_types = entry.keys()[:] try: attr_types.remove('objectclass') attr_types.remove('objectClass') except __HOLE__: pass attr_types.sort() for attr_type in attr_types: self._output_file.write('%s<dsml:attr name="%s">\n' % (self._indent*3,attr_type)) for attr_value_item in entry[attr_type]: needs_base64_encoding = self._needs_base64_encoding( attr_type,attr_value_item ) if needs_base64_encoding: attr_value_item = base64.encodestring(attr_value_item) else: attr_value_item = replace_char(attr_value_item) self._output_file.write('%s<dsml:value%s>\n' % ( self._indent*4, ' encoding="base64"'*needs_base64_encoding ) ) self._output_file.write('%s%s\n' % ( self._indent*5, attr_value_item ) ) self._output_file.write('%s</dsml:value>\n' % ( self._indent*4, ) ) self._output_file.write('%s</dsml:attr>\n' % (self._indent*3)) self._output_file.write('%s</dsml:entry>\n' % (self._indent*2)) return
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/python-ldap-2.3.13/Lib/dsml.py/DSMLWriter.writeRecord
2,241
def ordinal(value):
    """
    Converts an integer to its ordinal as a string. 1 is '1st',
    2 is '2nd', 3 is '3rd', etc. Works for any integer.
    """
    try:
        value = int(value)
    except __HOLE__:
        return value
    t = (_('th'), _('st'), _('nd'), _('rd'), _('th'), _('th'), _('th'), _('th'), _('th'), _('th'))
    if value % 100 in (11, 12, 13): # special case
        return "%d%s" % (value, t[0])
    return '%d%s' % (value, t[value % 10])
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/contrib/humanize/templatetags/humanize.py/ordinal
2,242
def apnumber(value):
    """
    For numbers 1-9, returns the number spelled out. Otherwise, returns the
    number. This follows Associated Press style.
    """
    try:
        value = int(value)
    except __HOLE__:
        return value
    if not 0 < value < 10:
        return value
    return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value-1]
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/contrib/humanize/templatetags/humanize.py/apnumber
2,243
def edge_connectivity(G, s=None, t=None, flow_func=None): r"""Returns the edge connectivity of the graph or digraph G. The edge connectivity is equal to the minimum number of edges that must be removed to disconnect G or render it trivial. If source and target nodes are provided, this function returns the local edge connectivity: the minimum number of edges that must be removed to break all paths from source to target in G. Parameters ---------- G : NetworkX graph Undirected or directed graph s : node Source node. Optional. Default value: None. t : node Target node. Optional. Default value: None. flow_func : function A function for computing the maximum flow among a pair of nodes. The function has to accept at least three parameters: a Digraph, a source node, and a target node. And return a residual network that follows NetworkX conventions (see :meth:`maximum_flow` for details). If flow_func is None, the default maximum flow function (:meth:`edmonds_karp`) is used. See below for details. The choice of the default function may change from version to version and should not be relied on. Default value: None. Returns ------- K : integer Edge connectivity for G, or local edge connectivity if source and target were provided Examples -------- >>> # Platonic icosahedral graph is 5-edge-connected >>> G = nx.icosahedral_graph() >>> nx.edge_connectivity(G) 5 You can use alternative flow algorithms for the underlying maximum flow computation. In dense networks the algorithm :meth:`shortest_augmenting_path` will usually perform better than the default :meth:`edmonds_karp`, which is faster for sparse networks with highly skewed degree distributions. Alternative flow functions have to be explicitly imported from the flow package. >>> from networkx.algorithms.flow import shortest_augmenting_path >>> nx.edge_connectivity(G, flow_func=shortest_augmenting_path) 5 If you specify a pair of nodes (source and target) as parameters, this function returns the value of local edge connectivity. >>> nx.edge_connectivity(G, 3, 7) 5 If you need to perform several local computations among different pairs of nodes on the same graph, it is recommended that you reuse the data structures used in the maximum flow computations. See :meth:`local_edge_connectivity` for details. Notes ----- This is a flow based implementation of global edge connectivity. For undirected graphs the algorithm works by finding a 'small' dominating set of nodes of G (see algorithm 7 in [1]_ ) and computing local maximum flow (see :meth:`local_edge_connectivity`) between an arbitrary node in the dominating set and the rest of nodes in it. This is an implementation of algorithm 6 in [1]_ . For directed graphs, the algorithm does n calls to the maximum flow function. This is an implementation of algorithm 8 in [1]_ . See also -------- :meth:`local_edge_connectivity` :meth:`local_node_connectivity` :meth:`node_connectivity` :meth:`maximum_flow` :meth:`edmonds_karp` :meth:`preflow_push` :meth:`shortest_augmenting_path` References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. 
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ if (s is not None and t is None) or (s is None and t is not None): raise nx.NetworkXError('Both source and target must be specified.') # Local edge connectivity if s is not None and t is not None: if s not in G: raise nx.NetworkXError('node %s not in graph' % s) if t not in G: raise nx.NetworkXError('node %s not in graph' % t) return local_edge_connectivity(G, s, t, flow_func=flow_func) # Global edge connectivity # reuse auxiliary digraph and residual network H = build_auxiliary_edge_connectivity(G) R = build_residual_network(H, 'capacity') kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R) if G.is_directed(): # Algorithm 8 in [1] if not nx.is_weakly_connected(G): return 0 # initial value for \lambda is minimum degree L = min(d for n, d in G.degree()) nodes = list(G) n = len(nodes) for i in range(n): kwargs['cutoff'] = L try: L = min(L, local_edge_connectivity(G, nodes[i], nodes[i+1], **kwargs)) except __HOLE__: # last node! L = min(L, local_edge_connectivity(G, nodes[i], nodes[0], **kwargs)) return L else: # undirected # Algorithm 6 in [1] if not nx.is_connected(G): return 0 # initial value for \lambda is minimum degree L = min(d for n, d in G.degree()) # A dominating set is \lambda-covering # We need a dominating set with at least two nodes for node in G: D = nx.dominating_set(G, start_with=node) v = D.pop() if D: break else: # in complete graphs the dominating sets will always be of one node # thus we return min degree return L for w in D: kwargs['cutoff'] = L L = min(L, local_edge_connectivity(G, v, w, **kwargs)) return L
IndexError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/connectivity/connectivity.py/edge_connectivity
2,244
def setUp(self):
    """Use the same server for all tests."""
    global _server
    if not _server:
        _server = Server()
        _server.start()
        wait = 5
        running = False
        while not running and wait > 0:
            try:
                urlopen(WebGuiTest.defaulturl).read()
                running = True
            except (URLError, socket_error, __HOLE__):
                wait -= 1
                sleep(1)
IOError
dataset/ETHPy150Open pympler/pympler/test/gui/test_web.py/WebGuiTest.setUp
2,245
def get(self, url, status=200):
    conn = HTTPConnection(self.defaulthost)
    conn.request("GET", url)
    response = conn.getresponse()
    body = response.read()
    conn.close()
    self.assertEqual(response.status, status)
    try:
        body = body.decode()
    except __HOLE__:
        pass
    return body
UnicodeDecodeError
dataset/ETHPy150Open pympler/pympler/test/gui/test_web.py/WebGuiTest.get
2,246
def _check_cloudera_manager_started(self, manager):
    try:
        conn = telnetlib.Telnet(manager.management_ip, CM_API_PORT)
        conn.close()
        return True
    except __HOLE__:
        return False
IOError
dataset/ETHPy150Open openstack/sahara/sahara/plugins/cdh/plugin_utils.py/AbstractPluginUtils._check_cloudera_manager_started
2,247
def run(self, edit):
    self._setup_cmd()
    import os, subprocess, sys
    try:
        print "*** Executing command is : " + self.CMD + self.view.file_name()
        retcode = subprocess.call(self.CMD+self.view.file_name(), shell=True)
        if retcode < 0:
            print >>sys.stderr, "Aborted : ", -retcode
        else:
            print >>sys.stderr, "Return code : ", retcode
    except __HOLE__, e:
        print >>sys.stderr, "OSError cptured : ", e
    self._force_refresh()
OSError
dataset/ETHPy150Open ikeike443/Sublime-Scalariform/Scalariform.py/ScalariformCommand.run
2,248
def run(self):
    """ runs fdmnes """
    self.wfdmfile()   # write fdmfile.txt
    self.wconvfile()  # write convfile.txt
    try:
        subprocess.call('fdmnes', shell=True)
    except __HOLE__:
        print("check 'fdmnes' executable exists!")
OSError
dataset/ETHPy150Open xraypy/xraylarch/plugins/math/convolution1D.py/FdmnesConv.run
2,249
@record
def test_update_page_fail(self):
    # Arrange
    blob_name = self._create_blob(2048)
    data = self.get_random_bytes(512)
    resp1 = self.bs.update_page(self.container_name, blob_name, data, 0, 511)

    # Act
    try:
        self.bs.update_page(self.container_name, blob_name, data, 1024, 1536)
    except __HOLE__ as e:
        self.assertEqual(str(e), 'end_range must align with 512 page size')
        return

    # Assert
    raise Exception('Page range validation failed to throw on failure case')
ValueError
dataset/ETHPy150Open Azure/azure-storage-python/tests/test_page_blob.py/StoragePageBlobTest.test_update_page_fail
2,250
def create_capture(source = 0): '''source: <int> or '<int>|<filename>|synth [:<param_name>=<value> [:...]]' ''' source = str(source).strip() chunks = source.split(':') # hanlde drive letter ('c:', ...) if len(chunks) > 1 and len(chunks[0]) == 1 and chunks[0].isalpha(): chunks[1] = chunks[0] + ':' + chunks[1] del chunks[0] source = chunks[0] # Source is 0 try: source = int(source) except __HOLE__: pass params = dict( s.split('=') for s in chunks[1:] ) cap = None if source == 'synth': Class = classes.get(params.get('class', None), VideoSynthBase) try: cap = Class(**params) except: pass else: # Here is where the actual Video Capture is created cap = cv2.VideoCapture(source) if 'size' in params: w, h = map(int, params['size'].split('x')) cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, w) cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, h) if cap is None or not cap.isOpened(): print 'Warning: unable to open video source: ', source if fallback is not None: return create_capture(fallback, None) return cap cap = create_capture(fn) while True: flag, img = cap.read() gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) thrs1 = cv2.getTrackbarPos('thrs1', 'edge') thrs2 = cv2.getTrackbarPos('thrs2', 'edge') edge = cv2.Canny(gray, thrs1, thrs2, apertureSize=5) vis = img.copy() vis /= 2 vis[edge != 0] = (0, 255, 0) cv2.imshow('edge', vis) ch = cv2.waitKey(5) if ch == 27: break cv2.destroyAllWindows()
ValueError
dataset/ETHPy150Open bluquar/cubr/color.py/create_capture
2,251
def __call__(self, query, maxresult, catcherrors, normalize): swipl_fid = PL_open_foreign_frame() swipl_head = PL_new_term_ref() swipl_args = PL_new_term_refs(2) swipl_goalCharList = swipl_args swipl_bindingList = swipl_args + 1 PL_put_list_chars(swipl_goalCharList, query) swipl_predicate = PL_predicate("pyrun", 2, None) plq = catcherrors and (PL_Q_NODEBUG|PL_Q_CATCH_EXCEPTION) or PL_Q_NORMAL swipl_qid = PL_open_query(None, plq, swipl_predicate, swipl_args) Prolog._queryIsOpen = True # From now on, the query will be considered open try: while maxresult and PL_next_solution(swipl_qid): maxresult -= 1 bindings = [] swipl_list = PL_copy_term_ref(swipl_bindingList) t = getTerm(swipl_list) if normalize: try: v = t.value except __HOLE__: v = {} for r in [x.value for x in t]: v.update(r) yield v else: yield t if PL_exception(swipl_qid): term = getTerm(PL_exception(swipl_qid)) raise PrologError("".join(["Caused by: '", query, "'. ", "Returned: '", str(term), "'."])) finally: # This ensures that, whatever happens, we close the query PL_cut_query(swipl_qid) PL_discard_foreign_frame(swipl_fid) Prolog._queryIsOpen = False
AttributeError
dataset/ETHPy150Open yuce/pyswip/pyswip/prolog.py/Prolog._QueryWrapper.__call__
2,252
@classmethod def _checkaxis(cls, axis=None, shape=None, **kwargs): """ Check and expand a tuple of axis on Array, >>> A = Array(1, shape=(3, 3, 3)) >>> print A.formated() [[[1, 1, 1], [1, 1, 1], [1, 1, 1]], <BLANKLINE> [[1, 1, 1], [1, 1, 1], [1, 1, 1]], <BLANKLINE> [[1, 1, 1], [1, 1, 1], [1, 1, 1]]] >>> Array._checkaxis(axis=(1,), shape=(3, 3, 3)) (1,) >>> Array._checkaxis(shape=(3, 3, 3), fill=True) (0, 1, 2) >>> Array._checkaxis(shape=(3, 3, 3), fill=True, reverse=True) (2, 1, 0) >>> Array._checkaxis(axis=(1, 3), shape=(3, 3, 3)) Traceback (most recent call last): ... ValueError: axis 3 in axis list (1, 3) doesn't exist for an Array of shape (3, 3, 3) >>> Array._checkaxis(axis=(1, 1, 2), shape=(3, 3, 3)) Traceback (most recent call last): ... ValueError: axis 1 is present more than once in axis list (1, 1, 2) """ shape, ndim, size = cls._expandshape(shape=shape) fill = kwargs.get('fill',False) reverse = kwargs.get('reverse',False) if axis is None : axis = [] if not hasattr(axis, '__iter__') : axis = [axis] if len(axis) == 0 : if fill : if reverse : axis = range(ndim-1, -1, -1) else : axis = range(0, ndim, 1) else : try : if len(axis) == 1 and hasattr(axis[0], '__iter__') : axis = [range(ndim)[x] for x in axis[0]] else : axis = [range(ndim)[x] for x in axis] except __HOLE__ : raise ValueError, "axis %s in axis list %s doesn't exist for an Array of shape %s" % (x, tuple(axis), shape) for x in axis : if axis.count(x) > 1 : raise ValueError, "axis %s is present more than once in axis list %s" % (x, tuple(axis)) return tuple(axis)
IndexError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/util/arrays.py/Array._checkaxis
2,253
def get_index(array, key, default=None):
    try:
        return array.index(key)
    except __HOLE__:
        return default
ValueError
dataset/ETHPy150Open redhat-cip/software-factory/image/edeploy/mngids.py/get_index
2,254
def main(): uids = {} gids = {} debug('ORIG %s' % str(sys.argv)) IDS = '/etc/ids.tables' try: exec(open(IDS).read()) except __HOLE__: pass parse(open('/etc/passwd').read(), uids) parse(open('/etc/group').read(), gids, True) parse_cmdline(sys.argv, uids, gids) # debug('REWRITTEN %s' % str(sys.argv)) ret = subprocess.call(sys.argv) if ret != 0: sys.exit(ret) # parse(open('/etc/passwd').read(), uids) parse(open('/etc/group').read(), gids, True) # out = open(IDS, 'w') print('uids = ', file=out, end='') pprint.pprint(uids, out) print('gids = ', file=out, end='') pprint.pprint(gids, out) out.close()
IOError
dataset/ETHPy150Open redhat-cip/software-factory/image/edeploy/mngids.py/main
2,255
def getSRS(self, srsname, typename): """Returns None or Crs object for given name @param typename: feature name @type typename: String """ if not isinstance(srsname, Crs): srs = Crs(srsname) else: srs = srsname try: index = self.contents[typename].crsOptions.index(srs) # Return the Crs string that was pulled directly from the # GetCaps document (the 'id' attribute in the Crs object). return self.contents[typename].crsOptions[index] except __HOLE__: options = ", ".join(map(lambda x: x.id, self.contents[typename].crsOptions)) log.warning("Requested srsName '%s' not available for requested typename '%s'. \ Options are: %s. " % (srs.getcode(), typename, options)) return None
ValueError
dataset/ETHPy150Open geopython/OWSLib/owslib/feature/__init__.py/WebFeatureService_.getSRS
2,256
def _TearDown(self): """Performs operations to clean things up after performing diagnostics.""" if not self.teardown_completed: temp_file_dict.clear() try: for fpath in self.temporary_files: os.remove(fpath) if self.delete_directory: os.rmdir(self.directory) except __HOLE__: pass if self.threads > 1 or self.processes > 1: args = [obj for obj in self.temporary_objects] self.Apply(_DeleteWrapper, args, _PerfdiagExceptionHandler, arg_checker=DummyArgChecker, parallel_operations_override=True, process_count=self.processes, thread_count=self.threads) else: for object_name in self.temporary_objects: self.Delete(object_name, self.gsutil_api) self.teardown_completed = True
OSError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/commands/perfdiag.py/PerfDiagCommand._TearDown
2,257
def _GetTcpStats(self): """Tries to parse out TCP packet information from netstat output. Returns: A dictionary containing TCP information, or None if netstat is not available. """ # netstat return code is non-zero for -s on Linux, so don't raise on error. try: netstat_output = self._Exec(['netstat', '-s'], return_output=True, raise_on_error=False) except OSError: self.logger.warning('netstat not found on your system; some measurement ' 'data will be missing') return None netstat_output = netstat_output.strip().lower() found_tcp = False tcp_retransmit = None tcp_received = None tcp_sent = None for line in netstat_output.split('\n'): # Header for TCP section is "Tcp:" in Linux/Mac and # "TCP Statistics for" in Windows. if 'tcp:' in line or 'tcp statistics' in line: found_tcp = True # Linux == "segments retransmited" (sic), Mac == "retransmit timeouts" # Windows == "segments retransmitted". if (found_tcp and tcp_retransmit is None and ('segments retransmited' in line or 'retransmit timeouts' in line or 'segments retransmitted' in line)): tcp_retransmit = ''.join(c for c in line if c in string.digits) # Linux+Windows == "segments received", Mac == "packets received". if (found_tcp and tcp_received is None and ('segments received' in line or 'packets received' in line)): tcp_received = ''.join(c for c in line if c in string.digits) # Linux == "segments send out" (sic), Mac+Windows == "packets sent". if (found_tcp and tcp_sent is None and ('segments send out' in line or 'packets sent' in line or 'segments sent' in line)): tcp_sent = ''.join(c for c in line if c in string.digits) result = {} try: result['tcp_retransmit'] = int(tcp_retransmit) result['tcp_received'] = int(tcp_received) result['tcp_sent'] = int(tcp_sent) except (__HOLE__, TypeError): result['tcp_retransmit'] = None result['tcp_received'] = None result['tcp_sent'] = None return result
ValueError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/commands/perfdiag.py/PerfDiagCommand._GetTcpStats
2,258
def _CollectSysInfo(self): """Collects system information.""" sysinfo = {} # All exceptions that might be raised from socket module calls. socket_errors = ( socket.error, socket.herror, socket.gaierror, socket.timeout) # Find out whether HTTPS is enabled in Boto. sysinfo['boto_https_enabled'] = boto.config.get('Boto', 'is_secure', True) # Look up proxy info. proxy_host = boto.config.get('Boto', 'proxy', None) proxy_port = boto.config.getint('Boto', 'proxy_port', 0) sysinfo['using_proxy'] = bool(proxy_host) if boto.config.get('Boto', 'proxy_rdns', False): self.logger.info('DNS lookups are disallowed in this environment, so ' 'some information is not included in this perfdiag run.') # Get the local IP address from socket lib. try: sysinfo['ip_address'] = socket.gethostbyname(socket.gethostname()) except socket_errors: sysinfo['ip_address'] = '' # Record the temporary directory used since it can affect performance, e.g. # when on a networked filesystem. sysinfo['tempdir'] = self.directory # Produces an RFC 2822 compliant GMT timestamp. sysinfo['gmt_timestamp'] = time.strftime('%a, %d %b %Y %H:%M:%S +0000', time.gmtime()) # Execute a CNAME lookup on Google DNS to find what Google server # it's routing to. cmd = ['nslookup', '-type=CNAME', self.XML_API_HOST] try: nslookup_cname_output = self._Exec(cmd, return_output=True) m = re.search(r' = (?P<googserv>[^.]+)\.', nslookup_cname_output) sysinfo['googserv_route'] = m.group('googserv') if m else None except (CommandException, OSError): sysinfo['googserv_route'] = '' # Try to determine the latency of a DNS lookup for the Google hostname # endpoint. Note: we don't piggyback on gethostbyname_ex below because # the _ex version requires an extra RTT. try: t0 = time.time() socket.gethostbyname(self.XML_API_HOST) t1 = time.time() sysinfo['google_host_dns_latency'] = t1 - t0 except socket_errors: pass # Look up IP addresses for Google Server. try: (hostname, _, ipaddrlist) = socket.gethostbyname_ex(self.XML_API_HOST) sysinfo['googserv_ips'] = ipaddrlist except socket_errors: ipaddrlist = [] sysinfo['googserv_ips'] = [] # Reverse lookup the hostnames for the Google Server IPs. sysinfo['googserv_hostnames'] = [] for googserv_ip in ipaddrlist: try: (hostname, _, ipaddrlist) = socket.gethostbyaddr(googserv_ip) sysinfo['googserv_hostnames'].append(hostname) except socket_errors: pass # Query o-o to find out what the Google DNS thinks is the user's IP. try: cmd = ['nslookup', '-type=TXT', 'o-o.myaddr.google.com.'] nslookup_txt_output = self._Exec(cmd, return_output=True) m = re.search(r'text\s+=\s+"(?P<dnsip>[\.\d]+)"', nslookup_txt_output) sysinfo['dns_o-o_ip'] = m.group('dnsip') if m else None except (CommandException, OSError): sysinfo['dns_o-o_ip'] = '' # Try to determine the latency of connecting to the Google hostname # endpoint. sysinfo['google_host_connect_latencies'] = {} for googserv_ip in ipaddrlist: try: sock = socket.socket() t0 = time.time() sock.connect((googserv_ip, self.XML_API_PORT)) t1 = time.time() sysinfo['google_host_connect_latencies'][googserv_ip] = t1 - t0 except socket_errors: pass # If using a proxy, try to determine the latency of a DNS lookup to resolve # the proxy hostname and the latency of connecting to the proxy. 
if proxy_host: proxy_ip = None try: t0 = time.time() proxy_ip = socket.gethostbyname(proxy_host) t1 = time.time() sysinfo['proxy_dns_latency'] = t1 - t0 except socket_errors: pass try: sock = socket.socket() t0 = time.time() sock.connect((proxy_ip or proxy_host, proxy_port)) t1 = time.time() sysinfo['proxy_host_connect_latency'] = t1 - t0 except socket_errors: pass # Try and find the number of CPUs in the system if available. try: sysinfo['cpu_count'] = multiprocessing.cpu_count() except NotImplementedError: sysinfo['cpu_count'] = None # For *nix platforms, obtain the CPU load. try: sysinfo['load_avg'] = list(os.getloadavg()) except (AttributeError, OSError): sysinfo['load_avg'] = None # Try and collect memory information from /proc/meminfo if possible. mem_total = None mem_free = None mem_buffers = None mem_cached = None try: with open('/proc/meminfo', 'r') as f: for line in f: if line.startswith('MemTotal'): mem_total = (int(''.join(c for c in line if c in string.digits)) * 1000) elif line.startswith('MemFree'): mem_free = (int(''.join(c for c in line if c in string.digits)) * 1000) elif line.startswith('Buffers'): mem_buffers = (int(''.join(c for c in line if c in string.digits)) * 1000) elif line.startswith('Cached'): mem_cached = (int(''.join(c for c in line if c in string.digits)) * 1000) except (__HOLE__, ValueError): pass sysinfo['meminfo'] = {'mem_total': mem_total, 'mem_free': mem_free, 'mem_buffers': mem_buffers, 'mem_cached': mem_cached} # Get configuration attributes from config module. sysinfo['gsutil_config'] = {} for attr in dir(config): attr_value = getattr(config, attr) # Filter out multiline strings that are not useful. if attr.isupper() and not (isinstance(attr_value, basestring) and '\n' in attr_value): sysinfo['gsutil_config'][attr] = attr_value sysinfo['tcp_proc_values'] = {} stats_to_check = [ '/proc/sys/net/core/rmem_default', '/proc/sys/net/core/rmem_max', '/proc/sys/net/core/wmem_default', '/proc/sys/net/core/wmem_max', '/proc/sys/net/ipv4/tcp_timestamps', '/proc/sys/net/ipv4/tcp_sack', '/proc/sys/net/ipv4/tcp_window_scaling', ] for fname in stats_to_check: try: with open(fname, 'r') as f: value = f.read() sysinfo['tcp_proc_values'][os.path.basename(fname)] = value.strip() except IOError: pass self.results['sysinfo'] = sysinfo
IOError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/commands/perfdiag.py/PerfDiagCommand._CollectSysInfo
2,259
def _DisplayResults(self): """Displays results collected from diagnostic run.""" print print '=' * 78 print 'DIAGNOSTIC RESULTS'.center(78) print '=' * 78 if 'latency' in self.results: print print '-' * 78 print 'Latency'.center(78) print '-' * 78 print ('Operation Size Trials Mean (ms) Std Dev (ms) ' 'Median (ms) 90th % (ms)') print ('========= ========= ====== ========= ============ ' '=========== ===========') for key in sorted(self.results['latency']): trials = sorted(self.results['latency'][key]) op, numbytes = key.split('_') numbytes = int(numbytes) if op == 'METADATA': print 'Metadata'.rjust(9), '', print MakeHumanReadable(numbytes).rjust(9), '', self._DisplayStats(trials) if op == 'DOWNLOAD': print 'Download'.rjust(9), '', print MakeHumanReadable(numbytes).rjust(9), '', self._DisplayStats(trials) if op == 'UPLOAD': print 'Upload'.rjust(9), '', print MakeHumanReadable(numbytes).rjust(9), '', self._DisplayStats(trials) if op == 'DELETE': print 'Delete'.rjust(9), '', print MakeHumanReadable(numbytes).rjust(9), '', self._DisplayStats(trials) if 'write_throughput' in self.results: print print '-' * 78 print 'Write Throughput'.center(78) print '-' * 78 write_thru = self.results['write_throughput'] print 'Copied %s %s file(s) for a total transfer size of %s.' % ( self.num_objects, MakeHumanReadable(write_thru['file_size']), MakeHumanReadable(write_thru['total_bytes_copied'])) print 'Write throughput: %s/s.' % ( MakeBitsHumanReadable(write_thru['bytes_per_second'] * 8)) if 'parallelism' in write_thru: # Compatibility with old versions. print 'Parallelism strategy: %s' % write_thru['parallelism'] if 'write_throughput_file' in self.results: print print '-' * 78 print 'Write Throughput With File I/O'.center(78) print '-' * 78 write_thru_file = self.results['write_throughput_file'] print 'Copied %s %s file(s) for a total transfer size of %s.' % ( self.num_objects, MakeHumanReadable(write_thru_file['file_size']), MakeHumanReadable(write_thru_file['total_bytes_copied'])) print 'Write throughput: %s/s.' % ( MakeBitsHumanReadable(write_thru_file['bytes_per_second'] * 8)) if 'parallelism' in write_thru_file: # Compatibility with old versions. print 'Parallelism strategy: %s' % write_thru_file['parallelism'] if 'read_throughput' in self.results: print print '-' * 78 print 'Read Throughput'.center(78) print '-' * 78 read_thru = self.results['read_throughput'] print 'Copied %s %s file(s) for a total transfer size of %s.' % ( self.num_objects, MakeHumanReadable(read_thru['file_size']), MakeHumanReadable(read_thru['total_bytes_copied'])) print 'Read throughput: %s/s.' % ( MakeBitsHumanReadable(read_thru['bytes_per_second'] * 8)) if 'parallelism' in read_thru: # Compatibility with old versions. print 'Parallelism strategy: %s' % read_thru['parallelism'] if 'read_throughput_file' in self.results: print print '-' * 78 print 'Read Throughput With File I/O'.center(78) print '-' * 78 read_thru_file = self.results['read_throughput_file'] print 'Copied %s %s file(s) for a total transfer size of %s.' % ( self.num_objects, MakeHumanReadable(read_thru_file['file_size']), MakeHumanReadable(read_thru_file['total_bytes_copied'])) print 'Read throughput: %s/s.' % ( MakeBitsHumanReadable(read_thru_file['bytes_per_second'] * 8)) if 'parallelism' in read_thru_file: # Compatibility with old versions. 
print 'Parallelism strategy: %s' % read_thru_file['parallelism'] if 'listing' in self.results: print print '-' * 78 print 'Listing'.center(78) print '-' * 78 listing = self.results['listing'] insert = listing['insert'] delete = listing['delete'] print 'After inserting %s objects:' % listing['num_files'] print (' Total time for objects to appear: %.2g seconds' % insert['time_took']) print ' Number of listing calls made: %s' % insert['num_listing_calls'] print (' Individual listing call latencies: [%s]' % ', '.join('%.2gs' % lat for lat in insert['list_latencies'])) print (' Files reflected after each call: [%s]' % ', '.join(map(str, insert['files_seen_after_listing']))) print 'After deleting %s objects:' % listing['num_files'] print (' Total time for objects to appear: %.2g seconds' % delete['time_took']) print ' Number of listing calls made: %s' % delete['num_listing_calls'] print (' Individual listing call latencies: [%s]' % ', '.join('%.2gs' % lat for lat in delete['list_latencies'])) print (' Files reflected after each call: [%s]' % ', '.join(map(str, delete['files_seen_after_listing']))) if 'sysinfo' in self.results: print print '-' * 78 print 'System Information'.center(78) print '-' * 78 info = self.results['sysinfo'] print 'IP Address: \n %s' % info['ip_address'] print 'Temporary Directory: \n %s' % info['tempdir'] print 'Bucket URI: \n %s' % self.results['bucket_uri'] print 'gsutil Version: \n %s' % self.results.get('gsutil_version', 'Unknown') print 'boto Version: \n %s' % self.results.get('boto_version', 'Unknown') if 'gmt_timestamp' in info: ts_string = info['gmt_timestamp'] timetuple = None try: # Convert RFC 2822 string to Linux timestamp. timetuple = time.strptime(ts_string, '%a, %d %b %Y %H:%M:%S +0000') except ValueError: pass if timetuple: # Converts the GMT time tuple to local Linux timestamp. localtime = calendar.timegm(timetuple) localdt = datetime.datetime.fromtimestamp(localtime) print 'Measurement time: \n %s' % localdt.strftime( '%Y-%m-%d %I:%M:%S %p %Z') print 'Google Server: \n %s' % info['googserv_route'] print ('Google Server IP Addresses: \n %s' % ('\n '.join(info['googserv_ips']))) print ('Google Server Hostnames: \n %s' % ('\n '.join(info['googserv_hostnames']))) print 'Google DNS thinks your IP is: \n %s' % info['dns_o-o_ip'] print 'CPU Count: \n %s' % info['cpu_count'] print 'CPU Load Average: \n %s' % info['load_avg'] try: print ('Total Memory: \n %s' % MakeHumanReadable(info['meminfo']['mem_total'])) # Free memory is really MemFree + Buffers + Cached. 
print 'Free Memory: \n %s' % MakeHumanReadable( info['meminfo']['mem_free'] + info['meminfo']['mem_buffers'] + info['meminfo']['mem_cached']) except TypeError: pass if 'netstat_end' in info and 'netstat_start' in info: netstat_after = info['netstat_end'] netstat_before = info['netstat_start'] for tcp_type in ('sent', 'received', 'retransmit'): try: delta = (netstat_after['tcp_%s' % tcp_type] - netstat_before['tcp_%s' % tcp_type]) print 'TCP segments %s during test:\n %d' % (tcp_type, delta) except __HOLE__: pass else: print ('TCP segment counts not available because "netstat" was not ' 'found during test runs') if 'disk_counters_end' in info and 'disk_counters_start' in info: print 'Disk Counter Deltas:\n', disk_after = info['disk_counters_end'] disk_before = info['disk_counters_start'] print '', 'disk'.rjust(6), for colname in ['reads', 'writes', 'rbytes', 'wbytes', 'rtime', 'wtime']: print colname.rjust(8), print for diskname in sorted(disk_after): before = disk_before[diskname] after = disk_after[diskname] (reads1, writes1, rbytes1, wbytes1, rtime1, wtime1) = before (reads2, writes2, rbytes2, wbytes2, rtime2, wtime2) = after print '', diskname.rjust(6), deltas = [reads2-reads1, writes2-writes1, rbytes2-rbytes1, wbytes2-wbytes1, rtime2-rtime1, wtime2-wtime1] for delta in deltas: print str(delta).rjust(8), print if 'tcp_proc_values' in info: print 'TCP /proc values:\n', for item in info['tcp_proc_values'].iteritems(): print ' %s = %s' % item if 'boto_https_enabled' in info: print 'Boto HTTPS Enabled: \n %s' % info['boto_https_enabled'] if 'using_proxy' in info: print 'Requests routed through proxy: \n %s' % info['using_proxy'] if 'google_host_dns_latency' in info: print ('Latency of the DNS lookup for Google Storage server (ms): ' '\n %.1f' % (info['google_host_dns_latency'] * 1000.0)) if 'google_host_connect_latencies' in info: print 'Latencies connecting to Google Storage server IPs (ms):' for ip, latency in info['google_host_connect_latencies'].iteritems(): print ' %s = %.1f' % (ip, latency * 1000.0) if 'proxy_dns_latency' in info: print ('Latency of the DNS lookup for the configured proxy (ms): ' '\n %.1f' % (info['proxy_dns_latency'] * 1000.0)) if 'proxy_host_connect_latency' in info: print ('Latency connecting to the configured proxy (ms): \n %.1f' % (info['proxy_host_connect_latency'] * 1000.0)) if 'request_errors' in self.results and 'total_requests' in self.results: print print '-' * 78 print 'In-Process HTTP Statistics'.center(78) print '-' * 78 total = int(self.results['total_requests']) numerrors = int(self.results['request_errors']) numbreaks = int(self.results['connection_breaks']) availability = (((total - numerrors) / float(total)) * 100 if total > 0 else 100) print 'Total HTTP requests made: %d' % total print 'HTTP 5xx errors: %d' % numerrors print 'HTTP connections broken: %d' % numbreaks print 'Availability: %.7g%%' % availability if 'error_responses_by_code' in self.results: sorted_codes = sorted( self.results['error_responses_by_code'].iteritems()) if sorted_codes: print 'Error responses by code:' print '\n'.join(' %s: %s' % c for c in sorted_codes) if self.output_file: with open(self.output_file, 'w') as f: json.dump(self.results, f, indent=2) print print "Output file written to '%s'." % self.output_file print
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/commands/perfdiag.py/PerfDiagCommand._DisplayResults
2,260
def _ParsePositiveInteger(self, val, msg):
    """Tries to convert val argument to a positive integer.

    Args:
      val: The value (as a string) to convert to a positive integer.
      msg: The error message to place in the CommandException on an error.

    Returns:
      A valid positive integer.

    Raises:
      CommandException: If the supplied value is not a valid positive integer.
    """
    try:
        val = int(val)
        if val < 1:
            raise CommandException(msg)
        return val
    except __HOLE__:
        raise CommandException(msg)
ValueError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/commands/perfdiag.py/PerfDiagCommand._ParsePositiveInteger
2,261
def _ParseArgs(self): """Parses arguments for perfdiag command.""" # From -n. self.num_objects = 5 # From -c. self.processes = 1 # From -k. self.threads = 1 # From -p self.parallel_strategy = None # From -y self.num_slices = 4 # From -s. self.thru_filesize = 1048576 # From -d. self.directory = tempfile.gettempdir() # Keep track of whether or not to delete the directory upon completion. self.delete_directory = False # From -t. self.diag_tests = set(self.DEFAULT_DIAG_TESTS) # From -o. self.output_file = None # From -i. self.input_file = None # From -m. self.metadata_keys = {} if self.sub_opts: for o, a in self.sub_opts: if o == '-n': self.num_objects = self._ParsePositiveInteger( a, 'The -n parameter must be a positive integer.') if o == '-c': self.processes = self._ParsePositiveInteger( a, 'The -c parameter must be a positive integer.') if o == '-k': self.threads = self._ParsePositiveInteger( a, 'The -k parameter must be a positive integer.') if o == '-p': if a.lower() in self.PARALLEL_STRATEGIES: self.parallel_strategy = a.lower() else: raise CommandException( "'%s' is not a valid parallelism strategy." % a) if o == '-y': self.num_slices = self._ParsePositiveInteger( a, 'The -y parameter must be a positive integer.') if o == '-s': try: self.thru_filesize = HumanReadableToBytes(a) except ValueError: raise CommandException('Invalid -s parameter.') if o == '-d': self.directory = a if not os.path.exists(self.directory): self.delete_directory = True os.makedirs(self.directory) if o == '-t': self.diag_tests = set() for test_name in a.strip().split(','): if test_name.lower() not in self.ALL_DIAG_TESTS: raise CommandException("List of test names (-t) contains invalid " "test name '%s'." % test_name) self.diag_tests.add(test_name) if o == '-m': pieces = a.split(':') if len(pieces) != 2: raise CommandException( "Invalid metadata key-value combination '%s'." % a) key, value = pieces self.metadata_keys[key] = value if o == '-o': self.output_file = os.path.abspath(a) if o == '-i': self.input_file = os.path.abspath(a) if not os.path.isfile(self.input_file): raise CommandException("Invalid input file (-i): '%s'." % a) try: with open(self.input_file, 'r') as f: self.results = json.load(f) self.logger.info("Read input file: '%s'.", self.input_file) except __HOLE__: raise CommandException("Could not decode input file (-i): '%s'." % a) return # If parallelism is specified, default parallelism strategy to fan. if (self.processes > 1 or self.threads > 1) and not self.parallel_strategy: self.parallel_strategy = self.FAN elif self.processes == 1 and self.threads == 1 and self.parallel_strategy: raise CommandException( 'Cannot specify parallelism strategy (-p) without also specifying ' 'multiple threads and/or processes (-c and/or -k).') if not self.args: self.RaiseWrongNumberOfArgumentsException() self.bucket_url = StorageUrlFromString(self.args[0]) self.provider = self.bucket_url.scheme if not self.bucket_url.IsCloudUrl() and self.bucket_url.IsBucket(): raise CommandException('The perfdiag command requires a URL that ' 'specifies a bucket.\n"%s" is not ' 'valid.' % self.args[0]) if (self.thru_filesize > HumanReadableToBytes('2GiB') and (self.RTHRU in self.diag_tests or self.WTHRU in self.diag_tests)): raise CommandException( 'For in-memory tests maximum file size is 2GiB. 
For larger file ' 'sizes, specify rthru_file and/or wthru_file with the -t option.') perform_slice = self.parallel_strategy in (self.SLICE, self.BOTH) slice_not_available = ( self.provider == 's3' and self.diag_tests.intersection(self.WTHRU, self.WTHRU_FILE)) if perform_slice and slice_not_available: raise CommandException('Sliced uploads are not available for s3. ' 'Use -p fan or sequential uploads for s3.') # Ensure the bucket exists. self.gsutil_api.GetBucket(self.bucket_url.bucket_name, provider=self.bucket_url.scheme, fields=['id']) self.exceptions = [httplib.HTTPException, socket.error, socket.gaierror, socket.timeout, httplib.BadStatusLine, ServiceException] # Command entry point.
ValueError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/commands/perfdiag.py/PerfDiagCommand._ParseArgs
2,262
@jinja2.contextfunction
@library.global_function
def breadcrumbs(context, items=list(), add_default=True, id=None):
    """
    Show a list of breadcrumbs. If url is None, it won't be a link.

    Accepts: [(url, label)]
    """
    if add_default:
        first_crumb = u'Home'
        crumbs = [(reverse('home'), _lazy(first_crumb))]
    else:
        crumbs = []

    # add user-defined breadcrumbs
    if items:
        try:
            crumbs += items
        except __HOLE__:
            crumbs.append(items)

    c = {'breadcrumbs': crumbs, 'id': id}
    return jinja2.Markup(render_to_string('layout/breadcrumbs.html', c))
TypeError
dataset/ETHPy150Open mozilla/kitsune/kitsune/sumo/templatetags/jinja_helpers.py/breadcrumbs
2,263
@jinja2.contextfunction @library.global_function def datetimeformat(context, value, format='shortdatetime'): """ Returns a formatted date/time using Babel's locale settings. Uses the timezone from settings.py, if the user has not been authenticated. """ if not isinstance(value, datetime.datetime): # Expecting date value raise ValueError( 'Unexpected value {value} passed to datetimeformat'.format( value=value)) request = context.get('request') default_tzinfo = convert_tzinfo = timezone(settings.TIME_ZONE) if value.tzinfo is None: value = default_tzinfo.localize(value) new_value = value.astimezone(default_tzinfo) else: new_value = value if hasattr(request, 'session'): if 'timezone' not in request.session: if hasattr(request, 'user') and request.user.is_authenticated(): try: convert_tzinfo = (Profile.objects.get(user=request.user).timezone or default_tzinfo) except (Profile.DoesNotExist, __HOLE__): pass request.session['timezone'] = convert_tzinfo else: convert_tzinfo = request.session['timezone'] convert_value = new_value.astimezone(convert_tzinfo) locale = _babel_locale(_contextual_locale(context)) # If within a day, 24 * 60 * 60 = 86400s if format == 'shortdatetime': # Check if the date is today today = datetime.datetime.now(tz=convert_tzinfo).toordinal() if convert_value.toordinal() == today: formatted = _lazy(u'Today at %s') % format_time( convert_value, format='short', tzinfo=convert_tzinfo, locale=locale) else: formatted = format_datetime(convert_value, format='short', tzinfo=convert_tzinfo, locale=locale) elif format == 'longdatetime': formatted = format_datetime(convert_value, format='long', tzinfo=convert_tzinfo, locale=locale) elif format == 'date': formatted = format_date(convert_value, locale=locale) elif format == 'time': formatted = format_time(convert_value, tzinfo=convert_tzinfo, locale=locale) elif format == 'datetime': formatted = format_datetime(convert_value, tzinfo=convert_tzinfo, locale=locale) elif format == 'year': formatted = format_datetime(convert_value, format='yyyy', tzinfo=convert_tzinfo, locale=locale) else: # Unknown format raise DateTimeFormatError return jinja2.Markup('<time datetime="%s">%s</time>' % (convert_value.isoformat(), formatted))
AttributeError
dataset/ETHPy150Open mozilla/kitsune/kitsune/sumo/templatetags/jinja_helpers.py/datetimeformat
2,264
@library.global_function
def static(path):
    """Generate a URL for the specified static file."""
    try:
        return django_static(path)
    except __HOLE__ as err:
        log.error('Static helper error: %s' % err)
        return ''
ValueError
dataset/ETHPy150Open mozilla/kitsune/kitsune/sumo/templatetags/jinja_helpers.py/static
2,265
def _module_exists(self, module_name):
    """
    Returns True iff module_name exists (but isn't necessarily importable).
    """
    # imp.find_module doesn't handle hierarchical module names, so we split
    # on full stops and keep feeding it the path it returns until we run
    # out of module name.
    module_name, path = module_name.split('.'), None
    while module_name:
        try:
            path = [imp.find_module(module_name.pop(0), path)[1]]
        except __HOLE__, e:
            return False
    return True
ImportError
dataset/ETHPy150Open mollyproject/mollyproject/molly/conf/settings.py/Application._module_exists
2,266
def test_get_raises_exception_with_full_traceback(self):
    exc_class_get = None
    exc_class_set = None
    exc_instance_get = None
    exc_instance_set = None
    exc_traceback_get = None
    exc_traceback_set = None
    future = self.future_class()

    try:
        raise NameError('foo')
    except __HOLE__:
        exc_class_set, exc_instance_set, exc_traceback_set = sys.exc_info()
        future.set_exception()

    # We could move to another thread at this point

    try:
        future.get()
    except NameError:
        exc_class_get, exc_instance_get, exc_traceback_get = sys.exc_info()

    self.assertEqual(exc_class_set, exc_class_get)
    self.assertEqual(exc_instance_set, exc_instance_get)

    exc_traceback_list_set = list(reversed(
        traceback.extract_tb(exc_traceback_set)))
    exc_traceback_list_get = list(reversed(
        traceback.extract_tb(exc_traceback_get)))

    # All frames from the first traceback should be included in the
    # traceback from the future.get() reraise
    self.assert_(len(exc_traceback_list_set) < len(exc_traceback_list_get))
    for i, frame in enumerate(exc_traceback_list_set):
        self.assertEquals(frame, exc_traceback_list_get[i])
NameError
dataset/ETHPy150Open jodal/pykka/tests/future_test.py/FutureTest.test_get_raises_exception_with_full_traceback
2,267
def push_message(parsed_message):
    """
    Spawned as a greenlet to push parsed messages through ZeroMQ.
    """
    try:
        # This will be the representation to send to the Announcers.
        json_str = unified.encode_to_json(parsed_message)
    except __HOLE__:
        logger.error('Unable to serialize a parsed message.')
        return

    # Push a zlib compressed JSON representation of the message to
    # announcers.
    compressed_msg = zlib.compress(json_str)
    sender.send(compressed_msg)
TypeError
dataset/ETHPy150Open gtaylor/EVE-Market-Data-Relay/emdr/daemons/gateway/order_pusher.py/push_message
2,268
@classmethod def run(cls, command, cwd=".", **kwargs): """ Make a subprocess call, collect its output and returncode. Returns CommandResult instance as ValueObject. """ assert isinstance(command, six.string_types) command_result = CommandResult() command_result.command = command use_shell = cls.USE_SHELL if "shell" in kwargs: use_shell = kwargs.pop("shell") # -- BUILD COMMAND ARGS: if six.PY2 and isinstance(command, six.text_type): # -- PREPARE-FOR: shlex.split() # In PY2, shlex.split() requires bytes string (non-unicode). # In PY3, shlex.split() accepts unicode string. command = codecs.encode(command, "utf-8") cmdargs = shlex.split(command) # -- TRANSFORM COMMAND (optional) command0 = cmdargs[0] real_command = cls.COMMAND_MAP.get(command0, None) if real_command: cmdargs0 = real_command.split() cmdargs = cmdargs0 + cmdargs[1:] preprocessors = cls.PREPROCESSOR_MAP.get(command0) if preprocessors: cmdargs = cls.preprocess_command(preprocessors, cmdargs, command, cwd) # -- RUN COMMAND: try: process = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=use_shell, cwd=cwd, **kwargs) out, err = process.communicate() if six.PY2: # py3: we get unicode strings, py2 not default_encoding = 'UTF-8' out = six.text_type(out, process.stdout.encoding or default_encoding) err = six.text_type(err, process.stderr.encoding or default_encoding) process.poll() assert process.returncode is not None command_result.stdout = out command_result.stderr = err command_result.returncode = process.returncode if cls.DEBUG: print("shell.cwd={0}".format(kwargs.get("cwd", None))) print("shell.command: {0}".format(" ".join(cmdargs))) print("shell.command.output:\n{0};".format(command_result.output)) except __HOLE__ as e: command_result.stderr = u"OSError: %s" % e command_result.returncode = e.errno assert e.errno != 0 postprocessors = cls.POSTPROCESSOR_MAP.get(command0) if postprocessors: command_result = cls.postprocess_command(postprocessors, command_result) return command_result # ----------------------------------------------------------------------------- # PREPROCESSOR: # -----------------------------------------------------------------------------
OSError
dataset/ETHPy150Open behave/behave/behave4cmd0/command_shell.py/Command.run
2,269
def seq_arrival(self, seq_num):
    '''
    returns the packet in which the specified sequence number first arrived.
    '''
    try:
        return self.arrival_data.find_le(seq_num)[1]
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open andrewf/pcap2har/pcap2har/tcp/direction.py/Direction.seq_arrival
2,270
@classmethod
def Open(cls, **kwargs):
    """See IterOpen, but raises if multiple or no matches found."""
    handle_iter = cls.IterOpen(**kwargs)

    try:
        handle = next(handle_iter)
    except StopIteration:
        # No matching interface, raise.
        raise usb_exceptions.DeviceNotFoundError(
            'Open failed with args: %s', kwargs)

    try:
        multiple_handle = next(handle_iter)
    except __HOLE__:
        # Exactly one matching device, return it.
        return handle

    # We have more than one device, close the ones we opened and bail.
    handle.Close()
    multiple_handle.Close()
    raise usb_exceptions.MultipleInterfacesFoundError(kwargs)

# pylint: disable=too-many-arguments
StopIteration
dataset/ETHPy150Open google/openhtf/openhtf/plugs/usb/local_usb.py/LibUsbHandle.Open
2,271
@click.command() @click.option('--config', help="Path to configuration file. Default: ~/.curator/curator.yml", type=click.Path(exists=True), default=CONFIG_FILE ) @click.option('--dry-run', is_flag=True, help='Do not perform any changes.') @click.argument('action_file', type=click.Path(exists=True), nargs=1) @click.version_option(version=__version__) def cli(config, dry_run, action_file): """ Curator for Elasticsearch indices. See http://elastic.co/guide/en/elasticsearch/client/curator/current """ # Get config from yaml file yaml_config = get_yaml(config) # Get default options and overwrite with any changes try: yaml_log_opts = prune_nones(yaml_config['logging']) log_opts = LOGGING_DEFAULTS log_opts.update(yaml_log_opts) except __HOLE__: # Use the defaults if there is no logging section log_opts = LOGGING_DEFAULTS # Set up logging loginfo = LogInfo(log_opts) logging.root.addHandler(loginfo.handler) logging.root.setLevel(loginfo.numeric_log_level) logger = logging.getLogger('curator.cli') # Set up NullHandler() to handle nested elasticsearch.trace Logger # instance in elasticsearch python client logging.getLogger('elasticsearch.trace').addHandler(NullHandler()) # Get default client options and overwrite with any changes try: yaml_client = prune_nones(yaml_config['client']) client_args = CLIENT_DEFAULTS client_args.update(yaml_client) except KeyError: logger.critical( 'Unable to read client configuration. ' 'Please check the configuration file: {0}'.format(config) ) sys.exit(1) test_client_options(client_args) # Create a client object client = get_client(**client_args) ######################################### ### Start working on the actions here ### ######################################### actions = get_yaml(action_file)['actions'] logger.debug('Full list of actions: {0}'.format(actions)) action_keys = sorted(list(actions.keys())) for idx in action_keys: if 'action' in actions[idx] and actions[idx]['action'] is not None: action = actions[idx]['action'].lower() else: raise MissingArgument('No value for "action" provided') logger.info('Action #{0}: {1}'.format(idx, action)) if not 'options' in actions[idx] or \ type(actions[idx]['options']) is not type(dict()): actions[idx]['options'] = OPTION_DEFAULTS # Assign and remove these keys from the options as the action will # raise an exception if they are passed as kwargs action_disabled = actions[idx]['options'].pop('disable_action', False) continue_if_exception = ( actions[idx]['options'].pop('continue_if_exception', False)) logger.debug( 'continue_if_exception = {0}'.format(continue_if_exception)) kwargs = {} kwargs['master_timeout'] = ( client_args['timeout'] if client_args['timeout'] <= 300 else 300) kwargs['dry_run'] = dry_run ### Skip to next action if 'disabled' if action_disabled: logger.info( 'Action "{0}" not performed because "disable_action" is set to ' 'True'.format(action) ) continue ########################## ### Process the action ### ########################## try: logger.debug('TRY: actions: {0} kwargs: ' '{1}'.format(actions[idx], kwargs) ) process_action(client, actions[idx], **kwargs) except Exception as e: logger.error( 'Failed to complete action: {0}. {1}: ' '{2}'.format(action, type(e), e) ) if continue_if_exception: logger.info( 'Continuing execution with next action because ' '"continue_if_exception" is set to True for action ' '{0}'.format(action) ) else: sys.exit(1)
KeyError
dataset/ETHPy150Open elastic/curator/curator/cli.py/cli
2,272
@cached_property
def has_ddl_transactions(self):
    """
    Tests the database using feature detection to see if it has
    transactional DDL support.
    """
    self._possibly_initialise()
    connection = self._get_connection()
    if hasattr(connection.features, "confirm") and not connection.features._confirmed:
        connection.features.confirm()
    # Django 1.3's MySQLdb backend doesn't raise DatabaseError
    exceptions = (DatabaseError, )
    try:
        from MySQLdb import OperationalError
        exceptions += (OperationalError, )
    except __HOLE__:
        pass
    # Now do the test
    if getattr(connection.features, 'supports_transactions', True):
        cursor = connection.cursor()
        self.start_transaction()
        cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
        self.rollback_transaction()
        try:
            try:
                cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
            except exceptions:
                return False
            else:
                return True
        finally:
            cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
    else:
        return False
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations.has_ddl_transactions
2,273
def lookup_constraint(self, db_name, table_name, column_name=None):
    """ return a set() of constraints for db_name.table_name.column_name """
    def _lookup():
        table = self._constraint_cache[db_name][table_name]
        if table is INVALID:
            raise INVALID
        elif column_name is None:
            return list(table.items())
        else:
            return table[column_name]

    try:
        ret = _lookup()
        return ret
    except INVALID:
        del self._constraint_cache[db_name][table_name]
        self._fill_constraint_cache(db_name, table_name)
    except __HOLE__:
        if self._is_valid_cache(db_name, table_name):
            return []
        self._fill_constraint_cache(db_name, table_name)

    return self.lookup_constraint(db_name, table_name, column_name)
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations.lookup_constraint
2,274
def _set_cache(self, table_name, column_name=None, value=INVALID):
    db_name = self._get_setting('NAME')
    try:
        if column_name is not None:
            self._constraint_cache[db_name][table_name][column_name] = value
        else:
            self._constraint_cache[db_name][table_name] = value
    except (LookupError, __HOLE__):
        pass
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations._set_cache
2,275
def _is_valid_cache(self, db_name, table_name):
    # we cache per-table so if the table is there it is valid
    try:
        return self._constraint_cache[db_name][table_name] is not INVALID
    except __HOLE__:
        return False
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations._is_valid_cache
2,276
def _is_multidb(self):
    try:
        from django.db import connections
        connections  # Prevents "unused import" warning
    except __HOLE__:
        return False
    else:
        return True
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations._is_multidb
2,277
def _has_setting(self, setting_name):
    """
    Existence-checking version of _get_setting.
    """
    try:
        self._get_setting(setting_name)
    except (__HOLE__, AttributeError):
        return False
    else:
        return True
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations._has_setting
2,278
def _get_schema_name(self):
    try:
        return self._get_setting('schema')
    except (KeyError, __HOLE__):
        return self.default_schema_name
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations._get_schema_name
2,279
def _db_type_for_alter_column(self, field):
    """
    Returns a field's type suitable for ALTER COLUMN.
    By default it just returns field.db_type().
    To be overriden by backend specific subclasses
    @param field: The field to generate type for
    """
    try:
        return field.db_type(connection=self._get_connection())
    except __HOLE__:
        return field.db_type()
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations._db_type_for_alter_column
2,280
@invalidate_table_constraints def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False): """ Alters the given column name so it will match the given field. Note that conversion between the two by the database must be possible. Will not automatically add _id by default; to have this behavour, pass explicit_name=False. @param table_name: The name of the table to add the column to @param name: The name of the column to alter @param field: The new field definition to use """ if self.dry_run: if self.debug: print(' - no dry run output for alter_column() due to dynamic DDL, sorry') return # hook for the field to do any resolution prior to it's attributes being queried if hasattr(field, 'south_init'): field.south_init() # Add _id or whatever if we need to field.set_attributes_from_name(name) if not explicit_name: name = field.column else: field.column = name if not ignore_constraints: # Drop all check constraints. Note that constraints will be added back # with self.alter_string_set_type and self.alter_string_drop_null. if self.has_check_constraints: check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK") for constraint in check_constraints: self.execute(self.delete_check_sql % { 'table': self.quote_name(table_name), 'constraint': self.quote_name(constraint), }) # Drop all foreign key constraints try: self.delete_foreign_key(table_name, name) except __HOLE__: # There weren't any pass # First, change the type params = { "column": self.quote_name(name), "type": self._db_type_for_alter_column(field), "table_name": self.quote_name(table_name) } # SQLs is a list of (SQL, values) pairs. sqls = [] # Only alter the column if it has a type (Geometry ones sometimes don't) if params["type"] is not None: sqls.append((self.alter_string_set_type % params, [])) # Add any field- and backend- specific modifications self._alter_add_column_mods(field, name, params, sqls) # Next, nullity if field.null or field.has_default(): sqls.append((self.alter_string_set_null % params, [])) else: sqls.append((self.alter_string_drop_null % params, [])) # Do defaults self._alter_set_defaults(field, name, params, sqls) # Actually change the column (step 1 -- Nullity may need to be fixed) if self.allows_combined_alters: sqls, values = zip(*sqls) self.execute( "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)), flatten(values), ) else: # Databases like e.g. MySQL don't like more than one alter at once. for sql, values in sqls: self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values) if not field.null and field.has_default(): # Final fixes self._update_nulls_to_default(params, field) self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), []) if not ignore_constraints: # Add back FK constraints if needed if field.rel and self.supports_foreign_keys: self.execute( self.foreign_key_sql( table_name, field.column, field.rel.to._meta.db_table, field.rel.to._meta.get_field(field.rel.field_name).column ) )
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations.alter_column
2,281
def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False): """ Creates the SQL snippet for a column. Used by add_column and add_table. """ # If the field hasn't already been told its attribute name, do so. if not field_prepared: field.set_attributes_from_name(field_name) # hook for the field to do any resolution prior to it's attributes being queried if hasattr(field, 'south_init'): field.south_init() # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL) field = self._field_sanity(field) try: sql = field.db_type(connection=self._get_connection()) except __HOLE__: sql = field.db_type() if sql: # Some callers, like the sqlite stuff, just want the extended type. if with_name: field_output = [self.quote_name(field.column), sql] else: field_output = [sql] field_output.append('%sNULL' % (not field.null and 'NOT ' or '')) if field.primary_key: field_output.append('PRIMARY KEY') elif field.unique: # Just use UNIQUE (no indexes any more, we have delete_unique) field_output.append('UNIQUE') tablespace = field.db_tablespace or tablespace if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique: # We must specify the index tablespace inline, because we # won't be generating a CREATE INDEX statement for this field. field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True)) sql = ' '.join(field_output) sqlparams = () # if the field is "NOT NULL" and a default value is provided, create the column with it # this allows the addition of a NOT NULL field to a table with existing rows if not getattr(field, '_suppress_default', False): if field.has_default(): default = field.get_default() # If the default is actually None, don't add a default term if default is not None: # If the default is a callable, then call it! if callable(default): default = default() default = field.get_db_prep_save(default, connection=self._get_connection()) default = self._default_value_workaround(default) # Now do some very cheap quoting. TODO: Redesign return values to avoid this. if isinstance(default, string_types): default = "'%s'" % default.replace("'", "''") # Escape any % signs in the output (bug #317) if isinstance(default, string_types): default = default.replace("%", "%%") # Add it in sql += " DEFAULT %s" sqlparams = (default) elif (not field.null and field.blank) or (field.get_default() == ''): if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls: sql += " DEFAULT ''" # Error here would be nice, but doesn't seem to play fair. #else: # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.") if field.rel and self.supports_foreign_keys: self.add_deferred_sql( self.foreign_key_sql( table_name, field.column, field.rel.to._meta.db_table, field.rel.to._meta.get_field(field.rel.field_name).column ) ) # Things like the contrib.gis module fields have this in 1.1 and below if hasattr(field, 'post_create_sql'): for stmt in field.post_create_sql(no_style(), table_name): self.add_deferred_sql(stmt) # In 1.2 and above, you have to ask the DatabaseCreation stuff for it. # This also creates normal indexes in 1.1. 
if hasattr(self._get_connection().creation, "sql_indexes_for_field"): # Make a fake model to pass in, with only db_table model = self.mock_model("FakeModelForGISCreation", table_name) for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()): self.add_deferred_sql(stmt) if sql: return sql % sqlparams else: return None
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations.column_sql
2,282
def create_index_name(self, table_name, column_names, suffix=""):
    """
    Generate a unique name for the index
    """
    # If there is just one column in the index, use a default algorithm from Django
    if len(column_names) == 1 and not suffix:
        try:
            _hash = self._digest([column_names[0]])
        except __HOLE__:
            # Django < 1.5 backward compatibility.
            _hash = self._digest(column_names[0])
        return self.shorten_name(
            '%s_%s' % (table_name, _hash),
        )

    # Else generate the name for the index by South
    table_name = table_name.replace('"', '').replace('.', '_')
    index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))

    # If the index name is too long, truncate it
    index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_')
    if len(index_name) > self.max_index_name_length:
        part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
        index_name = '%s%s' % (table_name[:(self.max_index_name_length - len(part))], part)
    return index_name
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations.create_index_name
2,283
def send_pending_create_signals(self, verbosity=0, interactive=False):
    # Group app_labels together
    signals = SortedDict()
    for (app_label, model_names) in self.pending_create_signals:
        try:
            signals[app_label].extend(model_names)
        except __HOLE__:
            signals[app_label] = list(model_names)
    # Send only one signal per app.
    for (app_label, model_names) in signals.items():
        self.really_send_create_signal(app_label, list(set(model_names)), verbosity=verbosity, interactive=interactive)
    self.pending_create_signals = []
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/generic.py/DatabaseOperations.send_pending_create_signals
2,284
def create(self, request, *args, **kwargs): try: token = Token.objects.get(key=request.DATA["service_key"]) serviceToken = ServiceTokens.objects.get(token_id=token) service = serviceToken.service_id except ServiceTokens.DoesNotExist: return Response({}, status=status.HTTP_404_NOT_FOUND) except Token.DoesNotExist: return Response({}, status=status.HTTP_403_FORBIDDEN) with transaction.atomic(): try: incident = Incident.objects.get( incident_key=request.DATA["incident_key"], service_key=service) event_log_message = "%s api key changed %s from %s to %s" % ( serviceToken.name, incident.incident_key, incident.event_type, request.DATA['event_type']) except (Incident.DoesNotExist, KeyError): incident = Incident() try: incident.incident_key = request.DATA["incident_key"] except __HOLE__: if request.DATA["event_type"] == Incident.TRIGGER: incident.incident_key = base64.urlsafe_b64encode( uuid.uuid1().bytes).replace( '=', '') else: response = {} response["status"] = "failure" response["message"] = "Mandatory parameter missing" return Response( response, status=status.HTTP_400_BAD_REQUEST) incident.service_key = service event_log_message = "%s api key created %s with status %s" % ( serviceToken.name, incident.incident_key, request.DATA['event_type']) if self.is_relevant(incident, request.DATA['event_type']): event_log = EventLog() # Anonymous user for testing if request.user.is_anonymous(): user = None else: user = request.user event_log.user = user event_log.service_key = incident.service_key event_log.data = event_log_message event_log.occurred_at = timezone.now() incident.event_type = request.DATA["event_type"] incident.description = request.DATA["description"][:100] incident.details = request.DATA["details"] incident.occurred_at = timezone.now() try: incident.full_clean() except ValidationError as e: return Response( {'errors': e.messages}, status=status.HTTP_400_BAD_REQUEST) incident.save() event_log.incident_key = incident event_log.action = incident.event_type event_log.save() servicesilenced = ServiceSilenced.objects.filter( service=service).count() > 0 if incident.event_type == Incident.TRIGGER and not servicesilenced: NotificationHelper.notify_incident(incident) if incident.event_type == "resolve" or incident.event_type == Incident.ACKNOWLEDGE: ScheduledNotification.remove_all_for_incident(incident) headers = self.get_success_headers(request.POST) response = {} response["status"] = "success" response["message"] = "Event processed" response["incident_key"] = incident.incident_key return Response( response, status=status.HTTP_201_CREATED, headers=headers)
KeyError
dataset/ETHPy150Open ustream/openduty/openduty/incidents.py/IncidentViewSet.create
2,285
@login_required()
@require_http_methods(["POST"])
def update_type(request):
    event_type = request.POST['event_type']
    event_types = ('acknowledge', 'resolve')
    incident_ids = request.POST.getlist('selection', None)
    if not event_type:
        messages.error(request, 'Invalid event modification!')
        return HttpResponseRedirect(request.POST['url'])
    try:
        if incident_ids:
            _update_type(request.user, incident_ids, event_type)
        else:
            id = request.POST.get('id')
            _update_type(request.user, [id], event_type)
    except Incident.DoesNotExist:
        messages.error(request, 'Incident not found')
        return HttpResponseRedirect(request.POST['url'])
    except __HOLE__ as e:
        messages.error(request, e.messages)
    return HttpResponseRedirect(request.POST['url'])
ValidationError
dataset/ETHPy150Open ustream/openduty/openduty/incidents.py/update_type
2,286
@login_required()
@require_http_methods(["POST"])
def forward_incident(request):
    try:
        with transaction.atomic():
            incident = Incident.objects.get(id=request.POST['id'])
            user = User.objects.get(id=request.POST['user_id'])
            ScheduledNotification.remove_all_for_incident(incident)
            NotificationHelper.notify_user_about_incident(incident, user)
            event_log_message = "%s changed assignee of incident : %s to %s" % (
                request.user.username, incident.incident_key, user.username)
            event_log = EventLog()
            event_log.user = request.user
            event_log.action = "forward"
            event_log.incident_key = incident
            event_log.service_key = incident.service_key
            event_log.data = event_log_message
            event_log.occurred_at = timezone.now()
            event_log.save()
    except Incident.DoesNotExist:
        messages.error(request, 'Incident not found')
        return HttpResponseRedirect(request.POST['url'])
    except User.DoesNotExist:
        messages.error(request, 'Incident not found')
        return HttpResponseRedirect(request.POST['url'])
    except __HOLE__ as e:
        messages.error(request, e.messages)
    return HttpResponseRedirect(request.POST['url'])
ValidationError
dataset/ETHPy150Open ustream/openduty/openduty/incidents.py/forward_incident
2,287
def __getattr__(self, key):
    try:
        return self[key]
    except __HOLE__ as e:
        raise AttributeError(e)
KeyError
dataset/ETHPy150Open dask/dask/dask/dataframe/groupby.py/DataFrameGroupBy.__getattr__
2,288
def _set_metadata(self):
    context = self._config.context
    config = self._config.plugins[self.full_name]
    log.debug('Populating snapshot and ami metadata for tagging and naming')
    creator = context.ami.get('creator', config.get('creator', 'aminator'))
    context.ami.tags.creator = creator
    context.snapshot.tags.creator = creator

    metadata = context.package.attributes
    metadata['arch'] = context.base_ami.architecture
    metadata['base_ami_name'] = context.base_ami.name
    metadata['base_ami_id'] = context.base_ami.id
    metadata['base_ami_version'] = context.base_ami.tags.get('base_ami_version', '')

    suffix = context.ami.get('suffix', None)
    if not suffix:
        suffix = config.suffix_format.format(datetime.utcnow())
    metadata['suffix'] = suffix

    for tag in config.tag_formats:
        try:
            context.ami.tags[tag] = config.tag_formats[tag].format(**metadata)
            context.snapshot.tags[tag] = config.tag_formats[tag].format(**metadata)
        except __HOLE__ as e:
            errstr = 'Tag format requires information not available in package metadata: {0}'.format(e.message)
            log.warn(errstr)
            log.debug(errstr, exc_info=True)
            # in case someone uses a tag format based on metadata not available
            # in this package
            continue

    default_description = config.description_format.format(**metadata)
    description = context.snapshot.get('description', default_description)
    context.ami.description = description
    context.snapshot.description = description
KeyError
dataset/ETHPy150Open Netflix/aminator/aminator/plugins/finalizer/tagging_base.py/TaggingBaseFinalizerPlugin._set_metadata
2,289
def _scrub_checklist_input(self, indices, tags):  # pylint: disable=no-self-use
    """Validate input and transform indices to appropriate tags.

    :param list indices: input
    :param list tags: Original tags of the checklist

    :returns: valid tags the user selected
    :rtype: :class:`list` of :class:`str`

    """
    # They should all be of type int
    try:
        indices = [int(index) for index in indices]
    except __HOLE__:
        return []

    # Remove duplicates
    indices = list(set(indices))

    # Check all input is within range
    for index in indices:
        if index < 1 or index > len(tags):
            return []
    # Transform indices to appropriate tags
    return [tags[index - 1] for index in indices]
ValueError
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot/display/util.py/FileDisplay._scrub_checklist_input
2,290
def _get_valid_int_ans(self, max_):
    """Get a numerical selection.

    :param int max: The maximum entry (len of choices), must be positive

    :returns: tuple of the form (`code`, `selection`) where
        `code` - str display exit code ('ok' or cancel')
        `selection` - int user's selection
    :rtype: tuple

    """
    selection = -1
    if max_ > 1:
        input_msg = ("Select the appropriate number "
                     "[1-{max_}] then [enter] (press 'c' to "
                     "cancel): ".format(max_=max_))
    else:
        input_msg = ("Press 1 [enter] to confirm the selection "
                     "(press 'c' to cancel): ")
    while selection < 1:
        ans = raw_input(input_msg)
        if ans.startswith("c") or ans.startswith("C"):
            return CANCEL, -1
        try:
            selection = int(ans)
            if selection < 1 or selection > max_:
                selection = -1
                raise ValueError
        except __HOLE__:
            self.outfile.write(
                "{0}** Invalid input **{0}".format(os.linesep))

    return OK, selection
ValueError
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot/display/util.py/FileDisplay._get_valid_int_ans
2,291
def t4():
    a = []
    try:
        with open(os.getcwd() + '//ml-100k' + '/u.item') as item:
            for line in item:
                a = line.split('|')[5:24]
                print a
                #(itemId,title)=line.split('|')[0:2]
                #movies[itemId]=title
    except __HOLE__ as err:
        print('File error: ' + str(err))
IOError
dataset/ETHPy150Open clasnake/recommender/test.py/t4
2,292
def get_handler(self, name):
    try:
        return self.handlers[name]
    except __HOLE__:
        raise DataError("No keyword handler with name '%s' found" % name)
KeyError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/common/libraries.py/BaseLibrary.get_handler
2,293
def __normalize(self, event=None):
    ew = event.widget
    contents = ew.get()
    icursor = ew.index(INSERT)
    if contents and contents[0] in 'xX' and self.__hexp.get():
        contents = '0' + contents
    # Figure out the contents in the current base.
    try:
        if self.__hexp.get():
            v = int(contents, 16)
        else:
            v = int(contents)
    except __HOLE__:
        v = None
    # If value is not legal, or empty, delete the last character inserted
    # and ring the bell. Don't ring the bell if the field is empty (it'll
    # just equal zero.
    if v is None:
        pass
    elif v < 0 or v > 255:
        i = ew.index(INSERT)
        if event.char:
            contents = contents[:i-1] + contents[i:]
            icursor -= 1
        ew.bell()
    elif self.__hexp.get():
        contents = hex(v)[2:]
    else:
        contents = int(v)
    ew.delete(0, END)
    ew.insert(0, contents)
    ew.icursor(icursor)
ValueError
dataset/ETHPy150Open Southpaw-TACTIC/TACTIC/src/context/client/tactic-api-python-4.0.api04/Tools/pynche/TypeinViewer.py/TypeinViewer.__normalize
2,294
def validate(self, value):
    """
    Validate value.

    If value is valid, returns `True` and `False` otherwise.

    :param value:
        Value to validate
    """
    # useful for filters with date conversions, see if conversion in clean() raises ValueError
    try:
        self.clean(value)
        return True
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/model/filters.py/BaseFilter.validate
2,295
def validate(self, value):
    try:
        value = [datetime.datetime.strptime(range, '%Y-%m-%d').date() for range in value.split(' to ')]
        # if " to " is missing, fail validation
        # sqlalchemy's .between() will not work if end date is before start date
        if (len(value) == 2) and (value[0] <= value[1]):
            return True
        else:
            return False
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/model/filters.py/BaseDateBetweenFilter.validate
2,296
def validate(self, value):
    try:
        value = [datetime.datetime.strptime(range, '%Y-%m-%d %H:%M:%S') for range in value.split(' to ')]
        if (len(value) == 2) and (value[0] <= value[1]):
            return True
        else:
            return False
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/model/filters.py/BaseDateTimeBetweenFilter.validate
2,297
def validate(self, value):
    try:
        timetuples = [time.strptime(range, '%H:%M:%S') for range in value.split(' to ')]
        if (len(timetuples) == 2) and (timetuples[0] <= timetuples[1]):
            return True
        else:
            return False
    except __HOLE__:
        raise
        return False
ValueError
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/model/filters.py/BaseTimeBetweenFilter.validate
2,298
def do_GET(self): (scm, netloc, path, params, query, fragment) = urlparse.urlparse( self.path, 'http') if scm not in ('http', 'ftp') or fragment or not netloc: self.send_error(400, "bad url %s" % self.path) return soc = None try: if scm == 'http': soc = self._connect_to(netloc) if soc: self.log_request() soc.send("%s %s %s\r\n" % (self.command, urlparse.urlunparse(('', '', path, params, query, '')), self.request_version)) self.headers['Connection'] = 'close' del self.headers['Proxy-Connection'] for key_val in self.headers.items(): soc.send("%s: %s\r\n" % key_val) soc.send("\r\n") self._read_write(soc) elif scm == 'ftp': # fish out user and password information i = netloc.find ('@') if i >= 0: login_info, netloc = netloc[:i], netloc[i+1:] try: user, passwd = login_info.split (':', 1) except __HOLE__: user, passwd = "anonymous", None else: user, passwd ="anonymous", None self.log_request () try: ftp = ftplib.FTP (netloc) ftp.login (user, passwd) if self.command == "GET": ftp.retrbinary ("RETR %s"%path, self.connection.send) ftp.quit () except Exception, e: self.server.logger.log (logging.WARNING, "FTP Exception: %s", e) finally: if soc: soc.close() self.connection.close()
ValueError
dataset/ETHPy150Open splunk/splunk-sdk-python/examples/handlers/tiny-proxy.py/ProxyHandler.do_GET
2,299
def test_build(self):
    fault = None

    try:
        raise TypeError("Unknown type")
    except __HOLE__:
        fault = amf0.build_fault(*sys.exc_info())

    self.assertTrue(isinstance(fault, remoting.ErrorFault))
    self.assertEqual(fault.level, 'error')
    self.assertEqual(fault.code, 'TypeError')
    self.assertEqual(fault.details, None)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/tests/test_gateway.py/FaultTestCase.test_build