Columns:
    Unnamed: 0: int64 (0 to 10k)
    function: string (lengths 79 to 138k)
    label: string (20 classes)
    info: string (lengths 42 to 261)
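Each row below pairs an index, a Python function whose exception handler has its exception class masked as __HOLE__, a label naming the masked class, and an info path into the ETHPy150 corpus. As a minimal sketch, assuming this row layout, the masked name can be restored by substitution (the fill_hole helper below is hypothetical, not part of the dataset):

def fill_hole(function_source, label):
    # Substitute the labeled exception class back into the masked
    # `except __HOLE__:` clause to recover the original function text.
    return function_source.replace("__HOLE__", label)

# Example with row 2,300: fill_hole(row["function"], "TypeError")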
2,300
def test_build_traceback(self):
    fault = None

    try:
        raise TypeError("Unknown type")
    except __HOLE__:
        fault = amf0.build_fault(include_traceback=True, *sys.exc_info())

    self.assertTrue(isinstance(fault, remoting.ErrorFault))
    self.assertEqual(fault.level, 'error')
    self.assertEqual(fault.code, 'TypeError')
    self.assertTrue("\\n" not in fault.details)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/tests/test_gateway.py/FaultTestCase.test_build_traceback
2,301
def test_encode(self):
    encoder = pyamf.get_encoder(pyamf.AMF0)
    decoder = pyamf.get_decoder(pyamf.AMF0)
    decoder.stream = encoder.stream

    try:
        raise TypeError("Unknown type")
    except __HOLE__:
        encoder.writeElement(amf0.build_fault(*sys.exc_info()))

    buffer = encoder.stream
    buffer.seek(0, 0)

    fault = decoder.readElement()
    old_fault = amf0.build_fault(*sys.exc_info())

    self.assertEqual(fault.level, old_fault.level)
    self.assertEqual(fault.type, old_fault.type)
    self.assertEqual(fault.code, old_fault.code)
    self.assertEqual(fault.details, old_fault.details)
    self.assertEqual(fault.description, old_fault.description)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/tests/test_gateway.py/FaultTestCase.test_encode
2,302
def __init__(self, conf, engine=None):
    super(SQLAlchemyBackend, self).__init__(conf)
    if engine is not None:
        self._engine = engine
        self._owns_engine = False
    else:
        self._engine = self._create_engine(self._conf)
        self._owns_engine = True
    self._validated = False
    self._upgrade_lock = threading.Lock()
    try:
        self._max_retries = misc.as_int(self._conf.get('max_retries'))
    except __HOLE__:
        self._max_retries = 0
TypeError
dataset/ETHPy150Open openstack/taskflow/taskflow/persistence/backends/impl_sqlalchemy.py/SQLAlchemyBackend.__init__
2,303
def _run_services(self, services):
    """Service runner main loop."""
    if not services:
        self._logger.critical('no services to run, bailing!')
        return

    service_thread_map = {service: threading.Thread(target=service.run)
                          for service in services}

    # Start services.
    for service, service_thread in service_thread_map.items():
        self._logger.info('starting service {}'.format(service))
        try:
            service_thread.start()
        except (__HOLE__, service.ServiceError):
            self.shutdown(service_thread_map)
            raise self.StartupFailure('service {} failed to start, shutting down!'.format(service))

    # Monitor services.
    while not self.is_killed:
        for service, service_thread in service_thread_map.items():
            if not service_thread.is_alive():
                self.shutdown(service_thread_map)
                raise self.RuntimeFailure('service failure for {}, shutting down!'.format(service))
            else:
                # Avoid excessive CPU utilization.
                service_thread.join(self.JOIN_TIMEOUT_SECONDS)
RuntimeError
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/pantsd/pants_daemon.py/PantsDaemon._run_services
2,304
def iter_paths(filepath):
    if not filepath:
        return

    try:
        with open(filepath) as f:
            for line in f:
                # Use `#` for comments
                if line.startswith("#"):
                    continue

                # Remove trailing spaces and newlines, then normalize to avoid duplicates.
                path = os.path.normpath(line.strip())
                if path:
                    yield path
    except __HOLE__:
        pass
IOError
dataset/ETHPy150Open tmr232/Sark/plugins/plugin_loader.py/iter_paths
2,305
def init(self):
    # Show usage message.
    usage_message = ["Loading plugins from system-wide and user-specific lists:",
                     "  System-wide List: {}".format(SYS_PLUGIN_LIST_PATH),
                     "  User-specific List: {}".format(USER_PLUGIN_LIST_PATH)]
    if PROJECT_PLUGIN_LIST_PATH:
        usage_message.append("  Project-specific List: {}".format(PROJECT_PLUGIN_LIST_PATH))
    message(*usage_message)

    # Make sure the files exist. If not - create them.
    if not os.path.isfile(SYS_PLUGIN_LIST_PATH):
        try:
            with open(SYS_PLUGIN_LIST_PATH, "wb"):
                message("Created system plugin list at {}".format(SYS_PLUGIN_LIST_PATH))
        except __HOLE__:
            message("Failed creating system plugin list at {}".format(SYS_PLUGIN_LIST_PATH))

    if not os.path.isfile(USER_PLUGIN_LIST_PATH):
        try:
            with open(USER_PLUGIN_LIST_PATH, "wb"):
                message("Created user plugin list at {}".format(USER_PLUGIN_LIST_PATH))
        except IOError:
            message("Failed creating user plugin list at {}".format(USER_PLUGIN_LIST_PATH))

    for path in iter_plugin_paths():
        # This check is not needed, but saves us from the dreaded error message-box
        # that pops when a python plugin is not found.
        if not os.path.isfile(path):
            message("Plugin file not found: {}".format(path))
            continue

        idaapi.load_plugin(path)

    return idaapi.PLUGIN_SKIP
IOError
dataset/ETHPy150Open tmr232/Sark/plugins/plugin_loader.py/PluginLoader.init
2,306
def relay(self, bot, relay_ins):
    def channel_for(channel_id):
        return bot.slack_client.server.channels.find(channel_id)

    def name(channel):
        if self.name:
            if not channel.name.startswith(self.name):
                return None
            return channel.name.split('-', 1)[1]
        else:
            return channel.name

    for data in relay_ins:
        if 'channel' in data:
            channel = channel_for(data['channel'])
            if channel is None:
                continue
            if data['type'] == 'join':
                line = u'join #{}'.format(name(channel))
                self.irc_client.send_line(line)
            elif data['type'] == 'message':
                print('do?', data)
                message = data.get('text', '')
                try:
                    import html
                    message = html.unescape(message)
                except __HOLE__:
                    pass
                message = message.replace('\r', ' ').replace('\n', r' ')
                user_id = data.get('user', None)
                if user_id:
                    user = bot.slack_client.server.users.find(user_id)
                else:
                    user = None
                user  # usable, but not yet
                if message:
                    line = u'privmsg #{} :{}'.format(name(channel), message)
                    self.irc_client.send_line(line)
        else:
            line = u'privmsg #{} :{}'.format(self.config['irc'].get('nick', 'slairck'), unicode(data))
            self.irc_client.send_line(line)
ImportError
dataset/ETHPy150Open youknowone/slairck/ircbot.py/IrcBot.relay
2,307
def ensure_dependencies():
    if not shell('python', '--version', capture=True).startswith('Python 2.7'):
        raise EnvironmentError('Python 2.7 is required.')
    try:
        shell('pg_config' + ext, capture=True)
    except __HOLE__ as e:
        if e.errno != os.errno.ENOENT:
            raise
        raise EnvironmentError('Postgresql is required. (Make sure pg_config is on your PATH.)')
OSError
dataset/ETHPy150Open gratipay/gratipay.com/gratipay.py/ensure_dependencies
2,308
@register.filter
def sort_by(items, attr):
    """
    General sort filter - sorts by either attribute or key.
    """
    def key_func(item):
        try:
            return getattr(item, attr)
        except AttributeError:
            try:
                return item[attr]
            except __HOLE__:
                getattr(item, attr)  # Reraise AttributeError
    return sorted(items, key=key_func)
TypeError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/sort_by
2,309
@register.tag
def ifinstalled(parser, token):
    """
    Old-style ``if`` tag that renders contents if the given app is
    installed. The main use case is:

    {% ifinstalled app_name %}
    {% include "app_name/template.html" %}
    {% endifinstalled %}

    so we need to manually pull out all tokens if the app isn't
    installed, since if we used a normal ``if`` tag with a False arg,
    the include tag will still try and find the template to include.
    """
    try:
        tag, app = token.split_contents()
    except __HOLE__:
        raise TemplateSyntaxError("ifinstalled should be in the form: "
                                  "{% ifinstalled app_name %}"
                                  "{% endifinstalled %}")

    end_tag = "end" + tag
    unmatched_end_tag = 1
    if app.strip("\"'") not in settings.INSTALLED_APPS:
        while unmatched_end_tag:
            token = parser.tokens.pop(0)
            if token.token_type == TOKEN_BLOCK:
                block_name = token.contents.split()[0]
                if block_name == tag:
                    unmatched_end_tag += 1
                if block_name == end_tag:
                    unmatched_end_tag -= 1
        parser.tokens.insert(0, token)
    nodelist = parser.parse((end_tag,))
    parser.delete_first_token()

    class IfInstalledNode(Node):
        def render(self, context):
            return nodelist.render(context)

    return IfInstalledNode()
ValueError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/ifinstalled
2,310
@register.simple_tag
def thumbnail(image_url, width, height, upscale=True, quality=95, left=.5,
              top=.5, padding=False, padding_color="#fff"):
    """
    Given the URL to an image, resizes the image using the given width
    and height on the first time it is requested, and returns the URL
    to the new resized image. If width or height are zero then original
    ratio is maintained. When ``upscale`` is False, images smaller than
    the given size will not be grown to fill that size. The given width
    and height thus act as maximum dimensions.
    """

    if not image_url:
        return ""
    try:
        from PIL import Image, ImageFile, ImageOps
    except ImportError:
        return ""

    image_url = unquote(str(image_url)).split("?")[0]
    if image_url.startswith(settings.MEDIA_URL):
        image_url = image_url.replace(settings.MEDIA_URL, "", 1)
    image_dir, image_name = os.path.split(image_url)
    image_prefix, image_ext = os.path.splitext(image_name)
    filetype = {".png": "PNG", ".gif": "GIF"}.get(image_ext, "JPEG")
    thumb_name = "%s-%sx%s" % (image_prefix, width, height)
    if not upscale:
        thumb_name += "-no-upscale"
    if left != .5 or top != .5:
        left = min(1, max(0, left))
        top = min(1, max(0, top))
        thumb_name = "%s-%sx%s" % (thumb_name, left, top)
    thumb_name += "-padded-%s" % padding_color if padding else ""
    thumb_name = "%s%s" % (thumb_name, image_ext)

    # `image_name` is used here for the directory path, as each image
    # requires its own sub-directory using its own name - this is so
    # we can consistently delete all thumbnails for an individual
    # image, which is something we do in filebrowser when a new image
    # is written, allowing us to purge any previously generated
    # thumbnails that may match a new image name.
    thumb_dir = os.path.join(settings.MEDIA_ROOT, image_dir,
                             settings.THUMBNAILS_DIR_NAME, image_name)
    if not os.path.exists(thumb_dir):
        try:
            os.makedirs(thumb_dir)
        except __HOLE__:
            pass

    thumb_path = os.path.join(thumb_dir, thumb_name)
    thumb_url = "%s/%s/%s" % (settings.THUMBNAILS_DIR_NAME,
                              quote(image_name.encode("utf-8")),
                              quote(thumb_name.encode("utf-8")))
    image_url_path = os.path.dirname(image_url)
    if image_url_path:
        thumb_url = "%s/%s" % (image_url_path, thumb_url)

    try:
        thumb_exists = os.path.exists(thumb_path)
    except UnicodeEncodeError:
        # The image that was saved to a filesystem with utf-8 support,
        # but somehow the locale has changed and the filesystem does not
        # support utf-8.
        from mezzanine.core.exceptions import FileSystemEncodingChanged
        raise FileSystemEncodingChanged()
    if thumb_exists:
        # Thumbnail exists, don't generate it.
        return thumb_url
    elif not default_storage.exists(image_url):
        # Requested image does not exist, just return its URL.
        return image_url

    f = default_storage.open(image_url)
    try:
        image = Image.open(f)
    except:
        # Invalid image format.
        return image_url

    image_info = image.info
    to_width = int(width)
    to_height = int(height)
    from_width = image.size[0]
    from_height = image.size[1]

    if not upscale:
        to_width = min(to_width, from_width)
        to_height = min(to_height, from_height)

    # Set dimensions.
    if to_width == 0:
        to_width = from_width * to_height // from_height
    elif to_height == 0:
        to_height = from_height * to_width // from_width
    if image.mode not in ("P", "L", "RGBA"):
        try:
            image = image.convert("RGBA")
        except:
            return image_url
    # Required for progressive jpgs.
    ImageFile.MAXBLOCK = 2 * (max(image.size) ** 2)

    # Padding.
    if padding and to_width and to_height:
        from_ratio = float(from_width) / from_height
        to_ratio = float(to_width) / to_height
        pad_size = None
        if to_ratio < from_ratio:
            pad_height = int(to_height * (float(from_width) / to_width))
            pad_size = (from_width, pad_height)
            pad_top = (pad_height - from_height) // 2
            pad_left = 0
        elif to_ratio > from_ratio:
            pad_width = int(to_width * (float(from_height) / to_height))
            pad_size = (pad_width, from_height)
            pad_top = 0
            pad_left = (pad_width - from_width) // 2
        if pad_size is not None:
            pad_container = Image.new("RGBA", pad_size, padding_color)
            pad_container.paste(image, (pad_left, pad_top))
            image = pad_container

    # Create the thumbnail.
    to_size = (to_width, to_height)
    to_pos = (left, top)
    try:
        image = ImageOps.fit(image, to_size, Image.ANTIALIAS, 0, to_pos)
        image = image.save(thumb_path, filetype, quality=quality, **image_info)
        # Push a remote copy of the thumbnail if MEDIA_URL is
        # absolute.
        if "://" in settings.MEDIA_URL:
            with open(thumb_path, "rb") as f:
                default_storage.save(thumb_url, File(f))
    except Exception:
        # If an error occurred, a corrupted image may have been saved,
        # so remove it, otherwise the check for it existing will just
        # return the corrupted image next time it's requested.
        try:
            os.remove(thumb_path)
        except Exception:
            pass
        return image_url
    return thumb_url
OSError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/thumbnail
2,311
@register.filter
def richtext_filters(content):
    """
    Takes a value edited via the WYSIWYG editor, and passes it through
    each of the functions specified by the RICHTEXT_FILTERS setting.
    """
    filter_names = settings.RICHTEXT_FILTERS
    if not filter_names:
        try:
            filter_names = [settings.RICHTEXT_FILTER]
        except __HOLE__:
            pass
        else:
            from warnings import warn
            warn("The `RICHTEXT_FILTER` setting is deprecated in favor of "
                 "the new plural setting `RICHTEXT_FILTERS`.")
    for filter_name in filter_names:
        filter_func = import_dotted_path(filter_name)
        content = filter_func(content)
    return content
AttributeError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/richtext_filters
2,312
@register.to_end_tag
def editable(parsed, context, token):
    """
    Add the required HTML to the parsed content for in-line editing,
    such as the icon and edit form if the object is deemed to be
    editable - either it has an ``editable`` method which returns
    ``True``, or the logged in user has change permissions for the
    model.
    """
    def parse_field(field):
        field = field.split(".")
        obj = context.get(field.pop(0), None)
        attr = field.pop()
        while field:
            obj = getattr(obj, field.pop(0))
            if callable(obj):
                # Allows {% editable page.get_content_model.content %}
                obj = obj()
        return obj, attr

    fields = [parse_field(f) for f in token.split_contents()[1:]]
    if fields:
        fields = [f for f in fields if len(f) == 2 and f[0] is fields[0][0]]
    if not parsed.strip():
        try:
            parsed = "".join([str(getattr(*field)) for field in fields])
        except __HOLE__:
            pass

    if settings.INLINE_EDITING_ENABLED and fields and "request" in context:
        obj = fields[0][0]
        if isinstance(obj, Model) and is_editable(obj, context["request"]):
            field_names = ",".join([f[1] for f in fields])
            context["editable_form"] = get_edit_form(obj, field_names)
            context["original"] = parsed
            t = get_template("includes/editable_form.html")
            return t.render(context)
    return parsed
AttributeError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/editable
2,313
def admin_app_list(request):
    """
    Adopted from ``django.contrib.admin.sites.AdminSite.index``.
    Returns a list of lists of models grouped and ordered according to
    ``mezzanine.conf.ADMIN_MENU_ORDER``. Called from the
    ``admin_dropdown_menu`` template tag as well as the ``app_list``
    dashboard widget.
    """
    app_dict = {}

    # Model or view --> (group index, group title, item index, item title).
    menu_order = {}
    for (group_index, group) in enumerate(settings.ADMIN_MENU_ORDER):
        group_title, items = group
        for (item_index, item) in enumerate(items):
            if isinstance(item, (tuple, list)):
                item_title, item = item
            else:
                item_title = None
            menu_order[item] = (group_index, group_title,
                                item_index, item_title)

    # Add all registered models, using group and title from menu order.
    for (model, model_admin) in admin.site._registry.items():
        opts = model._meta
        in_menu = not hasattr(model_admin, "in_menu") or model_admin.in_menu()
        if in_menu and request.user.has_module_perms(opts.app_label):
            perms = model_admin.get_model_perms(request)
            admin_url_name = ""
            if perms["change"]:
                admin_url_name = "changelist"
                change_url = admin_url(model, admin_url_name)
            else:
                change_url = None
            if perms["add"]:
                admin_url_name = "add"
                add_url = admin_url(model, admin_url_name)
            else:
                add_url = None
            if admin_url_name:
                model_label = "%s.%s" % (opts.app_label, opts.object_name)
                try:
                    app_index, app_title, model_index, model_title = \
                        menu_order[model_label]
                except __HOLE__:
                    app_index = None
                    app_title = opts.app_config.verbose_name.title()
                    model_index = None
                    model_title = None
                else:
                    del menu_order[model_label]
                if not model_title:
                    model_title = capfirst(model._meta.verbose_name_plural)
                if app_title not in app_dict:
                    app_dict[app_title] = {
                        "index": app_index,
                        "name": app_title,
                        "models": [],
                    }
                app_dict[app_title]["models"].append({
                    "index": model_index,
                    "perms": model_admin.get_model_perms(request),
                    "name": model_title,
                    "object_name": opts.object_name,
                    "admin_url": change_url,
                    "add_url": add_url
                })

    # Menu may also contain view or url pattern names given as (title, name).
    for (item_url, item) in menu_order.items():
        app_index, app_title, item_index, item_title = item
        try:
            item_url = reverse(item_url)
        except NoReverseMatch:
            continue
        if app_title not in app_dict:
            app_dict[app_title] = {
                "index": app_index,
                "name": app_title,
                "models": [],
            }
        app_dict[app_title]["models"].append({
            "index": item_index,
            "perms": {"custom": True},
            "name": item_title,
            "admin_url": item_url,
        })

    app_list = list(app_dict.values())
    sort = lambda x: (x["index"] if x["index"] is not None else 999, x["name"])
    for app in app_list:
        app["models"].sort(key=sort)
    app_list.sort(key=sort)
    return app_list
KeyError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/admin_app_list
2,314
@register.inclusion_tag("admin/includes/dropdown_menu.html",
                        takes_context=True)
def admin_dropdown_menu(context):
    """
    Renders the app list for the admin dropdown menu navigation.
    """
    user = context["request"].user
    template_vars = {}
    if user.is_staff:
        template_vars["dropdown_menu_app_list"] = admin_app_list(
            context["request"])
        if user.is_superuser:
            sites = Site.objects.all()
        else:
            try:
                sites = user.sitepermissions.sites.all()
            except __HOLE__:
                sites = Site.objects.none()
        template_vars["dropdown_menu_sites"] = list(sites)
        template_vars["dropdown_menu_selected_site_id"] = current_site_id()
        template_vars["settings"] = context["settings"]
        template_vars["request"] = context["request"]
    return template_vars
ObjectDoesNotExist
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/admin_dropdown_menu
2,315
@register.simple_tag(takes_context=True)
def translate_url(context, language):
    """
    Translates the current URL for the given language code, eg:

        {% translate_url de %}
    """
    try:
        request = context["request"]
    except __HOLE__:
        return ""
    view = resolve(request.path)
    current_language = translation.get_language()
    translation.activate(language)
    try:
        url = reverse(view.func, args=view.args, kwargs=view.kwargs)
    except NoReverseMatch:
        try:
            url_name = (view.url_name if not view.namespace
                        else '%s:%s' % (view.namespace, view.url_name))
            url = reverse(url_name, args=view.args, kwargs=view.kwargs)
        except NoReverseMatch:
            url_name = "admin:" + view.url_name
            url = reverse(url_name, args=view.args, kwargs=view.kwargs)
    translation.activate(current_language)
    if context['request'].META["QUERY_STRING"]:
        url += "?" + context['request'].META["QUERY_STRING"]
    return url
KeyError
dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/templatetags/mezzanine_tags.py/translate_url
2,316
def validate_bug_tracker(input_url):
    """Validate a bug tracker URL.

    This checks that the given URL string contains one (and only one)
    `%s` Python format specification type (no other types are supported).
    """
    try:
        # Ignore escaped `%`'s
        test_url = input_url.replace('%%', '')

        if test_url.find('%s') == -1:
            raise TypeError

        # Ensure an arbitrary value can be inserted into the URL string
        test_url = test_url % 1
    except (__HOLE__, ValueError):
        raise ValidationError([
            _("%s has invalid format specification type(s). Use only one "
              "'%%s' to mark the location of the bug id. If the URL contains "
              "encoded values (e.g. '%%20'), prepend the encoded values with "
              "an additional '%%'.") % input_url])
TypeError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/admin/validation.py/validate_bug_tracker
2,317
def validate_bug_tracker_base_hosting_url(input_url):
    """Check that hosting service bug URLs don't contain %s."""
    # Try formatting the URL using an empty tuple to verify that it
    # doesn't contain any format characters.
    try:
        input_url % ()
    except __HOLE__:
        raise ValidationError([
            _("The URL '%s' is not valid because it contains a format "
              "character. For bug trackers other than 'Custom Bug Tracker', "
              "use the base URL of the server. If you need a '%%' character, "
              "prepend it with an additional '%%'.") % input_url])
TypeError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/admin/validation.py/validate_bug_tracker_base_hosting_url
2,318
def test_migrate_gis(self):
    """
    Tests basic usage of the migrate command when a model uses Geodjango
    fields. Regression test for ticket #22001:
    https://code.djangoproject.com/ticket/22001

    It's also used to showcase an error in migrations where spatialite is
    enabled and geo tables are renamed resulting in unique constraint
    failure on geometry_columns. Regression for ticket #23030:
    https://code.djangoproject.com/ticket/23030
    """
    # Make sure the right tables exist
    self.assertTableExists("gis_migrations_neighborhood")
    self.assertTableExists("gis_migrations_household")
    self.assertTableExists("gis_migrations_family")
    if connection.features.supports_raster:
        self.assertTableExists("gis_migrations_heatmap")
    # Unmigrate everything
    call_command("migrate", "gis_migrations", "zero", verbosity=0)
    # Make sure it's all gone
    self.assertTableNotExists("gis_migrations_neighborhood")
    self.assertTableNotExists("gis_migrations_household")
    self.assertTableNotExists("gis_migrations_family")
    if connection.features.supports_raster:
        self.assertTableNotExists("gis_migrations_heatmap")
    # Even geometry columns metadata
    try:
        GeoColumn = connection.ops.geometry_columns()
    except __HOLE__:
        # Not all GIS backends have geometry columns model
        pass
    else:
        qs = GeoColumn.objects.filter(
            **{'%s__in' % GeoColumn.table_name_col():
               ["gis_neighborhood", "gis_household"]}
        )
        self.assertEqual(qs.count(), 0)
    # Revert the "unmigration"
    call_command("migrate", "gis_migrations", verbosity=0)
NotImplementedError
dataset/ETHPy150Open django/django/tests/gis_tests/gis_migrations/test_commands.py/MigrateTests.test_migrate_gis
2,319
def notify_errors(request_id, error):
    """Add errors to a config request."""
    try:
        _REQUESTS[request_id].notify_errors(request_id, error)
    except __HOLE__:
        # If request_id does not exist
        pass
KeyError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/configurator.py/notify_errors
2,320
def request_done(request_id):
    """Mark a configuration request as done."""
    try:
        _REQUESTS.pop(request_id).request_done(request_id)
    except __HOLE__:
        # If request_id does not exist
        pass
KeyError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/configurator.py/request_done
2,321
def _get_instance(hass):
    """Get an instance per hass object."""
    try:
        return _INSTANCES[hass]
    except __HOLE__:
        _INSTANCES[hass] = Configurator(hass)
        if DOMAIN not in hass.config.components:
            hass.config.components.append(DOMAIN)
        return _INSTANCES[hass]
KeyError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/configurator.py/_get_instance
2,322
def from_url(url, db=None):
    """Returns an active Redis client generated from the given database URL.

    Will attempt to extract the database id from the path url fragment, if
    none is provided.
    """
    url = urlparse(url)

    # Make sure it's a redis database.
    if url.scheme:
        assert url.scheme == 'redis'

    # Attempt to resolve database id.
    if db is None:
        try:
            db = int(url.path.replace('/', ''))
        except (AttributeError, __HOLE__):
            db = DEFAULT_DATABASE_ID

    return Redis(
        host=url.hostname,
        port=url.port,
        db=db,
        password=url.password)
ValueError
dataset/ETHPy150Open kumar303/jstestnet/vendor-local/redis-2.4.13/redis/utils.py/from_url
2,323
def destroy_datastore(datastore_path, history_path):
    """Destroys the appengine datastore at the specified paths."""
    for path in [datastore_path, history_path]:
        if not path:
            continue
        try:
            os.remove(path)
        except __HOLE__, e:
            if e.errno != 2:
                logging.error("Failed to clear datastore: %s" % e)
OSError
dataset/ETHPy150Open CollabQ/CollabQ/appengine_django/db/base.py/destroy_datastore
2,324
def _from_json(self, datastring):
    try:
        return jsonutils.loads(datastring)
    except __HOLE__:
        msg = _("cannot understand JSON")
        raise exceptions.MalformedRequestBody(msg)
ValueError
dataset/ETHPy150Open openstack/sahara/sahara/utils/wsgi.py/JSONDeserializer._from_json
2,325
def AttachParents(self, tree, parent = None):
    """ Attach parents to the tree. Also create the cache at the each node,
    by exercising tree.GetFlattenedChildren()."""
    tree.SetParent(parent)
    for c in tree.GetFlattenedChildren():
        if c is not None:
            try:
                self.AttachParents(c, tree)
            except __HOLE__ as e:
                print(c, parent, tree)
                raise AttributeError(e)
    return
AttributeError
dataset/ETHPy150Open spranesh/Redhawk/redhawk/common/tree_converter.py/TreeConverter.AttachParents
2,326
def pytest_configure():
    if not settings.configured:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'

    try:
        django.setup()
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open jpadilla/django-jwt-auth/tests/conftest.py/pytest_configure
2,327
def list_directory(path, ignore_directories, ignore_extensions):
    for root, dirs, files in os.walk(path):
        # skip over directories to ignore
        for dir in ignore_directories:
            try:
                dirs.remove(dir)
            except __HOLE__:
                pass
        # we are interested in the directory itself
        yield root
        for file in files:
            _, ext = os.path.splitext(file)
            if ext in ignore_extensions:
                continue
            yield os.path.join(root, file)
ValueError
dataset/ETHPy150Open faassen/bowerstatic/bowerstatic/autoversion.py/list_directory
2,328
def build(runas, tgt, dest_dir, spec, sources, deps, env, template, saltenv='base', log_dir='/var/log/salt/pkgbuild'):  # pylint: disable=unused-argument
    '''
    Given the package destination directory, the tarball containing debian
    files (e.g. control) and package sources, use pbuilder to safely build
    the platform package

    CLI Example:

    Debian

        salt '*' pkgbuild.make_src_pkg deb-8-x86_64 /var/www/html
            https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/deb/python-libnacl.control
            https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz

    This example command should build the libnacl package for Debian using
    pbuilder and place it in /var/www/html/ on the minion
    '''
    ret = {}
    try:
        os.makedirs(dest_dir)
    except __HOLE__ as exc:
        if exc.errno != errno.EEXIST:
            raise
    dsc_dir = tempfile.mkdtemp()
    try:
        dscs = make_src_pkg(dsc_dir, spec, sources, env, template, saltenv)
    except Exception as exc:
        shutil.rmtree(dsc_dir)
        log.error('Failed to make src package')
        return ret

    # dscs should only contain salt orig and debian tarballs and dsc file
    for dsc in dscs:
        afile = os.path.basename(dsc)
        adist = os.path.join(dest_dir, afile)
        shutil.copy(dsc, adist)

        if dsc.endswith('.dsc'):
            dbase = os.path.dirname(dsc)
            results_dir = tempfile.mkdtemp()
            try:
                __salt__['cmd.run']('chown {0} -R {1}'.format(runas, dbase))
                __salt__['cmd.run']('chown {0} -R {1}'.format(runas, results_dir))
                cmd = 'pbuilder --create'
                __salt__['cmd.run'](cmd, runas=runas, python_shell=True)
                cmd = 'pbuilder --build --buildresult {1} {0}'.format(
                    dsc, results_dir)
                __salt__['cmd.run'](cmd, runas=runas, python_shell=True)

                for bfile in os.listdir(results_dir):
                    full = os.path.join(results_dir, bfile)
                    bdist = os.path.join(dest_dir, bfile)
                    shutil.copy(full, bdist)
                    ret.setdefault('Packages', []).append(bdist)
            except Exception as exc:
                log.error('Error building from {0}: {1}'.format(dsc, exc))
            finally:
                shutil.rmtree(results_dir)
    shutil.rmtree(dsc_dir)
    return ret
OSError
dataset/ETHPy150Open saltstack/salt/salt/modules/debbuild.py/build
2,329
def parse_creation(creation_string):
    """Parses creation string from header format.

    Parse creation date of the format:

        YYYY-mm-dd HH:MM:SS.ffffff

        Y: Year
        m: Month (01-12)
        d: Day (01-31)
        H: Hour (00-24)
        M: Minute (00-59)
        S: Second (00-59)
        f: Microsecond

    Args:
        creation_string: String creation date format.

    Returns:
        datetime object parsed from creation_string.

    Raises:
        CreationFormatError when the creation string is formatted incorrectly.
    """
    def split(string, by, count):
        result = string.split(by, count)
        if len(result) != count + 1:
            raise CreationFormatError(
                'Could not parse creation %s.' % creation_string)
        return result

    timestamp_string, microsecond = split(creation_string, '.', 1)
    try:
        timestamp = time.strptime(timestamp_string,
                                  BASE_CREATION_HEADER_FORMAT)
        microsecond = int(microsecond)
    except __HOLE__:
        raise CreationFormatError('Could not parse creation %s.'
                                  % creation_string)

    return datetime.datetime(*timestamp[:6] + tuple([microsecond]))
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/blobstore/blobstore.py/parse_creation
2,330
def loadPrices(self):
    success = False
    for priority in self.loadPriorities:
        try:
            getattr(self, priority)()
            success = True
            break
        except URLError as e:
            print "Error loading " + priority + " url " + str(e)
        except (__HOLE__, KeyError, TypeError) as e:
            print "Error reading " + priority + " data" + str(e)

    if not success:  # pragma: no cover
        print "BtcPrice unable to load Bitcoin exchange price"
ValueError
dataset/ETHPy150Open OpenBazaar/OpenBazaar-Server/market/btcprice.py/BtcPrice.loadPrices
2,331
def _fit_transform(self, X):
    """Assumes X contains only categorical features."""
    X = check_array(X, dtype=np.int)
    if np.any(X < 0):
        raise ValueError("X needs to contain only non-negative integers.")
    n_samples, n_features = X.shape
    if (isinstance(self.n_values, six.string_types) and
            self.n_values == 'auto'):
        n_values = np.max(X, axis=0) + 1
    elif isinstance(self.n_values, numbers.Integral):
        if (np.max(X, axis=0) >= self.n_values).any():
            raise ValueError("Feature out of bounds for n_values=%d"
                             % self.n_values)
        n_values = np.empty(n_features, dtype=np.int)
        n_values.fill(self.n_values)
    else:
        try:
            n_values = np.asarray(self.n_values, dtype=int)
        except (ValueError, __HOLE__):
            raise TypeError("Wrong type for parameter `n_values`. Expected"
                            " 'auto', int or array of ints, got %r"
                            % type(X))
        if n_values.ndim < 1 or n_values.shape[0] != X.shape[1]:
            raise ValueError("Shape mismatch: if n_values is an array,"
                             " it has to be of shape (n_features,).")

    self.n_values_ = n_values
    n_values = np.hstack([[0], n_values])
    indices = np.cumsum(n_values)
    self.feature_indices_ = indices

    column_indices = (X + indices[:-1]).ravel()
    row_indices = np.repeat(np.arange(n_samples, dtype=np.int32),
                            n_features)
    data = np.ones(n_samples * n_features)
    out = sparse.coo_matrix((data, (row_indices, column_indices)),
                            shape=(n_samples, indices[-1]),
                            dtype=self.dtype).tocsr()

    if (isinstance(self.n_values, six.string_types) and
            self.n_values == 'auto'):
        mask = np.array(out.sum(axis=0)).ravel() != 0
        active_features = np.where(mask)[0]
        out = out[:, active_features]
        self.active_features_ = active_features

    return out if self.sparse else out.toarray()
TypeError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/preprocessing/data.py/OneHotEncoder._fit_transform
2,332
@response_server.route('/weather/', methods=['GET', 'POST'])
def weather():
    # Get destination from url query string:
    #   'node'   : destination
    #   'Digits' : input digits from user
    if request.method == 'POST':
        dtmf = request.form.get('Digits', -1)
    else:
        dtmf = -1
    try:
        dtmf = int(dtmf)
    except __HOLE__:
        dtmf = -1
    zipcode = str(dtmf)
    zipcode = zipcode.replace('#', '').replace('*', '')[:5]
    if len(zipcode) != 5:  # todo check zipcode format
        r = plivohelper.Response()
        r.addSpeak("Invalid Zipcode")
    else:
        print "zipcode %s" % zipcode
        r = plivohelper.Response()
        xml_weather = fetch_weather(zipcode)
        if xml_weather and xml_weather.find('City not found') == -1:
            weather = parse(xml_weather)
            r.addSpeak("It is currently %s degrees fahrenheit and %s in %s."
                       % (weather['temp'], weather['conditions'], weather['location']))
        else:
            r.addSpeak("Error getting the weather forecast for zipcode %s." % zipcode)
    print "RESTXML Response => %s" % r
    return render_template('response_template.xml', response=r)
ValueError
dataset/ETHPy150Open plivo/plivohelper-python/examples/weatherbyphone/weather.py/weather
2,333
@contextmanager
def TemporaryDirectory(suffix='', prefix=None, dir=None):
    name = mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
    try:
        yield name
    finally:
        try:
            shutil.rmtree(name)
        except __HOLE__ as e:
            # ENOENT - no such file or directory
            if e.errno != errno.ENOENT:
                raise e
OSError
dataset/ETHPy150Open airbnb/airflow/airflow/utils/file.py/TemporaryDirectory
2,334
def _really_load(self, f, filename, ignore_discard, ignore_expires):
    now = time.time()

    magic = f.readline()
    if not re.search(self.magic_re, magic):
        f.close()
        raise LoadError(
            "%r does not look like a Netscape format cookies file" %
            filename)

    try:
        while 1:
            line = f.readline()
            if line == "":
                break

            # last field may be absent, so keep any trailing tab
            if line.endswith("\n"):
                line = line[:-1]

            # skip comments and blank lines XXX what is $ for?
            if (line.strip().startswith(("#", "$")) or
                    line.strip() == ""):
                continue

            domain, domain_specified, path, secure, expires, name, value = \
                line.split("\t")
            secure = (secure == "TRUE")
            domain_specified = (domain_specified == "TRUE")
            if name == "":
                # cookies.txt regards 'Set-Cookie: foo' as a cookie
                # with no name, whereas cookielib regards it as a
                # cookie with no value.
                name = value
                value = None

            initial_dot = domain.startswith(".")
            assert domain_specified == initial_dot

            discard = False
            if expires == "":
                expires = None
                discard = True

            # assume path_specified is false
            c = Cookie(0, name, value,
                       None, False,
                       domain, domain_specified, initial_dot,
                       path, False,
                       secure,
                       expires,
                       discard,
                       None,
                       None,
                       {})
            if not ignore_discard and c.discard:
                continue
            if not ignore_expires and c.is_expired(now):
                continue
            self.set_cookie(c)

    except __HOLE__:
        raise
    except Exception:
        _warn_unhandled_exception()
        raise LoadError("invalid Netscape format cookies file %r: %r" %
                        (filename, line))
IOError
dataset/ETHPy150Open babble/babble/include/jython/Lib/_MozillaCookieJar.py/MozillaCookieJar._really_load
2,335
def play_music(self, lib, opts, args):
    """Execute query, create temporary playlist and execute player
    command passing that playlist, at request insert optional arguments.
    """
    command_str = config['play']['command'].get()
    if not command_str:
        command_str = util.open_anything()
    use_folders = config['play']['use_folders'].get(bool)
    relative_to = config['play']['relative_to'].get()
    raw = config['play']['raw'].get(bool)
    warning_threshold = config['play']['warning_threshold'].get(int)
    # We use -2 as a default value for warning_threshold to detect if it is
    # set or not. We can't use a falsey value because it would have an
    # actual meaning in the configuration of this plugin, and we do not use
    # -1 because some people might use it as a value to obtain no warning,
    # which wouldn't be that bad of a practice.
    if warning_threshold == -2:
        # if warning_threshold has not been set by user, look for
        # warning_treshold, to preserve backwards compatibility. See #1803.
        # warning_treshold has the correct default value of 100.
        warning_threshold = config['play']['warning_treshold'].get(int)

    if relative_to:
        relative_to = util.normpath(relative_to)

    # Add optional arguments to the player command.
    if opts.args:
        if ARGS_MARKER in command_str:
            command_str = command_str.replace(ARGS_MARKER, opts.args)
        else:
            command_str = u"{} {}".format(command_str, opts.args)

    # Perform search by album and add folders rather than tracks to
    # playlist.
    if opts.album:
        selection = lib.albums(ui.decargs(args))
        paths = []

        sort = lib.get_default_album_sort()
        for album in selection:
            if use_folders:
                paths.append(album.item_dir())
            else:
                paths.extend(item.path
                             for item in sort.sort(album.items()))
        item_type = 'album'

    # Perform item query and add tracks to playlist.
    else:
        selection = lib.items(ui.decargs(args))
        paths = [item.path for item in selection]
        if relative_to:
            paths = [relpath(path, relative_to) for path in paths]
        item_type = 'track'

    item_type += 's' if len(selection) > 1 else ''

    if not selection:
        ui.print_(ui.colorize('text_warning',
                              u'No {0} to play.'.format(item_type)))
        return

    # Warn user before playing any huge playlists.
    if warning_threshold and len(selection) > warning_threshold:
        ui.print_(ui.colorize(
            'text_warning',
            u'You are about to queue {0} {1}.'.format(
                len(selection), item_type)))

        if ui.input_options(('Continue', 'Abort')) == 'a':
            return

    ui.print_(u'Playing {0} {1}.'.format(len(selection), item_type))
    if raw:
        open_args = paths
    else:
        open_args = [self._create_tmp_playlist(paths)]

    self._log.debug(u'executing command: {} {}', command_str,
                    b' '.join(open_args))
    try:
        util.interactive_open(open_args, command_str)
    except __HOLE__ as exc:
        raise ui.UserError(
            "Could not play the query: {0}".format(exc))
OSError
dataset/ETHPy150Open beetbox/beets/beetsplug/play.py/PlayPlugin.play_music
2,336
def _compute_loss(self, model_output):
    """ Computes the loss for the whole batch.

    Notes:
    ------
    Unless overriden, the default behavior is to return the mean of all
    individual losses.
    """
    try:
        return T.mean(self.losses)
    except __HOLE__:
        raise NotImplementedError("Subclass of 'Loss' must either implement '_compute_loss(model_output)' or '_compute_losses(model_output)'.")
NotImplementedError
dataset/ETHPy150Open SMART-Lab/smartlearner/smartlearner/interfaces/loss.py/Loss._compute_loss
2,337
def get(self, *args, **kwargs):
    '''Service a GET request to the '/map' URI.

    The 'bbox' parameter contains 4 coordinates "l" (w), "b" (s),
    "r" (e) and "t" (n).'''

    # Sanity check the input.
    bbox_arg = self.get_argument('bbox', None)
    if not bbox_arg:
        raise tornado.web.HTTPError(400)  # Bad Syntax
    bbox = bbox_arg.split(',')
    if len(bbox) != 4:
        raise tornado.web.HTTPError(400)
    try:
        w, s, e, n = map(float, bbox)
    except __HOLE__:
        raise tornado.web.HTTPError(400)

    # Check the "l,b,r,t" coordinates passed in for sanity.
    if w < C.LON_MIN or w > C.LON_MAX or \
       e < C.LON_MIN or e > C.LON_MAX or \
       s < C.LAT_MIN or s > C.LAT_MAX or \
       n < C.LAT_MIN or n > C.LAT_MAX or \
       n < s or e < w:
        raise tornado.web.HTTPError(400)

    nodelist, ways, relations = self.handle_map(bbox)

    response = self.build_bbox_response(nodelist, ways, relations, bbox)

    self.set_header(C.CONTENT_TYPE, C.TEXT_XML)
    self.write(response_to_xml(response))
ValueError
dataset/ETHPy150Open MapQuest/mapquest-osm-server/src/python/frontend/maphandler.py/MapHandler.get
2,338
def _flush_stream(self, stream):
    try:
        stream.flush()
    except __HOLE__:
        pass
IOError
dataset/ETHPy150Open aws/aws-cli/awscli/formatter.py/Formatter._flush_stream
2,339
def __call__(self, command_name, response, stream=None):
    if stream is None:
        # Retrieve stdout on invocation instead of at import time
        # so that if anything wraps stdout we'll pick up those changes
        # (specifically colorama on windows wraps stdout).
        stream = self._get_default_stream()
    # I think the interfaces between non-paginated
    # and paginated responses can still be cleaned up.
    if is_response_paginated(response):
        response_data = response.build_full_result()
    else:
        response_data = response
    self._remove_request_id(response_data)
    if self._args.query is not None:
        response_data = self._args.query.search(response_data)
    try:
        self._format_response(command_name, response_data, stream)
    except __HOLE__ as e:
        # If the reading end of our stdout stream has closed the file
        # we can just exit.
        pass
    finally:
        # flush is needed to avoid the "close failed in file object
        # destructor" in python2.x (see http://bugs.python.org/issue11380).
        self._flush_stream(stream)
IOError
dataset/ETHPy150Open aws/aws-cli/awscli/formatter.py/FullyBufferedFormatter.__call__
2,340
def _format_response(self, command_name, response, stream):
    if self._build_table(command_name, response):
        try:
            self.table.render(stream)
        except __HOLE__:
            # If they're piping stdout to another process which exits before
            # we're done writing all of our output, we'll get an error about a
            # closed pipe which we can safely ignore.
            pass
IOError
dataset/ETHPy150Open aws/aws-cli/awscli/formatter.py/TableFormatter._format_response
2,341
def __send_request_json(self, request):
    req = urllib2.Request(url=request)
    req.add_header('Accept', 'application/json')
    try:
        response = urllib2.urlopen(req)
        assert response.code == 200
        data = response.read()
        return json.loads(data.decode('utf-8'))
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open marcuz/libpynexmo/nexmomessage/nexmo.py/Nexmo.__send_request_json
2,342
def start(self, config_file):
    cmdline = [self.path, 'run', config_file]
    stdout = sys.stdout if not isinstance(sys.stdout, StringIO) else None
    stderr = sys.stderr if not isinstance(sys.stderr, StringIO) else None
    try:
        self.process = self.executor.execute(cmdline, stdout=stdout, stderr=stderr)
    except __HOLE__ as exc:
        self.log.error("Failed to start phantom-benchmark utility: %s", traceback.format_exc())
        self.log.error("Failed command: %s", cmdline)
        raise RuntimeError("Failed to start phantom-benchmark utility: %s" % exc)
OSError
dataset/ETHPy150Open Blazemeter/taurus/bzt/modules/pbench.py/PBenchTool.start
2,343
def check_if_installed(self):
    self.log.debug("Trying phantom: %s", self.tool_path)
    try:
        pbench = shell_exec([self.tool_path], stderr=subprocess.STDOUT)
        pbench_out, pbench_err = pbench.communicate()
        self.log.debug("PBench check: %s", pbench_out)
        if pbench_err:
            self.log.warning("PBench check stderr: %s", pbench_err)
        return True
    except (CalledProcessError, __HOLE__):
        self.log.debug("Check failed: %s", traceback.format_exc())
        self.log.error("Phantom check failed. Consider installing it")
        return False
OSError
dataset/ETHPy150Open Blazemeter/taurus/bzt/modules/pbench.py/PBench.check_if_installed
2,344
def lookup_casstype_simple(casstype):
    """
    Given a Cassandra type name (either fully distinguished or not), hand
    back the CassandraType class responsible for it. If a name is not
    recognized, a custom _UnrecognizedType subclass will be created for it.

    This function does not handle complex types (so no type parameters--
    nothing with parentheses). Use lookup_casstype() instead if you might
    need that.
    """
    shortname = trim_if_startswith(casstype, apache_cassandra_type_prefix)
    try:
        typeclass = _casstypes[shortname]
    except __HOLE__:
        typeclass = mkUnrecognizedType(casstype)
    return typeclass
KeyError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/lookup_casstype_simple
2,345
def lookup_casstype(casstype):
    """
    Given a Cassandra type as a string (possibly including parameters), hand
    back the CassandraType class responsible for it. If a name is not
    recognized, a custom _UnrecognizedType subclass will be created for it.

    Example:

        >>> lookup_casstype('org.apache.cassandra.db.marshal.MapType(org.apache.cassandra.db.marshal.UTF8Type,org.apache.cassandra.db.marshal.Int32Type)')
        <class 'cassandra.types.MapType(UTF8Type, Int32Type)'>
    """
    if isinstance(casstype, (CassandraType, CassandraTypeType)):
        return casstype
    try:
        return parse_casstype_args(casstype)
    except (ValueError, __HOLE__, IndexError) as e:
        raise ValueError("Don't know how to parse type string %r: %s" % (casstype, e))
AssertionError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/lookup_casstype
2,346
@staticmethod
def serialize(dec, protocol_version):
    try:
        sign, digits, exponent = dec.as_tuple()
    except __HOLE__:
        try:
            sign, digits, exponent = Decimal(dec).as_tuple()
        except Exception:
            raise TypeError("Invalid type for Decimal value: %r", dec)
    unscaled = int(''.join([str(digit) for digit in digits]))
    if sign:
        unscaled *= -1
    scale = int32_pack(-exponent)
    unscaled = varint_pack(unscaled)
    return scale + unscaled
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/DecimalType.serialize
2,347
@staticmethod
def serialize(uuid, protocol_version):
    try:
        return uuid.bytes
    except __HOLE__:
        raise TypeError("Got a non-UUID object for a UUID value")
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/UUIDType.serialize
2,348
@staticmethod
def serialize(var, protocol_version):
    try:
        return var.encode('ascii')
    except __HOLE__:
        return var
UnicodeDecodeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/AsciiType.serialize
2,349
@staticmethod
def interpret_datestring(val):
    if val[-5] in ('+', '-'):
        offset = (int(val[-4:-2]) * 3600 + int(val[-2:]) * 60) * int(val[-5] + '1')
        val = val[:-5]
    else:
        offset = -time.timezone
    for tformat in cql_timestamp_formats:
        try:
            tval = time.strptime(val, tformat)
        except __HOLE__:
            continue
        # scale seconds to millis for the raw value
        return (calendar.timegm(tval) + offset) * 1e3
    else:
        raise ValueError("can't interpret %r as a date" % (val,))
ValueError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/DateType.interpret_datestring
2,350
@staticmethod
def serialize(v, protocol_version):
    try:
        # v is datetime
        timestamp_seconds = calendar.timegm(v.utctimetuple())
        timestamp = timestamp_seconds * 1e3 + getattr(v, 'microsecond', 0) / 1e3
    except __HOLE__:
        try:
            timestamp = calendar.timegm(v.timetuple()) * 1e3
        except AttributeError:
            # Ints and floats are valid timestamps too
            if type(v) not in _number_types:
                raise TypeError('DateType arguments must be a datetime, date, or timestamp')
            timestamp = v
    return int64_pack(long(timestamp))
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/DateType.serialize
2,351
@staticmethod
def serialize(timeuuid, protocol_version):
    try:
        return timeuuid.bytes
    except __HOLE__:
        raise TypeError("Got a non-UUID object for a UUID value")
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/TimeUUIDType.serialize
2,352
@staticmethod
def serialize(val, protocol_version):
    try:
        days = val.days_from_epoch
    except __HOLE__:
        if isinstance(val, six.integer_types):
            # the DB wants offset int values, but util.Date init takes days from epoch
            # here we assume int values are offset, as they would appear in CQL
            # short circuit to avoid subtracting just to add offset
            return uint32_pack(val)
        days = util.Date(val).days_from_epoch
    return uint32_pack(days + SimpleDateType.EPOCH_OFFSET_DAYS)
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/SimpleDateType.serialize
2,353
@staticmethod
def serialize(val, protocol_version):
    try:
        nano = val.nanosecond_time
    except __HOLE__:
        nano = util.Time(val).nanosecond_time
    return int64_pack(nano)
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/TimeType.serialize
2,354
@staticmethod
def serialize(ustr, protocol_version):
    try:
        return ustr.encode('utf-8')
    except __HOLE__:
        # already utf-8
        return ustr
UnicodeDecodeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/UTF8Type.serialize
2,355
@classmethod
def serialize_safe(cls, themap, protocol_version):
    key_type, value_type = cls.subtypes
    pack = int32_pack if protocol_version >= 3 else uint16_pack
    buf = io.BytesIO()
    buf.write(pack(len(themap)))
    try:
        items = six.iteritems(themap)
    except __HOLE__:
        raise TypeError("Got a non-map object for a map value")
    inner_proto = max(3, protocol_version)
    for key, val in items:
        keybytes = key_type.to_binary(key, inner_proto)
        valbytes = value_type.to_binary(val, inner_proto)
        buf.write(pack(len(keybytes)))
        buf.write(keybytes)
        buf.write(pack(len(valbytes)))
        buf.write(valbytes)
    return buf.getvalue()
AttributeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/MapType.serialize_safe
2,356
@classmethod
def evict_udt_class(cls, keyspace, udt_name):
    if six.PY2 and isinstance(udt_name, unicode):
        udt_name = udt_name.encode('utf-8')
    try:
        del cls._cache[(keyspace, udt_name)]
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/UserType.evict_udt_class
2,357
@classmethod
def serialize_safe(cls, val, protocol_version):
    proto_version = max(3, protocol_version)
    buf = io.BytesIO()
    for i, (fieldname, subtype) in enumerate(zip(cls.fieldnames, cls.subtypes)):
        # first treat as a tuple, else by custom type
        try:
            item = val[i]
        except __HOLE__:
            item = getattr(val, fieldname)

        if item is not None:
            packed_item = subtype.to_binary(item, proto_version)
            buf.write(int32_pack(len(packed_item)))
            buf.write(packed_item)
        else:
            buf.write(int32_pack(-1))
    return buf.getvalue()
TypeError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/UserType.serialize_safe
2,358
@classmethod
def _make_udt_tuple_type(cls, name, field_names):
    # fallback to positional named, then unnamed tuples
    # for CQL identifiers that aren't valid in Python,
    try:
        t = namedtuple(name, field_names)
    except ValueError:
        try:
            t = namedtuple(name, util._positional_rename_invalid_identifiers(field_names))
            log.warn("could not create a namedtuple for '%s' because one or more field names are not valid Python identifiers (%s); "
                     "returning positionally-named fields" % (name, field_names))
        except __HOLE__:
            t = None
            log.warn("could not create a namedtuple for '%s' because the name is not a valid Python identifier; "
                     "will return tuples in its place" % (name,))
    return t
ValueError
dataset/ETHPy150Open datastax/python-driver/cassandra/cqltypes.py/UserType._make_udt_tuple_type
2,359
def root_bdm(self):
    """It only makes sense to call this method when the
    BlockDeviceMappingList contains BlockDeviceMappings from
    exactly one instance rather than BlockDeviceMappings from
    multiple instances.

    For example, you should not call this method from a
    BlockDeviceMappingList created by get_by_instance_uuids(),
    but you may call this method from a BlockDeviceMappingList
    created by get_by_instance_uuid().
    """
    if len(self.instance_uuids) > 1:
        raise exception.UndefinedRootBDM()
    try:
        return next(bdm_obj for bdm_obj in self if bdm_obj.is_root)
    except __HOLE__:
        return
StopIteration
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/objects/block_device.py/BlockDeviceMappingList.root_bdm
2,360
def bootstrap_twilio_gateway(apps, twilio_rates_filename):
    currency_class = apps.get_model('accounting', 'Currency') if apps else Currency
    sms_gateway_fee_class = apps.get_model('smsbillables', 'SmsGatewayFee') if apps else SmsGatewayFee
    sms_gateway_fee_criteria_class = apps.get_model('smsbillables', 'SmsGatewayFeeCriteria') if apps else SmsGatewayFeeCriteria

    # iso -> provider -> rate
    def get_twilio_data():
        twilio_file = open(twilio_rates_filename)
        twilio_csv = csv.reader(twilio_file.read().splitlines())
        twilio_data = {}
        skip = 0
        for row in twilio_csv:
            if skip < 4:
                skip += 1
                continue
            else:
                try:
                    iso = row[0].lower()
                    provider = row[2].split('-')[1].lower().replace(' ', '')
                    rate = float(row[3])
                    if not(iso in twilio_data):
                        twilio_data[iso] = {}
                    twilio_data[iso][provider] = rate
                except IndexError:
                    log_smsbillables_info("Twilio index error %s:" % row)
        twilio_file.close()
        return twilio_data

    # iso -> provider -> (country code, number of subscribers)
    def get_mach_data():
        mach_workbook = xlrd.open_workbook('corehq/apps/smsbillables/management/'
                                           'commands/pricing_data/Syniverse_coverage_list_DIAMONDplus.xls')
        mach_table = mach_workbook.sheet_by_index(0)
        mach_data = {}
        try:
            row = 7
            while True:
                country_code = int(mach_table.cell_value(row, 0))
                iso = mach_table.cell_value(row, 1)
                network = mach_table.cell_value(row, 5).lower().replace(' ', '')
                subscribers = 0
                try:
                    subscribers = int(mach_table.cell_value(row, 10).replace('.', ''))
                except ValueError:
                    log_smsbillables_info("Incomplete subscriber data for country code %d"
                                          % country_code)
                if not(iso in mach_data):
                    mach_data[iso] = {}
                mach_data[iso][network] = (country_code, subscribers)
                row += 1
        except __HOLE__:
            pass
        return mach_data

    twilio_data = get_twilio_data()
    mach_data = get_mach_data()

    for iso in twilio_data:
        if iso in mach_data:
            weighted_price = 0
            total_subscriptions = 0
            country_code = None
            calculate_other = False
            for twilio_provider in twilio_data[iso]:
                if twilio_provider == 'other':
                    calculate_other = True
                else:
                    for mach_provider in mach_data[iso]:
                        try:
                            if twilio_provider in mach_provider:
                                country_code, subscriptions = mach_data[iso][mach_provider]
                                weighted_price += twilio_data[iso][twilio_provider] * subscriptions
                                total_subscriptions += subscriptions
                                mach_data[iso][mach_provider] = country_code, 0
                                break
                        except UnicodeDecodeError:
                            pass
            if calculate_other:
                other_rate_twilio = twilio_data[iso]['other']
                for _, subscriptions in mach_data[iso].values():
                    weighted_price += other_rate_twilio * subscriptions
                    total_subscriptions += subscriptions
            if country_code is not None:
                weighted_price = weighted_price / total_subscriptions
                SmsGatewayFee.create_new(
                    SQLTwilioBackend.get_api_id(),
                    OUTGOING,
                    weighted_price,
                    country_code=country_code,
                    currency=currency_class.objects.get(code="USD"),
                    fee_class=sms_gateway_fee_class,
                    criteria_class=sms_gateway_fee_criteria_class,
                )
        else:
            log_smsbillables_info("%s not in mach_data" % iso)

    # https://www.twilio.com/help/faq/sms/will-i-be-charged-if-twilio-encounters-an-error-when-sending-an-sms
    SmsGatewayFee.create_new(
        SQLTwilioBackend.get_api_id(),
        OUTGOING,
        0.00,
        country_code=None,
        currency=currency_class.objects.get(code="USD"),
        fee_class=sms_gateway_fee_class,
        criteria_class=sms_gateway_fee_criteria_class,
    )

    log_smsbillables_info("Updated Twilio gateway fees.")
IndexError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/smsbillables/management/commands/bootstrap_twilio_gateway.py/bootstrap_twilio_gateway
2,361
def load_backend(self, path):
    module_name, attr_name = path.rsplit('.', 1)
    try:
        mod = import_module(module_name)
    except (ImportError, ValueError), e:
        raise ImproperlyConfigured('Error importing backend module %s: "%s"' % (module_name, e))
    try:
        return getattr(mod, attr_name)()
    except __HOLE__:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" backend' % (module_name, attr_name))
AttributeError
dataset/ETHPy150Open adieu/django-dbindexer/dbindexer/resolver.py/Resolver.load_backend
2,362
def test_sync_tool(tmpdir):
    # test non-existing destination
    try:
        _SyncTool(client, 'should-not-exist', None, None, None)
    except __HOLE__ as ve:
        assert str(ve) == 'destination must exist and be a directory'

    # test existing destination, no aoi.geojson
    td = tmpdir.mkdir('sync-dest')
    try:
        _SyncTool(client, td.strpath, None, None, None)
    except ValueError as ve:
        assert str(ve) == 'no aoi provided and no aoi.geojson file'

    # test existing destination, invalid aoi.geojson
    aoi_file = td.join('aoi.geojson')
    aoi_file.write('not geojson')
    try:
        _SyncTool(client, td.strpath, None, None, None)
    except ValueError as ve:
        assert str(ve) == '%s does not contain valid JSON' % aoi_file

    td.remove(True)
ValueError
dataset/ETHPy150Open planetlabs/planet-client-python/tests/test_sync.py/test_sync_tool
2,363
def get_process_memory():
    if get_process_memory.psutil_process is None:
        try:
            import psutil
        except ImportError:
            get_process_memory.psutil_process = False
        else:
            pid = os.getpid()
            get_process_memory.psutil_process = psutil.Process(pid)
    if get_process_memory.psutil_process != False:
        meminfo = get_process_memory.psutil_process.get_memory_info()
        return meminfo.rss

    if get_process_memory.support_proc == False:
        return

    try:
        fp = open("/proc/self/status")
    except __HOLE__:
        get_process_memory.support_proc = False
        return None

    get_process_memory.support_proc = True
    with fp:
        for line in fp:
            if not(line.startswith("VmRSS:") and line.endswith(" kB\n")):
                continue
            value = line[6:-4].strip()
            value = int(value) * 1024
            return value

    # VmRss not found in /proc/self/status
    get_process_memory.support_proc = False
    return None
IOError
dataset/ETHPy150Open wyplay/pytracemalloc/tracemalloc.py/get_process_memory
2,364
def _lazy_import_pickle():
    # lazy loader for the pickle module
    global pickle
    if pickle is None:
        try:
            import cPickle as pickle
        except __HOLE__:
            import pickle
    return pickle
ImportError
dataset/ETHPy150Open wyplay/pytracemalloc/tracemalloc.py/_lazy_import_pickle
2,365
@classmethod
def load(cls, filename):
    pickle = _lazy_import_pickle()
    try:
        with open(filename, "rb") as fp:
            data = pickle.load(fp)
    except Exception:
        err = sys.exc_info()[1]
        print("ERROR: Failed to load %s: [%s] %s"
              % (filename, type(err).__name__, err))
        sys.exit(1)
    try:
        if data['format_version'] != cls.FORMAT_VERSION:
            raise TypeError("unknown format version")

        stats = data['stats']
        timestamp = data['timestamp']
        pid = data['pid']
        process_memory = data.get('process_memory')
        user_data = data.get('user_data')
    except __HOLE__:
        raise TypeError("invalid file format")

    return cls(stats, timestamp, pid, process_memory, user_data)
KeyError
dataset/ETHPy150Open wyplay/pytracemalloc/tracemalloc.py/Snapshot.load
2,366
def __init__(self, file=None):
    try:
        # Python 3
        import reprlib
    except __HOLE__:
        # Python 2
        import repr as reprlib
    if file is not None:
        self.stream = file
    else:
        self.stream = sys.stdout
    self.cumulative = False
    self._getter = _GetUncollectable()
    self._objects = []
    self.color = self.stream.isatty()
    reprobj = reprlib.Repr()
    reprobj.maxstring = 100
    reprobj.maxother = 100
    reprobj.maxlevel = 1
    self.format_object = reprobj.repr
ImportError
dataset/ETHPy150Open wyplay/pytracemalloc/tracemalloc.py/DisplayGarbage.__init__
2,367
def get_integration(integration, *args, **kwargs): """Return an integration instance specified by `integration` name""" klass = integration_cache.get(integration, None) if not klass: integration_filename = "%s_integration" % integration integration_module = None for app in settings.INSTALLED_APPS: try: integration_module = import_module(".integrations.%s" % integration_filename, package=app) break except __HOLE__: pass if not integration_module: raise IntegrationModuleNotFound("Missing integration: %s" % (integration)) integration_class_name = "".join(integration_filename.title().split("_")) try: klass = getattr(integration_module, integration_class_name) except AttributeError: raise IntegrationNotConfigured("Missing %s class in the integration module." % integration_class_name) integration_cache[integration] = klass return klass(*args, **kwargs)
ImportError
dataset/ETHPy150Open agiliq/merchant/billing/integration.py/get_integration
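The discovery loop above probes every installed app for a conventionally named submodule and keeps the first hit. A hedged sketch of that probe; the helper name and package list are invented for illustration.

# Illustrative sketch -- not a dataset row.
from importlib import import_module

def find_integration_module(name, packages):
    filename = "%s_integration" % name  # e.g. "stripe" -> "stripe_integration"
    for package in packages:
        try:
            # Relative import resolved against each candidate package.
            return import_module(".integrations.%s" % filename, package=package)
        except ImportError:
            continue
    return None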
2,368
def repl_linker_command(m): # Replaces any linker command file directives (e.g. "@foo.lnk") with # the actual contents of the file. try: f=open(m.group(2), "r") return m.group(1) + f.read() except __HOLE__: # the linker should return an error if it can't # find the linker command file so we will remain quiet. # However, we will replace the @ with a # so we will not continue # to find it with recursive substitution return m.group(1) + '#' + m.group(2)
IOError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/linkloc.py/repl_linker_command
2,369
def name_from_value(self, value, default=core.UNSET, patterns=["*"]): """Find the longest name in the set of constants (optionally qualified by pattern) which matches value. """ try: return max( (name for name in self.names(patterns) if self[name] == value), key=len ) except __HOLE__: if default is core.UNSET: raise KeyError("No constant matching name %s and value %s" % (patterns, value)) else: return default
ValueError
dataset/ETHPy150Open tjguk/winsys/winsys/registry.py/RegistryConstants.name_from_value
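The lookup above relies on max() raising ValueError for an empty sequence to detect "no constant matched". A sketch of the same pattern; in the Windows API, KEY_READ and KEY_EXECUTE genuinely share the value 0x20019, which is why the longest-name tie-break matters.

# Illustrative sketch -- not a dataset row.
constants = {"KEY_READ": 0x20019, "KEY_EXECUTE": 0x20019}

def name_of(value, default=None):
    try:
        return max((n for n in constants if constants[n] == value), key=len)
    except ValueError:  # generator was empty: nothing matched
        if default is None:
            raise KeyError("no constant with value %r" % value)
        return default

name_of(0x20019)  # -> 'KEY_EXECUTE' (longest matching name)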
2,370
def _parse_moniker(moniker, accept_value=True): r"""Take a registry moniker and return the computer, root key, subkey path and value label. NB: neither the computer nor the registry key need exist; they need simply to be of the right format. The slashes must be backslashes (since registry key names can contain forward slashes). accept_value is mostly used internally to indicate that a key search is going on, where a colon is to be considered part of the key name; if it is True, then the colon is to be considered a value separator. The moniker must be of the form: [\\computer\]HKEY[\subkey path][:value] Valid monikers are: \\SVR01\HKEY_LOCAL_MACHINE\Software\Python:Version -> "SVR01", 0x80000002, "Software\Python", "Version" HKEY_CURRENT_USER\Software -> "", 0x80000001, "Software", None HKEY_CURRENT_USER\Software\Python: -> "", 0x80000001, "Software\Python", "" """ if accept_value: moniker_parser = re.compile(r"(?:\\\\([^\\]+)\\)?([^:]+)(:?)(.*)", re.UNICODE) else: moniker_parser = re.compile(r"(?:\\\\([^\\]+)\\)?(.*)", re.UNICODE) matcher = moniker_parser.match(moniker) if not matcher: raise x_moniker_ill_formed(errctx="_parse_moniker", errmsg="Ill-formed moniker: %s" % moniker) if accept_value: computer, keypath, colon, value = matcher.groups() else: computer, keypath = matcher.groups() colon = value = None keys = keypath.split(sep) root = path = None key0 = keys.pop(0) try: root = REGISTRY_HIVE[key0.upper()] except KeyError: root = None if root is None and keys: key1 = keys.pop(0) try: root = REGISTRY_HIVE[key1.upper()] except __HOLE__: root = None if root is None: raise x_moniker_no_root(errctx="_parse_moniker", errmsg="A registry hive must be the first or second segment in moniker") path = sep.join(keys) # # If a value is indicated (by a colon) but none is supplied, # use "" to indicate that the default value is requested. # Otherwise use None to indicate that no value is requested # if value == "" and not colon: value = None return computer, root, path, value
KeyError
dataset/ETHPy150Open tjguk/winsys/winsys/registry.py/_parse_moniker
2,371
@classmethod def _access(cls, access): """Conversion function which returns an integer representing a security access bit pattern. Uses the class's ACCESS map to translate letters to integers. """ if access is None: return None try: return int(access) except __HOLE__: return reduce(operator.or_, (cls.ACCESS[a] for a in access.upper()), 0)
ValueError
dataset/ETHPy150Open tjguk/winsys/winsys/registry.py/Registry._access
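The converter above tries int() first and only on ValueError falls back to OR-ing per-letter flags together. A standalone sketch; the letter map here is hypothetical, not the real winsys ACCESS table.

# Illustrative sketch -- not a dataset row. ACCESS values are made up.
import operator
from functools import reduce

ACCESS = {"R": 0x20019, "W": 0x20006, "X": 0x20019}  # hypothetical letter map

def parse_access(access):
    if access is None:
        return None
    try:
        return int(access)  # already numeric, or a plain decimal string
    except ValueError:
        return reduce(operator.or_, (ACCESS[a] for a in access.upper()), 0)

parse_access("rw")  # -> 0x2001f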
2,372
def set_value(self, label, value, type=None): """Attempt to set one of the key's named values. If type is None, then if the value already exists under the key its current type is assumed; otherwise a guess is made at the datatype as follows: * If the value is an int, use DWORD * If the value is a list, use MULTI_SZ * If the value has an even number of percent signs, use EXPAND_SZ * Otherwise, use REG_SZ This is a very naive approach, and will falter if, for example, a string is passed which can be converted into a number, or a string with 2 percent signs which don't refer to an env var. """ def _guess_type(value): try: int(value) except (ValueError, __HOLE__): pass else: return REGISTRY_VALUE_TYPE.REG_DWORD if isinstance(value, list): return REGISTRY_VALUE_TYPE.REG_MULTI_SZ value = unicode(value) if "%" in value and value.count("%") % 2 == 0: return REGISTRY_VALUE_TYPE.REG_EXPAND_SZ return REGISTRY_VALUE_TYPE.REG_SZ type = REGISTRY_VALUE_TYPE.constant(type) if type is None: try: _, type = wrapped(win32api.RegQueryValueEx, self.pyobject(), label) except exc.x_not_found: type = _guess_type(value) wrapped(win32api.RegSetValueEx, self.pyobject(), label, 0, type, value)
TypeError
dataset/ETHPy150Open tjguk/winsys/winsys/registry.py/Registry.set_value
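The _guess_type closure above is EAFP: probe with int() and fall through a chain of shape checks. Reduced to its core, with string names standing in for the winreg type constants:

# Illustrative sketch -- not a dataset row. REG_* strings stand in for winreg constants.
def guess_reg_type(value):
    try:
        int(value)  # ints and numeric strings land here (the docstring's caveat)
    except (ValueError, TypeError):
        pass
    else:
        return "REG_DWORD"
    if isinstance(value, list):
        return "REG_MULTI_SZ"
    value = str(value)
    if "%" in value and value.count("%") % 2 == 0:
        return "REG_EXPAND_SZ"  # looks like %ENV_VAR% references
    return "REG_SZ"

guess_reg_type("%PATH%;%TEMP%")  # -> 'REG_EXPAND_SZ'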
2,373
def _readmodule(module, path, inpackage=None): '''Do the hard work for readmodule[_ex]. If INPACKAGE is given, it must be the dotted name of the package in which we are searching for a submodule, and then PATH must be the package search path; otherwise, we are searching for a top-level module, and PATH is combined with sys.path. ''' # Compute the full module name (prepending inpackage if set) if inpackage is not None: fullmodule = "%s.%s" % (inpackage, module) else: fullmodule = module # Check in the cache if fullmodule in _modules: return _modules[fullmodule] # Initialize the dict for this module's contents dict = {} # Check if it is a built-in module; we don't do much for these if module in sys.builtin_module_names and inpackage is None: _modules[module] = dict return dict # Check for a dotted module name i = module.rfind('.') if i >= 0: package = module[:i] submodule = module[i+1:] parent = _readmodule(package, path, inpackage) if inpackage is not None: package = "%s.%s" % (inpackage, package) return _readmodule(submodule, parent['__path__'], package) # Search the path for the module f = None if inpackage is not None: f, fname, (_s, _m, ty) = imp.find_module(module, path) else: f, fname, (_s, _m, ty) = imp.find_module(module, path + sys.path) if ty == imp.PKG_DIRECTORY: dict['__path__'] = [fname] path = [fname] + path f, fname, (_s, _m, ty) = imp.find_module('__init__', [fname]) _modules[fullmodule] = dict if ty != imp.PY_SOURCE: # not Python source, can't do anything with this module f.close() return dict stack = [] # stack of (class, indent) pairs g = tokenize.generate_tokens(f.readline) try: for tokentype, token, start, _end, _line in g: if tokentype == DEDENT: lineno, thisindent = start # close nested classes and defs while stack and stack[-1][1] >= thisindent: del stack[-1] elif token == 'def': lineno, thisindent = start # close previous nested classes and defs while stack and stack[-1][1] >= thisindent: del stack[-1] tokentype, meth_name, start = g.next()[0:3] if tokentype != NAME: continue # Syntax error if stack: cur_class = stack[-1][0] if isinstance(cur_class, Class): # it's a method cur_class._addmethod(meth_name, lineno) # else it's a nested def else: # it's a function dict[meth_name] = Function(fullmodule, meth_name, fname, lineno) stack.append((None, thisindent)) # Marker for nested fns elif token == 'class': lineno, thisindent = start # close previous nested classes and defs while stack and stack[-1][1] >= thisindent: del stack[-1] tokentype, class_name, start = g.next()[0:3] if tokentype != NAME: continue # Syntax error # parse what follows the class name tokentype, token, start = g.next()[0:3] inherit = None if token == '(': names = [] # List of superclasses # there's a list of superclasses level = 1 super = [] # Tokens making up current superclass while True: tokentype, token, start = g.next()[0:3] if token in (')', ',') and level == 1: n = "".join(super) if n in dict: # we know this super class n = dict[n] else: c = n.split('.') if len(c) > 1: # super class is of the form # module.class: look in module for # class m = c[-2] c = c[-1] if m in _modules: d = _modules[m] if c in d: n = d[c] names.append(n) super = [] if token == '(': level += 1 elif token == ')': level -= 1 if level == 0: break elif token == ',' and level == 1: pass # only use NAME and OP (== dot) tokens for type name elif tokentype in (NAME, OP) and level == 1: super.append(token) # expressions in the base list are not supported inherit = names cur_class = Class(fullmodule, class_name, inherit, fname, lineno) if not stack: dict[class_name] = cur_class stack.append((cur_class, thisindent)) elif token == 'import' and start[1] == 0: modules = _getnamelist(g) for mod, _mod2 in modules: try: # Recursively read the imported module if inpackage is None: _readmodule(mod, path) else: try: _readmodule(mod, path, inpackage) except ImportError: _readmodule(mod, []) except: # If we can't find or parse the imported module, # too bad -- don't die here. pass elif token == 'from' and start[1] == 0: mod, token = _getname(g) if not mod or token != "import": continue names = _getnamelist(g) try: # Recursively read the imported module d = _readmodule(mod, path, inpackage) except: # If we can't find or parse the imported module, # too bad -- don't die here. continue # add any classes that were defined in the imported module # to our name space if they were mentioned in the list for n, n2 in names: if n in d: dict[n2 or n] = d[n] elif n == '*': # don't add names that start with _ for n in d: if n[0] != '_': dict[n] = d[n] except __HOLE__: pass f.close() return dict
StopIteration
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pyclbr.py/_readmodule
2,374
def create(self, image): if self.exists(image) and not self.regenerate_images: return try: ImageModel(image) except __HOLE__: log.warning("Unable to store models on this machine") image.root = self.root image.generate() with suppress(IOError): os.remove(self.latest) shutil.copy(image.path, self.latest)
ImportError
dataset/ETHPy150Open jacebrowning/memegen/memegen/stores/image.py/ImageStore.create
2,375
def transform(self, X, y=None): """Transform feature->value dicts to array or sparse matrix. Named features not encountered during fit or fit_transform will be silently ignored. Parameters ---------- X : Mapping or iterable over Mappings, length = n_samples Dict(s) or Mapping(s) from feature names (arbitrary Python objects) to feature values (strings or convertible to dtype). y : (ignored) Returns ------- Xa : {array, sparse matrix} Feature vectors; always 2-d. """ if self.sparse: return self._transform(X, fitting=False) else: dtype = self.dtype vocab = self.vocabulary_ X = _tosequence(X) Xa = np.zeros((len(X), len(vocab)), dtype=dtype) for i, x in enumerate(X): for f, v in six.iteritems(x): if isinstance(v, six.string_types): f = "%s%s%s" % (f, self.separator, v) v = 1 try: Xa[i, vocab[f]] = dtype(v) except __HOLE__: pass return Xa
KeyError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/feature_extraction/dict_vectorizer.py/DictVectorizer.transform
2,376
def handle(self, *url_lists, **options): from django.conf import settings from debug_logging.models import TestRun from debug_logging.utils import (get_project_name, get_hostname, get_revision) verbosity = int(options.get('verbosity', 1)) self.quiet = verbosity < 1 self.verbose = verbosity > 1 # Determine whether the DBHandler is used if True in [isinstance(handler, DBHandler) for handler in LOGGING_CONFIG["LOGGING_HANDLERS"]]: self.has_dbhandler = True else: self.has_dbhandler = False # Check for a username without a password, or vice versa if options['username'] and not options['password']: raise CommandError('If a username is provided, a password must ' 'also be provided.') if options['password'] and not options['username']: raise CommandError('If a password is provided, a username must ' 'also be provided.') # Create a TestRun object to track this run filters = {} panels = settings.DEBUG_TOOLBAR_PANELS if 'debug_logging.panels.identity.IdentityLoggingPanel' in panels: filters['project_name'] = get_project_name() filters['hostname'] = get_hostname() if 'debug_logging.panels.revision.RevisionLoggingPanel' in panels: filters['revision'] = get_revision() if self.has_dbhandler: # Check to see if there is already a TestRun object open existing_runs = TestRun.objects.filter(end__isnull=True, **filters) if existing_runs: if options['manual_start']: # If the --manual-start option was specified, error out # because there is already an open TestRun raise CommandError('There is already an open TestRun.') # Otherwise, close it so that we can open a new one for existing_run in existing_runs: existing_run.end = datetime.now() existing_run.save() if options['manual_end']: # If the --manual-end option was specified, we can now exit self.status_update('The TestRun was successfully closed.') return if options['manual_end']: # The --manual-end option was specified, but there was no # existing run to close. raise CommandError('There is no open TestRun to end.') filters['start'] = datetime.now() test_run = TestRun(**filters) if options['name']: test_run.name = options['name'] if options['description']: test_run.description = options['description'] test_run.save() if options['manual_start']: # The TestRun was successfully created self.status_update('A new TestRun was successfully opened.') return urls = [] for url_list in url_lists: with open(url_list) as f: urls.extend([l.strip() for l in f.readlines() if not l.startswith('#')]) if options['sitemap']: sitemaps = import_from_string(options['sitemap']) if isinstance(sitemaps, dict): for sitemap in sitemaps.values(): urls.extend(map(sitemap.location, sitemap.items())) elif isinstance(sitemaps, Sitemap): urls.extend(map(sitemaps.location, sitemaps.items())) else: raise CommandError( 'Sitemaps should be a Sitemap object or a dict, got %s ' 'instead' % type(sitemaps) ) self.status_update('Beginning debug logging run...') client = Client() if options['username'] and options['password']: client.login(username=options['username'], password=options['password']) for url in urls: try: response = client.get(url, DJANGO_DEBUG_LOGGING=True) except KeyboardInterrupt as e: if self.has_dbhandler: # Close out the log entry test_run.end = datetime.now() test_run.save() raise CommandError('Debug logging run cancelled.') except Exception as e: if self.verbose: self.status_update('\nSkipped %s because of error: %s' % (url, e)) continue if response and response.status_code == 200: self.status_ticker() else: if self.verbose: try: self.status_update('\nURL %s responded with code %s' % (url, response.status_code)) except __HOLE__ as e: self.status_update('\nSkipped %s because of error: %s' % (url, e)) if self.has_dbhandler: # Close out the log entry test_run.end = datetime.now() test_run.save() self.status_update('done!\n')
NameError
dataset/ETHPy150Open lincolnloop/django-debug-logging/debug_logging/management/commands/log_urls.py/Command.handle
2,377
def check_argument_type(name, value): text = NamedTemporaryFile() try: with patch('sys.stderr') as mock_stderr: args = cli.parse_args(['--text', text.name, '--' + name, str(value)]) raise AssertionError('argument "{}" was accepted even though the type did not match'.format(name)) except SystemExit: pass except __HOLE__: pass
ValueError
dataset/ETHPy150Open amueller/word_cloud/test/test_wordcloud_cli.py/check_argument_type
2,378
def test_check_duplicate_color_error(): color_mask_file = NamedTemporaryFile() text_file = NamedTemporaryFile() try: cli.parse_args(['--color', 'red', '--colormask', color_mask_file.name, '--text', text_file.name]) raise AssertionError('parse_args(...) didn\'t raise') except __HOLE__ as e: assert_true('specify either' in str(e), msg='expecting the correct error message, instead got: ' + str(e))
ValueError
dataset/ETHPy150Open amueller/word_cloud/test/test_wordcloud_cli.py/test_check_duplicate_color_error
2,379
def flatten_json( form, json, parent_key='', separator='-', skip_unknown_keys=True ): """Flattens given JSON dict to cope with WTForms dict structure. :param form: WTForms Form object :param json: json to be converted into flat WTForms style dict :param parent_key: this argument is used internally by recursive calls :param separator: default separator :param skip_unknown_keys: if True, unknown keys will be skipped; if False, an InvalidData exception is raised whenever an unknown key is encountered Examples:: >>> flatten_json(MyForm, {'a': {'b': 'c'}}) {'a-b': 'c'} """ if not isinstance(json, collections.Mapping): raise InvalidData( u'This function only accepts dict-like data structures.' ) items = [] for key, value in json.items(): try: unbound_field = getattr(form, key) except __HOLE__: if skip_unknown_keys: continue else: raise InvalidData(u"Unknown field name '%s'." % key) try: field_class = unbound_field.field_class except AttributeError: if skip_unknown_keys: continue else: raise InvalidData(u"Key '%s' is not valid field class." % key) new_key = parent_key + separator + key if parent_key else key if isinstance(value, collections.MutableMapping): if issubclass(field_class, FormField): nested_form_class = unbound_field.bind(Form(), '').form_class items.extend( flatten_json(nested_form_class, value, new_key) .items() ) else: items.append((new_key, value)) elif isinstance(value, list): if issubclass(field_class, FieldList): nested_unbound_field = unbound_field.bind( Form(), '' ).unbound_field items.extend( flatten_json_list( nested_unbound_field, value, new_key, separator ) ) else: items.append((new_key, value)) else: items.append((new_key, value)) return dict(items)
AttributeError
dataset/ETHPy150Open kvesteri/wtforms-json/wtforms_json/__init__.py/flatten_json
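Stripped of the WTForms field introspection, the flattening above is plain recursion over nested mappings. A simplified sketch matching the docstring's example:

# Illustrative sketch -- not a dataset row. No WTForms validation, dicts only.
def flatten(d, parent_key="", sep="-"):
    items = {}
    for key, value in d.items():
        new_key = parent_key + sep + key if parent_key else key
        if isinstance(value, dict):
            items.update(flatten(value, new_key, sep))  # recurse into nesting
        else:
            items[new_key] = value
    return items

flatten({"a": {"b": "c"}})  # -> {"a-b": "c"}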
2,380
def run(self, args): try: return self.dispatch(args) except getopt.GetoptError as e: print("Error: %s\n" % str(e)) self.display_help() return 2 except CallError as e: sys.stderr.write("%s\n" % str(e)) return 1 except ArgumentError as e: sys.stderr.write("%s\n" % str(e)) return 1 except __HOLE__: return 1 except Exception: sys.stderr.write(traceback.format_exc()) return 1
KeyboardInterrupt
dataset/ETHPy150Open circus-tent/circus/circus/circusctl.py/ControllerApp.run
2,381
def autocomplete(self, autocomplete=False, words=None, cword=None): """ Output completion suggestions for BASH. The output of this function is passed to BASH's `COMREPLY` variable and treated as completion suggestions. `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used to get information about the CLI input. Please refer to the BASH man-page for more information about these variables. Subcommand options are saved as pairs. A pair consists of the long option string (e.g. '--exclude') and a boolean value indicating if the option requires arguments. When printing to stdout, an equal sign is appended to options which require arguments. Note: If debugging this function, it is recommended to write the debug output in a separate file. Otherwise the debug output will be treated and formatted as potential completion suggestions. """ autocomplete = autocomplete or 'AUTO_COMPLETE' in os.environ # Don't complete if user hasn't sourced bash_completion file. if not autocomplete: return words = words or os.environ['COMP_WORDS'].split()[1:] cword = cword or int(os.environ['COMP_CWORD']) try: curr = words[cword - 1] except __HOLE__: curr = '' subcommands = get_commands() if cword == 1: # if completing the command name print(' '.join(sorted([x for x in subcommands if x.startswith(curr)]))) sys.exit(1)
IndexError
dataset/ETHPy150Open circus-tent/circus/circus/circusctl.py/CircusCtl.autocomplete
2,382
def start(self, globalopts): self.autocomplete() self.controller.globalopts = globalopts args = globalopts['args'] parser = globalopts['parser'] if hasattr(args, 'command'): sys.exit(self.controller.run(globalopts['args'])) if args.help: for command in sorted(self.commands.keys()): doc = textwrap.dedent(self.commands[command].__doc__) help = doc.split('\n')[0] parser.add_argument(command, help=help) parser.print_help() sys.exit(0) # no command, no --help: enter the CLI print(VERSION) self.do_status('') try: self.cmdloop() except __HOLE__: sys.stdout.write('\n') sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open circus-tent/circus/circus/circusctl.py/CircusCtl.start
2,383
def _handle_exception(): """Print exceptions raised by subscribers to stderr.""" # Heavily influenced by logging.Handler.handleError. # See note here: # https://docs.python.org/3.4/library/sys.html#sys.__stderr__ if sys.stderr: einfo = sys.exc_info() try: traceback.print_exception(einfo[0], einfo[1], einfo[2], None, sys.stderr) except __HOLE__: pass finally: del einfo # Note - to avoid bugs from forgetting which of these is all lowercase and # which are camelCase, and at the same time avoid having to add a test for # every command, use all lowercase here and test against command_name.lower().
IOError
dataset/ETHPy150Open mongodb/mongo-python-driver/pymongo/monitoring.py/_handle_exception
2,384
def main(argv=None): """script main. parses command line options in sys.argv, unless *argv* is given. """ if argv is None: argv = sys.argv parser = E.OptionParser( version="%prog version: $Id$", usage=globals()["__doc__"]) parser.add_option("-c", "--columns", dest="columns", type="string", help="columns to take for calculating histograms.") parser.add_option("--min-value", dest="min_value", type="float", help="minimum value for histogram.") parser.add_option("--max-value", dest="max_value", type="float", help="maximum value for histogram.") parser.add_option("--scale", dest="scale", type="float", help="scale values.") parser.add_option("-a", "--aggregate-column", dest="aggregate_column", type="int", help="use column to aggregate.") parser.add_option("-i", "--no-title", dest="titles", action="store_false", help="do not use supplied column titles.") parser.add_option("-e", "--header-names", dest="headers", type="string", help="headers.") parser.add_option("-r", "--rows", dest="rows", action="store_true", help="data is in rows.") parser.add_option("--ignore-zeros", dest="ignore_zeros", action="store_true", help="ignore zero values.") parser.add_option("-f", "--format", dest="value_format", type="string", help="number format.") parser.add_option("-x", "--flat-output", dest="flat", action="store_true", help="flat format.") parser.add_option("--skip-header", dest="add_header", action="store_false", help="do not add header to flat format.") parser.add_option("--output-with-header", dest="write_header", action="store_true", help="write header and exit.") parser.add_option("--skip-empty", dest="output_empty", action="store_false", help="do not output empty columns.") parser.add_option("--output-empty", dest="output_empty", action="store_true", help="output empty columns.") parser.set_defaults( columns="all", min_value=None, max_value=None, scale=None, aggregate_column=None, titles=True, headers=None, rows=False, value_format="%6.4f", flat=False, test="%5.2f", add_header=True, write_header=False, ignore_zeros=False, output_empty=False, separator="\t" ) (options, args) = E.Start(parser, quiet=True) if options.columns not in ("all", "all-but-first", "variable"): options.columns = map(lambda x: int(x) - 1, options.columns.split(",")) if options.headers: options.headers = options.headers.split(",") # write header for flat output if options.write_header: options.stdout.write("\t".join(("nval", "min", "max", "mean", "median", "stddev", "sum", "q1", "q3")) + "\n") return # retrieve histogram lines = filter(lambda x: x[0] != "#", sys.stdin.readlines()) outfile = options.stdout if len(lines) > 0: ncols = len(string.split(lines[0][:-1], "\t")) if options.columns == "all": options.columns = range(0, ncols) elif options.columns == "all-but-first": options.columns = range(1, ncols) elif options.columns == "variable": pass if options.rows: # ignore first value: is row title if options.columns != "variable": del options.columns[0] if options.titles: del lines[0] # write header for flat output if options.flat: if options.headers: head = options.headers[0] else: head = "row" options.stdout.write("\t".join( (head, "nval", "min", "max", "mean", "median", "stddev", "sum", "q1", "q3")) + "\n") options.add_header = False for l in lines: data = l[:-1].split(options.separator) if options.columns == "variable": vals = data[1:] else: vals = map(lambda x: data[x], options.columns) # remove unknown values vals = [float(x) for x in vals if x and x.lower() not in ("na", "nan")] if options.ignore_zeros: vals = [x for x in vals if x != 0.0] # now convert to float vals = map(float, vals) PrintValues(outfile, [vals], options, data[0]) else: last_aggregate = None if options.titles: data = lines[0][:-1].split("\t") if not options.headers: options.headers = map(lambda x: data[x], options.columns) del lines[0] if options.aggregate_column is not None: outfile.write( "category\t%s" % "\t".join(options.headers) + "\n") vals = [[] for x in range(len(options.columns))] for l in lines: data = l[:-1].split("\t") for c in range(len(options.columns)): try: val = string.atof(data[options.columns[c]]) except IndexError: E.warn("IndexError in line: %s" % l[:-1]) continue except __HOLE__: continue if options.aggregate_column is not None: if last_aggregate != data[options.aggregate_column]: if last_aggregate: PrintValues( outfile, vals, options, last_aggregate) vals = [[] for x in range(len(options.columns))] last_aggregate = data[options.aggregate_column] if options.scale: val *= options.scale if options.max_value is not None \ and val > options.max_value: val = options.max_value if options.min_value is not None \ and val < options.min_value: val = options.min_value vals[c].append(val) lines = None # remove empty columns nvals = [] titles = [] for c in range(len(options.columns)): if vals[c] or options.output_empty: nvals.append(vals[c]) if options.headers: titles.append(options.headers[c]) vals = nvals PrintValues(outfile, vals, options, last_aggregate, titles) else: if options.titles: titles = ["missing", ] else: titles = [] if options.output_empty: PrintValues(outfile, [[], ], options, None, titles) if options.loglevel >= 1: E.Stop()
ValueError
dataset/ETHPy150Open CGATOxford/cgat/scripts/data2stats.py/main
2,385
def get_boolean(self, section, name, default=None): """Retrieve a configuration setting as boolean. :param section: Tuple with section name and optional subsection name :param name: Name of the setting, including section and possible subsection. :return: Contents of the setting :raise KeyError: if the value is not set """ try: value = self.get(section, name) except __HOLE__: return default if value.lower() == "true": return True elif value.lower() == "false": return False raise ValueError("not a valid boolean string: %r" % value)
KeyError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/config.py/Config.get_boolean
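The boolean coercion above distinguishes "key missing" (return the default) from "value present but malformed" (raise ValueError). The same logic sketched on a plain dict:

# Illustrative sketch -- not a dataset row.
def get_boolean(settings, key, default=None):
    try:
        value = settings[key]
    except KeyError:          # key absent: fall back to the default
        return default
    if value.lower() == "true":
        return True
    if value.lower() == "false":
        return False
    raise ValueError("not a valid boolean string: %r" % value)

get_boolean({"bare": "true"}, "missing", default=False)  # -> False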
2,386
def get(self, section, name): if isinstance(section, basestring): section = (section, ) if len(section) > 1: try: return self._values[section][name] except __HOLE__: pass return self._values[(section[0],)][name]
KeyError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/config.py/ConfigDict.get
2,387
@classmethod def from_file(cls, f): """Read configuration from a file-like object.""" ret = cls() section = None setting = None for lineno, line in enumerate(f.readlines()): line = line.lstrip() if setting is None: if _strip_comments(line).strip() == "": continue if line[0] == "[": line = _strip_comments(line).rstrip() if line[-1] != "]": raise ValueError("expected trailing ]") key = line.strip() pts = key[1:-1].split(" ", 1) pts[0] = pts[0].lower() if len(pts) == 2: if pts[1][0] != "\"" or pts[1][-1] != "\"": raise ValueError( "Invalid subsection " + pts[1]) else: pts[1] = pts[1][1:-1] if not _check_section_name(pts[0]): raise ValueError("invalid section name %s" % pts[0]) section = (pts[0], pts[1]) else: if not _check_section_name(pts[0]): raise ValueError("invalid section name %s" % pts[0]) pts = pts[0].split(".", 1) if len(pts) == 2: section = (pts[0], pts[1]) else: section = (pts[0], ) ret._values[section] = {} else: if section is None: raise ValueError("setting %r without section" % line) try: setting, value = line.split("=", 1) except __HOLE__: setting = line value = "true" setting = setting.strip().lower() if not _check_variable_name(setting): raise ValueError("invalid variable name %s" % setting) if value.endswith("\\\n"): value = value[:-2] continuation = True else: continuation = False value = _parse_string(value) ret._values[section][setting] = value if not continuation: setting = None else: # continuation line if line.endswith("\\\n"): line = line[:-2] continuation = True else: continuation = False value = _parse_string(line) ret._values[section][setting] += value if not continuation: setting = None return ret
ValueError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/config.py/ConfigFile.from_file
2,388
def write_to_file(self, f): """Write configuration to a file-like object.""" for section, values in self._values.iteritems(): try: section_name, subsection_name = section except __HOLE__: (section_name, ) = section subsection_name = None if subsection_name is None: f.write("[%s]\n" % section_name) else: f.write("[%s \"%s\"]\n" % (section_name, subsection_name)) for key, value in values.iteritems(): f.write("%s = %s\n" % (key, _escape_value(value)))
ValueError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/config.py/ConfigFile.write_to_file
2,389
@classmethod def default_backends(cls): """Retrieve the default configuration. This will look in the repository configuration (if for_path is specified), the users' home directory and the system configuration. """ paths = [] paths.append(os.path.expanduser("~/.gitconfig")) paths.append("/etc/gitconfig") backends = [] for path in paths: try: cf = ConfigFile.from_path(path) except (__HOLE__, OSError), e: if e.errno != errno.ENOENT: raise else: continue backends.append(cf) return backends
IOError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/config.py/StackedConfig.default_backends
2,390
def get(self, section, name): for backend in self.backends: try: return backend.get(section, name) except __HOLE__: pass raise KeyError(name)
KeyError
dataset/ETHPy150Open codeinn/vcs/vcs/backends/git/config.py/StackedConfig.get
2,391
def smtpcli(argv): """smtpcli [-h] [-l <logfilename>] [-s <portname>] [host] [port] Provides an interactive session at a protocol level to an SMTP server. """ bindto = None port = 25 sourcefile = None paged = False logname = None try: optlist, longopts, args = getopt.getopt(argv[1:], "b:hp:s:l:g") except getopt.GetoptError: print(smtpcli.__doc__) return for opt, val in optlist: if opt == "-b": bindto = val if opt == "-l": logname = val elif opt == "-s": sourcefile = val elif opt == "-g": paged = True elif opt == "-h": print(smtpcli.__doc__) return elif opt == "-p": try: port = int(val) except __HOLE__: print(smtpcli.__doc__) return theme = UI.DefaultTheme(PROMPT) parser = CLI.get_cli(EmailClientCLI, paged=paged, theme=theme) if len(args) > 0: if len(args) > 1: port = int(args[1]) else: port = 25 host = args[0] else: host = "" if logname: parser.commands.logfile(["logfile", logname]) if host: parser.commands.connect(["connect"]+IF(bindto, ["-s", bindto], [])+[host, port]) else: parser.commands._print("Be sure to run 'connect' before anything else.\n") if sourcefile: try: parser.parse(sourcefile) except CLI.CommandQuit: pass else: parser.interact()
ValueError
dataset/ETHPy150Open kdart/pycopia/net/pycopia/smtpCLI.py/smtpcli
2,392
def from_archive(archive_path): archive = zipfile.ZipFile(archive_path) try: xml_data = archive.read("package.xml") except __HOLE__: raise qisys.error.Error("Could not find package.xml in %s" % archive_path) element = etree.fromstring(xml_data) return from_xml(element)
KeyError
dataset/ETHPy150Open aldebaran/qibuild/python/qitoolchain/qipackage.py/from_archive
2,393
def test_local_fail(self): try: import ckan except __HOLE__: raise unittest.SkipTest('ckan not importable') self.assertRaises( ckanapi.CKANAPIError, ckanapi.LocalCKAN('fake').call_action, 'fake', {}, {}, 'apikey not allowed')
ImportError
dataset/ETHPy150Open ckan/ckanapi/ckanapi/tests/test_call.py/TestCallAction.test_local_fail
2,394
def _non_atomic_requests(view, using): try: view._non_atomic_requests.add(using) except __HOLE__: view._non_atomic_requests = set([using]) return view
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/transaction.py/_non_atomic_requests
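The helper above lazily creates a set-valued attribute on first use; the AttributeError branch runs only once per view. The idiom in isolation (the _tags attribute name is invented for illustration):

# Illustrative sketch -- not a dataset row.
def tag(obj, value):
    try:
        obj._tags.add(value)      # fast path: attribute already exists
    except AttributeError:
        obj._tags = set([value])  # first use: create the set
    return obj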
2,395
def _osUrandom(self, nbytes): """ Wrapper around C{os.urandom} that cleanly manages its absence. """ try: return os.urandom(nbytes) except (AttributeError, __HOLE__), e: raise SourceNotAvailable(e)
NotImplementedError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/python/randbytes.py/RandomFactory._osUrandom
2,396
def _fileUrandom(self, nbytes): """ Wrapper around random file sources. This method isn't meant to be called from outside the class and could be removed arbitrarily. """ for src in self.randomSources: try: f = file(src, 'rb') except (__HOLE__, OSError): pass else: bytes = f.read(nbytes) f.close() return bytes raise SourceNotAvailable("File sources not available: %s" % (self.randomSources,))
IOError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/python/randbytes.py/RandomFactory._fileUrandom
2,397
def handle_interactive_request(self, environ): code = environ['wsgi.input'].read().replace('\r\n', '\n') user_environ = self.get_user_environ(environ) if 'HTTP_CONTENT_LENGTH' in user_environ: del user_environ['HTTP_CONTENT_LENGTH'] user_environ['REQUEST_METHOD'] = 'GET' url = 'http://%s:%s%s?%s' % (user_environ['SERVER_NAME'], user_environ['SERVER_PORT'], urllib.quote(environ['PATH_INFO']), environ['QUERY_STRING']) results_io = cStringIO.StringIO() old_sys_stdout = sys.stdout try: error = logservice.LogsBuffer() request_environment.current_request.Init(error, user_environ) url = urlparse.urlsplit(url) environ.update(runtime.CgiDictFromParsedUrl(url)) sys.stdout = results_io try: try: __import__('appengine_config', self._command_globals) except __HOLE__ as e: if 'appengine_config' not in e.message: raise compiled_code = compile(code, '<string>', 'exec') exec(compiled_code, self._command_globals) except: traceback.print_exc(file=results_io) return {'error': 0, 'response_code': 200, 'headers': [('Content-Type', 'text/plain')], 'body': results_io.getvalue(), 'logs': error.parse_logs()} finally: request_environment.current_request.Clear() sys.stdout = old_sys_stdout
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/devappserver2/python/request_handler.py/RequestHandler.handle_interactive_request
2,398
def execute(task, *args, **kwargs): """ Execute ``task`` (callable or name), honoring host/role decorators, etc. ``task`` may be an actual callable object, or it may be a registered task name, which is used to look up a callable just as if the name had been given on the command line (including :ref:`namespaced tasks <namespaces>`, e.g. ``"deploy.migrate"``). The task will then be executed once per host in its host list, which is (again) assembled in the same manner as CLI-specified tasks: drawing from :option:`-H`, :ref:`env.hosts <hosts>`, the `~fabric.decorators.hosts` or `~fabric.decorators.roles` decorators, and so forth. ``host``, ``hosts``, ``role``, ``roles`` and ``exclude_hosts`` kwargs will be stripped out of the final call, and used to set the task's host list, as if they had been specified on the command line like e.g. ``fab taskname:host=hostname``. Any other arguments or keyword arguments will be passed verbatim into ``task`` (the function itself -- not the ``@task`` decorator wrapping your function!) when it is called, so ``execute(mytask, 'arg1', kwarg1='value')`` will (once per host) invoke ``mytask('arg1', kwarg1='value')``. :returns: a dictionary mapping host strings to the given task's return value for that host's execution run. For example, ``execute(foo, hosts=['a', 'b'])`` might return ``{'a': None, 'b': 'bar'}`` if ``foo`` returned nothing on host `a` but returned ``'bar'`` on host `b`. In situations where a task execution fails for a given host but overall progress does not abort (such as when :ref:`env.skip_bad_hosts <skip-bad-hosts>` is True) the return value for that host will be the error object or message. .. seealso:: :ref:`The execute usage docs <execute>`, for an expanded explanation and some examples. .. versionadded:: 1.3 .. versionchanged:: 1.4 Added the return value mapping; previously this function had no defined return value. """ my_env = {'clean_revert': True} results = {} # Obtain task is_callable = callable(task) if not (is_callable or _is_task(task)): # Assume string, set env.command to it my_env['command'] = task task = crawl(task, state.commands) if task is None: msg = "%r is not callable or a valid task name" % (my_env['command'],) if state.env.get('skip_unknown_tasks', False): warn(msg) return else: abort(msg) # Set env.command if we were given a real function or callable task obj else: dunder_name = getattr(task, '__name__', None) my_env['command'] = getattr(task, 'name', dunder_name) # Normalize to Task instance if we ended up with a regular callable if not _is_task(task): task = WrappedCallableTask(task) # Filter out hosts/roles kwargs new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs) # Set up host list my_env['all_hosts'], my_env['effective_roles'] = task.get_hosts_and_effective_roles(hosts, roles, exclude_hosts, state.env) parallel = requires_parallel(task) if parallel: # Import multiprocessing if needed, erroring out usefully # if it can't. try: import multiprocessing except __HOLE__: import traceback tb = traceback.format_exc() abort(tb + """ At least one task needs to be run in parallel, but the multiprocessing module cannot be imported (see above traceback.) Please make sure the module is installed or that the above ImportError is fixed.""") else: multiprocessing = None # Get pool size for this task pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size) # Set up job queue in case parallel is needed queue = multiprocessing.Queue() if parallel else None jobs = JobQueue(pool_size, queue) if state.output.debug: jobs._debug = True # Call on host list if my_env['all_hosts']: # Attempt to cycle on hosts, skipping if needed for host in my_env['all_hosts']: try: results[host] = _execute( task, host, my_env, args, new_kwargs, jobs, queue, multiprocessing ) except NetworkError, e: results[host] = e # Backwards compat test re: whether to use an exception or # abort if not state.env.use_exceptions_for['network']: func = warn if state.env.skip_bad_hosts else abort error(e.message, func=func, exception=e.wrapped) else: raise # If requested, clear out connections here and not just at the end. if state.env.eagerly_disconnect: disconnect_all() # If running in parallel, block until job queue is emptied if jobs: err = "One or more hosts failed while executing task '%s'" % ( my_env['command'] ) jobs.close() # Abort if any children did not exit cleanly (fail-fast). # This prevents Fabric from continuing on to any other tasks. # Otherwise, pull in results from the child run. ran_jobs = jobs.run() for name, d in ran_jobs.iteritems(): if d['exit_code'] != 0: if isinstance(d['results'], NetworkError) and \ _is_network_error_ignored(): error(d['results'].message, func=warn, exception=d['results'].wrapped) elif isinstance(d['results'], BaseException): error(err, exception=d['results']) else: error(err) results[name] = d['results'] # Or just run once for local-only else: with settings(**my_env): results['<local-only>'] = task.run(*args, **new_kwargs) # Return what we can from the inner task executions return results
ImportError
dataset/ETHPy150Open fabric/fabric/fabric/tasks.py/execute
2,399
def run_ner(self, doc): entities = [] # Apply the NER algorithm which takes a list of sentences and returns # a list of sentences, each being a list of NER-tokens, each of which is # a pair (tokenstring, class) ner_sentences = self.ner(doc.get_sentences()) # Flatten the nested list above into just a list of kinds ner_kinds = (k for s in ner_sentences for (_, k) in s) # We build a large iterator z that goes over tuples like the following: # (offset, (token, kind)) # offset just goes incrementally from 0 z = itertools.chain( enumerate(zip(doc.tokens, ner_kinds)), # Add a sentinel last token to simplify last iteration of loop below [(len(doc.tokens), (None, 'INVALID'))] ) # Traverse z, looking for changes in the kind field. If there is a # change of kind, we have a new set of contiguous tokens; if the kind # of those isn't "O" (which means "other"), record the occurrence # # offset keeps the start of the current token run; last_kind keeps the kind. last_kind = 'O' offset = 0 for i, (token, kind) in z: if kind != last_kind: if last_kind != 'O': # Found a new entity in offset:i name = ' '.join(doc.tokens[offset:i]) entities.append( self.build_occurrence(name, last_kind.lower(), name, offset, i) ) # Restart offset counter at each change of entity type offset = i last_kind = kind # Just a sanity check: verify that all NER tokens were consumed try: next(ner_kinds) assert False, "ner_kinds should have been completely consumed" except __HOLE__: # Actually the stop iteration is the expected result here pass return entities
StopIteration
dataset/ETHPy150Open machinalis/iepy/iepy/preprocess/ner/stanford.py/NERRunner.run_ner
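The sanity check at the end of run_ner asserts exhaustion by catching StopIteration. next() with a default sentinel expresses the same check without try/except:

# Illustrative sketch -- not a dataset row.
_SENTINEL = object()

def assert_exhausted(it):
    # next() returns the sentinel instead of raising when `it` is empty.
    assert next(it, _SENTINEL) is _SENTINEL, "iterator was not fully consumed"

assert_exhausted(iter([]))  # passes: nothing left to consume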