Dataset columns (name, type, observed range):
  Unnamed: 0   int64    0 to 10k
  function     string   lengths 79 to 138k
  label        string   20 classes
  info         string   lengths 42 to 261
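This summary describes a table of Python snippets with one exception class masked out: each function cell contains a __HOLE__ token, label names the exception class that fills the hole (one of 20 classes), and info gives the path of the source file (the ETH Py150 Open corpus, per the paths in the rows below). The "Unnamed: 0" column is the unnamed index pandas emits when a frame is round-tripped through CSV, which suggests how the dump was produced. Below is a minimal sketch of loading such a dump and restoring a masked snippet; the filename exceptions.csv and the CSV storage format are assumptions, not something this dump states.

import pandas as pd

# Assumption: the rows are stored as a pandas-written CSV named
# exceptions.csv (hypothetical); columns follow the summary above.
df = pd.read_csv("exceptions.csv")

# Substituting the label for the __HOLE__ token reconstructs the
# original source of each masked function.
for _, row in df.head(3).iterrows():
    restored = row["function"].replace("__HOLE__", row["label"])
    print(row["info"])
    print(restored[:120])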
3,100
def _OSOndiskAPIVersion(os_dir): """Compute and return the API version of a given OS. This function will try to read the API version of the OS residing in the 'os_dir' directory. @type os_dir: str @param os_dir: the directory in which we should look for the OS @rtype: tuple @return: tuple (status, data) with status denoting the validity and data holding either the valid versions or an error message """ api_file = utils.PathJoin(os_dir, constants.OS_API_FILE) try: st = os.stat(api_file) except EnvironmentError, err: return False, ("Required file '%s' not found under path %s: %s" % (constants.OS_API_FILE, os_dir, utils.ErrnoOrStr(err))) if not stat.S_ISREG(stat.S_IFMT(st.st_mode)): return False, ("File '%s' in %s is not a regular file" % (constants.OS_API_FILE, os_dir)) try: api_versions = utils.ReadFile(api_file).splitlines() except EnvironmentError, err: return False, ("Error while reading the API version file at %s: %s" % (api_file, utils.ErrnoOrStr(err))) try: api_versions = [int(version.strip()) for version in api_versions] except (__HOLE__, ValueError), err: return False, ("API version(s) can't be converted to integer: %s" % str(err)) return True, api_versions
TypeError
dataset/ETHPy150Open ganeti/ganeti/lib/backend.py/_OSOndiskAPIVersion
3,101
def CreateFileStorageDir(file_storage_dir): """Create file storage directory. @type file_storage_dir: str @param file_storage_dir: directory to create @rtype: tuple @return: tuple with first element a boolean indicating wheter dir creation was successful or not """ file_storage_dir = _TransformFileStorageDir(file_storage_dir) if os.path.exists(file_storage_dir): if not os.path.isdir(file_storage_dir): _Fail("Specified storage dir '%s' is not a directory", file_storage_dir) else: try: os.makedirs(file_storage_dir, 0750) except __HOLE__, err: _Fail("Cannot create file storage directory '%s': %s", file_storage_dir, err, exc=True)
OSError
dataset/ETHPy150Open ganeti/ganeti/lib/backend.py/CreateFileStorageDir
3,102
def RemoveFileStorageDir(file_storage_dir): """Remove file storage directory. Remove it only if it's empty. If not log an error and return. @type file_storage_dir: str @param file_storage_dir: the directory we should cleanup @rtype: tuple (success,) @return: tuple of one element, C{success}, denoting whether the operation was successful """ file_storage_dir = _TransformFileStorageDir(file_storage_dir) if os.path.exists(file_storage_dir): if not os.path.isdir(file_storage_dir): _Fail("Specified Storage directory '%s' is not a directory", file_storage_dir) # deletes dir only if empty, otherwise we want to fail the rpc call try: os.rmdir(file_storage_dir) except __HOLE__, err: _Fail("Cannot remove file storage directory '%s': %s", file_storage_dir, err)
OSError
dataset/ETHPy150Open ganeti/ganeti/lib/backend.py/RemoveFileStorageDir
3,103
def RenameFileStorageDir(old_file_storage_dir, new_file_storage_dir): """Rename the file storage directory. @type old_file_storage_dir: str @param old_file_storage_dir: the current path @type new_file_storage_dir: str @param new_file_storage_dir: the name we should rename to @rtype: tuple (success,) @return: tuple of one element, C{success}, denoting whether the operation was successful """ old_file_storage_dir = _TransformFileStorageDir(old_file_storage_dir) new_file_storage_dir = _TransformFileStorageDir(new_file_storage_dir) if not os.path.exists(new_file_storage_dir): if os.path.isdir(old_file_storage_dir): try: os.rename(old_file_storage_dir, new_file_storage_dir) except __HOLE__, err: _Fail("Cannot rename '%s' to '%s': %s", old_file_storage_dir, new_file_storage_dir, err) else: _Fail("Specified storage dir '%s' is not a directory", old_file_storage_dir) else: if os.path.exists(old_file_storage_dir): _Fail("Cannot rename '%s' to '%s': both locations exist", old_file_storage_dir, new_file_storage_dir)
OSError
dataset/ETHPy150Open ganeti/ganeti/lib/backend.py/RenameFileStorageDir
3,104
def PowercycleNode(hypervisor_type, hvparams=None): """Hard-powercycle the node. Because we need to return first, and schedule the powercycle in the background, we won't be able to report failures nicely. """ hyper = hypervisor.GetHypervisor(hypervisor_type) try: pid = os.fork() except __HOLE__: # if we can't fork, we'll pretend that we're in the child process pid = 0 if pid > 0: return "Reboot scheduled in 5 seconds" # ensure the child is running on ram try: utils.Mlockall() except Exception: # pylint: disable=W0703 pass time.sleep(5) hyper.PowercycleNode(hvparams=hvparams)
OSError
dataset/ETHPy150Open ganeti/ganeti/lib/backend.py/PowercycleNode
3,105
def GetFileInfo(file_path): """ Checks if a file exists and returns information related to it. Currently returned information: - file size: int, size in bytes @type file_path: string @param file_path: Name of file to examine. @rtype: tuple of bool, dict @return: Whether the file exists, and a dictionary of information about the file gathered by os.stat. """ try: stat_info = os.stat(file_path) values_dict = { constants.STAT_SIZE: stat_info.st_size, } return True, values_dict except __HOLE__: return False, {}
IOError
dataset/ETHPy150Open ganeti/ganeti/lib/backend.py/GetFileInfo
3,106
def append_val(self, key, val): try: current = self.deserialize( self.storage[key]) except __HOLE__: current = [] # update new list current.append( val) self.storage[key] = self.serialize(current)
KeyError
dataset/ETHPy150Open brandonrobertz/SparseLSH/sparselsh/storage.py/BerkeleyDBStorage.append_val
3,107
def get_list(self, key): try: return self.deserialize(self.storage[key]) except __HOLE__: return []
KeyError
dataset/ETHPy150Open brandonrobertz/SparseLSH/sparselsh/storage.py/BerkeleyDBStorage.get_list
3,108
def append_val(self, key, val): # If a key doesn't exist, leveldb will throw KeyError try: current = self.deserialize(self.storage.Get(key)) except __HOLE__: current = [] # update new list current.append( val) self.storage.Put(key, self.serialize(current))
KeyError
dataset/ETHPy150Open brandonrobertz/SparseLSH/sparselsh/storage.py/LevelDBStorage.append_val
3,109
def get_list(self, key): try: return self.deserialize( self.storage.Get(key)) except __HOLE__: return []
KeyError
dataset/ETHPy150Open brandonrobertz/SparseLSH/sparselsh/storage.py/LevelDBStorage.get_list
3,110
def _validate_key(self, key): if key.find(',') >= 0: raise HandshakeException('Request has multiple %s header lines or ' 'contains illegal character \',\': %r' % (common.SEC_WEBSOCKET_KEY_HEADER, key)) # Validate key_is_valid = False try: # Validate key by quick regex match before parsing by base64 # module. Because base64 module skips invalid characters, we have # to do this in advance to make this server strictly reject illegal # keys. if _SEC_WEBSOCKET_KEY_REGEX.match(key): decoded_key = base64.b64decode(key) if len(decoded_key) == 16: key_is_valid = True except __HOLE__, e: pass if not key_is_valid: raise HandshakeException( 'Illegal value for header %s: %r' % (common.SEC_WEBSOCKET_KEY_HEADER, key)) return decoded_key
TypeError
dataset/ETHPy150Open google/pywebsocket/mod_pywebsocket/handshake/hybi.py/Handshaker._validate_key
3,111
def intersection_array(G): """Returns the intersection array of a distance-regular graph. Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d such that for any 2 vertices x,y in G at a distance i=d(x,y), there are exactly c_i neighbors of y at a distance of i-1 from x and b_i neighbors of y at a distance of i+1 from x. A distance regular graph's intersection array is given by, [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] Parameters ---------- G: Networkx graph (undirected) Returns ------- b,c: tuple of lists Examples -------- >>> G=nx.icosahedral_graph() >>> nx.intersection_array(G) ([5, 2, 1], [1, 2, 5]) References ---------- .. [1] Weisstein, Eric W. "Intersection Array." From MathWorld--A Wolfram Web Resource. http://mathworld.wolfram.com/IntersectionArray.html See Also -------- global_parameters """ if G.is_multigraph() or G.is_directed(): raise nx.NetworkxException('Not implemented for directed ', 'or multiedge graphs.') # test for regular graph (all degrees must be equal) degree = G.degree_iter() (_,k) = next(degree) for _,knext in degree: if knext != k: raise nx.NetworkXError('Graph is not distance regular.') k = knext path_length = nx.all_pairs_shortest_path_length(G) diameter = max([max(path_length[n].values()) for n in path_length]) bint = {} # 'b' intersection array cint = {} # 'c' intersection array for u in G: for v in G: try: i = path_length[u][v] except __HOLE__: # graph must be connected raise nx.NetworkXError('Graph is not distance regular.') # number of neighbors of v at a distance of i-1 from u c = len([n for n in G[v] if path_length[n][u]==i-1]) # number of neighbors of v at a distance of i+1 from u b = len([n for n in G[v] if path_length[n][u]==i+1]) # b,c are independent of u and v if cint.get(i,c) != c or bint.get(i,b) != b: raise nx.NetworkXError('Graph is not distance regular') bint[i] = b cint[i] = c return ([bint.get(i,0) for i in range(diameter)], [cint.get(i+1,0) for i in range(diameter)])
KeyError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/distance_regular.py/intersection_array
3,112
def get_format_info(self): """ Returns format information. """ if self.active_formats == True: return { "product_cols": self.product_cols, "product_rows": self.product_rows, "category_cols": self.category_cols, } else: if self.parent is None: try: # TODO: Use cache here. Maybe we need a lfs_get_object, # which raise a ObjectDoesNotExist if the object does not # exist from lfs.core.models import Shop shop = Shop.objects.get(pk=1) except __HOLE__: return { "product_cols": 3, "product_rows": 3, "category_cols": 3, } else: return { "product_cols": shop.product_cols, "product_rows": shop.product_rows, "category_cols": shop.category_cols, } else: return self.parent.get_format_info()
ObjectDoesNotExist
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Category.get_format_info
3,113
def get_category(self): """ Returns the first category of a product. """ object = self.get_parent() try: return object.get_categories()[0] except __HOLE__: return None
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_category
3,114
def get_current_category(self, request): """ Returns product category based on actual categories of the given product and the last visited category. This is needed if the category has more than one category to display breadcrumbs, selected menu points, etc. appropriately. """ last_category = None category = None product_categories = self.get_categories() if len(product_categories) >= 1: try: if len(product_categories) == 1: category = product_categories[0] return category else: last_category = request.session.get("last_category") if last_category is None: return product_categories[0] category = None if last_category in product_categories: category = last_category else: children = last_category.get_all_children() for product_category in product_categories: if product_category in children: category = product_category break if category is None: category = product_categories[0] except __HOLE__: category = None request.session["last_category"] = category return category
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_current_category
3,115
def get_image(self): """ Returns the first image (the main image) of the product. """ try: return self.get_images()[0] except __HOLE__: return None
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_image
3,116
def get_option(self, property_id): """ Returns the id of the selected option for property with passed id. """ from lfs.caching.utils import get_cache_group_id pid = self.get_parent().pk properties_version = get_cache_group_id('global-properties-version') group_id = '%s-%s' % (properties_version, get_cache_group_id('properties-%s' % pid)) cache_key = "%s-%s-productpropertyvalue%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, group_id, self.id) options = cache.get(cache_key) if options is None: options = {} for pvo in self.property_values.all(): options[pvo.property_id] = pvo.value cache.set(cache_key, options) try: return options[property_id] except __HOLE__: return None
KeyError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_option
3,117
def get_displayed_properties(self): """ Returns properties with ``display_on_product`` is True. """ from lfs.caching.utils import get_cache_group_id pid = self.get_parent().pk properties_version = get_cache_group_id('global-properties-version') group_id = '%s-%s' % (properties_version, get_cache_group_id('properties-%s' % pid)) cache_key = "%s-%s-displayed-properties-%s" % (group_id, settings.CACHE_MIDDLEWARE_KEY_PREFIX, self.id) properties = cache.get(cache_key) if properties: return properties properties = [] for ppv in self.property_values.filter(property__display_on_product=True, type=PROPERTY_VALUE_TYPE_DISPLAY).order_by('property_group__name', 'property__position'): if ppv.property.is_select_field: try: po = PropertyOption.objects.get(pk=int(float(ppv.value))) except (PropertyOption.DoesNotExist, __HOLE__): continue else: value = po.name position = po.position else: value = ppv.value position = 1 properties.append({ "name": ppv.property.name, "title": ppv.property.title, "value": value, "position": (ppv.property.position * 1000) + position, "unit": ppv.property.unit, "property_group": ppv.property_group, "property_group_id": ppv.property_group_id if ppv.property_group else 0 }) cache.set(cache_key, properties) return properties
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_displayed_properties
3,118
def has_option(self, property_group, prop, option): """ Returns True if the variant has the given property / option combination. """ from lfs.caching.utils import get_cache_group_id pid = self.get_parent().pk properties_version = get_cache_group_id('global-properties-version') group_id = '%s-%s' % (properties_version, get_cache_group_id('properties-%s' % pid)) options = cache.get("%s-%s-productpropertyvalue%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, group_id, self.id)) if options is None: options = {} for pvo in self.property_values.filter(property_group=property_group): options[pvo.property_id] = pvo.value cache.set("%s-%s-productpropertyvalue%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, group_id, self.id), options) try: return options[prop.id] == str(option.id) except __HOLE__: return False
KeyError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.has_option
3,119
def get_default_properties_price(self): """ Returns the total price of all default properties. """ price = 0 for property_dict in self.get_configurable_properties(): prop = property_dict['property'] property_group = property_dict['property_group'] if prop.add_price: # Try to get the default value of the property try: ppv = ProductPropertyValue.objects.get(product=self, property_group=property_group, property=prop, type=PROPERTY_VALUE_TYPE_DEFAULT) po = PropertyOption.objects.get(pk=ppv.value) except (ObjectDoesNotExist, ValueError): # If there is no explicit default value try to get the first # option. if prop.required: try: po = prop.options.all()[0] except __HOLE__: continue else: try: price += po.price except TypeError: pass else: try: price += po.price except TypeError: pass return price
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_default_properties_price
3,120
def get_default_variant(self): """ Returns the default variant, which is supposed to be displayed within the product view. This is either a selected variant or the first added variant. If the product has no variants it is None. """ cache_key = "%s-default-variant-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, self.id) default_variant = cache.get(cache_key) if default_variant is not None: return default_variant if self.default_variant is not None: default_variant = self.default_variant elif self.is_product_with_variants(): try: default_variant = self.variants.filter(active=True)[0] except __HOLE__: return None cache.set(cache_key, default_variant) return default_variant
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_default_variant
3,121
def get_clean_quantity_value(self, quantity=1, allow_zero=False): """ Returns the valid quantity based on the product's type of quantity field. """ try: quantity = abs(core_utils.atof(str(quantity))) except (__HOLE__, ValueError): quantity = 1.0 if not allow_zero: quantity = 1 if quantity <= 0 else quantity type_of_quantity_field = self.get_type_of_quantity_field() if type_of_quantity_field == QUANTITY_FIELD_INTEGER or getattr(settings, 'LFS_FORCE_INTEGER_QUANTITY', False): quantity = int(quantity) return quantity
TypeError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_clean_quantity_value
3,122
def get_clean_quantity(self, quantity=1): """ Returns the correct formatted quantity based on the product's type of quantity field. """ try: quantity = abs(core_utils.atof(str(quantity))) except (__HOLE__, ValueError): quantity = 1.0 type_of_quantity_field = self.get_type_of_quantity_field() if type_of_quantity_field == QUANTITY_FIELD_INTEGER: quantity = int(quantity) elif type_of_quantity_field == QUANTITY_FIELD_DECIMAL_1: quantity = locale.format("%.1f", quantity) else: quantity = locale.format("%.2f", quantity) return quantity
TypeError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Product.get_clean_quantity
3,123
def is_valid_value(self, value): """ Returns True if given value is valid for this property. """ if self.is_number_field: try: float(value) except __HOLE__: return False return True
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/Property.is_valid_value
3,124
def save(self, *args, **kwargs): """ Overwritten to save the parent id for variants. This is used to count the entries per filter. See catalog/utils/get_product_filters for more. """ if self.product.is_variant(): self.parent_id = self.product.parent.id else: self.parent_id = self.product.id try: float(self.value) except __HOLE__: pass else: self.value_as_float = self.value super(ProductPropertyValue, self).save(*args, **kwargs)
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/catalog/models.py/ProductPropertyValue.save
3,125
def get_db_results(self, db, query): cursor = db.cursor() try: cursor.execute(query) results = float(cursor.fetchone()[1]) except __HOLE__: cursor.execute(query) results = cursor.fetchone()[1] return results
ValueError
dataset/ETHPy150Open serverdensity/sd-agent-plugins/MySQL/MySQL.py/MySQL.get_db_results
3,126
def run_query(self, db, query): """Run a query and returns a dictionary with results""" try: cursor = db.cursor() cursor.execute(query) metric = {} for entry in cursor: try: metric[entry[0]] = float(entry[1]) except __HOLE__ as e: metric[entry[0]] = entry[1] return metric except MySQLdb.OperationalError as message: self.checks_logger.debug( 'mysql: MySQL query error when getting metrics = '.format( message) )
ValueError
dataset/ETHPy150Open serverdensity/sd-agent-plugins/MySQL/MySQL.py/MySQL.run_query
3,127
def preliminaries(self): if ('MySQLServer' not in self.raw_config and 'mysql_server' not in self.raw_config['MySQLServer'] or self.raw_config['MySQLServer']['mysql_server'] == '' or self.raw_config['MySQLServer']['mysql_user'] == '' or self.raw_config['MySQLServer']['mysql_pass'] == ''): self.checks_logger.debug('mysql: config not set') return False if not self.raw_config['MySQLServer'].get('mysql_port'): self.raw_config['MySQLServer']['mysql_port'] = "3306" self.checks_logger.debug('mysql: config set') try: import MySQLdb except __HOLE__: self.checks_logger.error('mysql: unable to import MySQLdb') return False # Note, code here doesn't really make sense. See what I copied. if self.raw_config['MySQLServer'].get('mysql_server'): # Connect try: MySQLdb.connect( host=self.raw_config['MySQLServer']['mysql_server'], user=self.raw_config['MySQLServer']['mysql_user'], passwd=self.raw_config['MySQLServer']['mysql_pass'], port=int(self.raw_config['MySQLServer']['mysql_port']) ) except MySQLdb.OperationalError as message: self.checks_logger.error( "mysql: MySQL connection error: {0}".format(message)) return False elif (self.raw_config['MySQLServer'].get('mysql_ssl_cert') and self.raw_config['MySQLServer'].get('mysql_ssl_key')): ssl = { 'cert': self.raw_config['MySQLServer']['mysql_ssl_cert'], 'key': self.raw_config['MySQLServer']['mysql_ssl_key'] } MySQLdb.connect( host=self.raw_config['MySQLServer']['mysql_server'], user=self.raw_config['MySQLServer']['mysql_user'], passwd=self.raw_config['MySQLServer']['mysql_pass'], port=int(self.raw_config['MySQLServer']['mysql_port']), ssl=ssl ) else: # Connect try: MySQLdb.connect( host='localhost', user=self.raw_config['MySQLServer']['mysql_user'], passwd=self.raw_config['MySQLServer']['mysql_pass'], port=int(self.raw_config['MySQLServer']['mysql_port'])) except MySQLdb.OperationalError as message: self.checks_logger.error( 'mysql: MySQL connection error: {0}'.format(message) ) return False return True
ImportError
dataset/ETHPy150Open serverdensity/sd-agent-plugins/MySQL/MySQL.py/MySQL.preliminaries
3,128
def run(self): self.checks_logger.debug('mysql: started gathering data') if not self.preliminaries(): return False if not self.get_connection(): return False try: db = self.connection # setup status = {} # Get MySQL version try: self.checks_logger.debug('mysql: getting mysqlversion') cursor = db.cursor() cursor.execute('SELECT VERSION()') result = cursor.fetchone() version = result[0].split('-') # Case 31237. Might include a description e.g. 4.1.26-log. # See http://dev.mysql.com/doc/refman/4.1/en/ # information-functions.html#function_version version = version[0].split('.') status['version'] = [] for version_item in version: number = re.match('([0-9]+)', version_item) number = number.group(0) status['version'].append(number) except MySQLdb.OperationalError as message: self.checks_logger.error( ( 'mysql: MySQL query error when getting version: ' '{0}' ).format( message) ) return False # get show status metrics status_metrics = self.run_query(db, 'SHOW GLOBAL STATUS') status_variables = self.run_query(db, 'SHOW VARIABLES') # get Uptime status['Uptime'] = status_metrics['Uptime'] self.checks_logger.debug('mysql: getting Uptime - done') # Slow queries # Determine query depending on version. For 5.02 and above we # need the GLOBAL keyword (case 31015) # note, update with slow queries store. making it per second? # ask jordi about that. status['Slow queries'] = status_metrics['Slow_queries'] self.checks_logger.debug('mysql: getting Slow_queries - done') # Note, check for which version of mysql? # try: # if self.version_is_above_5(status): # query = 'SHOW GLOBAL STATUS LIKE "Slow_queries"' # else: # query = 'SHOW STATUS LIKE "Slow_queries' # QPS - Queries per second. status['Queries per second'] = self.calculate_per_s( 'qps', status_metrics['Queries'] ) # Note check for which version of mysql self.checks_logger.debug('mysql: getting QPS - done') # Connection pool status['threads connected'] = status_metrics['Threads_connected'] status['threads running'] = status_metrics['Threads_running'] status['max connections'] = status_variables['max_connections'] status['max used connections'] = status_metrics[ 'Max_used_connections'] status['Connection usage %'] = ( (status['threads running'] / status['max connections'])*100 ) self.checks_logger.debug('mysql: getting connections - done') # Buffer pool status['buffer pool pages total'] = status_metrics[ 'Innodb_buffer_pool_pages_total'] status['buffer pool pages free'] = status_metrics[ 'Innodb_buffer_pool_pages_free'] status['buffer pool pages dirty'] = status_metrics[ 'Innodb_buffer_pool_pages_dirty'] status['buffer pool pages data'] = status_metrics[ 'Innodb_buffer_pool_pages_data'] self.checks_logger.debug('mysql: getting buffer pool - done') # Query cache items status['qcache hits'] = status_metrics['Qcache_hits'] status['qcache hits/s'] = self.calculate_per_s( 'qcache_ps', status['qcache hits']) status['qcache free memory'] = status_metrics['Qcache_free_memory'] status['qcache not cached'] = status_metrics['Qcache_not_cached'] status['qcache in cache'] = status_metrics[ 'Qcache_queries_in_cache'] self.checks_logger.debug('mysql: getting Qcache data - done') # writes, reads, transactions writes = (status_metrics['Com_insert'] + status_metrics['Com_replace'] + status_metrics['Com_update'] + status_metrics['Com_delete']) status['Writes/s'] = self.calculate_per_s('writes', writes) # reads reads = status_metrics['Com_select'] + status['qcache hits'] status['Reads/s'] = self.calculate_per_s('reads', reads) try: status['RW ratio'] = reads/writes except ZeroDivisionError: status['RW ratio'] = 0 # transactions transactions = (status_metrics['Com_commit'] + status_metrics['Com_rollback']) status['Transactions/s'] = self.calculate_per_s( 'transactions', transactions) self.checks_logger.debug( 'mysql: getting transactions, reads and writes - done') # Aborted connections and clients status['aborted clients'] = status_metrics['Aborted_clients'] status['aborted connects'] = status_metrics['Aborted_connects'] self.checks_logger.debug( 'mysql: getting aborted connections - done') # Replication - Seconds Behind Master secondsBehindMaster = None try: cursor = db.cursor(MySQLdb.cursors.DictCursor) cursor.execute('SHOW SLAVE STATUS') result = cursor.fetchone() except MySQLdb.OperationalError as message: self.checks_logger.error( 'getMySQLStatus: MySQL query error when ' 'getting SHOW SLAVE STATUS = %s', message) result = None if result is not None: try: # Handle the case when Seconds_Behind_Master is NULL if result['Seconds_Behind_Master'] is None: secondsBehindMaster = -1 else: secondsBehindMaster = result['Seconds_Behind_Master'] self.checks_logger.debug( 'getMySQLStatus: ' 'secondsBehindMaster = %s', secondsBehindMaster ) except IndexError as e: self.checks_logger.debug( 'getMySQLStatus: secondsBehindMaster empty. %s', e ) else: self.checks_logger.debug( 'getMySQLStatus: secondsBehindMaster empty. Result = None.' ) # Created temporary tables in memory and on disk status['created tmp tables'] = status_metrics['Created_tmp_tables'] status['created tmp tables on disk'] = status_metrics[ 'Created_tmp_disk_tables'] # Note check mysql version? self.checks_logger.debug( 'mysql: getting temporary tables data - done') # select_full_join status['select full join'] = status_metrics['Select_full_join'] # note check for mysql version? self.checks_logger.debug('mysql: getting select_full_join - done') # slave_running result = status_metrics['Slave_running'] if result == 'OFF': result = 0 else: result = 1 status['slave running'] = result self.checks_logger.debug( 'mysql: getting slave_running - done') # open files status['open files'] = status_metrics['Open_files'] status['open files limit'] = status_variables['open_files_limit'] self.checks_logger.debug('mysql: getting open_files - done') # table_locks_waited status['table locks waited'] = status_metrics['Table_locks_waited'] self.checks_logger.debug( 'mysql: getting table_locks_waited - done') # checkpoint age # note this needs to be changed. try: cursor = db.cursor() cursor.execute('SHOW ENGINE INNODB STATUS') results = cursor.fetchone()[2] log_loci = results.find('Log sequence number') checkpoint_loci = results.find('Last checkpoint at') log_nr = int(re.search(r'\d+', results[log_loci:]).group(0)) cp_nr = int(re.search( r'\d+', results[checkpoint_loci:]).group(0)) cp_age = cp_nr - log_nr status['Checkpoint age'] = cp_age except MySQLdb.OperationalError as message: self.checks_logger.error( 'mysql: MySQL query error when ' 'getting checkpoint age = {0}'.format( message) ) return False self.checks_logger.debug( 'mysql: getting checkpoint age - done') # note remove this. try: # Key cache hit ratio # http://www.percona.com/blog/2010/02/28/why-you-should-ignore-mysqls-key-cache-hit-ratio/ key_read = self.get_db_results( db, 'SHOW STATUS LIKE "Key_reads"') key_requests = self.get_db_results( db, 'SHOW STATUS LIKE "Key_read_requests"') # status['Key cache hit ratio'] = ( # 100 - ((key_read * 100) / key_requests)) status['Key reads/s'] = self.calculate_per_s( "Key_reads", key_read) except MySQLdb.OperationalError as message: self.checks_logger.error( 'mysql: MySQL query error when ' 'getting key cache = {0}'.format( message) ) return False self.checks_logger.debug( 'mysql: getting key cache hit ratio - done') # com commands per second com = self.raw_config['MySQLServer'].get('mysql_include_per_s') if com: user_com_ps = com user_com_ps = user_com_ps.split(',') user_com_ps = [command.strip() for command in user_com_ps] user_com_ps = user_com_ps + COMMANDS else: user_com_ps = COMMANDS for command in user_com_ps: try: com_per_s = self.calculate_per_s( command, status_metrics[command]) status[command.replace('_', ' ')+'/s'] = com_per_s except __HOLE__, e: self.checks_logger.exception(e) if self.raw_config['MySQLServer'].get('mysql_include'): user_com = self.raw_config['MySQLServer']['mysql_include'] user_com = user_com.split(',') user_com = [command.strip() for command in user_com] user_com = user_com + COMMANDS else: user_com = COMMANDS for command in user_com: status[command.replace('_', ' ')] = status_metrics[ command] self.checks_logger.debug( 'mysql: getting com_commands - done') except Exception: self.checks_logger.error( 'mysql: unable to get data from MySQL - ' 'Exception: {0}'.format(traceback.format_exc()) ) self.checks_logger.debug('mysql: completed, returning') return status
KeyError
dataset/ETHPy150Open serverdensity/sd-agent-plugins/MySQL/MySQL.py/MySQL.run
3,129
def test_filestore(): from openid.store import filestore import tempfile import shutil try: temp_dir = tempfile.mkdtemp() except __HOLE__: import os temp_dir = os.tmpnam() os.mkdir(temp_dir) store = filestore.FileOpenIDStore(temp_dir) try: testStore(store) store.cleanup() except: raise else: shutil.rmtree(temp_dir)
AttributeError
dataset/ETHPy150Open CollabQ/CollabQ/openid/test/storetest.py/test_filestore
3,130
def test_sqlite(): from openid.store import sqlstore try: from pysqlite2 import dbapi2 as sqlite except __HOLE__: pass else: conn = sqlite.connect(':memory:') store = sqlstore.SQLiteStore(conn) store.createTables() testStore(store)
ImportError
dataset/ETHPy150Open CollabQ/CollabQ/openid/test/storetest.py/test_sqlite
3,131
def test_mysql(): from openid.store import sqlstore try: import MySQLdb except __HOLE__: pass else: db_user = 'openid_test' db_passwd = '' db_name = getTmpDbName() from MySQLdb.constants import ER # Change this connect line to use the right user and password conn = MySQLdb.connect(user=db_user, passwd=db_passwd, host = db_host) conn.query('CREATE DATABASE %s;' % db_name) try: conn.query('USE %s;' % db_name) # OK, we're in the right environment. Create store and # create the tables. store = sqlstore.MySQLStore(conn) store.createTables() # At last, we get to run the test. testStore(store) finally: # Remove the database. If you want to do post-mortem on a # failing test, comment out this line. conn.query('DROP DATABASE %s;' % db_name)
ImportError
dataset/ETHPy150Open CollabQ/CollabQ/openid/test/storetest.py/test_mysql
3,132
def test_postgresql(): """ Tests the PostgreSQLStore on a locally-hosted PostgreSQL database cluster, version 7.4 or later. To run this test, you must have: - The 'psycopg' python module (version 1.1) installed - PostgreSQL running locally - An 'openid_test' user account in your database cluster, which you can create by running 'createuser -Ad openid_test' as the 'postgres' user - Trust auth for the 'openid_test' account, which you can activate by adding the following line to your pg_hba.conf file: local all openid_test trust This test connects to the database cluster three times: - To the 'template1' database, to create the test database - To the test database, to run the store tests - To the 'template1' database once more, to drop the test database """ from openid.store import sqlstore try: import psycopg except __HOLE__: pass else: db_name = getTmpDbName() db_user = 'openid_test' # Connect once to create the database; reconnect to access the # new database. conn_create = psycopg.connect(database = 'template1', user = db_user, host = db_host) conn_create.autocommit() # Create the test database. cursor = conn_create.cursor() cursor.execute('CREATE DATABASE %s;' % (db_name,)) conn_create.close() # Connect to the test database. conn_test = psycopg.connect(database = db_name, user = db_user, host = db_host) # OK, we're in the right environment. Create the store # instance and create the tables. store = sqlstore.PostgreSQLStore(conn_test) store.createTables() # At last, we get to run the test. testStore(store) # Disconnect. conn_test.close() # It takes a little time for the close() call above to take # effect, so we'll wait for a second before trying to remove # the database. (Maybe this is because we're using a UNIX # socket to connect to postgres rather than TCP?) import time time.sleep(1) # Remove the database now that the test is over. conn_remove = psycopg.connect(database = 'template1', user = db_user, host = db_host) conn_remove.autocommit() cursor = conn_remove.cursor() cursor.execute('DROP DATABASE %s;' % (db_name,)) conn_remove.close()
ImportError
dataset/ETHPy150Open CollabQ/CollabQ/openid/test/storetest.py/test_postgresql
3,133
def setup_stub(): apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap() stub = datastore_file_stub.DatastoreFileStub('test','/dev/null', '/dev/null', trusted=True) apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub) apiproxy_stub_map.apiproxy.RegisterStub( 'user', user_service_stub.UserServiceStub()) apiproxy_stub_map.apiproxy.RegisterStub( 'memcache', memcache_stub.MemcacheServiceStub()) apiproxy_stub_map.apiproxy.RegisterStub( 'urlfetch', urlfetch_stub.URLFetchServiceStub()) apiproxy_stub_map.apiproxy.RegisterStub( 'taskqueue', taskqueue_stub.TaskQueueServiceStub()) try: apiproxy_stub_map.apiproxy.RegisterStub( 'images', images_stub.ImagesServiceStub()) except __HOLE__: pass
NameError
dataset/ETHPy150Open IanLewis/kay/kay/management/test.py/setup_stub
3,134
def runtest(target='', verbosity=0): suite = unittest.TestSuite() if target: tests_mod = import_string("%s.tests" % target) suite.addTest(unittest.defaultTestLoader.loadTestsFromModule( tests_mod)) else: for app_name in settings.INSTALLED_APPS: if app_name.startswith('kay.'): continue try: tests_mod = import_string("%s.tests" % app_name) except (ImportError, __HOLE__), e: logging.error("Loading module %s.tests failed: '%s'." % (app_name, e)) else: suite.addTest(unittest.defaultTestLoader.loadTestsFromModule( tests_mod)) unittest.TextTestRunner(verbosity=verbosity).run(suite)
AttributeError
dataset/ETHPy150Open IanLewis/kay/kay/management/test.py/runtest
3,135
def do_socks_handshake(self, addr, callback, error_callback=None, auth=None): """ Perform a SOCKSv5 handshake, and call callback when it's completed. If authorization data is provided, we'll log onto the server too. =============== ============ Argument Description =============== ============ addr The address for the proxy server to connect to. This must be a tuple of ``(hostname, port)``. callback A function to call when the handshake has completed. error_callback *Optional.* A function to be called if the handshake has failed. auth *Optional.* If provided, it must be a tuple of ``(username, password)``. =============== ============ """ if not self.connected: raise RuntimeError("Tried to start SOCKS handshake on disconnected %r." % self) # Build our on_read. def on_read(data): if not self._socks_state: if data[0] != SOCKS_VERSION: if error_callback: self._safely_call(error_callback, BadVersion("Expected version 5, got %d." % ord(data[0]))) self.close(False) return elif (auth and data[1] != '\x02') or (not auth and data[1] != '\x00'): if error_callback: self._safely_call(error_callback, NoAuthenticationMethods()) self.close(False) return if auth: self.write("\x01%d%s%d%s" % ( len(auth[0]), auth[0], len(auth[1]), auth[1])) self._socks_state = 1 else: self._socks_state = 1 self.on_read("%s\x00" % SOCKS_VERSION) elif self._socks_state == 1: if data[0] != SOCKS_VERSION: if error_callback: self._safely_call(error_callback, BadVersion("Expected version 5, got %d." % ord(data[0]))) self.close(False) return elif data[1] != '\x00': if error_callback: self._safely_call(error_callback, Unauthorized(data[1])) self.close(False) return self.write("%s\x01\x00\x03%s%s%s" % ( SOCKS_VERSION, chr(len(addr[0])), addr[0], struct.pack('!H', addr[1]) )) self._socks_state = 2 self.read_delimiter = 4 elif self._socks_state == 2: if data[0] != SOCKS_VERSION: if error_callback: self._safely_call(error_callback, BadVersion("Expected version 5, got %d." % ord(data[0]))) self.close(False) return elif data[1] != '\x00': if error_callback: self._safely_call(error_callback, Exception(data[1])) self.close(False) return self._socks_state = 4 if data[3] == '\x01': self._socks_fam = 1 self.read_delimiter = 4 elif data[3] == '\x03': self._socks_state = 3 self.read_delimiter = 1 self._socks_fam = 0 elif data[3] == '\x04': self.read_delimiter = 16 self._socks_fam = 2 self._socks_port = struct.unpack("!H", data[-2:]) elif self._socks_state == 3: if self.read_delimiter == 1: self._socks_state = 4 self.read_delimiter = ord(data[0]) elif self._socks_state == 4: if self._socks_fam == 1: data = socket.inet_ntop(socket.AF_INET, data) elif self._socks_fam == 2: try: data = socket.inet_ntop(socket.AF_INET6, data) except (__HOLE__, socket.error): pass self.remote_address = (data, self._socks_port) # Cleanup! self.on_read = self._socks_read self.read_delimiter = self._socks_delim del self._socks_read del self._socks_delim del self._socks_port del self._socks_state del self._socks_fam self._safely_call(callback) # Start doing it! self._socks_state = 0 self.write("%s\x01%s" % ( SOCKS_VERSION, '\x02' if auth else '\x00')) self._socks_read = self.on_read self._socks_delim = self.read_delimiter self.on_read = on_read self.read_delimiter = 2
AttributeError
dataset/ETHPy150Open ecdavis/pants/pants/contrib/socks.py/do_socks_handshake
3,136
def iterateMacs2Peaks(infile): '''iterate over peaks.xls file and return parsed data. pvalues and fdr are converted to values between 0 and 1 from their -log10 values. ''' for row in CSV.DictReader(infile, dialect='excel-tab'): # these are 1-based coordinates # macs can have negative start coordinates # start try: yield Macs2Peak._make( (row['chr'], max(int(row['start']) - 1, 0), int(row['end']), int(row['length']), float(row['pileup']), math.pow(10, -float(row['-log10(pvalue)'])), float(row['fold_enrichment']), math.pow(10, -float(row['-log10(qvalue)'])), row['name'])) except __HOLE__, msg: raise KeyError("%s: %s" % (msg, row))
KeyError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/WrapperMACS.py/iterateMacs2Peaks
3,137
def __init__(self, name, value, fname=None, filetype=None, filesize=None, quote=url_quote): self.quote = quote self.name = quote(name) if value is not None and not hasattr(value, 'read'): value = self.encode_unreadable_value(value) self.size = len(value) self.value = value if fname is not None: if isinstance(fname, unicode): fname = fname.encode("utf-8").encode("string_escape").replace('"', '\\"') else: fname = fname.encode("string_escape").replace('"', '\\"') self.fname = fname if filetype is not None: filetype = to_bytestring(filetype) self.filetype = filetype if isinstance(value, file) and filesize is None: try: value.flush() except __HOLE__: pass self.size = int(os.fstat(value.fileno())[6]) self._encoded_hdr = None self._encoded_bdr = None
IOError
dataset/ETHPy150Open benoitc/restkit/restkit/forms.py/BoundaryItem.__init__
3,138
def _clean_hosting_info(self): """Clean the hosting service information. If using a hosting service, this will validate that the data provided is valid on that hosting service. Then it will create an account and link it, if necessary, with the hosting service. """ hosting_type = self.cleaned_data['hosting_type'] if hosting_type == self.NO_HOSTING_SERVICE_ID: self.data['hosting_account'] = None self.cleaned_data['hosting_account'] = None return # This should have been caught during validation, so we can assume # it's fine. hosting_service_cls = get_hosting_service(hosting_type) assert hosting_service_cls # Validate that the provided tool is valid for the hosting service. tool_name = self.cleaned_data['tool'].name if tool_name not in hosting_service_cls.supported_scmtools: self.errors['tool'] = self.error_class([ _('This tool is not supported on the given hosting service') ]) return # Now make sure all the account info is correct. hosting_account = self.cleaned_data['hosting_account'] username = self.cleaned_data['hosting_account_username'] password = self.cleaned_data['hosting_account_password'] if hosting_service_cls.self_hosted: hosting_url = self.cleaned_data['hosting_url'] or None else: hosting_url = None if hosting_service_cls.supports_two_factor_auth: two_factor_auth_code = \ self.cleaned_data['hosting_account_two_factor_auth_code'] else: two_factor_auth_code = None if hosting_account and hosting_account.hosting_url != hosting_url: self.errors['hosting_account'] = self.error_class([ _('This account is not compatible with this hosting service ' 'configuration'), ]) return elif hosting_account and not username: username = hosting_account.username elif not hosting_account and not username: self.errors['hosting_account'] = self.error_class([ _('An account must be linked in order to use this hosting ' 'service'), ]) return if not hosting_account: # See if this account with the supplied credentials already # exists. If it does, we don't want to create a new entry. try: hosting_account = HostingServiceAccount.objects.get( service_name=hosting_type, username=username, hosting_url=hosting_url, local_site=self.local_site) except HostingServiceAccount.DoesNotExist: # That's fine. We're just going to create it later. pass plan = self.cleaned_data['repository_plan'] or self.DEFAULT_PLAN_ID # Set the main repository fields (Path, Mirror Path, etc.) based on # the field definitions in the hosting service. # # This will take into account the hosting service's form data for # the given repository plan, the main form data, and the hosting # account information. # # It's expected that the required fields will have validated by now. repository_form = self.repository_forms[hosting_type][plan] field_vars = repository_form.cleaned_data.copy() field_vars.update(self.cleaned_data) # If the hosting account needs to authorize and link with an external # service, attempt to do so and watch for any errors. # # If it doesn't need to link with it, we'll just create an entry # with the username and save it. if not hosting_account: hosting_account = HostingServiceAccount( service_name=hosting_type, username=username, hosting_url=hosting_url, local_site=self.local_site) if (hosting_service_cls.needs_authorization and not hosting_account.is_authorized): # Attempt to authorize the account. hosting_service = None plan = None if hosting_service_cls: hosting_service = hosting_service_cls(hosting_account) if hosting_service: plan = (self.cleaned_data['repository_plan'] or self.DEFAULT_PLAN_ID) repository_extra_data = self._build_repository_extra_data( hosting_service, hosting_type, plan) try: hosting_account.service.authorize( username, password, hosting_url=hosting_url, two_factor_auth_code=two_factor_auth_code, tool_name=tool_name, local_site_name=self.local_site_name, **repository_extra_data) except TwoFactorAuthCodeRequiredError as e: self.errors['hosting_account'] = \ self.error_class([six.text_type(e)]) hosting_info = self.hosting_service_info[hosting_type] hosting_info['needs_two_factor_auth_code'] = True return except AuthorizationError as e: self.errors['hosting_account'] = self.error_class([ _('Unable to link the account: %s') % e, ]) return except Exception as e: self.errors['hosting_account'] = self.error_class([ _('Unknown error when linking the account: %s') % e, ]) return if hosting_account: # Flag that we've linked the account. If there are any # validation errors, and this flag is set, we tell the user # that we successfully linked and they don't have to do it # again. self.hosting_account_linked = True hosting_account.save() self.data['hosting_account'] = hosting_account self.cleaned_data['hosting_account'] = hosting_account try: self.cleaned_data.update(hosting_service_cls.get_repository_fields( username=hosting_account.username, hosting_url=hosting_account.hosting_url, plan=plan, tool_name=tool_name, field_vars=field_vars)) except __HOLE__ as e: raise ValidationError([six.text_type(e)])
KeyError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/scmtools/forms.py/RepositoryForm._clean_hosting_info
3,139
def save(self, commit=True, *args, **kwargs): """Saves the repository. This will thunk out to the hosting service form to save any extra repository data used for the hosting service, and saves the repository plan, if any. """ repository = super(RepositoryForm, self).save(commit=False, *args, **kwargs) repository.password = self.cleaned_data['password'] or None bug_tracker_use_hosting = self.cleaned_data['bug_tracker_use_hosting'] repository.extra_data = { 'repository_plan': self.cleaned_data['repository_plan'], 'bug_tracker_use_hosting': bug_tracker_use_hosting, } hosting_type = self.cleaned_data['hosting_type'] service = get_hosting_service(hosting_type) if service and service.self_hosted: repository.extra_data['hosting_url'] = \ self.cleaned_data['hosting_url'] if self.cert: repository.extra_data['cert'] = self.cert try: repository.extra_data['use_ticket_auth'] = \ self.cleaned_data['use_ticket_auth'] except __HOLE__: pass if hosting_type in self.repository_forms: plan = (self.cleaned_data['repository_plan'] or self.DEFAULT_PLAN_ID) self.repository_forms[hosting_type][plan].save(repository) if not bug_tracker_use_hosting: bug_tracker_type = self.cleaned_data['bug_tracker_type'] if bug_tracker_type in self.bug_tracker_forms: plan = (self.cleaned_data['bug_tracker_plan'] or self.DEFAULT_PLAN_ID) self.bug_tracker_forms[bug_tracker_type][plan].save(repository) repository.extra_data.update({ 'bug_tracker_type': bug_tracker_type, 'bug_tracker_plan': plan, }) bug_tracker_service = get_hosting_service(bug_tracker_type) assert bug_tracker_service if bug_tracker_service.self_hosted: repository.extra_data['bug_tracker_hosting_url'] = \ self.cleaned_data['bug_tracker_hosting_url'] if bug_tracker_service.get_bug_tracker_requires_username(plan): repository.extra_data.update({ 'bug_tracker-hosting_account_username': self.cleaned_data[ 'bug_tracker_hosting_account_username'], }) if commit: repository.save() return repository
KeyError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/scmtools/forms.py/RepositoryForm.save
3,140
def _verify_repository_path(self): """ Verifies the repository path to check if it's valid. This will check if the repository exists and if an SSH key or HTTPS certificate needs to be verified. """ tool = self.cleaned_data.get('tool', None) if not tool: # This failed validation earlier, so bail. return scmtool_class = tool.get_scmtool_class() path = self.cleaned_data.get('path', '') username = self.cleaned_data['username'] password = self.cleaned_data['password'] if not path: self._errors['path'] = self.error_class( ['Repository path cannot be empty']) return hosting_type = self.cleaned_data['hosting_type'] hosting_service_cls = get_hosting_service(hosting_type) hosting_service = None plan = None if hosting_service_cls: hosting_service = hosting_service_cls( self.cleaned_data['hosting_account']) if hosting_service: plan = (self.cleaned_data['repository_plan'] or self.DEFAULT_PLAN_ID) repository_extra_data = self._build_repository_extra_data( hosting_service, hosting_type, plan) while 1: # Keep doing this until we have an error we don't want # to ignore, or it's successful. try: if hosting_service: hosting_service.check_repository( path=path, username=username, password=password, scmtool_class=scmtool_class, tool_name=tool.name, local_site_name=self.local_site_name, plan=plan, **repository_extra_data) else: scmtool_class.check_repository(path, username, password, self.local_site_name) # Success. break except BadHostKeyError as e: if self.cleaned_data['trust_host']: try: self.ssh_client.replace_host_key(e.hostname, e.raw_expected_key, e.raw_key) except __HOLE__ as e: raise ValidationError(e) else: self.hostkeyerror = e break except UnknownHostKeyError as e: if self.cleaned_data['trust_host']: try: self.ssh_client.add_host_key(e.hostname, e.raw_key) except IOError as e: raise ValidationError(e) else: self.hostkeyerror = e break except UnverifiedCertificateError as e: if self.cleaned_data['trust_host']: try: self.cert = scmtool_class.accept_certificate( path, username=username, password=password, local_site_name=self.local_site_name, certificate=e.certificate) except IOError as e: raise ValidationError(e) else: self.certerror = e break except AuthenticationError as e: if 'publickey' in e.allowed_types and e.user_key is None: self.userkeyerror = e break raise ValidationError(e) except Exception as e: try: text = six.text_type(e) except UnicodeDecodeError: text = six.text_type(e, 'ascii', 'replace') raise ValidationError(text)
IOError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/scmtools/forms.py/RepositoryForm._verify_repository_path
3,141
def setup(self): try: import yaml except __HOLE__: raise SkipTest()
ImportError
dataset/ETHPy150Open miracle2k/webassets/tests/test_loaders.py/TestYAML.setup
3,142
def setup(self): super(TestYAMLCustomFilters, self).setup() # If zope.dottedname is not installed, that's OK try: import zope.dottedname.resolve except __HOLE__: raise SkipTest() # Save off the original get_import_resolver self.original_resolver = YAMLLoader._get_import_resolver # Make a mock def mock(cls): raise ImportError self.mock_resolver = mock
ImportError
dataset/ETHPy150Open miracle2k/webassets/tests/test_loaders.py/TestYAMLCustomFilters.setup
3,143
def run(self, make): """ Run the application component. :param make: A factory that produces instances of :class:`autobahn.asyncio.wamp.ApplicationSession` when called with an instance of :class:`autobahn.wamp.types.ComponentConfig`. :type make: callable """ # 1) factory for use ApplicationSession def create(): cfg = ComponentConfig(self.realm, self.extra) try: session = make(cfg) except Exception: self.log.failure("App session could not be created! ") asyncio.get_event_loop().stop() else: return session isSecure, host, port, resource, path, params = parse_url(self.url) if self.ssl is None: ssl = isSecure else: if self.ssl and not isSecure: raise RuntimeError( 'ssl argument value passed to %s conflicts with the "ws:" ' 'prefix of the url argument. Did you mean to use "wss:"?' % self.__class__.__name__) ssl = self.ssl # 2) create a WAMP-over-WebSocket transport client factory transport_factory = WampWebSocketClientFactory(create, url=self.url, serializers=self.serializers) # 3) start the client loop = asyncio.get_event_loop() txaio.use_asyncio() txaio.config.loop = loop coro = loop.create_connection(transport_factory, host, port, ssl=ssl) (transport, protocol) = loop.run_until_complete(coro) # start logging txaio.start_logging(level='info') try: loop.add_signal_handler(signal.SIGTERM, loop.stop) except NotImplementedError: # signals are not available on Windows pass # 4) now enter the asyncio event loop try: loop.run_forever() except __HOLE__: # wait until we send Goodbye if user hit ctrl-c # (done outside this except so SIGTERM gets the same handling) pass # give Goodbye message a chance to go through, if we still # have an active session if protocol._session: loop.run_until_complete(protocol._session.leave()) loop.close()
KeyboardInterrupt
dataset/ETHPy150Open crossbario/autobahn-python/autobahn/asyncio/wamp.py/ApplicationRunner.run
3,144
def remove(self, case): """Remove the given Case from this CaseArray.""" try: values = [case[n] for n in self._names] except __HOLE__: raise KeyError("Case to be removed is not a member of this CaseArray") self._values.remove(values)
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/caseset.py/CaseArray.remove
3,145
def __getitem__(self, key): """If key is a varname or expression, returns a list of all of the recorded values corresponding to that string. If key is an integer index 'i', returns a Case object containing the data for the i'th recorded case. """ if isinstance(key, basestring): # return all of the values for the given name try: idx = self._names.index(key) except __HOLE__: raise KeyError("CaseSet has no input or outputs named %s" % key) return [lst[idx] for lst in self._values] else: # key is the case number return self._case_from_values(self._values[key])
ValueError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/caseset.py/CaseArray.__getitem__
3,146
def _get_case_data(self, driver, inputs, outputs): """Return a list of values for the case in the same order as our values. Raise a KeyError if any of our names are missing from the case. """ in_names, out_names = self._cfg_map[driver] vals = [] for name in self._names: try: i = in_names.index(name) except __HOLE__: try: i = out_names.index(name) except ValueError: raise KeyError("input or output is missing from case: %s" % name) else: vals.append(outputs[i]) else: vals.append(inputs[i]) return vals
ValueError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/caseset.py/CaseArray._get_case_data
3,147
def __contains__(self, case): if not isinstance(case, Case): return False try: values = [case[n] for n in self._names] except __HOLE__: return False for val in self._values: if val == values: return True return False
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/caseset.py/CaseArray.__contains__
3,148
def __contains__(self, case): if not isinstance(case, Case): return False try: values = tuple(case[n] for n in self._names) except __HOLE__: return False return values in self._tupset
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/caseset.py/CaseSet.__contains__
3,149
def remove(self, case): try: values = tuple(case[n] for n in self._names) except __HOLE__: raise KeyError("Case to be removed is not a member of this CaseSet") self._tupset.remove(values) self._values.remove(values)
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/caseset.py/CaseSet.remove
3,150
def __call__(self): T = current.T auth = current.auth s3db = current.s3db db = current.db request = current.request appname = request.application response = current.response s3 = response.s3 settings = current.deployment_settings view = path.join(request.folder, "modules", "templates", "EC", "views", "index.html") try: # Pass view as file not str to work in compiled mode response.view = open(view, "rb") except __HOLE__: from gluon.http import HTTP raise HTTP(404, "Unable to open Custom View: %s" % view) title = settings.get_system_name() response.title = title # Menu Boxes menu_btns = [#div, label, app, function ["sit", T("Request"), "req", "req"], ["dec", T("Send"), "inv", "send"], ["res", T("Receive"), "inv", "recv"] ] menu_divs = {"facility": DIV( H3("Map"), _id = "facility_box", _class = "menu_box"), "sit": DIV( _id = "menu_div_sit", _class = "menu_div"), "dec": DIV( _id = "menu_div_dec", _class = "menu_div"), "res": DIV( _id = "menu_div_res", _class = "menu_div"), } for div, label, app, function in menu_btns: if settings.has_module(app): # @ToDo: Also check permissions (e.g. for anonymous users) menu_divs[div].append(A(DIV(label, _class = "menu-btn-r"), _class = "menu-btn-l", _href = URL(app,function) ) ) div_arrow = DIV(IMG(_src = "/%s/static/img/arrow_blue_right.png" % \ appname), _class = "div_arrow") sit_dec_res_box = DIV(menu_divs["sit"], div_arrow, menu_divs["dec"], div_arrow, menu_divs["res"], _id = "sit_dec_res_box", _class = "menu_box fleft swidth" #div_additional, ) facility_box = menu_divs["facility"] facility_box.append(A(IMG(_src = "/%s/static/img/map_icon_128.png" % \ appname), _href = URL(c="gis", f="index"), _title = T("Map") ) ) # Check logged in AND permissions _s3 = current.session.s3 AUTHENTICATED = _s3.system_roles.AUTHENTICATED roles = _s3.roles if AUTHENTICATED in roles and \ auth.s3_has_permission("read", s3db.org_organisation): auth.permission.controller = "org" auth.permission.function = "site" permitted_facilities = auth.permitted_facilities(redirect_on_error=False) if permitted_facilities: facilities = s3db.org_SiteRepresent().bulk(permitted_facilities) facility_list = [(fac, facilities[fac]) for fac in facilities] facility_list = sorted(facility_list, key=lambda fac: fac[1]) facility_opts = [OPTION(fac[1], _value=fac[0]) for fac in facility_list] manage_facility_box = DIV(H3(T("Manage Your Facilities")), SELECT(_id = "manage_facility_select", _style = "max-width:360px;", *facility_opts ), A(T("Go"), _href = URL(c="default", f="site", args=[facility_list[0][0]]), #_disabled = "disabled", _id = "manage_facility_btn", _class = "action-btn" ), _id = "manage_facility_box", _class = "menu_box fleft" ) s3.jquery_ready.append( '''$('#manage_facility_select').change(function(){ $('#manage_facility_btn').attr('href',S3.Ap.concat('/default/site/',$('#manage_facility_select').val())) })''') else: manage_facility_box = "" else: manage_facility_box = "" # Login/Registration forms self_registration = settings.get_security_self_registration() registered = False login_form = None login_div = None register_form = None register_div = None if AUTHENTICATED not in roles: # This user isn't yet logged-in if request.cookies.has_key("registered"): # This browser has logged-in before registered = True if self_registration: # Provide a Registration box on front page register_form = auth.register() register_div = DIV(H3(T("Register")), P(XML(T("If you would like to help, then please %(sign_up_now)s") % \ dict(sign_up_now=B(T("sign-up now")))))) if request.env.request_method == "POST": post_script = \ '''$('#register_form').removeClass('hide') $('#login_form').addClass('hide')''' else: post_script = "" register_script = \ '''$('#register-btn').attr('href','#register') $('#login-btn').attr('href','#login') %s $('#register-btn').click(function(){ $('#register_form').removeClass('hide') $('#login_form').addClass('hide') }) $('#login-btn').click(function(){ $('#register_form').addClass('hide') $('#login_form').removeClass('hide') })''' % post_script s3.jquery_ready.append(register_script) # Provide a login box on front page auth.messages.submit_button = T("Login") login_form = auth.login(inline=True) login_div = DIV(H3(T("Login")), P(XML(T("Registered users can %(login)s to access the system") % \ dict(login=B(T("login")))))) return dict(title = title, sit_dec_res_box = sit_dec_res_box, facility_box = facility_box, manage_facility_box = manage_facility_box, self_registration=self_registration, registered=registered, login_form=login_form, login_div=login_div, register_form=register_form, register_div=register_div ) # END =========================================================================
IOError
dataset/ETHPy150Open sahana/eden/modules/templates/EC/controllers.py/index.__call__
3,151
def _get_parsed_sent(self, grid, pos_in_tree, tagset=None):
    words = self._get_column(grid, self._colmap['words'])
    pos_tags = self._get_column(grid, self._colmap['pos'])
    if tagset and tagset != self._tagset:
        pos_tags = [map_tag(self._tagset, tagset, t) for t in pos_tags]
    parse_tags = self._get_column(grid, self._colmap['tree'])

    treestr = ''
    for (word, pos_tag, parse_tag) in zip(words, pos_tags, parse_tags):
        if word == '(': word = '-LRB-'
        if word == ')': word = '-RRB-'
        if pos_tag == '(': pos_tag = '-LRB-'
        if pos_tag == ')': pos_tag = '-RRB-'

        (left, right) = parse_tag.split('*')
        right = right.count(')') * ')'  # only keep ')'.
        treestr += '%s (%s %s) %s' % (left, pos_tag, word, right)
    try:
        tree = self._tree_class.parse(treestr)
    except (__HOLE__, IndexError):
        tree = self._tree_class.parse('(%s %s)' % (self._root_label, treestr))

    if not pos_in_tree:
        for subtree in tree.subtrees():
            for i, child in enumerate(subtree):
                if (isinstance(child, Tree) and len(child) == 1 and
                        isinstance(child[0], compat.string_types)):
                    subtree[i] = (child[0], child.label())

    return tree
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/corpus/reader/conll.py/ConllCorpusReader._get_parsed_sent
3,152
def isStr(s):
    t = ''
    try:
        t += s
    except __HOLE__:
        return 0
    return 1
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Signature/test_pkcs1_15.py/isStr
3,153
def handle(self, sample_name=None, percent=None, *args, **options):
    list_sample = options['list_sample']

    if list_sample:
        print('Samples:')
        for sample in Sample.objects.iterator():
            print('%s: %s%%' % (sample.name, sample.percent))
        return

    if not (sample_name and percent):
        raise CommandError('You need to specify a sample '
                           'name and percentage.')

    try:
        percent = float(percent)
        if not (0.0 <= percent <= 100.0):
            raise ValueError()
    except __HOLE__:
        raise CommandError('You need to enter a valid percentage value.')

    if options['create']:
        sample, created = Sample.objects.get_or_create(
            name=sample_name, defaults={'percent': 0})
        if created:
            print('Creating sample: %s' % sample_name)
    else:
        try:
            sample = Sample.objects.get(name=sample_name)
        except Sample.DoesNotExist:
            raise CommandError('This sample does not exist.')

    sample.percent = percent
    sample.save()
ValueError
dataset/ETHPy150Open jsocol/django-waffle/waffle/management/commands/waffle_sample.py/Command.handle
3,154
def _is_int(string):
    try:
        int(string)
        return True
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open blue-yonder/pyscaffold/pyscaffold/contrib/pbr/pbr/version.py/_is_int
3,155
@classmethod
def from_pip_string(klass, version_string):
    """Create a SemanticVersion from a pip version string.

    This method will parse a version like 1.3.0 into a SemanticVersion.

    This method is responsible for accepting any version string that any
    older version of pbr ever created.

    Therefore versions like 1.3.0a1 are handled, parsed into a canonical
    form and then output - resulting in 1.3.0.0a1.
    Pre pbr-semver dev versions like 0.10.1.3.g83bef74 will be parsed but
    output as 0.10.1.dev3.g83bef74.

    :raises ValueError: Never tagged versions sdisted by old pbr result in
        just the git hash, e.g. '1234567' which poses a substantial problem
        since they collide with the semver versions when all the digits are
        numerals. Such versions will result in a ValueError being thrown if
        any non-numeric digits are present. They are an exception to the
        general case of accepting anything we ever output, since they were
        never intended and would permanently mess up versions on PyPI if
        ever released - we're treating that as a critical bug that we ever
        made them and have stopped doing that.
    """
    try:
        return klass._from_pip_string_unsafe(version_string)
    except __HOLE__:
        raise ValueError("Invalid version %r" % version_string)
IndexError
dataset/ETHPy150Open blue-yonder/pyscaffold/pyscaffold/contrib/pbr/pbr/version.py/SemanticVersion.from_pip_string
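For reference, a minimal usage sketch of the parser documented above -- the import path is assumed from the info line, and release_string() is assumed to be pbr's canonical-output method:

from pbr.version import SemanticVersion  # import path assumed

v = SemanticVersion.from_pip_string('1.3.0a1')
print(v.release_string())  # '1.3.0.0a1' per the docstring's canonicalization example

try:
    SemanticVersion.from_pip_string('not-a-version')
except ValueError as e:
    print(e)  # "Invalid version 'not-a-version'"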
3,156
@classmethod
def _from_pip_string_unsafe(klass, version_string):
    # Versions need to start numerically, ignore if not
    if not version_string[:1].isdigit():
        raise ValueError("Invalid version %r" % version_string)
    input_components = version_string.split('.')
    # decimals first (keep pre-release and dev/hashes to the right)
    components = [c for c in input_components if c.isdigit()]
    digit_len = len(components)
    if digit_len == 0:
        raise ValueError("Invalid version %r" % version_string)
    elif digit_len < 3:
        if (digit_len < len(input_components) and
                input_components[digit_len][0].isdigit()):
            # Handle X.YaZ - Y is a digit, not a lead-in to pre-release.
            mixed_component = input_components[digit_len]
            last_component = ''.join(itertools.takewhile(
                lambda x: x.isdigit(), mixed_component))
            components.append(last_component)
            input_components[digit_len:digit_len + 1] = [
                last_component, mixed_component[len(last_component):]]
            digit_len += 1
        components.extend([0] * (3 - digit_len))
    components.extend(input_components[digit_len:])
    major = int(components[0])
    minor = int(components[1])
    dev_count = None
    post_count = None
    prerelease_type = None
    prerelease = None

    def _parse_type(segment):
        # Discard leading digits (the 0 in 0a1)
        isdigit = operator.methodcaller('isdigit')
        segment = ''.join(itertools.dropwhile(isdigit, segment))
        isalpha = operator.methodcaller('isalpha')
        prerelease_type = ''.join(itertools.takewhile(isalpha, segment))
        prerelease = segment[len(prerelease_type)::]
        return prerelease_type, int(prerelease)
    if _is_int(components[2]):
        patch = int(components[2])
    else:
        # legacy version e.g. 1.2.0a1 (canonical is 1.2.0.0a1)
        # or 1.2.dev4.g1234 or 1.2.b4
        patch = 0
        components[2:2] = [0]
    remainder = components[3:]
    remainder_starts_with_int = False
    try:
        if remainder and int(remainder[0]):
            remainder_starts_with_int = True
    except __HOLE__:
        pass
    if remainder_starts_with_int:
        # old dev format - 0.1.2.3.g1234
        dev_count = int(remainder[0])
    else:
        if remainder and (remainder[0][0] == '0' or
                          remainder[0][0] in ('a', 'b', 'r')):
            # Current RC/beta layout
            prerelease_type, prerelease = _parse_type(remainder[0])
            remainder = remainder[1:]
        while remainder:
            component = remainder[0]
            if component.startswith('dev'):
                dev_count = int(component[3:])
            elif component.startswith('post'):
                dev_count = None
                post_count = int(component[4:])
            else:
                raise ValueError(
                    'Unknown remainder %r in %r'
                    % (remainder, version_string))
            remainder = remainder[1:]
    result = SemanticVersion(
        major, minor, patch, prerelease_type=prerelease_type,
        prerelease=prerelease, dev_count=dev_count)
    if post_count:
        if dev_count:
            raise ValueError(
                'Cannot combine postN and devN - no mapping in %r'
                % (version_string,))
        result = result.increment().to_dev(post_count)
    return result
ValueError
dataset/ETHPy150Open blue-yonder/pyscaffold/pyscaffold/contrib/pbr/pbr/version.py/SemanticVersion._from_pip_string_unsafe
3,157
def ancestors(node):
    '''
    Returns the list of all nodes dominating the given tree node.
    This method will not work with leaf nodes, since there is no way
    to recover the parent.
    '''
    results = []
    try:
        current = node.parent()
    except __HOLE__:
        # if node is a leaf, we cannot retrieve its parent
        return results
    while current:
        results.append(current)
        current = current.parent()
    return results
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/ancestors
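For reference, a minimal usage sketch of the ancestors() helper above -- assuming it is importable from nltk.tgrep as this path suggests; the tree and expected labels are illustrative only:

from nltk.tree import ParentedTree
from nltk.tgrep import ancestors  # import path assumed from the info line

t = ParentedTree.fromstring('(S (NP (D the) (N dog)) (VP barks))')
d = t[0][0]  # the (D the) subtree
print([st.label() for st in ancestors(d)])  # ['NP', 'S'] -- walks parent() to the root
print(ancestors('the'))  # [] -- a bare leaf string has no .parent(), the AttributeError case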
3,158
def unique_ancestors(node):
    '''
    Returns the list of all nodes dominating the given node, where
    there is only a single path of descent.
    '''
    results = []
    try:
        current = node.parent()
    except __HOLE__:
        # if node is a leaf, we cannot retrieve its parent
        return results
    while current and len(current) == 1:
        results.append(current)
        current = current.parent()
    return results
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/unique_ancestors
3,159
def _descendants(node):
    '''
    Returns the list of all nodes which are descended from the given
    tree node in some way.
    '''
    try:
        treepos = node.treepositions()
    except __HOLE__:
        return []
    return [node[x] for x in treepos[1:]]
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_descendants
3,160
def _leftmost_descendants(node):
    '''
    Returns the set of all nodes descended in some way through
    left branches from this node.
    '''
    try:
        treepos = node.treepositions()
    except __HOLE__:
        return []
    return [node[x] for x in treepos[1:] if all(y == 0 for y in x)]
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_leftmost_descendants
3,161
def _rightmost_descendants(node):
    '''
    Returns the set of all nodes descended in some way through
    right branches from this node.
    '''
    try:
        rightmost_leaf = max(node.treepositions())
    except __HOLE__:
        return []
    return [node[rightmost_leaf[:i]] for i in range(1, len(rightmost_leaf) + 1)]
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_rightmost_descendants
3,162
def _before(node):
    '''
    Returns the set of all nodes that are before the given node.
    '''
    try:
        pos = node.treeposition()
        tree = node.root()
    except __HOLE__:
        return []
    return [tree[x] for x in tree.treepositions()
            if x[:len(pos)] < pos[:len(x)]]
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_before
3,163
def _immediately_before(node):
    '''
    Returns the set of all nodes that are immediately before the given
    node.

    Tree node A immediately precedes node B if the last terminal symbol
    (word) produced by A immediately precedes the first terminal symbol
    produced by B.
    '''
    try:
        pos = node.treeposition()
        tree = node.root()
    except __HOLE__:
        return []
    # go "upwards" from pos until there is a place we can go to the left
    idx = len(pos) - 1
    while 0 <= idx and pos[idx] == 0:
        idx -= 1
    if idx < 0:
        return []
    pos = list(pos[:idx + 1])
    pos[-1] -= 1
    before = tree[pos]
    return [before] + _rightmost_descendants(before)
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_immediately_before
3,164
def _after(node):
    '''
    Returns the set of all nodes that are after the given node.
    '''
    try:
        pos = node.treeposition()
        tree = node.root()
    except __HOLE__:
        return []
    return [tree[x] for x in tree.treepositions()
            if x[:len(pos)] > pos[:len(x)]]
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_after
3,165
def _immediately_after(node):
    '''
    Returns the set of all nodes that are immediately after the given
    node.

    Tree node A immediately follows node B if the first terminal symbol
    (word) produced by A immediately follows the last terminal symbol
    produced by B.
    '''
    try:
        pos = node.treeposition()
        tree = node.root()
        current = node.parent()
    except __HOLE__:
        return []
    # go "upwards" from pos until there is a place we can go to the
    # right
    idx = len(pos) - 1
    while 0 <= idx and pos[idx] == len(current) - 1:
        idx -= 1
        current = current.parent()
    if idx < 0:
        return []
    pos = list(pos[:idx + 1])
    pos[-1] += 1
    after = tree[pos]
    return [after] + _leftmost_descendants(after)
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/_immediately_after
3,166
def tgrep_positions(pattern, trees, search_leaves=True):
    """
    Return the tree positions in the trees which match the given pattern.

    :param pattern: a tgrep search pattern
    :type pattern: str or output of tgrep_compile()
    :param trees: a sequence of NLTK trees (usually ParentedTrees)
    :type trees: iter(ParentedTree) or iter(Tree)
    :param search_leaves: whether to return matching leaf nodes
    :type search_leaves: bool
    :rtype: iter(tree positions)
    """
    if isinstance(pattern, (binary_type, text_type)):
        pattern = tgrep_compile(pattern)

    for tree in trees:
        try:
            if search_leaves:
                positions = tree.treepositions()
            else:
                positions = treepositions_no_leaves(tree)
            yield [position for position in positions
                   if pattern(tree[position])]
        except __HOLE__:
            yield []
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/tgrep_positions
3,167
def tgrep_nodes(pattern, trees, search_leaves=True):
    """
    Return the tree nodes in the trees which match the given pattern.

    :param pattern: a tgrep search pattern
    :type pattern: str or output of tgrep_compile()
    :param trees: a sequence of NLTK trees (usually ParentedTrees)
    :type trees: iter(ParentedTree) or iter(Tree)
    :param search_leaves: whether to return matching leaf nodes
    :type search_leaves: bool
    :rtype: iter(tree nodes)
    """
    if isinstance(pattern, (binary_type, text_type)):
        pattern = tgrep_compile(pattern)

    for tree in trees:
        try:
            if search_leaves:
                positions = tree.treepositions()
            else:
                positions = treepositions_no_leaves(tree)
            yield [tree[position] for position in positions
                   if pattern(tree[position])]
        except __HOLE__:
            yield []
AttributeError
dataset/ETHPy150Open nltk/nltk/nltk/tgrep.py/tgrep_nodes
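A small sketch of driving the two generators above -- assuming nltk.tgrep exposes them as shown; the pattern and positions are illustrative:

from nltk.tree import ParentedTree
from nltk.tgrep import tgrep_positions, tgrep_nodes

trees = [ParentedTree.fromstring('(S (NP (D the) (N dog)) (VP (V barks)))')]
# Each generator yields one list per input tree.
print(list(tgrep_positions('NP', trees)))  # [[(0,)]] -- tree positions of NP nodes
print(list(tgrep_nodes('NP', trees)))      # [[ParentedTree('NP', ...)]] -- the subtrees themselves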
3,168
def _show_missing_code_main(config, session):
    """Preparing fixture duplicates for output."""
    tw = py.io.TerminalWriter()
    session.perform_collect()

    fm = session._fixturemanager

    if config.option.features is None:
        tw.line("The --feature parameter is required.", red=True)
        session.exitstatus = 100
        return

    features, scenarios, steps = parse_feature_files(config.option.features)

    for item in session.items:
        scenario = getattr(item.obj, "__scenario__", None)
        if scenario:
            if scenario in scenarios:
                scenarios.remove(scenario)
            for step in scenario.steps:
                fixturedefs = _find_step_fixturedef(fm, item, step.name, step.type)
                if fixturedefs:
                    try:
                        steps.remove(step)
                    except __HOLE__:
                        pass

    for scenario in scenarios:
        for step in scenario.steps:
            if step.background is None:
                steps.remove(step)

    grouped_steps = group_steps(steps)
    print_missing_code(scenarios, grouped_steps)

    if scenarios or steps:
        session.exitstatus = 100
ValueError
dataset/ETHPy150Open pytest-dev/pytest-bdd/pytest_bdd/generation.py/_show_missing_code_main
3,169
@property
def data(self):
    try:
        local = storage[get_ident()]
    except __HOLE__:
        raise ThreadLocalMissing()
    return local.setdefault(id(self), {})
KeyError
dataset/ETHPy150Open cosmic-api/cosmic.py/cosmic/globals.py/ThreadLocalDict.data
3,170
def to_unicode(s):
    """ Convert to unicode, raise exception with instructive error
    message if s is not unicode, ascii, or utf-8. """
    if not isinstance(s, unicode):
        if not isinstance(s, str):
            raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s))
        try:
            s = s.decode('utf-8')
        except __HOLE__, le:
            raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,))
    return s
UnicodeDecodeError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/to_unicode
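A short Python 2 sketch of the contract above (this oauth2 module predates Python 3); both TypeError paths follow directly from the function body:

print to_unicode(u'caf\xe9')     # unicode in, unicode out
print to_unicode('caf\xc3\xa9')  # utf-8 bytes are decoded to u'caf\xe9'

try:
    to_unicode('\xff\xfe')       # bytes that are not valid utf-8
except TypeError as e:
    print e                      # message wraps the underlying UnicodeDecodeError

try:
    to_unicode(42)               # not a string at all
except TypeError as e:
    print e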
3,171
def to_unicode_optional_iterator(x):
    """
    Raise TypeError if x is a str containing non-utf8 bytes or if x is
    an iterable which contains such a str.
    """
    if isinstance(x, basestring):
        return to_unicode(x)

    try:
        l = list(x)
    except __HOLE__, e:
        assert 'is not iterable' in str(e)
        return x
    else:
        return [ to_unicode(e) for e in l ]
TypeError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/to_unicode_optional_iterator
3,172
def to_utf8_optional_iterator(x):
    """
    Raise TypeError if x is a str or if x is an iterable which
    contains a str.
    """
    if isinstance(x, basestring):
        return to_utf8(x)

    try:
        l = list(x)
    except __HOLE__, e:
        assert 'is not iterable' in str(e)
        return x
    else:
        return [ to_utf8_if_string(e) for e in l ]
TypeError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/to_utf8_optional_iterator
3,173
@staticmethod
def from_string(s):
    """Deserializes a token from a string like one returned by
    `to_string()`."""

    if not len(s):
        raise ValueError("Invalid parameter string.")

    params = parse_qs(s, keep_blank_values=False)
    if not len(params):
        raise ValueError("Invalid parameter string.")

    try:
        key = params['oauth_token'][0]
    except Exception:
        raise ValueError("'oauth_token' not found in OAuth request.")

    try:
        secret = params['oauth_token_secret'][0]
    except Exception:
        raise ValueError("'oauth_token_secret' not found in "
                         "OAuth request.")

    token = Token(key, secret)
    try:
        token.callback_confirmed = params['oauth_callback_confirmed'][0]
    except __HOLE__:
        pass  # 1.0, no callback confirmed.
    return token
KeyError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/Token.from_string
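A round-trip sketch for the deserializer above (Python 2, import path assumed from the info line; the query-string keys come straight from the function body):

from oauth2 import Token  # import path is an assumption

t = Token.from_string('oauth_token=abc&oauth_token_secret=xyz')
print t.key, t.secret        # abc xyz -- callback_confirmed stays unset for a 1.0 string

t2 = Token.from_string('oauth_token=abc&oauth_token_secret=xyz'
                       '&oauth_callback_confirmed=true')
print t2.callback_confirmed  # 'true' -- the 1.0a confirmation flag is preserved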
3,174
def setter(attr):
    name = attr.__name__

    def getter(self):
        try:
            return self.__dict__[name]
        except __HOLE__:
            raise AttributeError(name)

    def deleter(self):
        del self.__dict__[name]

    return property(getter, attr, deleter)
KeyError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/setter
3,175
def to_url(self):
    """Serialize as a URL for a GET request."""
    base_url = urlparse.urlparse(self.url)
    try:
        query = base_url.query
    except AttributeError:
        # must be python <2.5
        query = base_url[4]
    query = parse_qs(query)
    for k, v in self.items():
        query.setdefault(k, []).append(v)

    try:
        scheme = base_url.scheme
        netloc = base_url.netloc
        path = base_url.path
        params = base_url.params
        fragment = base_url.fragment
    except __HOLE__:
        # must be python <2.5
        scheme = base_url[0]
        netloc = base_url[1]
        path = base_url[2]
        params = base_url[3]
        fragment = base_url[5]

    url = (scheme, netloc, path, params,
           urllib.urlencode(query, True), fragment)
    return urlparse.urlunparse(url)
AttributeError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/Request.to_url
3,176
def get_normalized_parameters(self):
    """Return a string that contains the parameters that must be signed."""
    items = []
    for key, value in self.iteritems():
        if key == 'oauth_signature':
            continue
        # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
        # so we unpack sequence values into multiple items for sorting.
        if isinstance(value, basestring):
            items.append((to_utf8_if_string(key), to_utf8(value)))
        else:
            try:
                value = list(value)
            except __HOLE__, e:
                assert 'is not iterable' in str(e)
                items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
            else:
                items.extend((to_utf8_if_string(key), to_utf8_if_string(item))
                             for item in value)

    # Include any query string parameters from the provided URL
    query = urlparse.urlparse(self.url)[4]

    url_items = self._split_url_string(query).items()
    url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items
                 if k != 'oauth_signature']
    items.extend(url_items)

    items.sort()
    encoded_str = urllib.urlencode(items)
    # Encode signature parameters per Oauth Core 1.0 protocol
    # spec draft 7, section 3.6
    # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
    # Spaces must be encoded with "%20" instead of "+"
    return encoded_str.replace('+', '%20').replace('%7E', '~')
TypeError
dataset/ETHPy150Open ibagrak/algae/gae/libs/oauth2/__init__.py/Request.get_normalized_parameters
3,177
def test_default_namespace_searching():
    ns = options.Namespace()
    ns.val = 0
    ns.foo = options.Bunch(foo=1, bar=2, baz=3, blorg=6)
    ns.bar = options.Bunch(foo=4, baz=5, bop=7, val=8)
    assert ns.val == 0, "Top namespace has priority"
    assert ns.foo == options.Bunch(foo=1, bar=2, baz=3, blorg=6)
    assert ns.foo.foo == 1
    assert ns.bar.foo == 4
    assert ns.baz == 5
    assert ns.blorg == 6
    try:
        ns['does not exist']
        assert False, "expected key error for missing item"
    except __HOLE__:
        pass
    del ns['val']
    assert ns.val == 8, "Now getting val from inner dict"
    del ns.bar.val
    try:
        a = ns['val']
        assert False, "expected exception for deleted item %s" % (ns)
    except KeyError:
        pass
    del ns.foo
    assert ns._sections == ['bar']
KeyError
dataset/ETHPy150Open paver/paver/paver/tests/test_options.py/test_default_namespace_searching
3,178
def test_search_order_is_adjustable():
    ns = options.Namespace(
        bar=options.Bunch(val=1, blorg=4)
    )
    ns.baz = options.Bunch(val=2, bop=5)
    ns.foo = options.Bunch(val=3, bam=6)
    assert ns.blorg == 4
    assert ns.val == 3
    ns.order('baz')
    assert ns.val == 2
    assert ns.bop == 5
    try:
        ns.bam
        assert False, "expected attribute error for item not in search"
    except __HOLE__:
        pass
    ns.order('bar', 'baz')
    assert ns.val == 1
    assert ns.blorg == 4
    assert ns.bop == 5
    ns.order('baz', add_rest=True)
    assert ns.val == 2
    assert ns.bam == 6
AttributeError
dataset/ETHPy150Open paver/paver/paver/tests/test_options.py/test_search_order_is_adjustable
3,179
def intOrNone(s):
    try:
        return int(s)
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open twisted/nevow/nevow/accessors.py/intOrNone
3,180
@property
def colors(self):
    """Return the current foreground and background colors."""
    try:
        return get_console_info(self._kernel32, self._stream_handle)
    except __HOLE__:
        return WINDOWS_CODES['white'], WINDOWS_CODES['black']
IOError
dataset/ETHPy150Open Robpol86/colorclass/colorclass/windows.py/WindowsStream.colors
3,181
@property
def top(self):
    """The topmost item on the stack.  If the stack is empty,
    `None` is returned.
    """
    try:
        return self._stack.get(get_ident(), [])[-1]
    except (__HOLE__, IndexError):
        return None
AttributeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalStack.top
3,182
def _get_current_object(self):
    """Return the current object.  This is useful if you want the real
    object behind the proxy at a time for performance reasons or because
    you want to pass the object into a different context.
    """
    if callable(self.__target):
        return self.__target()
    try:
        return getattr(self.__target, self.__name__)
    except __HOLE__:
        raise RuntimeError('no object bound to %s' % self.__name__)
AttributeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalProxy._get_current_object
3,183
@property
def __dict__(self):
    try:
        return self._get_current_object().__dict__
    except __HOLE__:
        raise AttributeError('__dict__')
RuntimeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalProxy.__dict__
3,184
def __repr__(self):
    try:
        obj = self._get_current_object()
    except __HOLE__:
        return '<%s unbound>' % self.__class__.__name__
    return repr(obj)
RuntimeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalProxy.__repr__
3,185
def __bool__(self):
    try:
        return bool(self._get_current_object())
    except __HOLE__:
        return False
RuntimeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalProxy.__bool__
3,186
def __unicode__(self):
    try:
        return unicode(self._get_current_object())
    except __HOLE__:
        return repr(self)
RuntimeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalProxy.__unicode__
3,187
def __dir__(self):
    try:
        return dir(self._get_current_object())
    except __HOLE__:
        return []
RuntimeError
dataset/ETHPy150Open zeaphoo/cocopot/cocopot/local.py/LocalProxy.__dir__
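To tie the LocalProxy methods above together, a minimal sketch of proxying a callable target -- the import path and the usual werkzeug-style attribute forwarding are assumptions, not shown in these records:

from cocopot.local import LocalProxy  # import path per this source file (assumption)

class Config(object):
    debug = True

_real = Config()
config = LocalProxy(lambda: _real)  # callable target: re-resolved on every access

print(config.debug)  # True -- attribute access forwarded to the real object (assumed __getattr__)
print(bool(config))  # True -- __bool__ proxies to the target, False only when unbound
print(repr(config))  # repr() of the underlying Config instance, per __repr__ above
print(dir(config))   # attribute list of the target, per __dir__ above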
3,188
def validate_answer(answer, question):
    """
    Return True if answer is a valid response to question, False if not.
    (question is expected to be the XFormsResponse object for the question)
    """
    if question.event.datatype == "select":
        try:
            assert answer is not None
            answer = int(answer)
            assert answer >= 1 and answer <= len(question.event.choices)
            return True
        except (ValueError, AssertionError):
            return False
    else:
        try:
            assert answer is not None
            if isinstance(answer, basestring):
                assert len(answer.strip()) > 0
            return True
        except __HOLE__:
            return False
AssertionError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/ivr/api.py/validate_answer
3,189
def add_metadata(call_log_entry, duration=None):
    try:
        call_log_entry.duration = int(round(float(duration)))
        call_log_entry.save()
    except (__HOLE__, ValueError):
        pass
TypeError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/ivr/api.py/add_metadata
3,190
def search(self, doc):
    for path in self.paths:
        try:
            return reduce(lambda obj, attr: obj[attr], path, doc)
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/ex-submodules/couchexport/deid.py/JSONPath.search
3,191
def set_level(self, level):
    try:
        index = Logger.Levels.index(level)
    except __HOLE__:
        return

    self.level = index
ValueError
dataset/ETHPy150Open chrippa/livestreamer/src/livestreamer/logger.py/Logger.set_level
3,192
def do_GET(self):
    """GET method implementation for BaseHTTPRequestHandler."""
    if not self._client_allowed():
        return

    try:
        (_, _, path, query, _) = urlparse.urlsplit(self.path)
        params = urlparse.parse_qs(query)
        # Give each handler a chance to respond.
        for prefix, handler in self._GET_handlers:
            if self._maybe_handle(prefix, handler, path, params):
                return
        # If no path specified, default to showing the list of all runs.
        if path == '/':
            self._handle_runs('', {})
            return

        self._send_content('Invalid GET request {}'.format(self.path),
                           'text/html', code=400)
    except (__HOLE__, ValueError):
        pass  # Printing these errors gets annoying, and there's nothing to do about them anyway.
        # sys.stderr.write('Invalid GET request {}'.format(self.path))
IOError
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/reporting/reporting_server.py/PantsHandler.do_GET
3,193
def drop(self):
    try:
        with transaction.atomic():
            self.drop_atomic()
    except __HOLE__:
        self.drop_atomic()
        transaction.commit_unless_managed(using=self.using)
AttributeError
dataset/ETHPy150Open django-denorm/django-denorm/denorm/db/sqlite3/triggers.py/TriggerSet.drop
3,194
def install(self):
    try:
        with transaction.atomic():
            self.install_atomic()
    except __HOLE__:
        self.install_atomic()
        transaction.commit_unless_managed(using=self.using)
AttributeError
dataset/ETHPy150Open django-denorm/django-denorm/denorm/db/sqlite3/triggers.py/TriggerSet.install
3,195
def _MathParser(math_stack):
    """Defines the entire math expression for BigQuery queries.

    Converts the expression into postfix notation. The stack is reversed
    (i.e. the last element acts as the top of the stack). Actions do not
    occur unless parseString is called on the BNF returned. The actions
    will modify the original list that was passed when the BNF was
    generated. The <math_stack> will return the single expression
    converted to postfix.

    Arguments:
      math_stack: Returns postfix notation of one math expression.

    Returns:
      A BNF of a math/string expression.
    """
    def PushAggregation(tokens):
        """Pushes aggregation functions onto the stack.

        When the aggregation is pushed, the name is rewritten. The label is
        prepended with AGGREGATION_ to signal that an aggregation is
        occurring. Following this prefix is an integer, which represents the
        number of comma separated arguments that were provided. Finally, the
        name of the function is appended to the label.

        For most functions, the aggregation name is simply appended. However,
        there are special exceptions for COUNT. A normal count function is
        rewritten as AGGREGATION_i_COUNT. However, a count with the distinct
        keyword is rewritten to AGGREGATION_i_DISTINCTCOUNT.

        Args:
          tokens: The function name and arguments in a list object.
        """
        function_name = tokens[0]
        # Rename count with distinct keyword as distinctcount.
        if function_name == 'COUNT':
            if 'DISTINCT' in list(tokens):
                function_name = 'DISTINCTCOUNT'
        # Assume all aggregation functions have at least one argument.
        # If a function has n commas, then it has n + 1 arguments.
        num_args = 1
        for token in tokens:
            if token == ',':
                num_args += 1
        math_stack.append(util.AggregationFunctionToken(function_name, num_args))

    def PushFunction(tokens):
        """Push a function token onto the stack.

        Args:
          tokens: list of all tokens, tokens[0] is the function name str.
        """
        math_stack.append(util.BuiltInFunctionToken(tokens[0]))

    def PushSingleToken(tokens):
        """Push the topmost token onto the stack."""
        if util.IsFloat(tokens[0]):
            try:
                token = int(tokens[0])
            except __HOLE__:
                token = float(tokens[0])
        elif tokens[0].startswith('\'') or tokens[0].startswith('"'):
            token = util.StringLiteralToken(tokens[0])
        elif tokens[0].lower() in util.BIGQUERY_CONSTANTS:
            token = util.LiteralToken(tokens[0].lower(),
                                      util.BIGQUERY_CONSTANTS[tokens[0].lower()])
        else:
            token = util.FieldToken(tokens[0])
        math_stack.append(token)

    def PushCountStar(tokens):
        if tokens[0] != '*':
            raise ValueError('Not a count star argument.')
        math_stack.append(util.CountStarToken())

    def PushUnaryOperators(tokens):
        # The list must be reversed since unary operations are unwrapped in
        # the other direction. An example is ~-1. The negation occurs before
        # the bit inversion.
        for i in reversed(range(0, len(tokens))):
            if tokens[i] == '-':
                math_stack.append(int('-1'))
                math_stack.append(util.OperatorToken('*', 2))
            elif tokens[i] == '~':
                math_stack.append(util.OperatorToken('~', 1))
            elif tokens[i].lower() == 'not':
                math_stack.append(util.OperatorToken('not', 1))

    def PushBinaryOperator(tokens):
        math_stack.append(util.OperatorToken(tokens[0], 2))

    # Miscellaneous symbols and keywords.
    comma = pp.Literal(',')
    decimal = pp.Literal('.')
    exponent_literal = pp.CaselessLiteral('E')
    lp = pp.Literal('(')
    rp = pp.Literal(')')
    count_star = pp.Literal('*')
    distinct_keyword = pp.CaselessKeyword('DISTINCT')

    # Any non-space containing sequence of characters that must begin with
    # an alphabetical character and contain alphanumeric characters
    # and underscores (i.e. function or variable names).
    label = pp.Word(pp.alphas, pp.alphas + pp.nums + '_' + '.')
    # A single/double quote surrounded string.
    string = pp.quotedString
    # Various number representations.
    integer = pp.Word(pp.nums)
    decimal_type1 = pp.Combine(integer + decimal + pp.Optional(integer))
    decimal_type2 = pp.Combine(decimal + integer)
    real = decimal_type1 | decimal_type2
    exponent = exponent_literal + pp.Word('+-' + pp.nums, pp.nums)
    number_without_exponent = real | integer
    number = pp.Combine(number_without_exponent + pp.Optional(exponent))
    integer_argument = pp.Word(pp.nums)
    integer_argument.setParseAction(PushSingleToken)

    # Forward declaration for recursive grammar. We assume that
    # full_expression can represent any expression that is valid.
    full_expression = pp.Forward()

    # Aggregation function definitions.
    avg_function = pp.CaselessKeyword('AVG') + lp + full_expression + rp
    count_star.setParseAction(PushCountStar)
    count_argument = ((pp.Optional(distinct_keyword) + full_expression) |
                      count_star)
    count_function = (pp.CaselessKeyword('COUNT') + lp + count_argument +
                      pp.Optional(comma + integer_argument) + rp)
    quantiles_function = (pp.CaselessKeyword('QUANTILES') + lp +
                          full_expression +
                          pp.Optional(comma + integer_argument) + rp)
    stddev_function = pp.CaselessKeyword('STDDEV') + lp + full_expression + rp
    variance_function = (pp.CaselessKeyword('VARIANCE') + lp +
                         full_expression + rp)
    last_function = pp.CaselessKeyword('LAST') + lp + full_expression + rp
    max_function = pp.CaselessKeyword('MAX') + lp + full_expression + rp
    min_function = pp.CaselessKeyword('MIN') + lp + full_expression + rp
    nth_function = (pp.CaselessKeyword('NTH') + lp + integer_argument +
                    comma + full_expression + rp)
    group_concat_function = (pp.CaselessKeyword('GROUP_CONCAT') + lp +
                             full_expression + rp)
    sum_function = pp.CaselessKeyword('SUM') + lp + full_expression + rp
    top_function = (pp.CaselessKeyword('TOP') + lp + full_expression +
                    pp.Optional(comma + integer_argument +
                                pp.Optional(comma + integer_argument)) + rp)
    aggregate_functions = (avg_function |
                           count_function |
                           quantiles_function |
                           stddev_function |
                           variance_function |
                           last_function |
                           max_function |
                           min_function |
                           nth_function |
                           group_concat_function |
                           sum_function |
                           top_function)
    aggregate_functions.setParseAction(PushAggregation)

    functions_arguments = pp.Optional(full_expression +
                                      pp.ZeroOrMore(comma.suppress() +
                                                    full_expression))
    functions = label + lp + functions_arguments + rp
    functions.setParseAction(PushFunction)

    literals = number | string | label
    literals.setParseAction(PushSingleToken)

    # Any expression that can be modified by a unary operator.
    # We include strings (even though they can't be modified by any unary
    # operator) since atoms do not necessitate modification by unary
    # operators. These errors will be caught by the interpreter.
    atom = ((lp + full_expression + rp) |
            aggregate_functions |
            functions |
            literals)

    unary_operators = (pp.CaselessLiteral('+') |
                       pp.CaselessLiteral('-') |
                       pp.CaselessLiteral('~') |
                       pp.CaselessKeyword('not'))
    # Take all unary operators preceding atom (possibly many).
    current_expression = (pp.ZeroOrMore(unary_operators) + atom.suppress())
    current_expression.setParseAction(PushUnaryOperators)

    # All operators in the same set have the same precedence.
    # Precedence is top to bottom.
    binary_operators = [
        (pp.CaselessLiteral('*') | pp.CaselessLiteral('/') |
         pp.CaselessLiteral('%')),
        pp.CaselessLiteral('+') | pp.CaselessLiteral('-'),
        pp.CaselessLiteral('>>') | pp.CaselessLiteral('<<'),
        (pp.CaselessLiteral('<=') | pp.CaselessLiteral('>=') |
         pp.CaselessLiteral('<') | pp.CaselessLiteral('>')),
        (pp.CaselessLiteral('==') | pp.CaselessLiteral('=') |
         pp.CaselessLiteral('!=')),
        pp.CaselessKeyword('is') | pp.CaselessKeyword('contains'),
        pp.CaselessLiteral('&'),
        pp.CaselessLiteral('^'),
        pp.CaselessLiteral('|'),
        pp.CaselessKeyword('and'),
        pp.CaselessKeyword('or'),
    ]

    # Take the operator set of the highest precedence that has not been
    # parsed. Find and collapse all operators of the set. Thus, order of
    # operations is not broken. Equivalent to recursive descent parsing.
    # Below code is equivalent to:
    # expression = expression + pp.ZeroOrMore(op_level1 + expression)
    # expression = expression + pp.ZeroOrMore(op_level2 + expression)
    # ...
    for operator_set in binary_operators:
        # Represents the _i-1 ai part of the expression that is added to the
        # current expression.
        operator_expression = operator_set + current_expression
        # Push only the operator, both atoms will have already been pushed.
        operator_expression.setParseAction(PushBinaryOperator)
        # pylint: disable=g-no-augmented-assignment
        current_expression = (current_expression +
                              pp.ZeroOrMore(operator_expression))

    # pylint: disable=pointless-statement
    full_expression << current_expression
    return full_expression
ValueError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/query_parser.py/_MathParser
3,196
def ParseQuery(query):
    """Parses the entire query.

    Arguments:
      query: The command the user sent that needs to be parsed.

    Returns:
      Dictionary mapping clause names to their arguments.

    Raises:
      bigquery_client.BigqueryInvalidQueryError: When invalid query is given.
    """
    clause_arguments = {
        'SELECT': [],
        'AS': {},
        'WITHIN': {},
        'FROM': [],
        'WHERE': [],
        'HAVING': [],
        'GROUP BY': [],
        'ORDER BY': [],
        'LIMIT': [],
    }
    try:
        _EBQParser(clause_arguments).parseString(query)
    except __HOLE__ as e:
        raise bigquery_client.BigqueryInvalidQueryError(e, None, None, None)
    return clause_arguments
ValueError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/query_parser.py/ParseQuery
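A sketch only: ParseQuery is internal to the ebq client, and the shape of the populated argument lists is not shown in this record -- only the clause keys, which come straight from the dict literal above:

clauses = ParseQuery("SELECT a, SUM(b) FROM t WHERE a > 0 GROUP BY a")
print(sorted(clauses))
# ['AS', 'FROM', 'GROUP BY', 'HAVING', 'LIMIT', 'ORDER BY', 'SELECT', 'WHERE', 'WITHIN']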
3,197
def estimate_params(img):
    img_green = img[..., 1]  # supposedly using the green channel is a good idea. alternatively we could use luma.
    # this seems to work well enough.

    out = pysex.run(img_green, params=[
            'X_IMAGE', 'Y_IMAGE',  # barycenter
            # 'XMIN_IMAGE', 'XMAX_IMAGE', 'YMIN_IMAGE', 'YMAX_IMAGE',  # enclosing rectangle
            # 'XPEAK_IMAGE', 'YPEAK_IMAGE',  # location of maximal intensity
            'A_IMAGE', 'B_IMAGE', 'THETA_IMAGE',  # ellipse parameters
            'PETRO_RADIUS',
            # 'KRON_RADIUS', 'PETRO_RADIUS', 'FLUX_RADIUS', 'FWHM_IMAGE',  # various radii
        ], conf_args={'DETECT_THRESH': DETECT_THRESH})

    # x and y are flipped for some reason.
    # theta should be 90 - theta.
    # we convert these here so we can plot stuff with matplotlib easily.
    try:
        ys = out['X_IMAGE'].tonumpy()
        xs = out['Y_IMAGE'].tonumpy()
        as_ = out['A_IMAGE'].tonumpy()
        bs = out['B_IMAGE'].tonumpy()
        thetas = 90 - out['THETA_IMAGE'].tonumpy()
        # kron_radii = out['KRON_RADIUS'].tonumpy()
        petro_radii = out['PETRO_RADIUS'].tonumpy()
        # flux_radii = out['FLUX_RADIUS'].tonumpy()
        # fwhms = out['FWHM_IMAGE'].tonumpy()

        # detect the most salient galaxy
        # take into account size and centrality
        surface_areas = np.pi * (as_ * bs)
        centralities = np.exp(-((xs - 211.5)**2 + (ys - 211.5)**2) / SIGMA2)  # 211.5, 211.5 is the center of the image

        # salience is proportional to surface area, with a gaussian prior on the distance to the center.
        saliences = surface_areas * centralities
        most_salient_idx = np.argmax(saliences)

        x = xs[most_salient_idx]
        y = ys[most_salient_idx]
        a = as_[most_salient_idx]
        b = bs[most_salient_idx]
        theta = thetas[most_salient_idx]
        # kron_radius = kron_radii[most_salient_idx]
        petro_radius = petro_radii[most_salient_idx]
        # flux_radius = flux_radii[most_salient_idx]
        # fwhm = fwhms[most_salient_idx]

    except __HOLE__:
        # sometimes these are empty (no objects found), use defaults in that case
        x = 211.5
        y = 211.5
        a = np.nan  # dunno what this has to be, deal with it later
        b = np.nan  # same
        theta = np.nan  # same
        # kron_radius = np.nan
        petro_radius = np.nan
        # flux_radius = np.nan
        # fwhm = np.nan

    # return (x, y, a, b, theta, flux_radius, kron_radius, petro_radius, fwhm)
    return (x, y, a, b, theta, petro_radius)
TypeError
dataset/ETHPy150Open benanne/kaggle-galaxies/extract_pysex_params_gen2.py/estimate_params
3,198
def __init__(self, *args, **kwargs):
    super(ListResource, self).__init__(*args, **kwargs)

    try:
        self.key
    except __HOLE__:
        self.key = self.name.lower()
AttributeError
dataset/ETHPy150Open balanced/status.balancedpayments.com/venv/lib/python2.7/site-packages/twilio/rest/resources/base.py/ListResource.__init__
3,199
def ip(value):
    '''Validate an IP address (both IPv4 and IPv6)'''
    try:
        return ipv4(value)
    except ValueError:
        pass
    try:
        return ipv6(value)
    except __HOLE__:
        raise ValueError('{0} is not a valid ip'.format(value))
ValueError
dataset/ETHPy150Open noirbizarre/flask-restplus/flask_restplus/inputs.py/ip
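A short sketch of the validator's contract (import path per the info line above; behaviour follows directly from the function body):

from flask_restplus.inputs import ip

print(ip('192.168.0.1'))  # valid IPv4 -> returned unchanged
print(ip('::1'))          # valid IPv6 -> returned unchanged
try:
    ip('999.1.2.3')
except ValueError as e:
    print(e)              # 999.1.2.3 is not a valid ip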