Columns:
    Unnamed: 0  int64   (row index, 0 .. 10k)
    function    string  (lengths 79 .. 138k)
    label       string  (20 classes)
    info        string  (lengths 42 .. 261)

Each record below lists: row index, function (with __HOLE__ masking the exception type), label (the masked exception type), and info (source path in the dataset).
2,900
def _k8s_object_to_st2_trigger(self, k8s_object):
    # Define some variables
    try:
        resource_type = k8s_object['type']
        object_kind = k8s_object['object']['kind']
        name = k8s_object['object']['metadata']['name']
        namespace = k8s_object['object']['metadata']['namespace']
        uid = k8s_object['object']['metadata']['uid']
        labels_data = k8s_object['object']['metadata']['labels']
    except __HOLE__:
        msg = 'One of "type", "kind", "name", "namespace" or "uid" or "labels" ' + \
              'do not exist in the object. Incoming object=%s' % k8s_object
        self._log.exception(msg)
        raise
    else:
        payload = self._build_a_trigger(resource_type=resource_type,
                                        name=name,
                                        labels=labels_data,
                                        namespace=namespace,
                                        object_kind=object_kind,
                                        uid=uid)
        self._log.debug('Trigger payload: %s.' % payload)
        return payload
KeyError
dataset/ETHPy150Open StackStorm/st2contrib/packs/kubernetes/sensors/third_party_resource.py/ThirdPartyResource._k8s_object_to_st2_trigger
2,901
@classmethod
def get_size(cls, value):
    """
    Predefined sizes:

    ======== ======== =========
    size     width    height
    ======== ======== =========
    tiny     420      315
    small    480      360
    medium   640      480
    large    960      720
    huge     1280     960
    ======== ======== =========

    You can also use custom size - in format ``WIDTHxHEIGHT``
    (eg. ``500x400``).

    :type value: str

    :return: Returns tuple with (width, height) values.
    :rtype: tuple[int, int]
    """
    sizes = {
        'tiny': (420, 315),
        'small': (480, 360),
        'medium': (640, 480),
        'large': (960, 720),
        'huge': (1280, 960),
    }
    value = value or cls.default_size
    if value in sizes:
        return sizes[value]
    try:
        size = cls.re_size.match(value)
        return size.group('width'), size.group('height')
    except __HOLE__:
        raise TemplateSyntaxError(
            'Incorrect size.\nPossible format is WIDTHxHEIGHT or using '
            'predefined size ({sizes}).'.format(sizes=', '.join(sizes.keys()))
        )
AttributeError
dataset/ETHPy150Open yetty/django-embed-video/embed_video/templatetags/embed_video_tags.py/VideoNode.get_size
2,902
def test_file_isatty(name):
    if not os.path.exists(name):
        return
    try:
        test_isatty(name, file(name))
    except __HOLE__, e:
        print e
        # XXX Jython prints 'no such file or directory' - probably
        # 'permission denied' but Java doesn't understand?
IOError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_isatty.py/test_file_isatty
2,903
def __iter__(self):
    self.generator = self.readlines('\n')
    has_next = True
    while has_next:
        try:
            chunk = six.next(self.generator)
            yield chunk
        except __HOLE__:
            has_next = False
            self.close()
StopIteration
dataset/ETHPy150Open spotify/luigi/luigi/contrib/webhdfs.py/ReadableWebHdfsFile.__iter__
2,904
def memo_key(args, kwargs):
    result = (args, frozenset(kwargs.items()))
    try:
        hash(result)
    except __HOLE__:
        result = tuple(map(id, args)), str(kwargs)
    return result
TypeError
dataset/ETHPy150Open blaze/cachey/cachey/cache.py/memo_key
2,905
def get(self, item, default=None):
    # overloading this method is needed to force the dict to go through
    # the timetable check
    try:
        return self[item]
    except __HOLE__:
        return default
KeyError
dataset/ETHPy150Open phaethon/scapy/scapy/config.py/CacheInstance.get
2,906
def __setitem__(self, item, v):
    try:
        self._timetable[item] = time.time()
    except __HOLE__:
        pass
    dict.__setitem__(self, item, v)
AttributeError
dataset/ETHPy150Open phaethon/scapy/scapy/config.py/CacheInstance.__setitem__
2,907
def first(self):
    """Returns the first element from the database result or None.
    """
    try:
        return next(self)
    except __HOLE__:
        return None
StopIteration
dataset/ETHPy150Open anandology/broadgauge/broadgauge/models.py/ResultSet.first
2,908
def get_value(self, key, args, kwargs):
    # Try standard formatting, then return 'unknown key'
    try:
        return Formatter.get_value(self, key, args, kwargs)
    except __HOLE__:
        return self.default
KeyError
dataset/ETHPy150Open artirix/logcabin/logcabin/event.py/DefaultFormatter.get_value
2,909
def __init__(self, path, update_cache):
    self.fullpath = os.path.join(path, 'cache')
    self.update_cache = update_cache
    self.expire_after = timedelta(days=180).total_seconds()
    self.time = time.time()
    self.dirty = False
    self.cache = {}
    # this new set is to avoid doing the same query multiple
    # times during the same run while using update_cache
    self.new = set()
    try:
        with open(self.fullpath) as file_:
            self.cache = json.load(file_)
    except (__HOLE__, ValueError):
        pass
IOError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/Cache.__init__
2,910
def save(self):
    '''Save the cache dict as json string to a file.

    Clean expired entries before saving and use a temporary
    file to avoid data loss on interruption.
    '''
    if not self.dirty:
        return
    self.clean()
    print("Saving cache... ", end='')
    dirname, basename = os.path.split(self.fullpath)
    try:
        with NamedTemporaryFile(prefix=basename + '.tmp_',
                                dir=dirname, delete=False) as tmpfile:
            tmpfile.write(json.dumps(self.cache))
            os.fsync(tmpfile)
        # seems atomic rename here is not possible on windows
        # http://docs.python.org/2/library/os.html#os.rename
        if os.name == 'nt' and os.path.isfile(self.fullpath):
            os.remove(self.fullpath)
        os.rename(tmpfile.name, self.fullpath)
        self.time = time.time()
        self.dirty = False
        size_mb = os.path.getsize(self.fullpath) / 2 ** 20
        print(" done! (%d entries, %.2f MB)" % (len(self.cache), size_mb))
    except __HOLE__:
        if os.path.isfile(tmpfile.name):
            os.remove(tmpfile.name)
KeyboardInterrupt
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/Cache.save
2,911
def _request_json(self, url, params, method='GET'):
    '''Return a json response from a request.'''
    res = self._request(url, params, method=method)
    try:
        return res.json()
    except __HOLE__ as err:
        self.log.debug(res.text)
        raise DataProviderError("json request: %s" % err.message)
ValueError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/DataProvider._request_json
2,912
def query(self, query):
    '''Perform a real DataProvider query.'''
    res = None
    if query.type == 'artist':
        try:
            # query by mbid
            if query.mbid_artist:
                res = self.query_by_mbid(query.type, query.mbid_artist)
        except NotImplementedError:
            pass
        if not res:
            res = self.query_artist(query.artist)
    elif query.type == 'album':
        try:
            # query by mbid
            if query.mbid_relgrp:
                res = self.query_by_mbid(query.type, query.mbid_relgrp)
            if not res and query.mbid_album:
                res = self.query_by_mbid(query.type, query.mbid_album)
        except __HOLE__:
            pass
        if not res:
            res = self.query_album(query.album, query.artist,
                                   query.year, query.releasetype)
    # preprocess tags
    for result in res or []:
        result['tags'] = self._preprocess_tags(result['tags'])
    return res
NotImplementedError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/DataProvider.query
2,913
def __init__(self, conf):
    super(Discogs, self).__init__()
    # http://www.discogs.com/developers/#header:home-rate-limiting
    self.rate_limit = 3.0
    # OAuth1 authentication
    import rauth
    discogs = rauth.OAuth1Service(
        consumer_key='sYGBZLljMPsYUnmGOzTX',
        consumer_secret='TtuLoHxEGvjDDOVMgmpgpXPuxudHvklk',
        request_token_url='https://api.discogs.com/oauth/request_token',
        access_token_url='https://api.discogs.com/oauth/access_token',
        authorize_url='https://www.discogs.com/oauth/authorize')
    # read token from config file
    token = (conf.get('discogs', 'token'),
             conf.get('discogs', 'secret'))
    # get from user
    if not token or not all(token):
        req_token, req_secret = discogs.get_request_token(headers=HEADERS)
        print('Discogs requires authentication with your own account.\n'
              'Disable discogs in the config file or use this link to '
              'authenticate:\n%s' % discogs.get_authorize_url(req_token))
        data = {'oauth_verifier': raw_input('Verification code: ')}
        try:
            token = discogs.get_access_token(req_token, req_secret,
                                             data=data, headers=HEADERS)
        except __HOLE__ as err:
            self.log.critical(err.message)
            exit()
        # save token to config file
        if not conf.has_section('discogs'):
            conf.add_section('discogs')
        conf.set('discogs', 'token', token[0])
        conf.set('discogs', 'secret', token[1])
        conf.save()
    self.session = discogs.get_session(token)
    self.session.headers.update(HEADERS)
    # avoid filling cache with unusable entries
    if requests_cache \
            and not hasattr(self.session.cache, '_ignored_parameters'):
        self.session._is_cache_disabled = True  # pylint: disable=W0212
KeyError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/Discogs.__init__
2,914
def _request_html(self, url, params, method='GET'):
    '''Return a html response from a request.'''
    from lxml import html
    res = self._request(url, params, method=method)
    if res.status_code == 404:
        return None
    try:
        return html.fromstring(res.text)
    except __HOLE__ as err:
        self.log.debug(res.text)
        raise DataProviderError("html request: %s" % err.message)
ValueError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/RateYourMusic._request_html
2,915
def _query(self, type_, searchterm):
    '''Search RateYourMusic.'''

    def match(str_a, str_b):
        '''Return True if str_a and str_b are quite similar.'''
        import difflib
        return difflib.SequenceMatcher(
            None, str_a.lower(), str_b.lower()).quick_ratio() > 0.9

    tree = self._request_html(
        'http://rateyourmusic.com/httprequest',
        {'type': type_, 'searchterm': searchterm,
         'rym_ajax_req': 1, 'page': 1, 'action': 'Search'},
        method='POST')
    # get first good enough result
    for result in tree.xpath('//tr[@class="infobox"]'):
        try:
            name = result.xpath('.//a[@class="searchpage"]/text()')[0]
            if type_ in 'ay' and not match(name, searchterm):
                continue
            elif type_ == 'l':
                artist = result.xpath(
                    './/td[2]//td[1]/a[@class="artist"]/text()')[0]
                if not match(artist + ' ' + name, searchterm):
                    continue
            url = result.xpath('.//a[@class="searchpage"]/@href')[0]
        except __HOLE__:
            continue
        return self._scrap_url(url)
    return None
IndexError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/RateYourMusic._query
2,916
def _query(self, params):
    '''Query What.CD API.'''
    self.login()
    try:
        result = self._request_json('https://what.cd/ajax.php', params)
    except requests.exceptions.TooManyRedirects:
        # whatcd session expired
        self.session.cookies.set('session', None)
        self.login()
    try:
        response = result['response']
    except __HOLE__:
        raise DataProviderError('request failure')
    return response
KeyError
dataset/ETHPy150Open YetAnotherNerd/whatlastgenre/wlg/dataprovider.py/WhatCD._query
2,917
def is_numeric(s):
    try:
        int(s)
        return True
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry_selection.py/is_numeric
2,918
def _watch_file(self, filepath, trigger_event=True):
    """Adds the file's modified time into its internal watchlist."""
    is_new = filepath not in self._watched_files
    if trigger_event:
        if is_new:
            self.trigger_created(filepath)
        else:
            self.trigger_modified(filepath)
    try:
        self._watched_files[filepath] = self._get_modified_time(filepath)
    except __HOLE__:
        return  # didn't happen
OSError
dataset/ETHPy150Open jeffh/sniffer/sniffer/scanner/base.py/PollingScanner._watch_file
2,919
def __init__(self, initlist=[]):
    """
    Items in initlist can either be
    - tuples of (key,values)
    - or plain values
    Keys can be None or empty strings in item tuples.
    """
    self.values = []
    self.keys = []
    for idx, item in enumerate(initlist):
        try:
            key, value = item
            key = None if key == '' else key
        except (TypeError, __HOLE__):
            value = item
            key = None
        finally:
            self.values.append(value)
            self.keys.append(key)
ValueError
dataset/ETHPy150Open ralhei/pyRserve/pyRserve/taggedContainers.py/TaggedList.__init__
2,920
def __getitem__(self, idx_or_name):
    try:
        return numpy.ndarray.__getitem__(self, idx_or_name)
    except:
        pass
    try:
        return numpy.ndarray.__getitem__(self, self.attr.index(idx_or_name))
    except __HOLE__:
        raise KeyError('No key "%s" available for array' % idx_or_name)
ValueError
dataset/ETHPy150Open ralhei/pyRserve/pyRserve/taggedContainers.py/TaggedArray.__getitem__
2,921
def is_correct(self):
    try:
        abs_path = get_absolute_path(self.__str__())
        return access(abs_path, W_OK)
    except (AttributeError, __HOLE__):
        # TODO: These are thrown by get_absolute_path when called on None
        # and probably shouldn't be needed
        return False
TypeError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/models/gamedetails.py/PathDetail.is_correct
2,922
def is_correct(self):
    try:
        path = get_absolute_path(self.game.path)
        path = join_path(path, self.image_path)
    except (AttributeError, __HOLE__):
        # TODO: These are thrown by get_absolute_path when called on None
        # and probably shouldn't be needed
        return None
    else:
        return access(path, R_OK)
TypeError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/models/gamedetails.py/ImageDetail.is_correct
2,923
def _has_changed(self, initial, data):
    try:
        input_format = formats.get_format('DATE_INPUT_FORMATS')[0]
        initial = datetime.datetime.strptime(initial, input_format).date()
    except (__HOLE__, ValueError):
        pass
    return super(DateInput, self)._has_changed(
        self._format_value(initial), data
    )
TypeError
dataset/ETHPy150Open gregmuellegger/django-floppyforms/floppyforms/widgets.py/DateInput._has_changed
2,924
def _has_changed(self, initial, data):
    try:
        input_format = formats.get_format('DATETIME_INPUT_FORMATS')[0]
        initial = datetime.datetime.strptime(initial, input_format)
    except (TypeError, __HOLE__):
        pass
    return super(DateTimeInput, self)._has_changed(
        self._format_value(initial), data
    )
ValueError
dataset/ETHPy150Open gregmuellegger/django-floppyforms/floppyforms/widgets.py/DateTimeInput._has_changed
2,925
def _has_changed(self, initial, data):
    try:
        input_format = formats.get_format('TIME_INPUT_FORMATS')[0]
        initial = datetime.datetime.strptime(initial, input_format).time()
    except (__HOLE__, ValueError):
        pass
    return super(TimeInput, self)._has_changed(
        self._format_value(initial), data
    )
TypeError
dataset/ETHPy150Open gregmuellegger/django-floppyforms/floppyforms/widgets.py/TimeInput._has_changed
2,926
def _format_value(self, value):
    value = value[0]
    try:
        value = {True: '2', False: '3', '2': '2', '3': '3'}[value]
    except __HOLE__:
        value = '1'
    return value
KeyError
dataset/ETHPy150Open gregmuellegger/django-floppyforms/floppyforms/widgets.py/NullBooleanSelect._format_value
2,927
def render(self, name, value, attrs=None, extra_context={}):
    try:
        year_val, month_val, day_val = value.year, value.month, value.day
    except __HOLE__:
        year_val = month_val = day_val = None
        if isinstance(value, six.string_types):
            if settings.USE_L10N:
                try:
                    input_format = formats.get_format(
                        'DATE_INPUT_FORMATS'
                    )[0]
                    v = datetime.datetime.strptime(value, input_format)
                    year_val, month_val, day_val = v.year, v.month, v.day
                except ValueError:
                    pass
            else:
                match = RE_DATE.match(value)
                if match:
                    year_val, month_val, day_val = map(int, match.groups())

    context = self.get_context(name, value, attrs=attrs,
                               extra_context=extra_context)
    context['year_choices'] = [(i, i) for i in self.years]
    context['year_val'] = year_val
    context['month_choices'] = list(MONTHS.items())
    context['month_val'] = month_val
    context['day_choices'] = [(i, i) for i in range(1, 32)]
    context['day_val'] = day_val
    # Theoretically the widget should use self.is_required to determine
    # whether the field is required. For some reason this widget gets a
    # required parameter. The Django behaviour is preferred in this
    # implementation.
    # Django also adds none_value only if there is no value. The choice
    # here is to treat the Django behaviour as a bug: if the value isn't
    # required, then it can be unset.
    if self.required is False:
        context['year_choices'].insert(0, self.none_value)
        context['month_choices'].insert(0, self.none_value)
        context['day_choices'].insert(0, self.none_value)
    return loader.render_to_string(self.template_name, context)
AttributeError
dataset/ETHPy150Open gregmuellegger/django-floppyforms/floppyforms/widgets.py/SelectDateWidget.render
2,928
def value_from_datadict(self, data, files, name):
    y = data.get(self.year_field % name)
    m = data.get(self.month_field % name)
    d = data.get(self.day_field % name)
    if y == m == d == "0":
        return None
    if y and m and d:
        if settings.USE_L10N:
            input_format = formats.get_format('DATE_INPUT_FORMATS')[0]
            try:
                date_value = datetime.date(int(y), int(m), int(d))
            except __HOLE__:
                return '%s-%s-%s' % (y, m, d)
            else:
                date_value = datetime_safe.new_date(date_value)
                return date_value.strftime(input_format)
        else:
            return '%s-%s-%s' % (y, m, d)
    return data.get(name, None)
ValueError
dataset/ETHPy150Open gregmuellegger/django-floppyforms/floppyforms/widgets.py/SelectDateWidget.value_from_datadict
2,929
def _wait_for_container_running(self, name):
    # we bump to 20 minutes here to match the timeout on the router
    # and in the app unit files
    try:
        self._wait_for_job_state(name, JobState.up)
    except __HOLE__:
        raise RuntimeError('container failed to start')
RuntimeError
dataset/ETHPy150Open deis/deis/controller/scheduler/fleet.py/FleetHTTPClient._wait_for_container_running
2,930
def state(self, name):
    """Display the given job's running state."""
    systemdActiveStateMap = {
        'active': 'up',
        'reloading': 'down',
        'inactive': 'created',
        'failed': 'crashed',
        'activating': 'down',
        'deactivating': 'down',
    }
    try:
        # NOTE (bacongobbler): this call to ._get_unit() acts as a
        # pre-emptive check to determine if the job no longer exists
        # (will raise a RuntimeError on 404)
        self._get_unit(name)
        state = self._wait_for_container_state(name)
        activeState = state['systemdActiveState']
        # FIXME (bacongobbler): when fleet loads a job, sometimes it'll
        # automatically start and stop the container, which in our case
        # will return as 'failed', even though the container is
        # perfectly fine.
        if activeState == 'failed' and state['systemdLoadState'] == 'loaded':
            return JobState.created
        return getattr(JobState, systemdActiveStateMap[activeState])
    except __HOLE__:
        # failed retrieving a proper response from the fleet API
        return JobState.error
    except RuntimeError:
        # failed to retrieve a response from the fleet API,
        # which means it does not exist
        return JobState.destroyed
KeyError
dataset/ETHPy150Open deis/deis/controller/scheduler/fleet.py/FleetHTTPClient.state
2,931
def matchBlocks(self, blocks, threshold=.5, *args, **kwargs):
    """
    Partitions blocked data and returns a list of clusters, where
    each cluster is a tuple of record ids

    Keyword arguments:

    blocks -- Sequence of tuples of records, where each tuple is a
              set of records covered by a blocking predicate

    threshold -- Number between 0 and 1 (default is .5). We will
                 only consider as duplicates record pairs as
                 duplicates if their estimated duplicate likelihood is
                 greater than the threshold.

                 Lowering the number will increase recall, raising it
                 will increase precision
    """
    candidate_records = self._blockedPairs(blocks)
    matches = core.scoreDuplicates(candidate_records,
                                   self.data_model,
                                   self.classifier,
                                   self.num_cores,
                                   threshold)
    logger.debug("matching done, begin clustering")
    clusters = self._cluster(matches, threshold, *args, **kwargs)
    try:
        match_file = matches.filename
        del matches
        os.remove(match_file)
    except __HOLE__:
        pass
    return clusters
AttributeError
dataset/ETHPy150Open datamade/dedupe/dedupe/api.py/Matching.matchBlocks
2,932
def __init__(self, settings_file, num_cores=None):  # pragma : no cover
    """
    Initialize from a settings file

    #### Example usage

        # initialize from a settings file
        with open('my_learned_settings', 'rb') as f:
            deduper = dedupe.StaticDedupe(f)

    #### Keyword arguments

    `settings_file`
    A file object containing settings data.

    Settings files are typically generated by saving the settings
    learned from ActiveMatching. If you need details for this
    file see the method [`writeSettings`][[api.py#writesettings]].
    """
    if num_cores is None:
        self.num_cores = multiprocessing.cpu_count()
    else:
        self.num_cores = num_cores
    try:
        self.data_model = pickle.load(settings_file)
        self.classifier = pickle.load(settings_file)
        self.predicates = pickle.load(settings_file)
    except (KeyError, __HOLE__):
        raise SettingsFileLoadingException(
            "This settings file is not compatible with "
            "the current version of dedupe. This can happen "
            "if you have recently upgraded dedupe.")
    except:
        raise SettingsFileLoadingException(
            "Something has gone wrong with loading the settings file. "
            "Try deleting the file")
    self.loaded_indices = False
    try:
        self._loadIndices(settings_file)
    except EOFError:
        pass
    except (KeyError, AttributeError):
        raise SettingsFileLoadingException(
            "This settings file is not compatible with "
            "the current version of dedupe. This can happen "
            "if you have recently upgraded dedupe.")
    except:
        raise SettingsFileLoadingException(
            "Something has gone wrong with loading the settings file. "
            "Try deleting the file")
    logger.info(self.predicates)
    self.blocker = blocking.Blocker(self.predicates)
AttributeError
dataset/ETHPy150Open datamade/dedupe/dedupe/api.py/StaticMatching.__init__
2,933
def _checkDataSample(self, data_sample):
    try:
        len(data_sample)
    except __HOLE__:
        raise ValueError("data_sample must be a sequence")
    if len(data_sample):
        self._checkRecordPairType(data_sample[0])
    else:
        warnings.warn("You submitted an empty data_sample")
TypeError
dataset/ETHPy150Open datamade/dedupe/dedupe/api.py/ActiveMatching._checkDataSample
2,934
def unindex(self, data):  # pragma : no cover
    for field in self.blocker.index_fields:
        self.blocker.unindex((record[field]
                              for record in viewvalues(data)),
                             field)
    for block_key, record_id in self.blocker(viewitems(data)):
        try:
            del self.blocked_records[block_key][record_id]
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open datamade/dedupe/dedupe/api.py/GazetteerMatching.unindex
2,935
def __init__(self, *args, **kwargs):
    super(StaticGazetteer, self).__init__(*args, **kwargs)
    settings_file = args[0]
    try:
        self.blocked_records = pickle.load(settings_file)
    except EOFError:
        self.blocked_records = OrderedDict({})
    except (__HOLE__, AttributeError):
        raise SettingsFileLoadingException(
            "This settings file is not compatible with "
            "the current version of dedupe. This can happen "
            "if you have recently upgraded dedupe.")
    except:
        raise SettingsFileLoadingException(
            "Something has gone wrong with loading the settings file. "
            "Try deleting the file")
KeyError
dataset/ETHPy150Open datamade/dedupe/dedupe/api.py/StaticGazetteer.__init__
2,936
def delete_column_constraints(func):
    """
    Decorates column operation functions for MySQL.
    Deletes the constraints from the database and clears local cache.
    """
    def _column_rm(self, table_name, column_name, *args, **opts):
        # Delete foreign key constraints
        try:
            self.delete_foreign_key(table_name, column_name)
        except __HOLE__:
            pass  # If no foreign key on column, OK because it checks first
        # Delete constraints referring to this column
        try:
            reverse = self._lookup_reverse_constraint(table_name, column_name)
            for cname, rtable, rcolumn in reverse:
                self.delete_foreign_key(rtable, rcolumn)
        except DryRunError:
            pass
        return func(self, table_name, column_name, *args, **opts)
    return _column_rm
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/mysql.py/delete_column_constraints
2,937
def copy_column_constraints(func):
    """
    Decorates column operation functions for MySQL.
    Determines existing constraints and copies them to a new column
    """
    def _column_cp(self, table_name, column_old, column_new, *args, **opts):
        # Copy foreign key constraint
        try:
            constraint = self._find_foreign_constraints(
                table_name, column_old)[0]
            refs = self._lookup_constraint_references(table_name, constraint)
            if refs is not None:
                (ftable, fcolumn) = refs
                if ftable and fcolumn:
                    fk_sql = self.foreign_key_sql(
                        table_name, column_new, ftable, fcolumn)
                    get_logger().debug("Foreign key SQL: " + fk_sql)
                    self.add_deferred_sql(fk_sql)
        except __HOLE__:
            pass  # No constraint exists so ignore
        except DryRunError:
            pass
        # Copy constraints referring to this column
        try:
            reverse = self._lookup_reverse_constraint(table_name, column_old)
            for cname, rtable, rcolumn in reverse:
                fk_sql = self.foreign_key_sql(
                    rtable, rcolumn, table_name, column_new)
                self.add_deferred_sql(fk_sql)
        except DryRunError:
            pass
        return func(self, table_name, column_old, column_new, *args, **opts)
    return _column_cp
IndexError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/mysql.py/copy_column_constraints
2,938
def _fill_constraint_cache(self, db_name, table_name):
    # for MySQL grab all constraints for this database.
    # It's just as cheap as a single column.
    self._constraint_cache[db_name] = {}
    self._constraint_cache[db_name][table_name] = {}
    self._reverse_cache[db_name] = {}
    self._constraint_references[db_name] = {}

    name_query = """
        SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
               kc.`referenced_table_name`, kc.`referenced_column_name`
        FROM information_schema.key_column_usage AS kc
        WHERE kc.table_schema = %s
    """
    rows = self.execute(name_query, [db_name])
    if not rows:
        return
    cnames = {}
    for constraint, column, table, ref_table, ref_column in rows:
        key = (table, constraint)
        cnames.setdefault(key, set())
        cnames[key].add((column, ref_table, ref_column))

    type_query = """
        SELECT c.constraint_name, c.table_name, c.constraint_type
        FROM information_schema.table_constraints AS c
        WHERE c.table_schema = %s
    """
    rows = self.execute(type_query, [db_name])
    for constraint, table, kind in rows:
        key = (table, constraint)
        self._constraint_cache[db_name].setdefault(table, {})
        try:
            cols = cnames[key]
        except __HOLE__:
            cols = set()
        for column_set in cols:
            (column, ref_table, ref_column) = column_set
            self._constraint_cache[db_name][table].setdefault(column, set())
            if kind == 'FOREIGN KEY':
                self._constraint_cache[db_name][table][column].add(
                    (kind, constraint))
                # Create constraint lookup, see constraint_references
                self._constraint_references[db_name][
                    (table, constraint)] = (ref_table, ref_column)
                # Create reverse table lookup, reverse_lookup
                self._reverse_cache[db_name].setdefault(ref_table, {})
                self._reverse_cache[db_name][ref_table].setdefault(
                    ref_column, set())
                self._reverse_cache[db_name][ref_table][ref_column].add(
                    (constraint, table, column))
            else:
                self._constraint_cache[db_name][table][column].add(
                    (kind, constraint))
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/mysql.py/DatabaseOperations._fill_constraint_cache
2,939
def _lookup_constraint_references(self, table_name, cname):
    """
    Provided an existing table and constraint, returns tuple of
    (foreign table, column)
    """
    db_name = self._get_setting('NAME')
    try:
        return self._constraint_references[db_name][(table_name, cname)]
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/mysql.py/DatabaseOperations._lookup_constraint_references
2,940
def _lookup_reverse_constraint(self, table_name, column_name=None):
    """Look for the column referenced by a foreign constraint"""
    db_name = self._get_setting('NAME')
    if self.dry_run:
        raise DryRunError("Cannot get constraints for columns.")
    if not self._is_valid_cache(db_name, table_name):
        # Piggy-back on lookup_constraint, ensures cache exists
        self.lookup_constraint(db_name, table_name)
    try:
        table = self._reverse_cache[db_name][table_name]
        if column_name == None:
            return [(y, tuple(y)) for x, y in table.items()]
        else:
            return tuple(table[column_name])
    except __HOLE__:
        return []
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/South-1.0.2/south/db/mysql.py/DatabaseOperations._lookup_reverse_constraint
2,941
def create(vm_):
    '''
    Create a single VM from a data dict

    CLI Example:

    .. code-block:: bash

        salt-cloud -p profile_name vm_name
    '''
    try:
        # Check for required profile parameters before sending any API calls.
        if vm_['profile'] and config.is_profile_configured(
                __opts__,
                __active_provider_name__ or 'joyent',
                vm_['profile'],
                vm_=vm_) is False:
            return False
    except __HOLE__:
        pass

    # Since using "provider: <provider-engine>" is deprecated, alias provider
    # to use driver: "driver: <provider-engine>"
    if 'provider' in vm_:
        vm_['driver'] = vm_.pop('provider')

    key_filename = config.get_cloud_config_value(
        'private_key', vm_, __opts__, search_global=False, default=None
    )

    salt.utils.cloud.fire_event(
        'event',
        'starting create',
        'salt/cloud/{0}/creating'.format(vm_['name']),
        {
            'name': vm_['name'],
            'profile': vm_['profile'],
            'provider': vm_['driver'],
        },
        transport=__opts__['transport']
    )

    log.info(
        'Creating Cloud VM {0} in {1}'.format(
            vm_['name'],
            vm_.get('location', DEFAULT_LOCATION)
        )
    )

    # added . for fqdn hostnames
    salt.utils.cloud.check_name(vm_['name'], 'a-zA-Z0-9-.')
    kwargs = {
        'name': vm_['name'],
        'networks': vm_.get('networks', DEFAULT_NETWORKS),
        'image': get_image(vm_),
        'size': get_size(vm_),
        'location': vm_.get('location', DEFAULT_LOCATION)
    }

    salt.utils.cloud.fire_event(
        'event',
        'requesting instance',
        'salt/cloud/{0}/requesting'.format(vm_['name']),
        {'kwargs': kwargs},
        transport=__opts__['transport']
    )

    try:
        data = create_node(**kwargs)
    except Exception as exc:
        log.error(
            'Error creating {0} on JOYENT\n\n'
            'The following exception was thrown when trying to '
            'run the initial deployment: \n{1}'.format(
                vm_['name'], str(exc)
            ),
            # Show the traceback if the debug logging level is enabled
            exc_info_on_loglevel=logging.DEBUG
        )
        return False

    query_instance(vm_)
    data = show_instance(vm_['name'], call='action')

    vm_['key_filename'] = key_filename
    vm_['ssh_host'] = data[1]['primaryIp']

    salt.utils.cloud.bootstrap(vm_, __opts__)

    salt.utils.cloud.fire_event(
        'event',
        'created instance',
        'salt/cloud/{0}/created'.format(vm_['name']),
        {
            'name': vm_['name'],
            'profile': vm_['profile'],
            'provider': vm_['driver'],
        },
        transport=__opts__['transport']
    )

    return data[1]
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/cloud/clouds/joyent.py/create
2,942
def clean(self, value):
    value = super(ITSocialSecurityNumberField, self).clean(value)
    if value in EMPTY_VALUES:
        return ''
    value = re.sub('\s', '', value).upper()
    try:
        check_digit = ssn_check_digit(value)
    except __HOLE__:
        raise ValidationError(self.error_messages['invalid'])
    if not value[15] == check_digit:
        raise ValidationError(self.error_messages['invalid'])
    return value
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/localflavor/it/forms.py/ITSocialSecurityNumberField.clean
2,943
def clean(self, value):
    value = super(ITVatNumberField, self).clean(value)
    if value in EMPTY_VALUES:
        return ''
    try:
        vat_number = int(value)
    except __HOLE__:
        raise ValidationError(self.error_messages['invalid'])
    vat_number = str(vat_number).zfill(11)
    check_digit = vat_number_check_digit(vat_number[0:10])
    if not vat_number[10] == check_digit:
        raise ValidationError(self.error_messages['invalid'])
    return smart_text(vat_number)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/localflavor/it/forms.py/ITVatNumberField.clean
2,944
def updateMessages(self, parameters):
    # Modify messages created by internal validation for each parameter.
    # This method is called after internal validation.
    class_ = self.__class__
    dataset_parameter = parameters[0]
    variables_parameter = parameters[1]
    output_file_parameter = parameters[2]
    extent_parameter = parameters[3]
    dimensions_parameter = parameters[4]
    value_selection_method_parameter = parameters[5]
    value_selection_method = mds.SELECT_BY_VALUE if \
        value_selection_method_parameter.valueAsText == "BY_VALUE" else \
        mds.SELECT_BY_INDEX
    dataset = None

    # Dataset.
    if not dataset_parameter.value is None:
        try:
            dataset = mds.netcdf.Dataset(dataset_parameter.valueAsText,
                filter_out_nd_coordinates=True)
        except __HOLE__, exception:
            if "No such file or directory" in str(exception) or \
                    "Invalid argument" in str(exception):
                class_.set_error(dataset_parameter,
                    mds.messages.INPUT_DATASET_DOES_NOT_RESOLVE_TO_FILENAME.format(
                        dataset_parameter.valueAsText))
            elif "Malformed or inaccessible DAP DDS" in str(exception):
                class_.set_error(dataset_parameter,
                    mds.messages.INPUT_DATASET_URL_MALFORMED.format(
                        dataset_parameter.valueAsText))
            else:
                class_.set_error(dataset_parameter,
                    mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                        dataset_parameter.valueAsText, str(exception)))
        except Exception, exception:
            class_.set_error(dataset_parameter,
                mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                    dataset_parameter.valueAsText, str(exception)))

    # Variables.
    if variables_parameter.values is not None:
        if dataset is not None:
            variable_names_available = dataset.data_variable_names()
            variable_names_requested = mds.OrderedSet(
                variables_parameter.values)
            unknown_variable_names = variable_names_requested - \
                variable_names_available
            known_variable_names = variable_names_requested - \
                unknown_variable_names
            if unknown_variable_names:
                class_.set_warning(variables_parameter,
                    mds.messages.VARIABLES_DO_NOT_EXIST.format(", ".join(
                        unknown_variable_names), "Input OPeNDAP Dataset"))
            if len(known_variable_names) == 0:
                class_.set_error(variables_parameter,
                    mds.messages.NONE_OF_VARIABLES_EXISTS)
            elif not class_.variables_are_compatible(
                    known_variable_names, dataset):
                class_.set_error(variables_parameter,
                    mds.messages.VARIABLES_MUST_SHARE_DIMENSIONS)

            # Determine names of dimensions that are present in the
            # selected variables.
            dimension_names = set()
            for variable_name in known_variable_names:
                variable = dataset.variable(variable_name)
                for dimension_name in variable.dimensions:
                    dimension_names.add(dimension_name)
            for dimension_name in dimension_names:
                if dimension_name in dataset.variable_names():
                    if len(dataset.variable(dimension_name)[:].shape) > 1:
                        class_.set_error(variables_parameter,
                            mds.messages.MULTIDIMENSIONAL_DIMENSIONS_NOT_SUPPORTED.format(
                                dimension_name))
                        break

    # Output file.
    if output_file_parameter.value is not None:
        output_filename = output_file_parameter.valueAsText
        if os.path.splitext(output_filename)[1] != ".nc":
            class_.set_error(output_file_parameter,
                mds.messages.OUTPUT_FILE_EXTENSION_MUST_BE_NC)

    # Dimensions.
    # Check whether the selected dimensions are present in the selected
    # variables.
    if dimensions_parameter.value is not None:
        if dataset is not None:
            if variables_parameter.values is not None:
                # Determine names of dimensions that are present in the
                # selected variables.
                dimension_names = set()
                for variable_name in variables_parameter.values:
                    if variable_name in dataset.dataset.variables:
                        variable = dataset.variable(variable_name)
                        for dimension_name in variable.dimensions:
                            dimension_names.add(dimension_name)
                for dimension_record in dimensions_parameter.values:
                    # Test whether selected dimensions are present in the
                    # collection just filled.
                    dimension_name = dimension_record[0]
                    if dimension_name not in dimension_names:
                        class_.set_error(dimensions_parameter,
                            mds.messages.DIMENSION_NOT_PRESENT.format(
                                dimension_record[0]))
                        break
                    elif dimension_name in dataset.variable_names():
                        if len(dataset.variable(dimension_name)[:].shape) > 1:
                            class_.set_error(dimensions_parameter,
                                mds.messages.MULTIDIMENSIONAL_DIMENSIONS_NOT_SUPPORTED.format(
                                    dimension_name))
                            break
                    if value_selection_method == mds.SELECT_BY_VALUE \
                            and dataset.convention.is_time_dimension_variable(
                                dimension_name):
                        # Check format of temporal coordinates.
                        _, start_value, end_value = dimension_record
                        try:
                            mds.date_time.from_iso_format(start_value)
                            mds.date_time.from_iso_format(end_value)
                        except ValueError:
                            class_.set_error(dimensions_parameter,
                                mds.messages.INVALID_DATE_TIME)
                            break
                    elif dataset.convention.is_space_dimension_variable(
                            dimension_name):
                        class_.set_error(dimensions_parameter,
                            mds.messages.SKIPPING_SPATIAL_DIMENSION)
                        break
RuntimeError
dataset/ETHPy150Open Esri/solutions-geoprocessing-toolbox/suitability/toolboxes/scripts/MultidimensionSupplementalTools/MultidimensionSupplementalTools/Scripts/mds/tools/opendap_to_netcdf.py/OPeNDAPtoNetCDF.updateMessages
2,945
def execute(self, parameters, messages):
    dataset_name = parameters[0].valueAsText
    variable_names = mds.OrderedSet(parameters[1].values)
    output_filename = parameters[2].valueAsText
    extent = None
    if parameters[3].value is not None:
        extent = [float(value) for value in
            parameters[3].valueAsText.split(" ")]
    dimension_records = parameters[4].values
    value_selection_method = mds.SELECT_BY_VALUE if \
        parameters[5].valueAsText == "BY_VALUE" else mds.SELECT_BY_INDEX
    date_time_string = time.strftime("%m/%d/%Y %H:%M", time.localtime())
    history_message = mds.messages.OPENDAP_TO_NETCDF_HISTORY.format(
        date_time_string, dataset_name)
    try:
        dataset = mds.netcdf.Dataset(dataset_name,
            filter_out_nd_coordinates=True)
        # Get rid of the variable names that are not part of the dataset.
        known_variable_names = \
            variable_names & dataset.data_variable_names()
        assert len(known_variable_names) > 0  # See updateMessages.
        unknown_variable_names = variable_names - known_variable_names
        if unknown_variable_names:
            messages.addWarningMessage(
                mds.messages.VARIABLES_DO_NOT_EXIST.format(", ".join(
                    unknown_variable_names), "Input OPeNDAP Dataset"))
        mds.netcdf.copy(dataset, known_variable_names, output_filename,
            extent, dimension_records, value_selection_method,
            history_message)
    except __HOLE__, exception:
        # Handle errors not detected by updateMessages.
        messages.addErrorMessage(str(exception))
        raise arcpy.ExecuteError
RuntimeError
dataset/ETHPy150Open Esri/solutions-geoprocessing-toolbox/suitability/toolboxes/scripts/MultidimensionSupplementalTools/MultidimensionSupplementalTools/Scripts/mds/tools/opendap_to_netcdf.py/OPeNDAPtoNetCDF.execute
2,946
def mftest():
    cam = Camera()
    img = cam.getImage()
    d = Display(img.size())
    bb1 = getBBFromUser(cam, d)
    fs1 = []
    img = cam.getImage()
    while True:
        try:
            img1 = cam.getImage()
            fs1 = img1.track("mftrack", fs1, img, bb1,
                             numM=10, numN=10, winsize=10)
            print fs1[-1].shift, "shift"
            fs1.drawBB(color=(255, 0, 0))
            fs1.drawPath()
            img1.show()
        except __HOLE__:
            break
KeyboardInterrupt
dataset/ETHPy150Open sightmachine/SimpleCV/SimpleCV/examples/tracking/mftrack.py/mftest
2,947
def getBBFromUser(cam, d):
    p1 = None
    p2 = None
    img = cam.getImage()
    while d.isNotDone():
        try:
            img = cam.getImage()
            img.save(d)
            dwn = d.leftButtonDownPosition()
            up = d.leftButtonUpPosition()
            if dwn:
                p1 = dwn
            if up:
                p2 = up
                break
            time.sleep(0.05)
        except __HOLE__:
            break
    print p1, p2
    if not p1 or not p2:
        return None
    xmax = np.max((p1[0], p2[0]))
    xmin = np.min((p1[0], p2[0]))
    ymax = np.max((p1[1], p2[1]))
    ymin = np.min((p1[1], p2[1]))
    print xmin, ymin, xmax, ymax
    return (xmin, ymin, xmax - xmin, ymax - ymin)
KeyboardInterrupt
dataset/ETHPy150Open sightmachine/SimpleCV/SimpleCV/examples/tracking/mftrack.py/getBBFromUser
2,948
def bump_version(version):
    try:
        parts = map(int, version.split('.'))
    except __HOLE__:
        fail('Current version is not numeric')
    parts[-1] += 1
    return '.'.join(map(str, parts))
ValueError
dataset/ETHPy150Open espeed/bulbs/scripts/make-release.py/bump_version
2,949
def simplify_value(value):
    if hasattr(value, 'simplify_for_render'):
        return value.simplify_for_render(simplify_value, simplify_model)
    elif isinstance(value, dict):
        out = {}
        for key in value:
            new_key = key if isinstance(key, (basestring, int)) else str(key)
            try:
                out[new_key] = simplify_value(value[key])
            except __HOLE__:
                logger.info('Could not simplify field %s of type %s',
                            key, type(value[key]), exc_info=True)
                pass
        return out
    elif isinstance(value, tuple) and hasattr(value, '_asdict'):
        # Handle named tuples as dicts
        return simplify_value(value._asdict())
    elif isinstance(value, (list, tuple, set, frozenset)):
        out = []
        for subvalue in value:
            try:
                out.append(simplify_value(subvalue))
            except NotImplementedError:
                logger.info('Could not simplify a value of type %s',
                            type(subvalue), exc_info=True)
        if isinstance(value, tuple):
            return tuple(out)
        else:
            return out
    elif isinstance(value, (basestring, int, float)):
        return value
    elif isinstance(value, lazy_translation):
        return unicode(value)
    elif isinstance(value, datetime.datetime):
        return DateTimeUnicode(value.isoformat(' '))
    elif isinstance(value, datetime.date):
        return DateUnicode(value.isoformat())
    elif hasattr(type(value), '__mro__') and models.Model in type(value).__mro__:
        return simplify_model(value)
    elif isinstance(value, Page):
        return {
            'has_next': value.has_next(),
            'has_previous': value.has_next(),
            'next_page_number': value.has_next(),
            'previous_page_number': value.has_next(),
            'number': value.number,
            'objects': simplify_value(value.object_list),
            'num_pages': value.paginator.num_pages,
            'num_objects': value.paginator.count,
        }
    elif value is None:
        return None
    elif isinstance(value, Point):
        return simplify_value(list(value))
    elif isinstance(value, Distance):
        # This is here to avoid a circular import
        from molly.utils.templatetags.molly_utils import humanise_distance
        return simplify_value(humanise_distance(value.m))
    elif hasattr(value, '__iter__'):
        # Iterators may be unbounded; silently ignore elements once we've
        # already had 1000.
        return [simplify_value(item) for item in itertools.islice(value, 1000)]
    else:
        raise NotImplementedError
NotImplementedError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/simplify.py/simplify_value
2,950
def simplify_model(obj, terse=False):
    if obj is None:
        return None
    # It's a Model instance
    # "expose_fields" is never used
    if hasattr(obj._meta, 'expose_fields'):
        expose_fields = obj._meta.expose_fields
    else:
        expose_fields = [f.name for f in obj._meta.fields]
    out = {
        '_type': '%s.%s' % (obj.__module__[:-7], obj._meta.object_name),
        '_pk': obj.pk,
    }
    if hasattr(obj, 'get_absolute_url'):
        out['_url'] = obj.get_absolute_url()
    if terse:
        out['_terse'] = True
    else:
        for field_name in expose_fields:
            if field_name in FIELDS_NOT_EXPOSED:
                continue
            try:
                value = getattr(obj, field_name)
                if isinstance(value, models.Model):
                    value = simplify_model(value, terse=True)
                out[field_name] = simplify_value(value)
            except __HOLE__:
                pass
        # Add any non-field attributes
        for field in list(dir(obj)):
            try:
                if field[0] != '_' and field != 'objects' \
                        and not isinstance(getattr(obj, field), models.Field) \
                        and not field in FIELDS_NOT_EXPOSED:
                    try:
                        out[field] = simplify_value(getattr(obj, field))
                    except NotImplementedError:
                        pass
            except AttributeError:
                pass
    return out
NotImplementedError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/simplify.py/simplify_model
2,951
def serialize_to_xml(value):
    if value is None:
        node = etree.Element('null')
    elif isinstance(value, bool):
        node = etree.Element('literal')
        node.text = 'true' if value else 'false'
        node.attrib['type'] = 'boolean'
    elif isinstance(value, (basestring, int, float)):
        node = etree.Element('literal')
        try:
            node.text = unicode(value)
        except __HOLE__:
            # Encode as UTF-8 if ASCII string can not be encoded
            node.text = unicode(value, 'utf-8')
        node.attrib['type'] = [d[1] for d in _XML_DATATYPES
                               if isinstance(value, d[0])][0]
    elif isinstance(value, dict):
        if '_type' in value:
            node = etree.Element('object',
                                 {'type': value['_type'],
                                  'pk': unicode(value.get('_pk', ''))})
            del value['_type']
            del value['_pk']
            if '_url' in value:
                node.attrib['url'] = value['_url']
                del value['_url']
            if value.get('_terse'):
                node.attrib['terse'] = 'true'
                del value['_terse']
        else:
            node = etree.Element('collection', {'type': 'mapping'})
        for key in value:
            v = serialize_to_xml(value[key])
            subnode = etree.Element('item', {'key': key})
            subnode.append(v)
            node.append(subnode)
    elif isinstance(value, (list, tuple, set, frozenset)):
        for x, y in ((list, 'list'), (tuple, 'tuple')):
            if isinstance(value, x):
                node = etree.Element('collection', {'type': y})
                break
        else:
            node = etree.Element('collection', {'type': 'set'})
        for item in value:
            v = serialize_to_xml(item)
            subnode = etree.Element('item')
            subnode.append(v)
            node.append(subnode)
    else:
        node = etree.Element('unknown')
    return node
UnicodeDecodeError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/simplify.py/serialize_to_xml
2,952
def main(unused_argv):
    if not FLAGS.discovery:
        raise app.UsageError('You must specify --discovery')
    if not (FLAGS.output_dir or FLAGS.output_file):
        raise app.UsageError(
            'You must specify one of --output_dir or --output_file')
    if not FLAGS.templates:
        raise app.UsageError('You must specify --templates')

    f = open(FLAGS.discovery)
    discovery_doc = json.loads(f.read())
    f.close()

    options = {
        # Include other files needed to compile (e.g. base jar files)
        'include_dependencies': False,
        # Include the timestamp in the generated library
        'include_timestamp': FLAGS.include_timestamp,
        # Put API version in the package
        'version_package': FLAGS.version_package,
    }
    if FLAGS.output_type == 'full':
        options['include_dependencies'] = True

    # try to determine what generator to use from the language variant.
    language_variants = Targets().VariationsForLanguage(FLAGS.language)
    if language_variants:
        features = language_variants.GetFeatures(FLAGS.language_variant)
        template_dir = features.template_dir
        generator_name = features.get('generator', FLAGS.language)
    else:
        # Not described by targets.json. The template dir should just be down
        # in the language specific folder
        template_dir = os.path.join(os.path.dirname(__file__), 'languages',
                                    FLAGS.language)
        features = None
        generator_name = FLAGS.language

    # Instantiate the right code generator
    lang_model = None
    try:
        if FLAGS.language == 'any':
            api = Api(discovery_doc)
            # TODO(user): A default language model should be built in to the
            # templates
            lang_model = language_model.DocumentingLanguageModel()
        else:
            generator_class = generator_lookup.GetGeneratorByLanguage(
                generator_name)
            generator_instance = generator_class(discovery_doc)
            api = generator_instance.api
            lang_model = generator_instance.language_model
    except __HOLE__:
        raise app.UsageError('Unsupported language option: %s'
                             % FLAGS.language)
    api.VisitAll(lambda o: o.SetLanguageModel(lang_model))

    gen = TemplateExpander(api, options=options)
    if features:
        gen.SetFeatures(features)
    template_dir = os.path.join(template_dir, FLAGS.templates)
    if not os.path.isdir(template_dir):
        raise app.UsageError('Can not find template tree at: %s'
                             % template_dir)
    gen.SetTemplateDir(template_dir)

    # Get an output writer
    package_writer = package_writer_foundry.GetPackageWriter(
        output_dir=FLAGS.output_dir, output_file=FLAGS.output_file,
        output_format=FLAGS.output_format)
    # do it
    gen.GeneratePackage(package_writer)
    package_writer.DoneWritingArchive()
    return 0
ValueError
dataset/ETHPy150Open google/apis-client-generator/src/googleapis/codegen/expand_templates.py/main
2,953
def __getattr__(self, name):
    try:
        # Return 1 if value in day_of_week is True, 0 otherwise
        return (self.day_of_week[self._DAYS_OF_WEEK.index(name)]
                and 1 or 0)
    except __HOLE__:
        pass
    except ValueError:
        # not a day of the week
        pass
    raise AttributeError(name)
KeyError
dataset/ETHPy150Open google/transitfeed/transitfeed/serviceperiod.py/ServicePeriod.__getattr__
2,954
def ValidateDate(self, date, field_name, problems, context=None):
    if date is None:
        # No exception is issued because ServicePeriods can be created using
        # only calendar_dates.txt. In that case we have a ServicePeriod
        # consisting entirely of service exceptions, and with no start_date
        # or end_date.
        return False
    if util.IsEmpty(date):
        problems.MissingValue(field_name, date, context)
        return False
    elif not util.ValidateDate(date, field_name, problems):
        return False
    else:
        try:
            date_value = time.strptime(date, "%Y%m%d")
            if not (self._VALID_DATE_RANGE_FROM <= date_value.tm_year <=
                    self._VALID_DATE_RANGE_TO):
                problems.DateOutsideValidRange(field_name, date,
                                               self._VALID_DATE_RANGE_FROM,
                                               self._VALID_DATE_RANGE_TO,
                                               context=context)
                return False
            return True
        except __HOLE__:
            problems.InvalidValue(field_name, 'Could not parse date value.',
                                  date, context, problems_module.TYPE_ERROR)
            return False
ValueError
dataset/ETHPy150Open google/transitfeed/transitfeed/serviceperiod.py/ServicePeriod.ValidateDate
2,955
def get(self, request, *args, **kwargs):
    # Get the data from the session
    try:
        key = self.request.session[self.session_key_name]
        del self.request.session[self.session_key_name]
    except __HOLE__:
        raise Http404()

    # Get data for qrcode
    image_factory_string = getattr(settings, 'TWO_FACTOR_QR_FACTORY',
                                   self.default_qr_factory)
    image_factory = import_string(image_factory_string)
    content_type = self.image_content_types[image_factory.kind]
    try:
        username = self.request.user.get_username()
    except AttributeError:
        username = self.request.user.username
    otpauth_url = get_otpauth_url(accountname=username,
                                  issuer=get_current_site(self.request).name,
                                  secret=key,
                                  digits=totp_digits())

    # Make and return QR code
    img = qrcode.make(otpauth_url, image_factory=image_factory)
    resp = HttpResponse(content_type=content_type)
    img.save(resp)
    return resp
KeyError
dataset/ETHPy150Open Bouke/django-two-factor-auth/two_factor/views/core.py/QRGeneratorView.get
2,956
def test_random_location(self, domain):
    """
    Execute a test for the given domain at a random location.
    Optional key is required for Google's public instance.
    """
    locations = self.list_locations()
    test = self.request_test(domain, random.choice(locations))
    try:
        return test['data']['userUrl']
    except __HOLE__:
        return "Error: {0}".format(test)
KeyError
dataset/ETHPy150Open StackStorm/st2contrib/packs/webpagetest/actions/lib/webpagetest.py/WebPageTestAction.test_random_location
2,957
@task
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id,
                              export_id, zip_name, download_id):

    def find_question_id(form, value):
        for k, v in form.iteritems():
            if isinstance(v, dict):
                ret = find_question_id(v, value)
                if ret:
                    return [k] + ret
            else:
                if v == value:
                    return [k]
        return None

    def filename(form_info, question_id, extension):
        fname = u"%s-%s-%s-%s%s"
        if form_info['cases']:
            fname = u'-'.join(form_info['cases']) + u'-' + fname
        return fname % (form_info['name'],
                        unidecode(question_id),
                        form_info['user'],
                        form_info['id'],
                        extension)

    case_ids = set()

    def extract_form_info(form, properties=None, case_ids=case_ids):
        unknown_number = 0
        meta = form['form'].get('meta', dict())
        # get case ids
        case_blocks = extract_case_blocks(form)
        cases = {c['@case_id'] for c in case_blocks}
        case_ids |= cases
        form_info = {
            'form': form,
            'attachments': list(),
            'name': form['form'].get('@name', 'unknown form'),
            'user': meta.get('username', 'unknown_user'),
            'cases': cases,
            'id': form['_id']
        }
        for k, v in form['_attachments'].iteritems():
            if v['content_type'] == 'text/xml':
                continue
            try:
                question_id = unicode(
                    u'-'.join(find_question_id(form['form'], k)))
            except __HOLE__:
                question_id = unicode(u'unknown' + unicode(unknown_number))
                unknown_number += 1
            if not properties or question_id in properties:
                extension = unicode(os.path.splitext(k)[1])
                form_info['attachments'].append({
                    'size': v['length'],
                    'name': k,
                    'question_id': question_id,
                    'extension': extension,
                    'timestamp': parse(form['received_on']).timetuple(),
                })
        return form_info

    key = [domain, app_id, xmlns]
    form_ids = {f['id'] for f in XFormInstance.get_db().view(
        "attachments/attachments",
        start_key=key + [startdate],
        end_key=key + [enddate, {}],
        reduce=False)}
    properties = set()
    if export_id:
        schema = FormExportSchema.get(export_id)
        for table in schema.tables:
            # - in question id is replaced by . in excel exports
            properties |= {c.display.replace('.', '-')
                           for c in table.columns}

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in iter_docs(XFormInstance.get_db(), form_ids):
        if not zip_name:
            zip_name = unidecode(form['form'].get('@name', 'unknown form'))
        forms_info.append(extract_form_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    # get case names
    case_id_to_name = {c: c for c in case_ids}
    for case in iter_docs(CommCareCase.get_db(), case_ids):
        if case['name']:
            case_id_to_name[case['_id']] = case['name']

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        params = '_'.join(map(str, [xmlns, startdate, enddate, export_id,
                                    num_forms]))
        fname = '{}-{}'.format(app_id, hashlib.md5(params).hexdigest())
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, fname)
    else:
        _, fpath = tempfile.mkstemp()

    if not (os.path.isfile(fpath) and use_transfer):
        # Don't rebuild the file if it is already there
        with open(fpath, 'wb') as zfile:
            with zipfile.ZipFile(zfile, 'w') as z:
                for form_number, form_info in enumerate(forms_info):
                    f = XFormInstance.wrap(form_info['form'])
                    form_info['cases'] = {case_id_to_name[case_id]
                                          for case_id in form_info['cases']}
                    for a in form_info['attachments']:
                        fname = filename(form_info, a['question_id'],
                                         a['extension'])
                        zi = zipfile.ZipInfo(fname, a['timestamp'])
                        z.writestr(zi,
                                   f.fetch_attachment(a['name'],
                                                      stream=True).read(),
                                   zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip,
                                              form_number + 1, num_forms)

    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(
            fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath)),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )
    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
TypeError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/reports/tasks.py/build_form_multimedia_zip
2,958
def _buffer_recv_worker(ft_client):
    """Worker thread that constantly receives buffers."""
    try:
        for raw_buffer in ft_client.iter_raw_buffers():
            ft_client._push_raw_buffer(raw_buffer)
    except __HOLE__ as err:
        # something is wrong, the server stopped (or something)
        ft_client._recv_thread = None
        print('Buffer receive thread stopped: %s' % err)
RuntimeError
dataset/ETHPy150Open mne-tools/mne-python/mne/realtime/fieldtrip_client.py/_buffer_recv_worker
2,959
def draw(self):
    self.plot._render_lock.acquire()
    self.camera.apply_transformation()

    calc_verts_pos, calc_verts_len = 0, 0
    calc_cverts_pos, calc_cverts_len = 0, 0

    should_update_caption = (clock() - self.last_caption_update >
                             self.caption_update_interval)

    if len(self.plot._functions.values()) == 0:
        self.drawing_first_object = True

    for r in self.plot._functions.itervalues():
        if self.drawing_first_object:
            self.camera.set_rot_preset(r.default_rot_preset)
            self.drawing_first_object = False

        glPushMatrix()
        r._draw()
        glPopMatrix()

        # might as well do this while we are
        # iterating and have the lock rather
        # than locking and iterating twice
        # per frame:
        if should_update_caption:
            try:
                if r.calculating_verts:
                    calc_verts_pos += r.calculating_verts_pos
                    calc_verts_len += r.calculating_verts_len
                if r.calculating_cverts:
                    calc_cverts_pos += r.calculating_cverts_pos
                    calc_cverts_len += r.calculating_cverts_len
            except __HOLE__:
                pass

    for r in self.plot._pobjects:
        glPushMatrix()
        r._draw()
        glPopMatrix()

    if should_update_caption:
        self.update_caption(calc_verts_pos, calc_verts_len,
                            calc_cverts_pos, calc_cverts_len)
        self.last_caption_update = clock()

    if self.plot._screenshot:
        self.plot._screenshot._execute_saving()

    self.plot._render_lock.release()
ValueError
dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/plot_window.py/PlotWindow.draw
2,960
def validateJSON():
    style = unicode(js.toPlainText())
    if not style.strip():
        # no point in validating an empty string
        return
    pos = None
    try:
        json.loads(style)
    except __HOLE__, e:
        s = str(e)
        print s
        if s == 'No JSON object could be decoded':
            pos = 0
        elif s.startswith('Expecting '):
            pos = int(s.split(' ')[-1][:-1])
        elif s.startswith('Extra data'):
            pos = int(s.split(' ')[-3])
        else:
            print 'UNKNOWN ERROR'
    # This makes a red bar appear in the line
    # containing position pos
    js.highlightError(pos)

# Run validateJSON on every keypress
ValueError
dataset/ETHPy150Open rst2pdf/rst2pdf/gui/codeeditor.py/validateJSON
2,961
def process_request(self, request):
    latitude = None
    longitude = None
    accuracy = None

    # If the request has latitude and longitude query params, use those
    if 'latitude' in request.GET and 'longitude' in request.GET:
        latitude = request.GET['latitude']
        longitude = request.GET['longitude']
        accuracy = request.GET.get('accuracy')

    # Else look for an X-Current-Location header with the format
    # X-Current-Location: latitude=0.0,longitude=0.0,accuracy=1
    elif 'HTTP_X_CURRENT_LOCATION' in request.META:
        location_string = request.META['HTTP_X_CURRENT_LOCATION']
        try:
            temp_dict = dict([token.split('=')
                              for token in location_string.split(',')])
            if 'latitude' in temp_dict and 'longitude' in temp_dict:
                latitude = temp_dict['latitude']
                longitude = temp_dict['longitude']
                accuracy = temp_dict.get('accuracy')
        except __HOLE__:
            # Malformed X-Current-Location header (e.g. latitude=0.0&foo)
            pass

    # Else use a geolocation:location session variable
    elif 'geolocation:location' in request.session:
        longitude, latitude = request.session['geolocation:location']
        accuracy = request.session.get('geolocation:accuracy')

    if latitude and longitude:
        point = Point(float(longitude), float(latitude), srid=4326)
        if accuracy:
            accuracy = float(accuracy)
        else:
            accuracy = None
        request.user_location = Location(point, accuracy)
ValueError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/middleware.py/LocationMiddleware.process_request
2,962
def get(self):
    recording.dont_record()
    lineno = self.request.get('n')
    try:
        lineno = int(lineno)
    except:
        lineno = 0
    filename = self.request.get('f') or ''
    orig_filename = filename
    match = re.match('<path\[(\d+)\]>(.*)', filename)
    if match:
        index, tail = match.groups()
        index = int(index)
        if index < len(sys.path):
            filename = sys.path[index] + tail
    try:
        fp = open(filename)
    except __HOLE__, err:
        self.response.out.write('<h1>IOError</h1><pre>%s</pre>' %
                                cgi.escape(str(err)))
        self.response.set_status(404)
    else:
        try:
            data = {'fp': fp,
                    'filename': filename,
                    'orig_filename': orig_filename,
                    'lineno': lineno,
                    }
            self.response.out.write(render('file.html', data))
        finally:
            fp.close()
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/appstats/ui.py/FileHandler.get
2,963
def run(self, no_ipython):
    """
    Runs the shell. Unless no_ipython is True or use_python is False
    then runs IPython shell if that is installed.
    """
    context = self.get_context()
    if not no_ipython:
        try:
            import IPython
            sh = IPython.Shell.IPShellEmbed(banner=self.banner)
            sh(global_ns=dict(), local_ns=context)
            return
        except __HOLE__:
            pass

    code.interact(self.banner, local=context)
ImportError
dataset/ETHPy150Open danjac/Flask-Script/flaskext/script.py/Shell.run
2,964
def handle(self, prog, name, args=None):
    args = list(args or [])

    try:
        command = self._commands[name]
    except __HOLE__:
        raise InvalidCommand, "Command %s not found" % name

    help_args = ('-h', '--help')

    # remove -h from args if present, and add to remaining args
    app_args = [a for a in args if a not in help_args]

    app_parser = self.create_parser(prog)
    app_namespace, remaining_args = app_parser.parse_known_args(app_args)

    for arg in help_args:
        if arg in args:
            remaining_args.append(arg)

    command_parser = command.create_parser(prog + " " + name)

    if getattr(command, 'capture_all_args', False):
        command_namespace, unparsed_args = \
            command_parser.parse_known_args(remaining_args)
        positional_args = [unparsed_args]
    else:
        command_namespace = command_parser.parse_args(remaining_args)
        positional_args = []

    app = self.create_app(**app_namespace.__dict__)

    command.handle(app, *positional_args, **command_namespace.__dict__)
KeyError
dataset/ETHPy150Open danjac/Flask-Script/flaskext/script.py/Manager.handle
2,965
def run(self, commands=None, default_command=None):
    """
    Prepares manager to receive command line input. Usually run
    inside "if __name__ == "__main__" block in a Python script.

    :param commands: optional dict of commands. Appended to any commands
                     added using add_command().

    :param default_command: name of default command to run if no
                            arguments passed.
    """

    if commands:
        self._commands.update(commands)

    try:
        if len(sys.argv) == 1 and default_command is not None:
            command = default_command
        else:
            command = sys.argv[1]

        if command is None:
            raise InvalidCommand, "Please provide a command"

        self.handle(sys.argv[0], command, sys.argv[2:])

        sys.exit(0)

    except __HOLE__:
        self.print_usage()
        sys.exit(0)

    except InvalidCommand, e:
        print e
        self.print_usage()
        sys.exit(1)
IndexError
dataset/ETHPy150Open danjac/Flask-Script/flaskext/script.py/Manager.run
2,966
def read_edl(self, path):
    """Reads the content of the edl in the given path

    :param path: A string showing the EDL path
    :return: str
    """
    try:
        with open(path) as f:
            edl_content = f.read()
    except __HOLE__:
        edl_content = ''
    return edl_content
IOError
dataset/ETHPy150Open eoyilmaz/anima/anima/ui/edl_importer.py/MainDialog.read_edl
2,967
def store_media_file_path(self, path):
    """stores the given path as the avid media file path in anima cache
    folder.

    :param str path: The path to be stored
    :return:
    """
    # make dirs first
    try:
        os.makedirs(os.path.dirname(self.cache_file_full_path))
    except __HOLE__:
        pass  # file already exists
    finally:
        with open(self.cache_file_full_path, 'w') as f:
            f.write(path)
OSError
dataset/ETHPy150Open eoyilmaz/anima/anima/ui/edl_importer.py/MainDialog.store_media_file_path
2,968
def restore_media_file_path(self):
    """restores the media file path
    """
    try:
        with open(self.cache_file_full_path) as f:
            media_file_path = f.read()
        self.media_files_path_lineEdit.setText(media_file_path)
    except __HOLE__:
        pass  # not stored yet
IOError
dataset/ETHPy150Open eoyilmaz/anima/anima/ui/edl_importer.py/MainDialog.restore_media_file_path
2,969
def get_namespace(self, schema):
    try:
        namespace = make_safe(schema._namespace)
    except __HOLE__:
        namespace = None
    return namespace
AttributeError
dataset/ETHPy150Open spotify/pyschema/pyschema/source_generation.py/PackageBuilder.get_namespace
2,970
def __contains__(self, key):
    try:
        if dict.__contains__(self, key):
            state = dict.__getitem__(self, key)
            o = state.obj()
        else:
            return False
    except __HOLE__:
        return False
    else:
        return o is not None
KeyError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/orm/identity.py/WeakInstanceDict.__contains__
2,971
def add(self, state):
    key = state.key
    # inline of self.__contains__
    if dict.__contains__(self, key):
        try:
            existing_state = dict.__getitem__(self, key)
            if existing_state is not state:
                o = existing_state.obj()
                if o is not None:
                    raise AssertionError(
                        "A conflicting state is already "
                        "present in the identity map for key %r"
                        % (key, ))
            else:
                return
        except __HOLE__:
            pass
    dict.__setitem__(self, key, state)
    self._manage_incoming_state(state)
KeyError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/orm/identity.py/WeakInstanceDict.add
2,972
def __init__(self, shell=None):
    super(BuiltinTrap, self).__init__(shell=shell, config=None)
    self._orig_builtins = {}
    # We define this to track if a single BuiltinTrap is nested.
    # Only turn off the trap when the outermost call to __exit__ is made.
    self._nested_level = 0
    self.shell = shell
    # builtins we always add - if set to HideBuiltin, they will just
    # be removed instead of being replaced by something else
    self.auto_builtins = {'exit': HideBuiltin,
                          'quit': HideBuiltin,
                          'get_ipython': self.shell.get_ipython,
                          }
    # Recursive reload function
    try:
        from IPython.lib import deepreload
        if self.shell.deep_reload:
            self.auto_builtins['reload'] = deepreload.reload
        else:
            self.auto_builtins['dreload']= deepreload.reload
    except __HOLE__:
        pass
ImportError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/core/builtin_trap.py/BuiltinTrap.__init__
2,973
def remove_builtin(self, key):
    """Remove an added builtin and re-set the original."""
    try:
        orig = self._orig_builtins.pop(key)
    except __HOLE__:
        pass
    else:
        if orig is BuiltinUndefined:
            del builtins.__dict__[key]
        else:
            builtins.__dict__[key] = orig
KeyError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/core/builtin_trap.py/BuiltinTrap.remove_builtin
2,974
def deactivate(self):
    """Remove any builtins which might have been added by add_builtins, or
    restore overwritten ones to their previous values."""
    # Note: must iterate over a static keys() list because we'll be
    # mutating the dict itself
    remove_builtin = self.remove_builtin
    for key in list(self._orig_builtins.keys()):
        remove_builtin(key)
    self._orig_builtins.clear()
    self._builtins_added = False
    try:
        del builtins.__dict__['__IPYTHON__active']
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/core/builtin_trap.py/BuiltinTrap.deactivate
2,975
def get_auth(self, username, password, authoritative_source, auth_options=None):
    """ Returns an authentication object.

        Examines the auth backend given after the '@' in the username and
        returns a suitable instance of a subclass of the BaseAuth class.

        * `username` [string]
            Username to authenticate as.
        * `password` [string]
            Password to authenticate with.
        * `authoritative_source` [string]
            Authoritative source of the query.
        * `auth_options` [dict]
            A dict which, if authenticated as a trusted user, can override
            `username` and `authoritative_source`.
    """
    if auth_options is None:
        auth_options = {}

    # validate arguments
    if (authoritative_source is None):
        raise AuthError("Missing authoritative_source.")

    # remove invalid cache entries
    rem = list()
    for key in self._auth_cache:
        if self._auth_cache[key]['valid_until'] < datetime.utcnow():
            rem.append(key)
    for key in rem:
        del(self._auth_cache[key])

    user_authbackend = username.rsplit('@', 1)

    # Find out what auth backend to use.
    # If no auth backend was specified in username, use default
    backend = ""
    if len(user_authbackend) == 1:
        backend = self._config.get('auth', 'default_backend')
        self._logger.debug("Using default auth backend %s" % backend)
    else:
        backend = user_authbackend[1]

    # do we have a cached instance?
    auth_str = ( str(username) + str(password) + str(authoritative_source)
        + str(auth_options) )
    if auth_str in self._auth_cache:
        self._logger.debug('found cached auth object for user %s' % username)
        return self._auth_cache[auth_str]['auth_object']

    # Create auth object
    try:
        auth = self._backends[backend](backend, user_authbackend[0], password,
            authoritative_source, auth_options)
    except __HOLE__:
        raise AuthError("Invalid auth backend '%s' specified" % str(backend))

    # save auth object to cache
    self._auth_cache[auth_str] = {
        'valid_until': datetime.utcnow() + timedelta(
            seconds=self._config.getint('auth', 'auth_cache_timeout')),
        'auth_object': auth
    }

    return auth
KeyError
dataset/ETHPy150Open SpriteLink/NIPAP/nipap/nipap/authlib.py/AuthFactory.get_auth
2,976
def authenticate(self):
    """ Verify authentication.

        Returns True/False dependant on whether the authentication
        succeeded or not.
    """
    # if authentication has been performed, return last result
    if self._authenticated is not None:
        return self._authenticated

    try:
        self._ldap_conn.simple_bind_s(
            self._ldap_binddn_fmt.format(
                ldap.dn.escape_dn_chars(self.username)),
            self.password)
    except ldap.SERVER_DOWN as exc:
        raise AuthError('Could not connect to LDAP server')
    except (ldap.INVALID_CREDENTIALS, ldap.INVALID_DN_SYNTAX,
            ldap.UNWILLING_TO_PERFORM) as exc:
        # Auth failed
        self._logger.debug('erroneous password for user %s' % self.username)
        self._authenticated = False
        return self._authenticated

    # auth succeeded
    self.authenticated_as = self.username
    self.trusted = False
    self.readonly = False

    try:
        # Create separate connection for search?
        if self._ldap_search_conn is not None:
            self._ldap_search_conn.simple_bind(self._ldap_search_binddn,
                                               self._ldap_search_password)
            search_conn = self._ldap_search_conn
        else:
            search_conn = self._ldap_conn

        res = search_conn.search_s(
            self._ldap_basedn, ldap.SCOPE_SUBTREE,
            self._ldap_search.format(
                ldap.dn.escape_dn_chars(self.username)),
            ['cn','memberOf'])
        self.full_name = res[0][1]['cn'][0]

        # check for ro_group membership if ro_group is configured
        if self._ldap_ro_group:
            if self._ldap_ro_group in res[0][1].get('memberOf', []):
                self.readonly = True

        # check for rw_group membership if rw_group is configured
        if self._ldap_rw_group:
            if self._ldap_rw_group in res[0][1].get('memberOf', []):
                self.readonly = False
            else:
                # if ro_group is configured, and the user is a member of
                # neither the ro_group nor the rw_group, fail authentication.
                if self._ldap_ro_group:
                    if self._ldap_ro_group not in res[0][1].get('memberOf', []):
                        self._authenticated = False
                        return self._authenticated
                else:
                    self.readonly = True
    except ldap.LDAPError as exc:
        raise AuthError(exc)
    except KeyError:
        raise AuthError('LDAP attribute missing')
    except __HOLE__:
        self.full_name = ''
        # authentication fails if either ro_group or rw_group are configured
        # and the user is not found.
        if self._ldap_rw_group or self._ldap_ro_group:
            self._authenticated = False
            return self._authenticated

    self._authenticated = True

    self._logger.debug('successfully authenticated as %s, username %s, '
                       'full_name %s, readonly %s' %
                       (self.authenticated_as, self.username,
                        self.full_name, str(self.readonly)))

    return self._authenticated
IndexError
dataset/ETHPy150Open SpriteLink/NIPAP/nipap/nipap/authlib.py/LdapAuth.authenticate
2,977
def main():
    '''CGI entry point.'''

    config = ConfigParser.ConfigParser()
    config.read('/etc/edeploy.conf')

    def config_get(section, name, default):
        'Secured config getter.'
        try:
            return config.get(section, name)
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            return default

    cfg_dir = os.path.normpath(config_get(
        'SERVER', 'HEALTHDIR',
        os.path.join(os.path.dirname(os.path.realpath(__file__)),
                     '..',
                     'health'))) + '/'

    # parse hw file given in argument or passed to cgi script
    if len(sys.argv) == 3 and sys.argv[1] == '-f':
        hw_file = open(sys.argv[2])
    else:
        cgitb.enable()

        form = cgi.FieldStorage()

        if 'file' not in form:
            upload.fatal_error('No file passed to the CGI')

        fileitem = form['file']
        hw_file = fileitem.file

    try:
        json_hw_items = json.loads(hw_file.read(-1))
    except Exception, excpt:
        upload.fatal_error("'Invalid hardware file: %s'" % str(excpt))

    def encode(elt):
        'Encode unicode strings as strings else return the object'
        try:
            return elt.encode('ascii', 'ignore')
        except __HOLE__:
            return elt

    hw_items = []
    for info in json_hw_items:
        hw_items.append(tuple(map(encode, info)))

    filename_and_macs = matcher.generate_filename_and_macs(hw_items)

    dirname = time.strftime("%Y_%m_%d-%Hh%M", time.localtime())
    if form.getvalue('session'):
        dest_dir = (cfg_dir + os.path.basename(form.getvalue('session')) +
                    '/' + dirname)
    else:
        dest_dir = cfg_dir + '/' + dirname

    try:
        if not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)
    except OSError, e:
        upload.fatal_error("Cannot create %s directory (%s)" %
                           (dest_dir, e.errno))

    upload.save_hw(hw_items, filename_and_macs['sysname'], dest_dir)
AttributeError
dataset/ETHPy150Open redhat-cip/edeploy/server/upload-health.py/main
2,978
def run(self):
    """main loop waiting for keyboard interrupt
    i.e. do nothing until user press 'X' or CTRL-C"""
    try:
        while(self.controller.__class__.running):
            sleep(1)
    except (__HOLE__, SystemExit):
        pass
KeyboardInterrupt
dataset/ETHPy150Open ericgibert/supersid/supersid/textsidviewer.py/textSidViewer.run
2,979
def handle_noargs(self, **options):
    if not settings.PWD_ALGORITHM == 'bcrypt':
        return

    for user in User.objects.all():
        pwd = user.password
        if pwd.startswith('hh$') or pwd.startswith('bcrypt$'):
            continue  # Password has already been strengthened.

        try:
            alg, salt, hash = pwd.split('$')
        except __HOLE__:
            continue  # Probably not a password we understand.

        bc_value = bcrypt_auth.create_hash(pwd)
        # 'hh' stands for 'hardened hash'.
        new_password = '$'.join(['hh', alg, salt, bc_value])
        user.password = new_password
        user.save()
ValueError
dataset/ETHPy150Open fwenzel/django-sha2/django_sha2/management/commands/strengthen_user_passwords.py/Command.handle_noargs
2,980
@staticmethod
def _open_file(filename):
    """Attempt to open the the file at ``filename`` for reading.

    Raises:
        DataSourceError, if the file cannot be opened.
    """
    if filename is None:
        raise DataSourceError("Trace filename is not defined")

    try:
        trace_file = open(filename, "r")
    except __HOLE__ as e:
        raise DataSourceError("Unable to open trace file %s" % filename, e)
    else:
        LOG.debug("Opened trace file %s", filename)
        return trace_file
IOError
dataset/ETHPy150Open openxc/openxc-python/openxc/sources/trace.py/TraceDataSource._open_file
2,981
def GetNetworkGateway(network):
    """Get the gateway for a network.

    Uses "netstat -nr" on Darwin and "ip route" on Linux to read the
    routing table. It searches for a route with destination exactly
    matching the network parameter!

    Args:
      network: str, likely in CIDR format or default gateway, e.g.
          "1.2.3/24" or "0.0.0.0"
    Returns:
      a string like "1.2.3.4" or "link#1" or "01:02:03:04:05:06" or
      "dev wlan0", depending on the type of route and platform.
    """
    route = ROUTE.get(_GetPlatform(), None)
    logging.debug('Route: %s', str(route))
    if not route:
        return

    try:
        return_code, stdout, stderr = flight_common.Exec(route)
    except __HOLE__:
        return_code = None

    if return_code != 0 or stderr or not stdout:
        return

    gateway_pattern = (
        r'^%s\s+(via[\s\t])?'
        r'([\d\.]+|[0-9a-f:]+|link#\d+|dev [a-z\d]+)[\s\t]+' % network)

    gateway = re.search(gateway_pattern, str(stdout), re.MULTILINE)
    if gateway:
        return gateway.group(2)

    return
OSError
dataset/ETHPy150Open google/simian/src/simian/mac/client/network_detect.py/GetNetworkGateway
2,982
def IsOnWwan():
    """"Checks WWAN device connection status.

    Note: this may produce false-positives, and may not catch all WWAN
    devices. Several Sprint and Verizon devices were tested, all of which
    create ppp0 upon connection. However, L2TP VPN also creates ppp0
    (Google no longer uses this as of Q2-2010 in favor of SSLVPN). A
    stronger check is probably needed at some point. As of 2011-12-6
    OpenVPN interface is tun0 on Linux and Darwin.

    Returns:
      Boolean. True if WWAN device is active, False otherwise.
    """
    wwan_ifaces = GetInterfaceNames(INTERFACE_WWAN)
    for wwan_iface in wwan_ifaces:
        try:
            return_code, unused_out, unused_err = flight_common.Exec(
                [IFCONFIG, wwan_iface])
        except __HOLE__:
            return_code = None
        # ifconfig exits with 1 if interface doesn't exist.
        if return_code == 0:
            return True

    return False
OSError
dataset/ETHPy150Open google/simian/src/simian/mac/client/network_detect.py/IsOnWwan
2,983
def GetNetworkName():
    """Return network name (SSID for WLANs) a device is connected to.

    Returns:
      name of the matching network name if possible, None otherwise.
    """
    this_platform = _GetPlatform()

    if this_platform == LINUX:
        cmdline = '/usr/bin/nmcli -t -f NAME,DEVICES conn status'
        # Ignore "Auto " prefix on automatically connecting networks.
        ssid_re = re.compile(r'^(Auto )?([^:]*):.*$')
        try:
            return_code, out, _ = flight_common.Exec(cmdline)
        except OSError:
            logging.exception('Error executing nmcli')
            return
        if out and not return_code:
            for l in out.splitlines():
                res = ssid_re.match(l)
                if res:
                    return res.groups()[1]
    elif this_platform == DARWIN:
        cmdline = (
            '/System/Library/PrivateFrameworks/Apple80211.framework/Versions/'
            'Current/Resources/airport -I | '
            'awk \'/ SSID/ {print substr($0, index($0, $2))}\'')
        try:
            return_code, out, _ = flight_common.Exec(cmdline)
        except __HOLE__:
            logging.exception('Error executing airport')
            return
        if out and not return_code:
            return out.strip() or None
OSError
dataset/ETHPy150Open google/simian/src/simian/mac/client/network_detect.py/GetNetworkName
2,984
def IsOnAndroidWap():
    """Checks if Android WiFi or Bluetooth tethering is connected.

    Returns:
      Boolean. True if Android tethering is connected, False otherwise.
    """
    # ifconfig output looks a little bit different on Darwin vs Linux.
    #
    # Darwin:
    #   inet 169.254.135.20 netmask 0xffff0000 broadcast 169.254.255.255
    # Linux:
    #   inet addr:172.26.113.45  Bcast:172.26.115.255  Mask:255.255.252.0
    android_wap_match_regex = re.compile(
        r'inet[\w\s]*[\s:]+192\.168\.(42|43|44)\.\d{1,3}\s+'
        r'.*(?:netmask\s+0xffffff00\s+|Mask:255\.255\.255\.0)')

    ifaces = GetInterfaceNames(INTERFACE_ANDROID_WAP)
    for wifi_iface in ifaces:
        # Android tethering uses very specific subnets*, as well as dnsmasq which
        # reveals itself via the TXT VERSION.BIND record.
        # * 192.168.42.0/24 for wired, 192.168.43.0/24 for WiFi, and
        # 192.168.44.0/24 for Bluetooth.
        try:
            return_code, stdout, stderr = flight_common.Exec(
                [IFCONFIG, wifi_iface])
        except __HOLE__:
            return_code = None
        if return_code != 0 or stderr:
            # interface was likely not found.
            continue

        android_wap_match = android_wap_match_regex.search(stdout)
        # Look for an interface on 192.168.4[2-4].0/24.
        if android_wap_match is not None:
            # If the default gateway is not through a likely Android WAN interface,
            # tethering may be active but is not likely to be used.
            default_gateway = GetDefaultGateway()
            logging.debug('Default gateway: %s', str(default_gateway))
            default_gateway_prefix = '192.168.%s.' % android_wap_match.group(1)
            if not default_gateway.startswith(default_gateway_prefix):
                return False

            # IP, netmask, gateway look like Android WAP, so check dnsmasq.
            # Request needs to be explicitly top level, as Linux uses
            # ndots:2 which would turn VERSION.BIND (without trailing dot) into
            # VERSION.BIND.foo.example.com in some cases.
            cmd = [HOST, '-W', '5', '-c', 'CHAOS', '-t', 'txt',
                   'VERSION.BIND.', default_gateway]
            try:
                return_code, stdout, unused_err = flight_common.Exec(cmd)
            except OSError:
                return_code = None
            if return_code != 0:
                continue

            dnsmasq_match = re.search(
                r'VERSION\.BIND descriptive text "dnsmasq-.*"', stdout)
            if dnsmasq_match is not None:
                # IP, netmask and dnsmasq all match Android WAP tethering.
                return True

    return False
OSError
dataset/ETHPy150Open google/simian/src/simian/mac/client/network_detect.py/IsOnAndroidWap
2,985
def check_group_whitelist(self, username):
    if not self.group_whitelist:
        return False
    for grnam in self.group_whitelist:
        try:
            group = getgrnam(grnam)
        except __HOLE__:
            self.log.error('No such group: [%s]' % grnam)
            continue
        if username in group.gr_mem:
            return True
    return False
KeyError
dataset/ETHPy150Open jupyterhub/jupyterhub/jupyterhub/auth.py/LocalAuthenticator.check_group_whitelist
2,986
@staticmethod
def system_user_exists(user):
    """Check if the user exists on the system"""
    try:
        pwd.getpwnam(user.name)
    except __HOLE__:
        return False
    else:
        return True
KeyError
dataset/ETHPy150Open jupyterhub/jupyterhub/jupyterhub/auth.py/LocalAuthenticator.system_user_exists
2,987
def cloneFile(infile, outfile):
    '''create a clone of ``infile`` named ``outfile``
    by creating a soft-link.
    '''
    # link via relative paths, otherwise it
    # fails if infile and outfile are in different
    # directories or in a subdirectory
    if os.path.dirname(infile) != os.path.dirname(outfile):
        relpath = os.path.relpath(
            os.path.dirname(infile), os.path.dirname(outfile))
    else:
        relpath = "."
    target = os.path.join(relpath, os.path.basename(infile))

    try:
        os.symlink(target, outfile)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/cloneFile
2,988
def val2str(val, format="%5.2f", na="na"):
    '''return a formatted value.

    If value does not fit format string, return "na"
    '''
    if type(val) == int:
        return format % val
    elif type(val) == float:
        return format % val

    try:
        x = format % val
    except (ValueError, __HOLE__):
        x = na
    return x
TypeError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/val2str
2,989
def str2val(val, format="%5.2f", na="na", list_detection=False):
    """guess type (int, float) of value.

    If `val` is neither int nor float, the value
    itself is returned.
    """
    if val is None:
        return val

    def _convert(v):
        try:
            x = int(v)
        except __HOLE__:
            try:
                x = float(v)
            except ValueError:
                return v
        return x

    if list_detection and "," in val:
        return [_convert(v) for v in val.split(",")]
    else:
        return _convert(val)
ValueError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/str2val
2,990
def prettyPercent(numerator, denominator, format="%5.2f", na="na"):
    """output a percent value or "na" if not defined"""
    try:
        x = format % (100.0 * numerator / denominator)
    except (__HOLE__, ZeroDivisionError):
        x = "na"
    return x
ValueError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/prettyPercent
2,991
def convertDictionary(d, map={}):
    """convert string values in a dictionary to numeric types.

    Arguments
    d : dict
        The dictionary to convert

    map : dict
        If map contains 'default', a default conversion is enforced.
        For example, to force int for every column but column ``id``,
        supply map = {'default' : "int", "id" : "str" }
    """
    rx_int = re.compile("^\s*[+-]*[0-9]+\s*$")
    rx_float = re.compile("^[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?$")

    # pre-process with 'default'
    if "default" in map:
        k = "default"
        if map[k] == "int":
            default = int
        elif map[k] == "float":
            default = float
        elif map[k] == "string":
            default = str
    else:
        default = False

    for k, vv in d.items():
        if vv is None:
            continue
        v = vv.strip()
        try:
            if k in map:
                if map[k] == "int":
                    d[k] = int(v)
                elif map[k] == "float":
                    d[k] = float(v)
                elif map[k] == "string":
                    pass
                continue
            elif default:
                if v != "":
                    d[k] = default(v)
                else:
                    d[k] = v
                continue
        except __HOLE__, msg:
            raise TypeError("conversion in field: %s, %s" % (k, msg))

        try:
            if rx_int.match(v):
                d[k] = int(v)
            elif rx_float.match(v):
                d[k] = float(v)
        except TypeError, msg:
            raise TypeError(
                "expected string or buffer: offending value = '%s' " %
                str(v))
        except ValueError, msg:
            raise ValueError("conversion error: %s, %s" % (msg, str(d)))
    return d
TypeError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/convertDictionary
2,992
def write(self, identifier, line):
    """write `line` to file specified by `identifier`"""
    filename = self.getFilename(identifier)

    if filename not in self.mFiles:
        if self.maxopen and len(self.mFiles) > self.maxopen:
            for f in self.mFiles.values():
                f.close()
            self.mFiles = {}

        self.mFiles[filename] = self.openFile(filename, "a")
        if self.mHeader:
            self.mFiles[filename].write(self.mHeader)

    try:
        self.mFiles[filename].write(line)
    except __HOLE__, msg:
        raise ValueError(
            "error while writing to %s: msg=%s" % (filename, msg))
    self.mCounts[filename] += 1
ValueError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/FilePool.write
2,993
def readList(infile,
             column=0,
             map_function=str,
             map_category={},
             with_title=False):
    """read a list of values from infile.

    Arguments
    ---------
    infile : File
        File object to read from
    columns : int
        Column to take from the file.
    map_function : function
        Function to convert the values in the rows to the desired
        object types such as int or float.
    map_category : dict
        When given, automatically transform/map the values given
        this dictionary.
    with_title : bool
        If true, first line of file is title and will be ignored.

    Returns
    -------
    list : list
        A list with the values.
    """
    m = []
    title = None
    for l in infile:
        if l[0] == "#":
            continue
        if with_title and not title:
            title = l[:-1].split("\t")[column]
            continue

        try:
            d = map_function(l[:-1].split("\t")[column])
        except __HOLE__:
            continue

        if map_category:
            d = map_category[d]
        m.append(d)

    return m
ValueError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/readList
2,994
def readMultiMap(infile,
                 columns=(0, 1),
                 map_functions=(str, str),
                 both_directions=False,
                 has_header=False,
                 dtype=dict):
    """read a map (pairs of values) from infile.

    In contrast to :func:`readMap`, this method permits multiple
    entries for the same key.

    Arguments
    ---------
    infile : File
        File object to read from
    columns : tuple
        Columns (A, B) to take from the file to create the mapping from
        A to B.
    map_functions : tuple
        Functions to convert the values in the rows to the desired
        object types such as int or float.
    both_directions : bool
        If true, both mapping directions are returned in a tuple, i.e.,
        A->B and B->A.
    has_header : bool
        If true, ignore first line with header.
    dtype : function
        datatype to use for the dictionaries.

    Returns
    -------
    map : dict
        A dictionary containing the mapping. If `both_directions` is
        true, two dictionaries will be returned.
    """
    m = dtype()
    r = dtype()
    n = 0
    for l in infile:
        if l[0] == "#":
            continue
        n += 1

        if has_header and n == 1:
            continue

        d = l[:-1].split("\t")
        try:
            key = map_functions[0](d[columns[0]])
            val = map_functions[1](d[columns[1]])
        except (ValueError, __HOLE__), msg:
            raise ValueError("parsing error in line %s: %s" % (l[:-1], msg))

        if key not in m:
            m[key] = []
        m[key].append(val)

        if val not in r:
            r[val] = []
        r[val].append(key)

    if both_directions:
        return m, r
    else:
        return m
IndexError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/readMultiMap
2,995
def readTable(file,
              separator="\t",
              numeric_type=numpy.float,
              take="all",
              headers=True,
              truncate=None,
              cumulate_out_of_range=True,
              ):
    """read a table of values.

    If cumulate_out_of_range is set to true, the terminal bins will
    contain the cumulative values of bins out of range.

    .. note::
        Deprecated
        use pandas dataframes instead
    """

    lines = filter(lambda x: x[0] != "#", file.readlines())

    if len(lines) == 0:
        return None, []

    if take == "all":
        num_cols = len(string.split(lines[0][:-1], "\t"))
        take = range(0, num_cols)
    else:
        num_cols = len(take)

    if headers:
        headers = lines[0][:-1].split("\t")
        headers = map(lambda x: headers[x], take)
        del lines[0]

    num_rows = len(lines)

    matrix = numpy.ma.masked_array(
        numpy.zeros((num_rows, num_cols), numeric_type))

    if truncate:
        min_row, max_row = truncate

    nrow = 0
    min_data = [0] * num_cols
    max_data = None

    for l in lines:
        data = l[:-1].split("\t")
        data = map(lambda x: data[x], take)

        # try conversion. Unparseable fields set to missing_value
        for x in range(len(data)):
            try:
                data[x] = float(data[x])
            except __HOLE__:
                data[x] = numpy.ma.masked

        if truncate is not None:
            if data[0] < min_row:
                if cumulate_out_of_range:
                    for x in range(1, num_cols):
                        min_data[x] += data[x]
                continue
            elif data[0] >= max_row:
                if max_data is None:
                    max_data = [0] * num_cols
                    max_data[0] = max_row
                for x in range(1, num_cols):
                    try:
                        max_data[x] += data[x]
                    except TypeError:
                        # missing values cause type errors
                        continue
                continue
            elif min_row is not None:
                if cumulate_out_of_range:
                    for x in range(0, num_cols):
                        try:
                            min_data[x] += data[x]
                        except TypeError:
                            # missing values cause type errors
                            continue
                else:
                    min_data = data
                data = min_data
                min_row = None

        # copy values into matrix
        # this is a bit clumsy, but missing values
        # cause an error otherwise
        for x in range(len(data)):
            matrix[nrow, x] = data[x]

        nrow += 1

    if truncate is not None:
        if cumulate_out_of_range:
            if max_data is not None:
                matrix[nrow] = max_data

        # truncate matrix
        matrix = matrix[0:nrow + 1, 0:num_cols]

    return matrix, headers
ValueError
dataset/ETHPy150Open CGATOxford/cgat/CGAT/IOTools.py/readTable
2,996
def __init__(self, game_json, appid):
    """
    This sets member variables for the various values that the game
    object should have. Not all of these exist on all appids, so
    there's some defaults whenever there is a key error.

    TODO: This is so awful. Rewrite this whole ugly method into
    smaller ones.
    """
    self.appid = appid
    if 'success' in game_json:
        self.success = game_json['success']
        if self.success:
            self.store_url = self._store_url(self.appid)
            data = game_json['data']
            self.raw_json = data
            self.type = data['type']
            self.descriptidataon = data['detailed_description']

            # Some appids don't have names
            try:
                self.name = data['name']
            except KeyError:
                self.name = "No Name"

            try:
                self.supported_languages = data['supported_languages']
            except __HOLE__:
                self.supported_languages = None

            self.header_image = "http://cdn.steampowered.com/v/gfx/apps/{}/capsule_184x69.jpg".format(self.appid)
            self.website = data['website']

            # If any of these don't exit all of them don't exist,
            # which is why I think it's okay to wrap them all in one try/except.
            try:
                self.currency = data['price_overview']['currency']
                self.price = self._calc_price(data['price_overview']['initial'])
                self.discounted_price = self._calc_price(data['price_overview']['final'])
                self.discount_percent = data['price_overview']['discount_percent']
            except KeyError:
                self.currency = None
                self.price = 0
                self.discounted_price = 0
                self.discount_percent = 0

            try:
                self.packages = data['packages']
            except KeyError:
                self.packages = None

            self.platforms = data['platforms']

            try:
                self.categories = data['categories']
            except KeyError:
                self.categories = None
        else:
            print "Error! Can't read the game info for {}".format(appid)
KeyError
dataset/ETHPy150Open naiyt/steamapiwrapper/steamapiwrapper/SteamGames.py/Game.__init__
2,997
@protocol.commands.add('count')
def count(context, *args):
    """
    *musicpd.org, music database section:*

        ``count {TAG} {NEEDLE}``

        Counts the number of songs and their total playtime in the db
        matching ``TAG`` exactly.

    *GMPC:*

    - use multiple tag-needle pairs to make more specific searches.
    """
    try:
        query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
    except __HOLE__:
        raise exceptions.MpdArgError('incorrect arguments')
    results = context.core.library.search(query=query, exact=True).get()
    result_tracks = _get_tracks(results)
    return [
        ('songs', len(result_tracks)),
        ('playtime', sum(t.length for t in result_tracks if t.length) / 1000),
    ]
ValueError
dataset/ETHPy150Open mopidy/mopidy/mopidy/mpd/protocol/music_db.py/count
2,998
@protocol.commands.add('find')
def find(context, *args):
    """
    *musicpd.org, music database section:*

        ``find {TYPE} {WHAT}``

        Finds songs in the db that are exactly ``WHAT``. ``TYPE`` can be
        any tag supported by MPD, or one of the two special parameters -
        ``file`` to search by full path (relative to database root), and
        ``any`` to match against all available tags. ``WHAT`` is what to
        find.

    *GMPC:*

    - also uses ``find album "[ALBUM]" artist "[ARTIST]"`` to list album
      tracks.

    *ncmpc:*

    - capitalizes the type argument.

    *ncmpcpp:*

    - also uses the search type "date".
    - uses "file" instead of "filename".
    """
    try:
        query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
    except __HOLE__:
        return

    with deprecation.ignore('core.library.search:empty_query'):
        results = context.core.library.search(query=query, exact=True).get()

    result_tracks = []
    if ('artist' not in query and
            'albumartist' not in query and
            'composer' not in query and
            'performer' not in query):
        result_tracks += [_artist_as_track(a) for a in _get_artists(results)]
    if 'album' not in query:
        result_tracks += [_album_as_track(a) for a in _get_albums(results)]
    result_tracks += _get_tracks(results)
    return translator.tracks_to_mpd_format(result_tracks)
ValueError
dataset/ETHPy150Open mopidy/mopidy/mopidy/mpd/protocol/music_db.py/find
2,999
@protocol.commands.add('findadd')
def findadd(context, *args):
    """
    *musicpd.org, music database section:*

        ``findadd {TYPE} {WHAT}``

        Finds songs in the db that are exactly ``WHAT`` and adds them to
        current playlist. Parameters have the same meaning as for
        ``find``.
    """
    try:
        query = _query_from_mpd_search_parameters(args, _SEARCH_MAPPING)
    except __HOLE__:
        return
    results = context.core.library.search(query=query, exact=True).get()

    with deprecation.ignore('core.tracklist.add:tracks_arg'):
        # TODO: for now just use tracks as other wise we have to lookup the
        # tracks we just got from the search.
        context.core.tracklist.add(tracks=_get_tracks(results)).get()
ValueError
dataset/ETHPy150Open mopidy/mopidy/mopidy/mpd/protocol/music_db.py/findadd