Columns:

    Unnamed: 0   int64    values 0 to ~10k
    function     string   lengths 79 to ~138k
    label        string   20 classes
    info         string   lengths 42 to 261
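Each row below carries four fields: a row number, a Python function with one exception name masked as __HOLE__, a label naming the masked exception, and an info path into the ETHPy150Open corpus. A minimal sketch of inspecting one row, assuming the dump has been exported to a pandas-readable CSV; the file name "data.csv" is hypothetical, while the column names come from the schema above:

import pandas as pd

df = pd.read_csv("data.csv")  # hypothetical export of this dataset
row = df.iloc[0]
print(row["function"])  # source code with the exception masked as __HOLE__
print(row["label"])     # the masked exception class, e.g. ValueError
print(row["info"])      # origin path within the ETHPy150Open corpus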
2,700
def try_convert_to_date(self, word):
    """
    Tries to convert word to date(datetime) using search_date_formats
    Return None if word fits no one format
    """
    for frm in self.search_date_formats:
        try:
            return datetime.datetime.strptime(word, frm).date()
        except __HOLE__:
            pass
    return None
ValueError
dataset/ETHPy150Open AndrewIngram/django-extra-views/extra_views/contrib/mixins.py/SearchableListMixin.try_convert_to_date
2,701
def post(self):
    body = self.request.body_file.read()
    stream = None
    timezone_offset = self._get_timezone_offset()

    # Decode the request
    try:
        request = remoting.decode(body, strict=self.strict,
                                  logger=self.logger,
                                  timezone_offset=timezone_offset)
    except (DecodeError, IOError):
        if self.logger:
            self.logger.exception('Error decoding AMF request')

        response = ("400 Bad Request\n\nThe request body was unable to "
                    "be successfully decoded.")

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        self.error(400)
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.headers['Server'] = gateway.SERVER_NAME
        self.response.out.write(response)

        return
    except (__HOLE__, SystemExit):
        raise
    except:
        if self.logger:
            self.logger.exception('Unexpected error decoding AMF request')

        response = ('500 Internal Server Error\n\n'
                    'An unexpected error occurred.')

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        self.error(500)
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.headers['Server'] = gateway.SERVER_NAME
        self.response.out.write(response)

        return

    if self.logger:
        self.logger.debug("AMF Request: %r" % request)

    # Process the request
    try:
        response = self.getResponse(request)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        if self.logger:
            self.logger.exception('Error processing AMF request')

        response = ("500 Internal Server Error\n\nThe request was "
                    "unable to be successfully processed.")

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        self.error(500)
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.headers['Server'] = gateway.SERVER_NAME
        self.response.out.write(response)

        return

    if self.logger:
        self.logger.debug("AMF Response: %r" % response)

    # Encode the response
    try:
        stream = remoting.encode(response, strict=self.strict,
                                 logger=self.logger,
                                 timezone_offset=timezone_offset)
    except:
        if self.logger:
            self.logger.exception('Error encoding AMF request')

        response = ("500 Internal Server Error\n\nThe request was "
                    "unable to be encoded.")

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        self.error(500)
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.headers['Server'] = gateway.SERVER_NAME
        self.response.out.write(response)

        return

    response = stream.getvalue()

    self.response.headers['Content-Type'] = remoting.CONTENT_TYPE
    self.response.headers['Content-Length'] = str(len(response))
    self.response.headers['Server'] = gateway.SERVER_NAME
    self.response.out.write(response)
KeyboardInterrupt
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/remoting/gateway/google.py/WebAppGateway.post
2,702
def main():
    usage = "usage: follow.py <search>"

    opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage)
    if len(opts.args) != 1:
        utils.error("Search expression required", 2)
    search = opts.args[0]

    service = client.connect(**opts.kwargs)

    job = service.jobs.create(
        search,
        earliest_time="rt",
        latest_time="rt",
        search_mode="realtime")

    # Wait for the job to transition out of QUEUED and PARSING so that
    # we can tell if it's a transforming search, or not.
    while True:
        job.refresh()
        if job['dispatchState'] not in ['QUEUED', 'PARSING']:
            break
        time.sleep(2)  # Wait

    if job['reportSearch'] is not None:  # Is it a transforming search?
        count = lambda: int(job['numPreviews'])
        items = lambda _: job.preview()
    else:
        count = lambda: int(job['eventCount'])
        items = lambda offset: job.events(offset=offset)

    try:
        follow(job, count, items)
    except __HOLE__:
        print "\nInterrupted."
    finally:
        job.cancel()
KeyboardInterrupt
dataset/ETHPy150Open splunk/splunk-sdk-python/examples/follow.py/main
2,703
def _get_video_id(self, video_url):
    match = re.findall("watch.*[\?|&]v=([\dA-Za-z_\-]+)", video_url)
    try:
        nid_as_youtube_url = match[0]
    except __HOLE__:
        nid_as_youtube_url = None
        logging.error(u"couldn't get video_id for {video_url}".format(
            video_url=video_url))
    return nid_as_youtube_url

# override because need to break up id
IndexError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/youtube.py/Youtube._get_video_id
2,704
def _extract_biblio(self, page, id=None):
    if not "snippet" in page:
        raise ProviderContentMalformedError

    json_response = provider._load_json(page)
    this_video_json = json_response["items"][0]

    dict_of_keylists = {
        'title': ['snippet', 'title'],
        'channel_title': ['snippet', 'channelTitle'],
        'published_date': ['snippet', 'publishedAt']
    }
    biblio_dict = provider._extract_from_data_dict(this_video_json, dict_of_keylists)

    try:
        biblio_dict["year"] = biblio_dict["published_date"][0:4]
    except __HOLE__:
        pass

    biblio_dict["url"] = id
    biblio_dict["repository"] = "YouTube"

    return biblio_dict
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/youtube.py/Youtube._extract_biblio
2,705
def is_number(s, cast=float):
    """
    Check if a string is a number. Use cast=int to check if s is an integer.
    """
    try:
        cast(s)  # for int, long and float
    except __HOLE__:
        return False
    return True
ValueError
dataset/ETHPy150Open tanghaibao/jcvi/formats/base.py/is_number
2,706
def start(self, *pages):
    """ Takes a list of Page instances, creates html files,
        and starts the server. """
    # Make sure at least one page has been given:
    if not(pages):
        print "*Can't start server - no pages provided."
        return
    # Make sure pages/ directory exists:
    if not(os.path.exists(PAGES_DIR)):
        os.system("mkdir %s" % PAGES_DIR)
    # Remove old pages if any:
    if (os.listdir(PAGES_DIR)):
        os.system("rm %s/*" % PAGES_DIR)

    # We treat the first page passed in as the home page and create
    # an index.html page in the base directory that redirects to it:
    home = pages[0]
    with open(INDEX, 'w') as index:
        with open(INDEX_TEMPLATE, 'r') as index_template:
            index.write(index_template.read() % home.filename)

    # Generate a list of links for the sidebar:
    links = ''
    for page in pages:
        links += '<li><a href="%s">%s</a></li>\n' % (page.filename, page.title)

    # Add sidebar to each page and write them to files:
    for page in pages:
        path = "%s/%s" % (PAGES_DIR, page.filename)
        with open(path, 'w') as f:
            f.write(str(page) % links)

    # Start server in a daemon thread:
    server_thread = threading.Thread(target=self._server.serve_forever)
    server_thread.daemon = True
    server_thread.start()

    if (self.blocking):
        try:
            while(True):
                delay(10000)
        except __HOLE__:
            pass
KeyboardInterrupt
dataset/ETHPy150Open graycatlabs/PyBBIO/bbio/libraries/BBIOServer/bbio_server.py/BBIOServer.start
2,707
def kv_create(request, kv_class, obj_pk):
    """
    POST to:
        /core/keyvalue/api/<kv_class>/create/

    with parameters like:
        {
            'key': 'key_string'
            'value': 'value_string'
            'obj_pk': 1
        }

    Status Codes:
        * 201 - Object created
        * 400 - Issues during creation
    """
    key, value, errors = get_kv(request)
    if errors:
        return HttpResponse(
            status=400,
            content=json.dumps(
                {'success': False, 'message': errors}
            )
        )
    obj, KVKlass = resolve_obj(kv_class, obj_pk)
    try:
        kv = KVKlass(obj=obj, key=key, value=value)
        kv.clean()
        kv.save()
        resp = {
            'status': 201,
            'content': json.dumps(
                {
                    'success': True,
                    'key': kv.key,
                    'value': kv.value,
                    'obj_uri': kv.uri,
                    'kv_pk': kv.pk
                }
            )
        }
    except __HOLE__, e:
        resp = {
            'status': 400,
            'content': json.dumps({'success': False, 'message': str(e)})
        }
    return HttpResponse(**resp)
ValidationError
dataset/ETHPy150Open mozilla/inventory/core/keyvalue/api.py/kv_create
2,708
def kv_update(request, kv_class, kv_pk):
    """
    POST to:
        /core/keyvalue/api/<kv_class>/<kv_pk>/update/

    with parameters like:
        {
            'key': 'key_string'
            'value': 'value_string'
            'obj_pk': 1
        }

    Status Codes:
        * 200 - Object updated
        * 400 - Issues during update
    """
    key, value, errors = get_kv(request)
    if errors:
        return HttpResponse(
            status=400,
            content=json.dumps(
                {'success': False, 'message': errors}
            )
        )
    Klass = resolve_class(kv_class)
    KVKlass = Klass.keyvalue_set.related.model
    try:
        kv = KVKlass.objects.get(pk=kv_pk)
    except KVKlass.DoesNotExist:
        return HttpResponse(
            status=404,
            content=json.dumps({'success': False})
        )
    try:
        kv.key = key
        kv.value = value
        kv.clean()
        kv.save()
        resp = {
            'status': 200,
            'content': json.dumps(
                {'success': True, 'key': kv.key, 'value': kv.value}
            )
        }
    except __HOLE__, e:
        resp = {
            'status': 400,
            'content': json.dumps({'success': False, 'message': str(e)})
        }
    return HttpResponse(**resp)
ValidationError
dataset/ETHPy150Open mozilla/inventory/core/keyvalue/api.py/kv_update
2,709
def read( fname ):
    try:
        return open( os.path.join( os.path.dirname( __file__ ), fname ) ).read()
    except __HOLE__:
        return ''
IOError
dataset/ETHPy150Open ilblackdragon/django-blogs/setup.py/read
2,710
def _run(self):
    if not self._running:
        return

    next_call = None

    try:
        next_call = self.callback()
    except (KeyboardInterrupt, __HOLE__):
        raise
    except:
        logging.error("Error in periodic callback", exc_info=True)

    if self._running:
        self.start(next_call)
SystemExit
dataset/ETHPy150Open mrjoes/tornadio/tornadio/periodic.py/Callback._run
2,711
def reduce(func, iterable, initializer=None):
    args = iter(iterable)
    if initializer is not None:
        res = initializer
    else:
        res = next(args)
    while True:
        try:
            res = func(res, next(args))
        except __HOLE__:
            return res
StopIteration
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/_functools.py/reduce
2,712
def __getattr__(self, key):
    """Python only calls this when key is missing!"""
    try:
        return self.__getitem__(key)
    except __HOLE__:
        raise AttributeError(key)
KeyError
dataset/ETHPy150Open plotly/plotly.py/plotly/graph_objs/graph_objs.py/PlotlyDict.__getattr__
2,713
def force_clean(self, **kwargs):
    """Recursively remove empty/None values."""
    keys = list(self.keys())
    for key in keys:
        try:
            self[key].force_clean()
        except __HOLE__:
            pass
        if isinstance(self[key], (dict, list)):
            if len(self[key]) == 0:
                del self[key]  # clears empty collections!
        elif self[key] is None:
            del self[key]
AttributeError
dataset/ETHPy150Open plotly/plotly.py/plotly/graph_objs/graph_objs.py/PlotlyDict.force_clean
2,714
def _patch_figure_class(figure_class):

    def __init__(self, *args, **kwargs):
        super(figure_class, self).__init__(*args, **kwargs)
        if 'data' not in self:
            self.data = GraphObjectFactory.create('data', _parent=self,
                                                  _parent_key='data')
    figure_class.__init__ = __init__

    def get_data(self, flatten=False):
        """
        Returns the JSON for the plot with non-data elements stripped.

        Flattening may increase the utility of the result.

        :param (bool) flatten: {'a': {'b': ''}} --> {'a.b': ''}
        :returns: (dict|list) Depending on (flat|unflat)
        """
        return self.data.get_data(flatten=flatten)
    figure_class.get_data = get_data

    def to_dataframe(self):
        """
        Create a pandas dataframe with trace names and keys as column names.

        :return: (DataFrame)
        """
        data = self.get_data(flatten=True)
        from pandas import DataFrame, Series
        return DataFrame(dict([(k, Series(v)) for k, v in data.items()]))
    figure_class.to_dataframe = to_dataframe

    def print_grid(self):
        """
        Print a visual layout of the figure's axes arrangement.

        This is only valid for figures that are created with
        plotly.tools.make_subplots.
        """
        try:
            grid_str = self.__dict__['_grid_str']
        except AttributeError:
            raise Exception("Use plotly.tools.make_subplots "
                            "to create a subplot grid.")
        print(grid_str)
    figure_class.print_grid = print_grid

    def append_trace(self, trace, row, col):
        """
        Add a data traces to your figure bound to axes at the row, col index.

        The row, col index is generated from figures created with
        plotly.tools.make_subplots and can be viewed with Figure.print_grid.

        :param (dict) trace: The data trace to be bound.
        :param (int) row: Subplot row index (see Figure.print_grid).
        :param (int) col: Subplot column index (see Figure.print_grid).

        Example:
        # stack two subplots vertically
        fig = tools.make_subplots(rows=2)

        This is the format of your plot grid:
        [ (1,1) x1,y1 ]
        [ (2,1) x2,y2 ]

        fig.append_trace(Scatter(x=[1,2,3], y=[2,1,2]), 1, 1)
        fig.append_trace(Scatter(x=[1,2,3], y=[2,1,2]), 2, 1)
        """
        try:
            grid_ref = self._grid_ref
        except __HOLE__:
            raise Exception("In order to use Figure.append_trace, "
                            "you must first use plotly.tools.make_subplots "
                            "to create a subplot grid.")
        if row <= 0:
            raise Exception("Row value is out of range. "
                            "Note: the starting cell is (1, 1)")
        if col <= 0:
            raise Exception("Col value is out of range. "
                            "Note: the starting cell is (1, 1)")
        try:
            ref = grid_ref[row-1][col-1]
        except IndexError:
            raise Exception("The (row, col) pair sent is out of range. "
                            "Use Figure.print_grid to view the subplot grid. ")
        if 'scene' in ref[0]:
            trace['scene'] = ref[0]
            if ref[0] not in self['layout']:
                raise Exception("Something went wrong. "
                                "The scene object for ({r},{c}) subplot cell "
                                "got deleted.".format(r=row, c=col))
        else:
            xaxis_key = "xaxis{ref}".format(ref=ref[0][1:])
            yaxis_key = "yaxis{ref}".format(ref=ref[1][1:])
            if (xaxis_key not in self['layout']
                    or yaxis_key not in self['layout']):
                raise Exception("Something went wrong. "
                                "An axis object for ({r},{c}) subplot cell "
                                "got deleted.".format(r=row, c=col))
            trace['xaxis'] = ref[0]
            trace['yaxis'] = ref[1]
        self['data'] += [trace]
    figure_class.append_trace = append_trace
AttributeError
dataset/ETHPy150Open plotly/plotly.py/plotly/graph_objs/graph_objs.py/_patch_figure_class
2,715
def openpty():
    """openpty() -> (master_fd, slave_fd)
    Open a pty master/slave pair, using os.openpty() if possible."""

    try:
        return os.openpty()
    except (__HOLE__, OSError):
        pass
    master_fd, slave_name = _open_terminal()
    slave_fd = slave_open(slave_name)
    return master_fd, slave_fd
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pty.py/openpty
2,716
def master_open():
    """master_open() -> (master_fd, slave_name)
    Open a pty master and return the fd, and the filename of the slave end.
    Deprecated, use openpty() instead."""

    try:
        master_fd, slave_fd = os.openpty()
    except (AttributeError, __HOLE__):
        pass
    else:
        slave_name = os.ttyname(slave_fd)
        os.close(slave_fd)
        return master_fd, slave_name

    return _open_terminal()
OSError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pty.py/master_open
2,717
def _open_terminal():
    """Open pty master and return (master_fd, tty_name).
    SGI and generic BSD version, for when openpty() fails."""
    try:
        import sgi
    except ImportError:
        pass
    else:
        try:
            tty_name, master_fd = sgi._getpty(os.O_RDWR, 0666, 0)
        except __HOLE__, msg:
            raise os.error, msg
        return master_fd, tty_name
    for x in 'pqrstuvwxyzPQRST':
        for y in '0123456789abcdef':
            pty_name = '/dev/pty' + x + y
            try:
                fd = os.open(pty_name, os.O_RDWR)
            except os.error:
                continue
            return (fd, '/dev/tty' + x + y)
    raise os.error, 'out of pty devices'
IOError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pty.py/_open_terminal
2,718
def slave_open(tty_name):
    """slave_open(tty_name) -> slave_fd
    Open the pty slave and acquire the controlling terminal, returning
    opened filedescriptor.
    Deprecated, use openpty() instead."""

    result = os.open(tty_name, os.O_RDWR)
    try:
        from fcntl import ioctl, I_PUSH
    except __HOLE__:
        return result
    try:
        ioctl(result, I_PUSH, "ptem")
        ioctl(result, I_PUSH, "ldterm")
    except IOError:
        pass
    return result
ImportError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pty.py/slave_open
2,719
def fork():
    """fork() -> (pid, master_fd)
    Fork and make the child a session leader with a controlling terminal."""

    try:
        pid, fd = os.forkpty()
    except (__HOLE__, OSError):
        pass
    else:
        if pid == CHILD:
            try:
                os.setsid()
            except OSError:
                # os.forkpty() already set us session leader
                pass
        return pid, fd

    master_fd, slave_fd = openpty()
    pid = os.fork()
    if pid == CHILD:
        # Establish a new session.
        os.setsid()
        os.close(master_fd)

        # Slave becomes stdin/stdout/stderr of child.
        os.dup2(slave_fd, STDIN_FILENO)
        os.dup2(slave_fd, STDOUT_FILENO)
        os.dup2(slave_fd, STDERR_FILENO)
        if (slave_fd > STDERR_FILENO):
            os.close (slave_fd)

        # Explicitly open the tty to make it become a controlling tty.
        tmp_fd = os.open(os.ttyname(STDOUT_FILENO), os.O_RDWR)
        os.close(tmp_fd)
    else:
        os.close(slave_fd)

    # Parent and child process.
    return pid, master_fd
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pty.py/fork
2,720
def spawn(argv, master_read=_read, stdin_read=_read):
    """Create a spawned process."""
    if type(argv) == type(''):
        argv = (argv,)
    pid, master_fd = fork()
    if pid == CHILD:
        os.execlp(argv[0], *argv)
    try:
        mode = tty.tcgetattr(STDIN_FILENO)
        tty.setraw(STDIN_FILENO)
        restore = 1
    except tty.error:    # This is the same as termios.error
        restore = 0
    try:
        _copy(master_fd, master_read, stdin_read)
    except (IOError, __HOLE__):
        if restore:
            tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)

    os.close(master_fd)
OSError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/pty.py/spawn
2,721
@contextlib.contextmanager
def tempdir(**kwargs):
    argdict = kwargs.copy()
    if 'dir' not in argdict:
        argdict['dir'] = CONF.tempdir
    tmpdir = tempfile.mkdtemp(**argdict)
    try:
        yield tmpdir
    finally:
        try:
            shutil.rmtree(tmpdir)
        except __HOLE__ as e:
            LOG.error(_('Could not remove tmpdir: %s'), str(e))
OSError
dataset/ETHPy150Open openstack/ec2-api/ec2api/utils.py/tempdir
2,722
def check_ceph_df():
    'Program entry point.'
    try:
        res = subprocess.check_output(["ceph", "df", "--format=json"],
                                      stderr=subprocess.STDOUT)
        exit_code, message = interpret_output_df(res)
        sys.stdout.write("%s\n" % message)
        sys.exit(exit_code)
    except subprocess.CalledProcessError as e:
        sys.stdout.write('CEPH UNKNOWN: %s\n' % e.output)
        sys.exit(3)
    except __HOLE__:
        sys.stdout.write('CEPH UNKNOWN: unable to launch ceph health\n')
        sys.exit(3)
OSError
dataset/ETHPy150Open openstack/monitoring-for-openstack/oschecks/ceph.py/check_ceph_df
2,723
def check_ceph_health():
    'Program entry point.'
    try:
        res = subprocess.check_output(["ceph", "health"],
                                      stderr=subprocess.STDOUT)
        exit_code, message = interpret_output_health(res)
        sys.stdout.write(message)
        sys.exit(exit_code)
    except subprocess.CalledProcessError as e:
        sys.stdout.write('CEPH UNKNOWN: %s\n' % e.output)
        sys.exit(3)
    except __HOLE__:
        sys.stdout.write('CEPH UNKNOWN: unable to launch ceph health\n')
        sys.exit(3)
OSError
dataset/ETHPy150Open openstack/monitoring-for-openstack/oschecks/ceph.py/check_ceph_health
2,724
def poll():
    """ Callback function that polls for new tasks based on a schedule. """
    deployment_id = helper.get_deployment_id()
    # If the deployment is not registered, skip.
    if not deployment_id:
        return

    # If we can't reach the backup and recovery services, skip.
    nodes = helper.get_node_info()
    http_client = tornado.httpclient.HTTPClient()
    for node in nodes:
        br_host = node[helper.NodeInfoTags.HOST]
        request = tornado.httpclient.HTTPRequest(br_host)
        try:
            response = http_client.fetch(request)
            if json.loads(response.body)['status'] != 'up':
                logging.warn('Backup and Recovery service at {} is not up.'
                             .format(br_host))
                return
        except (socket.error, __HOLE__):
            logging.exception('Backup and Recovery service at {} is not up.'
                              .format(br_host))
            return

    logging.info("Polling for new task.")

    # Send request to AppScale Portal.
    url = "{0}{1}".format(hermes_constants.PORTAL_URL,
                          hermes_constants.PORTAL_POLL_PATH)
    data = urllib.urlencode({JSONTags.DEPLOYMENT_ID: deployment_id})
    request = helper.create_request(url=url, method='POST', body=data)
    response = helper.urlfetch(request)

    if not response[JSONTags.SUCCESS]:
        logging.error("Inaccessible resource: {}".format(url))
        return

    try:
        data = json.loads(response[JSONTags.BODY])
    except (TypeError, ValueError) as error:
        logging.error("Cannot parse response from url '{0}'. Error: {1}".
                      format(url, str(error)))
        return

    if data == {}:  # If there's no task to perform.
        return

    # Verify all necessary fields are present in the request.
    if not set(data.keys()).issuperset(set(hermes_constants.REQUIRED_KEYS)):
        logging.error("Missing args in response: {0}".format(response))
        return

    logging.debug("Task to run: {0}".format(data))
    logging.info("Redirecting task request to TaskHandler.")
    url = "{0}{1}".format(hermes_constants.HERMES_URL, TaskHandler.PATH)
    request = helper.create_request(url, method='POST', body=json.dumps(data))

    # The poller can move forward without waiting for a response here.
    helper.urlfetch_async(request)
ValueError
dataset/ETHPy150Open AppScale/appscale/Hermes/hermes.py/poll
2,725
def ArrayOf(klass):
    """Function to return a class that can encode and decode a list of
    some other type."""
    global _array_of_map
    global _array_of_classes, _sequence_of_classes

    # if this has already been built, return the cached one
    if klass in _array_of_map:
        return _array_of_map[klass]

    # no ArrayOf(ArrayOf(...)) allowed
    if klass in _array_of_classes:
        raise TypeError("nested arrays disallowed")
    # no ArrayOf(SequenceOf(...)) allowed
    if klass in _sequence_of_classes:
        raise TypeError("arrays of SequenceOf disallowed")

    # define a generic class for arrays
    @bacpypes_debugging
    class ArrayOf(Array):

        subtype = None

        def __init__(self, value=None):
            if value is None:
                self.value = [0]
            elif isinstance(value, list):
                self.value = [len(value)]
                self.value.extend(value)
            else:
                raise TypeError("invalid constructor datatype")

        def append(self, value):
            if issubclass(self.subtype, Atomic):
                pass
            elif issubclass(self.subtype, AnyAtomic) and not isinstance(value, Atomic):
                raise TypeError("instance of an atomic type required")
            elif not isinstance(value, self.subtype):
                raise TypeError("%s value required" % (self.subtype.__name__,))
            self.value.append(value)
            self.value[0] = len(self.value) - 1

        def __len__(self):
            return self.value[0]

        def __getitem__(self, item):
            # no wrapping index
            if (item < 0) or (item > self.value[0]):
                raise IndexError("index out of range")

            return self.value[item]

        def __setitem__(self, item, value):
            # no wrapping index
            if (item < 1) or (item > self.value[0]):
                raise IndexError("index out of range")

            # special length handling for index 0
            if item == 0:
                if value < self.value[0]:
                    # trim
                    self.value = self.value[0:value + 1]
                elif value > self.value[0]:
                    # extend
                    self.value.extend( [None] * (value - self.value[0]) )
                else:
                    return
                self.value[0] = value
            else:
                self.value[item] = value

        def __delitem__(self, item):
            # no wrapping index
            if (item < 1) or (item > self.value[0]):
                raise IndexError("index out of range")

            # delete the item and update the length
            del self.value[item]
            self.value[0] -= 1

        def index(self, value):
            # only search through values
            for i in range(1, self.value[0] + 1):
                if value == self.value[i]:
                    return i

            # not found
            raise ValueError("%r not in array" % (value,))

        def encode(self, taglist):
            if _debug: ArrayOf._debug("(%r)encode %r", self.__class__.__name__, taglist)

            for value in self.value[1:]:
                if issubclass(self.subtype, (Atomic, AnyAtomic)):
                    # a helper cooperates between the atomic value and the tag
                    helper = self.subtype(value)

                    # build a tag and encode the data into it
                    tag = Tag()
                    helper.encode(tag)

                    # now encode the tag
                    taglist.append(tag)
                elif isinstance(value, self.subtype):
                    # it must have its own encoder
                    value.encode(taglist)
                else:
                    raise TypeError("%s must be a %s" % (value, self.subtype.__name__))

        def decode(self, taglist):
            if _debug: ArrayOf._debug("(%r)decode %r", self.__class__.__name__, taglist)

            # start with an empty array
            self.value = [0]

            while len(taglist) != 0:
                tag = taglist.Peek()
                if tag.tagClass == Tag.closingTagClass:
                    break

                if issubclass(self.subtype, (Atomic, AnyAtomic)):
                    if _debug: ArrayOf._debug(" - building helper: %r %r", self.subtype, tag)
                    taglist.Pop()

                    # a helper cooperates between the atomic value and the tag
                    helper = self.subtype(tag)

                    # save the value
                    self.value.append(helper.value)
                else:
                    if _debug: ArrayOf._debug(" - building value: %r", self.subtype)
                    # build an element
                    value = self.subtype()

                    # let it decode itself
                    value.decode(taglist)

                    # save what was built
                    self.value.append(value)

            # update the length
            self.value[0] = len(self.value) - 1

        def encode_item(self, item, taglist):
            if _debug: ArrayOf._debug("(%r)encode_item %r %r", self.__class__.__name__, item, taglist)

            if item == 0:
                # a helper cooperates between the atomic value and the tag
                helper = Unsigned(self.value[0])

                # build a tag and encode the data into it
                tag = Tag()
                helper.encode(tag)

                # now encode the tag
                taglist.append(tag)
            else:
                value = self.value[item]

                if issubclass(self.subtype, (Atomic, AnyAtomic)):
                    # a helper cooperates between the atomic value and the tag
                    helper = self.subtype(self.value[item])

                    # build a tag and encode the data into it
                    tag = Tag()
                    helper.encode(tag)

                    # now encode the tag
                    taglist.append(tag)
                elif isinstance(value, self.subtype):
                    # it must have its own encoder
                    value.encode(taglist)
                else:
                    raise TypeError("%s must be a %s" % (value, self.subtype.__name__))

        def decode_item(self, item, taglist):
            if _debug: ArrayOf._debug("(%r)decode_item %r %r", self.__class__.__name__, item, taglist)

            if item == 0:
                # a helper cooperates between the atomic value and the tag
                helper = Unsigned(taglist.Pop())

                # save the value
                self.value = helper.value
            elif issubclass(self.subtype, (Atomic, AnyAtomic)):
                if _debug: ArrayOf._debug(" - building helper: %r", self.subtype)

                # a helper cooperates between the atomic value and the tag
                helper = self.subtype(taglist.Pop())

                # save the value
                self.value = helper.value
            else:
                if _debug: ArrayOf._debug(" - building value: %r", self.subtype)
                # build an element
                value = self.subtype()

                # let it decode itself
                value.decode(taglist)

                # save what was built
                self.value = value

        def debug_contents(self, indent=1, file=sys.stdout, _ids=None):
            try:
                value_list = enumerate(self.value)
            except __HOLE__:
                file.write("%s(non-sequence) %r\n" % (" " * indent, self.value))
                return

            for i, value in value_list:
                if i == 0:
                    file.write("%slength = %d\n" % (" " * indent, value))
                elif issubclass(self.subtype, (Atomic, AnyAtomic)):
                    file.write("%s[%d] = %r\n" % (" " * indent, i, value))
                elif isinstance(value, self.subtype):
                    file.write("%s[%d]\n" % (" " * indent, i))
                    value.debug_contents(indent+1, file, _ids)
                else:
                    file.write("%s%s must be a %s" % (" " * indent, value, self.subtype.__name__))

        def dict_contents(self, use_dict=None, as_class=dict):
            # return arrays as arrays
            mapped_value = []

            for value in self.value:
                if issubclass(self.subtype, Atomic):
                    mapped_value.append(value)        ### ambiguous
                elif issubclass(self.subtype, AnyAtomic):
                    mapped_value.append(value.value)  ### ambiguous
                elif isinstance(value, self.subtype):
                    mapped_value.append(value.dict_contents(as_class=as_class))

            # return what we built
            return mapped_value

    # constrain it to a list of a specific type of item
    setattr(ArrayOf, 'subtype', klass)
    ArrayOf.__name__ = 'ArrayOf' + klass.__name__

    # cache this type
    _array_of_map[klass] = ArrayOf
    _array_of_classes[ArrayOf] = 1

    # return this new type
    return ArrayOf

#
#   Choice
#
TypeError
dataset/ETHPy150Open JoelBender/bacpypes/py27/bacpypes/constructeddata.py/ArrayOf
2,726
def decode_base58(bc, length):
    n = 0
    for char in bc:
        n = n * 58 + DIGITS58.index(char)
    try:
        return n.to_bytes(length, 'big')
    except __HOLE__:
        return _long_to_bytes(n, length, 'big')
AttributeError
dataset/ETHPy150Open blockcypher/blockcypher-python/blockcypher/utils.py/decode_base58
2,727
def instance(self, uri, cls=None, default=None, **kwargs):
    instance = self._instances.get(uri, None)
    if instance is None:
        if cls is None:
            try:
                cls = self._resources[uri[:uri.rfind('/')]]
            except __HOLE__:
                cls = Reference

        if isinstance(default, Resource) and default.uri is None:
            default._status = 200
            default._uri = uri
            instance = default
        else:
            instance = cls(uri=uri, **kwargs)
        self._instances[uri] = instance
    return instance
KeyError
dataset/ETHPy150Open biosustain/potion-client/potion_client/__init__.py/Client.instance
2,728
def create_test_zipline(**config):
    """
    :param config: A configuration object that is a dict with:

        - sid - an integer, which will be used as the asset ID.
        - order_count - the number of orders the test algo will place,
          defaults to 100
        - order_amount - the number of shares per order, defaults to 100
        - trade_count - the number of trades to simulate, defaults to 101
          to ensure all orders are processed.
        - algorithm - optional parameter providing an algorithm. defaults
          to :py:class:`zipline.test.algorithms.TestAlgorithm`
        - trade_source - optional parameter to specify trades, if present.
          If not present :py:class:`zipline.sources.SpecificEquityTrades`
          is the source, with daily frequency in trades.
        - slippage: optional parameter that configures the
          :py:class:`zipline.gens.tradingsimulation.TransactionSimulator`.
          Expects an object with a simulate method, such as
          :py:class:`zipline.gens.tradingsimulation.FixedSlippage`.
          :py:mod:`zipline.finance.trading`
    """
    assert isinstance(config, dict)

    try:
        sid_list = config['sid_list']
    except __HOLE__:
        try:
            sid_list = [config['sid']]
        except KeyError:
            raise Exception("simfactory create_test_zipline() requires "
                            "argument 'sid_list' or 'sid'")

    concurrent_trades = config.get('concurrent_trades', False)

    if 'order_count' in config:
        order_count = config['order_count']
    else:
        order_count = 100

    if 'order_amount' in config:
        order_amount = config['order_amount']
    else:
        order_amount = 100

    # -------------------
    # Create the Algo
    # -------------------
    if 'algorithm' in config:
        test_algo = config['algorithm']
    else:
        test_algo = TestAlgorithm(
            sid_list[0],
            order_amount,
            order_count,
            sim_params=config.get('sim_params',
                                  factory.create_simulation_parameters()),
            slippage=config.get('slippage'),
            identifiers=sid_list
        )

    # -------------------
    # Trade Source
    # -------------------
    if 'skip_data' not in config:
        if 'trade_source' in config:
            trade_source = config['trade_source']
        else:
            trade_source = factory.create_daily_trade_source(
                sid_list,
                test_algo.sim_params,
                test_algo.trading_environment,
                concurrent=concurrent_trades,
            )

        trades_by_sid = {}
        for trade in trade_source:
            if trade.sid not in trades_by_sid:
                trades_by_sid[trade.sid] = []
            trades_by_sid[trade.sid].append(trade)

        data_portal = create_data_portal_from_trade_history(
            config['env'],
            config['tempdir'],
            config['sim_params'],
            trades_by_sid
        )
        test_algo.data_portal = data_portal

    # -------------------
    # Benchmark source
    # -------------------
    test_algo.benchmark_return_source = config.get('benchmark_source', None)

    # ------------------
    # generator/simulator
    sim = test_algo.get_generator()

    return sim
KeyError
dataset/ETHPy150Open quantopian/zipline/zipline/utils/simfactory.py/create_test_zipline
2,729
def __init__(self, graph=None, encoding="utf-8", prettyprint=True):
    try:
        import xml.etree.ElementTree
    except __HOLE__:
        raise ImportError('GraphML writer requires '
                          'xml.elementtree.ElementTree')
    self.prettyprint = prettyprint
    self.encoding = encoding
    self.xml = Element("graphml",
                       {'xmlns': self.NS_GRAPHML,
                        'xmlns:xsi': self.NS_XSI,
                        'xsi:schemaLocation': self.SCHEMALOCATION})
    self.keys = {}

    if graph is not None:
        self.add_graph_element(graph)
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/graphml.py/GraphMLWriter.__init__
2,730
def get_key(self, name, attr_type, scope, default):
    keys_key = (name, attr_type, scope)
    try:
        return self.keys[keys_key]
    except __HOLE__:
        new_id = "d%i" % len(list(self.keys))
        self.keys[keys_key] = new_id
        key_kwargs = {"id": new_id,
                      "for": scope,
                      "attr.name": name,
                      "attr.type": attr_type}
        key_element = Element("key", **key_kwargs)
        # add subelement for data default value if present
        if default is not None:
            default_element = Element("default")
            default_element.text = make_str(default)
            key_element.append(default_element)
        self.xml.insert(0, key_element)
    return new_id
KeyError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/graphml.py/GraphMLWriter.get_key
2,731
def __init__(self, node_type=str):
    try:
        import xml.etree.ElementTree
    except __HOLE__:
        raise ImportError('GraphML reader requires '
                          'xml.elementtree.ElementTree')
    self.node_type = node_type
    self.multigraph = False  # assume multigraph and test for parallel edges
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/graphml.py/GraphMLReader.__init__
2,732
def decode_data_elements(self, graphml_keys, obj_xml):
    """Use the key information to decode the data XML if present."""
    data = {}
    for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML):
        key = data_element.get("key")
        try:
            data_name = graphml_keys[key]['name']
            data_type = graphml_keys[key]['type']
        except __HOLE__:
            raise nx.NetworkXError("Bad GraphML data: no key %s" % key)
        text = data_element.text
        # assume anything with subelements is a yfiles extension
        if text is not None and len(list(data_element)) == 0:
            if data_type == bool:
                data[data_name] = self.convert_bool[text]
            else:
                data[data_name] = data_type(text)
        elif len(list(data_element)) > 0:
            # Assume yfiles as subelements, try to extract node_label
            node_label = None
            for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']:
                geometry = data_element.find("{%s}%s/{%s}Geometry" %
                                             (self.NS_Y, node_type, self.NS_Y))
                if geometry is not None:
                    data['x'] = geometry.get('x')
                    data['y'] = geometry.get('y')
                if node_label is None:
                    node_label = data_element.find("{%s}%s/{%s}NodeLabel" %
                                                   (self.NS_Y, node_type, self.NS_Y))
            if node_label is not None:
                data['label'] = node_label.text

            # check all the different types of edges available in yEd.
            for e in ['PolyLineEdge', 'SplineEdge', 'QuadCurveEdge',
                      'BezierEdge', 'ArcEdge']:
                edge_label = data_element.find("{%s}%s/{%s}EdgeLabel" %
                                               (self.NS_Y, e, (self.NS_Y)))
                if edge_label is not None:
                    break

            if edge_label is not None:
                data['label'] = edge_label.text
    return data
KeyError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/graphml.py/GraphMLReader.decode_data_elements
2,733
def __init__(self, count,
             max_count=None,
             prepend=None,
             width='auto',
             speed_calc_cycles=10,
             interval=1,
             verbose=0,
             sigint='stop',
             sigterm='stop',
             name='progress',
             info_line=None):
    """
    count [mp.Value] - shared memory to hold the current state,
    (list or single value)

    max_count [mp.Value] - shared memory holding the final state,
    (None, list or single value), may be changed by external process
    without having to explicitly tell this class. If None, no TTG and
    relative progress can be calculated -> TTG = None

    prepend [string] - string to put in front of the progress output,
    (None, single string or of list of strings)

    interval [int] - seconds to wait between progress print

    speed_calc_cycles [int] - use the current (time, count) as well as
    the (old_time, old_count) read by the show_stat function
    speed_calc_cycles calls before to calculate the speed as follows:
    s = count - old_count / (time - old_time)

    verbose, sigint, sigterm -> see loop class
    """
    self.name = name
    self._identifier = get_identifier(self.name, pid='not started')

    try:
        for c in count:
            assert isinstance(c, mp.sharedctypes.Synchronized), "each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized"
        self.is_multi = True
    except TypeError:
        assert isinstance(count, mp.sharedctypes.Synchronized), "'count' must be if the type multiprocessing.sharedctypes.Synchronized"
        self.is_multi = False
        count = [count]

    self.len = len(count)

    if max_count is not None:
        if self.is_multi:
            try:
                for m in max_count:
                    assert isinstance(m, mp.sharedctypes.Synchronized), "each element of 'max_count' must be if the type multiprocessing.sharedctypes.Synchronized"
            except __HOLE__:
                raise TypeError("'max_count' must be iterable")
        else:
            assert isinstance(max_count, mp.sharedctypes.Synchronized), "'max_count' must be of the type multiprocessing.sharedctypes.Synchronized"
            max_count = [max_count]
    else:
        max_count = [None] * self.len

    self.start_time = []
    self.speed_calc_cycles = speed_calc_cycles
    self.width = width
    self.q = []
    self.prepend = []
    self.lock = []
    self.last_count = []
    self.last_old_count = []
    self.last_old_time = []
    for i in range(self.len):
        self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                  # (time, count) information to calculate speed
        self.last_count.append(UnsignedIntValue())
        self.last_old_count.append(UnsignedIntValue())
        self.last_old_time.append(FloatValue())
        self.lock.append(mp.Lock())
        self.start_time.append(FloatValue(val=time.time()))
        if prepend is None:
            # no prepend given
            self.prepend.append('')
        else:
            try:
                # assume list of prepend, (needs to be a sequence)
                # except if prepend is an instance of string
                # the assert will cause the except to be executed
                assert not isinstance(prepend, str)
                self.prepend.append(prepend[i])
            except:
                # list fails -> assume single prepend for all
                self.prepend.append(prepend)

    self.max_count = max_count  # list of multiprocessing value type
    self.count = count          # list of multiprocessing value type

    self.interval = interval
    self.verbose = verbose

    self.show_on_exit = False
    self.add_args = {}

    self.info_line = info_line

    # setup loop class with func
    super(Progress, self).__init__(
        func=Progress.show_stat_wrapper_multi,
        args=(self.count,
              self.last_count,
              self.start_time,
              self.max_count,
              self.speed_calc_cycles,
              self.width,
              self.q,
              self.last_old_count,
              self.last_old_time,
              self.prepend,
              self.__class__.show_stat,
              self.len,
              self.add_args,
              self.lock,
              self.info_line),
        interval=interval,
        verbose=verbose,
        sigint=sigint,
        sigterm=sigterm,
        name=name,
        auto_kill_on_last_resort=True)
TypeError
dataset/ETHPy150Open cimatosa/jobmanager/jobmanager/progress.py/Progress.__init__
2,734
def _DisplayHost(self, computer, self_report):
    """Displays the report for a single host.

    Args:
      computer: models.Computer object to display.
      self_report: if True, display as self report.
    """
    uuid = computer.uuid
    popup = self.request.get('format', None) == 'popup'
    if popup:
        limit = 1
    else:
        limit = SINGLE_HOST_DATA_FETCH_LIMIT
    client_log_files = models.ClientLogFile.all().filter('uuid =', uuid).order(
        '-mtime').fetch(limit)
    msu_log = models.ComputerMSULog.all().filter('uuid =', uuid).order(
        '-mtime').fetch(limit)
    applesus_installs = models.InstallLog.all().filter('uuid =', uuid).filter(
        'applesus =', True).order('-mtime').fetch(limit)
    installs = models.InstallLog.all().filter('uuid =', uuid).filter(
        'applesus =', False).order('-mtime').fetch(limit)
    exits = models.PreflightExitLog.all().filter('uuid =', uuid).order(
        '-mtime').fetch(limit)
    install_problems = models.ClientLog.all().filter(
        'action =', 'install_problem').filter('uuid =', uuid).order(
            '-mtime').fetch(limit)
    tags = {}
    tags_list = []
    if computer:
        # Generate tags data.
        tags_list = models.Tag.GetAllTagNamesForEntity(computer)
        for tag in tags_list:
            tags[tag] = True
        for tag in models.Tag.GetAllTagNames():
            if tag not in tags:
                tags[tag] = False
        tags = json.dumps(tags, sort_keys=True)
        admin.AddTimezoneToComputerDatetimes(computer)
        computer.connection_dates.reverse()
        computer.connection_datetimes.reverse()

    try:
        uuid_lookup_url = settings.UUID_LOOKUP_URL
    except AttributeError:
        uuid_lookup_url = None

    try:
        owner_lookup_url = settings.OWNER_LOOKUP_URL
    except __HOLE__:
        owner_lookup_url = None

    values = {
        'uuid_lookup_url': uuid_lookup_url,
        'owner_lookup_url': owner_lookup_url,
        'client_site_enabled': settings.CLIENT_SITE_ENABLED,
        'computer': computer,
        'applesus_installs': applesus_installs,
        'installs': installs,
        'client_log_files': client_log_files,
        'msu_log': msu_log,
        'install_problems': install_problems,
        'preflight_exits': exits,
        'tags': tags,
        'tags_list': tags_list,
        'host_report': True,
        'limit': SINGLE_HOST_DATA_FETCH_LIMIT,
        'is_support_user': auth.IsSupportUser(),
        'is_security_user': auth.IsSecurityUser(),
        'is_physical_security_user': auth.IsPhysicalSecurityUser(),
        'self_report': self_report
    }

    if popup:
        self.Render('host_popup.html', values)
    else:
        self.Render('host.html', values)
AttributeError
dataset/ETHPy150Open google/simian/src/simian/mac/admin/host.py/Host._DisplayHost
2,735
def write_csv_header(self):
    if self.is_dict:
        try:
            self.writer.writeheader()
        except __HOLE__:
            # For Python<2.7
            self.writer.writerow(dict(zip(
                self.writer.fieldnames,
                self.writer.fieldnames)))
AttributeError
dataset/ETHPy150Open Havate/havate-openstack/proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/usage/base.py/CsvDataMixin.write_csv_header
2,736
def serve_file(request, response, path, type=None, disposition=None, name=None):
    """Set status, headers, and body in order to serve the given file.

    The Content-Type header will be set to the type arg, if provided.
    If not provided, the Content-Type will be guessed by the file extension
    of the 'path' argument.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>". If name is None, it will be set
    to the basename of path. If disposition is None, no Content-Disposition
    header will be written.
    """

    if not os.path.isabs(path):
        raise ValueError("'%s' is not an absolute path." % path)

    try:
        st = os.stat(path)
    except __HOLE__:
        return notfound(request, response)

    # Check if path is a directory.
    if stat.S_ISDIR(st.st_mode):
        # Let the caller deal with it as they like.
        return notfound(request, response)

    # Set the Last-Modified response header, so that
    # modified-since validation code can work.
    response.headers['Last-Modified'] = formatdate(
        st.st_mtime, usegmt=True
    )

    result = validate_since(request, response)
    if result is not None:
        return result

    if type is None:
        # Set content-type based on filename extension
        ext = ""
        i = path.rfind('.')
        if i != -1:
            ext = path[i:].lower()
        type = mimetypes.types_map.get(ext, "text/plain")
    response.headers['Content-Type'] = type

    if disposition is not None:
        if name is None:
            name = os.path.basename(path)
        cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd

    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    c_len = st.st_size
    bodyfile = open(path, 'rb')

    # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
    if request.protocol >= (1, 1):
        response.headers["Accept-Ranges"] = "bytes"
        r = get_ranges(request.headers.get('Range'), c_len)
        if r == []:
            response.headers['Content-Range'] = "bytes */%s" % c_len
            return httperror(request, response, 416)
        if r:
            if len(r) == 1:
                # Return a single-part response.
                start, stop = r[0]
                r_len = stop - start
                response.status = 206
                response.headers['Content-Range'] = (
                    "bytes %s-%s/%s" % (start, stop - 1, c_len)
                )
                response.headers['Content-Length'] = r_len
                bodyfile.seek(start)
                response.body = bodyfile.read(r_len)
            else:
                # Return a multipart/byteranges response.
                response.status = 206
                boundary = _make_boundary()
                ct = "multipart/byteranges; boundary=%s" % boundary
                response.headers['Content-Type'] = ct
                if "Content-Length" in response.headers:
                    # Delete Content-Length header so finalize() recalcs it.
                    del response.headers["Content-Length"]

                def file_ranges():
                    # Apache compatibility:
                    yield "\r\n"

                    for start, stop in r:
                        yield "--" + boundary
                        yield "\r\nContent-type: %s" % type
                        yield ("\r\nContent-range: bytes %s-%s/%s\r\n\r\n"
                               % (start, stop - 1, c_len))
                        bodyfile.seek(start)
                        yield bodyfile.read(stop - start)
                        yield "\r\n"
                    # Final boundary
                    yield "--" + boundary + "--"

                    # Apache compatibility:
                    yield "\r\n"
                response.body = file_ranges()
        else:
            response.headers['Content-Length'] = c_len
            response.body = bodyfile
    else:
        response.headers['Content-Length'] = c_len
        response.body = bodyfile

    return response
OSError
dataset/ETHPy150Open circuits/circuits/circuits/web/tools.py/serve_file
2,737
def check_auth(request, response, realm, users, encrypt=None):
    """Check Authentication

    If an Authorization header contains credentials, return True, else False.

    :param realm: The authentication realm.
    :type  realm: str

    :param users: A dict of the form: {username: password} or a callable
                  returning a dict.
    :type  users: dict or callable

    :param encrypt: Callable used to encrypt the password returned from
                    the user-agent. if None it defaults to a md5 encryption.
    :type  encrypt: callable
    """
    if "Authorization" in request.headers:
        # make sure the provided credentials are correctly set
        ah = _httpauth.parseAuthorization(request.headers.get("Authorization"))
        if ah is None:
            return httperror(request, response, 400)

        if not encrypt:
            encrypt = _httpauth.DIGEST_AUTH_ENCODERS[_httpauth.MD5]

        if isinstance(users, collections.Callable):
            try:
                # backward compatibility
                users = users()  # expect it to return a dictionary
                if not isinstance(users, dict):
                    raise ValueError("Authentication users must be a dict")

                # fetch the user password
                password = users.get(ah["username"], None)
            except __HOLE__:
                # returns a password (encrypted or clear text)
                password = users(ah["username"])
        else:
            if not isinstance(users, dict):
                raise ValueError("Authentication users must be a dict")

            # fetch the user password
            password = users.get(ah["username"], None)

        # validate the Authorization by re-computing it here
        # and compare it with what the user-agent provided
        if _httpauth.checkResponse(ah, password, method=request.method,
                                   encrypt=encrypt, realm=realm):
            request.login = ah["username"]
            return True

        request.login = False
    return False
TypeError
dataset/ETHPy150Open circuits/circuits/circuits/web/tools.py/check_auth
2,738
def run_python_module(modulename, args):
    """Run a python module, as though with ``python -m name args...``.

    `modulename` is the name of the module, possibly a dot-separated name.
    `args` is the argument array to present as sys.argv, including the first
    element naming the module being executed.
    """
    openfile = None
    glo, loc = globals(), locals()
    try:
        try:
            # Search for the module - inside its parent package, if any - using
            # standard import mechanics.
            if '.' in modulename:
                packagename, name = rsplit1(modulename, '.')
                package = __import__(packagename, glo, loc, ['__path__'])
                searchpath = package.__path__
            else:
                packagename, name = None, modulename
                searchpath = None  # "top-level search" in imp.find_module()
            openfile, pathname, _ = imp.find_module(name, searchpath)

            # Complain if this is a magic non-file module.
            if openfile is None and pathname is None:
                raise NoSource(
                    "module does not live in a file: %r" % modulename
                )

            # If `modulename` is actually a package, not a mere module, then we
            # pretend to be Python 2.7 and try running its __main__.py script.
            if openfile is None:
                packagename = modulename
                name = '__main__'
                package = __import__(packagename, glo, loc, ['__path__'])
                searchpath = package.__path__
                openfile, pathname, _ = imp.find_module(name, searchpath)
        except __HOLE__:
            _, err, _ = sys.exc_info()
            raise NoSource(str(err))
    finally:
        if openfile:
            openfile.close()

    # Finally, hand the file off to run_python_file for execution.
    args[0] = pathname
    run_python_file(pathname, args, package=packagename)
ImportError
dataset/ETHPy150Open nedbat/byterun/byterun/execfile.py/run_python_module
2,739
def run_python_file(filename, args, package=None):
    """Run a python file as if it were the main program on the command line.

    `filename` is the path to the file to execute, it need not be a .py file.
    `args` is the argument array to present as sys.argv, including the first
    element naming the file being executed.  `package` is the name of the
    enclosing package, if any.
    """
    # Create a module to serve as __main__
    old_main_mod = sys.modules['__main__']
    main_mod = imp.new_module('__main__')
    sys.modules['__main__'] = main_mod
    main_mod.__file__ = filename
    if package:
        main_mod.__package__ = package
    main_mod.__builtins__ = BUILTINS

    # Set sys.argv and the first path element properly.
    old_argv = sys.argv
    old_path0 = sys.path[0]
    sys.argv = args
    if package:
        sys.path[0] = ''
    else:
        sys.path[0] = os.path.abspath(os.path.dirname(filename))

    try:
        # Open the source file.
        try:
            source_file = open_source(filename)
        except __HOLE__:
            raise NoSource("No file to run: %r" % filename)

        try:
            source = source_file.read()
        finally:
            source_file.close()

        # We have the source.  `compile` still needs the last line to be clean,
        # so make sure it is, then compile a code object from it.
        if not source or source[-1] != '\n':
            source += '\n'
        code = compile(source, filename, "exec")

        # Execute the source file.
        exec_code_object(code, main_mod.__dict__)
    finally:
        # Restore the old __main__
        sys.modules['__main__'] = old_main_mod

        # Restore the old argv and path
        sys.argv = old_argv
        sys.path[0] = old_path0
IOError
dataset/ETHPy150Open nedbat/byterun/byterun/execfile.py/run_python_file
2,740
def view_task(request, cog_atlas_id=None):
    '''view_task returns a view to see a group of images associated with a
    particular cognitive atlas task.
    :param cog_atlas_id: statmaps.models.CognitiveAtlasTask the id for the
    task defined in the Cognitive Atlas
    '''
    from cogat_functions import get_task_graph

    # Get the cognitive atlas id
    if not cog_atlas_id:
        return search(request,
                      error_message="Please search for a Cognitive Atlas task to see the task view.")

    try:
        task = CognitiveAtlasTask.objects.get(cog_atlas_id=cog_atlas_id)
    except __HOLE__:
        return search(request, error_message="Invalid search for Cognitive Atlas.")

    if task:
        images = StatisticMap.objects.filter(
            cognitive_paradigm_cogatlas=cog_atlas_id,
            collection__private=False).order_by("pk")

        if len(images) > 0:
            first_image = images[0]
            graph = get_task_graph(cog_atlas_id, images=images)

            # Which images aren't tagged with contrasts?
            not_tagged = images.filter(cognitive_contrast_cogatlas__isnull=True)

            context = {'task': task,
                       'first_image': first_image,
                       'cognitive_atlas_tree': graph,
                       'tree_divid': "tree",  # div id in template to append tree svg to
                       'images_without_contrasts': not_tagged}

            return render(request, 'cogatlas/cognitive_atlas_task.html', context)

    # If task does not have images
    context = {"no_task_images": True,  # robots won't index page if defined
               "task": task}
    return render(request, 'cogatlas/cognitive_atlas_task.html', context)
ObjectDoesNotExist
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/views.py/view_task
2,741
def serve_nidm(request, collection_cid, nidmdir, sep, path):
    collection = get_collection(collection_cid, request, mode='file')
    basepath = os.path.join(settings.PRIVATE_MEDIA_ROOT, 'images')
    fpath = path if sep is '/' else ''.join([nidmdir, sep, path])
    try:
        nidmr = collection.basecollectionitem_set.instance_of(
            NIDMResults).get(name=nidmdir)
    except __HOLE__:
        return HttpResponseForbidden()

    if path in ['zip', 'ttl', 'provn']:
        fieldf = getattr(nidmr, '{0}_file'.format(path))
        fpath = fieldf.path
    else:
        zipfile = nidmr.zip_file.path
        fpathbase = os.path.dirname(zipfile)
        fpath = ''.join([fpathbase, sep, path])

    return sendfile(request, os.path.join(basepath, fpath), encoding="utf-8")
ObjectDoesNotExist
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/views.py/serve_nidm
2,742
@csrf_exempt
def atlas_query_region(request):
    # this query is significantly faster (from 2-4 seconds to <1 second) if
    # the synonyms don't need to be queried. i was previously in contact with
    # NIF and it seems like it wouldn't be too hard to download all the
    # synonym data
    search = request.GET.get('region', '')
    atlas = request.GET.get('atlas', '').replace('\'', '')
    collection = name = request.GET.get('collection', '')
    neurovault_root = os.path.dirname(
        os.path.dirname(os.path.realpath(neurovault.__file__)))
    try:
        collection_object = Collection.objects.filter(name=collection)[0]
    except IndexError:
        return JSONResponse('error: could not find collection: %s' % collection,
                            status=400)
    try:
        atlas_object = Atlas.objects.filter(name=atlas,
                                            collection=collection_object)[0]
        atlas_image = atlas_object.file
        atlas_xml = atlas_object.label_description_file
    except IndexError:
        return JSONResponse('could not find %s' % atlas, status=400)
    if request.method == 'GET':
        atlas_xml.open()
        root = ET.fromstring(atlas_xml.read())
        atlas_xml.close()
        atlasRegions = [x.text.lower() for x in root.find('data').findall('label')]
        if search in atlasRegions:
            searchList = [search]
        else:
            synonymsDict = {}
            with open(os.path.join(neurovault_root,
                                   'neurovault/apps/statmaps/NIFgraph.pkl'), 'rb') as input:
                graph = joblib.load(input)
            for atlasRegion in atlasRegions:
                synonymsDict[atlasRegion] = getSynonyms(atlasRegion)
            try:
                searchList = toAtlas(search, graph, atlasRegions, synonymsDict)
            except __HOLE__:
                return JSONResponse('error: region not in atlas or ontology',
                                    status=400)
            if searchList == 'none':
                return JSONResponse(
                    'error: could not map specified region to region in specified atlas',
                    status=400)
        try:
            data = {'voxels': getAtlasVoxels(searchList, atlas_image, atlas_xml)}
        except ValueError:
            return JSONResponse('error: region not in atlas', status=400)

        return JSONResponse(data)
ValueError
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/views.py/atlas_query_region
2,743
@csrf_exempt
def atlas_query_voxel(request):
    X = request.GET.get('x', '')
    Y = request.GET.get('y', '')
    Z = request.GET.get('z', '')
    collection = name = request.GET.get('collection', '')
    atlas = request.GET.get('atlas', '').replace('\'', '')
    try:
        collection_object = Collection.objects.filter(name=collection)[0]
    except __HOLE__:
        return JSONResponse('error: could not find collection: %s' % collection,
                            status=400)
    try:
        atlas_object = Atlas.objects.filter(name=atlas,
                                            collection=collection_object)[0]
        atlas_image = atlas_object.file
        atlas_xml = atlas_object.label_description_file
    except IndexError:
        return JSONResponse('error: could not find atlas: %s' % atlas, status=400)
    try:
        data = voxelToRegion(X, Y, Z, atlas_image, atlas_xml)
    except IndexError:
        return JSONResponse('error: one or more coordinates are out of range',
                            status=400)
    return JSONResponse(data)

# Compare Two Images
IndexError
dataset/ETHPy150Open NeuroVault/NeuroVault/neurovault/apps/statmaps/views.py/atlas_query_voxel
2,744
@staticmethod
def set_actor(user, sender, instance, **kwargs):
    """
    Signal receiver with an extra, required 'user' kwarg. This method
    becomes a real (valid) signal receiver when it is curried with the actor.
    """
    try:
        app_label, model_name = settings.AUTH_USER_MODEL.split('.')
        auth_user_model = apps.get_model(app_label, model_name)
    except __HOLE__:
        auth_user_model = apps.get_model('auth', 'user')
    if sender == LogEntry and isinstance(user, auth_user_model) and instance.actor is None:
        instance.actor = user

    if hasattr(threadlocal, 'auditlog'):
        instance.remote_addr = threadlocal.auditlog['remote_addr']
ValueError
dataset/ETHPy150Open jjkester/django-auditlog/src/auditlog/middleware.py/AuditlogMiddleware.set_actor
2,745
def test_field_types(self):
    # Very likely need to look into adding support for these field types
    skip_fields = [
        getattr(models.fields.related, 'ForeignObject', None),
        getattr(models.fields, 'GenericIPAddressField', None),
        getattr(models.fields.proxy, 'OrderWrt', None),
        getattr(models.fields, 'BinaryField', None),
        getattr(models.fields, 'FilePathField', None),
        getattr(models.fields, 'DurationField', None),
        getattr(models.fields, 'UUIDField', None)
    ]
    for key, value in models.__dict__.iteritems():
        try:
            bases = inspect.getmro(value)
        except __HOLE__:
            continue
        if models.fields.Field in bases and value not in skip_fields:
            assert getattr(fields, key)
AttributeError
dataset/ETHPy150Open praekelt/django-export/export/tests/__init__.py/FieldsTestCase.test_field_types
2,746
def shlex_split(x):
    """Helper function to split lines into segments.
    """
    # shlex.split raises an exception if there is a syntax error in sh syntax
    # for example if no closing " is found. This function keeps dropping the
    # last character of the line until shlex.split does not raise
    # an exception. It adds end of the line to the result of shlex.split
    #
    # Example:
    # %run "c:/python  -> ['%run','"c:/python']

    endofline = []
    while x != '':
        try:
            comps = shlex.split(x)
            if len(endofline) >= 1:
                comps.append(''.join(endofline))
            return comps
        except __HOLE__:
            endofline = [x[-1:]] + endofline
            x = x[:-1]
    return [''.join(endofline)]
ValueError
dataset/ETHPy150Open ipython/ipython-py3k/IPython/core/completerlib.py/shlex_split
2,747
def prepare_post(self, data, amount):
    invoice = "%s" % data.id
    failct = data.paymentfailures.count()
    if failct > 0:
        invoice = "%s_%i" % (invoice, failct)

    try:
        cc = data.credit_card
        balance = trunc_decimal(data.balance, 2)
        self.packet['VendorTxCode'] = invoice
        self.packet['Amount'] = balance
        self.packet['Description'] = 'Online purchase'
        self.packet['CardType'] = cc.credit_type
        self.packet['CardHolder'] = cc.card_holder
        self.packet['CardNumber'] = cc.decryptedCC
        self.packet['ExpiryDate'] = '%02d%s' % (cc.expire_month,
                                                str(cc.expire_year)[2:])
        if cc.start_month is not None:
            self.packet['StartDate'] = '%02d%s' % (cc.start_month,
                                                   str(cc.start_year)[2:])
        if cc.ccv is not None and cc.ccv != "":
            self.packet['CV2'] = cc.ccv
        if cc.issue_num is not None and cc.issue_num != "":
            self.packet['IssueNumber'] = cc.issue_num  # '%02d' % int(cc.issue_num)
        addr = [data.bill_street1, data.bill_street2,
                data.bill_city, data.bill_state]
        self.packet['BillingAddress'] = ', '.join(addr)
        self.packet['BillingPostCode'] = data.bill_postal_code
    except Exception, e:
        self.log.error('preparing data, got error: %s\nData: %s', e, data)
        self.valid = False
        return

    # handle pesky unicode chars in names
    for key, value in self.packet.items():
        try:
            value = value.encode('utf-8')
            self.packet[key] = value
        except __HOLE__:
            pass

    self.postString = urlencode(self.packet)
    self.url = self.connection
    self.valid = True
AttributeError
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/payment/modules/sagepay/processor.py/PaymentProcessor.prepare_post
2,748
def capture_payment(self, testing=False, order=None, amount=None):
        """Execute the post to Sage Pay VSP DIRECT"""
        if not order:
            order = self.order

        if order.paid_in_full:
            self.log_extra('%s is paid in full, no capture attempted.', order)
            self.record_payment()
            return ProcessorResult(self.key, True, _("No charge needed, paid in full."))

        self.log_extra('Capturing payment for %s', order)

        if amount is None:
            amount = order.balance

        self.prepare_post(order, amount)

        if self.valid:
            if self.settings.SKIP_POST.value:
                self.log.info("TESTING MODE - Skipping post to server. Would have posted %s?%s", self.url, self.postString)
                payment = self.record_payment(order=order, amount=amount,
                    transaction_id="TESTING", reason_code='0')
                return ProcessorResult(self.key, True, _('TESTING MODE'), payment=payment)
            else:
                self.log_extra("About to post to server: %s?%s", self.url, self.postString)
                conn = urllib2.Request(self.url, data=self.postString)
                try:
                    f = urllib2.urlopen(conn)
                    result = f.read()
                    self.log_extra('Process: url=%s\nPacket=%s\nResult=%s', self.url, self.packet, result)
                except urllib2.URLError, ue:
                    self.log.error("error opening %s\n%s", self.url, ue)
                    return ProcessorResult(self.key, False, 'Could not talk to Sage Pay gateway')

                try:
                    self.response = dict([row.split('=', 1) for row in result.splitlines()])
                    status = self.response['Status']
                    success = (status == 'OK')
                    detail = self.response['StatusDetail']
                except __HOLE__, e:
                    self.log.info('Error submitting payment: %s', e)
                    payment = self.record_failure(order=order, amount=amount,
                        transaction_id="", reason_code="error",
                        details='Invalid response from payment gateway')
                    return ProcessorResult(self.key, False, _('Invalid response from payment gateway'))

                payment = None
                transaction_id = ""
                if success:
                    vpstxid = self.response.get('VPSTxID', '')
                    txauthno = self.response.get('TxAuthNo', '')
                    transaction_id="%s,%s" % (vpstxid, txauthno)
                    self.log.info('Success on order #%i, recording payment', self.order.id)
                    payment = self.record_payment(order=order, amount=amount,
                        transaction_id=transaction_id, reason_code=status)
                else:
                    payment = self.record_failure(order=order, amount=amount,
                        transaction_id=transaction_id, reason_code=status,
                        details=detail)

                return ProcessorResult(self.key, success, detail, payment=payment)
        else:
            return ProcessorResult(self.key, False, _('Error processing payment.'))
KeyError
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/payment/modules/sagepay/processor.py/PaymentProcessor.capture_payment
2,749
def delete_doc(self, doc_id):
        """ Deletes a document by doc ID.

        Args:
          doc_id: A list of document IDs.
        Raises:
          search_exceptions.InternalError on internal errors.
        """
        solr_request = {"delete": {"id": doc_id}}
        solr_url = "http://{0}:{1}/solr/update?commit=true".format(self._search_location, self.SOLR_SERVER_PORT)
        logging.debug("SOLR URL: {0}".format(solr_url))
        json_request = simplejson.dumps(solr_request)
        logging.debug("SOLR JSON: {0}".format(json_request))
        try:
            req = urllib2.Request(solr_url, data=json_request)
            req.add_header('Content-Type', 'application/json')
            conn = urllib2.urlopen(req)
            if conn.getcode() != HTTP_OK:
                raise search_exceptions.InternalError("Malformed response from SOLR.")
            response = simplejson.load(conn)
            status = response['responseHeader']['status']
            logging.debug("Response: {0}".format(response))
        except __HOLE__, exception:
            logging.error("Unable to decode json from SOLR server: {0}".format(
                exception))
            raise search_exceptions.InternalError("Malformed response from SOLR.")

        if status != 0:
            raise search_exceptions.InternalError(
                "SOLR response status of {0}".format(status))
ValueError
dataset/ETHPy150Open AppScale/appscale/SearchService/solr_interface.py/Solr.delete_doc
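Editor's note: simplejson.load raises a decoding error that subclasses ValueError when the server returns non-JSON (an HTML error page, say); the stdlib json module behaves the same way, as this runnable sketch shows:

import json  # simplejson is interchangeable for this purpose

try:
    json.loads("<html>502 Bad Gateway</html>")
except ValueError as exc:  # json.JSONDecodeError is a ValueError subclass
    print("Malformed response:", exc)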
2,750
def get_index(self, app_id, namespace, name):
        """ Gets an index from SOLR.

        Performs a JSON request to the SOLR schema API to get the list of defined
        fields. Extracts the fields that match the naming convention
        appid_[namespace]_index_name.

        Args:
          app_id: A str, the application identifier.
          namespace: A str, the application namespace.
          name: A str, the index name.
        Raises:
          search_exceptions.InternalError: Bad response from SOLR server.
        Returns:
          An index item.
        """
        index_name = self.__get_index_name(app_id, namespace, name)
        solr_url = "http://{0}:{1}/solr/schema/fields".format(self._search_location,
            self.SOLR_SERVER_PORT)
        logging.debug("URL: {0}".format(solr_url))
        try:
            conn = urllib2.urlopen(solr_url)
            if conn.getcode() != HTTP_OK:
                raise search_exceptions.InternalError("Malformed response from SOLR.")
            response = simplejson.load(conn)
            logging.debug("Response: {0}".format(response))
        except __HOLE__, exception:
            logging.error("Unable to decode json from SOLR server: {0}".format(
                exception))
            raise search_exceptions.InternalError("Malformed response from SOLR.")

        # Make sure the response we got from SOLR was with a good status.
        status = response['responseHeader']['status']
        if status != 0:
            raise search_exceptions.InternalError(
                "SOLR response status of {0}".format(status))

        # Get only fields which match the index name prefix.
        filtered_fields = []
        for field in response['fields']:
            if field['name'].startswith("{0}_".format(index_name)):
                filtered_fields.append(field)

        schema = Schema(filtered_fields, response['responseHeader'])
        return Index(index_name, schema)
ValueError
dataset/ETHPy150Open AppScale/appscale/SearchService/solr_interface.py/Solr.get_index
2,751
def update_schema(self, updates):
        """ Updates the schema of a document.

        Args:
          updates: A list of updates to apply.
        Raises:
          search_exceptions.InternalError on internal errors from SOLR.
        """
        field_list = []
        for update in updates:
            field_list.append({'name': update['name'], 'type': update['type'],
                'stored': 'true', 'indexed': 'true', 'multiValued': 'false'})

        solr_url = "http://{0}:{1}/solr/schema/fields".format(
            self._search_location, self.SOLR_SERVER_PORT)
        json_request = simplejson.dumps(field_list)
        try:
            req = urllib2.Request(solr_url, data=json_request)
            req.add_header('Content-Type', 'application/json')
            conn = urllib2.urlopen(req)
            if conn.getcode() != HTTP_OK:
                raise search_exceptions.InternalError("Malformed response from SOLR.")
            response = simplejson.load(conn)
            status = response['responseHeader']['status']
            logging.debug("Response: {0}".format(response))
        except __HOLE__, exception:
            logging.error("Unable to decode json from SOLR server: {0}".format(
                exception))
            raise search_exceptions.InternalError("Malformed response from SOLR.")

        if status != 0:
            raise search_exceptions.InternalError(
                "SOLR response status of {0}".format(status))
ValueError
dataset/ETHPy150Open AppScale/appscale/SearchService/solr_interface.py/Solr.update_schema
2,752
def commit_update(self, hash_map):
        """ Commits field/value changes to SOLR.

        Args:
          hash_map: A dictionary to send to SOLR.
        Raises:
          search_exceptions.InternalError: On failure.
        """
        docs = []
        docs.append(hash_map)
        json_payload = simplejson.dumps(docs)
        solr_url = "http://{0}:{1}/solr/update/json?commit=true".format(
            self._search_location, self.SOLR_SERVER_PORT)
        try:
            req = urllib2.Request(solr_url, data=json_payload)
            req.add_header('Content-Type', 'application/json')
            conn = urllib2.urlopen(req)
            if conn.getcode() != HTTP_OK:
                logging.error("Got code {0} with URL {1} and payload {2}".format(
                    conn.getcode(), solr_url, json_payload))
                raise search_exceptions.InternalError("Bad request sent to SOLR.")
            response = simplejson.load(conn)
            status = response['responseHeader']['status']
            logging.debug("Response: {0}".format(response))
        except __HOLE__, exception:
            logging.error("Unable to decode json from SOLR server: {0}".format(
                exception))
            raise search_exceptions.InternalError("Malformed response from SOLR.")

        if status != 0:
            raise search_exceptions.InternalError(
                "SOLR response status of {0}".format(status))
ValueError
dataset/ETHPy150Open AppScale/appscale/SearchService/solr_interface.py/Solr.commit_update
2,753
def __execute_query(self, solr_query):
        """ Executes query string on SOLR.

        Args:
          solr_query: A str, the query to run.
        Returns:
          The results from the query executing.
        Raises:
          search_exceptions.InternalError on internal SOLR error.
        """
        solr_url = "http://{0}:{1}/solr/select/?wt=json&{2}"\
            .format(self._search_location, self.SOLR_SERVER_PORT, solr_query)
        logging.debug("SOLR URL: {0}".format(solr_url))
        try:
            req = urllib2.Request(solr_url)
            req.add_header('Content-Type', 'application/json')
            conn = urllib2.urlopen(req)
            if conn.getcode() != HTTP_OK:
                logging.error("Got code {0} with URL {1}.".format(
                    conn.getcode(), solr_url))
                raise search_exceptions.InternalError("Bad request sent to SOLR.")
            response = simplejson.load(conn)
            status = response['responseHeader']['status']
            logging.debug("Response: {0}".format(response))
        except __HOLE__, exception:
            logging.error("Unable to decode json from SOLR server: {0}".format(
                exception))
            raise search_exceptions.InternalError("Malformed response from SOLR.")
        except urllib2.HTTPError, http_error:
            logging.exception(http_error)
            # We assume no results were returned.
            status = 0
            response = {'response': {'docs': [], 'start': 0}}

        if status != 0:
            raise search_exceptions.InternalError(
                "SOLR response status of {0}".format(status))
        return response
ValueError
dataset/ETHPy150Open AppScale/appscale/SearchService/solr_interface.py/Solr.__execute_query
2,754
def to_representation(self, obj):
        if obj is None:
            return \
                super(SubmissionStatsInstanceSerializer, self).to_representation(obj)

        request = self.context.get('request')
        field = request.query_params.get('group')
        name = request.query_params.get('name', field)

        if field is None:
            raise exceptions.ParseError(_(u"Expecting `group` and `name`"
                                          u" query parameters."))

        try:
            data = get_form_submissions_grouped_by_field(
                obj, field, name)
        except __HOLE__ as e:
            raise exceptions.ParseError(detail=e.message)
        else:
            if data:
                dd = obj.data_dictionary()
                element = dd.get_survey_element(field)

                if element and element.type in SELECT_FIELDS:
                    for record in data:
                        label = dd.get_choice_label(element, record[name])
                        record[name] = label

        return data
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/libs/serializers/stats_serializer.py/SubmissionStatsInstanceSerializer.to_representation
2,755
def to_representation(self, obj):
        if obj is None:
            return super(StatsInstanceSerializer, self).to_representation(obj)

        request = self.context.get('request')
        method = request.query_params.get('method', None)
        field = request.query_params.get('field', None)

        if field and field not in obj.data_dictionary().get_keys():
            raise exceptions.ParseError(detail=_("Field not in XForm."))

        stats_function = STATS_FUNCTIONS.get(method and method.lower(),
                                             get_all_stats)

        try:
            data = stats_function(obj, field)
        except __HOLE__ as e:
            raise exceptions.ParseError(detail=e.message)

        return data
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/libs/serializers/stats_serializer.py/StatsInstanceSerializer.to_representation
2,756
@webapp.route('/log')
def view_log():
    filename = request.args['filename']
    seek_tail = request.args.get('seek_tail', '1') != '0'
    session_key = session.get('client_id')

    try:
        content = current_service.read_log(filename, session_key=session_key,
                                           seek_tail=seek_tail)
    except __HOLE__:
        error_msg = 'File not found. Only files passed through args are allowed.'
        if request.is_xhr:
            return error_msg
        return render_template('error.html', error=error_msg), 404

    if request.is_xhr:
        return content

    return render_template('log.html', content=content, filename=filename)
KeyError
dataset/ETHPy150Open Jahaja/psdash/psdash/web.py/view_log
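Editor's note: the masked KeyError here comes from read_log rejecting filenames it was never told about. A hedged sketch of that whitelist-lookup idea (ALLOWED_LOGS and the helper are hypothetical, not psdash's actual internals):

ALLOWED_LOGS = {'app.log': '/var/log/app.log'}  # hypothetical registry

def read_log(filename):
    try:
        path = ALLOWED_LOGS[filename]  # unknown name -> KeyError
    except KeyError:
        raise KeyError('File not found: %r' % filename)
    with open(path) as fh:
        return fh.read()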
2,757
@webapp.route('/log/search')
def search_log():
    filename = request.args['filename']
    query_text = request.args['text']
    session_key = session.get('client_id')

    try:
        data = current_service.search_log(filename, query_text,
                                          session_key=session_key)
        return jsonify(data)
    except __HOLE__:
        return 'Could not find log file with given filename', 404
KeyError
dataset/ETHPy150Open Jahaja/psdash/psdash/web.py/search_log
2,758
def __init__(self, backend, cipher, mode, operation):
        self._backend = backend
        self._cipher = cipher
        self._mode = mode
        self._operation = operation
        # There is a bug in CommonCrypto where block ciphers do not raise
        # kCCAlignmentError when finalizing if you supply non-block aligned
        # data. To work around this we need to keep track of the block
        # alignment ourselves, but only for alg+mode combos that require
        # block alignment. OFB, CFB, and CTR make a block cipher algorithm
        # into a stream cipher so we don't need to track them (and thus their
        # block size is effectively 1 byte just like OpenSSL/CommonCrypto
        # treat RC4 and other stream cipher block sizes).
        # This bug has been filed as rdar://15589470
        self._bytes_processed = 0
        if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and
                not isinstance(mode, (OFB, CFB, CFB8, CTR))):
            self._byte_block_size = cipher.block_size // 8
        else:
            self._byte_block_size = 1

        registry = self._backend._cipher_registry
        try:
            cipher_enum, mode_enum = registry[type(cipher), type(mode)]
        except __HOLE__:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
                _Reasons.UNSUPPORTED_CIPHER
            )

        ctx = self._backend._ffi.new("CCCryptorRef *")
        ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)

        if isinstance(mode, modes.ModeWithInitializationVector):
            iv_nonce = mode.initialization_vector
        elif isinstance(mode, modes.ModeWithNonce):
            iv_nonce = mode.nonce
        else:
            iv_nonce = self._backend._ffi.NULL

        if isinstance(mode, CTR):
            mode_option = self._backend._lib.kCCModeOptionCTR_BE
        else:
            mode_option = 0

        res = self._backend._lib.CCCryptorCreateWithMode(
            operation,
            mode_enum, cipher_enum,
            self._backend._lib.ccNoPadding,
            iv_nonce,
            cipher.key, len(cipher.key),
            self._backend._ffi.NULL, 0, 0, mode_option, ctx)
        self._backend._check_cipher_response(res)

        self._ctx = ctx
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cryptography-1.3.1/src/cryptography/hazmat/backends/commoncrypto/ciphers.py/_CipherContext.__init__
2,759
def __init__(self, backend, cipher, mode, operation):
        self._backend = backend
        self._cipher = cipher
        self._mode = mode
        self._operation = operation
        self._tag = None

        registry = self._backend._cipher_registry
        try:
            cipher_enum, mode_enum = registry[type(cipher), type(mode)]
        except __HOLE__:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
                _Reasons.UNSUPPORTED_CIPHER
            )

        ctx = self._backend._ffi.new("CCCryptorRef *")
        ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
        self._ctx = ctx

        res = self._backend._lib.CCCryptorCreateWithMode(
            operation,
            mode_enum, cipher_enum,
            self._backend._lib.ccNoPadding,
            self._backend._ffi.NULL,
            cipher.key, len(cipher.key),
            self._backend._ffi.NULL, 0, 0, 0, self._ctx)
        self._backend._check_cipher_response(res)

        res = self._backend._lib.CCCryptorGCMAddIV(
            self._ctx[0],
            mode.initialization_vector,
            len(mode.initialization_vector)
        )
        self._backend._check_cipher_response(res)
        # CommonCrypto has a bug where calling update without at least one
        # call to authenticate_additional_data will result in null byte output
        # for ciphertext. The following empty byte string call prevents the
        # issue, which is present in at least 10.8 and 10.9.
        # Filed as rdar://18314544
        self.authenticate_additional_data(b"")
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cryptography-1.3.1/src/cryptography/hazmat/backends/commoncrypto/ciphers.py/_GCMCipherContext.__init__
2,760
def dump_ifd(self, ifd, ifd_name, tag_dict=EXIF_TAGS, relative=0, stop_tag=DEFAULT_STOP_TAG):
        """
        Return a list of entries in the given IFD.
        """
        # make sure we can process the entries
        try:
            entries = self.s2n(ifd, 2)
        except TypeError:
            logger.warning("Possibly corrupted IFD: %s" % ifd)
            return

        for i in range(entries):
            # entry is index of start of this IFD in the file
            entry = ifd + 2 + 12 * i
            tag = self.s2n(entry, 2)

            # get tag name early to avoid errors, help debug
            tag_entry = tag_dict.get(tag)
            if tag_entry:
                tag_name = tag_entry[0]
            else:
                tag_name = 'Tag 0x%04X' % tag

            # ignore certain tags for faster processing
            if not (not self.detailed and tag in IGNORE_TAGS):
                field_type = self.s2n(entry + 2, 2)

                # unknown field type
                if not 0 < field_type < len(FIELD_TYPES):
                    if not self.strict:
                        continue
                    else:
                        raise ValueError('Unknown type %d in tag 0x%04X' % (field_type, tag))

                type_length = FIELD_TYPES[field_type][0]
                count = self.s2n(entry + 4, 4)
                # Adjust for tag id/type/count (2+2+4 bytes)
                # Now we point at either the data or the 2nd level offset
                offset = entry + 8

                # If the value fits in 4 bytes, it is inlined, else we
                # need to jump ahead again.
                if count * type_length > 4:
                    # offset is not the value; it's a pointer to the value
                    # if relative we set things up so s2n will seek to the right
                    # place when it adds self.offset.  Note that this 'relative'
                    # is for the Nikon type 3 makernote.  Other cameras may use
                    # other relative offsets, which would have to be computed here
                    # slightly differently.
                    if relative:
                        tmp_offset = self.s2n(offset, 4)
                        offset = tmp_offset + ifd - 8
                        if self.fake_exif:
                            offset += 18
                    else:
                        offset = self.s2n(offset, 4)

                field_offset = offset
                values = None
                if field_type == 2:
                    # special case: null-terminated ASCII string
                    # XXX investigate
                    # sometimes gets too big to fit in int value
                    if count != 0:  # and count < (2**31):  # 2E31 is hardware dependant. --gd
                        file_position = self.offset + offset
                        try:
                            self.file.seek(file_position)
                            values = self.file.read(count)
                            #print(values)
                            # Drop any garbage after a null.
                            values = values.split(b'\x00', 1)[0]
                            if isinstance(values, bytes):
                                try:
                                    values = values.decode("utf-8")
                                except UnicodeDecodeError:
                                    logger.warning("Possibly corrupted field %s in %s IFD", tag_name, ifd_name)
                        except OverflowError:
                            logger.warn('OverflowError at position: %s, length: %s', file_position, count)
                            values = ''
                        except MemoryError:
                            logger.warn('MemoryError at position: %s, length: %s', file_position, count)
                            values = ''
                else:
                    values = []
                    signed = (field_type in [6, 8, 9, 10])

                    # XXX investigate
                    # some entries get too big to handle could be malformed
                    # file or problem with self.s2n
                    if count < 1000:
                        for dummy in range(count):
                            if field_type in (5, 10):
                                # a ratio
                                value = Ratio(self.s2n(offset, 4, signed),
                                              self.s2n(offset + 4, 4, signed))
                            else:
                                value = self.s2n(offset, type_length, signed)
                            values.append(value)
                            offset = offset + type_length
                    # The test above causes problems with tags that are
                    # supposed to have long values! Fix up one important case.
                    elif tag_name in ('MakerNote', makernote.canon.CAMERA_INFO_TAG_NAME):
                        for dummy in range(count):
                            value = self.s2n(offset, type_length, signed)
                            values.append(value)
                            offset = offset + type_length

                # now 'values' is either a string or an array
                if count == 1 and field_type != 2:
                    printable = str(values[0])
                elif count > 50 and len(values) > 20 and not isinstance(values, basestring) :
                    printable = str(values[0:20])[0:-1] + ", ... ]"
                else:
                    try:
                        printable = str(values)
                    except UnicodeEncodeError:
                        printable = unicode(values)

                # compute printable version of values
                if tag_entry:
                    # optional 2nd tag element is present
                    if len(tag_entry) != 1:
                        if callable(tag_entry[1]):
                            # call mapping function
                            printable = tag_entry[1](values)
                        elif type(tag_entry[1]) is tuple:
                            ifd_info = tag_entry[1]
                            try:
                                logger.debug('%s SubIFD at offset %d:', ifd_info[0], values[0])
                                self.dump_ifd(values[0], ifd_info[0], tag_dict=ifd_info[1], stop_tag=stop_tag)
                            except __HOLE__:
                                logger.warn('No values found for %s SubIFD', ifd_info[0])
                        else:
                            printable = ''
                            for i in values:
                                # use lookup table for this tag
                                printable += tag_entry[1].get(i, repr(i))

                self.tags[ifd_name + ' ' + tag_name] = IfdTag(printable, tag,
                                                              field_type,
                                                              values, field_offset,
                                                              count * type_length)
                try:
                    tag_value = repr(self.tags[ifd_name + ' ' + tag_name])
                # fix for python2's handling of unicode values
                except UnicodeEncodeError:
                    tag_value = unicode(self.tags[ifd_name + ' ' + tag_name])
                logger.debug(' %s: %s', tag_name, tag_value)

            if tag_name == stop_tag:
                break
IndexError
dataset/ETHPy150Open ianare/exif-py/exifread/classes.py/ExifHeader.dump_ifd
2,761
def _canon_decode_tag(self, value, mn_tags):
        """
        Decode Canon MakerNote tag based on offset within tag.

        See http://www.burren.cx/david/canon.html by David Burren
        """
        for i in range(1, len(value)):
            tag = mn_tags.get(i, ('Unknown', ))
            name = tag[0]
            if len(tag) > 1:
                val = tag[1].get(value[i], 'Unknown')
            else:
                val = value[i]
            try:
                logger.debug(" %s %s %s", i, name, hex(value[i]))
            except __HOLE__:
                logger.debug(" %s %s %s", i, name, value[i])

            # it's not a real IFD Tag but we fake one to make everybody
            # happy. this will have a "proprietary" type
            self.tags['MakerNote ' + name] = IfdTag(str(val), None, 0, None,
                                                    None, None)
TypeError
dataset/ETHPy150Open ianare/exif-py/exifread/classes.py/ExifHeader._canon_decode_tag
2,762
def _set_pages(self, pages):
        """ The pages setter. """

        # Remove pages from the old list that appear in the new list.  The old
        # list will now contain pages that are no longer in the wizard.
        old_pages = self.pages
        new_pages = []

        for page in pages:
            try:
                old_pages.remove(page)
            except __HOLE__:
                new_pages.append(page)

        # Dispose of the old pages.
        for page in old_pages:
            page.dispose_page()

        # If we have created the control then we need to add the new pages,
        # otherwise we leave it until the control is created.
        if self.control:
            for page in new_pages:
                self.control.addWizardPage(page)

        self.controller.pages = pages
ValueError
dataset/ETHPy150Open enthought/pyface/pyface/ui/qt4/wizard/wizard.py/Wizard._set_pages
2,763
def get_streaming_event():
    try:
        now = datetime.datetime.now()
        streaming_event = Event.objects.filter(public=True, start_date__lte=now).order_by('-start_date')[0]
        try:
            next_event = streaming_event.get_next()
        except Event.DoesNotExist:
            streaming_event = next_event
        else:
            td = next_event.start_date - now
            if td.days == 0 and 0 < td.seconds < 1800:
                # if there is 30min to next event, take that one
                streaming_event = next_event
        # TODO: if previous event should have ended more than 3 hours ago, don't display the stream
    except __HOLE__:
        return
    return streaming_event
IndexError
dataset/ETHPy150Open kiberpipa/Intranet/pipa/video/utils.py/get_streaming_event
2,764
def get_next_streaming_event():
    now = datetime.datetime.now()
    q = Event.objects.filter(public=True, require_video=True, start_date__gte=now)
    try:
        return q.order_by('-start_date')[0]
    except __HOLE__:
        return
IndexError
dataset/ETHPy150Open kiberpipa/Intranet/pipa/video/utils.py/get_next_streaming_event
2,765
def AtomicWrite(filename, contents, mode=0666, gid=None):
    """Create a file 'filename' with 'contents' atomically.

    As in Write, 'mode' is modified by the umask.  This creates and moves
    a temporary file, and errors doing the above will be propagated normally,
    though it will try to clean up the temporary file in that case.

    This is very similar to the prodlib function with the same name.

    An optional gid can be specified.

    Args:
      filename: str; the name of the file
      contents: str; the data to write to the file
      mode: int; permissions with which to create the file (default is 0666 octal)
      gid: int; group id with which to create the file
    """
    fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        os.write(fd, contents)
    finally:
        os.close(fd)
    try:
        os.chmod(tmp_filename, mode)
        if gid is not None:
            os.chown(tmp_filename, -1, gid)
        os.rename(tmp_filename, filename)
    except OSError, exc:
        try:
            os.remove(tmp_filename)
        except __HOLE__, e:
            exc = OSError('%s. Additional errors cleaning up: %s' % (exc, e))
        raise exc
OSError
dataset/ETHPy150Open google/google-apputils/google/apputils/file_util.py/AtomicWrite
2,766
@contextlib.contextmanager
def TemporaryDirectory(suffix='', prefix='tmp', base_path=None):
    """A context manager to create a temporary directory and clean up on exit.

    The parameters are the same ones expected by tempfile.mkdtemp.
    The directory will be securely and atomically created.
    Everything under it will be removed when exiting the context.

    Args:
      suffix: optional suffix.
      prefix: options prefix.
      base_path: the base path under which to create the temporary directory.
    Yields:
      The absolute path of the new temporary directory.
    """
    temp_dir_path = tempfile.mkdtemp(suffix, prefix, base_path)
    try:
        yield temp_dir_path
    finally:
        try:
            shutil.rmtree(temp_dir_path)
        except __HOLE__, e:
            if e.message == 'Cannot call rmtree on a symbolic link':
                # Interesting synthetic exception made up by shutil.rmtree.
                # Means we received a symlink from mkdtemp.
                # Also means must clean up the symlink instead.
                os.unlink(temp_dir_path)
            else:
                raise
OSError
dataset/ETHPy150Open google/google-apputils/google/apputils/file_util.py/TemporaryDirectory
2,767
def MkDirs(directory, force_mode=None):
    """Makes a directory including its parent directories.

    This function is equivalent to os.makedirs() but it avoids a race
    condition that os.makedirs() has.  The race is between os.mkdir() and
    os.path.exists() which fail with errors when run in parallel.

    Args:
      directory: str; the directory to make
      force_mode: optional octal, chmod dir to get rid of umask interaction
    Raises:
      Whatever os.mkdir() raises when it fails for any reason EXCLUDING
      "dir already exists".  If a directory already exists, it does not
      raise anything.  This behaviour is different than os.makedirs()
    """
    name = os.path.normpath(directory)
    dirs = name.split(os.path.sep)
    for i in range(0, len(dirs)):
        path = os.path.sep.join(dirs[:i+1])
        try:
            if path:
                os.mkdir(path)
                # only chmod if we created
                if force_mode is not None:
                    os.chmod(path, force_mode)
        except __HOLE__, exc:
            if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
                raise
OSError
dataset/ETHPy150Open google/google-apputils/google/apputils/file_util.py/MkDirs
2,768
def RmDirs(dir_name):
    """Removes dir_name and every subsequently empty directory above it.

    Unlike os.removedirs and shutil.rmtree, this function doesn't raise an
    error if the directory does not exist.

    Args:
      dir_name: Directory to be removed.
    """
    try:
        shutil.rmtree(dir_name)
    except OSError, err:
        if err.errno != errno.ENOENT:
            raise
    try:
        parent_directory = os.path.dirname(dir_name)
        while parent_directory:
            try:
                os.rmdir(parent_directory)
            except __HOLE__, err:
                if err.errno != errno.ENOENT:
                    raise
            parent_directory = os.path.dirname(parent_directory)
    except OSError, err:
        if err.errno not in (errno.EACCES, errno.ENOTEMPTY, errno.EPERM):
            raise
OSError
dataset/ETHPy150Open google/google-apputils/google/apputils/file_util.py/RmDirs
2,769
def end(self, c):
        tdata = c.tableData
        data = tdata.get_data()

        # Add missing columns so that each row has the same count of columns
        # This prevents errors in Reportlab table
        try:
            maxcols = max([len(row) for row in data] or [0])
        except __HOLE__:
            log.warn(c.warning("<table> rows seem to be inconsistent"))
            maxcols = [0]
        for i, row in enumerate(data):
            data[i] += [''] * (maxcols - len(row))

        filter_len = filter(lambda col: col is None, tdata.colw)
        try:
            filter_len = len(filter_len)
        except Exception:
            filter_len = sum(1 for _ in filter_len)
        cols_with_no_width = filter_len
        if cols_with_no_width:  # any col width not defined
            bad_cols = filter(lambda tup: tup[1] is None, enumerate(tdata.colw))
            fair_division = str(100 / float(cols_with_no_width)) + '%'  # get fair %
            for i, _ in bad_cols:
                tdata.colw[i] = fair_division  # fix empty with fair %

        try:
            if tdata.data:
                # log.debug("Table styles %r", tdata.styles)
                t = PmlTable(
                    data,
                    colWidths=tdata.colw,
                    rowHeights=tdata.rowh,
                    # totalWidth = tdata.width,
                    splitByRow=1,
                    # repeatCols = 1,
                    repeatRows=tdata.repeat,
                    hAlign=tdata.align,
                    vAlign='TOP',
                    style=TableStyle(tdata.styles))
                t.totalWidth = _width(tdata.width)
                t.spaceBefore = c.frag.spaceBefore
                t.spaceAfter = c.frag.spaceAfter

                # XXX Maybe we need to copy some more properties?
                t.keepWithNext = c.frag.keepWithNext
                # t.hAlign = tdata.align
                c.addStory(t)
            else:
                log.warn(c.warning("<table> is empty"))
        except:
            log.warn(c.warning("<table>"), exc_info=1)

        # Cleanup and re-swap table data
        c.clearFrag()
        c.tableData, self.tableData = self.tableData, None
ValueError
dataset/ETHPy150Open xhtml2pdf/xhtml2pdf/xhtml2pdf/tables.py/pisaTagTABLE.end
2,770
def insertBefore(self, newChild, refChild):
        """ Inserts the node newChild before the existing child node refChild.
            If refChild is null, insert newChild at the end of the list of children.
        """
        if newChild.nodeType not in self._child_node_types:
            raise IllegalChild, "%s cannot be child of %s" % (newChild.tagName, self.tagName)
        if newChild.parentNode is not None:
            newChild.parentNode.removeChild(newChild)
        if refChild is None:
            self.appendChild(newChild)
        else:
            try:
                index = self.childNodes.index(refChild)
            except __HOLE__:
                raise xml.dom.NotFoundErr()
            self.childNodes.insert(index, newChild)
            newChild.nextSibling = refChild
            refChild.previousSibling = newChild
            if index:
                node = self.childNodes[index-1]
                node.nextSibling = newChild
                newChild.previousSibling = node
            else:
                newChild.previousSibling = None
            newChild.parentNode = self
        return newChild
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/element.py/Node.insertBefore
2,771
def removeChild(self, oldChild):
        """ Removes the child node indicated by oldChild from the list of children, and returns it.
        """
        #FIXME: update ownerDocument.element_dict or find other solution
        try:
            self.childNodes.remove(oldChild)
        except __HOLE__:
            raise xml.dom.NotFoundErr()
        if oldChild.nextSibling is not None:
            oldChild.nextSibling.previousSibling = oldChild.previousSibling
        if oldChild.previousSibling is not None:
            oldChild.previousSibling.nextSibling = oldChild.nextSibling
        oldChild.nextSibling = oldChild.previousSibling = None
        if self.ownerDocument:
            self.ownerDocument.clear_caches()
        oldChild.parentNode = None
        return oldChild
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/element.py/Node.removeChild
2,772
def on_dccchat(self, c, e):
        if len(e.arguments) != 2:
            return
        args = e.arguments[1].split()
        if len(args) == 4:
            try:
                address = ip_numstr_to_quad(args[2])
                port = int(args[3])
            except __HOLE__:
                return
            self.dcc_connect(address, port)
ValueError
dataset/ETHPy150Open zulip/zulip/bots/irc-mirror.py/IRCBot.on_dccchat
2,773
def update_wrapper(wrapper, wrapped, *a, **ka):
    try:
        functools.update_wrapper(wrapper, wrapped, *a, **ka)
    except __HOLE__:
        pass

# These helpers are used at module level and need to be defined first.
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/update_wrapper
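Editor's note (hedged): under Python 2, functools.update_wrapper raised AttributeError when the wrapped object lacked attributes such as __name__ — functools.partial objects being the classic offender — which is what this shim swallows; Python 3.2+ tolerates missing attributes instead. A sketch of the same guard:

import functools

def safe_update_wrapper(wrapper, wrapped, *a, **ka):
    try:
        functools.update_wrapper(wrapper, wrapped, *a, **ka)
    except AttributeError:
        pass  # e.g. wrapped is a functools.partial without __name__ (Python 2)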
2,774
def add(self, rule, method, target, name=None):
        """ Add a new rule or replace the target for an existing rule. """
        anons = 0  # Number of anonymous wildcards found
        keys = []  # Names of keys
        pattern = ''  # Regular expression pattern with named groups
        filters = []  # Lists of wildcard input filters
        builder = []  # Data structure for the URL builder
        is_static = True

        for key, mode, conf in self._itertokens(rule):
            if mode:
                is_static = False
                if mode == 'default':
                    mode = self.default_filter
                mask, in_filter, out_filter = self.filters[mode](conf)
                if not key:
                    pattern += '(?:%s)' % mask
                    key = 'anon%d' % anons
                    anons += 1
                else:
                    pattern += '(?P<%s>%s)' % (key, mask)
                    keys.append(key)
                if in_filter:
                    filters.append((key, in_filter))
                builder.append((key, out_filter or str))
            elif key:
                pattern += re.escape(key)
                builder.append((None, key))

        self.builder[rule] = builder
        if name:
            self.builder[name] = builder

        if is_static and not self.strict_order:
            self.static.setdefault(method, {})
            self.static[method][self.build(rule)] = (target, None)
            return

        try:
            re_pattern = re.compile('^(%s)$' % pattern)
            re_match = re_pattern.match
        except re.error:
            raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))

        if filters:
            def getargs(path):
                url_args = re_match(path).groupdict()
                for name, wildcard_filter in filters:
                    try:
                        url_args[name] = wildcard_filter(url_args[name])
                    except __HOLE__:
                        raise HTTPError(400, 'Path has wrong format.')
                return url_args
        elif re_pattern.groupindex:
            def getargs(path):
                return re_match(path).groupdict()
        else:
            getargs = None

        flatpat = _re_flatten(pattern)
        whole_rule = (rule, flatpat, target, getargs)

        if (flatpat, method) in self._groups:
            if DEBUG:
                msg = 'Route <%s %s> overwrites a previously defined route'
                warnings.warn(msg % (method, rule), RuntimeWarning)
            self.dyna_routes[method][
                self._groups[flatpat, method]] = whole_rule
        else:
            self.dyna_routes.setdefault(method, []).append(whole_rule)
            self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1

        self._compile(method)
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/Router.add
2,775
def build(self, _name, *anons, **query):
        """ Build an URL by filling the wildcards in a rule. """
        builder = self.builder.get(_name)
        if not builder:
            raise RouteBuildError("No route with that name.", _name)
        try:
            for i, value in enumerate(anons):
                query['anon%d' % i] = value
            url = ''.join([f(query.pop(n)) if n else f for (n, f) in builder])
            return url if not query else url + '?' + urlencode(query)
        except __HOLE__:
            raise RouteBuildError('Missing URL argument: %r' % _e().args[0])
KeyError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/Router.build
2,776
def _handle(self, environ):
        path = environ['bottle.raw_path'] = environ['PATH_INFO']
        if py3k:
            try:
                environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
            except UnicodeError:
                return HTTPError(400, 'Invalid path string. Expected UTF-8')

        try:
            environ['bottle.app'] = self
            request.bind(environ)
            response.bind()
            try:
                self.trigger_hook('before_request')
                route, args = self.router.match(environ)
                environ['route.handle'] = route
                environ['bottle.route'] = route
                environ['route.url_args'] = args
                return route.call(**args)
            finally:
                self.trigger_hook('after_request')
        except HTTPResponse:
            return _e()
        except RouteReset:
            route.reset()
            return self._handle(environ)
        except (KeyboardInterrupt, __HOLE__, MemoryError):
            raise
        except Exception:
            if not self.catchall:
                raise
            stacktrace = format_exc()
            environ['wsgi.errors'].write(stacktrace)
            return HTTPError(500, "Internal Server Error", _e(), stacktrace)
SystemExit
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/Bottle._handle
2,777
def _cast(self, out, peek=None):
        """ Try to convert the parameter into something WSGI compatible and set
        correct HTTP headers when possible.
        Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
        iterable of strings and iterable of unicodes
        """

        # Empty output is done here
        if not out:
            if 'Content-Length' not in response:
                response['Content-Length'] = 0
            return []
        # Join lists of byte or unicode strings. Mixed lists are NOT supported
        if isinstance(out, (tuple, list))\
        and isinstance(out[0], (bytes, unicode)):
            out = out[0][0:0].join(out)  # b'abc'[0:0] -> b''
        # Encode unicode strings
        if isinstance(out, unicode):
            out = out.encode(response.charset)
        # Byte Strings are just returned
        if isinstance(out, bytes):
            if 'Content-Length' not in response:
                response['Content-Length'] = len(out)
            return [out]
        # HTTPError or HTTPException (recursive, because they may wrap anything)
        # TODO: Handle these explicitly in handle() or make them iterable.
        if isinstance(out, HTTPError):
            out.apply(response)
            out = self.error_handler.get(out.status_code,
                                         self.default_error_handler)(out)
            return self._cast(out)
        if isinstance(out, HTTPResponse):
            out.apply(response)
            return self._cast(out.body)

        # File-like objects.
        if hasattr(out, 'read'):
            if 'wsgi.file_wrapper' in request.environ:
                return request.environ['wsgi.file_wrapper'](out)
            elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
                return WSGIFileWrapper(out)

        # Handle Iterables. We peek into them to detect their inner type.
        try:
            iout = iter(out)
            first = next(iout)
            while not first:
                first = next(iout)
        except StopIteration:
            return self._cast('')
        except HTTPResponse:
            first = _e()
        except (KeyboardInterrupt, __HOLE__, MemoryError):
            raise
        except:
            if not self.catchall:
                raise
            first = HTTPError(500, 'Unhandled exception', _e(), format_exc())

        # These are the inner types allowed in iterator or generator objects.
        if isinstance(first, HTTPResponse):
            return self._cast(first)
        elif isinstance(first, bytes):
            new_iter = itertools.chain([first], iout)
        elif isinstance(first, unicode):
            encoder = lambda x: x.encode(response.charset)
            new_iter = imap(encoder, itertools.chain([first], iout))
        else:
            msg = 'Unsupported response type: %s' % type(first)
            return self._cast(HTTPError(500, msg))
        if hasattr(out, 'close'):
            new_iter = _closeiter(new_iter, out.close)
        return new_iter
SystemExit
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/Bottle._cast
2,778
def wsgi(self, environ, start_response):
        """ The bottle WSGI-interface. """
        try:
            out = self._cast(self._handle(environ))
            # rfc2616 section 4.3
            if response._status_code in (100, 101, 204, 304)\
            or environ['REQUEST_METHOD'] == 'HEAD':
                if hasattr(out, 'close'):
                    out.close()
                out = []
            start_response(response._status_line, response.headerlist)
            return out
        except (__HOLE__, SystemExit, MemoryError):
            raise
        except:
            if not self.catchall:
                raise
            err = '<h1>Critical error while processing request: %s</h1>' \
                  % html_escape(environ.get('PATH_INFO', '/'))
            if DEBUG:
                err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
                       '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
                       % (html_escape(repr(_e())), html_escape(format_exc()))
            environ['wsgi.errors'].write(err)
            headers = [('Content-Type', 'text/html; charset=UTF-8')]
            start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
            return [tob(err)]
KeyboardInterrupt
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/Bottle.wsgi
2,779
@staticmethod
def _iter_chunked(read, bufsize):
        err = HTTPError(400, 'Error while parsing chunked transfer body.')
        rn, sem, bs = tob('\r\n'), tob(';'), tob('')
        while True:
            header = read(1)
            while header[-2:] != rn:
                c = read(1)
                header += c
                if not c: raise err
                if len(header) > bufsize: raise err
            size, _, _ = header.partition(sem)
            try:
                maxread = int(tonat(size.strip()), 16)
            except __HOLE__:
                raise err
            if maxread == 0: break
            buff = bs
            while maxread > 0:
                if not buff:
                    buff = read(min(maxread, bufsize))
                part, buff = buff[:maxread], buff[maxread:]
                if not part: raise err
                yield part
                maxread -= len(part)
            if read(2) != rn:
                raise err
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/BaseRequest._iter_chunked
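Editor's note: the masked clause rejects chunk-size lines that are not valid hexadecimal; int(s, 16) raises ValueError for those:

try:
    int("xyz", 16)  # not a hex chunk-size line
except ValueError:
    print("Error while parsing chunked transfer body.")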
2,780
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
        try:
            read_func = self.environ['wsgi.input'].read
        except __HOLE__:
            self.environ['wsgi.input'] = BytesIO()
            return self.environ['wsgi.input']
        body_iter = self._iter_chunked if self.chunked else self._iter_body
        body, body_size, is_temp_file = BytesIO(), 0, False
        for part in body_iter(read_func, self.MEMFILE_MAX):
            body.write(part)
            body_size += len(part)
            if not is_temp_file and body_size > self.MEMFILE_MAX:
                body, tmp = TemporaryFile(mode='w+b'), body
                body.write(tmp.getvalue())
                del tmp
                is_temp_file = True
        self.environ['wsgi.input'] = body
        body.seek(0)
        return body
KeyError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/BaseRequest._body
2,781
def __getattr__(self, name):
        """ Search in self.environ for additional user defined attributes. """
        try:
            var = self.environ['bottle.request.ext.%s' % name]
            return var.__get__(self) if hasattr(var, '__get__') else var
        except __HOLE__:
            raise AttributeError('Attribute %r not defined.' % name)
KeyError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/BaseRequest.__getattr__
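Editor's note: this is the common KeyError-to-AttributeError translation that lets dict-backed objects honour Python's attribute protocol. A condensed, self-contained sketch (class and key names are illustrative):

class Env(object):
    def __init__(self, environ):
        self.environ = environ
    def __getattr__(self, name):  # only called when normal lookup fails
        try:
            return self.environ['ext.%s' % name]
        except KeyError:
            raise AttributeError('Attribute %r not defined.' % name)

e = Env({'ext.user': 'alice'})
assert e.user == 'alice'
assert getattr(e, 'missing', None) is None  # AttributeError -> default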
2,782
def _local_property():
    ls = threading.local()

    def fget(_):
        try:
            return ls.var
        except __HOLE__:
            raise RuntimeError("Request context not initialized.")

    def fset(_, value):
        ls.var = value

    def fdel(_):
        del ls.var

    return property(fget, fset, fdel, 'Thread-local property')
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/_local_property
2,783
def apply(self, callback, _):
        dumps = self.json_dumps
        if not dumps:
            return callback

        def wrapper(*a, **ka):
            try:
                rv = callback(*a, **ka)
            except __HOLE__:
                rv = _e()

            if isinstance(rv, dict):
                #Attempt to serialize, raises exception on failure
                json_response = dumps(rv)
                #Set content type only if serialization successful
                response.content_type = 'application/json'
                return json_response
            elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
                rv.body = dumps(rv.body)
                rv.content_type = 'application/json'
                return rv

        return wrapper
HTTPError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/JSONPlugin.apply
2,784
def getunicode(self, name, default=None, encoding=None):
        """ Return the value as a unicode string, or the default. """
        try:
            return self._fix(self[name], encoding)
        except (UnicodeError, __HOLE__):
            return default
KeyError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/FormsDict.getunicode
2,785
def parse_date(ims):
    """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
    try:
        ts = email.utils.parsedate_tz(ims)
        return time.mktime(ts[:8] + (0, )) - (ts[9] or 0) - time.timezone
    except (__HOLE__, ValueError, IndexError, OverflowError):
        return None
TypeError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/parse_date
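Editor's note: email.utils.parsedate_tz returns None for unparseable input, and slicing None raises the TypeError this record masks:

import email.utils
import time

ts = email.utils.parsedate_tz("not a date")  # returns None
try:
    time.mktime(ts[:8] + (0,))  # None[:8] -> TypeError
except TypeError:
    print(None)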
2,786
def parse_auth(header):
    """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
    try:
        method, data = header.split(None, 1)
        if method.lower() == 'basic':
            user, pwd = touni(base64.b64decode(tob(data))).split(':', 1)
            return user, pwd
    except (KeyError, __HOLE__):
        return None
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/parse_auth
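Editor's note: the ValueError here covers both a missing 'user:password' colon (the two-target unpack fails) and, in Python 3, malformed base64 (binascii.Error subclasses ValueError). A runnable sketch:

import base64

raw = base64.b64encode(b"no-colon-here").decode()
try:
    user, pwd = base64.b64decode(raw).decode().split(':', 1)
except ValueError:  # no ':' to split on, or bad base64
    user = pwd = None
assert (user, pwd) == (None, None)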
2,787
def parse_range_header(header, maxlen=0):
    """ Yield (start, end) ranges parsed from a HTTP Range header. Skip
        unsatisfiable ranges. The end index is non-inclusive."""
    if not header or header[:6] != 'bytes=':
        return
    ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r]
    for start, end in ranges:
        try:
            if not start:  # bytes=-100    -> last 100 bytes
                start, end = max(0, maxlen - int(end)), maxlen
            elif not end:  # bytes=100-    -> all but the first 99 bytes
                start, end = int(start), maxlen
            else:          # bytes=100-200 -> bytes 100-200 (inclusive)
                start, end = int(start), min(int(end) + 1, maxlen)
            if 0 <= start < end <= maxlen:
                yield start, end
        except __HOLE__:
            pass
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/parse_range_header
2,788
def run(self, app):  # pragma: no cover
        from wsgiref.simple_server import make_server
        from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
        import socket

        class FixedHandler(WSGIRequestHandler):
            def address_string(self):  # Prevent reverse DNS lookups please.
                return self.client_address[0]

            def log_request(*args, **kw):
                if not self.quiet:
                    return WSGIRequestHandler.log_request(*args, **kw)

        handler_cls = self.options.get('handler_class', FixedHandler)
        server_cls = self.options.get('server_class', WSGIServer)

        if ':' in self.host:  # Fix wsgiref for IPv6 addresses.
            if getattr(server_cls, 'address_family') == socket.AF_INET:
                class server_cls(server_cls):
                    address_family = socket.AF_INET6

        self.srv = make_server(self.host, self.port, app, server_cls,
                               handler_cls)
        self.port = self.srv.server_port  # update port actual port (0 means random)
        try:
            self.srv.serve_forever()
        except __HOLE__:
            self.srv.server_close()  # Prevent ResourceWarning: unclosed socket
            raise
KeyboardInterrupt
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/WSGIRefServer.run
2,789
def run(self, handler):
        from eventlet import wsgi, listen, patcher
        if not patcher.is_monkey_patched(os):
            msg = "Bottle requires eventlet.monkey_patch() (before import)"
            raise RuntimeError(msg)
        socket_args = {}
        for arg in ('backlog', 'family'):
            try:
                socket_args[arg] = self.options.pop(arg)
            except __HOLE__:
                pass
        address = (self.host, self.port)
        try:
            wsgi.server(listen(address, **socket_args), handler,
                        log_output=(not self.quiet))
        except TypeError:
            # Fallback, if we have old version of eventlet
            wsgi.server(listen(address), handler)
KeyError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/EventletServer.run
2,790
def run(self, handler):
        import asyncio
        from aiohttp.wsgi import WSGIServerHttpProtocol
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        protocol_factory = lambda: WSGIServerHttpProtocol(
            handler,
            readpayload=True,
            debug=(not self.quiet))
        self.loop.run_until_complete(self.loop.create_server(protocol_factory,
                                                             self.host,
                                                             self.port))

        if 'BOTTLE_CHILD' in os.environ:
            import signal
            signal.signal(signal.SIGINT, lambda s, f: self.loop.stop())

        try:
            self.loop.run_forever()
        except __HOLE__:
            self.loop.stop()
KeyboardInterrupt
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/AiohttpServer.run
2,791
def run(self, handler):
        for sa in self.adapters:
            try:
                return sa(self.host, self.port, **self.options).run(handler)
            except __HOLE__:
                pass
ImportError
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/AutoServer.run
2,792
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
        interval=1, reloader=False, quiet=False, plugins=None,
        debug=None, **kargs):
    """ Start a server instance. This method blocks until the server terminates.

        :param app: WSGI application or target string supported by
               :func:`load_app`. (default: :func:`default_app`)
        :param server: Server adapter to use. See :data:`server_names` keys
               for valid names or pass a :class:`ServerAdapter` subclass.
               (default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on
               all interfaces including the external one. (default: 127.0.0.1)
        :param port: Server port to bind to. Values below 1024 require root
               privileges. (default: 8080)
        :param reloader: Start auto-reloading server? (default: False)
        :param interval: Auto-reloader interval in seconds (default: 1)
        :param quiet: Suppress output to stdout and stderr? (default: False)
        :param options: Options passed to the server adapter.
    """
    if NORUN:
        return
    if reloader and not os.environ.get('BOTTLE_CHILD'):
        import subprocess
        lockfile = None
        try:
            fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
            os.close(fd)  # We only need this file to exist. We never write to it
            while os.path.exists(lockfile):
                args = [sys.executable] + sys.argv
                environ = os.environ.copy()
                environ['BOTTLE_CHILD'] = 'true'
                environ['BOTTLE_LOCKFILE'] = lockfile
                p = subprocess.Popen(args, env=environ)
                while p.poll() is None:  # Busy wait...
                    os.utime(lockfile, None)  # I am alive!
                    time.sleep(interval)
                if p.poll() != 3:
                    if os.path.exists(lockfile):
                        os.unlink(lockfile)
                    sys.exit(p.poll())
        except KeyboardInterrupt:
            pass
        finally:
            if os.path.exists(lockfile):
                os.unlink(lockfile)
        return

    try:
        if debug is not None:
            _debug(debug)
        app = app or default_app()
        if isinstance(app, basestring):
            app = load_app(app)
        if not callable(app):
            raise ValueError("Application is not callable: %r" % app)

        for plugin in plugins or []:
            if isinstance(plugin, basestring):
                plugin = load(plugin)
            app.install(plugin)

        if server in server_names:
            server = server_names.get(server)
        if isinstance(server, basestring):
            server = load(server)
        if isinstance(server, type):
            server = server(host=host, port=port, **kargs)
        if not isinstance(server, ServerAdapter):
            raise ValueError("Unknown or unsupported server: %r" % server)

        server.quiet = server.quiet or quiet
        if not server.quiet:
            _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
            _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
            _stderr("Hit Ctrl-C to quit.\n\n")

        if reloader:
            lockfile = os.environ.get('BOTTLE_LOCKFILE')
            bgcheck = FileCheckerThread(lockfile, interval)
            with bgcheck:
                server.run(app)
            if bgcheck.status == 'reload':
                sys.exit(3)
        else:
            server.run(app)
    except KeyboardInterrupt:
        pass
    except (__HOLE__, MemoryError):
        raise
    except:
        if not reloader:
            raise
        if not getattr(server, 'quiet', quiet):
            print_exc()
        time.sleep(interval)
        sys.exit(3)
SystemExit
dataset/ETHPy150Open Arelle/Arelle/arelle/webserver/bottle.py/run
2,793
def execute_for(self, targets):
        session_setup = self.setup_repl_session(targets)
        self.context.release_lock()
        with stty_utils.preserve_stty_settings():
            with self.context.new_workunit(name='repl', labels=[WorkUnitLabel.RUN]):
                print('')  # Start REPL output on a new line.
                try:
                    return self.launch_repl(session_setup)
                except __HOLE__:
                    # This is a valid way to end a REPL session in general, so just break out of execute and
                    # continue.
                    pass
KeyboardInterrupt
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/task/repl_task_mixin.py/ReplTaskMixin.execute_for
2,794
def get_last_log(self, file_name=None, finished=False):
        """
        Execute query for the given command most recently started
        CalAccessCommandLog, unless finished=True, in which case query
        for the most recently finished.

        Commands that require a file / model as a positional argument
        should pass the file_name keyword argument.

        Returns a CalAccessCommandLog object or None, if no results.
        """
        if file_name:
            q = self.command_logs.filter(file_name=file_name)
        else:
            q = self.command_logs

        if finished:
            order_by_field = '-finish_datetime'
            q = q.filter(finish_datetime__isnull=False)
        else:
            order_by_field = '-start_datetime'

        try:
            last_log = q.filter(
                command=self
            ).order_by(order_by_field)[0]
        except __HOLE__:
            last_log = None

        return last_log
IndexError
dataset/ETHPy150Open california-civic-data-coalition/django-calaccess-raw-data/calaccess_raw/management/commands/__init__.py/CalAccessCommand.get_last_log
2,795
def get_caller_log(self):
        """
        If the command was called by another command, return the caller's
        RawDataCommandLog object. Else, return None.
        """
        caller = None
        if not self._called_from_command_line:
            # TODO: see if there's another way to identify caller
            #   in (edge) case when update is not called from command line
            # for now, assume the caller is the arg passed to manage.py
            # try getting the most recent log of this command for the version
            try:
                caller = self.command_logs.filter(
                    command=sys.argv[1]
                ).order_by('-start_datetime')[0]
            except __HOLE__:
                pass
        return caller

#
# Logging methods
#
IndexError
dataset/ETHPy150Open california-civic-data-coalition/django-calaccess-raw-data/calaccess_raw/management/commands/__init__.py/CalAccessCommand.get_caller_log
2,796
def main():
    args = parser.parse_args()

    if args.verbose > 1:
        logging.basicConfig(level=logging.DEBUG)
    elif args.verbose > 0:
        logging.basicConfig(level=logging.INFO)

    server = DAPLinkServer(args.address,
                           socket=args.socket,
                           interface=args.interface)
    server.init()

    print 'pyDAPLink server running'

    try:
        if args.temporary:
            while True:
                sleep(5)
                if server.client_count == 0:
                    break
        else:
            while True:
                sleep(60*60)
    except __HOLE__:
        pass
    finally:
        server.uninit()
KeyboardInterrupt
dataset/ETHPy150Open geky/pyDAPLink/pyDAPLink/tools/pydaplink_server.py/main
2,797
def __init__(self, symtable=None, writer=None, with_plugins=True):
        self.writer = writer or StdWriter()
        self.writer._larch = self
        if symtable is None:
            symtable = SymbolTable(larch=self)
        self.symtable = symtable
        self._interrupt = None
        self.error = []
        self.expr = None
        self.retval = None
        self.func = None
        self.fname = '<stdin>'
        self.lineno = 0
        builtingroup = symtable._builtin
        mathgroup = symtable._math
        setattr(mathgroup, 'j', 1j)

        # system-specific settings
        enable_plugins()
        site_config.system_settings()

        for sym in builtins.from_math:
            setattr(mathgroup, sym, getattr(math, sym))

        for sym in builtins.from_builtin:
            setattr(builtingroup, sym, __builtins__[sym])

        for sym in builtins.from_numpy:
            try:
                setattr(mathgroup, sym, getattr(numpy, sym))
            except __HOLE__:
                pass
        for fname, sym in list(builtins.numpy_renames.items()):
            setattr(mathgroup, fname, getattr(numpy, sym))

        for groupname, entries in builtins.local_funcs.items():
            group = getattr(symtable, groupname, None)
            if group is not None:
                for fname, fcn in list(entries.items()):
                    setattr(group, fname,
                            Closure(func=fcn, _larch=self, _name=fname))

        # set valid commands from builtins
        for cmd in builtins.valid_commands:
            self.symtable._sys.valid_commands.append(cmd)

        if with_plugins:  # add all plugins in standard plugins folder
            plugins_dir = os.path.join(site_config.larchdir, 'plugins')
            loaded_plugins = []
            for pname in site_config.core_plugins:
                pdir = os.path.join(plugins_dir, pname)
                if os.path.isdir(pdir):
                    builtins._addplugin(pdir, _larch=self)
                    loaded_plugins.append(pname)

            for pname in sorted(os.listdir(plugins_dir)):
                if pname not in loaded_plugins:
                    pdir = os.path.join(plugins_dir, pname)
                    if os.path.isdir(pdir):
                        builtins._addplugin(pdir, _larch=self)
                        loaded_plugins.append(pname)

        self.on_try = self.on_tryexcept
        self.node_handlers = dict(((node, getattr(self, "on_%s" % node))
                                   for node in self.supported_nodes))
AttributeError
dataset/ETHPy150Open xraypy/xraylarch/lib/interpreter.py/Interpreter.__init__
2,798
def run(self, node, expr=None, func=None,
            fname=None, lineno=None, with_raise=False):
        """executes parsed Ast representation for an expression"""
        # Note: keep the 'node is None' test: internal code here may run
        #    run(None) and expect a None in return.
        # print(" Run", node, expr)
        if node is None:
            return None
        if isinstance(node, str):
            node = self.parse(node)
        if lineno is not None:
            self.lineno = lineno
        if fname is not None:
            self.fname = fname
        if expr is not None:
            self.expr = expr
        if func is not None:
            self.func = func

        # get handler for this node:
        #   on_xxx with handle nodes of type 'xxx', etc
        if node.__class__.__name__.lower() not in self.node_handlers:
            return self.unimplemented(node)

        handler = self.node_handlers[node.__class__.__name__.lower()]

        # run the handler:  this will likely generate
        # recursive calls into this run method.
        try:
            out = handler(node)
        except:
            self.raise_exception(node, expr=self.expr,
                                 fname=self.fname, lineno=self.lineno)
        else:
            # for some cases (especially when using Parameter objects),
            # a calculation returns an otherwise numeric array, but with
            # dtype 'object'. fix here, trying (float, complex, list).
            if isinstance(out, numpy.ndarray):
                if out.dtype == numpy.object:
                    try:
                        out = out.astype(float)
                    except __HOLE__:
                        try:
                            out = out.astype(complex)
                        except TypeError:
                            out = list(out)
            # enumeration objects are list-ified here...
            if isinstance(out, enumerate):
                out = list(out)
            return out
TypeError
dataset/ETHPy150Open xraypy/xraylarch/lib/interpreter.py/Interpreter.run
2,799
def eval(self, expr, fname=None, lineno=0):
        """evaluates a single statement"""
        self.fname = fname
        self.lineno = lineno
        self.error = []
        try:
            node = self.parse(expr, fname=fname, lineno=lineno)
        except __HOLE__:
            errmsg = sys.exc_info()[1]
            if len(self.error) > 0:
                errtype, errmsg = self.error[0].get_error()
            return
        out = None
        try:
            return self.run(node, expr=expr, fname=fname, lineno=lineno)
        except RuntimeError:
            return
RuntimeError
dataset/ETHPy150Open xraypy/xraylarch/lib/interpreter.py/Interpreter.eval