Unnamed: 0   int64           0 .. 10k
function     string lengths  79 .. 138k
label        string classes  20 values
info         string lengths  42 .. 261
2,600
def pytest_configure(config):
    mode = config.getvalue("assertmode")
    if config.getvalue("noassert") or config.getvalue("nomagic"):
        mode = "plain"
    if mode == "rewrite":
        try:
            import ast  # noqa
        except __HOLE__:
            mode = "reinterp"
        else:
            # Both Jython and CPython 2.6.0 have AST bugs that make the
            # assertion rewriting hook malfunction.
            if (sys.platform.startswith('java') or
                    sys.version_info[:3] == (2, 6, 0)):
                mode = "reinterp"
    if mode != "plain":
        _load_modules(mode)
        m = monkeypatch()
        config._cleanup.append(m.undo)
        m.setattr(py.builtin.builtins, 'AssertionError',
                  reinterpret.AssertionError)  # noqa
    hook = None
    if mode == "rewrite":
        hook = rewrite.AssertionRewritingHook()  # noqa
        sys.meta_path.insert(0, hook)
    warn_about_missing_assertion(mode)
    config._assertstate = AssertionState(config, mode)
    config._assertstate.hook = hook
    config._assertstate.trace("configured with mode set to %r" % (mode,))

    def undo():
        hook = config._assertstate.hook
        if hook is not None and hook in sys.meta_path:
            sys.meta_path.remove(hook)

    config.add_cleanup(undo)
ImportError
dataset/ETHPy150Open pytest-dev/pytest/_pytest/assertion/__init__.py/pytest_configure
2,601
def warn_about_missing_assertion(mode):
    try:
        assert False
    except __HOLE__:
        pass
    else:
        if mode == "rewrite":
            specifically = ("assertions which are not in test modules "
                            "will be ignored")
        else:
            specifically = "failing tests may report as passing"

        sys.stderr.write("WARNING: " + specifically +
                         " because assert statements are not executed "
                         "by the underlying Python interpreter "
                         "(are you using python -O?)\n")


# Expose this plugin's implementation for the pytest_assertrepr_compare hook
AssertionError
dataset/ETHPy150Open pytest-dev/pytest/_pytest/assertion/__init__.py/warn_about_missing_assertion
2,602
def _write(self, str):
    """Write a string to the modem."""
    self._log(repr(str), "write")

    try:
        self.device.write(str)

    # if the device couldn't be written to,
    # wrap the error in something that can
    # sensibly be caught at a higher level
    except __HOLE__, err:
        raise(errors.GsmWriteError)
OSError
dataset/ETHPy150Open sahana/eden/modules/pygsm/gsmmodem.py/GsmModem._write
2,603
def test_noPermission(self):
    """
    Check it keeps working when permission on dir changes.
    """
    log = logfile.LogFile(self.name, self.dir)
    log.write("abc")

    # change permissions so rotation would fail
    os.chmod(self.dir, 0555)

    # if this succeeds, chmod doesn't restrict us, so we can't
    # do the test
    try:
        f = open(os.path.join(self.dir, "xxx"), "w")
    except (OSError, __HOLE__):
        pass
    else:
        f.close()
        return

    log.rotate()  # this should not fail

    log.write("def")
    log.flush()

    f = log._file
    self.assertEqual(f.tell(), 6)
    f.seek(0, 0)
    self.assertEqual(f.read(), "abcdef")
    log.close()
IOError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/test/test_logfile.py/LogFileTestCase.test_noPermission
2,604
def main():
    import sys, re

    args = sys.argv[1:]
    iptfile = args and args[0] or "Python/graminit.c"
    if len(args) > 1: optfile = args[1]
    else: optfile = "Lib/keyword.py"

    # scan the source file for keywords
    with open(iptfile) as fp:
        strprog = re.compile('"([^"]+)"')
        lines = []
        for line in fp:
            if '{1, "' in line:
                match = strprog.search(line)
                if match:
                    lines.append(" '" + match.group(1) + "',\n")
    lines.sort()

    # load the output skeleton from the target
    with open(optfile) as fp:
        format = fp.readlines()

    # insert the lines of keywords
    try:
        start = format.index("#--start keywords--\n") + 1
        end = format.index("#--end keywords--\n")
        format[start:end] = lines
    except __HOLE__:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(1)

    # write the output file
    fp = open(optfile, 'w')
    fp.write(''.join(format))
    fp.close()
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/keyword.py/main
2,605
def __new__(cls, *args, **kw):
    error = BotoServerError(*args, **kw)
    try:
        newclass = globals()[error.error_code]
    except __HOLE__:
        newclass = ResponseError
    obj = newclass.__new__(newclass, *args, **kw)
    obj.__dict__.update(error.__dict__)
    return obj
KeyError
dataset/ETHPy150Open darcyliu/storyboard/boto/mws/exception.py/ResponseErrorFactory.__new__
2,606
def post(self, step_id):
    jobstep = JobStep.query.options(
        joinedload('project', innerjoin=True),
    ).get(step_id)
    if jobstep is None:
        return '', 404

    args = self.post_parser.parse_args()

    current_datetime = args.date or datetime.utcnow()

    if args.result:
        jobstep.result = Result[args.result]

    if args.status:
        jobstep.status = Status[args.status]

    # if we've finished this job, lets ensure we have set date_finished
    if jobstep.status == Status.finished and jobstep.date_finished is None:
        jobstep.date_finished = current_datetime
    elif jobstep.status != Status.finished and jobstep.date_finished:
        jobstep.date_finished = None

    if jobstep.status != Status.queued and jobstep.date_started is None:
        jobstep.date_started = current_datetime
    elif jobstep.status == Status.queued and jobstep.date_started:
        jobstep.date_started = None

    if args.node:
        node, _ = get_or_create(Node, where={
            'label': args.node,
        })
        jobstep.node_id = node.id

    if args.metrics:
        try:
            metrics = json.loads(args.metrics)
        except __HOLE__:
            return {'message': 'Metrics was not valid JSON'}, 400
        if not isinstance(metrics, dict):
            return {'message': 'Metrics should be a JSON object'}, 400
        if 'metrics' in jobstep.data:
            jobstep.data['metrics'].update(metrics)
        else:
            jobstep.data['metrics'] = metrics

    # we want to guarantee that even if the jobstep seems to succeed, that
    # we accurately reflect what we internally would consider a success state
    if jobstep.result == Result.passed and jobstep.status == Status.finished:
        last_command = Command.query.filter(
            Command.jobstep_id == jobstep.id,
        ).order_by(Command.order.desc()).first()

        if not last_command:
            pass
        elif last_command.status != Status.finished:
            jobstep.result = Result.failed
        elif last_command.return_code != 0:
            jobstep.result = Result.failed
        # are we missing an expansion step? it must happen before reporting
        # the result, and would falsely give us a success metric
        elif last_command.type.is_collector() and is_final_jobphase(jobstep.phase):
            jobstep.result = Result.failed
            job = jobstep.job
            # TODO(dcramer): we should add a better failure reason
            db.session.add(FailureReason(
                step_id=jobstep.id,
                job_id=job.id,
                build_id=job.build_id,
                project_id=job.project_id,
                reason='missing_artifact',
            ))

    db.session.add(jobstep)
    if db.session.is_modified(jobstep):
        db.session.commit()

        # TODO(dcramer): this is a little bit hacky, but until we can entirely
        # move to push APIs we need a good way to handle the existing sync
        job = jobstep.job
        sync_job.delay_if_needed(
            job_id=job.id.hex,
            task_id=job.id.hex,
            parent_task_id=job.build_id.hex,
        )
    elif args.metrics:
        # Check for args.metrics because is_modified doesn't detect if data['metrics'] gets updated.
        # is_modified works fine for map creation, but not map updation.
        db.session.commit()

    return self.respond(jobstep)
ValueError
dataset/ETHPy150Open dropbox/changes/changes/api/jobstep_details.py/JobStepDetailsAPIView.post
2,607
def read_raw_gpsd_data(self):
    """Read the newest data from gpsd and return it"""
    try:
        if self.session.waiting():
            report = self.session.next()
            return report
        else:
            return None
    except __HOLE__:
        raise GPSDError()
StopIteration
dataset/ETHPy150Open FishPi/FishPi-POCV---Command---Control/fishpi/sensor/gpsd/gpsd_interface.py/gpsdInterface.read_raw_gpsd_data
2,608
def _convert_to_number_without_precision(self, item):
    try:
        if utils.is_jython:
            item = self._handle_java_numbers(item)
        return float(item)
    except:
        error = utils.get_error_message()
        try:
            return float(self._convert_to_integer(item))
        except __HOLE__:
            raise RuntimeError("'%s' cannot be converted to a floating "
                               "point number: %s" % (item, error))
RuntimeError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/libraries/BuiltIn.py/_Converter._convert_to_number_without_precision
2,609
def _get_var_name(self, orig):
    name = self._resolve_possible_variable(orig)
    try:
        return self._unescape_variable_if_needed(name)
    except __HOLE__:
        raise RuntimeError("Invalid variable syntax '%s'" % orig)
ValueError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/libraries/BuiltIn.py/_Variables._get_var_name
2,610
def _resolve_possible_variable(self, name):
    try:
        resolved = self._variables[name]
        return self._unescape_variable_if_needed(resolved)
    except (KeyError, __HOLE__, DataError):
        return name
ValueError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/libraries/BuiltIn.py/_Variables._resolve_possible_variable
2,611
def call_method(self, object, method_name, *args):
    """Calls the named method of the given object with the provided arguments.

    The possible return value from the method is returned and can be
    assigned to a variable. Keyword fails both if the object does not have
    a method with the given name or if executing the method raises an
    exception.

    Examples:
    | Call Method        | ${hashtable} | put          | myname  | myvalue |
    | ${isempty} =       | Call Method  | ${hashtable} | isEmpty |         |
    | Should Not Be True | ${isempty}   |              |         |         |
    | ${value} =         | Call Method  | ${hashtable} | get     | myname  |
    | Should Be Equal    | ${value}     | myvalue      |         |         |
    """
    try:
        method = getattr(object, method_name)
    except __HOLE__:
        raise RuntimeError("Object '%s' does not have a method '%s'"
                           % (object, method_name))
    return method(*args)
AttributeError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/libraries/BuiltIn.py/_Misc.call_method
2,612
def test_RowSetTable():
    row_set_json = {
        'etag': 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
        'headers': [
            {'columnType': 'STRING', 'id': '353', 'name': 'name'},
            {'columnType': 'DOUBLE', 'id': '355', 'name': 'x'},
            {'columnType': 'DOUBLE', 'id': '3020', 'name': 'y'},
            {'columnType': 'INTEGER', 'id': '891', 'name': 'n'}],
        'rows': [
            {'rowId': 5, 'values': ['foo', '1.23', '2.2', '101'], 'versionNumber': 3},
            {'rowId': 6, 'values': ['bar', '1.34', '2.4', '101'], 'versionNumber': 3},
            {'rowId': 7, 'values': ['foo', '1.23', '2.2', '101'], 'versionNumber': 4},
            {'rowId': 8, 'values': ['qux', '1.23', '2.2', '102'], 'versionNumber': 3}],
        'tableId': 'syn2976298'}

    row_set = RowSet.from_json(row_set_json)

    assert row_set.etag == 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
    assert row_set.tableId == 'syn2976298'
    assert len(row_set.headers) == 4
    assert len(row_set.rows) == 4

    schema = Schema(id="syn2976298", name="Bogus Schema", columns=[353,355,3020,891], parent="syn1000001")

    table = Table(schema, row_set)

    assert table.etag == 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
    assert table.tableId == 'syn2976298'
    assert len(table.headers) == 4
    assert len(table.asRowSet().rows) == 4

    try:
        import pandas as pd

        df = table.asDataFrame()
        assert df.shape == (4,4)
        assert all(df['name'] == ['foo', 'bar', 'foo', 'qux'])

    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping part of test_RowSetTable.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/unit/unit_test_tables.py/test_RowSetTable
2,613
def test_as_table_columns():
    try:
        import pandas as pd

        df = pd.DataFrame({
            'foobar' : ("foo", "bar", "baz", "qux", "asdf"),
            'x' : tuple(math.pi*i for i in range(5)),
            'n' : (101, 202, 303, 404, 505),
            'really' : (False, True, False, True, False),
            'size' : ('small', 'large', 'medium', 'medium', 'large')})

        cols = as_table_columns(df)

        cols[0]['name'] == 'foobar'
        cols[0]['columnType'] == 'STRING'
        cols[1]['name'] == 'x'
        cols[1]['columnType'] == 'DOUBLE'
        cols[1]['name'] == 'n'
        cols[1]['columnType'] == 'INTEGER'
        cols[1]['name'] == 'really'
        cols[1]['columnType'] == 'BOOLEAN'
        cols[1]['name'] == 'size'
        # TODO: support Categorical when fully supported in Pandas Data Frames
        cols[1]['columnType'] == 'STRING'

    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping test_as_table_columns.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/unit/unit_test_tables.py/test_as_table_columns
2,614
def test_pandas_to_table():
    try:
        import pandas as pd

        df = pd.DataFrame(dict(a=[1,2,3], b=["c", "d", "e"]))
        schema = Schema(name="Baz", parent="syn12345", columns=as_table_columns(df))

        print("\n", df, "\n\n")

        ## A dataframe with no row id and version
        table = Table(schema, df)
        for i, row in enumerate(table):
            print(row)
            assert row[0]==(i+1)
            assert row[1]==["c", "d", "e"][i]

        assert len(table)==3

        ## If includeRowIdAndRowVersion=True, include empty row id an versions
        ## ROW_ID,ROW_VERSION,a,b
        ## ,,1,c
        ## ,,2,d
        ## ,,3,e
        table = Table(schema, df, includeRowIdAndRowVersion=True)
        for i, row in enumerate(table):
            print(row)
            assert row[0] is None
            assert row[1] is None
            assert row[2]==(i+1)

        ## A dataframe with no row id and version
        df = pd.DataFrame(index=["1_7","2_7","3_8"], data=dict(a=[100,200,300], b=["c", "d", "e"]))
        print("\n", df, "\n\n")

        table = Table(schema, df)
        for i, row in enumerate(table):
            print(row)
            assert row[0]==["1","2","3"][i]
            assert row[1]==["7","7","8"][i]
            assert row[2]==(i+1)*100
            assert row[3]==["c", "d", "e"][i]

        ## A dataframe with row id and version in columns
        df = pd.DataFrame(dict(ROW_ID=["0","1","2"], ROW_VERSION=["8","9","9"], a=[100,200,300], b=["c", "d", "e"]))
        print("\n", df, "\n\n")

        table = Table(schema, df)
        for i, row in enumerate(table):
            print(row)
            assert row[0]==["0","1","2"][i]
            assert row[1]==["8","9","9"][i]
            assert row[2]==(i+1)*100
            assert row[3]==["c", "d", "e"][i]

    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping test_pandas_to_table.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/unit/unit_test_tables.py/test_pandas_to_table
2,615
def test_csv_table():
    ## Maybe not truly a unit test, but here because it doesn't do
    ## network IO to synapse
    data = [["1", "1", "John Coltrane", 1926, 8.65, False],
            ["2", "1", "Miles Davis", 1926, 9.87, False],
            ["3", "1", "Bill Evans", 1929, 7.65, False],
            ["4", "1", "Paul Chambers", 1935, 5.14, False],
            ["5", "1", "Jimmy Cobb", 1929, 5.78, True],
            ["6", "1", "Scott LaFaro", 1936, 4.21, False],
            ["7", "1", "Sonny Rollins", 1930, 8.99, True],
            ["8", "1", "Kenny Burrel", 1931, 4.37, True]]

    filename = None

    cols = []
    cols.append(Column(id='1', name='Name', columnType='STRING'))
    cols.append(Column(id='2', name='Born', columnType='INTEGER'))
    cols.append(Column(id='3', name='Hipness', columnType='DOUBLE'))
    cols.append(Column(id='4', name='Living', columnType='BOOLEAN'))

    schema1 = Schema(id='syn1234', name='Jazz Guys', columns=cols, parent="syn1000001")

    #TODO: use StringIO.StringIO(data) rather than writing files
    try:
        ## create CSV file
        with tempfile.NamedTemporaryFile(delete=False) as temp:
            filename = temp.name

        with io.open(filename, mode='w', encoding="utf-8", newline='') as temp:
            writer = csv.writer(temp, quoting=csv.QUOTE_NONNUMERIC, lineterminator=str(os.linesep))
            headers = ['ROW_ID', 'ROW_VERSION'] + [col.name for col in cols]
            writer.writerow(headers)
            for row in data:
                print(row)
                writer.writerow(row)

        table = Table(schema1, filename)
        assert isinstance(table, CsvFileTable)

        ## need to set column headers to read a CSV file
        table.setColumnHeaders(
            [SelectColumn(name="ROW_ID", columnType="STRING"),
             SelectColumn(name="ROW_VERSION", columnType="STRING")] +
            [SelectColumn.from_column(col) for col in cols])

        ## test iterator
        # print("\n\nJazz Guys")
        for table_row, expected_row in zip(table, data):
            # print(table_row, expected_row)
            assert table_row==expected_row

        ## test asRowSet
        rowset = table.asRowSet()
        for rowset_row, expected_row in zip(rowset.rows, data):
            #print(rowset_row, expected_row)
            assert rowset_row['values']==expected_row[2:]
            assert rowset_row['rowId']==expected_row[0]
            assert rowset_row['versionNumber']==expected_row[1]

        ## test asDataFrame
        try:
            import pandas as pd

            df = table.asDataFrame()
            assert all(df['Name'] == [row[2] for row in data])
            assert all(df['Born'] == [row[3] for row in data])
            assert all(df['Living'] == [row[5] for row in data])
            assert all(df.index == ['%s_%s'%tuple(row[0:2]) for row in data])
            assert df.shape == (8,4)

        except __HOLE__ as e1:
            sys.stderr.write('Pandas is apparently not installed, skipping asDataFrame portion of test_csv_table.\n\n')

    except Exception as ex1:
        if filename:
            try:
                if os.path.isdir(filename):
                    shutil.rmtree(filename)
                else:
                    os.remove(filename)
            except Exception as ex:
                print(ex)
        raise
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/unit/unit_test_tables.py/test_csv_table
2,616
def test_list_of_rows_table():
    data = [["John Coltrane", 1926, 8.65, False],
            ["Miles Davis", 1926, 9.87, False],
            ["Bill Evans", 1929, 7.65, False],
            ["Paul Chambers", 1935, 5.14, False],
            ["Jimmy Cobb", 1929, 5.78, True],
            ["Scott LaFaro", 1936, 4.21, False],
            ["Sonny Rollins", 1930, 8.99, True],
            ["Kenny Burrel", 1931, 4.37, True]]

    cols = []
    cols.append(Column(id='1', name='Name', columnType='STRING'))
    cols.append(Column(id='2', name='Born', columnType='INTEGER'))
    cols.append(Column(id='3', name='Hipness', columnType='DOUBLE'))
    cols.append(Column(id='4', name='Living', columnType='BOOLEAN'))

    schema1 = Schema(name='Jazz Guys', columns=cols, id="syn1000002", parent="syn1000001")

    ## need columns to do cast_values w/o storing
    table = Table(schema1, data, headers=[SelectColumn.from_column(col) for col in cols])

    for table_row, expected_row in zip(table, data):
        assert table_row==expected_row

    rowset = table.asRowSet()
    for rowset_row, expected_row in zip(rowset.rows, data):
        assert rowset_row['values']==expected_row

    table.columns = cols

    ## test asDataFrame
    try:
        import pandas as pd

        df = table.asDataFrame()
        assert all(df['Name'] == [r[0] for r in data])

    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping asDataFrame portion of test_list_of_rows_table.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/unit/unit_test_tables.py/test_list_of_rows_table
2,617
def test_aggregate_query_result_to_data_frame():
    try:
        import pandas as pd

        class MockSynapse(object):
            def _queryTable(self, query, limit=None, offset=None, isConsistent=True, partMask=None):
                return {'concreteType': 'org.sagebionetworks.repo.model.table.QueryResultBundle',
                        'maxRowsPerPage': 2,
                        'queryCount': 4,
                        'queryResult': {
                            'concreteType': 'org.sagebionetworks.repo.model.table.QueryResult',
                            'nextPageToken': 'aaaaaaaa',
                            'queryResults': {'etag': 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
                                             'headers': [
                                                 {'columnType': 'STRING', 'name': 'State'},
                                                 {'columnType': 'INTEGER', 'name': 'MIN(Born)'},
                                                 {'columnType': 'INTEGER', 'name': 'COUNT(State)'},
                                                 {'columnType': 'DOUBLE', 'name': 'AVG(Hipness)'}],
                                             'rows': [
                                                 {'values': ['PA', '1935', '2', '1.1']},
                                                 {'values': ['MO', '1928', '3', '2.38']}],
                                             'tableId': 'syn2757980'}},
                        'selectColumns': [{
                            'columnType': 'STRING',
                            'id': '1387',
                            'name': 'State'}]}

            def _queryTableNext(self, nextPageToken, tableId):
                return {'concreteType': 'org.sagebionetworks.repo.model.table.QueryResult',
                        'queryResults': {'etag': 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
                                         'headers': [
                                             {'columnType': 'STRING', 'name': 'State'},
                                             {'columnType': 'INTEGER', 'name': 'MIN(Born)'},
                                             {'columnType': 'INTEGER', 'name': 'COUNT(State)'},
                                             {'columnType': 'DOUBLE', 'name': 'AVG(Hipness)'}],
                                         'rows': [
                                             {'values': ['DC', '1929', '1', '3.14']},
                                             {'values': ['NC', '1926', '1', '4.38']}],
                                         'tableId': 'syn2757980'}}

        result = TableQueryResult(synapse=MockSynapse(), query="select State, min(Born), count(State), avg(Hipness) from syn2757980 group by Living")

        assert result.etag == 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
        assert result.tableId == 'syn2757980'
        assert len(result.headers) == 4

        rs = result.asRowSet()
        assert len(rs.rows) == 4

        result = TableQueryResult(synapse=MockSynapse(), query="select State, min(Born), count(State), avg(Hipness) from syn2757980 group by Living")
        df = result.asDataFrame()

        assert df.shape == (4,4)
        assert all(df['State'].values == ['PA', 'MO', 'DC', 'NC'])

        ## check integer, double and boolean types after PLFM-3073 is fixed
        assert all(df['MIN(Born)'].values == [1935, 1928, 1929, 1926]), "Unexpected values" + str(df['MIN(Born)'].values)
        assert all(df['COUNT(State)'].values == [2,3,1,1])
        assert all(df['AVG(Hipness)'].values == [1.1, 2.38, 3.14, 4.38])

    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping asDataFrame portion of test_aggregate_query_result_to_data_frame.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/unit/unit_test_tables.py/test_aggregate_query_result_to_data_frame
2,618
@classmethod
def Open(cls, fd, component, pathspec=None, progress_callback=None,
         full_pathspec=None):
  """Try to correct the casing of component.

  This method is called when we failed to open the component directly. We
  try to transform the component into something which is likely to work.

  In this implementation, we correct the case of the component until we can
  not open the path any more.

  Args:
    fd: The base fd we will use.
    component: The component we should open.
    pathspec: The rest of the pathspec object.
    progress_callback: A callback to indicate that the open call is still
                       working but needs more time.
    full_pathspec: The full pathspec we are trying to open.

  Returns:
    A file object.

  Raises:
    IOError: If nothing could be opened still.
  """
  # The handler for this component
  try:
    handler = VFS_HANDLERS[component.pathtype]
  except KeyError:
    raise IOError(
        "VFS handler %d not supported." % component.pathtype)

  # We will not do any case folding unless requested.
  if component.path_options == rdf_paths.PathSpec.Options.CASE_LITERAL:
    return handler(base_fd=fd, pathspec=component)

  path_components = client_utils.LocalPathToCanonicalPath(component.path)
  path_components = ["/"] + filter(None, path_components.split("/"))
  for i, path_component in enumerate(path_components):
    try:
      if fd:
        new_pathspec = fd.MatchBestComponentName(path_component)
      else:
        new_pathspec = component
        new_pathspec.path = path_component

      # The handler for this component
      try:
        handler = VFS_HANDLERS[new_pathspec.pathtype]
      except KeyError:
        raise IOError(
            "VFS handler %d not supported." % new_pathspec.pathtype)

      fd = handler(base_fd=fd, pathspec=new_pathspec,
                   full_pathspec=full_pathspec,
                   progress_callback=progress_callback)
    except __HOLE__:
      # Can not open the first component, we must raise here.
      if i <= 1:
        raise IOError("File not found")

      # Insert the remaining path at the front of the pathspec.
      pathspec.Insert(0, path=utils.JoinPath(*path_components[i:]),
                      pathtype=rdf_paths.PathSpec.PathType.TSK)
      break

  return fd
IOError
dataset/ETHPy150Open google/grr/grr/client/vfs.py/VFSHandler.Open
2,619
def Run(self):
  VFS_HANDLERS.clear()
  for handler in VFSHandler.classes.values():
    if handler.auto_register:
      VFS_HANDLERS[handler.supported_pathtype] = handler

  VFS_VIRTUALROOTS.clear()
  vfs_virtualroots = config_lib.CONFIG["Client.vfs_virtualroots"]
  for vfs_virtualroot in vfs_virtualroots:
    try:
      handler_string, root = vfs_virtualroot.split(":", 1)
    except __HOLE__:
      raise ValueError(
          "Badly formatted vfs virtual root: %s. Correct format is "
          "os:/path/to/virtual_root" % vfs_virtualroot)

    handler_string = handler_string.upper()
    handler = rdf_paths.PathSpec.PathType.enum_dict.get(handler_string)
    if handler is None:
      raise ValueError("Unsupported vfs handler: %s." % handler_string)

    # We need some translation here, TSK needs an OS virtual root base. For
    # every other handler we can just keep the type the same.
    base_types = {
        rdf_paths.PathSpec.PathType.TSK: rdf_paths.PathSpec.PathType.OS
    }
    base_type = base_types.get(handler, handler)
    VFS_VIRTUALROOTS[handler] = rdf_paths.PathSpec(
        path=root, pathtype=base_type, is_virtualroot=True)
ValueError
dataset/ETHPy150Open google/grr/grr/client/vfs.py/VFSInit.Run
2,620
def VFSOpen(pathspec, progress_callback=None):
  """Expands pathspec to return an expanded Path.

  A pathspec is a specification of how to access the file by recursively
  opening each part of the path by different drivers. For example the
  following pathspec:

  pathtype: OS
  path: "/dev/sda1"
  nested_path {
    pathtype: TSK
    path: "/home/image2.img"
    nested_path {
      pathtype: TSK
      path: "/home/a.txt"
    }
  }

  Instructs the system to:
  1) open /dev/sda1 using the OS driver.
  2) Pass the obtained filelike object to the TSK driver to open
  "/home/image2.img".
  3) The obtained filelike object should be passed to the TSK driver to open
  "/home/a.txt".

  The problem remains how to get to this expanded path specification. Since
  the server is not aware of all the files on the client, the server may
  request this:

  pathtype: OS
  path: "/dev/sda1"
  nested_path {
    pathtype: TSK
    path: "/home/image2.img/home/a.txt"
  }

  Or even this:

  pathtype: OS
  path: "/dev/sda1/home/image2.img/home/a.txt"

  This function converts the pathspec requested by the server into an
  expanded pathspec required to actually open the file. This is done by
  expanding each component of the pathspec in turn.

  Expanding the component is done by opening each leading directory in turn
  and checking if it is a directory of a file. If its a file, we examine the
  file headers to determine the next appropriate driver to use, and create a
  nested pathspec.

  Note that for some clients there might be a virtual root specified. This is
  a directory that gets prepended to all pathspecs of a given pathtype. For
  example if there is a virtual root defined as ["os:/virtualroot"], a path
  specification like

  pathtype: OS
  path: "/home/user/*"

  will get translated into

  pathtype: OS
  path: "/virtualroot"
  is_virtualroot: True
  nested_path {
    pathtype: OS
    path: "/dev/sda1"
  }

  Args:
    pathspec: A Path() protobuf to normalize.
    progress_callback: A callback to indicate that the open call is still
                       working but needs more time.

  Returns:
    The open filelike object. This will contain the expanded Path() protobuf
    as the member fd.pathspec.

  Raises:
    IOError: if one of the path components can not be opened.
  """
  fd = None

  # Adjust the pathspec in case we are using a vfs_virtualroot.
  vroot = VFS_VIRTUALROOTS.get(pathspec.pathtype)

  # If we have a virtual root for this vfs handler, we need to prepend
  # it to the incoming pathspec except if the pathspec is explicitly
  # marked as containing a virtual root already or if it isn't marked but
  # the path already contains the virtual root.
  if (not vroot or
      pathspec.is_virtualroot or
      pathspec.CollapsePath().startswith(vroot.CollapsePath())):
    # No virtual root but opening changes the pathspec so we always work on a
    # copy.
    working_pathspec = pathspec.Copy()
  else:
    # We're in a virtual root, put the target pathspec inside the virtual
    # root as a nested path.
    working_pathspec = vroot.Copy()
    working_pathspec.last.nested_path = pathspec.Copy()

  # For each pathspec step, we get the handler for it and instantiate it with
  # the old object, and the current step.
  while working_pathspec:
    component = working_pathspec.Pop()
    try:
      handler = VFS_HANDLERS[component.pathtype]
    except KeyError:
      raise IOError(
          "VFS handler %d not supported." % component.pathtype)

    try:
      # Open the component.
      fd = handler.Open(fd, component, pathspec=working_pathspec,
                        full_pathspec=pathspec,
                        progress_callback=progress_callback)
    except __HOLE__ as e:
      raise IOError("%s: %s" % (e, pathspec))

  return fd
IOError
dataset/ETHPy150Open google/grr/grr/client/vfs.py/VFSOpen
2,621
def subclass_iterator(cls, _seen=None):
    """
    Generator over all subclasses of a given class, in depth first order.

    Source: http://code.activestate.com/recipes/576949-find-all-subclasses-of-a-given-class/
    """

    if not isinstance(cls, type):
        raise TypeError('_subclass_iterator must be called with '
                        'new-style classes, not %.100r' % cls)

    _seen = _seen or set()

    try:
        subs = cls.__subclasses__()
    except __HOLE__:  # fails only when cls is type
        subs = cls.__subclasses__(cls)
    for sub in subs:
        if sub not in _seen:
            _seen.add(sub)
            yield sub
            for sub in subclass_iterator(sub, _seen):
                yield sub
TypeError
dataset/ETHPy150Open Stiivi/brewery/brewery/utils.py/subclass_iterator
2,622
def __init__(self, file_name='-', read_values=False, debug=False):
    self._debug = debug
    self._read_values = read_values

    if file_name == '-':
        fh = sys.stdin
    else:
        try:
            fh = open(file_name)
        except __HOLE__ as ex:
            raise ThriftFile.Error('Could not open %s: %s' % (file_name, ex))

    if HAS_MMAP and file_name != '-':
        self._data = mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ)
        self._view = None
    else:
        # this might hurt...
        self._data = fh.read()
        self._view = memoryview(self._data)
IOError
dataset/ETHPy150Open pinterest/thrift-tools/thrift_tools/thrift_file.py/ThriftFile.__init__
2,623
def upgrade():
    op.create_table('file',
        sa.Column('key', sa.Integer(), nullable=False),
        sa.Column('revision_key', sa.Integer(), nullable=False, index=True),
        sa.Column('path', sa.Text(), nullable=False, index=True),
        sa.Column('old_path', sa.Text(), index=True),
        sa.Column('status', sa.String(length=1)),
        sa.Column('inserted', sa.Integer()),
        sa.Column('deleted', sa.Integer()),
        sa.PrimaryKeyConstraint('key')
    )

    pathre = re.compile('((.*?)\{|^)(.*?) => (.*?)(\}(.*)|$)')
    insert = sa.text('insert into file (key, revision_key, path, old_path, status, inserted, deleted) '
                     ' values (NULL, :revision_key, :path, :old_path, :status, :inserted, :deleted)')
    conn = op.get_bind()
    countres = conn.execute('select count(*) from revision')
    revisions = countres.fetchone()[0]
    if revisions > 50:
        print('')
        print('Adding support for searching for changes by file modified. '
              'This may take a while.')
    qres = conn.execute('select p.name, c.number, c.status, r.key, r.number, r."commit", r.parent from project p, change c, revision r '
                        'where r.change_key=c.key and c.project_key=p.key order by p.name')
    count = 0
    for (pname, cnumber, cstatus, rkey, rnumber, commit, parent) in qres.fetchall():
        count += 1
        sys.stdout.write('Diffstat revision %s / %s\r' % (count, revisions))
        sys.stdout.flush()
        ires = conn.execute(insert, revision_key=rkey,
                            path='/COMMIT_MSG',
                            old_path=None,
                            status=None,
                            inserted=None,
                            deleted=None)
        repo = gertty.gitrepo.get_repo(pname, context.config.gertty_app.config)
        try:
            stats = repo.diffstat(parent, commit)
        except git.exc.GitCommandError:
            # Probably a missing commit
            if cstatus not in ['MERGED', 'ABANDONED']:
                print("Unable to examine diff for %s %s change %s,%s" % (cstatus, pname, cnumber, rnumber))
            continue
        for stat in stats:
            try:
                (added, removed, path) = stat
            except __HOLE__:
                if cstatus not in ['MERGED', 'ABANDONED']:
                    print("Empty diffstat for %s %s change %s,%s" % (cstatus, pname, cnumber, rnumber))
            m = pathre.match(path)
            status = gertty.db.File.STATUS_MODIFIED
            old_path = None
            if m:
                status = gertty.db.File.STATUS_RENAMED
                pre = m.group(2) or ''
                post = m.group(6) or ''
                old_path = pre+m.group(3)+post
                path = pre+m.group(4)+post
            try:
                added = int(added)
            except ValueError:
                added = None
            try:
                removed = int(removed)
            except ValueError:
                removed = None
            conn.execute(insert, revision_key=rkey,
                         path=path,
                         old_path=old_path,
                         status=status,
                         inserted=added,
                         deleted=removed)
    print('')
ValueError
dataset/ETHPy150Open openstack/gertty/gertty/alembic/versions/50344aecd1c2_add_files_table.py/upgrade
2,624
def _from_native_list(self, condition_list):
    '''
    These arrive in one of three formats:
        - ["content-length-range", 1048579, 10485760]
        - ["content-length-range", 1024]
        - ["starts-with", "$key", "user/eric/"]

    Returns an object with these attributes set:
        - operator: 'eq', 'starts-with', or None
        - element_name: 'content-length-range', 'key', etc.
        - value: "user/eric/", 1024, or None
        - value_range: [1048579, 10485760] or None
    '''
    from numbers import Number
    from drf_to_s3.models import PolicyCondition
    original_condition_list = condition_list  # We use this for error reporting
    condition_list = list(condition_list)
    for item in condition_list:
        if not isinstance(item, basestring) and not isinstance(item, Number):
            raise ValidationError(
                _('Values in condition arrays should be numbers or strings'),
            )
    try:
        if condition_list[0] in ['eq', 'starts-with']:
            operator = condition_list.pop(0)
        else:
            operator = None
    except IndexError:
        raise ValidationError(
            _('Empty condition array: %(condition)s'),
            params={'condition': original_condition_list},
        )
    try:
        element_name = condition_list.pop(0)
    except __HOLE__:
        raise ValidationError(
            _('Missing element in condition array: %(condition)s'),
            params={'condition': original_condition_list},
        )
    if operator:
        if element_name.startswith('$'):
            element_name = element_name[1:]
        else:
            raise ValidationError(
                _('Element name in condition array should start with $: %(element_name)s'),
                params={'element_name': element_name},
            )
    if len(condition_list) == 0:
        raise ValidationError(
            _('Missing values in condition array: %(condition)s'),
            params={'condition': original_condition_list},
        )
    elif len(condition_list) == 1:
        value = condition_list.pop(0)
        value_range = None
    elif len(condition_list) == 2:
        value = None
        value_range = condition_list
    else:
        raise ValidationError(
            _('Too many values in condition array: %(condition)s'),
            params={'condition': original_condition_list},
        )
    return PolicyCondition(
        operator=operator,
        element_name=element_name,
        value=value,
        value_range=value_range
    )
IndexError
dataset/ETHPy150Open bodylabs/drf-to-s3/drf_to_s3/naive_serializers.py/NaivePolicyConditionField._from_native_list
2,625
def validate(self, attrs):
    '''
    1. Disallow multiple conditions with the same element name
    2. Use introspection to validate individual conditions which are present.
    '''
    from .util import duplicates_in
    conditions = attrs.get('conditions', [])
    errors = {}

    all_names = [item.element_name for item in conditions]
    for name in duplicates_in(all_names):
        message = _('Duplicate element name')
        errors['conditions.' + name] = [message]

    for item in conditions:
        # FIXME this needs to sanitize the arguments a bit more
        # validate_condition_Content-Type -> validate_condition_Content_Type
        sanitized_element_name = item.element_name.replace('-', '_')
        condition_validate = getattr(self, "validate_condition_%s" % sanitized_element_name, None)
        if condition_validate:
            try:
                condition_validate(item)
            except __HOLE__ as err:
                field_name = 'conditions.' + item.element_name
                errors[field_name] = errors.get(field_name, []) + list(err.messages)
    if len(errors):
        raise ValidationError(errors)
    else:
        return attrs
ValidationError
dataset/ETHPy150Open bodylabs/drf-to-s3/drf_to_s3/naive_serializers.py/NaivePolicySerializer.validate
2,626
@webapi_check_local_site
@webapi_login_required
@webapi_request_fields(
    required=dict({
        'screenshot_id': {
            'type': int,
            'description': 'The ID of the screenshot being commented on.',
        },
        'x': {
            'type': int,
            'description': 'The X location for the comment.',
        },
        'y': {
            'type': int,
            'description': 'The Y location for the comment.',
        },
        'w': {
            'type': int,
            'description': 'The width of the comment region.',
        },
        'h': {
            'type': int,
            'description': 'The height of the comment region.',
        },
    }, **BaseScreenshotCommentResource.REQUIRED_CREATE_FIELDS),
    optional=BaseScreenshotCommentResource.OPTIONAL_CREATE_FIELDS,
    allow_unknown=True,
)
def create(self, request, screenshot_id, *args, **kwargs):
    """Creates a screenshot comment on a review.

    This will create a new comment on a screenshot as part of a review.
    The comment contains text and dimensions for the area being commented
    on.
    """
    try:
        review_request = \
            resources.review_request.get_object(request, *args, **kwargs)
        review = resources.review.get_object(request, *args, **kwargs)
    except ObjectDoesNotExist:
        return DOES_NOT_EXIST

    if not resources.review.has_modify_permissions(request, review):
        return self.get_no_access_error(request)

    try:
        screenshot = Screenshot.objects.get(pk=screenshot_id,
                                            review_request=review_request)
    except __HOLE__:
        return INVALID_FORM_DATA, {
            'fields': {
                'screenshot_id': ['This is not a valid screenshot ID'],
            }
        }

    new_comment = self.create_comment(
        review=review,
        screenshot=screenshot,
        fields=('screenshot', 'x', 'y', 'w', 'h'),
        **kwargs)

    review.screenshot_comments.add(new_comment)

    return 201, {
        self.item_result_key: new_comment,
    }
ObjectDoesNotExist
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/review_screenshot_comment.py/ReviewScreenshotCommentResource.create
2,627
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(
    optional=dict({
        'x': {
            'type': int,
            'description': 'The X location for the comment.',
        },
        'y': {
            'type': int,
            'description': 'The Y location for the comment.',
        },
        'w': {
            'type': int,
            'description': 'The width of the comment region.',
        },
        'h': {
            'type': int,
            'description': 'The height of the comment region.',
        },
    }, **BaseScreenshotCommentResource.OPTIONAL_UPDATE_FIELDS),
    allow_unknown=True
)
def update(self, request, *args, **kwargs):
    """Updates a screenshot comment.

    This can update the text or region of an existing comment. It
    can only be done for comments that are part of a draft review.
    """
    try:
        resources.review_request.get_object(request, *args, **kwargs)
        review = resources.review.get_object(request, *args, **kwargs)
        screenshot_comment = self.get_object(request, *args, **kwargs)
    except __HOLE__:
        return DOES_NOT_EXIST

    # Determine whether or not we're updating the issue status.
    if self.should_update_issue_status(screenshot_comment, **kwargs):
        return self.update_issue_status(request, self, *args, **kwargs)

    if not resources.review.has_modify_permissions(request, review):
        return self.get_no_access_error(request)

    self.update_comment(screenshot_comment, ('x', 'y', 'w', 'h'), **kwargs)

    return 200, {
        self.item_result_key: screenshot_comment,
    }
ObjectDoesNotExist
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/review_screenshot_comment.py/ReviewScreenshotCommentResource.update
2,628
def is_matching(self, name):
    """Match given name with the step name."""
    try:
        return bool(self.parser.parse(name))
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open pytest-dev/pytest-bdd/pytest_bdd/parsers.py/parse.is_matching
2,629
def __write_read(self, write_fn, read_fn, ep, length = 8):
    intf = self.backend.get_interface_descriptor(self.dev, 0, 0, 0).bInterfaceNumber
    for data in (utils.get_array_data1(length), utils.get_array_data2(length)):
        length = len(data) * data.itemsize

        try:
            ret = write_fn(self.handle, ep, intf, data, 1000)
        except __HOLE__:
            return

        self.assertEqual(ret,
                         length,
                         'Failed to write data: ' + \
                             str(data) + \
                             ', in EP = ' + \
                             str(ep))

        buff = usb.util.create_buffer(length)

        try:
            ret = read_fn(self.handle, ep | usb.util.ENDPOINT_IN, intf, buff, 1000)
        except NotImplementedError:
            return

        self.assertEqual(ret, length, str(ret) + ' != ' + str(length))
        self.assertEqual(buff,
                         data,
                         'Failed to read data: ' + \
                             str(data) + \
                             ', in EP = ' + \
                             str(ep))
        if utils.is_windows():
            time.sleep(0.5)
NotImplementedError
dataset/ETHPy150Open walac/pyusb/tests/test_backend.py/BackendTest.__write_read
2,630
def run(self, batch):
    """ Execute a collection of jobs and return all results.

    :param batch: A :class:`.Batch` of jobs.
    :rtype: :class:`list`
    """
    response = self.post(batch)
    try:
        results = []
        for result_data in response.content:
            result = JobResult.hydrate(result_data, batch)
            log.info("< %s", result)
            results.append(result)
        return results
    except __HOLE__:
        # Here, we're looking to gracefully handle a Neo4j server bug
        # whereby a response is received with no content and
        # 'Content-Type: application/json'. Given that correct JSON
        # technically needs to contain {} at minimum, the JSON
        # parser fails with a ValueError.
        if response.content_length == 0:
            from sys import exc_info
            from traceback import extract_tb
            type_, value, traceback = exc_info()
            for filename, line_number, function_name, text in extract_tb(traceback):
                if "json" in filename and "decode" in function_name:
                    return []
        raise
    finally:
        response.close()
ValueError
dataset/ETHPy150Open nigelsmall/py2neo/py2neo/ext/batman/batch.py/BatchRunner.run
2,631
def make_zipfile (base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.  The
    output zip file will be named 'base_dir' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither
    tool is available, raises DistutilsExecError.  Returns the name of
    the output zip file.
    """
    try:
        import zipfile
    except __HOLE__:
        zipfile = None

    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError, \
                  ("unable to create zip file '%s': "
                   "could neither import the 'zipfile' module nor "
                   "find a standalone zip utility") % zip_filename

    else:
        log.info("creating '%s' and adding '%s' to it",
                 zip_filename, base_dir)

        def visit (z, dirname, names):
            for name in names:
                path = os.path.normpath(os.path.join(dirname, name))
                if os.path.isfile(path):
                    z.write(path, path)
                    log.info("adding '%s'" % path)

        if not dry_run:
            z = zipfile.ZipFile(zip_filename, "w",
                                compression=zipfile.ZIP_DEFLATED)

            os.path.walk(base_dir, visit, z)
            z.close()

    return zip_filename

# make_zipfile ()
ImportError
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/archive_util.py/make_zipfile
2,632
def make_archive (base_name, format,
                  root_dir=None, base_dir=None,
                  verbose=0, dry_run=0):
    """Create an archive file (eg. zip or tar).  'base_name' is the name
    of the file to create, minus any format-specific extension; 'format'
    is the archive format: one of "zip", "tar", "ztar", or "gztar".
    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = { 'dry_run': dry_run }

    try:
        format_info = ARCHIVE_FORMATS[format]
    except __HOLE__:
        raise ValueError, "unknown archive format '%s'" % format

    func = format_info[0]
    for (arg,val) in format_info[1]:
        kwargs[arg] = val
    filename = apply(func, (base_name, base_dir), kwargs)

    if root_dir is not None:
        log.debug("changing back to '%s'", save_cwd)
        os.chdir(save_cwd)

    return filename

# make_archive ()
KeyError
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/archive_util.py/make_archive
2,633
def reset(self):
    codecs.StreamReader.reset(self)
    try:
        del self.decode
    except __HOLE__:
        pass
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/encodings/utf_32.py/StreamReader.reset
2,634
def _build_contest_kwargs(self, race):
    office = race.attrib['RaceTitle'].split(', ')[0].strip()
    try:
        district = race.attrib['District']
    except __HOLE__:
        if 'District' in race.attrib['RaceTitle']:
            district = race.attrib['RaceTitle'].split(', ')[1].split('District ')[1].split()[0]
        else:
            district = None
    return {
        'office': office,
        'district': district,
        'total_precincts': race.attrib['TotalPrecincts'],
        'precincts_reported': race.attrib['PrecinctsReported']
    }
KeyError
dataset/ETHPy150Open openelections/openelections-core/openelex/us/nv/load.py/NVXmlLoader._build_contest_kwargs
2,635
def pop(self, nbytes):
    """ pops packets with _at least_ nbytes of payload """
    size = 0
    popped = []
    with self._lock_packets:
        while size < nbytes:
            try:
                packet = self._packets.pop(0)
                size += len(packet.data.data)
                self._remaining -= len(packet.data.data)
                popped.append(packet)
            except __HOLE__:
                break
    return popped
IndexError
dataset/ETHPy150Open pinterest/thrift-tools/thrift_tools/sniffer.py/Stream.pop
2,636
def _handle_packet(self, packet):
    try:
        ip_p = get_ip_packet(packet.load, 0, self._port)
    except __HOLE__:
        return
    ip_data = getattr(ip_p, 'data', None)
    if ip_data is None:
        return

    if ip_data.sport != self._port and ip_data.dport != self._port:
        return

    if self._ip:
        src_ip = get_ip(ip_p, ip_p.src)
        dst_ip = get_ip(ip_p, ip_p.dst)
        if src_ip not in self._ip and dst_ip not in self._ip:
            return

    self._queue.append((packet.time, ip_p))
ValueError
dataset/ETHPy150Open pinterest/thrift-tools/thrift_tools/sniffer.py/Sniffer._handle_packet
2,637
def __getattr__(self, attr):
    try:
        return self[self.attrs.index(attr)]
    except __HOLE__:
        raise AttributeError
ValueError
dataset/ETHPy150Open babble/babble/include/jython/Lib/pwd.py/struct_passwd.__getattr__
2,638
def get_context(self, page, range_gap=5):
    try:
        page = int(page)
    except (ValueError, __HOLE__), exc:
        raise InvalidPage, exc

    try:
        paginator = self.page(page)
    except EmptyPage:
        return {
            'EMPTY_PAGE': True,
        }

    if page > 5:
        start = page-range_gap
    else:
        start = 1

    if page < self.num_pages-range_gap:
        end = page+range_gap+1
    else:
        end = self.num_pages+1

    context = {
        'page_range': range(start, end),
        'objects': paginator.object_list,
        'num_pages': self.num_pages,
        'page': page,
        'has_pages': self.num_pages > 1,
        'has_previous': paginator.has_previous(),
        'has_next': paginator.has_next(),
        'previous_page': paginator.previous_page_number() if paginator.has_previous() else None,
        'next_page': paginator.next_page_number() if paginator.has_next() else None,
        'is_first': page == 1,
        'is_last': page == self.num_pages,
    }

    return context
TypeError
dataset/ETHPy150Open dcramer/django-paging/paging/paginators.py/BetterPaginator.get_context
2,639
def page(self, number):
    "Returns a Page object for the given 1-based page number."
    try:
        number = int(number)
    except ValueError:
        raise PageNotAnInteger('That page number is not an integer')
    bottom = (number - 1) * self.per_page
    top = bottom + self.per_page + 5
    try:
        _page = EndlessPage(list(self.object_list[bottom:top]), number, self)
    except __HOLE__:
        top = top - 5
        _page = EndlessPage(list(self.object_list[bottom:top]), number, self)
    if not _page.object_list:
        if number == 1 and self.allow_empty_first_page:
            pass
        else:
            raise EmptyPage('That page contains no results')
    return _page
AssertionError
dataset/ETHPy150Open dcramer/django-paging/paging/paginators.py/EndlessPaginator.page
2,640
@task
@runs_once  # do this once, locally
def compile_redis():
    "Compile redis locally"
    tempdir = tempfile.mkdtemp()
    try:
        os.remove('redis-server')
    except __HOLE__:
        pass
    try:
        os.remove('redis-cli')
    except OSError:
        pass
    with lcd(tempdir):
        local('wget https://github.com/antirez/redis/archive/2.6.16.tar.gz '
              '-O -| tar xz --strip 1')
        local('make')
        #local('make test') # takes a long time
    shutil.move(os.path.join(tempdir, 'src/redis-server'), '.')
    shutil.move(os.path.join(tempdir, 'src/redis-cli'), '.')
    shutil.rmtree(tempdir)
OSError
dataset/ETHPy150Open PaulMcMillan/toorcon_2013/configurator/fabfile/master.py/compile_redis
2,641
def __call__(self):
    dirname = os.path.dirname(self._filename.dest)
    try:
        if not os.path.isdir(dirname):
            try:
                os.makedirs(dirname)
            except __HOLE__:
                # It's possible that between the if check and the makedirs
                # check that another thread has come along and created the
                # directory. In this case the directory already exists and we
                # can move on.
                pass

        # Always create the file. Even if it exists, we need to
        # wipe out the existing contents.
        with open(self._filename.dest, 'wb'):
            pass
    except Exception as e:
        message = print_operation(self._filename, failed=True,
                                  dryrun=False)
        message += '\n' + str(e)
        result = {'message': message, 'error': True}
        self._result_queue.put(PrintTask(**result))
        self._context.cancel()
    else:
        self._context.announce_file_created()
OSError
dataset/ETHPy150Open aws/aws-cli/awscli/customizations/s3/tasks.py/CreateLocalFileTask.__call__
2,642
def fetch(args):
    """
    %prog fetch "query"
        OR
    %prog fetch queries.txt

    Please provide a UniProt compatible `query` to retrieve data. If `query` contains
    spaces, please remember to "quote" it.

    You can also specify a `filename` which contains queries, one per line.

    Follow this syntax <http://www.uniprot.org/help/text-search#text-search-syntax>
    to query any of the documented fields <http://www.uniprot.org/help/query-fields>
    """
    import re
    import csv

    p = OptionParser(fetch.__doc__)

    p.add_option("--format", default="tab", choices=valid_formats,
                 help="download format [default: %default]")
    p.add_option("--columns", default="entry name, protein names, genes,organism",
                 help="columns to download, if --format is `tab` or `xls`." +
                 " [default: %default]")
    p.add_option("--include", default=False, action="store_true",
                 help="Include isoforms when --format is `fasta` or include `description` when" +
                 " --format is `rdf`. [default: %default]")
    p.add_option("--limit", default=10, type="int",
                 help="Max number of results to retrieve [default: %default]")
    p.add_option("--offset", default=0, type="int",
                 help="Offset of first result, used with --limit [default: %default]")
    p.add_option("--skipcheck", default=False, action="store_true",
                 help="turn off prompt to check file existence [default: %default]")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    query, = args
    url_params = {}
    if op.exists(query):
        pf = query.rsplit(".", 1)[0]
        list_of_queries = [row.strip() for row in open(query)]
    else:
        # the query is the search term
        pf = query.strip().strip('\"')
        list_of_queries = [pf]
        pf = re.sub(r"\s+", '_', pf)

    assert len(list_of_queries) > 0, \
        "Please provide atleast one input query"

    url_params['format'] = opts.format

    if opts.columns and opts.format in valid_column_formats:
        reader = csv.reader([opts.columns], skipinitialspace=True)
        cols = [col for r in reader for col in r]
        for col in cols:
            assert col in valid_columns, \
                "Column '{0}' is not a valid. Allowed options are {1}".\
                format(col, valid_columns)
        url_params['columns'] = ",".join(cols)

    if opts.include and opts.format in valid_include_formats:
        url_params['include'] = "yes"

    url_params['limit'] = opts.limit
    url_params['offset'] = opts.offset

    outfile = "{0}.{1}".format(pf, opts.format)

    # If noprompt, will not check file existence
    fw = must_open(outfile, "w", checkexists=True, \
                   skipcheck=opts.skipcheck)
    if fw is None:
        return

    seen = set()
    for query in list_of_queries:
        if query in seen:
            logging.error("Duplicate query ({0}) found".format(query))
            continue

        url_params['query'] = query
        data = urllib.urlencode(url_params)
        try:
            request = urllib2.Request(uniprot_url, data)
            response = urllib2.urlopen(request)
        except (urllib2.HTTPError, urllib2.URLError,
                RuntimeError, __HOLE__) as e:
            logging.error(e)
            logging.debug("wait 5 seconds to reconnect...")
            time.sleep(5)

        page = response.read()
        if not page:
            logging.error("query `{0}` yielded no results".format(query))
            continue

        print >> fw, page

        seen.add(query)

    if seen:
        print >> sys.stderr, "A total of {0} out of {1} queries returned results.".\
            format(len(seen), len(list_of_queries))
KeyError
dataset/ETHPy150Open tanghaibao/jcvi/apps/uniprot.py/fetch
2,643
def _generate_dispatch(cls):
    """Return an optimized visit dispatch function for the cls
    for use by the compiler.
    """
    if '__visit_name__' in cls.__dict__:
        visit_name = cls.__visit_name__
        if isinstance(visit_name, str):
            # There is an optimization opportunity here because the
            # the string name of the class's __visit_name__ is known at
            # this early stage (import time) so it can be pre-constructed.
            getter = operator.attrgetter("visit_%s" % visit_name)

            def _compiler_dispatch(self, visitor, **kw):
                try:
                    meth = getter(visitor)
                except __HOLE__:
                    raise exc.UnsupportedCompilationError(visitor, cls)
                else:
                    return meth(self, **kw)
        else:
            # The optimization opportunity is lost for this case because the
            # __visit_name__ is not yet a string. As a result, the visit
            # string has to be recalculated with each compilation.
            def _compiler_dispatch(self, visitor, **kw):
                visit_attr = 'visit_%s' % self.__visit_name__
                try:
                    meth = getattr(visitor, visit_attr)
                except AttributeError:
                    raise exc.UnsupportedCompilationError(visitor, cls)
                else:
                    return meth(self, **kw)

        _compiler_dispatch.__doc__ = \
            """Look for an attribute named "visit_" + self.__visit_name__
            on the visitor, and call it with the same kw params.
            """
        cls._compiler_dispatch = _compiler_dispatch
AttributeError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py/_generate_dispatch
2,644
def get_filter_bbox(self, request):
    bbox_string = request.query_params.get(self.bbox_param, None)
    if not bbox_string:
        return None

    try:
        p1x, p1y, p2x, p2y = (float(n) for n in bbox_string.split(','))
    except __HOLE__:
        raise ParseError('Invalid bbox string supplied for parameter {0}'.format(self.bbox_param))

    x = Polygon.from_bbox((p1x, p1y, p2x, p2y))
    return x
ValueError
dataset/ETHPy150Open djangonauts/django-rest-framework-gis/rest_framework_gis/filters.py/InBBoxFilter.get_filter_bbox
2,645
def get_filter_bbox(self, request):
    tile_string = request.query_params.get(self.tile_param, None)
    if not tile_string:
        return None

    try:
        z, x, y = (int(n) for n in tile_string.split('/'))
    except __HOLE__:
        raise ParseError('Invalid tile string supplied for parameter {0}'.format(self.tile_param))

    bbox = Polygon.from_bbox(tile_edges(x, y, z))
    return bbox
ValueError
dataset/ETHPy150Open djangonauts/django-rest-framework-gis/rest_framework_gis/filters.py/TMSTileFilter.get_filter_bbox
2,646
def get_filter_point(self, request):
    point_string = request.query_params.get(self.point_param, None)
    if not point_string:
        return None

    try:
        (x, y) = (float(n) for n in point_string.split(','))
    except __HOLE__:
        raise ParseError('Invalid geometry string supplied for parameter {0}'.format(self.point_param))

    p = Point(x, y)
    return p
ValueError
dataset/ETHPy150Open djangonauts/django-rest-framework-gis/rest_framework_gis/filters.py/DistanceToPointFilter.get_filter_point
2,647
def filter_queryset(self, request, queryset, view):
    filter_field = getattr(view, 'distance_filter_field', None)
    convert_distance_input = getattr(view, 'distance_filter_convert_meters', False)
    geoDjango_filter = 'dwithin'  # use dwithin for points

    if not filter_field:
        return queryset

    point = self.get_filter_point(request)
    if not point:
        return queryset

    # distance in meters
    dist_string = request.query_params.get(self.dist_param, 1000)
    try:
        dist = float(dist_string)
    except __HOLE__:
        raise ParseError('Invalid distance string supplied for parameter {0}'.format(self.dist_param))

    if (convert_distance_input):
        # Warning: assumes that the point is (lon,lat)
        dist = self.dist_to_deg(dist, point[1])

    return queryset.filter(Q(**{'%s__%s' % (filter_field, geoDjango_filter): (point, dist)}))
ValueError
dataset/ETHPy150Open djangonauts/django-rest-framework-gis/rest_framework_gis/filters.py/DistanceToPointFilter.filter_queryset
2,648
def __new__(cls, *args):  # pylint: disable=W0613, E1002
    '''
    We override `__new__` in our logging logger class in order to provide
    some additional features like expand the module name padding if length
    is being used, and also some Unicode fixes.

    This code overhead will only be executed when the class is
    instantiated, i.e.:

        logging.getLogger(__name__)

    '''
    instance = super(SaltLoggingClass, cls).__new__(cls)

    try:
        max_logger_length = len(max(
            list(logging.Logger.manager.loggerDict.keys()), key=len
        ))
        for handler in logging.root.handlers:
            if handler in (LOGGING_NULL_HANDLER,
                           LOGGING_STORE_HANDLER,
                           LOGGING_TEMP_HANDLER):
                continue

            formatter = handler.formatter
            if not formatter:
                continue

            if not handler.lock:
                handler.createLock()
            handler.acquire()

            fmt = formatter._fmt.replace('%', '%%')

            match = MODNAME_PATTERN.search(fmt)
            if not match:
                # Not matched. Release handler and return.
                handler.release()
                return instance

            if 'digits' not in match.groupdict():
                # No digits group. Release handler and return.
                handler.release()
                return instance

            digits = match.group('digits')
            if not digits or not (digits and digits.isdigit()):
                # No valid digits. Release handler and return.
                handler.release()
                return instance

            if int(digits) < max_logger_length:
                # Formatter digits value is lower than current max, update.
                fmt = fmt.replace(match.group('name'), '%%(name)-%ds')
                formatter = logging.Formatter(
                    fmt % max_logger_length,
                    datefmt=formatter.datefmt
                )
                handler.setFormatter(formatter)
            handler.release()
    except __HOLE__:
        # There are no registered loggers yet
        pass
    return instance
ValueError
dataset/ETHPy150Open saltstack/salt/salt/log/setup.py/SaltLoggingClass.__new__
2,649
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
               func=None, extra=None, sinfo=None):
    # Let's remove exc_info_on_loglevel from extra
    exc_info_on_loglevel = extra.pop('exc_info_on_loglevel')
    if not extra:
        # If nothing else is in extra, make it None
        extra = None

    # Let's try to make every logging message unicode
    if isinstance(msg, six.string_types) \
            and not isinstance(msg, six.text_type):
        salt_system_encoding = __salt_system_encoding__
        if salt_system_encoding == 'ascii':
            # Encoding detection most likely failed, let's use the utf-8
            # value which we defaulted before __salt_system_encoding__ was
            # implemented
            salt_system_encoding = 'utf-8'
        try:
            _msg = msg.decode(salt_system_encoding, 'replace')
        except __HOLE__:
            _msg = msg.decode(salt_system_encoding, 'ignore')
    else:
        _msg = msg

    if six.PY3:
        logrecord = _LOG_RECORD_FACTORY(name, level, fn, lno, _msg, args,
                                        exc_info, func, sinfo)
    else:
        logrecord = _LOG_RECORD_FACTORY(name, level, fn, lno, _msg, args,
                                        exc_info, func)

    if extra is not None:
        for key in extra:
            if (key in ['message', 'asctime']) or (key in logrecord.__dict__):
                raise KeyError(
                    'Attempt to overwrite \'{0}\' in LogRecord'.format(key)
                )
            logrecord.__dict__[key] = extra[key]

    if exc_info_on_loglevel is not None:
        # Let's add some custom attributes to the LogRecord class in order
        # to include the exc_info on a per handler basis. This will allow
        # showing tracebacks on logfiles but not on console if the logfile
        # handler is enabled for the log level "exc_info_on_loglevel" and
        # console handler is not.
        logrecord.exc_info_on_loglevel_instance = sys.exc_info()
        logrecord.exc_info_on_loglevel_formatted = None

    logrecord.exc_info_on_loglevel = exc_info_on_loglevel

    return logrecord
    # pylint: enable=C0103


# Override the python's logging logger class as soon as this module is imported
UnicodeDecodeError
dataset/ETHPy150Open saltstack/salt/salt/log/setup.py/SaltLoggingClass.makeRecord
2,650
def setup_logfile_logger(log_path, log_level='error', log_format=None,
                         date_format=None):
    '''
    Setup the logfile logger

    Since version 0.10.6 we support logging to syslog, some examples:

        tcp://localhost:514/LOG_USER
        tcp://localhost/LOG_DAEMON
        udp://localhost:5145/LOG_KERN
        udp://localhost
        file:///dev/log
        file:///dev/log/LOG_SYSLOG
        file:///dev/log/LOG_DAEMON

    The above examples are self explanatory, but:
        <file|udp|tcp>://<host|socketpath>:<port-if-required>/<log-facility>

    If you're thinking on doing remote logging you might also be thinking
    that you could point salt's logging to the remote syslog.
    **Please Don't!**
    An issue has been reported when doing this over TCP when the logged lines
    get concatenated. See #3061.

    The preferred way to do remote logging is setup a local syslog, point
    salt's logging to the local syslog(unix socket is much faster) and then
    have the local syslog forward the log messages to the remote syslog.
    '''
    if is_logfile_configured():
        logging.getLogger(__name__).warning('Logfile logging already configured')
        return

    if log_path is None:
        logging.getLogger(__name__).warning(
            'log_path setting is set to `None`. Nothing else to do'
        )
        return

    # Remove the temporary logging handler
    __remove_temp_logging_handler()

    if log_level is None:
        log_level = 'warning'

    level = LOG_LEVELS.get(log_level.lower(), logging.ERROR)

    parsed_log_path = urlparse(log_path)

    root_logger = logging.getLogger()

    if parsed_log_path.scheme in ('tcp', 'udp', 'file'):
        syslog_opts = {
            'facility': SysLogHandler.LOG_USER,
            'socktype': socket.SOCK_DGRAM
        }

        if parsed_log_path.scheme == 'file' and parsed_log_path.path:
            facility_name = parsed_log_path.path.split(os.sep)[-1].upper()
            if not facility_name.startswith('LOG_'):
                # The user is not specifying a syslog facility
                facility_name = 'LOG_USER'      # Syslog default
                syslog_opts['address'] = parsed_log_path.path
            else:
                # The user has set a syslog facility, let's update the path to
                # the logging socket
                syslog_opts['address'] = os.sep.join(
                    parsed_log_path.path.split(os.sep)[:-1]
                )
        elif parsed_log_path.path:
            # In case of udp or tcp with a facility specified
            facility_name = parsed_log_path.path.lstrip(os.sep).upper()
            if not facility_name.startswith('LOG_'):
                # Logging facilities start with LOG_ if this is not the case
                # fail right now!
                raise RuntimeError(
                    'The syslog facility \'{0}\' is not known'.format(
                        facility_name
                    )
                )
        else:
            # This is the case of udp or tcp without a facility specified
            facility_name = 'LOG_USER'      # Syslog default

        facility = getattr(
            SysLogHandler, facility_name, None
        )
        if facility is None:
            # This python syslog version does not know about the user provided
            # facility name
            raise RuntimeError(
                'The syslog facility \'{0}\' is not known'.format(
                    facility_name
                )
            )
        syslog_opts['facility'] = facility

        if parsed_log_path.scheme == 'tcp':
            # tcp syslog support was only added on python versions >= 2.7
            if sys.version_info < (2, 7):
                raise RuntimeError(
                    'Python versions lower than 2.7 do not support logging '
                    'to syslog using tcp sockets'
                )
            syslog_opts['socktype'] = socket.SOCK_STREAM

        if parsed_log_path.scheme in ('tcp', 'udp'):
            syslog_opts['address'] = (
                parsed_log_path.hostname,
                parsed_log_path.port or logging.handlers.SYSLOG_UDP_PORT
            )

        if sys.version_info < (2, 7) or parsed_log_path.scheme == 'file':
            # There's not socktype support on python versions lower than 2.7
            syslog_opts.pop('socktype', None)

        try:
            # Et voilá! Finally our syslog handler instance
            handler = SysLogHandler(**syslog_opts)
        except socket.error as err:
            logging.getLogger(__name__).error(
                'Failed to setup the Syslog logging handler: {0}'.format(
                    err
                )
            )
            shutdown_multiprocessing_logging_listener()
            sys.exit(2)
    else:
        try:
            # Logfile logging is UTF-8 on purpose.
            # Since salt uses YAML and YAML uses either UTF-8 or UTF-16, if a
            # user is not using plain ASCII, their system should be ready to
            # handle UTF-8.
            handler = WatchedFileHandler(log_path, mode='a', encoding='utf-8', delay=0)
        except (__HOLE__, OSError):
            logging.getLogger(__name__).warning(
                'Failed to open log file, do you have permission to write to '
                '{0}?'.format(log_path)
            )
            # Do not proceed with any more configuration since it will fail, we
            # have the console logging already setup and the user should see
            # the error.
            return

    handler.setLevel(level)

    # Set the default console formatter config
    if not log_format:
        log_format = '%(asctime)s [%(name)-15s][%(levelname)-8s] %(message)s'
    if not date_format:
        date_format = '%Y-%m-%d %H:%M:%S'

    formatter = logging.Formatter(log_format, datefmt=date_format)

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

    global __LOGFILE_CONFIGURED
    __LOGFILE_CONFIGURED = True
IOError
dataset/ETHPy150Open saltstack/salt/salt/log/setup.py/setup_logfile_logger
2,651
def shutdown_multiprocessing_logging_listener(daemonizing=False):
    global __MP_LOGGING_QUEUE
    global __MP_LOGGING_QUEUE_PROCESS
    global __MP_LOGGING_LISTENER_CONFIGURED

    if daemonizing is False and __MP_IN_MAINPROCESS is True:
        # We're in the MainProcess and we're not daemonizing, return!
        # No multiprocessing logging listener shutdown shall happen
        return

    if __MP_LOGGING_QUEUE_PROCESS is None:
        return
    if __MP_LOGGING_QUEUE_PROCESS.is_alive():
        logging.getLogger(__name__).debug('Stopping the multiprocessing logging queue listener')
        try:
            # Sent None sentinel to stop the logging processing queue
            __MP_LOGGING_QUEUE.put(None)
            # Let's join the multiprocessing logging handle thread
            time.sleep(0.5)
            logging.getLogger(__name__).debug('closing multiprocessing queue')
            __MP_LOGGING_QUEUE.close()
            logging.getLogger(__name__).debug('joining multiprocessing queue thread')
            __MP_LOGGING_QUEUE.join_thread()
            __MP_LOGGING_QUEUE = None
            __MP_LOGGING_QUEUE_PROCESS.join(1)
            __MP_LOGGING_QUEUE = None
        except __HOLE__:
            # We were unable to deliver the sentinel to the queue
            # carry on...
            pass
        if __MP_LOGGING_QUEUE_PROCESS.is_alive():
            # Process is still alive!?
            __MP_LOGGING_QUEUE_PROCESS.terminate()
        __MP_LOGGING_QUEUE_PROCESS = None
        __MP_LOGGING_LISTENER_CONFIGURED = False
        logging.getLogger(__name__).debug('Stopped the multiprocessing logging queue listener')
IOError
dataset/ETHPy150Open saltstack/salt/salt/log/setup.py/shutdown_multiprocessing_logging_listener
2,652
def __process_multiprocessing_logging_queue(opts, queue):
    import salt.utils
    salt.utils.appendproctitle('MultiprocessingLoggingQueue')
    if salt.utils.is_windows():
        # On Windows, creating a new process doesn't fork (copy the parent
        # process image). Due to this, we need to setup extended logging
        # inside this process.
        setup_temp_logger()
        setup_extended_logging(opts)
    while True:
        try:
            record = queue.get()
            if record is None:
                # A sentinel to stop processing the queue
                break
            # Just log everything, filtering will happen on the main process
            # logging handlers
            logger = logging.getLogger(record.name)
            logger.handle(record)
        except (EOFError, __HOLE__, SystemExit):
            break
        except Exception as exc:  # pylint: disable=broad-except
            logging.getLogger(__name__).warning(
                'An exception occurred in the multiprocessing logging '
                'queue thread: {0}'.format(exc),
                exc_info_on_loglevel=logging.DEBUG
            )
KeyboardInterrupt
dataset/ETHPy150Open saltstack/salt/salt/log/setup.py/__process_multiprocessing_logging_queue
2,653
def _restoreConfig(self, cs, configRestoreList):
    # config files are cached, so we don't have to worry about not
    # restoring the same fileId/pathId twice
    for (pathId, newFileId, sha1, oldfile, newFileId,
         oldVersion, oldFileId, restoreContents) in configRestoreList:
        if cs.configFileIsDiff(pathId, newFileId):
            (contType, fileContents) = cs.getFileContents(pathId, newFileId)

            # the content for this file is in the form of a
            # diff, which we need to apply against the file in
            # the repository
            assert(oldVersion)

            try:
                f = self.repos.getFileContents(
                    [(oldFileId, oldVersion, oldfile)])[0].get()
            except __HOLE__:
                raise errors.IntegrityError(
                    "Missing file contents for pathId %s, fileId %s" % (
                        sha1helper.md5ToString(pathId),
                        sha1helper.sha1ToString(oldFileId)))

            oldLines = f.readlines()
            f.close()
            del f
            diff = fileContents.get().readlines()
            (newLines, failedHunks) = patch.patch(oldLines, diff)
            fileContents = filecontents.FromString(
                "".join(newLines))
            assert(not failedHunks)
        else:
            # config files are not always available compressed (due
            # to the config file cache)
            fileContents = filecontents.FromChangeSet(cs, pathId, newFileId)

        self.addFileContents(sha1, fileContents, restoreContents, 1)
KeyError
dataset/ETHPy150Open sassoftware/conary/conary/repository/repository.py/ChangeSetJob._restoreConfig
2,654
def _restoreNormal(self, cs, normalRestoreList, preRestored):
    ptrRestores = []
    ptrRefsAdded = {}
    lastRestore = None
    # restore each pathId,fileId combo once
    while normalRestoreList:
        (pathId, fileId, sha1, restoreContents) = normalRestoreList.pop(0)
        if preRestored is not None and sha1 in preRestored:
            continue
        if (pathId, fileId) == lastRestore:
            continue

        lastRestore = (pathId, fileId)

        try:
            (contType, fileContents) = cs.getFileContents(pathId, fileId,
                                                          compressed = True)
        except __HOLE__:
            raise errors.IntegrityError(
                "Missing file contents for pathId %s, fileId %s" % (
                    sha1helper.md5ToString(pathId),
                    sha1helper.sha1ToString(fileId)))

        if contType == changeset.ChangedFileTypes.ptr:
            ptrRestores.append(sha1)
            target = util.decompressString(fileContents.get().read())

            if util.tupleListBsearchInsert(normalRestoreList,
                                           (target[:16], target[16:], sha1, True),
                                           self.ptrCmp):
                # Item was inserted. This creates a reference in the
                # datastore; keep track of it to prevent a duplicate
                # reference count.
                ptrRefsAdded[sha1] = True

            continue

        assert(contType == changeset.ChangedFileTypes.file)
        self.addFileContents(sha1, fileContents, restoreContents, 0,
                             precompressed = True)

    for sha1 in ptrRestores:
        # Increment the reference count for items which were ptr's
        # to a different file.
        if sha1 in ptrRefsAdded:
            del ptrRefsAdded[sha1]
        else:
            self.addFileContents(sha1, None, False, 0)
KeyError
dataset/ETHPy150Open sassoftware/conary/conary/repository/repository.py/ChangeSetJob._restoreNormal
2,655
def users_filter(doc, users):
    try:
        return doc['form']['meta']['userID'] in users
    except __HOLE__:
        return False
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/reports/util.py/users_filter
2,656
def numcell(text, value=None, convert='int', raw=None):
    if value is None:
        try:
            value = int(text) if convert == 'int' else float(text)
            if math.isnan(value):
                text = '---'
            elif not convert == 'int':  # assume this is a percentage column
                text = '%.f%%' % value
        except __HOLE__:
            value = text
    return format_datatables_data(text=text, sort_key=value, raw=raw)
ValueError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/reports/util.py/numcell
2,657
def get_max_age(response):
    """
    Returns the max-age from the response Cache-Control header as an integer
    (or ``None`` if it wasn't found or wasn't an integer.
    """
    if not response.has_header('Cache-Control'):
        return
    cc = dict([_to_tuple(el) for el in
               cc_delim_re.split(response['Cache-Control'])])
    if 'max-age' in cc:
        try:
            return int(cc['max-age'])
        except (ValueError, __HOLE__):
            pass
TypeError
dataset/ETHPy150Open splunk/splunk-webframework/contrib/django/django/utils/cache.py/get_max_age
2,658
def set_to_cache(self):
    """
    Add widget object to Django's cache.

    You may need to overwrite this method, to pickle all information
    that is required to serve your JSON response view.
    """
    try:
        cache.set(self._get_cache_key(), {
            'widget': self,
            'url': self.get_url(),
        })
    except (PicklingError, cPicklingError, __HOLE__):
        msg = "You need to overwrite \"set_to_cache\" or ensure that %s is serialisable."
        raise NotImplementedError(msg % self.__class__.__name__)
AttributeError
dataset/ETHPy150Open applegrew/django-select2/django_select2/forms.py/HeavySelect2Mixin.set_to_cache
2,659
def render_options(self, *args):
    """Render only selected options."""
    try:
        selected_choices, = args
    except __HOLE__:
        # Signature contained `choices` prior to Django 1.10
        choices, selected_choices = args
        choices = chain(self.choices, choices)
    else:
        choices = self.choices
    output = ['<option></option>' if not self.is_required else '']
    selected_choices = {force_text(v) for v in selected_choices}
    choices = {(k, v) for k, v in choices if force_text(k) in selected_choices}
    for option_value, option_label in choices:
        output.append(self.render_option(selected_choices, option_value, option_label))
    return '\n'.join(output)
ValueError
dataset/ETHPy150Open applegrew/django-select2/django_select2/forms.py/HeavySelect2Mixin.render_options
2,660
def render_options(self, *args):
    """Render only selected options and set QuerySet from :class:`ModelChoicesIterator`."""
    try:
        selected_choices, = args
    except __HOLE__:
        choices, selected_choices = args
        choices = chain(self.choices, choices)
    else:
        choices = self.choices
    selected_choices = {force_text(v) for v in selected_choices}
    output = ['<option></option>' if not self.is_required else '']
    if isinstance(self.choices, ModelChoiceIterator):
        if not self.queryset:
            self.queryset = self.choices.queryset
        selected_choices = {c for c in selected_choices
                            if c not in self.choices.field.empty_values}
        choices = {(obj.pk, self.label_from_instance(obj))
                   for obj in self.choices.queryset.filter(pk__in=selected_choices)}
    else:
        choices = {(k, v) for k, v in choices if force_text(k) in selected_choices}
    for option_value, option_label in choices:
        output.append(self.render_option(selected_choices, option_value, option_label))
    return '\n'.join(output)
ValueError
dataset/ETHPy150Open applegrew/django-select2/django_select2/forms.py/ModelSelect2Mixin.render_options
2,661
def test_compare_results(self):
    """Test that `compare` tests results correctly."""
    with mock.patch.object(self, 'run_comparator') as comparator:
        MSG = {'id': (), 'context': (), 'file': 'file.js',
               'signing_severity': 'low'}
        EXPECTED = {'matched': MSG, 'ignored': True}
        FINAL = merge_dicts(MSG, EXPECTED)

        comparator.return_value = {
            'messages': [FINAL],
            'signing_summary': {'low': 0, 'medium': 0, 'high': 0,
                                'trivial': 0},
            'signing_ignored_summary': {'low': 1, 'medium': 0, 'high': 0,
                                        'trivial': 0}}

        # Signing summary with ignored messages:
        self.compare(MSG, {}, EXPECTED)

        comparator.return_value['signing_summary']['low'] = 1
        try:
            self.compare(MSG, {}, EXPECTED)
        except AssertionError:
            pass
        else:
            assert False, 'Bad signing summary passed.'

        comparator.return_value['signing_summary']['low'] = 0
        comparator.return_value['signing_ignored_summary']['low'] = 0
        try:
            self.compare(MSG, {}, EXPECTED)
        except AssertionError:
            pass
        else:
            assert False, 'Bad ignored signing summary passed.'

        # Signing summary without ignored messages:
        CHANGES = {'id': ('a', 'b', 'c')}
        FINAL = merge_dicts(MSG, CHANGES)

        comparator.return_value['messages'] = [FINAL]
        comparator.return_value['signing_summary']['low'] = 1

        self.compare(MSG, CHANGES, {})

        comparator.return_value['signing_summary']['low'] = 0
        try:
            self.compare(MSG, CHANGES, {})
        except __HOLE__:
            pass
        else:
            assert False, 'Bad signing summary passed.'

        comparator.return_value['signing_summary']['low'] = 1
        comparator.return_value['signing_ignored_summary']['low'] = 1
        try:
            self.compare(MSG, CHANGES, {})
        except AssertionError:
            pass
        else:
            assert False, 'Bad ignored signing summary passed.'
AssertionError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/devhub/tests/test_utils.py/TestValidationComparator.test_compare_results
2,662
def code():
    # file monitor server
    observer = Observer()

    # py yml file monitor
    patterns = ['*.py', '*demo.yml']    # '*' is necessary, and must in the first.
    restart_processor = ServerStarter([
        {'cmd': 'rm -rf %s/*.log' % os.path.join(workspace, 'log'), 'is_daemon': False},
        {'cmd': './run.py run', 'network_port': (config['simple_server']['port'],)}
    ])
    monitor = SourceCodeMonitor(restart_processor, patterns)
    observer.schedule(monitor, program_dir, recursive=True)
    observer.schedule(monitor, http_api.__path__[0], recursive=True)

    # # rebuild css and js's min file while source file is change
    # patterns = ['*.css', '*.js', '*static.yml']    # '*' is necessary, and must in the first.
    # monitor = SourceCodeMonitor(BuildCssJsProcessor(program_dir, static), patterns, None, 500)
    # observer.schedule(monitor, program_dir, recursive=True)

    # start monitoring
    observer.start()
    try:
        time.sleep(31536000)   # one year
    except __HOLE__:
        observer.stop()


# shell interface
KeyboardInterrupt
dataset/ETHPy150Open remyzane/flask-http-api/example/run.py/code
2,663
def __getattribute__(self, key):
    if key == "__dict__":
        return object.__getattribute__(self, key)
    try:
        # check the local cache first, overrides persistent storage
        return self.__dict__["_cache"].__getitem__(key)
    except __HOLE__:
        pass
    try:
        return super(RootContainer, self).__getattribute__(key)
    except AttributeError:
        node = self.__dict__["node"]
        session = self.__dict__["session"]
        try:
            item = config.get_item(session, node, key)
            if item.value is aid.NULL:
                return config.Container(session, item)
            else:
                return item.value
        except config.NoResultFound as err:
            raise AttributeError("RootContainer: No attribute or key '%s' found: %s" % (key, err))
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/RootContainer.__getattribute__
2,664
def __delattr__(self, key):
    try:
        self.__dict__["_cache"].__delitem__(key)
    except __HOLE__:
        object.__delattr__(self, key)
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/RootContainer.__delattr__
2,665
def __getitem__(self, key):
    try:
        return getattr(self._cache, key)
    except (AttributeError, __HOLE__, NameError):
        try:
            return self.get_userconfig().__getitem__(key)
        except KeyError:
            pass
        return super(RootContainer, self).__getitem__(key)
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/RootContainer.__getitem__
2,666
def __delitem__(self, key):
    try:
        del self._cache[key]
    except __HOLE__:
        super(RootContainer, self).__delitem__(key)
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/RootContainer.__delitem__
2,667
def get(self, key, default=None):
    try:
        rv = self.__getitem__(key)
    except __HOLE__:
        rv = default
    return rv
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/RootContainer.get
2,668
def _build_userconfig(self):
    try:
        #username = self.__getitem__("username")
        username = self._cache["username"]
    except __HOLE__:
        username = os.environ["USER"]
    try:
        cont = self.get_container(username)
    except config.NoResultFound:
        self.add_container(username)
        cont = self.get_container(username)
        cont.register_user(username)
    return cont

##### end of RootContainer ######


# Runtime objects that bind sessions and database rows and provide helper
# methods and properties. Attributes table is made available using the
# mapping interface (getitem).
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/RootContainer._build_userconfig
2,669
def _get_DUT(self):
    try:
        return self._eqcache["DUT"]
    except __HOLE__:
        pass
    eq = EquipmentRuntime(
        self._environment.get_DUT(self._session),
        "DUT",
        self.logfile,
        self._session)
    self._eqcache["DUT"] = eq
    return eq
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/EnvironmentRuntime._get_DUT
2,670
def get_role(self, rolename):
    try:
        return self._eqcache[rolename]
    except __HOLE__:
        pass
    eq = self._environment.get_equipment_with_role(self._session, rolename)
    eq = EquipmentRuntime(eq, rolename, self.logfile, self._session)
    self._eqcache[rolename] = eq
    return eq
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/config.py/EnvironmentRuntime.get_role
2,671
def get_comments_from_parent(doc):
    try:
        _comments = frappe.db.get_value(doc.reference_doctype,
            doc.reference_name, "_comments") or "[]"

    except Exception, e:
        if e.args[0] in (1146, 1054):
            # 1146 = no table
            # 1054 = missing column
            _comments = "[]"
        else:
            raise

    try:
        return json.loads(_comments)
    except __HOLE__:
        return []
ValueError
dataset/ETHPy150Open frappe/frappe/frappe/core/doctype/communication/comment.py/get_comments_from_parent
2,672
def _get_name(self):
    """
    Dynamically defines new partition name depending on the partition subtype.
    """
    try:
        return getattr(self, '_get_{0}_name'.format(self.subtype))()
    except __HOLE__:
        import re
        expression = '_get_(\w+)_name'
        raise PartitionRangeSubtypeError(
            model=self.model.__name__,
            dialect=self.dialect,
            current=self.subtype,
            allowed=[re.match(expression, c).group(1) for c in dir(self)
                     if re.match(expression, c) is not None])
AttributeError
dataset/ETHPy150Open maxtepkeev/architect/architect/databases/mysql/partition.py/RangePartition._get_name
2,673
def _get_date_name(self):
    """
    Defines name for a new partition for date partition subtype.
    """
    patterns = {
        'day': {'real': 'y%Yd%j', 'none': 'y0000d000'},
        'week': {'real': 'y%Yw%V', 'none': 'y0000w00'},
        'month': {'real': 'y%Ym%m', 'none': 'y0000m00'},
        'year': {'real': 'y%Y', 'none': 'y0000'},
    }

    try:
        if self.column_value is None:
            pattern = patterns[self.constraint]['none']
        else:
            pattern = self.column_value.strftime(patterns[self.constraint]['real'])
    except __HOLE__:
        raise PartitionConstraintError(
            model=self.model.__name__,
            dialect=self.dialect,
            current=self.constraint,
            allowed=patterns.keys())

    return '{0}_{1}'.format(self.table, pattern)
KeyError
dataset/ETHPy150Open maxtepkeev/architect/architect/databases/mysql/partition.py/RangePartition._get_date_name
2,674
def _get_function(self):
    """
    Returns correct partition function depending on the MySQL column type.
    """
    functions = {
        'date': 'TO_DAYS',
        'datetime': 'TO_DAYS',
        'timestamp': 'UNIX_TIMESTAMP',
    }

    column_type = self._get_column_type()

    try:
        return functions[column_type]
    except __HOLE__:
        raise PartitionFunctionError(
            model=self.model.__name__,
            dialect=self.dialect,
            current=column_type,
            allowed=functions.keys())
KeyError
dataset/ETHPy150Open maxtepkeev/architect/architect/databases/mysql/partition.py/RangePartition._get_function
2,675
def test_restart_workers(self):
    from gevent import sleep
    with start_wsgi_server(num_workers=4) as server:
        assert server.num_workers == 4
        workers = server._workers
        assert len(workers) == server.num_workers
        worker = workers[2]
        os.kill(worker, signal.SIGKILL)
        sleep(0.1)
        try:
            os.kill(worker, 0)
        except __HOLE__, e:
            assert e.errno == errno.ESRCH
        sleep(5)
        assert len(server._workers) == server.num_workers
        assert worker not in server._workers


# Helpers
OSError
dataset/ETHPy150Open momyc/gevent-fastcgi/tests/server/test_server.py/ServerTests.test_restart_workers
2,676
def add_default_headers(self, headers):
    try:
        headers['Authorization'] = 'OAuth ' + self.token
    except __HOLE__:
        self.token = self._fetch_oauth_token()
        headers['Authorization'] = 'OAuth ' + self.token
    return headers
AttributeError
dataset/ETHPy150Open cloudkick/libcloud/libcloud/compute/drivers/brightbox.py/BrightboxConnection.add_default_headers
2,677
@stay_put
def _compile(args):
    path = args.path
    os.chdir(path)
    if os.getcwd() not in sys.path:
        sys.path.insert(0, os.getcwd())
    import settings
    SETTINGS = {k:v for k,v in vars(settings).items() \
                if not k.startswith('__')}
    import handlers

    if os.path.isfile('app'):
        sys.exit("Fatal: \
There can't be a file named 'app' in the project dir.")
    elif os.path.isdir('app'):
        shutil.rmtree('app')

    shutil.copytree(
        os.path.join(ASHIBA_SHARE, 'compiled_project_files'), 'app')

    ENAML = False
    if os.path.isfile('myapp.enaml'):
        compile_enaml('myapp.enaml')
        ENAML = True

    for fname in [x for x in os.listdir('.') if not x.startswith('.')]:
        root, ext = os.path.splitext(fname)
        if ext in ['.py']:
            shutil.copy(fname, os.path.join('app', fname))
        elif ext == '.html':
            if root != 'myapp':
                shutil.copy(fname, os.path.join('app', 'templates', fname))
            elif not ENAML:
                in_file = open(fname)
                out_file = open(os.path.join('app', 'templates', fname), 'w')
                out_file.write(templatify_html(in_file))
                out_file.close()

    if os.path.isdir('static'):
        for item in [os.path.join('static', x) for x in os.listdir('static')]:
            src, dst = item, os.path.join('app', item)
            if os.path.isdir(item):
                shutil.copytree(src, dst)
            else:
                shutil.copy(src, dst)

    for item in SETTINGS.get('DATA_FILES', []):
        src, dst = item, os.path.join('app', item)
        try:
            shutil.copy(src, dst)
        except __HOLE__:
            print 'Error copying "{}".'.format(item)
        else:
            print 'Copied data file:', item

    file_path = os.path.join('app','static','ashiba_compiled.js')
    print "Writing to:", os.path.abspath(file_path)
    outfile = open(file_path, 'w')
    outfile.write("/* Compiled with Ashiba v{} */\n".format(ashiba.__version__))

    outfile.write("\n$(window).load(function(){")
    fcn_names = [k for k in vars(handlers) if re.match('[\\w]+?__[\\w]+', k)]
    for fcn_name in fcn_names:
        print "--> Translating", fcn_name
        name, event = fcn_name.rsplit('__', 1)
        if name.startswith('_'):
            selector = '.'
        else:
            selector = '#'
        jquery_string = """
$("{selector}{name}").on("{event}",
    ashiba.eventHandlerFactory("{name}", "{event}")
);""".format(selector=selector, name=name.lstrip('_'), event=event)
        outfile.write(jquery_string)
    outfile.write("\n});")  # end document.ready
    outfile.close()
IOError
dataset/ETHPy150Open ContinuumIO/ashiba/ashiba/main.py/_compile
2,678
def compile_check(args):
    path = args.path
    app_path = os.path.abspath(os.path.join(path, 'app'))
    mtimes = get_mtimes(path)
    mtime_fname = os.path.abspath(os.path.join(path, '.modified.json'))
    try:
        old_mtimes = json.load(open(mtime_fname))
    except (IOError, __HOLE__):
        old_mtimes = {}

    if (not os.path.isdir(app_path)
            or mtimes != old_mtimes
            or vars(args).get('recompile')):
        print "--- RECOMPILING before start ---"
        _compile(args)
        mtimes = get_mtimes(path)
        with closing(open(mtime_fname, 'w')) as mtime_file:
            json.dump(mtimes, mtime_file)
ValueError
dataset/ETHPy150Open ContinuumIO/ashiba/ashiba/main.py/compile_check
2,679
def browse(url, name='', icon=''):
    from PySide.QtGui import QApplication, QIcon
    from PySide.QtCore import QUrl
    from PySide.QtWebKit import QWebView

    for try_ in range(10):
        try:
            assert urllib2.urlopen(url).code == 200
        except (__HOLE__, urllib2.URLError):
            time.sleep(0.25)
        else:
            print "Started Qt Web View after %i ticks." % try_
            break
    else:
        sys.exit("Error initializing Qt Web View.")

    qtapp = QApplication(name)
    web = QWebView()
    web.load(QUrl(url))
    if icon:
        print "Setting Icon to", icon
        web.setWindowIcon(QIcon(icon))
    else:
        print "WARNING: No icon found in settings.py"
    web.setWindowTitle(name)
    web.show()
    qtapp.exec_()
AssertionError
dataset/ETHPy150Open ContinuumIO/ashiba/ashiba/main.py/browse
2,680
def set_details(self, details=True):
    if not details:
        try:
            del self._data['details']
        except __HOLE__:
            pass
    else:
        self._data['details'] = 1
    return self
KeyError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/actions/sms.py/SendAction.set_details
2,681
def set_date_validate(self, date_validate=True):
    if not date_validate:
        try:
            del self._data['date_validate']
        except __HOLE__:
            pass
    else:
        self._data['date_validate'] = 1
    return self
KeyError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/actions/sms.py/SendAction.set_date_validate
2,682
def set_eco(self, eco=True):
    if not eco:
        try:
            del self._data['eco']
        except __HOLE__:
            pass
    else:
        self._data['eco'] = 1
    return self
KeyError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/actions/sms.py/SendAction.set_eco
2,683
def set_nounicode(self, nounicode=True):
    if not nounicode:
        try:
            del self._data['nounicode']
        except __HOLE__:
            pass
    else:
        self._data['nounicode'] = 1
    return self
KeyError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/actions/sms.py/SendAction.set_nounicode
2,684
def set_normalize(self, normalize=True):
    if not normalize:
        try:
            del self._data['normalize']
        except __HOLE__:
            pass
    else:
        self._data['normalize'] = 1
    return self
KeyError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/actions/sms.py/SendAction.set_normalize
2,685
def set_fast(self, fast=True):
    if not fast:
        try:
            del self._data['fast']
        except __HOLE__:
            pass
    else:
        self._data['fast'] = 1
    return self
KeyError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/actions/sms.py/SendAction.set_fast
2,686
def get_file_path(self, filepath, token):
    try:
        encoded_path, _, user = self.updown_auth_manager.get_resource_info(token)
        if not self._valid_path(filepath, encoded_path):
            logger.info("Invalid path file!! %s: %s" % (user, filepath))
            raise NotFoundException("File not found")
        logger.debug("Get file: user=%s path=%s" % (user, filepath))
        file_path = os.path.normpath(os.path.join(self.base_store_folder, encoded_path))
        return file_path
    except (jwt.ExpiredSignature, jwt.DecodeError, __HOLE__):
        raise NotFoundException("File not found")
AttributeError
dataset/ETHPy150Open conan-io/conan/conans/server/service/service.py/FileUploadDownloadService.get_file_path
2,687
def put_file(self, file_saver, abs_filepath, token, upload_size):
    """
    file_saver is an object with the save() method without parameters
    """
    try:
        encoded_path, filesize, user = self.updown_auth_manager.get_resource_info(token)
        # Check size
        if upload_size != filesize:
            logger.debug("Invalid size file!!: %s: %s" % (user, abs_filepath))
            raise RequestErrorException("Bad file size")

        abs_encoded_path = os.path.abspath(os.path.join(self.base_store_folder, encoded_path))
        if not self._valid_path(abs_filepath, abs_encoded_path):
            raise NotFoundException("File not found")
        logger.debug("Put file: %s: %s" % (user, abs_filepath))
        mkdir(os.path.dirname(abs_filepath))
        if os.path.exists(abs_filepath):
            os.remove(abs_filepath)
        file_saver.save(os.path.dirname(abs_filepath))
    except (jwt.ExpiredSignature, jwt.DecodeError, __HOLE__):
        return NotFoundException("File not found")
AttributeError
dataset/ETHPy150Open conan-io/conan/conans/server/service/service.py/FileUploadDownloadService.put_file
2,688
def start(self, track, setup, metrics_store):
    configured_host_list = self.cfg.opts("launcher", "external.target.hosts")
    hosts = []
    try:
        for authority in configured_host_list:
            host, port = authority.split(":")
            hosts.append({"host": host, "port": port})
    except __HOLE__:
        msg = "Could not initialize external cluster. Invalid format for %s. Expected a comma-separated list of host:port pairs, " \
              "e.g. host1:9200,host2:9200." % configured_host_list
        logger.exception(msg)
        raise exceptions.SystemSetupError(msg)
    t = telemetry.Telemetry(self.cfg, metrics_store, devices=[
        telemetry.ExternalEnvironmentInfo(self.cfg, metrics_store),
        telemetry.NodeStats(self.cfg, metrics_store),
        telemetry.IndexStats(self.cfg, metrics_store)
    ])
    c = self.cluster_factory.create(hosts, [], metrics_store, t)
    t.attach_to_cluster(c)
    self.setup_index(c, track, setup)
    return c
ValueError
dataset/ETHPy150Open elastic/rally/esrally/mechanic/launcher.py/ExternalLauncher.start
2,689
def stop(self, cluster):
    logger.info("Shutting down ES cluster")

    # Ask all nodes to shutdown:
    stop_watch = self._clock.stop_watch()
    stop_watch.start()
    for node in cluster.nodes:
        process = node.process
        node.telemetry.detach_from_node(node)

        os.kill(process.pid, signal.SIGINT)

        try:
            process.wait(10.0)
            logger.info("Done shutdown node (%.1f sec)" % stop_watch.split_time())
        except subprocess.TimeoutExpired:
            # kill -9
            logger.warn("Server %s did not shut down itself after 10 seconds; now kill -QUIT node, to see threads:" % node.node_name)
            try:
                os.kill(process.pid, signal.SIGQUIT)
            except __HOLE__:
                logger.warn("  no such process")
                return

            try:
                process.wait(120.0)
                logger.info("Done shutdown node (%.1f sec)" % stop_watch.split_time())
                return
            except subprocess.TimeoutExpired:
                pass

            logger.info("kill -KILL node")
            try:
                process.kill()
            except ProcessLookupError:
                logger.warn("No such process")
    cluster.telemetry.detach_from_cluster(cluster)
    self._servers = []
OSError
dataset/ETHPy150Open elastic/rally/esrally/mechanic/launcher.py/InProcessLauncher.stop
2,690
def previous(self, cli):
    """
    Return the previously focussed :class:`.Buffer` or `None`.
    """
    if len(self.focus_stack) > 1:
        try:
            return self[self.focus_stack[-2]]
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/buffer_mapping.py/BufferMapping.previous
2,691
def __setitem__(self, key, value):
    try:
        super(CaseInsensitiveDict, self).__setitem__(key.lower(), value)
    except __HOLE__:
        super(CaseInsensitiveDict, self).__setitem__(key, value)
AttributeError
dataset/ETHPy150Open missionpinball/mpf/mpf/system/config.py/CaseInsensitiveDict.__setitem__
2,692
def __getitem__(self, key):
    try:
        return super(CaseInsensitiveDict, self).__getitem__(key.lower())
    except __HOLE__:
        return super(CaseInsensitiveDict, self).__getitem__(key)
AttributeError
dataset/ETHPy150Open missionpinball/mpf/mpf/system/config.py/CaseInsensitiveDict.__getitem__
2,693
def __contains__(self, key):
    try:
        return super(CaseInsensitiveDict, self).__contains__(key.lower())
    except __HOLE__:
        return super(CaseInsensitiveDict, self).__contains__(key)
AttributeError
dataset/ETHPy150Open missionpinball/mpf/mpf/system/config.py/CaseInsensitiveDict.__contains__
2,694
def __delitem__(self, key):
    try:
        return super(CaseInsensitiveDict, self).__delitem__(key.lower())
    except __HOLE__:
        return super(CaseInsensitiveDict, self).__delitem__(key)
AttributeError
dataset/ETHPy150Open missionpinball/mpf/mpf/system/config.py/CaseInsensitiveDict.__delitem__
2,695
@staticmethod
def validate_config_item(spec, item='item not in config!@#'):

    try:
        if item.lower() == 'none':
            item = None
    except AttributeError:
        pass

    default = 'default required!@#'

    if '|' in spec:
        item_type, default = spec.split('|')
        if type(default) is str and default.lower() == 'none':
            default = None
    else:
        item_type = spec

    if item == 'item not in config!@#':
        if default == 'default required!@#':
            log.error('Required setting missing from config file. Run with '
                      'verbose logging and look for the last '
                      'ConfigProcessor entry above this line to see where '
                      'the problem is.')
            sys.exit()
        else:
            item = default

    if item_type == 'list':
        return Util.string_to_list(item)

    if item_type == 'list_of_dicts':
        if type(item) is list:
            return item
        elif type(item) is dict:
            return [item]

    elif item_type == 'set':
        return set(Util.string_to_list(item))

    elif item_type == 'dict':
        if type(item) is dict or type(item) is CaseInsensitiveDict:
            return item
        elif not default:
            return dict()
        else:
            log.error('Config error. "%s" is not a dictionary', item)
            sys.exit()

    elif item_type == 'int':
        try:
            return int(item)
        except __HOLE__:
            return None

    elif item_type == 'float':
        try:
            return float(item)
        except TypeError:
            return None

    elif item_type in ('string', 'str'):
        if item:
            return str(item)
        else:
            return None

    elif item_type in ('boolean', 'bool'):
        if type(item) is bool:
            return item
        else:
            return str(item).lower() in ('yes', 'true')

    elif item_type == 'ms':
        return Timing.string_to_ms(item)

    elif item_type == 'secs':
        return Timing.string_to_secs(item)

    elif item_type == 'list_of_lists':
        return Util.list_of_lists(item)
TypeError
dataset/ETHPy150Open missionpinball/mpf/mpf/system/config.py/Config.validate_config_item
2,696
def validate_item(self, item, validator, validation_failure_info):

    try:
        if item.lower() == 'none':
            item = None
    except AttributeError:
        pass

    if ':' in validator:
        validator = validator.split(':')
        # item could be str, list, or list of dicts
        item = Util.event_config_to_dict(item)

        return_dict = dict()

        for k, v in item.iteritems():
            return_dict[self.validate_item(k, validator[0],
                                           validation_failure_info)] = (
                self.validate_item(v, validator[1], validation_failure_info)
            )

        item = return_dict

    elif '%' in validator:

        if type(item) is str:

            try:
                item = eval(validator.replace('%', "'" + item + "'"))
            except KeyError:
                self.validation_error(item, validation_failure_info)
        else:
            item = None

    elif validator == 'str':
        if item is not None:
            item = str(item)
        else:
            item = None

    elif validator == 'float':
        try:
            item = float(item)
        except (TypeError, ValueError):
            # TODO error
            pass

    elif validator == 'int':
        try:
            item = int(item)
        except (__HOLE__, ValueError):
            # TODO error
            pass

    elif validator in ('bool', 'boolean'):
        if type(item) is str:
            if item.lower() in ['false', 'f', 'no', 'disable', 'off']:
                item = False

        elif not item:
            item = False

        else:
            item = True

    elif validator == 'ms':
        item = Timing.string_to_ms(item)

    elif validator == 'secs':
        item = Timing.string_to_secs(item)

    elif validator == 'ticks':
        item = Timing.string_to_ticks(item)

    elif validator == 'ticks_int':
        item = int(Timing.string_to_ticks(item))

    elif validator == 'list':
        item = Util.string_to_list(item)

    else:
        self.log.error("Invalid Validator '%s' in config spec %s:%s",
                       validator,
                       validation_failure_info[0][0],
                       validation_failure_info[1])
        sys.exit()

    return item
TypeError
dataset/ETHPy150Open missionpinball/mpf/mpf/system/config.py/Config.validate_item
2,697
@need_db_opened
def prepare(self, params=None):

    try:
        # mandatory if not passed by method
        self._cluster_name = params[0]

        # mandatory if not passed by method
        self.set_cluster_type( params[1] )

        self._cluster_location = params[2]
        self._datasegment_name = params[3]

    except( IndexError, TypeError ):
        # Use default for non existent indexes
        pass
    except __HOLE__:
        raise PyOrientBadMethodCallException(
            params[1] + ' is not a valid data cluster type', []
        )

    if self.get_protocol() < 24:
        self._append( ( FIELD_STRING, self._cluster_type ) )
        self._append( ( FIELD_STRING, self._cluster_name ) )
        self._append( ( FIELD_STRING, self._cluster_location ) )
        self._append( ( FIELD_STRING, self._datasegment_name ) )
    else:
        self._append( ( FIELD_STRING, self._cluster_name ) )

    if self.get_protocol() >= 18:
        self._append( ( FIELD_SHORT, self._new_cluster_id ) )

    return super( DataClusterAddMessage, self ).prepare()
ValueError
dataset/ETHPy150Open mogui/pyorient/pyorient/messages/cluster.py/DataClusterAddMessage.prepare
2,698
@need_db_opened
def prepare(self, params=None):

    if isinstance( params, tuple ) or isinstance( params, list ):
        try:
            # mandatory if not passed by method
            # raise Exception if None
            if isinstance( params[0], tuple ) or isinstance( params[0], list ):
                self._cluster_ids = params[0]
            else:
                raise PyOrientBadMethodCallException(
                    "Cluster IDs param must be an instance of Tuple or List.", []
                )

            self._count_tombstones = params[1]
        except( __HOLE__, TypeError ):
            # Use default for non existent indexes
            pass

    self._append( ( FIELD_SHORT, len(self._cluster_ids) ) )
    for x in self._cluster_ids:
        self._append( ( FIELD_SHORT, x ) )

    self._append( ( FIELD_BOOLEAN, self._count_tombstones ) )

    return super( DataClusterCountMessage, self ).prepare()
IndexError
dataset/ETHPy150Open mogui/pyorient/pyorient/messages/cluster.py/DataClusterCountMessage.prepare
2,699
def is_distributed_router(router):
    """Return True if router to be handled is distributed."""
    try:
        # See if router is a DB object first
        requested_router_type = router.extra_attributes.distributed
    except __HOLE__:
        # if not, try to see if it is a request body
        requested_router_type = router.get('distributed')
    if validators.is_attr_set(requested_router_type):
        return requested_router_type
    return cfg.CONF.router_distributed
AttributeError
dataset/ETHPy150Open openstack/neutron/neutron/db/l3_dvr_db.py/is_distributed_router