Column schema:
text1: string (lengths 92 to 132k characters)
text2: string (lengths 83 to 99.1k characters)
score: float64 (range 0.01 to 1)
Each record below lists text1, then text2, then the score.
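For orientation, here is a minimal sketch of reading rows with this schema. It assumes the data has been exported as a CSV file named traceback_pairs.csv with columns text1, text2, and score; both the filename and the CSV export format are assumptions, not part of this dump, and the 0.3 threshold is only an illustrative filter.

    import csv

    # Minimal sketch: iterate over (text1, text2, score) rows.
    # "traceback_pairs.csv" is a hypothetical filename; adjust to the real source.
    with open("traceback_pairs.csv", newline="", encoding="utf-8") as fh:
        for row in csv.DictReader(fh):
            text1 = row["text1"]          # first traceback, flattened to one line
            text2 = row["text2"]          # second traceback, flattened to one line
            score = float(row["score"])   # float in the 0.01 to 1 range per the schema
            if score >= 0.3:              # example threshold, chosen arbitrarily
                print(f"{score:.3f}  {text1[:80]}")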
Traceback (most recent call last): "/usr/local/lib/python3.8/site-packages/prefect/engine/runner.py", line 48, in inner new_state = method(self, state, *args, **kwargs) "/usr/local/lib/python3.8/site-packages/prefect/engine/flow_runner.py", line 491, in get_flow_run_state upstream_states = executor.wait( "/usr/local/lib/python3.8/site-packages/prefect/engine/executors/dask.py", line 375, in wait return self.client.gather(futures) "/usr/local/lib/python3.8/site-packages/distributed/client.py", line 1982, in gather return self.sync( "/usr/local/lib/python3.8/site-packages/distributed/client.py", line 832, in sync return sync( "/usr/local/lib/python3.8/site-packages/distributed/utils.py", line 339, in sync raise exc.with_traceback(tb) "/usr/local/lib/python3.8/site-packages/distributed/utils.py", line 323, in f result[0] = yield future "/usr/local/lib/python3.8/site-packages/tornado/gen.py", line 735, in run value = future.result() "/usr/local/lib/python3.8/site-packages/distributed/client.py", line 1876, in _gather response = await future "/usr/local/lib/python3.8/site-packages/distributed/client.py", line 1927, in _gather_remote response = await retry_operation(self.scheduler.gather, keys=keys) "/usr/local/lib/python3.8/site-packages/distributed/utils_comm.py", line 385, in retry_operation return await retry( "/usr/local/lib/python3.8/site-packages/distributed/utils_comm.py", line 370, in retry return await coro() "/usr/local/lib/python3.8/site-packages/distributed/core.py", line 861, in send_recv_from_rpc result = await send_recv(comm=comm, op=key, **kwargs) "/usr/local/lib/python3.8/site-packages/distributed/core.py", line 644, in send_recv response = await comm.read(deserializers=deserializers) "/usr/local/lib/python3.8/site-packages/distributed/comm/tcp.py", line 202, in read msg = await from_frames( "/usr/local/lib/python3.8/site-packages/distributed/comm/utils.py", line 87, in from_frames res = _from_frames() "/usr/local/lib/python3.8/site-packages/distributed/comm/utils.py", line 65, in _from_frames return protocol.loads( "/usr/local/lib/python3.8/site-packages/distributed/protocol/core.py", line 130, in loads value = _deserialize(head, fs, deserializers=deserializers) "/usr/local/lib/python3.8/site-packages/distributed/protocol/serialize.py", line 302, in deserialize return loads(header, frames) "/usr/local/lib/python3.8/site-packages/distributed/protocol/serialize.py", line 64, in pickle_loads return pickle.loads(x, buffers=buffers) "/usr/local/lib/python3.8/site-packages/distributed/protocol/pickle.py", line 75, in loads return pickle.loads(x) "/usr/local/lib/python3.8/site-packages/google/cloud/bigquery/table.py", line 1264, in __getattr__ value = self._xxx_field_to_index.get(name) "/usr/local/lib/python3.8/site-packages/google/cloud/bigquery/table.py", line 1264, in __getattr__ value = self._xxx_field_to_index.get(name) "/usr/local/lib/python3.8/site-packages/google/cloud/bigquery/table.py", line 1264, in __getattr__ value = self._xxx_field_to_index.get(name) [Previous line repeated 974 more times] RecursionError: maximum recursion depth exceeded
Traceback (most recent call last): "<stdin>", line 1, in <module> AttributeError: 'ChatBot' object has no attribute 'train'
0.310797
invalid output was: Traceback (most recent call last): "/root/.ansible/tmp/ansible-1384076413.61-29385143645352/apt", line 1342, in <module> main() "/root/.ansible/tmp/ansible-1384076413.61-29385143645352/apt", line 400, in main remove(module, packages, cache, p['purge']) "/root/.ansible/tmp/ansible-1384076413.61-29385143645352/apt", line 250, in remove installed, upgradable, has_files = package_status(m, name, version, cache, state='remove') "/root/.ansible/tmp/ansible-1384076413.61-29385143645352/apt", line 184, in package_status return ll_pkg.current_state == apt_pkg.CURSTATE_INSTALLED, pkg.isUpgradable, has_files AttributeError: current_state
Traceback (most recent call last): "/Users/disflux/miniconda3/envs/flasktest/lib/python3.6/site-packages/gunicorn/workers/sync.py", line 184, in handle_request self.log.access(resp, req, environ, request_time) "/Users/disflux/miniconda3/envs/flasktest/lib/python3.6/site-packages/gunicorn/glogging.py", line 341, in access request_time)) "/Users/disflux/miniconda3/envs/flasktest/lib/python3.6/site-packages/gunicorn/glogging.py", line 323, in atoms atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables}) "/Users/disflux/miniconda3/envs/flasktest/lib/python3.6/site-packages/gunicorn/glogging.py", line 323, in <dictcomp> atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables}) AttributeError: 'object' object has no attribute 'lower'
0.157678
Traceback (most recent call last): "convert_model.py", line 108, in <module> model_trt = mmdet2trt(model, inputs, data) "/src/torch2trt/torch2trt/torch2trt.py", line 446, in mmdet2trt outputs = module(return_loss=False, rescale=True, **prepared_data) "/usr/local/lib/python3.6/dist-packages/torch/nn/modules/module.py", line 541, in __call__ result = self.forward(*input, **kwargs) "/src/mmdetection/mmdet/core/fp16/decorators.py", line 49, in new_func return old_func(*args, **kwargs) "/src/mmdetection/mmdet/models/detectors/base.py", line 119, in forward return self.forward_test(img, img_meta, **kwargs) "/src/mmdetection/mmdet/models/detectors/base.py", line 102, in forward_test return self.simple_test(imgs[0], img_metas[0], **kwargs) "/src/mmdetection/mmdet/models/detectors/cascade_rcnn.py", line 317, in simple_test x, img_meta, self.test_cfg.rpn) if proposals is None else proposals "/src/mmdetection/mmdet/models/detectors/test_mixins.py", line 12, in simple_test_rpn proposal_list = self.rpn_head.get_bboxes(*proposal_inputs) "/src/mmdetection/mmdet/core/fp16/decorators.py", line 127, in new_func return old_func(*args, **kwargs) "/src/mmdetection/mmdet/models/anchor_heads/anchor_head.py", line 263, in get_bboxes device=device) for i in range(num_levels) "/src/mmdetection/mmdet/models/anchor_heads/anchor_head.py", line 263, in <listcomp> device=device) for i in range(num_levels) "/src/mmdetection/mmdet/core/anchor/anchor_generator.py", line 72, in grid_anchors shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) "/src/mmdetection/mmdet/core/anchor/anchor_generator.py", line 60, in _meshgrid yy = y.view(-1, 1).repeat(1, len(x)).view(-1) "/src/torch2trt/torch2trt/torch2trt.py", line 206, in wrapper converter['converter'](ctx) "/src/torch2trt/torch2trt/converters/view.py", line 10, in convert_view input_trt = trt_(ctx.network, input) "/src/torch2trt/torch2trt/torch2trt.py", line 139, in trt_ t._trt = network.add_constant(shape, weight).get_output(0) TypeError: add_constant(): incompatible function arguments. The following argument types are supported:
Traceback (most recent call last): "/opt/anaconda3/lib/python3.6/site-packages/ray/workers/default_worker.py", line 7, in <module> import numpy as np ImportError: No module named numpy
0.327091
Traceback (most recent call last): "/usr/local/lib/python3.7/dist-packages/urh-2.9.0-py3.7-linux-armv7l.egg/urh/controller/MainController.py", line 726, in on_signals_recorded self.add_signalfile(filename, enforce_sample_rate=sample_rate) "/usr/local/lib/python3.7/dist-packages/urh-2.9.0-py3.7-linux-armv7l.egg/urh/controller/MainController.py", line 288, in add_signalfile self.add_signal(signal, group_id) "/usr/local/lib/python3.7/dist-packages/urh-2.9.0-py3.7-linux-armv7l.egg/urh/controller/MainController.py", line 303, in add_signal signal.auto_detect(detect_modulation=True, detect_noise=False) "/usr/local/lib/python3.7/dist-packages/urh-2.9.0-py3.7-linux-armv7l.egg/urh/signalprocessing/Signal.py", line 423, in auto_detect estimated_params = AutoInterpretation.estimate(self.iq_array, **kwargs) "/usr/local/lib/python3.7/dist-packages/urh-2.9.0-py3.7-linux-armv7l.egg/urh/ainterpretation/AutoInterpretation.py", line 393, in estimate bit_length = get_bit_length_from_plateau_lengths(merged_lengths) "/usr/local/lib/python3.7/dist-packages/urh-2.9.0-py3.7-linux-armv7l.egg/urh/ainterpretation/AutoInterpretation.py", line 325, in get_bit_length_from_plateau_lengths histogram = c_auto_interpretation.get_threshold_divisor_histogram(merged_plateau_lengths) "src/urh/cythonext/auto_interpretation.pyx", line 113, in urh.cythonext.auto_interpretation.get_threshold_divisor_histogram "src/urh/cythonext/auto_interpretation.pyx", line 124, in urh.cythonext.auto_interpretation.get_threshold_divisor_histogram ValueError: Maximum allowed dimension exceeded
Traceback (most recent call last): "<string>", line 1, in <module> "/usr/lib/python3.6/tokenize.py", line 452, in open buffer = _builtin_open(filename, 'rb') FileNotFoundError: [Errno 2] No such file or directory: '/tmp/pip-build-ulwxy262/PyQt5/setup.py'
0.217425
Traceback (most recent call last): "/usr/bin/qtile", line 72, in <module> main() "/usr/bin/qtile", line 58, in main c = confreader.File(options.configfile) "/usr/lib/python2.7/site-packages/libqtile/confreader.py", line 58, in __init__ raise ConfigError(str(v)) libqtile.confreader.ConfigError: global name 'Bar' is not defined
Traceback (most recent call last): "/usr/src/homeassistant/homeassistant/setup.py", line 191, in _async_setup_component result = await task "/config/custom_components/sonoff/__init__.py", line 74, in async_setup await hass.data[DOMAIN].do_login() "/config/custom_components/sonoff/__init__.py", line 317, in do_login await self.set_wshost() "/config/custom_components/sonoff/__init__.py", line 326, in set_wshost async with session.post('https://%s-disp.coolkit.cc:8080/dispatch/app' % self._api_region, headers=self._headers) as r: "/usr/local/lib/python3.8/site-packages/aiohttp/client.py", line 1012, in __aenter__ self._resp = await self._coro "/usr/local/lib/python3.8/site-packages/aiohttp/client.py", line 480, in _request conn = await self._connector.connect( "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 523, in connect proto = await self._create_connection(req, traces, timeout) "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 858, in _create_connection _, proto = await self._create_direct_connection( "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 1004, in _create_direct_connection raise last_exc "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 980, in _create_direct_connection transp, proto = await self._wrap_create_connection( "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 938, in _wrap_create_connection raise ClientConnectorCertificateError( aiohttp.client_exceptions.ClientConnectorCertificateError: Cannot connect to host cn-disp.coolkit.cc:8080 ssl:True [SSLCertVerificationError: (1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:1108)')]
0.21009
Traceback (most recent call last): "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\site-packages\proxy\http\handler.py", line 345, in run self.initialize() "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\site-packages\proxy\http\handler.py", line 91, in initialize conn = self.optionally_wrap_socket(self.client.connection) "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\site-packages\proxy\http\handler.py", line 200, in optionally_wrap_socket conn = wrap_socket(conn, self.flags.keyfile, self.flags.certfile) "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\site-packages\proxy\common\utils.py", line 163, in wrap_socket return ctx.wrap_socket( "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\ssl.py", line 500, in wrap_socket return self.sslsocket_class._create( "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\ssl.py", line 1040, in _create self.do_handshake() "C:\Users\hashliZer\AppData\Local\Programs\Python\Python38-32\lib\ssl.py", line 1309, in do_handshake self._sslobj.do_handshake() ssl.SSLError: [SSL: HTTPS_PROXY_REQUEST] https proxy request (_ssl.c:1108)
Traceback (most recent call last): "PixivUtil2.py", line 412, in process_member "PixivUtil2.py", line 692, in process_image "PixivHelper.pyc", line 655, in ugoira2gif "imageio\core\functions.pyc", line 181, in imread "imageio\core\functions.pyc", line 108, in get_reader ValueError: Could not find a format to read the specified file in mode 'i'
0.176769
Traceback (most recent call last): "./urlwatch", line 101, in <module> urlwatch_command.run() "/home/martin/urlwatch/lib/urlwatch/command.py", line 167, in run self.urlwatcher.close() "/home/martin/urlwatch/lib/urlwatch/main.py", line 96, in close self.report.finish() "/home/martin/urlwatch/lib/urlwatch/handler.py", line 128, in finish ReporterBase.submit_all(self, self.job_states, duration) "/home/martin/urlwatch/lib/urlwatch/reporters.py", line 92, in submit_all subclass(report, cfg, job_states, duration).submit() "/home/martin/urlwatch/lib/urlwatch/reporters.py", line 315, in submit print(self._red(line)) UnicodeEncodeError: 'ascii' codec can't encode characters in position 6927-6935: ordinal not in range(128)
Traceback (most recent call last): "<stdin>", line 1, in <module> "/usr/local/lib/python3.4/dist-packages/autograd/errors.py", line 49, in wrapped except Exception as e: add_extra_error_message(e) "/usr/local/lib/python3.4/dist-packages/autograd/errors.py", line 71, in add_extra_error_message raise_(etype, value, traceback) "/usr/local/lib/python3.4/dist-packages/future/utils/__init__.py", line 413, in raise_ raise exc.with_traceback(tb) "/usr/local/lib/python3.4/dist-packages/autograd/errors.py", line 48, in wrapped try: return fun(*args, **kwargs) "/usr/local/lib/python3.4/dist-packages/autograd/convenience_wrappers.py", line 27, in gradfun vjp, ans = make_vjp(scalar_fun, argnum)(*args, **kwargs) "/usr/local/lib/python3.4/dist-packages/autograd/core.py", line 14, in vjp start_node, end_node = forward_pass(fun, args, kwargs, argnum) "/usr/local/lib/python3.4/dist-packages/autograd/core.py", line 26, in forward_pass end_node = fun(*args, **kwargs) "/usr/local/lib/python3.4/dist-packages/autograd/convenience_wrappers.py", line 20, in scalar_fun return as_scalar(fun(*args, **kwargs)) "<stdin>", line 2, in loss_n TypeError: float() argument must be a string or a number, not 'ArrayNode'
0.315544
Traceback (most recent call last): "C:\Users\thomasstewart\AppData\Local\ESRI\conda\envs\arcgispro-py3-clone\lib\site-packages\spyder\widgets\status.py", line 158, in update_status self.label_value.setText(self.get_value()) "C:\Users\thomasstewart\AppData\Local\ESRI\conda\envs\arcgispro-py3-clone\lib\site-packages\spyder\widgets\status.py", line 190, in get_value text = '%d%%' % memory_usage() "C:\Users\thomasstewart\AppData\Local\ESRI\conda\envs\arcgispro-py3-clone\lib\site-packages\spyder\utils\system.py", line 57, in psutil_phymem_usage percent = psutil.phymem_usage().percent AttributeError: module 'psutil' has no attribute 'phymem_usage'
Traceback (most recent call last): "/usr/local/bin/bcbio_setup_genome.py", line 191, in <module> args.gtf = gff3_to_gtf(args.gtf) "/usr/local/bin/bcbio_setup_genome.py", line 50, in gff3_to_gtf db = gffutils.create_db(gff3_file, ":memory:") "/usr/local/share/bcbio/anaconda/lib/python2.7/site-packages/gffutils/create.py", line 1273, in create_db c.create() "/usr/local/share/bcbio/anaconda/lib/python2.7/site-packages/gffutils/create.py", line 488, in create self._populate_from_lines(self.iterator) "/usr/local/share/bcbio/anaconda/lib/python2.7/site-packages/gffutils/create.py", line 571, in _populate_from_lines fixed, final_strategy = self._do_merge(f, self.merge_strategy) "/usr/local/share/bcbio/anaconda/lib/python2.7/site-packages/gffutils/create.py", line 218, in _do_merge raise ValueError("Duplicate ID {0.id}".format(f)) ValueError: Duplicate ID cds2
0.191587
Traceback (most recent call last): "/root/cobra/cobra/__init__.py", line 105, in main cli.start(args.target, args.format, args.output, args.special_rules, a_sid, args.dels) "/root/cobra/cobra/cli.py", line 83, in start main_framework = dt.framework "/root/cobra/cobra/detection.py", line 73, in framework frame_name = self.dependency_scan(root) # Based on the dependency analysis framework type "/root/cobra/cobra/detection.py", line 93, in dependency_scan dependencies = Dependencies(self.target_directory) "/root/cobra/cobra/dependencies.py", line 32, in __init__ self.dependencies() "/root/cobra/cobra/dependencies.py", line 35, in dependencies file_path, flag = self.find_file() "/root/cobra/cobra/dependencies.py", line 53, in find_file for root, dirs, filenames in os.walk(self.directory): "/usr/lib64/python2.7/os.py", line 294, in walk for x in walk(new_path, topdown, onerror, followlinks): "/usr/lib64/python2.7/os.py", line 294, in walk for x in walk(new_path, topdown, onerror, followlinks): "/usr/lib64/python2.7/os.py", line 284, in walk if isdir(join(top, name)): "/usr/lib64/python2.7/posixpath.py", line 80, in join path += '/' + bUnicodeDecodeError: 'utf8' codec can't decode byte 0xb8 in position 7: invalid start byte
Traceback (most recent call last): "c:\users\cleo\appdata\local\programs\python\python38\lib\site-packages\yt_dlp\extractor\niconico.py", line 398, in _real_extract dmc_info = api_data['video']['dmcInfo'] KeyError: 'dmcInfo'
0.274044
Traceback (most recent call last): "/usr/local/bin/airflow", line 10, in <module> args.func(args) "/usr/local/lib/python2.7/dist-packages/airflow/bin/cli.py", line 324, in initdb utils.initdb() "/usr/local/lib/python2.7/dist-packages/airflow/utils.py", line 215, in initdb models.DagBag(sync_to_db=True) "/usr/local/lib/python2.7/dist-packages/airflow/models.py", line 121, in __init__ self.deactivate_inactive_dags() "/usr/local/lib/python2.7/dist-packages/airflow/models.py", line 272, in deactivate_inactive_dags DagModel).filter(~DagModel.dag_id.in_(active_dag_ids)).all(): "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2399, in all return list(self) "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2516, in __iter__ return self._execute_and_instances(context) "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2529, in _execute_and_instances close_with_result=True) "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2520, in _connection_from_session **kw) "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/session.py", line 882, in connection execution_options=execution_options) "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/session.py", line 887, in _connection_for_bind engine, execution_options) "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/session.py", line 313, in _connection_for_bind self._assert_active() "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/session.py", line 214, in _assert_active % self._rollback_exceptionsqlalchemy.exc.InvalidRequestError: This Session's transaction has been rolled back due to a previous exception during flush. To begin a new transaction with this Session, first issue Session.rollback(). Original exception was: (mysql.connector.errors.ProgrammingError) Failed processing pyformat-parameters; Python 'newstr' cannot be converted to a MySQL type [SQL: u'INSERT INTO dag (dag_id, is_pau
Traceback (most recent call last): "/home/dash/programs/Real-Time-Voice-Cloning/toolbox/__init__.py", line 81, in <lambda> func = lambda: self.ui.play(self.ui.selected_utterance.wav, Synthesizer.sample_rate) "/home/dash/programs/Real-Time-Voice-Cloning/toolbox/ui.py", line 142, in play sd.play(wav, sample_rate) "/usr/lib/python3.7/site-packages/sounddevice.py", line 154, in play **kwargs) "/usr/lib/python3.7/site-packages/sounddevice.py", line 2417, in start_stream **kwargs) "/usr/lib/python3.7/site-packages/sounddevice.py", line 1374, in __init__ **_remove_self(locals())) "/usr/lib/python3.7/site-packages/sounddevice.py", line 780, in __init__ 'Error opening {0}'.format(self.__class__.__name__)) "/usr/lib/python3.7/site-packages/sounddevice.py", line 2572, in _check raise PortAudioError(errormsg, err) sounddevice.PortAudioError: Error opening OutputStream: Invalid sample rate [PaErrorCode -9997]
0.292595
Traceback (most recent call last): "/Library/Python/2.7/site-packages/youtube_dl/YoutubeDL.py", line 533, in extract_info ie_result = ie.extract(url) "/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py", line 198, in extract return self._real_extract(url) "/Library/Python/2.7/site-packages/youtube_dl/extractor/generic.py", line 962, in _real_extract raise ExtractorError('Unsupported URL: %s' % url) ExtractorError: Unsupported URL: http://www.giga.de/filme/anime-awesome/trailer/anime-awesome-chihiros-reise-ins-zauberland-das-beste-kommt-zum-schluss/?ModPagespeed=noscript; please report this issue on https://yt-dl.org/bug . Be sure to call youtube-dl with the --verbose flag and include its complete output. Make sure you are using the latest version; type youtube-dl -U to update.
Traceback (most recent call last): "/Users/hvnsweeting/Envs/proj/lib/python3.6/site-packages/invoke/tasks.py", line 160, in argspec context_arg = arg_names.pop(0) IndexError: pop from empty list
0.205166
Traceback (most recent call last): "/usr/local/lib/python2.7/dist-packages/raven/transport/threaded.py", line 174, in send_sync super(ThreadedHTTPTransport, self).send(data, headers) "/usr/local/lib/python2.7/dist-packages/raven/transport/http.py", line 47, in send ca_certs=self.ca_certs, "/usr/local/lib/python2.7/dist-packages/raven/utils/http.py", line 66, in urlopen return opener.open(url, data, timeout) "/usr/local/lib/python2.7/dist-packages/future/backports/urllib/request.py", line 494, in open response = self._open(req, data) "/usr/local/lib/python2.7/dist-packages/future/backports/urllib/request.py", line 512, in _open '_open', req) "/usr/local/lib/python2.7/dist-packages/future/backports/urllib/request.py", line 466, in _call_chain result = func(*args) "/usr/local/lib/python2.7/dist-packages/raven/utils/http.py", line 46, in https_open return self.do_open(ValidHTTPSConnection, req) "/usr/local/lib/python2.7/dist-packages/future/backports/urllib/request.py", line 1284, in do_open h.request(req.get_method(), req.selector, req.data, headers) "/usr/lib/python2.7/httplib.py", line 1057, in request self._send_request(method, url, body, headers) "/usr/lib/python2.7/httplib.py", line 1097, in _send_request self.endheaders(body) "/usr/lib/python2.7/httplib.py", line 1053, in endheaders self._send_output(message_body) "/usr/lib/python2.7/httplib.py", line 895, in _send_output msg += message_bodyUnicodeDecodeError: 'ascii' codec can't decode byte 0x9c in position 1: ordinal not in range(128)
Traceback (most recent call last): "/usr/local/bin/katana", line 11, in <module> load_entry_point('katana==2.0', 'console_scripts', 'katana')() "/usr/local/lib/python3.8/dist-packages/katana-2.0-py3.8.egg/katana/__main__.py", line 155, in main name, value = param.split("=") ValueError: not enough values to unpack (expected 2, got 1)
0.279625
Traceback (most recent call last): "calibre_plugins.dedrm.kindlekey", line 1540, in getDBfromFile cleartext = UnprotectHeaderData(encryptedValue) "calibre_plugins.dedrm.kindlekey", line 1408, in UnprotectHeaderData crp = LibCrypto() TypeError: 'NoneType' object is not callable
Traceback (most recent call last): "youtube_dl\YoutubeDL.pyo", line 994, in process_info "youtube_dl\YoutubeDL.pyo", line 970, in dl "youtube_dl\downloader\common.pyo", line 290, in download "youtube_dl\downloader\http.pyo", line 54, in real_download "youtube_dl\YoutubeDL.pyo", line 1198, in urlopen "urllib2.pyo", line 410, in open "urllib2.pyo", line 523, in http_response "urllib2.pyo", line 448, in error "urllib2.pyo", line 382, in _call_chain "urllib2.pyo", line 531, in http_error_default HTTPError: HTTP Error 403: Forbidden
0.246017
Traceback (most recent call last): "/tmp/GitPython/git/test/lib/helper.py", line 92, in wrapper return func(self, path) "/tmp/GitPython/git/test/test_submodule.py", line 834, in test_rename sm = parent.create_submodule(sm_name, sm_name, url=self._small_repo_url()) "/tmp/GitPython/git/repo/base.py", line 332, in create_submodule return Submodule.add(self, *args, **kwargs) "/tmp/GitPython/git/objects/submodule/base.py", line 401, in add mrepo = cls._clone_repo(repo, url, path, name, **kwargs) "/tmp/GitPython/git/objects/submodule/base.py", line 261, in _clone_repo clone = git.Repo.clone_from(url, module_checkout_path, **kwargs) "/tmp/GitPython/git/repo/base.py", line 972, in clone_from return cls._clone(git, url, to_path, GitCmdObjectDB, progress, **kwargs) "/tmp/GitPython/git/repo/base.py", line 927, in _clone finalize_process(proc, stderr=stderr) "/tmp/GitPython/git/util.py", line 341, in finalize_process proc.wait(**kwargs) "/tmp/GitPython/git/cmd.py", line 418, in wait raise GitCommandError(self.args, status, errstr) git.exc.GitCommandError: Cmd('/usr/pkg/bin/git') failed due to: exit code(128)
Traceback (most recent call last): "mellotron_train.py", line 80, in <module> train(hparams=hparams, **args.__dict__) "/home/pxdevice/xue/Mellotron/zhrtvc/mellotron/train.py", line 363, in train hparams.distributed_run, rank, outdir=Path(output_directory), hparams=hparams) "/home/pxdevice/xue/Mellotron/zhrtvc/mellotron/train.py", line 207, in validate logger.log_validation(val_loss, model, y, y_pred, iteration, x) "/home/pxdevice/xue/Mellotron/zhrtvc/mellotron/logger.py", line 34, in log_validation self.add_histogram(tag, value.data.cpu().numpy(), iteration) "/home/pxdevice/anaconda3/envs/ptf/lib/python3.6/site-packages/tensorboardX/writer.py", line 503, in add_histogram histogram(tag, values, bins, max_bins=max_bins), global_step, walltime) "/home/pxdevice/anaconda3/envs/ptf/lib/python3.6/site-packages/tensorboardX/summary.py", line 210, in histogram hist = make_histogram(values.astype(float), bins, max_bins) "/home/pxdevice/anaconda3/envs/ptf/lib/python3.6/site-packages/tensorboardX/summary.py", line 248, in make_histogram raise ValueError('The histogram is empty, please file a bug report.') ValueError: The histogram is empty, please file a bug report.
0.261369
Traceback (most recent call last): "/home/www/www/venv/lib/python3.4/site-packages/sphinx/cmdline.py", line 244, in main app.build(opts.force_all, filenames) "/home/www/www/venv/lib/python3.4/site-packages/sphinx/application.py", line 266, in build self.builder.build_update() "/home/www/www/venv/lib/python3.4/site-packages/sphinx/builders/__init__.py", line 251, in build_update 'out of date' % len(to_build)) "/home/www/www/venv/lib/python3.4/site-packages/sphinx/builders/__init__.py", line 265, in build self.doctreedir, self.app)) "/home/www/www/venv/lib/python3.4/site-packages/sphinx/environment.py", line 618, in update self._read_serial(docnames, app) "/home/www/www/venv/lib/python3.4/site-packages/sphinx/environment.py", line 638, in _read_serial self.read_doc(docname, app) "/home/www/www/venv/lib/python3.4/site-packages/sphinx/environment.py", line 863, in read_doc pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)RuntimeError: maximum recursion depth exceeded while calling a Python object
Traceback (most recent call last): "scriptHandler.pyc", line 205, in executeScript "editableText.pyc", line 219, in script_caret_moveByLine "editableText.pyc", line 164, in _caretMovementScriptHelper "editableText.pyc", line 150, in _caretScriptPostMovedHelper "speech\__init__.pyc", line 793, in speakTextInfo "textInfos\offsets.pyc", line 527, in getTextWithFields "baseObject.pyc", line 26, in __get__ "textInfos\offsets.pyc", line 545, in _get_text "NVDAObjects\window\scintilla.pyc", line 180, in _getTextRange "winKernel.pyc", line 212, in virtualAllocEx OSError: [WinError 87] The parameter is incorrect.
0.314144
Traceback (most recent call last): "c:\ProgramData\Anaconda3\lib\logging\__init__.py", line 992, in emit msg = self.format(record) "c:\ProgramData\Anaconda3\lib\logging\__init__.py", line 838, in format return fmt.format(record) "c:\ProgramData\Anaconda3\lib\logging\__init__.py", line 575, in format record.message = record.getMessage() "c:\ProgramData\Anaconda3\lib\logging\__init__.py", line 338, in getMessage msg = msg % self.args ValueError: incomplete format
2020-11-24T08:49:43:WARNING:localstack.utils.server.http2_server: Error in proxy handler for request POST http://localhost:4566/: 'utf-8' codec can't decode byte 0xa2 in position 0: invalid start byte Traceback (most recent call last): "/opt/code/localstack/localstack/utils/server/http2_server.py", line 107, in index raise result "/opt/code/localstack/localstack/utils/bootstrap.py", line 578, in run result = self.func(self.params) "/opt/code/localstack/localstack/utils/async_utils.py", line 28, in _run return fn(_args, *_kwargs) "/opt/code/localstack/localstack/services/generic_proxy.py", line 571, in handler response = modify_and_forward(method=method, path=path_with_params, data_bytes=data, headers=headers, "/opt/code/localstack/localstack/services/generic_proxy.py", line 341, in modify_and_forward listener_result = listener.forward_request(method=method, "/opt/code/localstack/localstack/services/edge.py", line 81, in forward_request return do_forward_request(api, port, method, path, data, headers) "/opt/code/localstack/localstack/services/edge.py", line 86, in do_forward_request result = do_forward_request_inmem(api, port, method, path, data, headers) "/opt/code/localstack/localstack/services/edge.py", line 106, in do_forward_request_inmem response = modify_and_forward(method=method, path=path, data_bytes=data, headers=headers, "/opt/code/localstack/localstack/services/generic_proxy.py", line 409, in modify_and_forward updated_response = update_listener.return_response(**kwargs) "/opt/code/localstack/localstack/services/kinesis/kinesis_listener.py", line 81, in return_response to_str(response.content or '')) "/opt/code/localstack/localstack/utils/common.py", line 845, in to_str return obj.decode(encoding, errors) if isinstance(obj, six.binary_type) else obj UnicodeDecodeError: 'utf-8' codec can't decode byte 0xa2 in position 0: invalid start byte
0.223964
Traceback (most recent call last): "<stdin>", line 1, in <module> "C:\Python27\lib\site-packages\robot\libraries\BuiltIn.py", line 2872, in set_suite_metadata metadata = self._get_namespace(top).suite.metadata "C:\Python27\lib\site-packages\robot\libraries\BuiltIn.py", line 75, in _get_namespace return ctx.namespace AttributeError: 'NoneType' object has no attribute 'namespace'
Traceback (most recent call last): "<string>", line 1, in <module> "C:\Users\User\AppData\Local\Temp\pip-install-hkpbpz6t\thinc\setup.py", line 261, in <module> setup_package() "C:\Users\User\AppData\Local\Temp\pip-install-hkpbpz6t\thinc\setup.py", line 201, in setup_package setup( "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\__init__.py", line 144, in setup _install_setup_requires(attrs) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\__init__.py", line 139, in _install_setup_requires dist.fetch_build_eggs(dist.setup_requires) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\dist.py", line 717, in fetch_build_eggs resolved_dists = pkg_resources.working_set.resolve( "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\pkg_resources\__init__.py", line 780, in resolve dist = best[req.key] = env.best_match( "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\pkg_resources\__init__.py", line 1065, in best_match return self.obtain(req, installer) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\pkg_resources\__init__.py", line 1077, in obtain return installer(requirement) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\dist.py", line 787, in fetch_build_egg return cmd.easy_install(req) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\command\easy_install.py", line 679, in easy_install return self.install_item(spec, dist.location, tmpdir, deps) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\command\easy_install.py", line 705, in install_item dists = self.install_eggs(spec, download, tmpdir) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\command\easy_install.py", line 890, in install_eggs return self.build_and_install(setup_script, setup_base) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\command\easy_install.py", line 1158, in build_and_install self.run_setup(setup_script, setup_base, args) "c:\users\user\appdata\local\programs\python\python38-32\lib\site-packages\setuptools\command\easy_install.py", line 1146, in run_setup raise DistutilsError("Setup script exited with %s" % (v.args[0],)) distutils.errors.DistutilsError: Setup script exited with error: Microsoft Visual C++ 14.0 is required. Get it with "Build Tools for Visual Studio": https://visualstudio.microsoft.com/downloads/
0.283934
Traceback (most recent call last): "/home/pi/.pyenv/versions/homeassistant/lib/python3.4/site-packages/pip/basecommand.py", line 122, in main status = self.run(options, args) "/home/pi/.pyenv/versions/homeassistant/lib/python3.4/site-packages/pip/commands/install.py", line 311, in run os.path.join(options.target_dir, item) "/home/pi/.pyenv/versions/3.4.2/lib/python3.4/shutil.py", line 520, in move raise Error("Destination path '%s' already exists" % real_dst) shutil.Error: Destination path '/home/pi/.homeassistant/lib/__pycache__/__pycache__' already exists
Traceback (most recent call last): "manage.py", line 21, in <module> execute_from_command_line(sys.argv) "/usr/lib/python2.7/dist-packages/django/core/management/__init__.py", line 338, in execute_from_command_line utility.execute() "/usr/lib/python2.7/dist-packages/django/core/management/__init__.py", line 330, in execute self.fetch_command(subcommand).run_from_argv(self.argv) "/usr/lib/python2.7/dist-packages/django/core/management/base.py", line 393, in run_from_argv self.execute(*args, **cmd_options) "/usr/lib/python2.7/dist-packages/django/core/management/base.py", line 444, in execute output = self.handle(*args, **options) "/home/zulip/deployments/2015-10-01-04-05-33/zerver/management/commands/initialize_voyager_db.py", line 57, in handle settings.ADMIN_DOMAIN, True) "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/actions.py", line 1594, in do_create_realm notifications_stream, _ = create_stream_if_needed(realm, Realm.NOTIFICATION_STREAM_NAME) "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/actions.py", line 573, in create_stream_if_needed send_event(event, active_user_ids(realm)) "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/event_queue.py", line 707, in send_event send_notification_http) "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/queue.py", line 259, in queue_json_publish get_queue_client().json_publish(queue_name, event) "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/queue.py", line 240, in get_queue_client queue_client = SimpleQueueClient() "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/queue.py", line 25, in __init__ self._connect() "/home/zulip/deployments/2015-10-01-04-05-33/zerver/lib/queue.py", line 29, in _connect self.connection = pika.BlockingConnection(self._get_parameters()) "/usr/lib/python2.7/dist-packages/pika/adapters/blocking_connection.py", line 107, in __init__ super(BlockingConnection, self).__init__(parameters, None, False) "/usr/lib/python2.7/dist-packages/pika/adapters/base_connection.py", line 62, in __init__ on_close_callback) "/usr/lib/python2.7/dist-packages/pika/connection.py", line 590, in __init__ self.connect() "/usr/lib/python2.7/dist-packages/pika/adapters/blocking_connection.py", line 206, in connect if not self._adapter_connect(): "/usr/lib/python2.7/dist-packages/pika/adapters/blocking_connection.py", line 275, in _adapter_connect raise exceptions.AMQPConnectionError(1)pika.exceptions.AMQPConnectionError: 1
0.231118
Traceback (most recent call last): "/usr/lib/python2.7/runpy.py", line 174, in _run_module_as_main "__main__", fname, loader, pkg_name) "/usr/lib/python2.7/runpy.py", line 72, in _run_code exec code in run_globals "/opt/elastalert/elastalert/create_index.py", line 275, in <module> main() "/opt/elastalert/elastalert/create_index.py", line 127, in main esversion = es.info()["version"]["number"] "/usr/lib/python2.7/site-packages/elasticsearch-6.3.1-py2.7.egg/elasticsearch/client/utils.py", line 76, in _wrapped return func(*args, params=params, **kwargs) "/usr/lib/python2.7/site-packages/elasticsearch-6.3.1-py2.7.egg/elasticsearch/client/__init__.py", line 241, in info return self.transport.perform_request('GET', '/', params=params) "/usr/lib/python2.7/site-packages/elasticsearch-6.3.1-py2.7.egg/elasticsearch/transport.py", line 318, in perform_request status, headers_response, data = connection.perform_request(method, url, params, body, headers=headers, ignore=ignore, timeout=timeout) "/usr/lib/python2.7/site-packages/elasticsearch-6.3.1-py2.7.egg/elasticsearch/connection/http_requests.py", line 85, in perform_request raise ConnectionError('N/A', str(e), e) elasticsearch.exceptions.ConnectionError: ConnectionError([('UI routines', 'UI_process', 'processing error'), ('PEM routines', 'PEM_def_callback', 'problems getting pasword'), ('PEM routines', 'PEM_read_bio_PrivateKey', 'bad password read'), ('SSL routines', 'SSL_CTX_use_PrivateKey_file', 'PEM lib')]) caused by: Error([('UI routines', 'UIprocess', 'processing error'), ('PEM routines', 'PEM_def_callback', 'problems getting password'), ('PEM routines', 'PEM_read_bio_PrivateKey', 'bad password read'), ('SSL rotines', 'SSL_CTX_use_PrivateKey_file', 'PEM lib')])
Traceback (most recent call last): "./ssh-ident", line 316, in <module> import distutils.spawn ModuleNotFoundError: No module named 'distutils.spawn'
0.301901
2019-02-16 19:57:30.747 [launcher][ERROR] Except stack:Traceback (most recent call last): "/Users/osx/Desktop/XX-Net-master/code/3.13.1/launcher/module_init.py", line 40, in start proc_handler[module]["imp"] = __import__(module, globals(), locals(), ['local', 'start'], -1) "/Users/osx/Desktop/XX-Net-master/code/3.13.1/gae_proxy/local/__init__.py", line 2, in <module> import apis "/Users/osx/Desktop/XX-Net-master/code/3.13.1/gae_proxy/local/apis.py", line 2, in <module> from front import front, direct_front "/Users/osx/Desktop/XX-Net-master/code/3.13.1/gae_proxy/local/front.py", line 15, in <module> from front_base.http_dispatcher import HttpsDispatcher "/Users/osx/Desktop/XX-Net-master/code/3.13.1/python27/1.0/lib/noarch/front_base/http_dispatcher.py", line 32, in <module> from http2_connection import Http2Worker "/Users/osx/Desktop/XX-Net-master/code/3.13.1/python27/1.0/lib/noarch/front_base/http2_connection.py", line 21, in <module> from hyper.packages.hpack import Encoder, Decoder ImportError: cannot import name Encoder
Test output: Traceback (most recent call last): "/workspace/healthcare/api-client/v1beta1/fhir/fhir_stores_test.py", line 60, in test_create_delete_fhir_store fhir_stores.create_fhir_store( "/workspace/healthcare/api-client/v1beta1/fhir/fhir_stores.py", line 119, in create_fhir_store response = request.execute() "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/googleapiclient/_helpers.py", line 134, in positional_wrapper return wrapped(*args, **kwargs) "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/googleapiclient/http.py", line 900, in execute resp, content = _retry_request( "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/googleapiclient/http.py", line 177, in _retry_request resp, content = http.request(uri, method, *args, **kwargs) "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/google_auth_httplib2.py", line 189, in request self.credentials.before_request( "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/google/auth/credentials.py", line 133, in before_request self.refresh(request) "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/google/oauth2/service_account.py", line 361, in refresh access_token, expiry, _ = _client.jwt_grant(request, self._token_uri, assertion) "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/google/oauth2/_client.py", line 153, in jwt_grant response_data = _token_endpoint_request(request, token_uri, body) "/workspace/healthcare/api-client/v1beta1/fhir/.nox/py-3-8/lib/python3.8/site-packages/google/oauth2/_client.py", line 111, in _token_endpoint_request response_data = json.loads(response_body) "/usr/local/lib/python3.8/json/__init__.py", line 357, in loads return _default_decoder.decode(s) "/usr/local/lib/python3.8/json/decoder.py", line 337, in decode obj, end = self.raw_decode(s, idx=_w(s, 0).end()) "/usr/local/lib/python3.8/json/decoder.py", line 355, in raw_decode raise JSONDecodeError("Expecting value", s, err.value) from None json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
0.234723
Traceback (most recent call last): "train.py", line 19, in <module> from lib.net import Net, load_layer, caffe_test "/home/t-jinche/channel-pruning/lib/net.py", line 7, in <module> import caffe "/home/t-jinche/channel-pruning/caffe/python/caffe/__init__.py", line 4, in <module> from .proto.caffe_pb2 import TRAIN, TEST "/home/t-jinche/channel-pruning/caffe/python/caffe/proto/caffe_pb2.py", line 799, in <module> options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\020\001')), "/usr/local/lib/python3.4/dist-packages/google/protobuf/descriptor.py", line 874, in _ParseOptions message.ParseFromString(string) "/usr/local/lib/python3.4/dist-packages/google/protobuf/message.py", line 185, in ParseFromString self.MergeFromString(serialized) "/usr/local/lib/python3.4/dist-packages/google/protobuf/internal/python_message.py", line 1075, in MergeFromString raise message_mod.DecodeError('Truncated message.') google.protobuf.message.DecodeError: Truncated message.
  | Traceback (most recent call last):   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context   | cursor, statement, parameters, context   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 588, in do_execute   | cursor.execute(statement, parameters)   | sqlite3.OperationalError: no such table: event_logs   |     | The above exception was the direct cause of the following exception:   |     | Traceback (most recent call last):   | "/workdir/python_modules/dagster/dagster/core/storage/sql.py", line 53, in handle_schema_errors   | yield   | "/workdir/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py", line 108, in connect   | yield conn   | "/workdir/python_modules/dagster/dagster/core/storage/event_log/sql_event_log.py", line 82, in get_logs_for_run   | results = conn.execute(query).fetchall()   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 982, in execute   | return meth(self, multiparams, params)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/sql/elements.py", line 293, in _execute_on_connection   | return connection._execute_clauseelement(self, multiparams, params)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1101, in _execute_clauseelement   | distilled_params,   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1250, in _execute_context   | e, statement, parameters, cursor, context   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1476, in _handle_dbapi_exception   | util.raise_from_cause(sqlalchemy_exception, exc_info)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/util/compat.py", line 398, in raise_from_cause   | reraise(type(exception), exception, tb=exc_tb, cause=cause)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/util/compat.py", line 152, in reraise   | raise value.with_traceback(tb)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context   | cursor, statement, parameters, context   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 588, in do_execute   | cursor.execute(statement, parameters)   | sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) no such table: event_logs   | [SQL: SELECT event_logs.event   | FROM event_logs   | WHERE event_logs.run_id = ? AND event_logs.id >= ? 
ORDER BY event_logs.id ASC]   | [parameters: ('c127bf51-753b-48af-9638-cff6029d11ee', -1)]   | (Background on this error at: http://sqlalche.me/e/e3q8)   |     | The above exception was the direct cause of the following exception:   |     | Traceback (most recent call last):   | "/usr/local/lib/python3.6/threading.py", line 916, in _bootstrap_inner   | self.run()   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/watchdog/observers/api.py", line 196, in run   | self.dispatch_events(self.event_queue, self.timeout)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/watchdog/observers/api.py", line 369, in dispatch_events   | handler.dispatch(event)   | "/workdir/python_modules/dagstermill/.tox/py36/lib/python3.6/site-packages/watchdog/events.py", line 462, in dispatch   | _method_map[event_type](event)   | "/workdir/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py", line 156, in on_created   | self._process_log()   | "/workdir/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py", line 146, in _process_log   | events = self._event_log_storage.get_logs_for_run(self._run_id, self._cursor)   | "/workdir/python_modules/dagster/dagster/core/storage/event_log/sql_event_log.py", line 82, in get_logs_for_run   | results = conn.execute(query).fetchall()   | "/usr/local/lib/python3.6/contextlib.py", line 99, in __exit__   | self.gen.throw(type, value, traceback)   | "/workdir/python_modules/dagster/dagster/core/storage/event_log/sqlite/sqlite_event_log.py", line 108, in connect   | yield conn   | "/usr/local/lib/python3.6/contextlib.py", line 99, in __exit__   | self.gen.throw(type, value, traceback)   | "/workdir/python_modules/dagster/dagster/core/storage/sql.py", line 70, in handle_schema_errors   | exc,   | "<string>", line 3, in raise_from   | dagster.core.errors.DagsterInstanceMigrationRequired: Instance is out of date and must be migrated (SqliteEventLogStorage for run c127bf51-753b-48af-9638-cff6029d11ee). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`.
0.255459
Traceback (most recent call last): "uvloop/handles/stream.pyx", line 609, in uvloop.loop.UVStream._on_eof "uvloop/sslproto.pyx", line 171, in uvloop.loop._SSLPipe.feed_ssldata "/usr/local/lib/python3.6/ssl.py", line 689, in do_handshake self._sslobj.do_handshake() ssl.SSLError: [SSL: HTTP_REQUEST] http request (_ssl.c:841)
Traceback (most recent call last): "/usr/lib/python2.7/threading.py", line 552, in __bootstrap_inner self.run() "/usr/lib/python2.7/threading.py", line 505, in run self.__target(*self.__args, **self.__kwargs) "/usr/local/lib/python2.7/dist-packages/shinken/stats.py", line 120, in reaper struct = self.app.get_stats_struct() "/usr/local/lib/python2.7/dist-packages/shinken/daemons/receiverdaemon.py", line 411, in get_stats_struct res = super(Receiver, self).get_stats_struct() "/usr/local/lib/python2.7/dist-packages/shinken/satellite.py", line 1042, in get_stats_struct _type = self.__class__.my_type AttributeError: type object 'Receiver' has no attribute 'my_type'
0.207763
Traceback (most recent call last): "/usr/local/lib/python2.6/dist-packages/celery-3.0.11-py2.6.egg/celery/task/trace.py", line 266, in trace_task on_chord_part_return(task) "/usr/local/lib/python2.6/dist-packages/celery-3.0.11-py2.6.egg/celery/backends/base.py", line 475, in on_chord_part_return if val >= len(deps): TypeError: object of type 'NoneType' has no len()
Traceback (most recent call last): "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/exceptions.py", line 591, in conda_exception_handler return_value = func(*args, **kwargs) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 134, in _main exit_code = args.func(args, p) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/cli/main_update.py", line 65, in execute install(args, parser, 'update') "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/cli/install.py", line 359, in install execute_actions(actions, index, verbose=not context.quiet) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/plan.py", line 823, in execute_actions execute_instructions(plan, index, verbose) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/instructions.py", line 247, in execute_instructions cmd(state, arg) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/instructions.py", line 107, in UNLINKLINKTRANSACTION_CMD txn = UnlinkLinkTransaction.create_from_dists(index, prefix, unlink_dists, link_dists) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/core/link.py", line 123, in create_from_dists for dist, pkg_dir in zip(link_dists, pkg_dirs_to_link)) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/core/link.py", line 123, in <genexpr> for dist, pkg_dir in zip(link_dists, pkg_dirs_to_link)) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/gateways/disk/read.py", line 86, in read_package_info index_json_record = read_index_json(extracted_package_directory) "/nfs/slac/kipac/fs1/u/awright/anaconda/lib/python2.7/site-packages/conda/gateways/disk/read.py", line 105, in read_index_json with open(join(extracted_package_directory, 'info', 'index.json')) as fi: IOError: [Errno 2] No such file or directory: u'/nfs/slac/kipac/fs1/u/awright/anaconda/pkgs/conda-4.3.23-py27_0/info/index.json'
0.323814
Traceback (most recent call last): "/usr/local/lib/python3.5/dist-packages/pysc2/lib/protocol.py", line 151, in send res = self.send_req(req) "/usr/local/lib/python3.5/dist-packages/pysc2/lib/protocol.py", line 131, in send_req return self.read() "/usr/local/lib/python3.5/dist-packages/pysc2/lib/stopwatch.py", line 212, in _stopwatch return func(*args, **kwargs) "/usr/local/lib/python3.5/dist-packages/pysc2/lib/protocol.py", line 102, in read response = self._read() "/usr/local/lib/python3.5/dist-packages/pysc2/lib/protocol.py", line 183, in _read response_str = self._sock.recv() "/usr/lib/python3.5/contextlib.py", line 77, in __exit__ self.gen.throw(type, value, traceback) "/usr/local/lib/python3.5/dist-packages/pysc2/lib/protocol.py", line 71, in catch_websocket_connection_errors raise ConnectionError("Websocket timed out.") pysc2.lib.protocol.ConnectionError: Websocket timed out.
Traceback (most recent call last): "/usr/share/w3af/w3af/core/ui/gui/main.py", line 546, in _scan_director func() "/usr/share/w3af/w3af/core/ui/gui/main.py", line 591, in _scan_start if not self.save_state_to_core(): "/usr/share/w3af/w3af/core/ui/gui/main.py", line 556, in save_state_to_core self.w3af.plugins.set_plugins([], ptype) "/usr/share/w3af/w3af/core/controllers/core_helpers/plugins.py", line 182, in set_plugins set_dict[plugin_type](plugin_names) "/usr/share/w3af/w3af/core/controllers/core_helpers/plugins.py", line 375, in _set_evasion_plugins self.plugin_factory() "/usr/share/w3af/w3af/core/controllers/core_helpers/plugins.py", line 358, in plugin_factory self.order_plugins() "/usr/share/w3af/w3af/core/controllers/core_helpers/plugins.py", line 325, in order_plugins plugin_type].index(plugin_name) ValueError: 'fingerprint_WAF' is not in list
0.308705
Traceback (most recent call last): "youtube_dl\YoutubeDL.pyo", line 665, in extract_info "youtube_dl\extractor\common.pyo", line 291, in extract "youtube_dl\extractor\generic.pyo", line 1908, in _real_extract UnsupportedError: Unsupported URL: http://www.mtv.com/full-episodes/0ghr35/teen-mom-why-can-t-we-just-get-along-season-5-ep-511
Traceback (most recent call last): "c:\Python27\lib\site-packages\nose\case.py", line 197, in runTest self.test(*self.arg) "c:\documents and settings\jdm\ipython\IPython\core\tests\test_run.py", line 250, in test_unicode tt.assert_equals(_ip.user_ns['u'], u'Ўт№Ф') KeyError: 'u'
0.193236
Traceback (most recent call last): "/home/mike/code/reddit-media-downloader3/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1249, in _execute_context cursor, statement, parameters, context "/home/mike/code/reddit-media-downloader3/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 580, in do_execute cursor.execute(statement, parameters) sqlite3.OperationalError: locking protocol
Traceback (most recent call last): "/home/philip/projects/sapling/forks/django-tastypie-philipn/tests/content_gfk/tests/fields.py", line 60, in test_resource_from_uri '/api/v1/notes/%s/' % note_2.pk "/home/philip/projects/sapling/forks/django-tastypie-philipn/tastypie/contrib/contenttypes/fields.py", line 45, in resource_from_uri obj = fk_resource.get_via_uri(uri, request=request) "/home/philip/projects/sapling/forks/django-tastypie-philipn/tastypie/contrib/contenttypes/resources.py", line 33, in get_via_uri return parent_resource.obj_get(**self.remove_api_resource_names(kwargs)) AttributeError: 'function' object has no attribute 'obj_get'
0.189699
Traceback (most recent call last): "C:\programdata\miniconda3\lib\logging\__init__.py", line 1085, in emit self.flush() "C:\programdata\miniconda3\lib\logging\__init__.py", line 1065, in flush self.stream.flush() OSError: [Errno 9] Bad file descriptor
Traceback (most recent call last): "/usr/share/w3af/w3af/core/ui/gui/tabs/log/stats.py", line 111, in update new_text = self.generate_text(item, set_defaults) "/usr/share/w3af/w3af/core/ui/gui/tabs/log/stats.py", line 118, in generate_text value = getattr(self.w3af.status, 'get_%s' % item.getter)() "/usr/share/w3af/w3af/core/controllers/core_helpers/status.py", line 252, in get_audit_eta eta_minutes = current_size / speed TypeError: unsupported operand type(s) for /: 'NoneType' and 'float'
0.215679
Traceback (most recent call last): "/app/medusa/medusa/db.py", line 113, in _execute sql_results = cursor.execute(query) sqlite3.OperationalError: no such table: scene_exceptions
Traceback (most recent call last): "/usr/local/lib/python2.7/dist-packages/binwalk/core/module.py", line 565, in main retval = self.run() "/usr/local/lib/python2.7/dist-packages/binwalk/modules/signature.py", line 177, in run self.scan_file(fp) "/usr/local/lib/python2.7/dist-packages/binwalk/modules/signature.py", line 160, in scan_file self.result(r=r) "/usr/local/lib/python2.7/dist-packages/binwalk/core/module.py", line 454, in result getattr(self, dependency.attribute).callback(r) "/usr/local/lib/python2.7/dist-packages/binwalk/modules/extractor.py", line 165, in callback (extraction_directory, dd_file, scan_extracted_files) = self.extract(r.offset, r.description, r.file.path, size, r.name) "/usr/local/lib/python2.7/dist-packages/binwalk/modules/extractor.py", line 450, in extract fname = self._dd(file_path, offset, size, rule['extension'], output_file_name=name) "/usr/local/lib/python2.7/dist-packages/binwalk/modules/extractor.py", line 632, in _dd binwalk.core.common.debug("Carved data block 0x%X - 0x%X from '%s' to '%s'" % (offset, offset+size, file_name, fname))UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 55: ordinal not in range(128)
0.214703
Traceback (most recent call last): "/usr/local/lib/python2.7/dist-packages/tornado-4.5.3-py2.7-linux-x86_64.egg/tornado/web.py", line 1510, in _execute result = method(*self.path_args, **self.path_kwargs) "/usr/local/lib/python2.7/dist-packages/wifiphisher-1.4-py2.7.egg/wifiphisher/common/phishinghttp.py", line 127, in post post_data = tornado.escape.url_unescape(self.request.body) "/usr/local/lib/python2.7/dist-packages/tornado-4.5.3-py2.7-linux-x86_64.egg/tornado/escape.py", line 135, in url_unescape return unicode_type(unquote(utf8(value)), encoding) UnicodeDecodeError: 'utf8' codec can't decode byte 0xd4 in position 8: invalid continuation byte
Traceback (most recent call last): "/usr/local/lib/python2.7/site-packages/awslogs/bin.py", line 178, in main getattr(logs, options.func)() "/usr/local/lib/python2.7/site-packages/awslogs/core.py", line 230, in list_groups for group in self.get_groups(): "/usr/local/lib/python2.7/site-packages/awslogs/core.py", line 244, in get_groups for page in paginator.paginate(**kwargs): "/usr/local/lib/python2.7/site-packages/botocore/paginate.py", line 255, in __iter__ response = self._make_request(current_kwargs) "/usr/local/lib/python2.7/site-packages/botocore/paginate.py", line 332, in _make_request return self._method(**current_kwargs) "/usr/local/lib/python2.7/site-packages/botocore/client.py", line 357, in _api_call return self._make_api_call(operation_name, kwargs) "/usr/local/lib/python2.7/site-packages/botocore/client.py", line 648, in _make_api_call operation_model, request_dict, request_context) "/usr/local/lib/python2.7/site-packages/botocore/client.py", line 667, in _make_request return self._endpoint.make_request(operation_model, request_dict) "/usr/local/lib/python2.7/site-packages/botocore/endpoint.py", line 102, in make_request return self._send_request(request_dict, operation_model) "/usr/local/lib/python2.7/site-packages/botocore/endpoint.py", line 137, in _send_request success_response, exception): "/usr/local/lib/python2.7/site-packages/botocore/endpoint.py", line 231, in _needs_retry caught_exception=caught_exception, request_dict=request_dict) "/usr/local/lib/python2.7/site-packages/botocore/hooks.py", line 356, in emit return self._emitter.emit(aliased_event_name, **kwargs) "/usr/local/lib/python2.7/site-packages/botocore/hooks.py", line 228, in emit return self._emit(event_name, kwargs) "/usr/local/lib/python2.7/site-packages/botocore/hooks.py", line 211, in _emit response = handler(**kwargs) "/usr/local/lib/python2.7/site-packages/botocore/retryhandler.py", line 183, in __call__ if self._checker(attempts, response, caught_exception): "/usr/local/lib/python2.7/site-packages/botocore/retryhandler.py", line 251, in __call__ caught_exception) "/usr/local/lib/python2.7/site-packages/botocore/retryhandler.py", line 277, in _should_retry return self._checker(attempt_number, response, caught_exception) "/usr/local/lib/python2.7/site-packages/botocore/retryhandler.py", line 317, in __call__ caught_exception) "/usr/local/lib/python2.7/site-packages/botocore/retryhandler.py", line 223, in __call__ attempt_number, caught_exception) "/usr/local/lib/python2.7/site-packages/botocore/retryhandler.py", line 359, in _check_caught_exception raise caught_exception EndpointConnectionError: Could not connect to the endpoint URL: "https://logs.Frankfurt.amazonaws.com/"
0.306494
Traceback (most recent call last): "/home/travis/build/matthew-brett/manylinux-testing/venv/lib/python2.7/site-packages/pandas/tseries/tests/test_period.py", line 2660, in test_range_slice_seconds idx[v:] "/home/travis/build/matthew-brett/manylinux-testing/venv/lib/python2.7/site-packages/pandas/tseries/base.py", line 175, in __getitem__ result = getitem(key)IndexError: failed to coerce slice entry of type str to integer
Traceback (most recent call last): "/usr/local/lib/python2.7/site-packages/beets/plugins.py", line 253, in load_plugins namespace = __import__(modname, None, None) "/usr/local/lib/python2.7/site-packages/beetsplug/web/__init__.py", line 23, in <module> import flaskImportError: No module named flask
0.204117
Traceback (most recent call last): "/usr/local/lib/python2.7/site-packages/nose/case.py", line 197, in runTest self.test(*self.arg) "/Users/rgommers/Code/bldscipy/scipy/spatial/tests/test_qhull.py", line 310, in test_more_barycentric_transforms grid = np.c_[list(map(np.ravel, np.broadcast_arrays(*np.ix_(*([x]*ndim)))))].T "/Users/rgommers/Code/numpy/numpy/lib/index_tricks.py", line 77, in ix_ raise ValueError("Cross index must be 1 dimensional")ValueError: Cross index must be 1 dimensional
Traceback (most recent call last): "/usr/lib/python3.6/shutil.py", line 544, in move os.rename(src, real_dst) OSError: [Errno 18] Invalid cross-device link: '/home/florian/.local/share/qutebrowser/qutebrowser/QtWebEngine/Default' -> '/tmp/qutebrowser-basedir-whxniira/data/webengine'
0.25929
Traceback (most recent call last): "/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py", line 776, in extract_info ie_result = ie.extract(url) "/usr/local/bin/youtube-dl/youtube_dl/extractor/common.py", line 434, in extract ie_result = self._real_extract(url) "/usr/local/bin/youtube-dl/youtube_dl/extractor/mixcloud.py", line 110, in _real_extract play_info = self._decrypt_play_info(encrypted_play_info, track_id) "/usr/local/bin/youtube-dl/youtube_dl/extractor/mixcloud.py", line 74, in _decrypt_play_info video_id) "/usr/local/bin/youtube-dl/youtube_dl/extractor/common.py", line 695, in _parse_json raise ExtractorError(errmsg, cause=ve) ExtractorError: genia-passion-and-shadow-sacred-circle-dance-aug-13-2017: Failed to parse JSON (caused by ValueError('Extra data: line 1 column 2 - line 2 column 101 (char 1 - 217)',)); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; type youtube-dl -U to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
Traceback (most recent call last): "<stdin>", line 2, in <module> ".venv/lib/python3.6/site-packages/redis/lock.py", line 150, in __exit__ self.release() ".venv/lib/python3.6/site-packages/redis/lock.py", line 226, in release self.do_release(expected_token) ".venv/lib/python3.6/site-packages/redis/lock.py", line 231, in do_release client=self.redis)): ".venv/lib/python3.6/site-packages/redis/client.py", line 3581, in __call__ return client.evalsha(self.sha, len(keys), *args) ".venv/lib/python3.6/site-packages/redis/client.py", line 2761, in evalsha return self.execute_command('EVALSHA', sha, numkeys, *keys_and_args) ".venv/lib/python3.6/site-packages/redis/client.py", line 774, in execute_command connection.send_command(*args) ".venv/lib/python3.6/site-packages/redis/connection.py", line 620, in send_command self.send_packed_command(self.pack_command(*args)) ".venv/lib/python3.6/site-packages/redis/connection.py", line 600, in send_packed_command self._sock.sendall(item) ".venv/lib/python3.6/site-packages/fakeredis/_server.py", line 778, in sendall self._parser.send(data) ".venv/lib/python3.6/site-packages/fakeredis/_server.py", line 699, in _parse_commands self._process_command(fields) ".venv/lib/python3.6/site-packages/fakeredis/_server.py", line 797, in _process_command result = self._run_command(func, sig, fields[1:], False) ".venv/lib/python3.6/site-packages/fakeredis/_server.py", line 715, in _run_command result = func(*args) ".venv/lib/python3.6/site-packages/fakeredis/_server.py", line 2364, in evalsha return self.eval(script, numkeys, *keys_and_args) ".venv/lib/python3.6/site-packages/fakeredis/_server.py", line 2317, in eval from lupa import LuaRuntime, LuaError ModuleNotFoundError: No module named 'lupa'
0.218309
Traceback (most recent call last): "pickle_.py", line 42, in ? p=pickle.dumps(j) "/usr/lib/python2.4/copy_reg.py", line 69, in _reduce_ex raise TypeError, "can't pickle %s objects" % base.__name__ TypeError: can't pickle function objects
Traceback (most recent call last): "<stdin>", line 1, in <module> "/V…/lib/python3.5/site-packages/babel/core.py", line 168, in __init__ raise UnknownLocaleError(identifier) babel.core.UnknownLocaleError: unknown locale 'zh_CN'
0.289328
Traceback (most recent call last): "/home/gus/.Envs/python3_kivy/bin/garden", line 189, in <module> GardenTool().main(sys.argv[1:]) "/home/gus/.Envs/python3_kivy/bin/garden", line 71, in main options.func() "/home/gus/.Envs/python3_kivy/bin/garden", line 109, in cmd_install fd = self.download(opts.package) "/home/gus/.Envs/python3_kivy/bin/garden", line 170, in download data += bufTypeError: Can't convert 'bytes' object to str implicitly
Traceback (most recent call last): "/usr/local/bin/youtube-dl", line 11, in <module> sys.exit(main()) "/usr/local/lib/python2.7/dist-packages/youtube_dl/__init__.py", line 474, in main _real_main(argv) "/usr/local/lib/python2.7/dist-packages/youtube_dl/__init__.py", line 464, in _real_main retcode = ydl.download(all_urls) "/usr/local/lib/python2.7/dist-packages/youtube_dl/YoutubeDL.py", line 2019, in download url, force_generic_extractor=self.params.get('force_generic_extractor', False)) "/usr/local/lib/python2.7/dist-packages/youtube_dl/YoutubeDL.py", line 797, in extract_info ie_result = ie.extract(url) "/usr/local/lib/python2.7/dist-packages/youtube_dl/extractor/common.py", line 530, in extract ie_result = self._real_extract(url) "/usr/local/lib/python2.7/dist-packages/youtube_dl/extractor/xtube.py", line 110, in _real_extract for format_id, format_url in sources.items(): AttributeError: 'NoneType' object has no attribute 'items'
0.325432
Traceback (most recent call last): "calibre_plugins.dedrm.__init__", line 474, in PDFDecrypt "calibre_plugins.dedrm.ineptpdf", line 2193, in decryptBook "calibre_plugins.dedrm.ineptpdf", line 1999, in __init__ "calibre_plugins.dedrm.ineptpdf", line 1485, in initialize PDFEncryptionError: Unknown filter: param={'DUID': 'xxxxx.xxx.5afd2940175e77.58821786', 'BUILD': '907', 'SVID': 'xxxxx.xxx', 'V': 2, 'VEID': '9.0', 'Filter': /FOPN#5ffoweb, 'Length': 128, 'INFO': 'HgR50GSLkqXShHKestPel1/ocyoslBDzOQxbbo1igGDzJg3a0iTM9nsUYTCH8SDI+iThsmBnq8p1Avl7/T8csX+A52oYDeVaK8Etr2C61PI3zscTE1SGmSY7ePtDzNnmVrUWPhu8NGVdt0jud66y0YwJpdG0AD6E0wqvw7sZe83+gJz+Sz1glzPY29OhZmLCQaTjDhtYRjh2oA759PQWm0BG8m7LEesHb3BxjIwWTnBVn2DNjNUBy5AIRjfodrsNBx/ly+lzkHwAiolduv7wGXVwNWth1mpWEAKV3P6kZ8NIwkss7iwNcxd8IGqf+Xr0OQVb9qSfg8Bz+4lzHhqHxCFemCvrY4FDzJiOFcqAJ+WqSP4DjeyTIRYUWSxlB3NcLtaXabVHqTCg8+Gsq/GURbAnguCzNDcP1zf9dT6QSyFNsfHFCuBdaAPs6VN1GYQW6ohnbONfKW2ovM2zF44YJqwE8IFsHkrZOmeEVe4E7ps5sF8Fi1F7V7+GCtQgSr0KjZIK8gq7lVXE4sLa6zARDDbqGsMEB7bGFYrwQ4c86D9xGuY6Xm9aQlVY+x42LovQRNUp6WIlMK9mOwwj9B6SGBPxOWt48YbR5FfcffrEEGQcxAMlM6i3zuzqfES6lIBz1IIDU78Dd/LwoxYFihAJK0uS6sfYE/a8UAgBZxhizdEkRVjJGmq3ULqQS6v2OKKzTdtlJj52Zg=='}
Traceback (most recent call last): "set_timezone.py", line 9, in <module> tz = tz_lookup.reverse_timezone((lat, lon)) "/usr/local/lib/python2.7/dist-packages/geopy/geocoders/geonames.py", line 301, in reverse_timezone return self._parse_json_timezone(response) "/usr/local/lib/python2.7/dist-packages/geopy/geocoders/geonames.py", line 304, in _parse_json_timezone return from_timezone_name(response["timezoneId"], raw=response) KeyError: 'timezoneId'
0.271565
Traceback (most recent call last): "/home/baal/bin/python/zeus-scanner/lib/attacks/gist_lookup/__init__.py", line 153, in github_gist_search_main gist, gist_link = get_raw_html(link, verbose=verbose)TypeError: 'NoneType' object is not iterable
Traceback (most recent call last): "/home/pootle/www/pootle/env/bin/pootle", line 9, in <module> load_entry_point('Pootle==2.6.0-rc2', 'console_scripts', 'pootle')() "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/pootle/runner.py", line 171, in main django_settings_module='pootle.settings') "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/pootle/runner.py", line 150, in run_app management.execute_from_command_line([runner_name] + remainder) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 399, in execute_from_command_line utility.execute() "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/core/management/__init__.py", line 392, in execute self.fetch_command(subcommand).run_from_argv(self.argv) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/core/management/base.py", line 242, in run_from_argv self.execute(*args, **options.__dict__) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/core/management/base.py", line 285, in execute output = self.handle(*args, **options) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/management/commands/migrate.py", line 111, in handle ignore_ghosts = ignore_ghosts, "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/__init__.py", line 220, in migrate_app success = migrator.migrate_many(target, workplan, database) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/migrators.py", line 256, in migrate_many result = migrator.__class__.migrate_many(migrator, target, migrations, database) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/migrators.py", line 331, in migrate_many result = self.migrate(migration, database) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/migrators.py", line 133, in migrate result = self.run(migration, database) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/migrators.py", line 114, in run return self.run_migration(migration, database) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/migrators.py", line 84, in run_migration migration_function() "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/migration/migrators.py", line 60, in <lambda> return (lambda: direction(orm)) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/pootle/apps/pootle_translationproject/migrations/0002_auto__del_field_translationproject_description_html__chg_field_transla.py", line 12, in forwards db.delete_column('pootle_app_translationproject', 'description_html') "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/db/generic.py", line 54, in _column_rm return func(self, table, column, *args, **opts) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/db/generic.py", line 911, in delete_column self.execute(self.delete_column_string % params, []) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/south/db/generic.py", line 282, in execute cursor.execute(sql, params) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/db/backends/util.py", line 53, in execute return self.cursor.execute(sql, params) "/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/db/utils.py", line 99, in __exit__ six.reraise(dj_exc_type, dj_exc_value, traceback) 
"/home/pootle/www/pootle/env/local/lib/python2.7/site-packages/django/db/backends/util.py", line 53, in execute return self.cursor.execute(sql, params)django.db.utils.ProgrammingError: ERREUR: la colonne « description_html » de la relation « pootle_app_translationproject » n'existe pas
0.149442
Traceback (most recent call last): "setup.py", line 333, in <module> **keywords_with_side_effects(sys.argv) "/usr/lib/python3.5/distutils/core.py", line 148, in setup dist.run_commands() "/usr/lib/python3.5/distutils/dist.py", line 955, in run_commands self.run_command(cmd) "/usr/lib/python3.5/distutils/dist.py", line 973, in run_command cmd_obj.ensure_finalized() "/usr/lib/python3.5/distutils/cmd.py", line 107, in ensure_finalized self.finalize_options() "setup.py", line 101, in finalize_options [sys.executable, "setup.py", "install"], cwd="vectors" "/usr/lib/python3.5/subprocess.py", line 581, in check_call raise CalledProcessError(retcode, cmd)subprocess.CalledProcessError: Command '['/usr/bin/python3.5', 'setup.py', 'install']' returned non-zero exit status 1
"Traceback (most recent call last): "/home/frappe/frappe-bench/apps/frappe/frappe/desk/form/save.py", line 19, in savedocs doc.submit() "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 841, in submit self._submit() "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 830, in _submit self.save() "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 260, in save return self._save(*args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 311, in _save self.run_post_save_methods() "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 901, in run_post_save_methods self.run_method("on_submit") "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 765, in run_method out = Document.hook(fn)(self, *args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 1040, in composer return composed(self, method, *args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 1023, in runner add_to_return_value(self, fn(self, *args, **kwargs)) "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 759, in <lambda> fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs) "/home/frappe/frappe-bench/apps/erpnext/erpnext/accounts/doctype/purchase_invoice/purchase_invoice.py", line 318, in on_submit self.make_gl_entries() "/home/frappe/frappe-bench/apps/erpnext/erpnext/accounts/doctype/purchase_invoice/purchase_invoice.py", line 327, in make_gl_entries gl_entries = self.get_gl_entries() "/home/frappe/frappe-bench/apps/erpnext/erpnext/accounts/doctype/purchase_invoice/purchase_invoice.py", line 355, in get_gl_entries self.make_item_gl_entries(gl_entries) "/home/frappe/frappe-bench/apps/erpnext/erpnext/accounts/doctype/purchase_invoice/purchase_invoice.py", line 391, in make_item_gl_entries warehouse_account = get_warehouse_account_map() "/home/frappe/frappe-bench/apps/erpnext/erpnext/stock/__init__.py", line 22, in get_warehouse_account_map d.account = get_warehouse_account(d, warehouse_account) "/home/frappe/frappe-bench/apps/erpnext/erpnext/stock/__init__.py", line 36, in get_warehouse_account account = warehouse_account.get(warehouse.parent_warehouse).account AttributeError: 'NoneType' object has no attribute 'account'
0.303455
Traceback (most recent call last): "/home/pmironchyk/anaconda3/envs/py34/lib/python3.4/site-packages/zipline/utils/cache.py", line 225, in **getitem** with open(self._keypath(key), 'rb') as f: NotFoundError: [Errno 2] No such file or directory: '/home/pmironchyk/.zipline/data/quandl/.cache/TEST123456789'
Traceback (most recent call last): "/usr/local/lib/python3.8/site-packages/requests/adapters.py", line 439, in send resp = conn.urlopen( "/usr/local/lib/python3.8/site-packages/urllib3/connectionpool.py", line 755, in urlopen retries = retries.increment( "/usr/local/lib/python3.8/site-packages/urllib3/util/retry.py", line 531, in increment raise six.reraise(type(error), error, _stacktrace) "/usr/local/lib/python3.8/site-packages/urllib3/packages/six.py", line 734, in reraise raise value.with_traceback(tb) "/usr/local/lib/python3.8/site-packages/urllib3/connectionpool.py", line 699, in urlopen httplib_response = self._make_request( "/usr/local/lib/python3.8/site-packages/urllib3/connectionpool.py", line 394, in _make_request conn.request(method, url, **httplib_request_kw) "/usr/local/lib/python3.8/http/client.py", line 1255, in request self._send_request(method, url, body, headers, encode_chunked) "/usr/local/lib/python3.8/http/client.py", line 1301, in _send_request self.endheaders(body, encode_chunked=encode_chunked) "/usr/local/lib/python3.8/http/client.py", line 1250, in endheaders self._send_output(message_body, encode_chunked=encode_chunked) "/usr/local/lib/python3.8/http/client.py", line 1010, in _send_output self.send(msg) "/usr/local/lib/python3.8/http/client.py", line 950, in send self.connect() "/usr/local/lib/python3.8/site-packages/docker/transport/unixconn.py", line 43, in connect sock.connect(self.unix_socket) urllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionRefusedError(111, 'Connection refused'))
0.259041
linderlake@rbpi:~/svtplaydl $ Traceback (most recent call last): "/usr/bin/svtplay-dl", line 4, in <module> import svtplay_dl ImportError: No module named 'svtplay_dl'
Original Traceback (most recent call last): "my_project/lib/python3.5/site-packages/torch/utils/data/_utils/worker.py", line 178, in _worker_loop data = fetcher.fetch(index) "my_project/lib/python3.5/site-packages/torch/utils/data/_utils/fetch.py", line 44, in fetch data = [self.dataset[idx] for idx in possibly_batched_index] "/my_project/lib/python3.5/site-packages/torch/utils/data/_utils/fetch.py", line 44, in <listcomp> data = [self.dataset[idx] for idx in possibly_batched_index] "yolov3/utils/datasets.py", line 416, in __getitem__ img, labels = load_mosaic(self, index) "yolov3/utils/datasets.py", line 593, in load_mosaic labels4 = np.concatenate(labels4, 0) "<__array_function__ internals>", line 6, in concatenate ValueError: all the input arrays must have same number of dimensions, but the array at index 0 has 2 dimension(s) and the array at index 2 has 1 dimension(s)
0.265548
Traceback (most recent call last): "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/YoutubeDL.py", line 796, in extract_info ie_result = ie.extract(url) "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/extractor/common.py", line 528, in extract self.initialize() "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/extractor/common.py", line 432, in initialize self._real_initialize() "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/extractor/safari.py", line 27, in _real_initialize self._login() "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/extractor/safari.py", line 51, in _login login_page, 'csrf token') "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/extractor/common.py", line 1013, in _html_search_regex res = self._search_regex(pattern, string, name, default, fatal, flags, group) "/home/alex/.local/lib/python3.4/site-packages/youtube_dl/extractor/common.py", line 1004, in _search_regex raise RegexNotFoundError('Unable to extract %s' % _name) youtube_dl.utils.RegexNotFoundError: Unable to extract csrf token; please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
Traceback (most recent call last): "/home/zhouhao/.local/lib/python3.6/site-packages/ray/tune/trial_runner.py", line 239, in _process_events result = self.trial_executor.fetch_result(trial) "/home/zhouhao/.local/lib/python3.6/site-packages/ray/tune/ray_trial_executor.py", line 200, in fetch_result result = ray.get(trial_future[0]) "/home/zhouhao/.local/lib/python3.6/site-packages/ray/worker.py", line 2522, in get raise RayGetError(object_ids, value) ray.worker.RayGetError: Could not get objectid ObjectID(4145c787694f8b5cf1a4d9d4b71133906e38381b). It was created by remote function train which failed with:
0.182412
Traceback (most recent call last): "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1817, in wsgi_app response = self.full_dispatch_request() "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1477, in full_dispatch_request rv = self.handle_user_exception(e) "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1381, in handle_user_exception reraise(exc_type, exc_value, tb) "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1475, in full_dispatch_request rv = self.dispatch_request() "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1461, in dispatch_request return self.view_functions[rule.endpoint](**req.view_args) "/usr/local/lib/python2.7/dist-packages/docker_registry/toolkit.py", line 277, in wrapper return f(namespace=namespace, repository=repository, *args, **kwargs) "/usr/local/lib/python2.7/dist-packages/docker_registry/toolkit.py", line 251, in wrapper return f(*args, **kwargs) "/usr/local/lib/python2.7/dist-packages/docker_registry/index.py", line 98, in put_repository update_index_images(namespace, repository, flask.request.data) "/usr/local/lib/python2.7/dist-packages/docker_registry/index.py", line 60, in update_index_images data = json.loads(data_arg.decode('utf8')) + store.get_json(path) "/usr/local/lib/python2.7/dist-packages/docker_registry/core/driver.py", line 185, in get_json return json.loads(self.get_unicode(path)) "/usr/local/lib/python2.7/dist-packages/docker_registry/core/driver.py", line 191, in get_unicode return self.get_bytes(path).decode('utf8') "/usr/local/lib/python2.7/dist-packages/docker_registry/core/driver.py", line 197, in get_bytes return self.get_content(path) "/usr/local/lib/python2.7/dist-packages/docker_registry/drivers/s3.py", line 152, in get_content return super(Storage, self).get_content(path) "/usr/local/lib/python2.7/dist-packages/docker_registry/core/boto.py", line 220, in get_content if not key.exists(): "/usr/local/lib/python2.7/dist-packages/boto/s3/key.py", line 537, in exists return bool(self.bucket.lookup(self.name, headers=headers)) "/usr/local/lib/python2.7/dist-packages/boto/s3/bucket.py", line 142, in lookup return self.get_key(key_name, headers=headers) "/usr/local/lib/python2.7/dist-packages/boto/s3/bucket.py", line 192, in get_key key, resp = self._get_key_internal(key_name, headers, query_args_l) "/usr/local/lib/python2.7/dist-packages/boto/s3/bucket.py", line 199, in _get_key_internal query_args=query_args) "/usr/local/lib/python2.7/dist-packages/boto/s3/connection.py", line 664, in make_request retry_handler=retry_handler "/usr/local/lib/python2.7/dist-packages/boto/connection.py", line 1053, in make_request retry_handler=retry_handler) "/usr/local/lib/python2.7/dist-packages/boto/connection.py", line 911, in _mexe request.authorize(connection=self) "/usr/local/lib/python2.7/dist-packages/boto/connection.py", line 375, in authorize connection._auth_handler.add_auth(self, **kwargs) "/usr/local/lib/python2.7/dist-packages/boto/auth.py", line 675, in add_auth return super(S3HmacAuthV4Handler, self).add_auth(req, **kwargs) "/usr/local/lib/python2.7/dist-packages/boto/auth.py", line 511, in add_auth canonical_request = self.canonical_request(req) "/usr/local/lib/python2.7/dist-packages/boto/auth.py", line 402, in canonical_request cr.append(self.canonical_headers(headers_to_sign) + '\n') "/usr/local/lib/python2.7/dist-packages/boto/auth.py", line 363, in canonical_headers if '"' in raw_value:TypeError: argument of type 'int' is not iterable
Traceback (most recent call last): "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\spyder\plugins\completion\kite\client.py", line 111, in get_status kite_status = status() "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\spyder\plugins\completion\kite\utils\status.py", line 93, in status elif check_if_kite_running(): "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\spyder\plugins\completion\kite\utils\status.py", line 42, in check_if_kite_running 'status']): "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\__init__.py", line 1457, in process_iter yield add(pid) "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\__init__.py", line 1432, in add proc = Process(pid) "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\__init__.py", line 346, in __init__ self._init(pid) "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\__init__.py", line 373, in _init self.create_time() "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\__init__.py", line 723, in create_time self._create_time = self._proc.create_time() "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\_pswindows.py", line 681, in wrapper raise convert_oserror(err, pid=self.pid, name=self._name) "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\_pswindows.py", line 671, in convert_oserror raise exc "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\_pswindows.py", line 679, in wrapper return fun(self, *args, **kwargs) "C:\Users\Jaime\anaconda3\envs\PIT2020II\lib\site-packages\psutil\_pswindows.py", line 933, in create_time user, system, created = cext.proc_times(self.pid)OSError: [WinError 0] La operación se completó correctamente: '(originated from OpenProcess)'
0.254117
"exception": "Traceback (most recent call last):\n \"/usr/local/lib/python3.7/site-packages/scenario_player/tasks/base.py\", line 74, in __call__\n return self._run(*args, **kwargs)\n \"/usr/local/lib/python3.7/site-packages/scenario_player/tasks/api_base.py\", line 59, in _run\n f'HTTP status code \"{resp.status_code}\" while fetching {url}. '\nscenario_player.exceptions.legacy.RESTAPIStatusMismatchError: HTTP status code \"409\" while fetching http://127.0.0.1:37689/api/v1/payments/0x62083c80353Df771426D209eF578619EE68D5C7A/0x41bee2A55BE225C626480F6F3DB020fFc84B492f. Expected 200: {\"errors\": \"Payment couldn't be completed because: there is no route available\"}" }
Traceback (most recent call last): "/srv/conda/envs/notebook/lib/python3.8/site-packages/notebook/notebookapp.py", line 1942, in init_server_extensions mod = importlib.import_module(modulename) "/srv/conda/envs/notebook/lib/python3.8/importlib/__init__.py", line 127, in import_module return _bootstrap._gcd_import(name[level:], package, level) "<frozen importlib._bootstrap>", line 1014, in _gcd_import "<frozen importlib._bootstrap>", line 991, in _find_and_load "<frozen importlib._bootstrap>", line 973, in _find_and_load_unlocked ModuleNotFoundError: No module named 'jupyter_offlinenotebook'
0.258978
Traceback (most recent call last): "/home/vagrant/.virtualenvs/proj/src/django/django/db/backends/utils.py", line 64, in execute return self.cursor.execute(sql, params)psycopg2.ProgrammingError: can't adapt type 'JSONDict'
Traceback (most recent call last): "/builds/databiosphere/toil/src/toil/test/batchSystems/batchSystemTest.py", line 556, in test self.assertEqual(maxConcurrentTasks, expectedMaxConcurrentTasks) "/usr/lib/python3.7/unittest/case.py", line 852, in assertEqual assertion_func(first, second, msg=msg) "/usr/lib/python3.7/unittest/case.py", line 845, in _baseAssertEqual raise self.failureException(msg)AssertionError: 19 != 20
0.216145
Traceback (most recent call last): "/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py", line 1942, in post_process files_to_delete, info = pp.run(info) "/usr/local/bin/youtube-dl/youtube_dl/postprocessor/ffmpeg.py", line 241, in run raise PostProcessingError('WARNING: unable to obtain file audio codec with ffprobe') PostProcessingError: WARNING: unable to obtain file audio codec with ffprobe
Traceback (most recent call last): "/home/software/pycharm/helpers/pydev/pydevd.py", line 1580, in <module> globals = debugger.run(setup['file'], None, None, is_module) "/home/software/pycharm/helpers/pydev/pydevd.py", line 964, in run pydev_imports.execfile(file, globals, locals) # execute the script "/home/RNN/crnn.pytorch-master_demo/demo_caffe.py", line 50, in <module> plot_graph(output_var, os.path.join(caffemodel_dir, 'pytorch_graph.dot')) "/home/RNN/crnn.pytorch-master_demo/pytorch2caffe.py", line 380, in plot_graph (graph,) = pydot.graph_from_dot_file(fname) TypeError: 'Dot' object is not iterable
0.316088
Traceback (most recent call last): "/usr/src/homeassistant/homeassistant/util/json.py", line 31, in load_json with open(filename, encoding="utf-8") as fdesc: NotADirectoryError: [Errno 20] Not a directory: '/config/.storage/hacs/120370503.hacs'
Traceback (most recent call last): "/usr/local/lib/python3.6/site-packages/sanic/server.py", line 339, in write_response self.keep_alive_timeout)) "uvloop/handles/stream.pyx", line 636, in uvloop.loop.UVStream.write "uvloop/handles/handle.pyx", line 165, in uvloop.loop.UVHandle._ensure_alive RuntimeError: unable to perform operation on <UnixTransport closed=True reading=False 0x7f00061252a8>; the handler is closed
0.283656
WARNING: Automatic signature extraction failed: Traceback (most recent call last): "/usr/local/bin/youtube-dl/youtube_dl/extractor/youtube.py", line 879, in _decrypt_signature return func(s) "/usr/local/bin/youtube-dl/youtube_dl/extractor/youtube.py", line 447, in <lambda> return lambda s: initial_function([s]) "/usr/local/bin/youtube-dl/youtube_dl/jsinterp.py", line 113, in resf res = self.interpret_statement(stmt, local_vars) "/usr/local/bin/youtube-dl/youtube_dl/jsinterp.py", line 43, in interpret_statement 'Cannot determine left side of statement in %r' % stmt)ExtractorError: Cannot determine left side of statement in u'xh(this.j,a)'; please report this issue on https://yt-dl.org/bug . Be sure to call youtube-dl with the --verbose flag and include its complete output. Make sure you are using the latest version; type youtube-dl -U to update.
Traceback (most recent call last): "test.py", line 87, in <module> df['chuncked'] = df.apply (lambda row: nltk.ne_chunk(row["posTag"], binary=True),axis=1) "/usr/local/lib/python3.5/dist-packages/pandas/core/frame.py", line 4152, in apply return self._apply_standard(f, axis, reduce=reduce) "/usr/local/lib/python3.5/dist-packages/pandas/core/frame.py", line 4265, in _apply_standard result = self._constructor(data=results, index=index) "/usr/local/lib/python3.5/dist-packages/pandas/core/frame.py", line 266, in __init__ mgr = self._init_dict(data, index, columns, dtype=dtype) "/usr/local/lib/python3.5/dist-packages/pandas/core/frame.py", line 402, in _init_dict return _arrays_to_mgr(arrays, data_names, index, columns, dtype=dtype) "/usr/local/lib/python3.5/dist-packages/pandas/core/frame.py", line 5408, in _arrays_to_mgr return create_block_manager_from_arrays(arrays, arr_names, axes) "/usr/local/lib/python3.5/dist-packages/pandas/core/internals.py", line 4267, in create_block_manager_from_arrays construction_error(len(arrays), arrays[0].shape, axes, e) "/usr/local/lib/python3.5/dist-packages/pandas/core/internals.py", line 4229, in construction_error raise e "/usr/local/lib/python3.5/dist-packages/pandas/core/internals.py", line 4262, in create_block_manager_from_arrays blocks = form_blocks(arrays, names, axes) "/usr/local/lib/python3.5/dist-packages/pandas/core/internals.py", line 4359, in form_blocks object_blocks = _simple_blockify(object_items, np.object_) "/usr/local/lib/python3.5/dist-packages/pandas/core/internals.py", line 4389, in _simple_blockify values, placement = _stack_arrays(tuples, dtype) "/usr/local/lib/python3.5/dist-packages/pandas/core/internals.py", line 4453, in _stack_arrays stacked[i] = _asarray_compat(arr) ValueError: could not broadcast input array from shape (28) into shape (15)
0.275364
Traceback (most recent call last): "/usr/local/bin/telepresence/telepresence/cli.py", line 136, in crash_reporting yield "/usr/local/bin/telepresence/telepresence/main.py", line 77, in main runner, remote_info, env, socks_port, ssh, mount_dir, pod_info "/usr/local/bin/telepresence/telepresence/outbound/setup.py", line 95, in launch mount_dir, pod_info "/usr/local/bin/telepresence/telepresence/outbound/container.py", line 155, in run_docker_command raise RuntimeError("SSH to the network container failed to start.") RuntimeError: SSH to the network container failed to start.
Traceback (most recent call last): "./manage.py", line 31, in <module> execute_from_command_line(sys.argv) "/usr/local/lib/python2.7/dist-packages/django/core/management/__init__.py", line 367, in execute_from_command_line utility.execute() "/usr/local/lib/python2.7/dist-packages/django/core/management/__init__.py", line 341, in execute django.setup() "/usr/local/lib/python2.7/dist-packages/django/__init__.py", line 27, in setup apps.populate(settings.INSTALLED_APPS) "/usr/local/lib/python2.7/dist-packages/django/apps/registry.py", line 108, in populate app_config.import_models(all_models) "/usr/local/lib/python2.7/dist-packages/django/apps/config.py", line 199, in import_models self.models_module = import_module(models_module_name) "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module __import__(name) "/usr/lib/python2.7/dist-packages/social/apps/django_app/default/models.py", line 14, in <module> from social.apps.django_app.default.fields import JSONField "/usr/lib/python2.7/dist-packages/social/apps/django_app/default/fields.py", line 14, in <module> class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)):AttributeError: 'module' object has no attribute 'SubfieldBase'
0.231211
Traceback (most recent call last): "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main "__main__", fname, loader, pkg_name) "/usr/lib/python2.7/runpy.py", line 72, in _run_code exec code in run_globals "/usr/local/lib/python2.7/dist-packages/tensorflow/models/image/mnist/convolutional.py", line 326, in <module> tf.app.run() "/usr/local/lib/python2.7/dist-packages/tensorflow/python/platform/app.py", line 30, in run sys.exit(main(sys.argv)) "/usr/local/lib/python2.7/dist-packages/tensorflow/models/image/mnist/convolutional.py", line 138, in main train_data = extract_data(train_data_filename, 60000) "/usr/local/lib/python2.7/dist-packages/tensorflow/models/image/mnist/convolutional.py", line 85, in extract_data buf = bytestream.read(IMAGE_SIZE * IMAGE_SIZE * num_images * NUM_CHANNELS) "/usr/lib/python2.7/gzip.py", line 261, in read self._read(readsize) "/usr/lib/python2.7/gzip.py", line 308, in _read self._read_eof() "/usr/lib/python2.7/gzip.py", line 347, in _read_eof hex(self.crc)))IOError: CRC check failed 0x56d16c09 != 0x39bbe345L
Traceback (most recent call last): "/root/w3af/w3af/core/ui/gui/main.py", line 547, in _scan_director func() "/root/w3af/w3af/core/ui/gui/main.py", line 646, in _scan_start self.set_tabs(True) "/root/w3af/w3af/core/ui/gui/main.py", line 793, in set_tabs self._set_tab(sensit, _("Results"), scanrun.ScanRunBody) "/root/w3af/w3af/core/ui/gui/main.py", line 799, in _set_tab newone = realWidget(self.w3af) "/root/w3af/w3af/core/ui/gui/scanrun.py", line 675, in __init__ kbbrowser = KBBrowser(w3af) "/root/w3af/w3af/core/ui/gui/scanrun.py", line 246, in __init__ description = self.get_notebook_description() "/root/w3af/w3af/core/ui/gui/scanrun.py", line 262, in get_notebook_description self.description = webkit.WebView()AttributeError: 'module' object has no attribute 'WebView'
0.314073
Traceback (most recent call last): "/opt/openstates/venv-pupa//bin/pupa", line 11, in <module> load_entry_point('pupa', 'console_scripts', 'pupa')() "/opt/openstates/venv-pupa/src/pupa/pupa/cli/__main__.py", line 68, in main subcommands[args.subcommand].handle(args, other) "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 260, in handle return self.do_handle(args, other, juris) "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 307, in do_handle report['import'] = self.do_import(juris, args) "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 217, in do_import report.update(vote_event_importer.import_directory(datadir)) "/opt/openstates/venv-pupa/src/pupa/pupa/importers/base.py", line 196, in import_directory return self.import_data(json_stream()) "/opt/openstates/venv-pupa/src/pupa/pupa/importers/base.py", line 233, in import_data obj_id, what = self.import_item(data) "/opt/openstates/venv-pupa/src/pupa/pupa/importers/base.py", line 272, in import_item raise DuplicateItemError(data, obj, related.get('sources', []))pupa.exceptions.DuplicateItemError: attempt to import data that would conflict with data already in the import: {'identifier': '', 'motion_text': 'Floor Vote', 'motion_classification': ['passage'], 'start_date': '2017-05-09T00:00:00+00:00', 'result': 'pass', 'extras': {}, 'legislative_session_id': UUID('9bb6e478-b1b5-4b61-b69d-918cc424068d'), 'organization_id': 'ocd-organization/26bb6306-85f0-4d10-bff7-d1cd5bdc0865', 'bill_id': 'ocd-bill/b7b34ebd-3c3b-433b-8d18-f46a815f3f59'} (already imported as Floor Vote on A 5025 in New York 2017 Regular Session)
Traceback (most recent call last): "/tmp/pip-install-c74kecsx/horovod/setup.py", line 88, in check_mx_version import mxnet as mx ModuleNotFoundError: No module named 'mxnet'
0.287654
Traceback (most recent call last): "/cbio/cslab/home/pastore/nobackup/bcbio/anaconda/lib/python2.7/site-packages/bcbio/provenance/do.py", line 21, in run _do_run(cmd, checks, log_stdout) "/cbio/cslab/home/pastore/nobackup/bcbio/anaconda/lib/python2.7/site-packages/bcbio/provenance/do.py", line 95, in _do_run raise subprocess.CalledProcessError(exitcode, error_msg)CalledProcessError: Command 'set -o pipefail; export VAR_DICT_OPTS='-Xms750m -Xmx3000m -XX:+UseSerialGC -Djava.io.tmpdir=/cbio/cslab/nobackup/pastore/install/bcbio-vm/data/project/PCNSL/PCNSL/work/vardict/chr4/tx/tmpVqMIsZ' && vardict-java -G /cbio/cslab/nobackup/pastore/bcbio/genomes/Hsapiens/hg19/seq/hg19.fa -f 0.1 -N s_F15_14_p10 -b "/cbio/cslab/nobackup/pastore/install/bcbio-vm/data/project/PCNSL/PCNSL/work/bamprep/s_F15_14_p10/chr4/1_2015-09-29_PCNSL-sort.nsorted.disambiguatedSpeciesA.sorted-chr4_99950017_115749091-prep.bam|/cbio/cslab/nobackup/pastore/install/bcbio-vm/data/project/PCNSL/PCNSL/work/bamprep/s_F15_Blood/chr4/4_2015-09-29_PCNSL-sort.nsorted.disambiguatedSpeciesA.sorted-chr4_99950017_115749091-prep.bam" -c 1 -S 2 -E 3 -g 4 /cbio/cslab/nobackup/pastore/install/bcbio-vm/data/project/PCNSL/PCNSL/work/vardict/chr4/F15_14_p10-chr4_99950017_115749091-raw-regions-regionlimit.bed | testsomatic.R | var2vcf_paired.pl -N "s_F15_14_p10|s_F15_Blood" -f 0.1 | bcftools filter -m '+' -s 'REJECT' -e 'STATUS !~ "._Somatic"' 2> /dev/null | sed 's/\._Somatic\/Somatic/' | sed 's/REJECT,Description="._">/REJECT,Description="Not Somatic via VarDict">/' | /cbio/cslab/home/pastore/nobackup/bcbio/anaconda/bin/py -x 'bcbio.variation.freebayes.call_somatic(x)' | awk -F$'\t' -v OFS='\t' '{if ($0 !~ /^#/) gsub(/[KMRYSWBVHDX]/, "N", $4) } {print}' | awk -F$'\t' -v OFS='\t' '$1!~/^#/ && $4 == $5 {next} {print}' | /cbio/cslab/home/pastore/nobackup/bcbio/bin/vcfstreamsort | bgzip -c > /cbio/cslab/nobackup/pastore/install/bcbio-vm/data/project/PCNSL/PCNSL/work/vardict/chr4/tx/tmpVqMIsZ/F15_14_p10-chr4_99950017_115749091-raw.vcf.gz
Traceback (most recent call last): "/share/CACHEDEV1_DATA/.qpkg/QSabNZBdPlus/SABnzbd/cherrypy/wsgiserver/__init__.py", line 2024, in start self.tick() "/share/CACHEDEV1_DATA/.qpkg/QSabNZBdPlus/SABnzbd/cherrypy/wsgiserver/__init__.py", line 2091, in tick s, ssl_env = self.ssl_adapter.wrap(s) "/share/CACHEDEV1_DATA/.qpkg/QSabNZBdPlus/SABnzbd/cherrypy/wsgiserver/ssl_builtin.py", line 67, in wrap server_side=True) "/share/CACHEDEV1_DATA/.qpkg/QSabNZBdPlus/lib/python2.7/ssl.py", line 363, in wrap_socket _context=self) "/share/CACHEDEV1_DATA/.qpkg/QSabNZBdPlus/lib/python2.7/ssl.py", line 611, in __init__ self.do_handshake() "/share/CACHEDEV1_DATA/.qpkg/QSabNZBdPlus/lib/python2.7/ssl.py", line 840, in do_handshake self._sslobj.do_handshake() SSLError: [SSL: INAPPROPRIATE_FALLBACK] inappropriate fallback (_ssl.c:661)
0.249908
Traceback (most recent call last): "/usr/lib/python2.6/dist-packages/wx-2.8-gtk2-unicode/wx/_misc.py", line 1342, in Notify self.notify() "/usr/lib/python2.6/dist-packages/wx-2.8-gtk2-unicode/wx/_core.py", line 14720, in Notify wx.CallAfter(self.Stop) "/usr/lib/python2.6/dist-packages/wx-2.8-gtk2-unicode/wx/_core.py", line 14609, in CallAfter assert app is not None, 'No wx.App created yet'AssertionError: No wx.App created yet
invalid output was: Traceback (most recent call last): "/home/elasticdog/.ansible/tmp/ansible-tmp-1406564373.52-277382391891465/ec2_elb_lb", line 2103, in <module> main() "/home/elasticdog/.ansible/tmp/ansible-tmp-1406564373.52-277382391891465/ec2_elb_lb", line 596, in main elb_man.ensure_ok() "/home/elasticdog/.ansible/tmp/ansible-tmp-1406564373.52-277382391891465/ec2_elb_lb", line 261, in ensure_ok self._set_subnets() "/home/elasticdog/.ansible/tmp/ansible-tmp-1406564373.52-277382391891465/ec2_elb_lb", line 479, in _set_subnets self._attach_subnets(subnets_to_attach) "/home/elasticdog/.ansible/tmp/ansible-tmp-1406564373.52-277382391891465/ec2_elb_lb", line 461, in _attach_subnets self.elb_conn.attach_lb_to_subnets(self.name, subnets) "/home/elasticdog/.virtualenvs/ansible/lib/python2.7/site-packages/boto/ec2/elb/__init__.py", line 711, in attach_lb_to_subnets params, None) "/home/elasticdog/.virtualenvs/ansible/lib/python2.7/site-packages/boto/connection.py", line 1158, in get_list raise self.ResponseError(response.status, response.reason, body)boto.exception.BotoServerError: BotoServerError: 409 Conflict
0.201005
Traceback (most recent call last): "<ipython-input-9-8b0ba0929a83>", line 1, in <module> unpacker.read_array_header() "_unpacker.pyx", line 388, in msgpack._unpacker.Unpacker.read_array_header (msgpack/_unpacker.cpp:388) "_unpacker.pyx", line 331, in msgpack._unpacker.Unpacker._unpack (msgpack/_unpacker.cpp:331)ValueError: Unexpected type header on stream
Traceback (most recent call last): "/usr/lib/python3/dist-packages/picamera/camera.py", line 345, in __init__ }[(GPIO.RPI_REVISION, camera_num)] KeyError: (3, 1)
0.144743
Traceback (most recent call last): "<stdin>", line 1, in <module> "psutil/__init__.py", line 274, in suspend os.kill(self.pid, signal.SIGSTOP)OSError: [Errno 1] Operation not permitted
Traceback (most recent call last): "<string>", line 1, in <module> "/home/xxx/anaconda3/envs/allennlp1.0/lib/python3.7/multiprocessing/spawn.py", line 105, in spawn_main exitcode = _main(fd) "/home/xxx/anaconda3/envs/allennlp1.0/lib/python3.7/multiprocessing/spawn.py", line 115, in _main self = reduction.pickle.load(from_parent) ModuleNotFoundError: No module named 'my_text_classifier'
0.209121
Traceback (most recent call last): "sqlmap.py", line 215, in main start() "lib/core/decorators.py", line 75, in _ result = f(*args, **kwargs) "lib/controller/controller.py", line 717, in start action() "lib/controller/action.py", line 105, in action conf.dumper.dbs(conf.dbmsHandler.getDbs()) "plugins/generic/databases.py", line 172, in getDbs db = unArrayizeValue(inject.getValue(query, union=False, error=False)) "lib/core/decorators.py", line 91, in _ result = f(*args, **kwargs) "lib/core/decorators.py", line 75, in _ result = f(*args, **kwargs) "lib/request/inject.py", line 484, in getValue value = _goInferenceProxy(query, fromUser, batch, unpack, charsetType, firstChar, lastChar, dump) "lib/request/inject.py", line 307, in _goInferenceProxy outputs = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) "lib/request/inject.py", line 153, in _goInferenceFields output = _goInference(payload, expressionReplaced, charsetType, firstChar, lastChar, dump, field) "lib/request/inject.py", line 125, in _goInference count, value = bisection(payload, expression, length, charsetType, firstChar, lastChar, dump) "lib/techniques/blind/inference.py", line 646, in bisection val = getChar(index, asciiTbl, not(charsetType is None and conf.charset)) "lib/techniques/blind/inference.py", line 362, in getChar result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) "lib/core/decorators.py", line 75, in _ result = f(*args, **kwargs) "lib/request/connect.py", line 1373, in queryPage page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) "lib/request/connect.py", line 798, in getPage warnMsg += "( thread.retrycount " + threadData.retriesCount + " | conf.retries = " + conf.retries + " )"TypeError: cannot concatenate 'str' and 'int' objects
Traceback (most recent call last): "D:\Anaconda3\lib\site-packages\tensorflow\python\client\session.py", line 1022, in _do_call return fn(*args) "D:\Anaconda3\lib\site-packages\tensorflow\python\client\session.py", line 1004, in _run_fn status, run_metadata) "D:\Anaconda3\lib\contextlib.py", line 66, in __exit__ next(self.gen) "D:\Anaconda3\lib\site-packages\tensorflow\python\framework\errors_impl.py", line 469, in raise_exception_on_not_ok_status pywrap_tensorflow.TF_GetCode(status)) tensorflow.python.framework.errors_impl.InvalidArgumentError: NodeDef mentions attr 'data_format' not in Op<name=DepthwiseConv2dNative; signature=input:T, filter:T -> output:T; attr=T:type,allowed=[DT_FLOAT, DT_DOUBLE]; attr=strides:list(int); attr=padding:string,allowed=["SAME", "VALID"]>; NodeDef: FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/depthwise = DepthwiseConv2dNative[T=DT_FLOAT, data_format="NHWC", padding="SAME", strides=[1, 1, 1, 1], _device="/job:localhost/replica:0/task:0/gpu:0"](FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/Relu6, FeatureExtractor/MobilenetV1/Conv2d_1_depthwise/depthwise_weights/read)
0.252769
shellresult: Traceback (most recent call last): "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 2349, in <module> sys.exit(cli_main()) "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 2218, in cli_main result = decryptBook(userkey, inpath, outpath) "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 2193, in decryptBook serializer = PDFSerializer(inf, userkey) "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 1999, in __init__ doc.initialize(userkey) "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 1484, in initialize return self.initialize_ebx(password, docid, param) "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 1614, in initialize_ebx bookkey = rsa.decrypt(bookkey) "/Applications/DeDRM.app/Contents/Resources/ineptpdf.py", line 216, in decrypt raise ADEPTError('RSA decryption failed') __main__.ADEPTError: RSA decryption failed
Traceback (most recent call last): "manage.py", line 28, in <module> execute_from_command_line(sys.argv) "/usr/local/lib/python2.7/site-packages/django/core/management/**init**.py", line 399, in execute_from_command_line utility.execute() "/usr/local/lib/python2.7/site-packages/django/core/management/**init**.py", line 392, in execute self.fetch_command(subcommand).run_from_argv(self.argv) "/usr/local/lib/python2.7/site-packages/django/core/management/base.py", line 242, in run_from_argv self.execute(_args, *_options.**dict**) "/usr/local/lib/python2.7/site-packages/django/core/management/base.py", line 285, in execute output = self.handle(_args, *_options) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/management/commands/migrate.py", line 111, in handle ignore_ghosts = ignore_ghosts, "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/**init**.py", line 220, in migrate_app success = migrator.migrate_many(target, workplan, database) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/migrators.py", line 254, in migrate_many result = migrator.**class**.migrate_many(migrator, target, migrations, database) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/migrators.py", line 329, in migrate_many result = self.migrate(migration, database) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/migrators.py", line 133, in migrate result = self.run(migration, database) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/migrators.py", line 114, in run return self.run_migration(migration, database) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/migrators.py", line 84, in run_migration migration_function() "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/migration/migrators.py", line 60, in <lambda> return (lambda: direction(orm)) "/usr/local/lib/python2.7/site-packages/mezzanine/conf/migrations/0001_initial.py", line 15, in forwards ('name', self.gf('django.db.models.fields.CharField')(max_length=50)), "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/db/generic.py", line 47, in _cache_clear return func(self, table, _args, *_opts) "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/db/generic.py", line 361, in create_table "columns": ', '.join([col for col in columns if col]), "/usr/local/lib/python2.7/site-packages/South-0.8.4-py2.7.egg/south/db/generic.py", line 282, in execute cursor.execute(sql, params) "/usr/local/lib/python2.7/site-packages/django/db/backends/util.py", line 69, in execute return super(CursorDebugWrapper, self).execute(sql, params) "/usr/local/lib/python2.7/site-packages/django/db/backends/util.py", line 53, in execute return self.cursor.execute(sql, params) "/usr/local/lib/python2.7/site-packages/django/db/utils.py", line 99, in __exit__ six.reraise(dj_exc_type, dj_exc_value, traceback) "/usr/local/lib/python2.7/site-packages/django/db/backends/util.py", line 53, in execute return self.cursor.execute(sql, params)django.db.utils.ProgrammingError: relation "conf_setting" already exists
0.292795
Traceback (most recent call last): "/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py", line 1925, in process_info success = dl(filename, info_dict) "/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py", line 1864, in dl return fd.download(name, info) "/usr/local/bin/youtube-dl/youtube_dl/downloader/common.py", line 366, in download return self.real_download(filename, info_dict) "/usr/local/bin/youtube-dl/youtube_dl/downloader/dash.py", line 48, in real_download success, frag_content = self._download_fragment(ctx, fragment_url, info_dict) "/usr/local/bin/youtube-dl/youtube_dl/downloader/fragment.py", line 100, in _download_fragment success = ctx['dl'].download(fragment_filename, { "/usr/local/bin/youtube-dl/youtube_dl/downloader/common.py", line 366, in download return self.real_download(filename, info_dict) "/usr/local/bin/youtube-dl/youtube_dl/downloader/http.py", line 341, in real_download establish_connection() "/usr/local/bin/youtube-dl/youtube_dl/downloader/http.py", line 109, in establish_connection ctx.data = self.ydl.urlopen(request) "/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py", line 2237, in urlopen return self._opener.open(req, timeout=self._socket_timeout) "/usr/lib/python3.8/urllib/request.py", line 525, in open response = self._open(req, data) "/usr/lib/python3.8/urllib/request.py", line 542, in _open result = self._call_chain(self.handle_open, protocol, protocol + "/usr/lib/python3.8/urllib/request.py", line 502, in _call_chain result = func(*args) "/usr/local/bin/youtube-dl/youtube_dl/utils.py", line 2726, in https_open return self.do_open(functools.partial( "/usr/lib/python3.8/urllib/request.py", line 1322, in do_open raise URLError(err) urllib.error.URLError: <urlopen error [Errno 0] Error>
Traceback (most recent call last): "/www/lemur/lemur/certificates/service.py", line 273, in create cert_body, private_key, cert_chain, external_id, csr = mint(**kwargs) "/www/lemur/lemur/certificates/service.py", line 223, in mint cert_body, cert_chain, external_id = issuer.create_certificate(csr, kwargs) "/www/lemur/lemur/plugins/lemur_cfssl/plugin.py", line 58, in create_certificate key = bytes.fromhex(hex_key) TypeError: fromhex() argument must be str, not None
0.259116
Traceback (most recent call last): "/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py", line 1322, in _do_call return fn(*args) "/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py", line 1307, in _run_fn options, feed_dict, fetch_list, target_list, run_metadata) "/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py", line 1409, in _call_tf_sessionrun run_metadata) tensorflow.python.framework.errors_impl.InvalidArgumentError: Nan in summary histogram for: pred
[2020-07-20 18:08:54,911][DummyThread-262:139872069604936][task_id:default][pgsql.py:160][WARNING]- PgSQL命令执行报错,语句:SELECT datname FROM pg_database;, 错误信息:Traceback (most recent call last): "/opt/archery/sql/engines/pgsql.py", line 144, in _query conn = self.get_connection(db_name=db_name) "/opt/archery/sql/engines/pgsql.py", line 30, in get_connection password=self.password, dbname=db_name) "/opt/venv4archery/lib/python3.6/site-packages/psycopg2/__init__.py", line 126, in connect conn = _connect(dsn, connection_factory=connection_factory, **kwasync) psycopg2.OperationalError: FATAL: database "db_sys" does not exist
0.308962
Traceback (most recent call last): "/home/django/.virtualenvs/blog/lib/python2.7/site-packages/south/migration/migrators.py", line 175, in _run_migration migration_function() "/home/django/.virtualenvs/blog/lib/python2.7/site-packages/south/migration/migrators.py", line 57, in <lambda> return (lambda: direction(orm)) "/home/django/blog/django-blog-zinnia/zinnia/migrations/0018_initial.py", line 59, in forwards ('author', models.ForeignKey(orm['zinnia.author'], null=False)) "/home/django/.virtualenvs/blog/lib/python2.7/site-packages/south/orm.py", line 169, in __getitem__ raise KeyError("The model '%s' from the app '%s' is not available in this migration." % (model, app))KeyError: "The model 'author' from the app 'zinnia' is not available in this migration."`
Traceback (most recent call last): "./hangupsbot/hangupsbot.py", line 164, in run loop.run_until_complete(self._client.connect()) "/usr/lib/python3.4/asyncio/base_events.py", line 276, in run_until_complete return future.result() "/usr/lib/python3.4/asyncio/futures.py", line 277, in result raise self._exception "/usr/lib/python3.4/asyncio/tasks.py", line 237, in _step result = next(coro) "/usr/local/lib/python3.4/dist-packages/hangups/client.py", line 109, in connect initial_data = yield from self._initialize_chat() "/usr/local/lib/python3.4/dist-packages/hangups/client.py", line 189, in _initialize_chat .format(e))hangups.exceptions.HangupsError: Failed to fetch PVT token: Request connection error: Cannot connect to host talkgadget.google.com:443 ssl:True
0.22736
Traceback (most recent call last): "subject_extract.py", line 47, in <module> D = D[D[2] != u'其他'] "/home/work/.local/lib/python2.7/site-packages/pandas/core/ops.py", line 1728, in wrapper (is_extension_array_dtype(other) and not is_scalar(other))): "/home/work/.local/lib/python2.7/site-packages/pandas/core/dtypes/common.py", line 1749, in is_extension_array_dtype registry.find(dtype) is not None) "/home/work/.local/lib/python2.7/site-packages/pandas/core/dtypes/dtypes.py", line 89, in find return dtype_type.construct_from_string(dtype) "/home/work/.local/lib/python2.7/site-packages/pandas/core/dtypes/dtypes.py", line 699, in construct_from_string raise TypeError(msg.format(string)) UnicodeEncodeError: 'ascii' codec can't encode characters in position 0-1: ordinal not in range(128)
DEBUG 2014-03-21 17:36:25,501 CheckThread-http://dis.dsv.su.se/Login/ Traceback (most recent call last): "/usr/local/lib/python2.7/dist-packages/LinkChecker-9.1-py2.7-linux-i686.egg/linkcheck/checker/urlbase.py", line 439, in local_check self.check_connection() "/usr/local/lib/python2.7/dist-packages/LinkChecker-9.1-py2.7-linux-i686.egg/linkcheck/checker/httpurl.py", line 133, in check_connection self.follow_redirections(request) "/usr/local/lib/python2.7/dist-packages/LinkChecker-9.1-py2.7-linux-i686.egg/linkcheck/checker/httpurl.py", line 218, in follow_redirections for response in self.session.resolve_redirects(self.url_connection, request, **kwargs): "/usr/local/lib/python2.7/dist-packages/requests/sessions.py", line 168, in resolve_redirects allow_redirects=False, "/usr/local/lib/python2.7/dist-packages/requests/sessions.py", line 486, in send r = adapter.send(request, **kwargs) "/usr/local/lib/python2.7/dist-packages/requests/adapters.py", line 385, in send raise SSLError(e)SSLError: [Errno 1] _ssl.c:504: error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed
0.303752
Traceback (most recent call last): "/home/michele/programming/Tools/web2py/gluon/restricted.py", line 219, in restricted exec(ccode, environment) "/home/michele/programming/Tools/web2py/applications/welcome/controllers/default.py", li "/home/michele/programming/Tools/web2py/gluon/globals.py", line 422, in <lambda> self._caller = lambda f: f() "/home/michele/programming/Tools/web2py/applications/welcome/controllers/default.py", li return dict(form=auth()) "/home/michele/programming/Tools/web2py/gluon/tools.py", line 1801, in __call__ return getattr(self, args[0])() "/home/michele/programming/Tools/web2py/gluon/tools.py", line 3006, in register self.login_user(user) "/home/michele/programming/Tools/web2py/gluon/authapi.py", line 735, in login_user user = self._update_session_user(user) "/home/michele/programming/Tools/web2py/gluon/authapi.py", line 720, in _update_session_ user = Row(user) "/home/michele/programming/Tools/web2py/gluon/packages/dal/pydal/helpers/classes.py", li return self.__dict__.__init__(*args, **kwargs) TypeError: 'NoneType' object is not iterable
Traceback (most recent call last): "C:\Program s\Electrum\lib\paymentrequest.py", line 63, in get_payment_request response = requests.request('GET', url, headers=REQUEST_HEADERS) "Z:\home\voegtlin\electrum\contrib\build-wine\build\deterministic\out00-PYZ.pyz\requests.api", line 50, in request "Z:\home\voegtlin\electrum\contrib\build-wine\build\deterministic\out00-PYZ.pyz\requests.sessions", line 468, in request "Z:\home\voegtlin\electrum\contrib\build-wine\build\deterministic\out00-PYZ.pyz\requests.sessions", line 576, in send "Z:\home\voegtlin\electrum\contrib\build-wine\build\deterministic\out00-PYZ.pyz\requests.adapters", line 433, in sendSSLError: [Errno 1] _ssl.c:510: error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed
0.277542
Traceback (most recent call last): "/app/src/flanker/flanker/mime/message/headers/encoding.py", line 37, in encode return encode_unstructured(name, value) "/app/src/flanker/flanker/mime/message/headers/encoding.py", line 52, in encode_unstructured return encode_address_header(name, value) "/app/src/flanker/flanker/mime/message/headers/encoding.py", line 65, in encode_address_header out.append(addr.full_spec().encode('utf-8')) "/app/src/flanker/flanker/addresslib/address.py", line 536, in full_spec ace_hostname = self.hostname.encode('idna') "/usr/lib/python2.7/encodings/idna.py", line 164, in encode result.append(ToASCII(label)) "/usr/lib/python2.7/encodings/idna.py", line 76, in ToASCII label = nameprep(label) "/usr/lib/python2.7/encodings/idna.py", line 38, in nameprep raise UnicodeError("Invalid character %r" % c) UnicodeError: Invalid character u'\u200e'
Traceback (most recent call last): "/home/ubuntu/miniconda/lib/python3.6/site-packages/conda/exceptions.py", line 640, in conda_exception_handler class CondaVerificationError(CondaError): "/home/ubuntu/miniconda/lib/python3.6/site-packages/conda_env/cli/main_update.py", line 106, in execute # e.g. due to conda_env being upgraded or Python version switched. "/home/ubuntu/miniconda/lib/python3.6/site-packages/conda_env/installers/pip.py", line 40, in _pip_install_via_requirements args, pip_version = pip_args(prefix) ValueError: too many values to unpack (expected 2)
0.279291
Traceback (most recent call last): "<stdin>", line 1, in <module> "/home/canicula/anaconda2/lib/python2.7/site-packages/prettyplotlib/colors.py", line 35, in wrapper return func(*args, **kwargs) "/home/canicula/anaconda2/lib/python2.7/site-packages/prettyplotlib/_hist.py", line 18, in hist color_cycle = ax._get_lines.color_cycle AttributeError: '_process_plot_var_args' object has no attribute 'color_cycle'
Traceback (most recent call last): "cellprofiler\gui\pipelinecontroller.pyc", line 2777, in do_step "cellprofiler\modules\overlayoutlines.pyc", line 210, in run "cellprofiler\modules\overlayoutlines.pyc", line 306, in run_bw "cellprofiler\measurements.pyc", line 1578, in get_imageValueError: The None image is missing from the pipeline.
0.227315
Traceback (most recent call last): "/home/wxp/anaconda3/envs/atss/lib/python3.7/runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) "/home/wxp/anaconda3/envs/atss/lib/python3.7/runpy.py", line 85, in _run_code exec(code, run_globals) "/home/wxp/anaconda3/envs/atss/lib/python3.7/site-packages/torch/distributed/launch.py", line 235, in <module> main() "/home/wxp/anaconda3/envs/atss/lib/python3.7/site-packages/torch/distributed/launch.py", line 231, in main cmd=process.args) subprocess.CalledProcessError: Command '['/home/wxp/anaconda3/envs/atss/bin/python', '-u', 'tools/train_net.py', '--local_rank=0', '--config-file', 'configs/atss/atss_R_50_FPN_1x.yaml', 'DATALOADER.NUM_WO
Traceback (most recent call last): "<stdin>", line 1, in <module> "/usr/lib/python3/dist-packages/matplotlib/pyplot.py", line 103, in <module> _backend_mod, new_figure_manager, draw_if_interactive, _show = pylab_setup() "/usr/lib/python3/dist-packages/matplotlib/backends/__init__.py", line 32, in pylab_setup globals(),locals(),[backend_name],0) "/usr/lib/python3/dist-packages/matplotlib/backends/backend_tkagg.py", line 13, in <module> import matplotlib.backends.tkagg as tkagg "/usr/lib/python3/dist-packages/matplotlib/backends/tkagg.py", line 7, in <module> from matplotlib.backends import _tkaggImportError: cannot import name '_tkagg'
0.323956
fatal: [localhost]: FAILED! => {"changed": true, "cmd": ["python3", "manage.py", "migrate"], "delta": "0:00:00.283813", "end": "2019-03-28 03:45:17.018815", "msg": "non-zero return code", "rc": 1, "start": "2019-03-28 03:45:16.735002", "stderr": "Traceback (most recent call last):\n \"manage.py\", line 8, in <module>\n from django.core.management import execute_from_command_line\n \"/usr/lib64/python3.4/site-packages/django/core/management/__init__.py\", line 11, in <module>\n from django.conf import settings\n \"/usr/lib64/python3.4/site-packages/django/conf/__init__.py\", line 18, in <module>\n from django.utils.functional import LazyObject, empty\n \"/usr/lib64/python3.4/site-packages/django/utils/functional.py\", line 12\n return _curried_func(*args, *moreargs, **{**kwargs, **morekwargs})\n ^\nSyntaxError: invalid syntax", "stderr_lines": ["Traceback (most recent call last):", " \"manage.py\", line 8, in <module>", " from django.core.management import execute_from_command_line", " \"/usr/lib64/python3.4/site-packages/django/core/management/__init__.py\", line 11, in <module>", " from django.conf import settings", " \"/usr/lib64/python3.4/site-packages/django/conf/__init__.py\", line 18, in <module>", " from django.utils.functional import LazyObject, empty", " \"/usr/lib64/python3.4/site-packages/django/utils/functional.py\", line 12", " return _curried_func(*args, *moreargs, **{**kwargs, **morekwargs})", " ^", "SyntaxError: invalid syntax"], "stdout": "", "stdout_lines": []} ```
Traceback (most recent call last): "/home/cad/phozone/dev/spyder_openfiles/widgets/openfilesgui.py", line 209, in timerUpdate data=self.get Data() "/home/cad/phozone/dev/spyder_openfiles/widgets/openfilesgui.py", line 118, in get Data cdsLib=self.getCdsLib() "/home/cad/phozone/dev/spyder_openfiles/widgets/openfilesgui.py", line 215, in getCdsLib return k.widget.cdsLibAttributeError: 'oaideWidget' object has no attribute 'cdsLib'
0.15687
Traceback (most recent call last): "e:\WPy-3710\python-3.7.1.amd64\lib\site.py", line 439, in register_readline readline.read_history_file(history) "e:\WPy-3710\python-3.7.1.amd64\lib\site-packages\pyreadline\rlmain.py", line 165, in read_history_file self.mode._history.read_history_file(filename) "e:\WPy-3710\python-3.7.1.amd64\lib\site-packages\pyreadline\lineeditor\history.py", line 82, in read_history_file for line in open(filename, 'r'): "e:\WPy-3710\python-3.7.1.amd64\lib\encodings\cp1252.py", line 23, in decode return codecs.charmap_decode(input,self.errors,decoding_table)[0] UnicodeDecodeError: 'charmap' codec can't decode byte 0x81 in position 1748: character maps to <undefined>
Traceback (most recent call last): "/home/datamind/anaconda3/envs/can_env/lib/python3.7/runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) "/home/datamind/anaconda3/envs/can_env/lib/python3.7/runpy.py", line 85, in _run_code exec(code, run_globals) "/home/datamind/allennlp/allennlp/tests/training/gan_callback_trainer_test.py", line 343, in <module> trainer = TrainerBase.from_params(params=params, serialization_dir=serialization_dir) "/home/datamind/allennlp/allennlp/common/from_params.py", line 555, in from_params **extras, "/home/datamind/allennlp/allennlp/common/from_params.py", line 583, in from_params kwargs = create_kwargs(constructor_to_inspect, cls, params, **extras) "/home/datamind/allennlp/allennlp/common/from_params.py", line 188, in create_kwargs cls.__name__, param_name, annotation, param.default, params, **extras "/home/datamind/allennlp/allennlp/common/from_params.py", line 287, in pop_and_construct_arg popped_params = params.pop(name, default) if default != _NO_DEFAULT else params.pop(name) "/home/datamind/allennlp/allennlp/common/params.py", line 245, in pop raise ConfigurationError(f'key "{key}" is required at location "{self.history}"') allennlp.common.checks.ConfigurationError: key "train_data" is required at location ""
0.274838
Unhandled exception (KeyError) encountered during scan. Please report this as a bug: ['Traceback (most recent call last):\n', ' "/opt/spiderfoot-2.12/sfscan.py", line 265, in startScan\n psMod.notifyListeners(firstEvent)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_bingsearch.py", line 93, in handleEvent\n self.notifyListeners(evt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 162, in handleEvent\n self.processHost(match[1], parentEvent, False)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 334, in processHost\n self.notifyListeners(evt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_bingsearch.py", line 93, in handleEvent\n self.notifyListeners(evt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 162, in handleEvent\n self.processHost(match[1], parentEvent, False)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 338, in processHost\n self.processDomain(dom, evt)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 357, in processDomain\n self.notifyListeners(domevt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_builtwith.py", line 127, in handleEvent\n self.notifyListeners(ev)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 227, in handleEvent\n self.processHost(addr, parentEvent, False)\n', ' "/opt/spiderfoot-2.12/modules/sfp_dnsresolve.py", line 334, in processHost\n self.notifyListeners(evt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_censys.py", line 165, in handleEvent\n self.notifyListeners(e)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_ripe.py", line 305, in handleEvent\n asn = self.netblockAs(eventData)\n', ' "/opt/spiderfoot-2.12/modules/sfp_ripe.py", line 94, in netblockAs\n res = self.fetchRir("https://stat.ripe.net/data/whois/data.json?resource=" + prefix)\n', ' "/opt/spiderfoot-2.12/modules/sfp_ripe.py", line 65, in fetchRir\n self.notifyListeners(evt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_email.py", line 98, in handleEvent\n self.notifyListeners(evt)\n', ' "/opt/spiderfoot-2.12/sflib.py", line 1527, in notifyListeners\n listener.handleEvent(sfEvent)\n', ' "/opt/spiderfoot-2.12/modules/sfp_fullcontact.py", line 151, in handleEvent\n e = SpiderFootEvent("HUMAN_NAME", data[\'fullName\'], self.__name__, event)\n', "KeyError: 'fullName'\n"]
Traceback (most recent call last): "/freqtrade/freqtrade/commands/trade_commands.py", line 19, in start_trading worker.run() "/freqtrade/freqtrade/worker.py", line 74, in run state = self._worker(old_state=state) "/freqtrade/freqtrade/worker.py", line 111, in _worker self._throttle(func=self._process_running, throttle_secs=self._throttle_secs) "/freqtrade/freqtrade/worker.py", line 132, in _throttle result = func(*args, **kwargs) "/freqtrade/freqtrade/worker.py", line 145, in _process_running self.freqtrade.process() "/freqtrade/freqtrade/freqtradebot.py", line 181, in process if self.get_free_open_trades(): "/freqtrade/freqtrade/freqtradebot.py", line 236, in get_free_open_trades open_trades = len(Trade.get_open_trades()) "/freqtrade/freqtrade/persistence/models.py", line 570, in get_open_trades return Trade.get_trades(Trade.is_open.is_(True)).all() "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/query.py", line 3373, in all return list(self) "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/query.py", line 3534, in __iter__ self.session._autoflush() "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 1629, in _autoflush util.raise_(e, with_traceback=sys.exc_info()[2]) "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 182, in raise_ raise exception "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 1618, in _autoflush self.flush() "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 2536, in flush self._flush(objects) "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 2678, in _flush transaction.rollback(_capture_exception=True) "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py", line 68, in __exit__ compat.raise_( "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 182, in raise_ raise exception "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 2674, in _flush transaction.commit() "/usr/local/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 508, in commit t[1].commit() "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1762, in commit self._do_commit() "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1793, in _do_commit self.connection._commit_impl() "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 773, in _commit_impl self._handle_dbapi_exception(e, None, None, None, None) "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1510, in _handle_dbapi_exception util.raise_( "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 182, in raise_ raise exception "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 771, in _commit_impl self.engine.dialect.do_commit(self.connection) "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/default.py", line 546, in do_commit dbapi_connection.commit() sqlalchemy.exc.OperationalError: (raised as a result of Query-invoked autoflush; consider using a session.no_autoflush block if this flush is occurring prematurely)
0.319623
Traceback (most recent call last): "pandas\_libs\index.pyx", line 705, in pandas._libs.index.BaseMultiIndexCodesEngine.get_loc indices = [0 if checknull(v) else lev.get_loc(v) + 1 "C:\git\pandas\pandas\core\indexes\base.py", line 2963, in get_loc casted_key = self._maybe_cast_indexer(key) "C:\git\pandas\pandas\core\indexes\category.py", line 527, in _maybe_cast_indexer return self._data._unbox_scalar(key) "C:\git\pandas\pandas\core\arrays\categorical.py", line 1728, in _unbox_scalar code = self.categories.get_loc(key) "C:\git\pandas\pandas\core\indexes\base.py", line 2967, in get_loc raise KeyError(key) from err KeyError: 'All'
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 0.0 failed 1 times, most recent failure: Lost task 1.0 in stage 0.0 (TID 1, localhost): org.apache.spark.api.python.PythonException: Traceback (most recent call last): "/usr/local/spark/python/lib/pyspark.zip/pyspark/worker.py", line 98, in main command = pickleSer._read_with_length(infile) "/usr/local/spark/python/lib/pyspark.zip/pyspark/serializers.py", line 164, in _read_with_length return self.loads(obj) "/usr/local/spark/python/lib/pyspark.zip/pyspark/serializers.py", line 422, in loads return pickle.loads(obj) "/usr/local/spark/python/lib/pyspark.zip/pyspark/mllib/__init__.py", line 25, in <module>ImportError: No module named numpy
0.250355
Traceback (most recent call last): "/var/lib/awx/venv/awx/lib/python3.6/site-packages/awx/settings/production.py", line 84, in <module> include(settings_file, optional(settings_files), scope=locals()) "/var/lib/awx/venv/awx/lib/python3.6/site-packages/split_settings/tools.py", line 107, in include exec(compiled_code, scope) # noqa: S102, WPS421 "/etc/tower/settings.py", line 19, in <module> SECRET_KEY = get_secret() "/etc/tower/settings.py", line 8, in get_secret return open('/etc/tower/SECRET_KEY', 'rb').read().strip() IsADirectoryError: [Errno 21] Is a directory: '/etc/tower/SECRET_KEY'
Traceback (most recent call last): "/usr/local/bin/wafw00f", line 8, in <module> main.main() "/usr/local/lib/python2.7/site-packages/wafw00f/main.py", line 419, in main if attacker.normalrequest() is None: "/usr/local/lib/python2.7/site-packages/wafw00f/main.py", line 96, in normalrequest return self.request(usecache=usecache, cacheresponse=cacheresponse, headers=headers) "/usr/local/lib/python2.7/site-packages/wafw00f/lib/evillib.py", line 320, in request r = self._request(method, path, headers) "/usr/local/lib/python2.7/site-packages/wafw00f/lib/evillib.py", line 362, in _request h = conn_factory(connect_host, connect_port,**params)TypeError: __init__() got an unexpected keyword argument 'context'
0.267864
Traceback (most recent call last): "/usr/sbin/flocker-diagnostics", line 7, in <module> from flocker.node.script import flocker_diagnostics_main "/opt/flocker/lib/python2.7/site-packages/flocker/node/__init__.py", line 20, in <module> from .script import DeployerType "/opt/flocker/lib/python2.7/site-packages/flocker/node/script.py", line 16, in <module> import yaml "/opt/mesosphere/lib/python3.4/site-packages/yaml/__init__.py", line 284 class YAMLObject(metaclass=YAMLObjectMetaclass): ^SyntaxError: invalid syntax
Traceback (most recent call last): "/usr/local/bin/kalliope", line 11, in <module> load_entry_point('kalliope==0.4.4b0', 'console_scripts', 'kalliope')() "/usr/local/lib/python2.7/dist-packages/kalliope-0.4.4b0-py2.7.egg/kalliope/__init__.py", line 142, in main MainController(brain=brain) "/usr/local/lib/python2.7/dist-packages/kalliope-0.4.4b0-py2.7.egg/kalliope/core/MainController.py", line 86, in __init__ self.start_trigger() "build/bdist.linux-x86_64/egg/transitions/core.py", line 282, in trigger "build/bdist.linux-x86_64/egg/transitions/core.py", line 837, in _process "build/bdist.linux-x86_64/egg/transitions/core.py", line 312, in _trigger "build/bdist.linux-x86_64/egg/transitions/core.py", line 198, in execute "build/bdist.linux-x86_64/egg/transitions/core.py", line 209, in _change_state "build/bdist.linux-x86_64/egg/transitions/core.py", line 77, in enter "build/bdist.linux-x86_64/egg/transitions/core.py", line 807, in _callback "/usr/local/lib/python2.7/dist-packages/kalliope-0.4.4b0-py2.7.egg/kalliope/core/MainController.py", line 202, in analysing_order_thread is_api_call=False) "/usr/local/lib/python2.7/dist-packages/kalliope-0.4.4b0-py2.7.egg/kalliope/core/SynapseLauncher.py", line 70, in run_matching_synapse_from_order list_synapse_to_process = OrderAnalyser.get_matching_synapse(order=order_to_process, brain=brain) "/usr/local/lib/python2.7/dist-packages/kalliope-0.4.4b0-py2.7.egg/kalliope/core/OrderAnalyser.py", line 52, in get_matching_synapse if cls.spelt_order_match_brain_order_via_table(signal.sentence, order): "/usr/local/lib/python2.7/dist-packages/kalliope-0.4.4b0-py2.7.egg/kalliope/core/OrderAnalyser.py", line 79, in spelt_order_match_brain_order_via_table user_said = user_said.lower() AttributeError: 'NoneType' object has no attribute 'lower'
0.284185
ERROR:root:Traceback (most recent call last): "/usr/local/lib/python3.6/site-packages/elastalert/elastalert.py", line 1440, in alert return self.send_alert(matches, rule, alert_time=alert_time, retried=retried) "/usr/local/lib/python3.6/site-packages/elastalert/elastalert.py", line 1529, in send_alert alert.alert(matches) "/usr/local/lib/python3.6/site-packages/elastalert/alerts.py", line 378, in alert conn.start() AttributeError: 'StompConnection11' object has no attribute 'start'
Traceback (most recent call last): "/usr/local/bin/ipython", line 11, in <module> sys.exit(start_ipython()) "/usr/local/lib/python2.7/dist-packages/IPython/__init__.py", line 118, in start_ipython return launch_new_instance(argv=argv, **kwargs) "/usr/local/lib/python2.7/dist-packages/traitlets/config/application.py", line 592, in launch_instance app.start() "/usr/local/lib/python2.7/dist-packages/IPython/terminal/ipapp.py", line 349, in start return self.subapp.start() "/usr/local/lib/python2.7/dist-packages/nbconvert/nbconvertapp.py", line 286, in start self.convert_notebooks() "/usr/local/lib/python2.7/dist-packages/nbconvert/nbconvertapp.py", line 409, in convert_notebooks self.convert_single_notebook(notebook_filename) "/usr/local/lib/python2.7/dist-packages/nbconvert/nbconvertapp.py", line 380, in convert_single_notebook output, resources = self.export_single_notebook(notebook_filename, resources) "/usr/local/lib/python2.7/dist-packages/nbconvert/nbconvertapp.py", line 332, in export_single_notebook output, resources = self.exporter.from_filename(notebook_filename, resources=resources) "/usr/local/lib/python2.7/dist-packages/nbconvert/exporters/exporter.py", line 166, in from_filename return self.from_notebook_node(nbformat.read(f, as_version=4), resources=resources, **kw) "/usr/local/lib/python2.7/dist-packages/nbconvert/exporters/latex.py", line 88, in from_notebook_node return super(LatexExporter, self).from_notebook_node(nb, resources, **kw) "/usr/local/lib/python2.7/dist-packages/nbconvert/exporters/templateexporter.py", line 206, in from_notebook_node raise IOError('template file "%s" could not be found' % self.template_file)IOError: template file "latex_nocode.tplx" could not be found
0.287814
Traceback (most recent call last): "<censored>", line 158, in request async with self._http_session.get(req_url) as response: "C:\Python38\lib\site-packages\aiohttp\client.py", line 1012, in __aenter__ self._resp = await self._coro "C:\Python38\lib\site-packages\vcr\stubs\aiohttp_stubs\__init__.py", line 187, in new_request return play_responses(cassette, vcr_request) "C:\Python38\lib\site-packages\vcr\stubs\aiohttp_stubs\__init__.py", line 89, in play_responses vcr_response = cassette.play_response(vcr_request) "C:\Python38\lib\site-packages\vcr\cassette.py", line 265, in play_response raise UnhandledHTTPRequestError( vcr.errors.UnhandledHTTPRequestError: "The cassette ('main.yaml') doesn't contain the request (<Request (GET) http://<censored_unfiltered_url>>) asked for"
Traceback (most recent call last): "/projects/segmentation/Pytorch-UNet/predict.py", line 88, in <module> net.load_state_dict(torch.load(args.model)) "/anaconda3/lib/python3.6/site-packages/torch/serialization.py", line 303, in load return _load(f, map_location, pickle_module) "/anaconda3/lib/python3.6/site-packages/torch/serialization.py", line 469, in _load result = unpickler.load() "/anaconda3/lib/python3.6/site-packages/torch/serialization.py", line 437, in persistent_load data_type(size), location) "/anaconda3/lib/python3.6/site-packages/torch/serialization.py", line 88, in default_restore_location result = fn(storage, location) "/anaconda3/lib/python3.6/site-packages/torch/serialization.py", line 70, in _cuda_deserialize return obj.cuda(device) "/anaconda3/lib/python3.6/site-packages/torch/_utils.py", line 68, in _cuda with torch.cuda.device(device): "/anaconda3/lib/python3.6/site-packages/torch/cuda/__init__.py", line 225, in __enter__ self.prev_idx = torch._C._cuda_getDevice() RuntimeError: cuda runtime error (30) : unknown error at torch/csrc/cuda/Module.cpp:51
0.315152
2016-05-17 22:26:05,525 - ERROR - Exception from semantic completer (using general): Traceback (most recent call last): "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/ycmd/../ycmd/handlers.py", line 98, in GetCompletions .ComputeCandidates( request_data ) ) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/ycmd/../ycmd/completers/completer.py", line 218, in ComputeCandidates candidates = self._GetCandidatesFromSubclass( request_data ) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/ycmd/../ycmd/completers/completer.py", line 234, in _GetCandidatesFromSubclass raw_completions = self.ComputeCandidatesInner( request_data ) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/ycmd/../ycmd/completers/rust/rust_completer.py", line 239, in ComputeCandidatesInner completions = self._FetchCompletions( request_data ) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/ycmd/../ycmd/completers/rust/rust_completer.py", line 257, in _FetchCompletions return self._GetResponse( '/list_completions', request_data ) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/ycmd/../ycmd/completers/rust/rust_completer.py", line 171, in _GetResponse headers = extra_headers ) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/third_party/requests/requests/api.py", line 53, in request return session.request(method=method, url=url, **kwargs) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/third_party/requests/requests/sessions.py", line 468, in request resp = self.send(prep, **send_kwargs) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/third_party/requests/requests/sessions.py", line 576, in send r = adapter.send(request, **kwargs) "/home/bsabin/dotfiles/.vim/bundle/dein/.dein/third_party/ycmd/third_party/requests/requests/adapters.py", line 437, in send raise ConnectionError(e, request=request)ConnectionError: HTTPConnectionPool(host='127.0.0.1', port=46850): Max retries exceeded with url: /list_completions (Caused by NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at 0x7fee645d6d90>: Failed to establish a new connection: [Errno 111] Connection refused',))
Traceback (most recent call last): "...\snirf.py", line 3, in <module> snirf_intensity = read_raw_snirf('minimum_example.snirf') "...\mne-python\mne\io\snirf\_snirf.py", line 43, in read_raw_snirf return RawSNIRF(fname, preload, verbose) "<decorator-gen-248>", line 24, in __init__ "...\mne-python\mne\io\snirf\_snirf.py", line 112, in __init__ sources = [s.decode('UTF-8') for s in sources] TypeError: iteration over a 0-d array
0.302623
Traceback (most recent call last): "/usr/lib/python2.6/site-packages/salt/utils/templates.py", line 368, in render_jinja_tmpl output = template.render(**decoded_context) "/usr/lib64/python2.6/site-packages/jinja2/environment.py", line 669, in render return self.environment.handle_exception(exc_info, True) "<template>", line 1, in top-level template code "/usr/lib64/python2.6/site-packages/jinja2/environment.py", line 713, in make_module return TemplateModule(self, self.new_context(vars, shared, locals)) "/usr/lib64/python2.6/site-packages/jinja2/environment.py", line 769, in __init__ self._body_stream = list(template.root_render_func(context)) "/var/cache/salt/minion/files/base/install/fluentd/map.jinja", line 1, in top-level template code {% set fluentd = salt['grains.filter_by']({ "/usr/lib/python2.6/site-packages/salt/modules/grains.py", line 555, in filter_by val = salt.utils.traverse_dict_and_list(__grains__, grain, []) "/usr/lib/python2.6/site-packages/salt/utils/__init__.py", line 1547, in traverse_dict_and_list for each in key.split(delimiter): AttributeError: 'list' object has no attribute 'split'
Traceback (most recent call last): "C:\Users\annaz\PycharmProjects\newsalyze-backend\venv\lib\site-packages\pymagnitude\__init__.py", line 3238, in xOpen self.basevfs, name, flags, self, self.options)) "C:\Users\annaz\PycharmProjects\newsalyze-backend\venv\lib\site-packages\pymagnitude\__init__.py", line 2917, in __init__ raise RuntimeError("Invalid URL.") RuntimeError: Invalid URL.
0.328587
Traceback (most recent call last): "/usr/share/git-cola/lib/cola/models/gitrepo.py", line 215, in event self.emit(SIGNAL(e.signal), *e.data) AttributeError: 'QEvent' object has no attribute 'signal'
Traceback (most recent call last): "/usr/local/lib/python3.5/site-packages/dataflow_worker/batchworker.py", line 649, in do_work work_executor.execute() "/usr/local/lib/python3.5/site-packages/dataflow_worker/executor.py", line 178, in execute op.finish() "apache_beam/runners/worker/operations.py", line 874, in apache_beam.runners.worker.operations.PGBKCVOperation.finish "apache_beam/runners/worker/operations.py", line 876, in apache_beam.runners.worker.operations.PGBKCVOperation.finish "apache_beam/runners/worker/operations.py", line 887, in apache_beam.runners.worker.operations.PGBKCVOperation.output_key "apache_beam/runners/worker/operations.py", line 256, in apache_beam.runners.worker.operations.Operation.output "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive "dataflow_worker/shuffle_operations.py", line 188, in dataflow_worker.shuffle_operations.ReifyTimestampAndWindowsOperation.process "dataflow_worker/shuffle_operations.py", line 192, in dataflow_worker.shuffle_operations.ReifyTimestampAndWindowsOperation.process "apache_beam/runners/worker/operations.py", line 256, in apache_beam.runners.worker.operations.Operation.output "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive "dataflow_worker/shuffle_operations.py", line 151, in dataflow_worker.shuffle_operations.ShuffleWriteOperation.process "dataflow_worker/shuffle_operations.py", line 171, in dataflow_worker.shuffle_operations.ShuffleWriteOperation.process "/usr/local/lib/python3.5/site-packages/dataflow_worker/shuffle.py", line 620, in Write self.writer.Write(self.stream.getvalue()) "third_party/windmill/shuffle/python/shuffle_client.pyx", line 176, in shuffle_client.PyShuffleWriter.Write OSError: Shuffle write failed: b'INVALID_ARGUMENT: Shuffle key too large:1745410 > 1572864'
0.262541
Traceback (most recent call last): "demo.py", line 75, in <module> results = model.detect([image]) "/phd/pytorch-mask-rcnn/model.py", line 1598, in detect detections, mrcnn_mask = self.predict([molded_images, image_metas], mode='inference') "/phd/pytorch-mask-rcnn/model.py", line 1673, in predict detections = detection_layer(self.config, rpn_rois, mrcnn_class, mrcnn_bbox, image_metas) "/phd/pytorch-mask-rcnn/model.py", line 841, in detection_layer detections = refine_detections(rois, mrcnn_class, mrcnn_bbox, window, config) "/phd/pytorch-mask-rcnn/model.py", line 786, in refine_detections keep = torch.nonzero(keep_bool)[:,0] "/home/xyc/anaconda3/envs/map-seg-py35/lib/python3.5/site-packages/torch/autograd/variable.py", line 78, in __getitem__ return Index.apply(self, key) "/home/xyc/anaconda3/envs/map-seg-py35/lib/python3.5/site-packages/torch/autograd/_functions/tensor.py", line 89, in forward result = i.index(ctx.index) IndexError: trying to index 2 dimensions of a 0 dimensional tensor
Traceback (most recent call last): "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\extractor\youtube.py", line 1383, in _decrypt_signature func = self._extract_signature_function( "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\extractor\youtube.py", line 1297, in _extract_signature_function cache_res = res(test_string) "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\extractor\youtube.py", line 1360, in <lambda> return lambda s: initial_function([s]) "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\jsinterp.py", line 258, in resf res, abort = self.interpret_statement(stmt, local_vars) "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\jsinterp.py", line 56, in interpret_statement v = self.interpret_expression(expr, local_vars, allow_recursion) "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\jsinterp.py", line 91, in interpret_expression right_val = self.interpret_expression( "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\jsinterp.py", line 188, in interpret_expression x, abort = self.interpret_statement( "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\jsinterp.py", line 56, in interpret_statement v = self.interpret_expression(expr, local_vars, allow_recursion) "c:\users\jnbra\appdata\local\programs\python\python38\lib\site-packages\youtube_dl\jsinterp.py", line 211, in interpret_expression raise ExtractorError('Unsupported JS expression %r' % expr) youtube_dl.utils.ExtractorError: Unsupported JS expression '[1596180896,'; please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
0.131095
Traceback (most recent call last): "/local/Anaconda3/envs/tensorflow_gpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 1361, in _do_call return fn(*args) "/local/Anaconda3/envs/tensorflow_gpu/lib/python3.5/site-packages/tensorflow/python/client/session.py", line 1340, in _run_fn target_list, status, run_metadata) "/local/Anaconda3/envs/tensorflow_gpu/lib/python3.5/site-packages/tensorflow/python/framework/errors_impl.py", line 516, in __exit__ c_api.TF_GetCode(self.status.status)) tensorflow.python.framework.errors_impl.InvalidArgumentError: Assign requires shapes of both tensors to match. lhs shape= [1001] rhs shape= [1000]
2016-03-01 09:17:07,676 - ERROR - 19898:Dummy-507 - zato.server.service.reqresp.sio:22 - Conversion error, param:`<Integer at 0x7fd1da7e2d10 name:[nb_deleted]>`, param_name:`nb_deleted`, repr:`1`, type:`<type 'int'>`, e:`Traceback (most recent call last): "/opt/zato/2.0.7/zato-server/src/zato/server/service/reqresp/sio.py", line 336, in convert_sio value = param.convert(value, param_name, data_format, from_sio_to_external) "/opt/zato/2.0.7/zato-server/src/zato/server/service/reqresp/sio.py", line 91, in convert return self.serialize_dispatch[(from_sio_to_external, data_type)](value, param_name) if value else valueKeyError: (True, None)
0.187419
Traceback (most recent call last): "/var/www/weblate-env-dev/lib/python3.5/site-packages/django/template/base.py", line 470, in parse compile_func = self.tags[command] KeyError: 'static'
Traceback (most recent call last): "finalrecon.py", line 224, in <module> full_recon() "finalrecon.py", line 159, in full_recon dnsrec(domain, output, data) "/Tools/FinalRecon/modules/dns.py", line 19, in dnsrec pkt = q.send('8.8.8.8', 53, tcp='UDP') "/usr/lib/python3/dist-packages/dnslib/dns.py", line 365, in send sock.connect((dest,port)) ConnectionRefusedError: [Errno 111] Connection refused
0.1604
str: Traceback (most recent call last): "C:\bellagio_eclipse\eclipse-standard-kepler-SR2-RC2-win32\eclipse\plugins\org.python.pydev_3.3.3.201401272249\pysrc\pydevd_resolver.py", line 183, in _getPyDictionary attr = getattr(var, n) "C:\Users\skvenkatesh\Documents\robot-workspace\Ride\lib\robot\libraries\BuiltIn.py", line 2435, in _namespace return self._context.namespaceAttributeError: 'NoneType' object has no attribute 'namespace'
00:50:43,289 [salt.minion ][WARNING ] The minion function caused an exception: Traceback (most recent call last): "/home/epyx/salt/env/lib/python2.6/site-packages/salt/minion.py", line 277, in _thread_return ret['return'] = func(*args, **kw) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/modules/saltutil.py", line 73, in sync_modules return _sync('modules', env) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/modules/saltutil.py", line 32, in _sync cache.extend(__salt__['cp.cache_dir'](source, sub_env)) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/modules/cp.py", line 134, in cache_dir client = salt.fileclient.get_file_client(__opts__) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/fileclient.py", line 49, in get_file_client }.get(opts['file_client'], 'remote')(opts) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/fileclient.py", line 511, in __init__ self.auth = salt.crypt.SAuth(opts) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/crypt.py", line 349, in __init__ self.crypticle = self.__authenticate() "/home/epyx/salt/env/lib/python2.6/site-packages/salt/crypt.py", line 358, in __authenticate creds = self.sign_in() "/home/epyx/salt/env/lib/python2.6/site-packages/salt/crypt.py", line 263, in sign_in auth['aes'] = self.decrypt_aes(payload['aes']) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/crypt.py", line 180, in decrypt_aes key = self.get_keys() "/home/epyx/salt/env/lib/python2.6/site-packages/salt/crypt.py", line 142, in get_keys key = clean_old_key(self.rsa_path) "/home/epyx/salt/env/lib/python2.6/site-packages/salt/crypt.py", line 39, in clean_old_key mkey = RSA.load_key(rsa_path, callback=foo_pass) "/usr/lib/pymodules/python2.6/M2Crypto/RSA.py", line 347, in load_key return load_key_bio(bio, callback) "/usr/lib/pymodules/python2.6/M2Crypto/RSA.py", line 368, in load_key_bio rsa_error() "/usr/lib/pymodules/python2.6/M2Crypto/RSA.py", line 298, in rsa_error raise RSAError, m2.err_reason_error_string(m2.err_get_error())RSAError: no start line
0.109506
Traceback (most recent call last): "/home/pi/env/lib/python3.5/site-packages/googleapiclient/discovery_cache/file_cache.py", line 37, in <module> from oauth2client.locked_file import Locked ImportError: No module named 'oauth2client.locked_file'
Traceback (most recent call last): "sqlmap.py", line 99, in main start() "lib/controller/controller.py", line 594, in start action() "lib/controller/action.py", line 90, in action conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES) "plugins/dbms/mssqlserver/enumeration.py", line 60, in getPrivileges kb.data.cachedUsersPrivileges[user] = NoneTypeError: unhashable type: 'list'
0.174625
Traceback (most recent call last): "/usr/lib/python3/dist-packages/razer_daemon/misc/key_event_management.py", line 386, in key_action key_name = self.EVENT_MAP[key_id] KeyError: 11
Traceback (most recent call last): "/my/path/to/anaconda/envs/jupyterhub/bin/jupyterhub", line 3, in <module> from jupyterhub.app import main "/my/path/to/anaconda/envs/jupyterhub/lib/python3.5/site-packages/jupyterhub/app.py", line 35, in <module> from traitlets import (ImportError: cannot import name 'Unicode'
0.177657
Traceback (most recent call last): "C:\projects\tuf\tests\test_key_revocation_integration.py", line 189, in test_timestamp_key_revocation self.repository_updater.refresh() "c:\projects\tuf\tuf\client\updater.py", line 1094, in refresh self._update_metadata('timestamp', DEFAULT_TIMESTAMP_UPPERLENGTH) "c:\projects\tuf\tuf\client\updater.py", line 1786, in _update_metadata upperbound_filelength, version) "c:\projects\tuf\tuf\client\updater.py", line 1602, in _get_metadata_file raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors) NoWorkingMirrorError: No working mirror was found:
Traceback (most recent call last): "<stdin>", line 1, in <module>TypeError: 'FooFunction' object is not callable
0.248048
Traceback (most recent call last): "/usr/local/lib/python2.7/dist-packages/pyspider/libs/base_handler.py", line 187, in run_task result = self._run_task(task, response) "/usr/local/lib/python2.7/dist-packages/pyspider/libs/base_handler.py", line 166, in _run_task response.raise_for_status() "/usr/local/lib/python2.7/dist-packages/pyspider/libs/response.py", line 183, in raise_for_status raise http_error HTTPError: HTTP 599: gnutls_handshake() failed: Handshake failed
Traceback (most recent call last): "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/tools/bin/bcbio_nextgen.py", line 238, in <module> main(**kwargs) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/tools/bin/bcbio_nextgen.py", line 46, in main run_main(**kwargs) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/pipeline/main.py", line 45, in run_main fc_dir, run_info_yaml) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/pipeline/main.py", line 89, in _run_toplevel for xs in pipeline(config, run_info_yaml, parallel, dirs, samples): "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/pipeline/main.py", line 162, in variant2pipeline samples = joint.square_off(samples, run_parallel) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/variation/joint.py", line 142, in square_off "vrn_file", ["region", "sam_ref", "config"]) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/distributed/split.py", line 38, in grouped_parallel_split_combine parallel_fn(combine_name, combine_args) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/distributed/multi.py", line 28, in run_parallel return run_multicore(fn, items, config, parallel=parallel) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/distributed/multi.py", line 86, in run_multicore for data in joblib.Parallel(parallel["num_jobs"], batch_size=1, backend="multiprocessing")(joblib.delayed(fn)(*x) for x in items): "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/joblib/parallel.py", line 921, in __call__ if self.dispatch_one_batch(iterator): "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/joblib/parallel.py", line 759, in dispatch_one_batch self._dispatch(tasks) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/joblib/parallel.py", line 716, in _dispatch job = self._backend.apply_async(batch, callback=cb) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/joblib/_parallel_backends.py", line 182, in apply_async result = ImmediateResult(func) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/joblib/_parallel_backends.py", line 549, in __init__ self.results = batch() "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/joblib/parallel.py", line 225, in __call__ for func, args, kwargs in self.items] "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/utils.py", line 55, in wrapper return f(*args, **kwargs) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/distributed/multitasks.py", line 292, in concat_variant_files return vcfutils.concat_variant_files(*args) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/variation/vcfutils.py", line 371, in concat_variant_files out_file = _run_concat_variant_files_gatk4(input_file_list, out_file, config) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/variation/vcfutils.py", line 394, in _run_concat_variant_files_gatk4 
broad_runner.run_gatk(params) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/broad/__init__.py", line 365, in run_gatk log_error=log_error) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/provenance/do.py", line 26, in run _do_run(cmd, checks, log_stdout, env=env) "/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/lib/python2.7/site-packages/bcbio/provenance/do.py", line 106, in _do_run raise subprocess.CalledProcessError(exitcode, error_msg) subprocess.CalledProcessError: Command 'set -o pipefail; unset JAVA_HOME && export PATH=/export/home/ncit/external/a.mizeranschi/bcbio_nextgen/anaconda/bin:$PATH && gatk --java-options '-Xms454m -Xmx3181m -XX:+UseSerialGC -Djava.io.tmpdir=/export/home/ncit/external/a.mizeranschi/automated-VC-test/testingVC-merged/work/bcbiotx/tmptAzZBo' GatherVcfs -I /export/home/ncit/external/a.mizeranschi/automated-VC-test/testingVC-merged/work/joint/platypus-joint/testingVC/testingVC-joint-files.list -O /export/home/ncit/external/a.mizeranschi/automated-VC-test/testingVC-merged/work/bcbiotx/tmpZVfkX1/testingVC-joint.vcf.gz
0.311339
Traceback (most recent call last): "wdf.py", line 568, in <module> main() "wdf.py", line 442, in main ssl._create_default_https_context = ssl._create_unverified_contextAttributeError: 'module' object has no attribute '_create_unverified_context'
Traceback (most recent call last): "/usr/lib/python2.6/site-packages/Django-1.1.1-py2.6.egg/django/core/handlers/base.py", line 92, in get_response response = callback(request, _callback_args, *_callback_kwargs) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_app/views/language/view.py", line 58, in decorated_f return f(request, translation_project, _args, *_kwargs) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_app/views/language/view.py", line 68, in decorated_f return f(request, translation_project, _args, *_kwargs) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_translationproject/views.py", line 230, in tp_overview view_obj(request, translation_project, directory), "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_app/lib/view_handler.py", line 47, in **call** template_vars.update(form.dispatch(action, request, _args, *_kwargs)) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_app/lib/view_handler.py", line 68, in dispatch return handler(request, _args, *_kwargs) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_translationproject/views.py", line 481, in do_update translation_project.update_project(request) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_translationproject/models.py", line 279, in update_project oldstats, remotestats, newstats = self.update_file_from_version_control(store) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_translationproject/models.py", line 240, in update_file_from_version_control working_copy = store.file.store "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_store/fields.py", line 141, in _get_store self._update_store_cache() "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_store/fields.py", line 147, in _update_store_cache mod_info = self.getpomtime() "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_store/fields.py", line 116, in getpomtime file_stat = os.stat(self.realpath) "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_store/fields.py", line 133, in _get_cached_realpath return self._get_realpath() "/home/fwolff/download/translate/src/Pootle/local_apps/pootle_store/fields.py", line 126, in _get_realpath self._realpath = os.path.realpath(self.path) "/usr/lib/python2.6/site-packages/Django-1.1.1-py2.6.egg/django/db/models/fields/files.py", line 63, in _get_path self._require_file() "/usr/lib/python2.6/site-packages/Django-1.1.1-py2.6.egg/django/db/models/fields/files.py", line 46, in _require_file raise ValueError("The '%s' attribute has no file associated with it." % self.field.name)ValueError: The 'file' attribute has no file associated with it.
0.192195
Traceback (most recent call last): "test.py", line 22, in <module> main() "test.py", line 14, in main db = get_pickledb() "test.py", line 9, in get_pickledb db = pickledb.load(DB_FILEPATH, False) "/home/don/tmp/pickledb_robust/pickledb/pickledb.py", line 34, in load return pickledb(location, option) "/home/don/tmp/pickledb_robust/pickledb/pickledb.py", line 41, in __init__ self.load(location, option) "/home/don/tmp/pickledb_robust/pickledb/pickledb.py", line 49, in load self._loaddb() "/home/don/tmp/pickledb_robust/pickledb/pickledb.py", line 186, in _loaddb self.db = json.load(open(self.loco, 'rb')) "/usr/lib/python2.7/json/__init__.py", line 290, in load **kw) "/usr/lib/python2.7/json/__init__.py", line 338, in loads return _default_decoder.decode(s) "/usr/lib/python2.7/json/decoder.py", line 366, in decode obj, end = self.raw_decode(s, idx=_w(s, 0).end()) "/usr/lib/python2.7/json/decoder.py", line 384, in raw_decode raise ValueError("No JSON object could be decoded")ValueError: No JSON object could be decoded
Traceback (most recent call last): "InfernalWireless.py", line 82, in <module> db_setup.create_db(cur, db_setup.INFERNAL_DB, username, password) "/root/infernal-twin/Modules/db_setup.py", line 52, in create_db dbcurr.execute(grant_all) "/usr/lib/python2.7/dist-packages/MySQLdb/cursors.py", line 226, in execute self.errorhandler(self, exc, value) "/usr/lib/python2.7/dist-packages/MySQLdb/connections.py", line 36, in defaulterrorhandler raise errorvalue _mysql_exceptions.ProgrammingError: (1146, "Table 'mysql.user' doesn't exist")
0.307076
Traceback (most recent call last): "/usr/local/lib/python3.7/runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) "/usr/local/lib/python3.7/runpy.py", line 85, in _run_code exec(code, run_globals) "/elasticdl/python/worker/main.py", line 64, in <module> main() "/elasticdl/python/worker/main.py", line 60, in main worker.run() "/elasticdl/python/worker/worker.py", line 876, in run self._train_and_evaluate() "/elasticdl/python/worker/worker.py", line 825, in _train_and_evaluate self._process_save_model_task_if_needed() "/elasticdl/python/worker/worker.py", line 712, in _process_save_model_task_if_needed self._model, dataset "/elasticdl/python/common/model_handler.py", line 74, in get_model_to_export model._build_model_with_inputs(inputs=dataset, targets=None) AttributeError: 'DNNClassifier' object has no attribute '_build_model_with_inputs'
Traceback (most recent call last): "<pyshell#33>", line 1, in <module> pandas.Series({1j: 1, 2j: 2}) "c:\users\brenbarn\documents\python\extensions\pandas\pandas\core\series.py", line 444, in __new__ index = Index(sorted(data))TypeError: no ordering relation is defined for complex numbers`
0.282494
Traceback (most recent call last): "/Users/kfranz/continuum/conda/conda/cli/install.py", line 315, in install shortcuts=shortcuts) "/Users/kfranz/continuum/conda/conda/plan.py", line 467, in install_actions sys.exit("Error: 'conda' can only be installed into the root environment")SystemExit: Error: 'conda' can only be installed into the root environment
WARNING: Automatic signature extraction failed: Traceback (most recent call last): "/home/oliver/bin/youtube-dl/youtube_dl/extractor/youtube.py", line 974, in _decrypt_signature video_id, player_url, len(s) "/home/oliver/bin/youtube-dl/youtube_dl/extractor/youtube.py", line 373, in _extract_signature_function res = self._parse_sig_js(code) "/home/oliver/bin/youtube-dl/youtube_dl/extractor/youtube.py", line 441, in _parse_sig_js u'Initial JS player signature function name') "/home/oliver/bin/youtube-dl/youtube_dl/extractor/common.py", line 356, in _search_regex raise RegexNotFoundError(u'Unable to extract %s' % _name)youtube_dl.utils.RegexNotFoundError: Unable to extract Initial JS player signature function name; please report this issue on https://yt-dl.org/bug . Be sure to call youtube-dl with the --verbose flag and include its complete output. Make sure you are using the latest version; type youtube-dl -U to update.
0.092533
Traceback (most recent call last): "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\operators\bodyimport.py", line 14, in execute ImportBodyBinary() "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 89, in __init__ FetchServerData('getBodyMeshInfo', self.gotBodyInfo) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 14, in callback self.readyFunction(json_obj.data) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 147, in gotBodyInfo FetchServerData('getBodyVerticesBinary',self.gotVerticesData,True) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 169, in gotVerticesData FetchServerData('getBodyFacesBinary',self.gotFacesData,True) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 187, in gotFacesData FetchServerData('getBodyTextureCoordsBinary', self.gotTextureCoords, True) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 201, in 
gotTextureCoords FetchServerData('getBodyFaceUVMappingsBinary', self.gotFaceUVMappings, True) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 235, in gotFaceUVMappings self.afterMeshData() "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 429, in afterMeshData FetchServerData('getBodyMaterialInfo',self.gotBodyMaterialInfo) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 14, in callback self.readyFunction(json_obj.data) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 459, in gotBodyMaterialInfo FetchServerData('getSkeleton', self.gotSkeleton) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 14, in callback self.readyFunction(json_obj.data) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 537, in gotSkeleton FetchServerData('getProxiesInfo', self.gotProxiesInfo) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 14, in callback self.readyFunction(json_obj.data) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 543, in gotProxiesInfo self.importNextProxy() "C:\Users\AnotherMotion\AppData\Roaming\Blender 
Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_body_binary.py", line 551, in importNextProxy ImportProxyBinary(self.obj, self.name, self.proxiesInfo[self.nextProxyToImport], self.proxyLoaded, self.collection) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 122, in __init__ FetchServerData('getProxyVerticesBinary', self.gotVerticesData, expectBinary=True, params={ "uuid": self.proxyInfo["uuid"] }) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 163, in gotVerticesData FetchServerData('getProxyFacesBinary',self.gotFacesData, expectBinary=True, params={ "uuid": self.proxyInfo["uuid"] }) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 195, in gotFacesData FetchServerData('getProxyTextureCoordsBinary', self.gotTextureCoords, expectBinary=True, params={ "uuid": self.proxyInfo["uuid"] }) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 214, in gotTextureCoords FetchServerData('getProxyFaceUVMappingsBinary', self.gotFaceUVMappings, expectBinary=True, params={ "uuid": self.proxyInfo["uuid"] }) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender 
Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 12, in callback self.readyFunction(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 248, in gotFaceUVMappings self.afterMeshData() "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 374, in afterMeshData FetchServerData('getProxyMaterialInfo', self.gotProxyMaterialInfo, expectBinary=False, params={ "uuid": self.proxyInfo["uuid"] }) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 8, in __init__ self.executeJsonCall(expectBinaryResponse=expectBinary, params=params) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\sync_ops.py", line 19, in executeJsonCall self.callback(json_obj) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\fetch_server_data.py", line 14, in callback self.readyFunction(json_obj.data) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\import_proxy_binary.py", line 415, in gotProxyMaterialInfo createMakeSkinMaterial(matname, obj=self.obj, materialSettingsHash=data, importBlendMat=True, onlyBlendMat=self.blendMat) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\MH_Community\mh_sync\material.py", line 87, in createMakeSkinMaterial mhmat = MHMat(fileName=materialSettingsHash["material "]) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\makeskin\material.py", line 51, in __init__ self._parse (fileName) "C:\Users\AnotherMotion\AppData\Roaming\Blender Foundation\Blender\2.83\scripts\addons\makeskin\material.py", line 431, in _parse line = f.readline() UnicodeDecodeError: 'cp949' codec can't decode byte 0xe2 in position 559: illegal multibyte sequence
Traceback (most recent call last): "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttx.py", line 400, in main process(jobs, options) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttx.py", line 374, in process action(input, output, options) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/misc/loggingTools.py", line 367, in wrapper return func(*args, **kwds) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttx.py", line 285, in ttCompile ttf.importXML(input) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/ttFont.py", line 335, in importXML reader.read() "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/misc/xmlReader.py", line 48, in read self._parse (self.file) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/misc/xmlReader.py", line 73, in _parse parser.Parse(chunk, 0) "/Users/sysadmin/build/v3.6.6/Modules/pyexpat.c", line 468, in EndElement "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/misc/xmlReader.py", line 155, in _endElementHandler self.currentTable.fromXML(name, attrs, content, self.ttFont) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otBase.py", line 105, in fromXML self.table.fromXML(name, attrs, content, font) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otBase.py", line 767, in fromXML value = conv.xmlRead(attrs, content, font) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otConverters.py", line 448, in xmlRead table.fromXML(name, attrs, content, font) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otBase.py", line 767, in fromXML value = conv.xmlRead(attrs, content, font) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otConverters.py", line 448, in xmlRead table.fromXML(name, attrs, content, font) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otBase.py", line 764, in fromXML conv = self.getConverterByName(name) "/Users/Kostas/Documents/github-repos/Google/Commissioner/venv/lib/python3.6/site-packages/fontTools/ttLib/tables/otBase.py", line 804, in getConverterByName return self.convertersByName[self.Format][name] KeyError: 'AxisIndex'`
0.325675
Traceback (most recent call last): "/home/lasse/.virtualenvs/mopidy/bin/mopidy", line 9, in <module> load_entry_point('Mopidy==0.17.0a1', 'console_scripts', 'mopidy')() "/home/lasse/dev/mopidy/mopidy/__main__.py", line 81, in main start(proxied_config, enabled_extensions) "/home/lasse/dev/mopidy/mopidy/__main__.py", line 134, in start backends = start_backends(config, extensions, audio) "/home/lasse/dev/mopidy/mopidy/__main__.py", line 171, in start_backends backend = backend_class.start(config=config, audio=audio).proxy() "build/bdist.linux-x86_64/egg/pykka/actor.py", line 93, in start "/home/lasse/dev/mopidy/mopidy/backends/local/actor.py", line 26, in __init__ self.library = LocalLibraryProvider(backend=self) "/home/lasse/dev/mopidy/mopidy/backends/local/library.py", line 22, in __init__ self.refresh() "/home/lasse/dev/mopidy/mopidy/backends/local/library.py", line 35, in refresh tracks = parse_mpd_tag_cache(self._tag_cache_file, self._media_dir) "/home/lasse/dev/mopidy/mopidy/backends/local/translator.py", line 97, in parse_mpd_tag_cache key, value = line.split(b': ', 1)ValueError: need more than 1 value to unpack
An exception occurred in this state: Traceback (most recent call last): "/usr/lib/python2.7/dist-packages/salt/state.py", line 1533, in call **cdata['kwargs']) "/usr/lib/python2.7/dist-packages/salt/states/pkg.py", line 1093, in latest cur = __salt__['pkg.version'](*desired_pkgs, **kwargs) "/usr/lib/python2.7/dist-packages/salt/modules/aptpkg.py", line 300, in version return __salt__['pkg_resource.version'](*names, **kwargs) "/usr/lib/python2.7/dist-packages/salt/modules/pkg_resource.py", line 162, in version pkgs = __salt__['pkg.list_pkgs'](versions_as_list=True, **kwargs) "/usr/lib/python2.7/dist-packages/salt/modules/aptpkg.py", line 921, in list_pkgs virtpkgs_all = _get_virtual() "/usr/lib/python2.7/dist-packages/salt/modules/aptpkg.py", line 158, in _get_virtual apt_cache = apt.cache.Cache() "/usr/lib/python2.7/dist-packages/apt/cache.py", line 107, in __init__ self.open(progress) "/usr/lib/python2.7/dist-packages/apt/cache.py", line 155, in open self._list.read_main_list() SystemError: E:Type 'http://apt.postgresql.org/pub/repos/apt' is not known on line 63 in source list /etc/apt/sources.list
0.308923
Traceback (most recent call last): "/usr/bin/youku", line 202, in <module> main() "/usr/bin/youku", line 199, in main script_main('youku', youku_download, youku_download_playlist) "/Users/daoser/git/youku-lixian/common.py", line 287, in script_main download(url, merge=merge) "/usr/bin/youku", line 135, in youku_download youku_download_by_id(id2, title, output_dir, merge=merge) "/usr/bin/youku", line 128, in youku_download_by_id download_urls(urls, title, file_type_of_url(urls[0]), total_size, output_dir, merge=merge) "/Users/daoser/git/youku-lixian/common.py", line 226, in download_urls url_save(url, filepath, bar, refer=refer) "/Users/daoser/git/youku-lixian/common.py", line 75, in url_save response = urllib2.urlopen(request) "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 127, in urlopen return _opener.open(url, data, timeout) "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 404, in open response = self._open(req, data) "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 422, in _open '_open', req) "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 382, in _call_chain result = func(*args) "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 1214, in http_open return self.do_open(httplib.HTTPConnection, req) "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/urllib2.py", line 1184, in do_open raise URLError(err)urllib2.URLError: <urlopen error [Errno 61] Connection refused>
Traceback (most recent call last): "/usr/share/recon-ng/recon/core/module.py", line 289, in do_run self.module_run(*params) "/root/.recon-ng/modules/recon/domains-hosts/shodan_hostname.py", line 25, in module_run results = self.search_shodan_api(query, limit) "/usr/share/recon-ng/recon/mixins/search.py", line 129, in search_shodan_api if resp.json() == None: "/usr/lib/python3/dist-packages/requests/models.py", line 897, in json return complexjson.loads(self.text, **kwargs) "/usr/lib/python3/dist-packages/simplejson/__init__.py", line 518, in loads return _default_decoder.decode(s) "/usr/lib/python3/dist-packages/simplejson/decoder.py", line 370, in decode obj, end = self.raw_decode(s) "/usr/lib/python3/dist-packages/simplejson/decoder.py", line 400, in raw_decode return self.scan_once(s, idx=_w(s, idx).end()) simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
0.212818
Traceback (most recent call last): "<string>", line 1, in <module> "setup.py", line 25, in <module> import makeman # helper script "makeman.py", line 14, in <module> from zim.main import HelpCommand "zim/main/__init__.py", line 28, in <module> from zim.notebook import Notebook, Path, \ "zim/notebook/__init__.py", line 39, in <module> from .info import NotebookInfo, NotebookInfoList, \ "zim/notebook/info.py", line 19, in <module> from .notebook import NotebookConfig, _resolve_relative_config "zim/notebook/notebook.py", line 22, in <module> from zim.newfs import LocalFolder "zim/newfs/__init__.py", line 19, in <module> logger.warn(' system encoding is set to ASCII or Latin1, using UTF-8 instead') NameError: name 'logger' is not defined
Traceback (most recent call last): "/config/custom_components/sonoff/sonoff_cloud.py", line 147, in _api r = await coro "/usr/local/lib/python3.8/site-packages/aiohttp/client.py", line 480, in _request conn = await self._connector.connect( "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 523, in connect proto = await self._create_connection(req, traces, timeout) "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 858, in _create_connection _, proto = await self._create_direct_connection( "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 1004, in _create_direct_connection raise last_exc "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 980, in _create_direct_connection transp, proto = await self._wrap_create_connection( "/usr/local/lib/python3.8/site-packages/aiohttp/connector.py", line 943, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host eu-api.coolkit.cc:8080 ssl:default [Connect call failed ('52.57.118.192', 8080)]`
0.270188
Traceback (most recent call last): "/Users/orestes/Downloads/venv/py-backoffice/bin/flask", line 11, in <module> sys.exit(main()) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/flask/cli.py", line 507, in main cli.main(args=args, prog_name=name) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/flask/cli.py", line 374, in main return AppGroup.main(self, *args, **kwargs) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/core.py", line 697, in main rv = self.invoke(ctx) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/core.py", line 1066, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/core.py", line 1066, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/core.py", line 895, in invoke return ctx.invoke(self.callback, **ctx.params) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/core.py", line 535, in invoke return callback(*args, **kwargs) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/decorators.py", line 17, in new_func return f(get_current_context(), *args, **kwargs) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/flask/cli.py", line 251, in decorator return __ctx.invoke(f, *args, **kwargs) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/click/core.py", line 535, in invoke return callback(*args, **kwargs) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/flask_migrate/cli.py", line 134, in upgrade _upgrade(directory, revision, sql, tag, x_arg) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/flask_migrate/__init__.py", line 247, in upgrade command.upgrade(config, revision, sql=sql, tag=tag) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/command.py", line 254, in upgrade script.run_env() "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/base.py", line 421, in run_env util.load_python_file(self.dir, 'env.py') "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/util/pyfiles.py", line 93, in load_python_file module = load_module_py(module_id, path) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/util/compat.py", line 64, in load_module_py module_id, path).load_module(module_id) "<frozen importlib._bootstrap_external>", line 388, in _check_name_wrapper "<frozen importlib._bootstrap_external>", line 809, in load_module "<frozen importlib._bootstrap_external>", line 668, in load_module "<frozen importlib._bootstrap>", line 268, in _load_module_shim "<frozen importlib._bootstrap>", line 693, in _load "<frozen importlib._bootstrap>", line 673, in _load_unlocked "<frozen importlib._bootstrap_external>", line 665, in exec_module "<frozen importlib._bootstrap>", line 222, in _call_with_frames_removed "migrations/env.py", line 88, in <module> run_migrations_online() "migrations/env.py", line 81, in run_migrations_online context.run_migrations() "<string>", line 8, in run_migrations "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/runtime/environment.py", line 817, in run_migrations self.get_context().run_migrations(**kw) 
"/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/runtime/migration.py", line 320, in run_migrations for step in self._migrations_fn(heads, self): "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/command.py", line 243, in upgrade return script._upgrade_revs(revision, rev) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/base.py", line 338, in _upgrade_revs for script in reversed(list(revs)) "/Library/Frameworks/Python.framework/Versions/3.5/lib/python3.5/contextlib.py", line 77, in __exit__ self.gen.throw(type, value, traceback) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/base.py", line 174, in _catch_revision_errors compat.raise_from_cause(util.CommandError(resolution)) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/util/compat.py", line 117, in raise_from_cause reraise(type(exception), exception, tb=exc_tb, cause=exc_value) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/util/compat.py", line 110, in reraise raise value.with_traceback(tb) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/base.py", line 143, in _catch_revision_errors yield "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/base.py", line 333, in _upgrade_revs destination, current_rev, implicit_base=True) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 545, in iterate_revisions inclusive, assert_relative_length "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 490, in _relative_iterate inclusive=inclusive, implicit_base=implicit_base)) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 652, in _iterate_revisions uppers = util.dedupe_tuple(self.get_revisions(upper)) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 300, in get_revisions resolved_id, branch_label = self._resolve_revision_number(id_) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 441, in _resolve_revision_number current_head = self.get_current_head(branch_label) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 266, in get_current_head current_heads = self.filter_for_lineage(current_heads, branch_label) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 399, in filter_for_lineage tg for tg in targets "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 401, in <listcomp> tg, shares, include_dependencies=include_dependencies)] "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 415, in _shares_lineage in util.to_tuple(test_against_revs, default=()) "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 414, in <listcomp> for test_against_rev "/Users/orestes/Downloads/venv/py-backoffice/lib/python3.5/site-packages/alembic/script/revision.py", line 365, in _revision_for_ident ), resolved_id)alembic.util.exc.CommandError: Can't locate revision identified by 'models'
Traceback (most recent call last): "test2.py", line 2, in <module> from Tkinter import *ImportError: No module named Tkinter
0.302242
Traceback (most recent call last): "/opt/anaconda/python.app/Contents/lib/python2.7/multiprocessing/process.py", line 258, in _bootstrap self.run() "/opt/anaconda/python.app/Contents/lib/python2.7/multiprocessing/process.py", line 114, in run self._target(*self._args, **self._kwargs) "/opt/anaconda/python.app/Contents/lib/python2.7/multiprocessing/managers.py", line 558, in _run_server server.serve_forever() "/opt/anaconda/python.app/Contents/lib/python2.7/multiprocessing/managers.py", line 179, in serve_forever c = self.listener.accept() "/opt/anaconda/python.app/Contents/lib/python2.7/multiprocessing/connection.py", line 145, in accept c = self._listener.accept() "/opt/anaconda/python.app/Contents/lib/python2.7/multiprocessing/connection.py", line 275, in accept s, self._last_accepted = self._socket.accept() "/opt/anaconda/lib/python2.7/site-packages/gevent/socket.py", line 316, in accept self._wait(self._read_event) "/opt/anaconda/lib/python2.7/site-packages/gevent/socket.py", line 300, in _wait self.hub.wait(watcher) "/opt/anaconda/lib/python2.7/site-packages/gevent/hub.py", line 348, in wait result = waiter.get() "/opt/anaconda/lib/python2.7/site-packages/gevent/hub.py", line 575, in get return self.hub.switch() "/opt/anaconda/lib/python2.7/site-packages/gevent/hub.py", line 338, in switch return greenlet.switch(self)AssertionError: Impossible to call blocking function in the event loop callback
Traceback (most recent call last): "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main "__main__", fname, loader, pkg_name) "/usr/lib/python2.7/runpy.py", line 72, in _run_code exec code in run_globals "/home/frappe/frappe-bench/apps/frappe/frappe/utils/bench_helper.py", line 97, in <module> main() "/home/frappe/frappe-bench/apps/frappe/frappe/utils/bench_helper.py", line 18, in main click.Group(commands=commands)(prog_name='bench') "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/core.py", line 722, in __call__ return self.main(*args, **kwargs) "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/core.py", line 697, in main rv = self.invoke(ctx) "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/core.py", line 1066, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/core.py", line 1066, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/core.py", line 895, in invoke return ctx.invoke(self.callback, **ctx.params) "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/core.py", line 535, in invoke return callback(*args, **kwargs) "/home/frappe/frappe-bench/env/local/lib/python2.7/site-packages/click/decorators.py", line 17, in new_func return f(get_current_context(), *args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/commands/__init__.py", line 25, in _func ret = f(frappe._dict(ctx.obj), *args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/commands/site.py", line 222, in migrate migrate(context.verbose, rebuild_website=rebuild_website) "/home/frappe/frappe-bench/apps/frappe/frappe/migrate.py", line 39, in migrate frappe.modules.patch_handler.run_all() "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 29, in run_all if not run_single(patchmodule = patch): "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 63, in run_single return execute_patch(patchmodule, method, methodargs) "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 83, in execute_patch frappe.get_attr(patchmodule.split()[0] + ".execute")() "/home/frappe/frappe-bench/apps/erpnext/erpnext/patches/v11_0/set_update_field_and_value_in_workflow_state.py", line 17, in execute frappe.set_value('Workflow Document State', state.name, 'update_field', status_field) "/home/frappe/frappe-bench/apps/frappe/frappe/__init__.py", line 622, in set_value return frappe.client.set_value(doctype, docname, fieldname, value) "/home/frappe/frappe-bench/apps/frappe/frappe/client.py", line 136, in set_value doc.save() "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 260, in save return self._save(*args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 293, in _save self._validate_links() "/home/frappe/frappe-bench/apps/frappe/frappe/model/document.py", line 734, in _validate_links frappe.LinkValidationError) "/home/frappe/frappe-bench/apps/frappe/frappe/__init__.py", line 327, in throw msgprint(msg, raise_exception=exc, title=title, indicator='red') "/home/frappe/frappe-bench/apps/frappe/frappe/__init__.py", line 313, in msgprint _raise_exception() "/home/frappe/frappe-bench/apps/frappe/frappe/__init__.py", line 286, in _raise_exception raise raise_exception(msg) frappe.exceptions.LinkValidationError: Could not find Row #1: Action: Approve, Row #2: 
Action: Reject
0.328485
Traceback (most recent call last): "/opt/openstates/venv-pupa//bin/pupa", line 11, in <module> load_entry_point('pupa', 'console_scripts', 'pupa')() "/opt/openstates/venv-pupa/src/pupa/pupa/cli/__main__.py", line 68, in main subcommands[args.subcommand].handle(args, other) "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 260, in handle return self.do_handle(args, other, juris) "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 306, in do_handle report['scrape'] = self.do_scrape(juris, args, scrapers) "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 173, in do_scrape report[scraper_name] = scraper.do_scrape(**scrape_args) "/opt/openstates/venv-pupa/src/pupa/pupa/scrape/base.py", line 121, in do_scrape raise ScrapeError('no objects returned from {} scrape'.format(self.__class__.__name__))pupa.exceptions.ScrapeError: no objects returned from ARBillScraper scrape
Traceback (most recent call last): "/usr/lib/python3.6/runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) "/usr/lib/python3.6/runpy.py", line 85, in _run_code exec(code, run_globals) "/home/paul/.local/lib/python3.6/site-packages/twine/__main__.py", line 31, in <module> sys.exit(main()) "/home/paul/.local/lib/python3.6/site-packages/twine/__main__.py", line 25, in main return dispatch(sys.argv[1:]) "/home/paul/.local/lib/python3.6/site-packages/twine/cli.py", line 72, in dispatch main = registered_commands[args.command].load() "/home/paul/.local/lib/python3.6/site-packages/pkg_resources/__init__.py", line 2443, in load return self.resolve() "/home/paul/.local/lib/python3.6/site-packages/pkg_resources/__init__.py", line 2449, in resolve module = __import__(self.module_name, fromlist=['__name__'], level=0) "/home/paul/.local/lib/python3.6/site-packages/twine/commands/upload.py", line 20, in <module> from twine import settings "/home/paul/.local/lib/python3.6/site-packages/twine/settings.py", line 22, in <module> from twine import auth "/home/paul/.local/lib/python3.6/site-packages/twine/auth.py", line 6, in <module> import keyring "/home/paul/.local/lib/python3.6/site-packages/keyring/__init__.py", line 1, in <module> from .core import ( "/home/paul/.local/lib/python3.6/site-packages/keyring/core.py", line 192, in <module> init_backend() "/home/paul/.local/lib/python3.6/site-packages/keyring/core.py", line 96, in init_backend filter(limit, backend.get_all_keyring()), "/home/paul/.local/lib/python3.6/site-packages/keyring/util/__init__.py", line 22, in wrapper func.always_returns = func(*args, **kwargs) "/home/paul/.local/lib/python3.6/site-packages/keyring/backend.py", line 216, in get_all_keyring _load_plugins() "/home/paul/.local/lib/python3.6/site-packages/keyring/backend.py", line 207, in _load_plugins log.exception("Error initializing plugin %s." % ep) TypeError: not all arguments converted during string formatting
0.324476
Traceback (most recent call last): "./installinstallmacos.py", line 528, in <module> main() "./installinstallmacos.py", line 427, in main su_catalog_url = args.sucatalog_url AttributeError: 'Namespace' object has no attribute 'sucatalog_url'```
Traceback (most recent call last): "/home/ubuntu/.local/lib/python3.7/site-packages/urllib3/connection.py", line 141, in _new_conn (self.host, self.port), self.timeout, **extra_kw) "/home/ubuntu/.local/lib/python3.7/site-packages/urllib3/util/connection.py", line 83, in create_connection raise err "/home/ubuntu/.local/lib/python3.7/site-packages/urllib3/util/connection.py", line 73, in create_connection sock.connect(sa) ConnectionRefusedError: [Errno 111] Connection refused
0.243142
Traceback (most recent call last): "cellprofiler\gui\pipelinecontroller.pyc", line 2777, in do_step "cellprofiler\modules\maskobjects.pyc", line 244, in run "cellprofiler\measurements.pyc", line 1608, in get_imageValueError: Image was not binary
Traceback (most recent call last): "/home/tabbott/zulip/zerver/tests/test_users.py", line 431, in test_admin_user_can_change_profile_data self.assertEqual(True, False) AssertionError: True != False
0.235934
Traceback (most recent call last): "ice\decorators.py", line 9, in wrapped "ice\runners\ice_engine.py", line 131, in run "ice\runners\ice_engine.py", line 104, in create_backup "ice\backups.py", line 67, in create_backup_of_shortcuts "pysteam\shortcuts.py", line 30, in write_shortcuts "pysteam\_shortcut_generator.py", line 20, in to_string "pysteam\_shortcut_generator.py", line 27, in generate_array_string TypeError: object of type 'NoneType' has no len()
Traceback (most recent call last): "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/adapters.py", line 370, in send timeout=timeout "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/connectionpool.py", line 597, in urlopen _stacktrace=sys.exc_info()[2]) "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/util/retry.py", line 245, in increment raise six.reraise(type(error), error, _stacktrace) "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/packages/six.py", line 309, in reraise raise value.with_traceback(tb) "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/connectionpool.py", line 544, in urlopen body=body, headers=headers) "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/connectionpool.py", line 341, in _make_request self._validate_conn(conn) "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/connectionpool.py", line 761, in _validate_conn conn.connect() "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/connection.py", line 204, in connect conn = self._new_conn() "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/connection.py", line 134, in _new_conn (self.host, self.port), self.timeout, **extra_kw) "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/util/connection.py", line 88, in create_connection raise err "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests/packages/urllib3/util/connection.py", line 78, in create_connection sock.connect(sa)requests.packages.urllib3.exceptions.ProtocolError: ('Connection aborted.', OSError(64, 'Host is down'))
0.216847
Traceback (most recent call last): "hsutil/cocoa.pyc", line 47, in _async_run "dupeguru/app.pyc", line 229, in do "dupeguru/directories.pyc", line 111, in get_files "dupeguru/directories.pyc", line 75, in _get_files "dupeguru/directories.pyc", line 75, in _get_files "dupeguru/directories.pyc", line 67, in _get_files "dupeguru/fs.pyc", line 161, in get_files "hsutil/path.pyc", line 70, in __add__ "hsutil/path.pyc", line 61, in **new** "hsutil/path.pyc", line 39, in unicode_if_neededUnicodeDecodeError: 'utf8' codec can't decode bytes in position 5-7: invalid data
Traceback (most recent call last): "/Library/Python/2.6/site-packages/robotide/editor/editors.py", line 111, in OnIdle if self._last_shown_tooltip and self._mouse_outside_tooltip(): "/Library/Python/2.6/site-packages/robotide/editor/editors.py", line 117, in _mouse_outside_tooltip tx, ty = self._last_shown_tooltip.screen_positionAttributeError: 'MacRidePopupWindow' object has no attribute 'screen_position'
0.205706
Traceback (most recent call last): "/usr/local/lib/python3.6/dist-packages/discord/ext/commands/core.py", line 61, in wrapped ret = await coro(*args, **kwargs) "/usr/local/lib/python3.6/dist-packages/redbot/cogs/cleanup/cleanup.py", line 172, in user member = await commands.converter.MemberConverter().convert(ctx, user) AttributeError: module 'redbot.core.commands.converter' has no attribute 'MemberConverter'
Traceback (most recent call last): "/root/Desktop/zeus-scanner/var/search/selenium_search.py", line 268, in parse_search_results tor=tor, batch=batch, xforward=forward_for "/root/Desktop/zeus-scanner/var/search/selenium_search.py", line 80, in get_urls ff_display = Display(visible=0, size=(800, 600)) "/usr/local/lib/python2.7/dist-packages/pyvirtualdisplay/display.py", line 34, in __init__ self._obj = self.display_class( "/usr/local/lib/python2.7/dist-packages/pyvirtualdisplay/display.py", line 52, in display_class cls.check_installed() "/usr/local/lib/python2.7/dist-packages/pyvirtualdisplay/xvfb.py", line 38, in check_installed ubuntu_package=PACKAGE).check_installed()TypeError: __init__() got an unexpected keyword argument 'url'
0.261197
Traceback (most recent call last): "/usr/lib/python3.5/runpy.py", line 184, in _run_module_as_main "__main__", mod_spec) "/usr/lib/python3.5/runpy.py", line 85, in _run_code exec(code, run_globals) "/home/frappe/frappe-bench/apps/frappe/frappe/utils/bench_helper.py", line 97, in <module> main() "/home/frappe/frappe-bench/apps/frappe/frappe/utils/bench_helper.py", line 18, in main click.Group(commands=commands)(prog_name='bench') "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/core.py", line 764, in __call__ return self.main(*args, **kwargs) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/core.py", line 717, in main rv = self.invoke(ctx) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/core.py", line 1137, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/core.py", line 1137, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/core.py", line 956, in invoke return ctx.invoke(self.callback, **ctx.params) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/core.py", line 555, in invoke return callback(*args, **kwargs) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/click/decorators.py", line 17, in new_func return f(get_current_context(), *args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/commands/__init__.py", line 25, in _func ret = f(frappe._dict(ctx.obj), *args, **kwargs) "/home/frappe/frappe-bench/apps/frappe/frappe/commands/site.py", line 243, in migrate migrate(context.verbose, rebuild_website=rebuild_website, skip_failing=skip_failing) "/home/frappe/frappe-bench/apps/frappe/frappe/migrate.py", line 48, in migrate frappe.modules.patch_handler.run_all(skip_failing) "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 41, in run_all run_patch(patch) "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 30, in run_patch if not run_single(patchmodule = patch): "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 71, in run_single return execute_patch(patchmodule, method, methodargs) "/home/frappe/frappe-bench/apps/frappe/frappe/modules/patch_handler.py", line 91, in execute_patch frappe.get_attr(patchmodule.split()[0] + ".execute")() "/home/frappe/frappe-bench/apps/frappe/frappe/patches/v11_0/reload_and_rename_view_log.py", line 12, in execute frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`") "/home/frappe/frappe-bench/apps/frappe/frappe/database/database.py", line 171, in sql self._cursor.execute(query) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/cursors.py", line 170, in execute result = self._query(query) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/cursors.py", line 328, in _query conn.query(q) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/connections.py", line 517, in query self._affected_rows = self._read_query_result(unbuffered=unbuffered) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/connections.py", line 732, in _read_query_result result.read() "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/connections.py", line 1075, in read first_packet = self.connection._read_packet() "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/connections.py", line 684, in _read_packet packet.check_error() 
"/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/protocol.py", line 220, in check_error err.raise_mysql_exception(self._data) "/home/frappe/frappe-bench/env/lib/python3.5/site-packages/pymysql/err.py", line 109, in raise_mysql_exception raise errorclass(errno, errval) pymysql.err.ProgrammingError: (1146, "Table '_0df47e2828caf50e.tabView log' doesn't exist")
Traceback (most recent call last): "/opt/cobra/cobra/detection.py", line 176, in project_information for root, dirs, filenames in os.walk(absolute_path): "/usr/lib/python2.7/os.py", line 296, in walk for x in walk(new_path, topdown, onerror, followlinks): "/usr/lib/python2.7/os.py", line 296, in walk for x in walk(new_path, topdown, onerror, followlinks): "/usr/lib/python2.7/os.py", line 296, in walk for x in walk(new_path, topdown, onerror, followlinks): "/usr/lib/python2.7/os.py", line 286, in walk if isdir(join(top, name)): "/usr/lib/python2.7/posixpath.py", line 73, in join path += '/' + bUnicodeDecodeError: 'utf8' codec can't decode byte 0xc9 in position 13: invalid continuation byte
0.294851
Traceback (most recent call last): "C:\Python34\lib\site-packages\pip\basecommand.py", line 122, in main status = self.run(options, args) "C:\Python34\lib\site-packages\pip\commands\install.py", line 283, in run requirement_set.install(install_options, global_options, root=options.root_path) "C:\Python34\lib\site-packages\pip\req.py", line 1431, in install requirement.uninstall(auto_confirm=True) "C:\Python34\lib\site-packages\pip\req.py", line 598, in uninstall paths_to_remove.remove(auto_confirm) "C:\Python34\lib\site-packages\pip\req.py", line 1836, in remove renames(path, new_path) "C:\Python34\lib\site-packages\pip\util.py", line 295, in renames shutil.move(old, new) "C:\Python34\lib\shutil.py", line 535, in move os.unlink(src)PermissionError: [WinError 5] Access is denied: 'c:\python34\etc\bash_completion.d\youtube-dl.bash-completion'
Traceback (most recent call last): "/usr/bin/mailfy", line 11, in <module> load_entry_point('osrframework==0.20.1', 'console_scripts', 'mailfy')() "/usr/lib/python3/dist-packages/osrframework/mailfy.py", line 502, in main registered = process_mail_list_step_2(platforms=platforms, emails=emails) "/usr/lib/python3/dist-packages/osrframework/mailfy.py", line 349, in process_mail_list_step_2 entities = pla.get_info(query=e, mode="mailfy") "/usr/lib/python3/dist-packages/osrframework/utils/platforms.py", line 152, in get_info results = getattr(self, "do_{}".format(mode))(query) "/usr/lib/python3/dist-packages/osrframework/utils/platforms.py", line 352, in do_mailfy if self.check_mailfy(query, **kwargs): "/usr/lib/python3/dist-packages/osrframework/wrappers/instagram.py", line 128, in check_mailfy csrf_token = re.findall("csrf_token", r1.text)[0] IndexError: list index out of range
0.212289