Unnamed: 0 (int64, 0–10k) | function (string, lengths 79–138k) | label (string, 20 classes) | info (string, lengths 42–261)
---|---|---|---|
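Each row below pairs a Python function in which one exception type has been masked as `__HOLE__` (the `function` column) with the exception class that fills the mask (the `label` column) and a provenance path into the source corpus (the `info` column). A minimal sketch of iterating such rows, assuming the table has been exported to CSV; the file name `exception_holes.csv` is hypothetical, not an official artifact of this dataset:

```python
# Minimal sketch, assuming a CSV export of this table with the four
# columns shown above. "exception_holes.csv" is a hypothetical file name.
import pandas as pd

df = pd.read_csv("exception_holes.csv")
for _, row in df.iterrows():
    # `function` holds Python source with a masked exception type;
    # `label` names the exception class that belongs in the mask.
    assert "__HOLE__" in row["function"]
    print(row["info"], "->", row["label"])
```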
4,200 |
def __init__(self, features, mask=None, specs=['kl'], Ks=[3],
cores=None, algorithm=None, min_dist=None,
status_fn=True, progressbar=None, **flann_args):
if progressbar is None:
progressbar = status_fn is True
self.status_fn = status_fn = get_status_fn(status_fn)
self.progressbar = progressbar
if not isinstance(features, Features):
raise TypeError("features should be a Features instance")
n_bags = len(features)
dim = features.dim
self.features = features
if mask is None:
mask = np.ones((n_bags, n_bags), dtype=bool)
else:
mask = np.asarray(mask)
if mask.shape != (n_bags, n_bags):
msg = "mask should be n x n, not {}"
raise TypeError(msg.format(mask.shape))
elif mask.dtype.kind != 'b':
msg = "mask should be a boolean array, not {}"
raise TypeError(msg.format(mask.dtype))
self.mask = mask
self.Ks = Ks = np.array(np.squeeze(Ks), ndmin=1, dtype=np.int32)
if Ks.ndim != 1:
msg = "Ks should be 1-dim, got shape {}"
raise TypeError(msg.format(Ks.shape))
if Ks.min() < 1:
raise ValueError("Ks should be positive; got {}".format(Ks.min()))
if Ks.max() >= features._n_pts.min():
msg = "asked for K = {}, but there's a bag with only {} points"
raise ValueError(msg.format(Ks.max(), features._n_pts.min()))
self.max_K = Ks.max()
self.funcs, self.metas, self.n_meta_only = \
_parse_specs(specs, Ks, dim, features._n_pts)
self.specs = specs
self.save_all_Ks = False
for func in self.funcs:
if hasattr(func, 'k_needed'):
self.max_K = max(self.max_K, func.k_needed)
self.save_all_Ks = True
# TODO: could be more efficient about this
# eg if we need [1, 2, ..., 5] and 20, no need to save 6 to 19
# (but that won't happen with the current estimators)
if cores is None:
from multiprocessing import cpu_count
cores = cpu_count()
flann_args['cores'] = cores
if algorithm is None:
algorithm = pick_flann_algorithm(dim)
flann_args['algorithm'] = algorithm
try:
FLANNParameters(**flann_args)
except __HOLE__ as e:
msg = "_DivEstimator got an unexpected keyword argument:\n {}"
raise TypeError(msg.format(e))
self.flann_args = flann_args
if min_dist is None:
min_dist = default_min_dist(dim)
self.min_dist = min_dist
status_fn('kNN processing: K = {} on {!r}'.format(self.max_K, features))
|
AttributeError
|
dataset/ETHPy150Open dougalsutherland/py-sdm/sdm/np_divs.py/_DivEstimator.__init__
|
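As the row above illustrates, restoring runnable source from a row is a single substitution of the label into the mask. A minimal sketch of that convention; `fill_hole` is a hypothetical helper, not part of the dataset's tooling:

```python
# Substituting the label for the __HOLE__ mask recovers the original
# except clause, e.g. "except __HOLE__ as e:" -> "except AttributeError as e:".
def fill_hole(function_src: str, label: str) -> str:
    return function_src.replace("__HOLE__", label)
```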
4,201 |
def inner_run(self, *args, **options):
from django.conf import settings
from django.utils import translation
threading = options.get('use_threading')
shutdown_message = options.get('shutdown_message', '')
quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'
self.stdout.write("Validating models...\n\n")
self.validate(display_num_errors=True)
self.stdout.write((
"Django version %(version)s, using settings %(settings)r\n"
"Development server is running at http://%(addr)s:%(port)s/\n"
"Quit the server with %(quit_command)s.\n"
) % {
"version": self.get_version(),
"settings": settings.SETTINGS_MODULE,
"addr": self._raw_ipv6 and '[%s]' % self.addr or self.addr,
"port": self.port,
"quit_command": quit_command,
})
# django.core.management.base forces the locale to en-us. We should
# set it up correctly for the first request (particularly important
# in the "--noreload" case).
translation.activate(settings.LANGUAGE_CODE)
try:
handler = self.get_handler(*args, **options)
run(self.addr, int(self.port), handler,
ipv6=self.use_ipv6, threading=threading)
except WSGIServerException, e:
# Use helpful error messages instead of ugly tracebacks.
ERRORS = {
13: "You don't have permission to access that port.",
98: "That port is already in use.",
99: "That IP address can't be assigned-to.",
}
try:
error_text = ERRORS[e.args[0].args[0]]
except (__HOLE__, KeyError):
error_text = str(e)
sys.stderr.write(self.style.ERROR("Error: %s" % error_text) + '\n')
# Need to use an OS exit because sys.exit doesn't work in a thread
os._exit(1)
except KeyboardInterrupt:
if shutdown_message:
self.stdout.write("%s\n" % shutdown_message)
sys.exit(0)
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/core/management/commands/runserver.py/BaseRunserverCommand.inner_run
|
4,202 |
def _convert_permissions(permissions):
if isinstance(permissions, (int, float)):
return int(permissions)
try:
permissions = int(permissions, 8)
except (__HOLE__, TypeError):
LOG.warning("Fail to process permissions %s, assuming %s",
permissions, DEFAULT_PERMISSIONS)
permissions = DEFAULT_PERMISSIONS
return permissions
|
ValueError
|
dataset/ETHPy150Open openstack/cloudbase-init/cloudbaseinit/plugins/common/userdataplugins/cloudconfigplugins/write_files.py/_convert_permissions
|
4,203 |
def _process_content(content, encoding):
"""Decode the content taking into consideration the encoding."""
result = content
if six.PY3 and not isinstance(result, six.binary_type):
# At this point, content will be string, which is wrong for Python 3.
result = result.encode()
steps = _decode_steps(encoding)
if not steps:
LOG.error("Unknown encoding, doing nothing.")
return result
for mime_type in _decode_steps(encoding):
if mime_type == GZIP_MIME:
bufferio = io.BytesIO(result)
with gzip.GzipFile(fileobj=bufferio, mode='rb') as file_handle:
try:
result = file_handle.read()
except (IOError, ValueError):
LOG.exception("Fail to decompress gzip content.")
elif mime_type == BASE64_MIME:
try:
result = base64.b64decode(result)
except (ValueError, __HOLE__):
LOG.exception("Fail to decode base64 content.")
return result
|
TypeError
|
dataset/ETHPy150Open openstack/cloudbase-init/cloudbaseinit/plugins/common/userdataplugins/cloudconfigplugins/write_files.py/_process_content
|
4,204 |
def _write_file(path, content, permissions=DEFAULT_PERMISSIONS,
open_mode="wb"):
"""Writes a file with the given content.
Also the function sets the file mode as specified.
The function arguments are the following:
path: The absolute path to the location on the filesystem where
the file should be written.
content: The content that should be placed in the file.
permissions: The octal permissions set that should be given for
this file.
open_mode: The open mode used when opening the file.
"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
try:
os.makedirs(dirname)
except __HOLE__ as exc:
LOG.exception(exc)
return False
with open(path, open_mode) as file_handle:
file_handle.write(content)
file_handle.flush()
os.chmod(path, permissions)
return True
|
OSError
|
dataset/ETHPy150Open openstack/cloudbase-init/cloudbaseinit/plugins/common/userdataplugins/cloudconfigplugins/write_files.py/_write_file
|
4,205 |
def get_locale():
'''
Get the current system locale
CLI Example:
.. code-block:: bash
salt '*' locale.get_locale
'''
cmd = ''
if salt.utils.systemd.booted(__context__):
params = _parse_dbus_locale() if HAS_DBUS else _parse_localectl()
return params.get('LANG', '')
elif 'RedHat' in __grains__['os_family']:
cmd = 'grep "^LANG=" /etc/sysconfig/i18n'
elif 'Suse' in __grains__['os_family']:
cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
elif 'Debian' in __grains__['os_family']:
# this block only applies to Debian without systemd
cmd = 'grep "^LANG=" /etc/default/locale'
elif 'Gentoo' in __grains__['os_family']:
cmd = 'eselect --brief locale show'
return __salt__['cmd.run'](cmd).strip()
elif 'Solaris' in __grains__['os_family']:
cmd = 'grep "^LANG=" /etc/default/init'
else: # don't waste time on a failing cmd.run
raise CommandExecutionError('Error: Unsupported platform!')
try:
return __salt__['cmd.run'](cmd).split('=')[1].replace('"', '')
except __HOLE__:
return ''
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/localemod.py/get_locale
|
4,206 |
def avail(locale):
'''
Check if a locale is available.
.. versionadded:: 2014.7.0
CLI Example:
.. code-block:: bash
salt '*' locale.avail 'en_US.UTF-8'
'''
try:
normalized_locale = salt.utils.locales.normalize_locale(locale)
except __HOLE__:
log.error('Unable to validate locale "{0}"'.format(locale))
return False
avail_locales = __salt__['locale.list_avail']()
locale_exists = next((True for x in avail_locales
if salt.utils.locales.normalize_locale(x.strip()) == normalized_locale), False)
return locale_exists
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/localemod.py/avail
|
4,207 |
def gen_locale(locale, **kwargs):
'''
Generate a locale. Options:
.. versionadded:: 2014.7.0
:param locale: Any locale listed in /usr/share/i18n/locales or
/usr/share/i18n/SUPPORTED for Debian and Gentoo based distributions,
which require the charmap to be specified as part of the locale
when generating it.
verbose
Show extra warnings about errors that are normally ignored.
CLI Example:
.. code-block:: bash
salt '*' locale.gen_locale en_US.UTF-8
salt '*' locale.gen_locale 'en_IE.UTF-8 UTF-8' # Debian/Gentoo only
'''
on_debian = __grains__.get('os') == 'Debian'
on_ubuntu = __grains__.get('os') == 'Ubuntu'
on_gentoo = __grains__.get('os_family') == 'Gentoo'
on_suse = __grains__.get('os_family') == 'Suse'
on_solaris = __grains__.get('os_family') == 'Solaris'
if on_solaris: # all locales are pre-generated
return locale in __salt__['locale.list_avail']()
locale_info = salt.utils.locales.split_locale(locale)
# if the charmap has not been supplied, normalize by appending it
if not locale_info['charmap'] and not on_ubuntu:
locale_info['charmap'] = locale_info['codeset']
locale = salt.utils.locales.join_locale(locale_info)
if on_debian or on_gentoo: # file-based search
search = '/usr/share/i18n/SUPPORTED'
valid = __salt__['file.search'](search,
'^{0}$'.format(locale),
flags=re.MULTILINE)
else: # directory-based search
if on_suse:
search = '/usr/share/locale'
else:
search = '/usr/share/i18n/locales'
try:
valid = "{0}_{1}".format(locale_info['language'],
locale_info['territory']) in os.listdir(search)
except __HOLE__ as ex:
log.error(ex)
raise CommandExecutionError(
"Locale \"{0}\" is not available.".format(locale))
if not valid:
log.error(
'The provided locale "{0}" is not found in {1}'.format(locale, search))
return False
if os.path.exists('/etc/locale.gen'):
__salt__['file.replace'](
'/etc/locale.gen',
r'^\s*#\s*{0}\s*$'.format(locale),
'{0}\n'.format(locale),
append_if_not_found=True
)
elif on_ubuntu:
__salt__['file.touch'](
'/var/lib/locales/supported.d/{0}'.format(locale_info['language'])
)
__salt__['file.replace'](
'/var/lib/locales/supported.d/{0}'.format(locale_info['language']),
locale,
locale,
append_if_not_found=True
)
if salt.utils.which("locale-gen") is not None:
cmd = ['locale-gen']
if on_gentoo:
cmd.append('--generate')
if on_ubuntu:
cmd.append(salt.utils.locales.normalize_locale(locale))
else:
cmd.append(locale)
elif salt.utils.which("localedef") is not None:
cmd = ['localedef', '--force',
'-i', "{0}_{1}".format(locale_info['language'],
locale_info['territory']),
'-f', locale_info['codeset'],
'{0}_{1}.{2}'.format(locale_info['language'],
locale_info['territory'],
locale_info['codeset'])]
cmd.append(kwargs.get('verbose', False) and '--verbose' or '--quiet')
else:
raise CommandExecutionError(
'Command "locale-gen" or "localedef" was not found on this system.')
res = __salt__['cmd.run_all'](cmd)
if res['retcode']:
log.error(res['stderr'])
if kwargs.get('verbose'):
return res
else:
return res['retcode'] == 0
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/localemod.py/gen_locale
|
4,208 |
def updateTimer(self):
"""update the elapsed time of framer in current outline
use store.stamp for current time reference
"""
try:
self.elapsed = self.store.stamp - self.stamp
except __HOLE__: #one or both stamps are not numbers
self.stamp = self.store.stamp #makes self.stamp a number once store.stamp is
self.elapsed = 0.0 #elapsed zero until both numbers
self.updateElapsed()
|
TypeError
|
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/framing.py/Framer.updateTimer
|
4,209 |
def resolveOverLinks(self):
"""Starting with self.over climb over links resolving the links as needed along the way
"""
over = self.over
under = self
while over: #not beyond top
if not isinstance(over, Frame): #over is name of frame not ref so resolve
name = over #make copy for later
try:
over = Frame.Names[name] #get reference from Frame name registry
except __HOLE__:
raise excepting.ResolveError("Bad over link in outline", self.name, name)
if over == self: #check for loop
raise excepting.ResolveError("Outline overs create loop", self.name, under.name)
#attach under to over
if under.name in over.unders: #under name in unders as a result of script under cmd
index = over.unders.index(under.name) #index = position in list
over.unders[index] = under #replace under at position index
else: #otherwise append
over.unders.append(under) #add to unders
#maybe should error check for duplicates in unders here
under.over = over #assign valid over ref
else: #over is valid frame reference so don't need to resolve
if over == self: #check for loop
raise excepting.ResolveError("Outline overs create loop", self.name, under.name)
under = over
over = over.over #rise one level
|
KeyError
|
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/framing.py/Frame.resolveOverLinks
|
4,210 |
def start(self, *args, **kwargs):
"""Starts the instance.
:raises RuntimeError: has been already started.
:raises TypeError: :meth:`run` is not canonical.
"""
if self.is_running():
raise RuntimeError('Already started')
self._running = self.run(*args, **kwargs)
try:
yielded = next(self._running)
except __HOLE__:
raise TypeError('run() must yield just one time')
if yielded is not None:
raise TypeError('run() must yield without value')
|
StopIteration
|
dataset/ETHPy150Open what-studio/profiling/profiling/utils.py/Runnable.start
|
4,211 |
def stop(self):
"""Stops the instance.
:raises RuntimeError: has not been started.
:raises TypeError: :meth:`run` is not canonical.
"""
if not self.is_running():
raise RuntimeError('Not started')
running, self._running = self._running, None
try:
next(running)
except __HOLE__:
# expected.
pass
else:
raise TypeError('run() must yield just one time')
|
StopIteration
|
dataset/ETHPy150Open what-studio/profiling/profiling/utils.py/Runnable.stop
|
4,212 |
def detect(self, callback):
self.context.request.prevent_result_storage = True
try:
if not Detector.detect_task:
celery_tasks = CeleryTasks(
self.context.config.SQS_QUEUE_KEY_ID,
self.context.config.SQS_QUEUE_KEY_SECRET,
self.context.config.SQS_QUEUE_REGION, None
)
Detector.detect_task = celery_tasks.get_detect_task()
Detector.detect_task.delay('all', self.context.request.image_url, self.context.request.image_url)
except __HOLE__:
self.context.request.detection_error = True
Detector.detect_task = None
logger.exception('Celery Error')
finally:
callback([])
|
RuntimeError
|
dataset/ETHPy150Open thumbor/thumbor/thumbor/detectors/queued_sqs_detector/__init__.py/Detector.detect
|
4,213 |
def translate(s, a, b=None, c=None):
"""Return ``s`` where characters have been replaced or deleted.
SYNTAX
======
translate(s, None, deletechars):
all characters in ``deletechars`` are deleted
translate(s, map [,deletechars]):
all characters in ``deletechars`` (if provided) are deleted
then the replacements defined by map are made; if the keys
of map are strings then the longer ones are handled first.
Multicharacter deletions should have a value of ''.
translate(s, oldchars, newchars, deletechars)
all characters in ``deletechars`` are deleted
then each character in ``oldchars`` is replaced with the
corresponding character in ``newchars``
Examples
========
>>> from sympy.utilities.misc import translate
>>> from sympy.core.compatibility import unichr
>>> abc = 'abc'
>>> translate(abc, None, 'a')
'bc'
>>> translate(abc, {'a': 'x'}, 'c')
'xb'
>>> translate(abc, {'abc': 'x', 'a': 'y'})
'x'
>>> translate('abcd', 'ac', 'AC', 'd')
'AbC'
There is no guarantee that a unique answer will be
obtained if keys in a mapping overlap (i.e. are the same
length and have some identical sequences at the
beginning/end):
>>> translate(abc, {'ab': 'x', 'bc': 'y'}) in ('xc', 'ay')
True
"""
from sympy.core.compatibility import maketrans
# when support for Python 2 is dropped, this try/except can be
# removed
try:
''.translate(None, '')
py3 = False
except __HOLE__:
py3 = True
mr = {}
if a is None:
assert c is None
if not b:
return s
c = b
a = b = ''
else:
if type(a) is dict:
short = {}
for k in list(a.keys()):
if (len(k) == 1 and len(a[k]) == 1):
short[k] = a.pop(k)
mr = a
c = b
if short:
a, b = [''.join(i) for i in list(zip(*short.items()))]
else:
a = b = ''
else:
assert len(a) == len(b)
if py3:
if c:
s = s.translate(maketrans('', '', c))
s = replace(s, mr)
return s.translate(maketrans(a, b))
else:
# when support for Python 2 is dropped, this if-else-block
# can be replaced with the if-clause
if c:
c = list(c)
rem = {}
for i in range(-1, -1 - len(c), -1):
if ord(c[i]) > 255:
rem[c[i]] = ''
c.pop(i)
s = s.translate(None, ''.join(c))
s = replace(s, rem)
if a:
a = list(a)
b = list(b)
for i in range(-1, -1 - len(a), -1):
if ord(a[i]) > 255 or ord(b[i]) > 255:
mr[a.pop(i)] = b.pop(i)
a = ''.join(a)
b = ''.join(b)
s = replace(s, mr)
table = maketrans(a, b)
# s may have become unicode which uses the py3 syntax for translate
if type(table) is str and type(s) is str:
s = s.translate(table)
else:
s = s.translate(dict(
[(i, ord(c)) for i, c in enumerate(table)]))
return s
|
TypeError
|
dataset/ETHPy150Open sympy/sympy/sympy/utilities/misc.py/translate
|
4,214 |
def close(self):
"""Closes all files. If you put real :class:`file` objects into the
:attr:`files` dict you can call this method to automatically close
them all in one go.
"""
if self.closed:
return
try:
files = self.files.itervalues()
except __HOLE__:
files = ()
for f in files:
try:
f.close()
except Exception, e:
pass
self.closed = True
|
AttributeError
|
dataset/ETHPy150Open IanLewis/kay/kay/lib/werkzeug/test.py/EnvironBuilder.close
|
4,215 |
def handle_write(self):
while True:
with self.deque_lock:
try:
next_msg = self.deque.popleft()
except __HOLE__:
self._writable = False
return
try:
sent = self.send(next_msg)
self._readable = True
except socket.error as err:
if (err.args[0] in NONBLOCKING):
with self.deque_lock:
self.deque.appendleft(next_msg)
else:
self.defunct(err)
return
else:
if sent < len(next_msg):
with self.deque_lock:
self.deque.appendleft(next_msg[sent:])
if sent == 0:
return
|
IndexError
|
dataset/ETHPy150Open datastax/python-driver/cassandra/io/asyncorereactor.py/AsyncoreConnection.handle_write
|
4,216 |
def _set_attributes(self):
self._attributes = {}
all_attributes = list(_get_all_attributes(self._root_node))
for key, value in all_attributes:
# commented since enketo forms may have the template attribute in
# multiple xml tags and I don't see the harm in overriding
# attributes at this point
try:
assert key not in self._attributes
except __HOLE__:
import logging
logger = logging.getLogger("console_logger")
logger.debug("Skipping duplicate attribute: %s"
" with value %s" % (key, value))
logger.debug(str(all_attributes))
else:
self._attributes[key] = value
|
AssertionError
|
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/apps/logger/xform_instance_parser.py/XFormInstanceParser._set_attributes
|
4,217 |
def invalidate(self, func, *args, **kwargs):
"""Invalidate a cache decorated function. You must call this with
the same positional and keyword arguments as what you did when you
call the decorated function, otherwise the cache will not be deleted.
The usage is simple::
@cache.cache()
def load(name, limit):
return load_from_database(name, limit)
rv = load('foo', limit=5)
cache.invalidate(load, 'foo', limit=5)
:param func: decorated function to invalidate
:param args: same positional arguments as you call the function
:param kwargs: same keyword arguments as you call the function
:return: whether it is invalidated or not
"""
try:
cache_params = func.__rc_cache_params__
except __HOLE__:
raise TypeError('Attempted to invalidate a function that is'
' not cache decorated')
key_prefix = cache_params['key_prefix']
cache_args = args
include_self = cache_params.get('include_self', False)
if include_self:
instance_self = getattr(func, '__self__', None)
if instance_self:
cache_args = tuple([instance_self] + list(args))
cache_key = generate_key_for_cached_func(
key_prefix, func, *cache_args, **kwargs)
return self.delete(cache_key)
|
AttributeError
|
dataset/ETHPy150Open fengsp/rc/rc/cache.py/BaseCache.invalidate
|
4,218 |
def rfc3339(dt_obj):
'''
dt_obj: datetime object or string
The filter use `datetime.datetime.isoformat()`, which is in ISO 8601
format, not in RFC 3339 format, but they have a lot in common, so I used
ISO 8601 format directly.
'''
if isinstance(dt_obj, datetime.datetime):
pass
elif isinstance(dt_obj, basestring):
for fmt in ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M'):
try:
dt_obj = datetime.datetime.strptime(dt_obj, fmt)
except __HOLE__:
pass
else:
break
else:
raise ValueError('can not parse datetime {0}'.format(dt_obj))
else:
raise ValueError('{0} is not datetime object or string'.format(dt_obj))
# make sure the dt_obj is local time
if not dt_obj.tzinfo:
tz = tzlocal.get_localzone()
dt_obj = tz.localize(dt_obj)
# remove microsecond
dt_obj = dt_obj.replace(microsecond=0)
return dt_obj.isoformat()
|
ValueError
|
dataset/ETHPy150Open tankywoo/simiki/simiki/jinja_exts.py/rfc3339
|
4,219 |
def __getattr__(self, name):
try:
return self._fields[name].value
except __HOLE__:
raise AttributeError('No attribute %s' % name)
|
KeyError
|
dataset/ETHPy150Open correl/Transmission-XBMC/resources/lib/transmissionrpc/torrent.py/Torrent.__getattr__
|
4,220 |
def __getattr__(self, name):
if name in self._PROXY_FUNCS:
try:
return getattr(self._adapter, name)
except __HOLE__:
pass
raise AttributeError(name)
|
AttributeError
|
dataset/ETHPy150Open openstack/requests-mock/requests_mock/mocker.py/MockerCore.__getattr__
|
4,221 |
def _get_ipaddress(node):
"""Adds the ipaddress attribute to the given node object if not already
present and it is correctly given by ohai
Returns True if ipaddress is added, False otherwise
"""
if "ipaddress" not in node:
with settings(hide('stdout'), warn_only=True):
output = sudo('ohai -l warn ipaddress')
if output.succeeded:
try:
node['ipaddress'] = json.loads(output)[0]
except __HOLE__:
abort("Could not parse ohai's output for ipaddress"
":\n {0}".format(output))
return True
return False
|
ValueError
|
dataset/ETHPy150Open tobami/littlechef/littlechef/chef.py/_get_ipaddress
|
4,222 |
def pop(self, num=1):
"""
Pops values from storage
"""
values = []
for i in range(0, num):
try:
values.append(self._list.pop(0))
except __HOLE__:
break
return values
|
IndexError
|
dataset/ETHPy150Open bbrodriges/pholcidae/pholcidae2.py/SyncStorage.pop
|
4,223 |
def _saferound(value, decimal_places):
"""
Rounds a float value off to the desired precision
"""
try:
f = float(value)
except __HOLE__:
return ''
format = '%%.%df' % decimal_places
return format % f
|
ValueError
|
dataset/ETHPy150Open eyeseast/python-tablefu/table_fu/formatting.py/_saferound
|
4,224 |
def dollar_signs(value, failure_string='N/A'):
"""
Converts an integer into the corresponding number of dollar sign symbols.
If the submitted value isn't a string, returns the `failure_string` keyword
argument.
Meant to emulate the illustration of price range on Yelp.
"""
try:
count = int(value)
except __HOLE__:
return failure_string
string = ''
for i in range(0, count):
string += '$'
return string
|
ValueError
|
dataset/ETHPy150Open eyeseast/python-tablefu/table_fu/formatting.py/dollar_signs
|
4,225 |
def percentage(value, decimal_places=1, multiply=True, failure_string='N/A'):
"""
Converts a floating point value into a percentage value.
Number of decimal places set by the `decimal_places` kwarg. Default is one.
By default the number is multiplied by 100. You can prevent it from doing
that by setting the `multiply` keyword argument to False.
If the submitted value isn't a string, returns the `failure_string` keyword
argument.
"""
try:
value = float(value)
except __HOLE__:
return failure_string
if multiply:
value = value * 100
return _saferound(value, decimal_places) + '%'
|
ValueError
|
dataset/ETHPy150Open eyeseast/python-tablefu/table_fu/formatting.py/percentage
|
4,226 |
def percent_change(value, decimal_places=1, multiply=True, failure_string='N/A'):
"""
Converts a floating point value into a percentage change value.
Number of decimal places set by the `precision` kwarg. Default is one.
Non-floats are assumed to be zero division errors and are presented as
'N/A' in the output.
By default the number is multiplied by 100. You can prevent it from doing
that by setting the `multiply` keyword argument to False.
"""
try:
f = float(value)
if multiply:
f = f * 100
except __HOLE__:
return failure_string
s = _saferound(f, decimal_places)
if f > 0:
return '+' + s + '%'
else:
return s + '%'
|
ValueError
|
dataset/ETHPy150Open eyeseast/python-tablefu/table_fu/formatting.py/percent_change
|
4,227 |
def ratio(value, decimal_places=0, failure_string='N/A'):
"""
Converts a floating point value a X:1 ratio.
Number of decimal places set by the `precision` kwarg. Default is one.
"""
try:
f = float(value)
except __HOLE__:
return failure_string
return _saferound(f, decimal_places) + ':1'
|
ValueError
|
dataset/ETHPy150Open eyeseast/python-tablefu/table_fu/formatting.py/ratio
|
4,228 |
def __init__(self, *args, **kwargs):
try:
self.search_fields = kwargs.pop('search_fields')
except __HOLE__:
pass
super(LinkSearchField, self).__init__(*args, **kwargs)
|
KeyError
|
dataset/ETHPy150Open jrief/djangocms-cascade/cmsplugin_cascade/link/fields.py/LinkSearchField.__init__
|
4,229 |
def main():
filename = os.path.join('hdf5', 'example_05.hdf5')
env = Environment(trajectory='Example_05_Euler_Integration',
filename=filename,
file_title='Example_05_Euler_Integration',
overwrite_file=True,
comment='Go for Euler!')
traj = env.trajectory
trajectory_name = traj.v_name
# 1st a) phase parameter addition
add_parameters(traj)
# 1st b) phase preparation
# We will add the differential equation (well, its source code only) as a derived parameter
traj.f_add_derived_parameter(FunctionParameter,'diff_eq', diff_lorenz,
comment='Source code of our equation!')
# We want to explore some initial conditions
traj.f_explore({'initial_conditions' : [
np.array([0.01,0.01,0.01]),
np.array([2.02,0.02,0.02]),
np.array([42.0,4.2,0.42])
]})
# 3 different conditions are enough for an illustrative example
# 2nd phase let's run the experiment
# We pass `euler_scheme` as our top-level simulation function and
# the Lorenz equation 'diff_lorenz' as an additional argument
env.run(euler_scheme, diff_lorenz)
# We don't have a 3rd phase of post-processing here
# 4th phase analysis.
# I would recommend to do post-processing completely independent from the simulation,
# but for simplicity let's do it here.
# Let's assume that we start all over again and load the entire trajectory new.
# Yet, there is an error within this approach, do you spot it?
del traj
traj = Trajectory(filename=filename)
# We will only fully load parameters and derived parameters.
# Results will be loaded manually later on.
try:
# However, this will fail because our trajectory does not know how to
# build the FunctionParameter. You have seen this coming, right?
traj.f_load(name=trajectory_name, load_parameters=2, load_derived_parameters=2,
load_results=1)
except __HOLE__ as e:
print('That didn\'t work, I am sorry: %s ' % str(e))
# Ok, let's try again but this time with adding our parameter to the imports
traj = Trajectory(filename=filename,
dynamically_imported_classes=FunctionParameter)
# Now it works:
traj.f_load(name=trajectory_name, load_parameters=2, load_derived_parameters=2,
load_results=1)
#For the fun of it, let's print the source code
print('\n ---------- The source code of your function ---------- \n %s' % traj.diff_eq)
# Let's get the exploration array:
initial_conditions_exploration_array = traj.f_get('initial_conditions').f_get_range()
# Now let's plot our simulated equations for the different initial conditions:
# We will iterate through the run names
for idx, run_name in enumerate(traj.f_get_run_names()):
#Get the result of run idx from the trajectory
euler_result = traj.results.f_get(run_name).euler_evolution
# Now we manually need to load the result. Actually the results are not so large and we
# could load them all at once. But for demonstration we do as if they were huge:
traj.f_load_item(euler_result)
euler_data = euler_result.data
#Plot fancy 3d plot
fig = plt.figure(idx)
ax = fig.gca(projection='3d')
x = euler_data[:,0]
y = euler_data[:,1]
z = euler_data[:,2]
ax.plot(x, y, z, label='Initial Conditions: %s' % str(initial_conditions_exploration_array[idx]))
plt.legend()
plt.show()
# Now we free the data again (because we assume its huuuuuuge):
del euler_data
euler_result.f_empty()
# You have to click through the images to stop the example_05 module!
# Finally disable logging and close all log-files
env.disable_logging()
|
ImportError
|
dataset/ETHPy150Open SmokinCaterpillar/pypet/examples/example_05_custom_parameter.py/main
|
4,230 |
@property
def full_filepath(self):
if self.filepath:
default_storage = get_storage_class()()
try:
return default_storage.path(self.filepath)
except __HOLE__:
# read file from s3
name, ext = os.path.splitext(self.filepath)
tmp = NamedTemporaryFile(suffix=ext, delete=False)
f = default_storage.open(self.filepath)
tmp.write(f.read())
tmp.close()
return tmp.name
return None
|
NotImplementedError
|
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/apps/viewer/models/export.py/Export.full_filepath
|
4,231 |
def unmarshal(self, v):
"""
Convert the value from Strava API format to useful python representation.
If the value does not appear in the choices attribute we log an error rather
than raising an exception as this may be caused by a change to the API upstream
so we want to fail gracefully.
"""
try:
return self.choices[v]
except __HOLE__:
self.log.warning("No such choice {0} for field {1}.".format(v, self))
# Just return the value from the API
return v
|
KeyError
|
dataset/ETHPy150Open hozn/stravalib/stravalib/attributes.py/ChoicesAttribute.unmarshal
|
4,232 |
def __init__(self,
config,
module_dict,
app_code_path,
imp_module=imp,
os_module=os,
dummy_thread_module=dummy_thread,
pickle_module=pickle):
"""Initializer.
Args:
config: AppInfoExternal instance representing the parsed app.yaml file.
module_dict: Module dictionary to use for managing system modules.
Should be sys.modules.
app_code_path: The absolute path to the application code on disk.
imp_module, os_module, dummy_thread_module, etc.: References to
modules that exist in the dev_appserver that must be used by this class
in order to function, even if these modules have been unloaded from
sys.modules.
"""
self._config = config
self._module_dict = module_dict
self._imp = imp_module
self._os = os_module
self._dummy_thread = dummy_thread_module
self._pickle = pickle
self._indent_level = 0
self._app_code_path = app_code_path
self._white_list_c_modules = list(self._WHITE_LIST_C_MODULES)
self._white_list_partial_modules = dict(self._WHITE_LIST_PARTIAL_MODULES)
self._enabled_modules = []
if self._config and self._config.runtime == 'python27':
self._white_list_c_modules.extend(self._PY27_ALLOWED_MODULES)
self._white_list_partial_modules['os'] = (
list(self._white_list_partial_modules['os']) +
['getpid', 'getuid', 'sys'])
for k in self._white_list_partial_modules.keys():
if k.startswith('Crypto'):
del self._white_list_partial_modules[k]
webob_path = os.path.join(SDK_ROOT, 'lib', 'webob-1.1.1')
if webob_path not in sys.path:
sys.path.insert(1, webob_path)
for libentry in self._config.GetAllLibraries():
self._enabled_modules.append(libentry.name)
extra = self.__PY27_OPTIONAL_ALLOWED_MODULES.get(libentry.name)
logging.debug('Enabling %s: %r', libentry.name, extra)
if extra:
self._white_list_c_modules.extend(extra)
if libentry.name == 'django':
if 'django' not in self._module_dict:
version = libentry.version
if version == 'latest':
django_library = appinfo._NAME_TO_SUPPORTED_LIBRARY['django']
version = django_library.non_deprecated_versions[-1]
if google.__name__.endswith('3'):
try:
__import__('django.v' + version.replace('.', '_'))
continue
except __HOLE__:
sys.modules.pop('django', None)
sitedir = os.path.join(SDK_ROOT,
'lib',
'django-%s' % version)
if os.path.isdir(sitedir):
logging.debug('Enabling Django version %s at %s',
version, sitedir)
sys.path[:] = [dirname
for dirname in sys.path
if not dirname.startswith(os.path.join(
SDK_ROOT, 'lib', 'django'))]
sys.path.insert(1, sitedir)
else:
logging.warn('Enabling Django version %s (no directory found)',
version)
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver_import_hook.py/HardenedModulesHook.__init__
|
4,233 |
@Trace
def FindModuleRestricted(self,
submodule,
submodule_fullname,
search_path):
"""Locates a module while enforcing module import restrictions.
Args:
submodule: The short name of the submodule (i.e., the last section of
the fullname; for 'foo.bar' this would be 'bar').
submodule_fullname: The fully qualified name of the module to find (e.g.,
'foo.bar').
search_path: List of paths to search for to find this module. Should be
None if the current sys.path should be used.
Returns:
Tuple (source_file, pathname, description) where:
source_file: File-like object that contains the module; in the case
of packages, this will be None, which implies to look at __init__.py.
pathname: String containing the full path of the module on disk.
description: Tuple returned by imp.find_module().
However, in the case of an import using a path hook (e.g. a zipfile),
source_file will be a PEP-302-style loader object, pathname will be None,
and description will be a tuple filled with None values.
Raises:
ImportError exception if the requested module was found, but importing
it is disallowed.
CouldNotFindModuleError exception if the request module could not even
be found for import.
"""
if search_path is None:
search_path = [None] + sys.path
search_path += ['/usr/local/lib/python2.7/dist-packages/lxml-3.2.3-py2.7-linux-x86_64.egg']
py27_optional = False
py27_enabled = False
topmodule = None
if self._config and self._config.runtime == 'python27':
topmodule = submodule_fullname.split('.')[0]
if topmodule in self.__PY27_OPTIONAL_ALLOWED_MODULES:
py27_optional = True
py27_enabled = topmodule in self._enabled_modules
elif topmodule == 'Crypto':
py27_optional = True
py27_enabled = 'pycrypto' in self._enabled_modules
import_error = None
for path_entry in search_path:
result = self.FindPathHook(submodule, submodule_fullname, path_entry)
if result is not None:
source_file, pathname, description = result
if description == (None, None, None):
return result
suffix, mode, file_type = description
try:
if (file_type not in (self._imp.C_BUILTIN, self._imp.C_EXTENSION)):
pkg_pathname = pathname
if file_type == self._imp.PKG_DIRECTORY:
pkg_pathname = os.path.join(pkg_pathname, '__init__.py')
# AppScale: disables the whitelist and lets you import anything
# installed, as long as it's in your path.
#if not FakeFile.IsFileAccessible(
# pkg_pathname, py27_optional=py27_optional):
# error_message = 'Access to module file denied: %s' % pathname
# logging.debug(error_message)
# raise ImportError(error_message)
if (file_type not in self._ENABLED_FILE_TYPES and
submodule not in self._white_list_c_modules):
error_message = ('Could not import "%s": Disallowed C-extension '
'or built-in module' % submodule_fullname)
logging.debug(error_message)
raise ImportError(error_message)
if (py27_optional and not py27_enabled and
not pathname.startswith(self._app_code_path)):
error_message = ('Third party package %s not enabled.' % topmodule)
logging.debug(error_message)
raise ImportError(error_message)
return source_file, pathname, description
except __HOLE__, e:
import_error = e
if py27_optional and submodule_fullname == topmodule:
if py27_enabled:
msg = ('Third party package %s was enabled in app.yaml '
'but not found on import. You may have to download '
'and install it.' % topmodule)
else:
msg = ('Third party package %s must be included in the '
'"libraries:" clause of your app.yaml file '
'in order to be imported.' % topmodule)
logging.debug(msg)
raise Py27OptionalModuleError(msg)
if import_error:
raise import_error
self.log('Could not find module "%s"', submodule_fullname)
raise CouldNotFindModuleError()
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver_import_hook.py/HardenedModulesHook.FindModuleRestricted
|
4,234 |
def FindPathHook(self, submodule, submodule_fullname, path_entry):
"""Helper for FindModuleRestricted to find a module in a sys.path entry.
Args:
submodule:
submodule_fullname:
path_entry: A single sys.path entry, or None representing the builtins.
Returns:
Either None (if nothing was found), or a triple (source_file, path_name,
description). See the doc string for FindModuleRestricted() for the
meaning of the latter.
"""
if path_entry is None:
if submodule_fullname in sys.builtin_module_names:
try:
result = self._imp.find_module(submodule)
except ImportError:
pass
else:
source_file, pathname, description = result
suffix, mode, file_type = description
if file_type == self._imp.C_BUILTIN:
return result
return None
if path_entry in sys.path_importer_cache:
importer = sys.path_importer_cache[path_entry]
else:
importer = None
for hook in sys.path_hooks:
try:
importer = hook(path_entry)
break
except __HOLE__:
pass
sys.path_importer_cache[path_entry] = importer
if importer is None:
try:
return self._imp.find_module(submodule, [path_entry])
except ImportError:
pass
else:
loader = importer.find_module(submodule_fullname)
if loader is not None:
return (loader, None, (None, None, None))
return None
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver_import_hook.py/HardenedModulesHook.FindPathHook
|
4,235 |
@Trace
def FindAndLoadModule(self,
submodule,
submodule_fullname,
search_path):
"""Finds and loads a module, loads it, and adds it to the module dictionary.
Args:
submodule: Name of the module to import (e.g., baz).
submodule_fullname: Full name of the module to import (e.g., foo.bar.baz).
search_path: Path to use for searching for this submodule. For top-level
modules this should be None; otherwise it should be the __path__
attribute from the parent package.
Returns:
A new module instance that has been inserted into the module dictionary
supplied to __init__.
Raises:
ImportError exception if the module could not be loaded for whatever
reason (e.g., missing, not allowed).
"""
module = self._imp.new_module(submodule_fullname)
if submodule_fullname == 'thread':
module.__dict__.update(self._dummy_thread.__dict__)
module.__name__ = 'thread'
elif submodule_fullname == 'cPickle':
module.__dict__.update(self._pickle.__dict__)
module.__name__ = 'cPickle'
elif submodule_fullname == 'os':
module.__dict__.update(self._os.__dict__)
elif submodule_fullname == 'ssl':
pass
elif self.StubModuleExists(submodule_fullname):
module = self.ImportStubModule(submodule_fullname)
else:
source_file, pathname, description = self.FindModuleRestricted(submodule, submodule_fullname, search_path)
module = self.LoadModuleRestricted(submodule_fullname,
source_file,
pathname,
description)
if (getattr(module, '__path__', None) is not None and
search_path != self._app_code_path):
try:
app_search_path = os.path.join(self._app_code_path,
*(submodule_fullname.split('.')[:-1]))
source_file, pathname, description = self.FindModuleRestricted(submodule,
submodule_fullname,
[app_search_path])
module.__path__.append(pathname)
except __HOLE__, e:
pass
module.__loader__ = self
self.FixModule(module)
if submodule_fullname not in self._module_dict:
self._module_dict[submodule_fullname] = module
if submodule_fullname != submodule:
parent_module = self._module_dict.get(
submodule_fullname[:-len(submodule) - 1])
if parent_module and not hasattr(parent_module, submodule):
setattr(parent_module, submodule, module)
if submodule_fullname == 'os':
os_path_name = module.path.__name__
os_path = self.FindAndLoadModule(os_path_name, os_path_name, search_path)
self._module_dict['os.path'] = os_path
module.__dict__['path'] = os_path
return module
|
ImportError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/dev_appserver_import_hook.py/HardenedModulesHook.FindAndLoadModule
|
4,236 |
def serve_status_page(self, port):
Handler.task_graph = self.task_graph
print("Starting server at http://localhost:%d" % port)
try:
httpd = SocketServer.TCPServer(("", port), Handler)
except socket.error, error:
raise CommandLineException(error.strerror)
try:
httpd.serve_forever()
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open deanmalmgren/flo/flo/commands/status.py/Command.serve_status_page
|
4,237 |
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if argv is None:
argv = sys.argv
parser = E.OptionParser(
version="%prog version: $Id: split_fasta.py 1714 2007-12-11 16:51:12Z andreas $")
parser.add_option("-f", "--file", dest="input_filename", type="string",
help="input filename. If not given, stdin is used.",
metavar="FILE")
parser.add_option("-i", "--input-pattern", dest="input_pattern", type="string",
help="input pattern. Parses description line in order to extract id.")
parser.add_option("-o", "--output-filename-pattern", dest="output_pattern", type="string",
help="output pattern. Gives filename for a given sequence.")
parser.add_option("-n", "--num-sequences", dest="num_sequences", type="int",
help="split by number of sequences (not implemented yet).")
parser.add_option("-m", "--map", dest="map_filename", type="string",
help="map filename. Map identifiers to filenames",
metavar="FILE")
parser.add_option("-s", "--skip-identifiers", dest="skip_identifiers", action="store_true",
help="do not write identifiers.",
metavar="FILE")
parser.add_option("--min-size", dest="min_size", type="int",
help="minimum cluster size.")
parser.set_defaults(
input_filename=None,
map_filename=None,
skip_identifiers=False,
input_pattern="^(\S+)",
min_size=0,
num_sequences=None,
output_pattern="%s")
(options, args) = E.Start(parser)
if options.input_filename:
infile = IOTools.openFile(options.input_filename, "r")
else:
infile = sys.stdin
if options.map_filename:
map_id2filename = IOTools.ReadMap(open(options.map_filename, "r"))
else:
map_id2filename = {}
if options.num_sequences:
files = FilesChunks(chunk_size=options.num_sequences,
output_pattern=options.output_pattern,
skip_identifiers=options.skip_identifiers)
else:
files = Files(output_pattern=options.output_pattern,
skip_identifiers=options.skip_identifiers)
if options.input_pattern:
rx = re.compile(options.input_pattern)
else:
rx = None
ninput = 0
noutput = 0
identifier = None
chunk = 0
for seq in FastaIterator.iterate(infile):
ninput += 1
if rx:
try:
identifier = rx.search(seq.title).groups()[0]
except __HOLE__:
print "# parsing error in description line %s" % (seq.title)
else:
identifier = seq.title
if map_id2filename:
if identifier in map_id2filename:
identifier = map_id2filename[identifier]
else:
continue
files.Write(identifier, seq)
noutput += 1
if options.input_filename:
infile.close()
# delete all clusters below a minimum size
# Note: this has to be done at the end, because
# clusters sizes are only available once both the fasta
# file and the map has been parsed.
if options.min_size:
ndeleted = files.DeleteFiles(min_size=options.min_size)
else:
ndeleted = 0
if options.loglevel >= 1:
print "# input=%i, output=%i, ndeleted=%i" % (ninput, noutput, ndeleted)
E.Stop()
|
AttributeError
|
dataset/ETHPy150Open CGATOxford/cgat/scripts/split_fasta.py/main
|
4,238 |
def _match(S, N):
"""Structural matching of term S to discrimination net node N."""
stack = deque()
restore_state_flag = False
# matches are stored in a tuple, because all mutations result in a copy,
# preventing operations from changing matches stored on the stack.
matches = ()
while True:
if S.current is END:
yield N.patterns, matches
try:
# This try-except block is to catch hashing errors from un-hashable
# types. This allows for variables to be matched with un-hashable
# objects.
n = N.edges.get(S.current, None)
if n and not restore_state_flag:
stack.append((S.copy(), N, matches))
N = n
S.next()
continue
except __HOLE__:
pass
n = N.edges.get(VAR, None)
if n:
restore_state_flag = False
matches = matches + (S.term,)
S.skip()
N = n
continue
try:
# Backtrack here
(S, N, matches) = stack.pop()
restore_state_flag = True
except:
return
|
TypeError
|
dataset/ETHPy150Open dask/dask/dask/rewrite.py/_match
|
4,239 |
@classmethod
def _fetch_remote(cls, uri, depth_indicator=1):
"""
return remote document and actual remote uri
:type uri: str
:type depth_indicator: int
:rtype: (document: str | None, uri)
"""
cls._log("debug", "fetch remote(%d): %s" % (depth_indicator, uri))
request = Request(uri, headers=cls.request_headers())
response = urlopen(request, timeout=cls.request_timeout())
if not response:
return None
uri = response.geturl()
charset = 'utf8'
try:
charset = response.info().get_param('charset', charset) # py3
except __HOLE__:
pass
return response.read().decode(charset), uri
|
AttributeError
|
dataset/ETHPy150Open k4cg/nichtparasoup/crawler/__init__.py/Crawler._fetch_remote
|
4,240 |
def close(self):
if not self.close_called:
self.close_called = True
try:
self.file.close()
except (OSError, __HOLE__):
pass
try:
self.unlink(self.name)
except (OSError):
pass
|
IOError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/django/core/files/temp.py/TemporaryFile.close
|
4,241 |
def load(self):
session_data = {}
try:
session_file = open(self._key_to_file(), "rb")
try:
file_data = session_file.read()
# Don't fail if there is no data in the session file.
# We may have opened the empty placeholder file.
if file_data:
try:
session_data = self.decode(file_data)
except (EOFError, SuspiciousOperation):
self.create()
finally:
session_file.close()
except __HOLE__:
self.create()
return session_data
|
IOError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/sessions/backends/file.py/SessionStore.load
|
4,242 |
def save(self, must_create=False):
# Get the session data now, before we start messing
# with the file it is stored within.
session_data = self._get_session(no_load=must_create)
session_file_name = self._key_to_file()
try:
# Make sure the file exists. If it does not already exist, an
# empty placeholder file is created.
flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0)
if must_create:
flags |= os.O_EXCL
fd = os.open(session_file_name, flags)
os.close(fd)
except __HOLE__, e:
if must_create and e.errno == errno.EEXIST:
raise CreateError
raise
# Write the session file without interfering with other threads
# or processes. By writing to an atomically generated temporary
# file and then using the atomic os.rename() to make the complete
# file visible, we avoid having to lock the session file, while
# still maintaining its integrity.
#
# Note: Locking the session file was explored, but rejected in part
# because in order to be atomic and cross-platform, it required a
# long-lived lock file for each session, doubling the number of
# files in the session storage directory at any given time. This
# rename solution is cleaner and avoids any additional overhead
# when reading the session data, which is the more common case
# unless SESSION_SAVE_EVERY_REQUEST = True.
#
# See ticket #8616.
dir, prefix = os.path.split(session_file_name)
try:
output_file_fd, output_file_name = tempfile.mkstemp(dir=dir,
prefix=prefix + '_out_')
renamed = False
try:
try:
os.write(output_file_fd, self.encode(session_data))
finally:
os.close(output_file_fd)
os.rename(output_file_name, session_file_name)
renamed = True
finally:
if not renamed:
os.unlink(output_file_name)
except (OSError, IOError, EOFError):
pass
|
OSError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/sessions/backends/file.py/SessionStore.save
|
4,243 |
def delete(self, session_key=None):
if session_key is None:
if self.session_key is None:
return
session_key = self.session_key
try:
os.unlink(self._key_to_file(session_key))
except __HOLE__:
pass
|
OSError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/sessions/backends/file.py/SessionStore.delete
|
4,244 |
def _get_mro(obj_class):
""" Get a reasonable method resolution order of a class and its superclasses
for both old-style and new-style classes.
"""
if not hasattr(obj_class, '__mro__'):
# Old-style class. Mix in object to make a fake new-style class.
try:
obj_class = type(obj_class.__name__, (obj_class, object), {})
except __HOLE__:
# Old-style extension type that does not descend from object.
# FIXME: try to construct a more thorough MRO.
mro = [obj_class]
else:
mro = obj_class.__mro__[1:-1]
else:
mro = obj_class.__mro__
return mro
|
TypeError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/lib/pretty.py/_get_mro
|
4,245 |
def pretty(self, obj):
"""Pretty print the given object."""
obj_id = id(obj)
cycle = obj_id in self.stack
self.stack.append(obj_id)
self.begin_group()
try:
obj_class = getattr(obj, '__class__', None) or type(obj)
# First try to find registered singleton printers for the type.
try:
printer = self.singleton_pprinters[obj_id]
except (__HOLE__, KeyError):
pass
else:
return printer(obj, self, cycle)
# Next look for type_printers.
for cls in _get_mro(obj_class):
if cls in self.type_pprinters:
return self.type_pprinters[cls](obj, self, cycle)
else:
printer = self._in_deferred_types(cls)
if printer is not None:
return printer(obj, self, cycle)
# Finally look for special method names.
if hasattr(obj_class, '_repr_pretty_'):
return obj_class._repr_pretty_(obj, self, cycle)
return _default_pprint(obj, self, cycle)
finally:
self.end_group()
self.stack.pop()
|
TypeError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/lib/pretty.py/RepresentationPrinter.pretty
|
4,246 |
def remove(self, group):
try:
self.queue[group.depth].remove(group)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/lib/pretty.py/GroupQueue.remove
|
4,247 |
def _default_pprint(obj, p, cycle):
"""
The default print function. Used if an object does not provide one and
it's none of the builtin objects.
"""
klass = getattr(obj, '__class__', None) or type(obj)
if getattr(klass, '__repr__', None) != object.__repr__:
# A user-provided repr.
p.text(repr(obj))
return
p.begin_group(1, '<')
p.pretty(klass)
p.text(' at 0x%x' % id(obj))
if cycle:
p.text(' ...')
elif p.verbose:
first = True
for key in dir(obj):
if not key.startswith('_'):
try:
value = getattr(obj, key)
except __HOLE__:
continue
if isinstance(value, types.MethodType):
continue
if not first:
p.text(',')
p.breakable()
p.text(key)
p.text('=')
step = len(key) + 1
p.indentation += step
p.pretty(value)
p.indentation -= step
first = False
p.end_group(1, '>')
|
AttributeError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/lib/pretty.py/_default_pprint
|
4,248 |
def _votes(self, val):
"""
Returns cleaned version of votes or 0 if it's a non-numeric value.
"""
if val.strip() == '':
return 0
try:
return int(float(val))
except __HOLE__:
# Couldn't convert value from string
return 0
|
ValueError
|
dataset/ETHPy150Open openelections/openelections-core/openelex/us/wv/load.py/WVLoader._votes
|
4,249 |
def _writein(self, row):
# sometimes write-in field not present
try:
write_in = row['Write-In?'].strip()
except __HOLE__:
write_in = None
return write_in
|
KeyError
|
dataset/ETHPy150Open openelections/openelections-core/openelex/us/wv/load.py/WVLoader._writein
|
4,250 |
def __init__(self):
super().__init__()
self._handling_lock = Lock()
self._teardown_callback_stack = LifoQueue() # we execute callbacks in the reverse order that they were added
self._logger = log.get_logger(__name__)
self._handled_exceptions = Queue()
self._teardown_callback_raised_exception = False
# Set up handlers to be called when the application process receives certain signals.
# Note: this will raise if called on a non-main thread, but we should NOT work around that here. (That could
# prevent the teardown handler from ever being registered!) Calling code should be organized so that this
# singleton is only ever initialized on the main thread.
signal.signal(signal.SIGTERM, self._application_teardown_signal_handler)
signal.signal(signal.SIGINT, self._application_teardown_signal_handler)
try:
signal.signal(process_utils.SIGINFO, self._application_info_dump_signal_handler)
except __HOLE__:
self._logger.warning('Failed to register signal handler for SIGINFO. This is expected if ClusterRunner '
'is running on Windows.')
|
ValueError
|
dataset/ETHPy150Open box/ClusterRunner/app/util/unhandled_exception_handler.py/UnhandledExceptionHandler.__init__
|
4,251 |
def handle_modeladmin(self, modeladmin):
def datepublisher_admin(self, obj):
return '%s – %s' % (
format_date(obj.publication_date),
format_date(obj.publication_end_date, '∞'),
)
datepublisher_admin.allow_tags = True
datepublisher_admin.short_description = _('visible from - to')
modeladmin.__class__.datepublisher_admin = datepublisher_admin
try:
pos = modeladmin.list_display.index('is_visible_admin')
except __HOLE__:
pos = len(modeladmin.list_display)
modeladmin.list_display.insert(pos + 1, 'datepublisher_admin')
modeladmin.add_extension_options(_('Date-based publishing'), {
'fields': ['publication_date', 'publication_end_date'],
})
# ------------------------------------------------------------------------
|
ValueError
|
dataset/ETHPy150Open feincms/feincms/feincms/module/extensions/datepublisher.py/Extension.handle_modeladmin
|
4,252 |
def main():
test = TestUniqueChars()
test.test_unique_chars(unique_chars)
try:
test.test_unique_chars(unique_chars_hash)
test.test_unique_chars(unique_chars_inplace)
except __HOLE__:
# Alternate solutions are only defined
# in the solutions file
pass
|
NameError
|
dataset/ETHPy150Open donnemartin/interactive-coding-challenges/arrays_strings/unique_chars/test_unique_chars.py/main
|
4,253 |
def BuildDependentLibraries(env, src_dir):  # pylint: disable=R0914
    INCLUDES_RE = re.compile(
        r"^\s*#include\s+(\<|\")([^\>\"\']+)(?:\>|\")", re.M)
    LIBSOURCE_DIRS = [env.subst(d) for d in env.get("LIBSOURCE_DIRS", [])]
    # start internal prototypes
    class IncludeFinder(object):
        def __init__(self, base_dir, name, is_system=False):
            self.base_dir = base_dir
            self.name = name
            self.is_system = is_system
            self._inc_path = None
            self._lib_dir = None
            self._lib_name = None
        def getIncPath(self):
            return self._inc_path
        def getLibDir(self):
            return self._lib_dir
        def getLibName(self):
            return self._lib_name
        def run(self):
            if not self.is_system and self._find_in_local():
                return True
            return self._find_in_system()
        def _find_in_local(self):
            if isfile(join(self.base_dir, self.name)):
                self._inc_path = join(self.base_dir, self.name)
                return True
            else:
                return False
        def _find_in_system(self):
            for lsd_dir in LIBSOURCE_DIRS:
                if not isdir(lsd_dir):
                    continue
                for ld in env.get("LIB_USE", []) + sorted(listdir(lsd_dir)):
                    if not isdir(join(lsd_dir, ld)):
                        continue
                    inc_path = normpath(join(lsd_dir, ld, self.name))
                    try:
                        lib_dir = inc_path[:inc_path.index(
                            sep, len(lsd_dir) + 1)]
                    except __HOLE__:
                        continue
                    lib_name = basename(lib_dir)
                    # ignore user's specified libs
                    if lib_name in env.get("LIB_IGNORE", []):
                        continue
                    if not isfile(inc_path):
                        # if source code is in "src" dir
                        lib_dir = join(lsd_dir, lib_name, "src")
                        inc_path = join(lib_dir, self.name)
                    if isfile(inc_path):
                        self._lib_dir = lib_dir
                        self._lib_name = lib_name
                        self._inc_path = inc_path
                        return True
            return False
    def _get_dep_libs(src_dir):
        state = {
            "paths": set(),
            "libs": set(),
            "ordered": set()
        }
        state = _process_src_dir(state, env.subst(src_dir))
        result = []
        for item in sorted(state['ordered'], key=lambda s: s[0]):
            result.append((item[1], item[2]))
        return result
    def _process_src_dir(state, src_dir):
        for root, _, files in walk(src_dir, followlinks=True):
            for f in files:
                if env.IsFileWithExt(f, SRC_BUILD_EXT + SRC_HEADER_EXT):
                    state = _parse_includes(state, env.File(join(root, f)))
        return state
    def _parse_includes(state, node):
        skip_includes = ("arduino.h", "energia.h")
        matches = INCLUDES_RE.findall(node.get_text_contents())
        for (inc_type, inc_name) in matches:
            base_dir = dirname(node.get_abspath())
            if inc_name.lower() in skip_includes:
                continue
            if join(base_dir, inc_name) in state['paths']:
                continue
            else:
                state['paths'].add(join(base_dir, inc_name))
            finder = IncludeFinder(base_dir, inc_name, inc_type == "<")
            if finder.run():
                _parse_includes(state, env.File(finder.getIncPath()))
                _lib_dir = finder.getLibDir()
                if _lib_dir and _lib_dir not in state['libs']:
                    state['ordered'].add((
                        len(state['ordered']) + 1, finder.getLibName(),
                        _lib_dir))
                    state['libs'].add(_lib_dir)
                    if env.subst("$LIB_DFCYCLIC").lower() == "true":
                        state = _process_src_dir(state, _lib_dir)
        return state
    # end internal prototypes
    deplibs = _get_dep_libs(src_dir)
    for l, ld in deplibs:
        env.Append(
            CPPPATH=[join("$BUILD_DIR", l)]
        )
        # add automatically "utility" dir from the lib (Arduino issue)
        if isdir(join(ld, "utility")):
            env.Append(
                CPPPATH=[join("$BUILD_DIR", l, "utility")]
            )
    libs = []
    for (libname, inc_dir) in deplibs:
        lib = env.BuildLibrary(
            join("$BUILD_DIR", libname), inc_dir)
        env.Clean(libname, lib)
        libs.append(lib)
    return libs
|
ValueError
|
dataset/ETHPy150Open platformio/platformio/platformio/builder/tools/platformio.py/BuildDependentLibraries
|
4,254 |
def __getattribute__(self, key):
    """overrides getattr's behaviour for the instance"""
    try: return object.__getattribute__(self, key)
    except __HOLE__:
        try: return self.service.applicationContext[key]
        except KeyError:
            raise AttributeError("'%s' object has no attribute '%s'" % \
                (self.__class__.__name__, key))
|
AttributeError
|
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/applications/__init__.py/StorageMixin.__getattribute__
|
4,255 |
def loadAppPlugins(self):
    """load all of the application plugins"""
    #find all application plugins
    my_dir = os.path.dirname(__file__)
    for filename in os.listdir(my_dir):
        if not filename.endswith('.py'): continue
        if filename == '__init__.py': continue
        modname = filename[:-3]
        try:
            mod = __import__(__name__ + '.' + modname, {}, {}, [modname])
        except:
            err('Application Plugin Loader Caught Exception')
            continue #horribly broken module ... skipping
        for name, obj in vars(mod).items():
            if name in self._classMap: continue
            try:
                #need the interfaces to be what we expect
                if IDroneDApplication.implementedBy(obj):
                    self._classMap[name] = copy.deepcopy(obj)
            except __HOLE__: pass
            except:
                err('Application Plugin Scanner Caught Exception')
|
TypeError
|
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/applications/__init__.py/_PluginFactory.loadAppPlugins
|
4,256 |
def _pluginSetup(self, plugin):
    """get romeo configuration bound in order to update the instance
       This gets called by the factory below.
       Note: ROMEO configuration always overrides the ApplicationPlugin's
       default constructor!!!!
    """
    plugin.log('applying configuration from romeo')
    tmp = getattr(plugin, 'INSTANCES', 1) #allow constructor to set this up
    plugin.INSTANCES = plugin.configuration.get('INSTANCES', tmp)
    #get the real settings from configuration
    plugin.STARTUP_INFO.update(plugin.configuration.get('STARTUP_INFO', {}))
    plugin.STARTUP_INFO['START_ARGS'] = tuple(plugin.STARTUP_INFO['START_ARGS'])
    plugin.SHUTDOWN_INFO.update(plugin.configuration.get('SHUTDOWN_INFO', {}))
    plugin.SHUTDOWN_INFO['STOP_ARGS'] = tuple(plugin.SHUTDOWN_INFO['STOP_ARGS'])
    #how long to wait before searching for a newly started process
    tmp = getattr(plugin, 'SEARCH_DELAY', 5.0) #allow constructor to set this up
    plugin.SEARCH_DELAY = plugin.configuration.get('SEARCH_DELAY', tmp)
    #how long to wait on a command before timing out!!! Timeout implies failure!
    tmp = getattr(plugin, 'DEFAULT_TIMEOUT', 120) #allow constructor to set this up
    plugin.DEFAULT_TIMEOUT = plugin.configuration.get('DEFAULT_TIMEOUT', tmp) #seconds
    #how to assimilate an application
    tmp = getattr(plugin, 'ASSIMILATION_PATTERN', None) #allow constructor to set this up
    plugin.ASSIMILATION_PATTERN = plugin.configuration.get('ASSIMILATION_PATTERN', tmp)
    #prepare the process regular expression
    plugin.PROCESS_REGEX = None
    if plugin.ASSIMILATION_PATTERN:
        plugin.PROCESS_REGEX = re.compile(plugin.ASSIMILATION_PATTERN, re.I)
    #if you don't like the default behavior of addInstance override it
    for i in range(plugin.INSTANCES):
        try: plugin.getInstance(i)
        except __HOLE__:
            plugin.addInstance(i)
    tmp = getattr(plugin, 'AUTO_RECOVER', False) #allow constructor to set this up
    #configure automatic restart after crash
    if plugin.configuration.get('AUTO_RECOVER', tmp):
        Event('instance-crashed').subscribe(plugin.recoverInstance)
    #allow romeo to hint that droned manages the daemonization of this app.
    if plugin.configuration.get('MANAGED', False):
        plugin.startProtoKwargs.update({'daemonize': True})
    plugin.log('plugin is configured and ready to be used')
|
AssertionError
|
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/applications/__init__.py/_PluginFactory._pluginSetup
|
4,257 |
def clean_ldap_user_dn_template(self):
    tpl = self.cleaned_data["ldap_user_dn_template"]
    try:
        test = tpl % {"user": "toto"}
    except (__HOLE__, ValueError):
        raise forms.ValidationError(_("Invalid syntax"))
    return tpl
|
KeyError
|
dataset/ETHPy150Open tonioo/modoboa/modoboa/core/app_settings.py/GeneralParametersForm.clean_ldap_user_dn_template
|
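Not part of the record above, and purely illustrative: with %-style templates, an unknown placeholder name raises KeyError while a malformed format spec raises ValueError, which is exactly the pair this validator catches. A minimal sketch:

def validate_template(tpl):
    # Substituting a dummy value surfaces template errors early.
    try:
        tpl % {"user": "toto"}
    except (KeyError, ValueError) as exc:
        return "invalid: %s" % exc
    return "ok"

print(validate_template("uid=%(user)s,ou=users"))  # ok
print(validate_template("uid=%(username)s"))       # invalid: KeyError, wrong key name
print(validate_template("uid=%(user)"))            # invalid: ValueError, truncated spec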
4,258 |
def to_django_settings(self):
    """Apply LDAP related parameters to Django settings
    Doing so, we can use the django_auth_ldap module.
    """
    try:
        import ldap
        from django_auth_ldap.config import LDAPSearch, PosixGroupType
        ldap_available = True
    except __HOLE__:
        ldap_available = False
    values = self.get_current_values()
    if not ldap_available or values["authentication_type"] != "ldap":
        return
    if not hasattr(settings, "AUTH_LDAP_USER_ATTR_MAP"):
        setattr(settings, "AUTH_LDAP_USER_ATTR_MAP", {
            "first_name": "givenName",
            "email": "mail",
            "last_name": "sn"
        })
    ldap_uri = 'ldaps://' if values["ldap_secured"] == "yes" else "ldap://"
    ldap_uri += "%s:%s" % (
        values["ldap_server_address"], values["ldap_server_port"])
    setattr(settings, "AUTH_LDAP_SERVER_URI", ldap_uri)
    setattr(settings, "AUTH_LDAP_GROUP_TYPE", PosixGroupType())
    setattr(settings, "AUTH_LDAP_GROUP_SEARCH", LDAPSearch(
        values["ldap_groups_search_base"], ldap.SCOPE_SUBTREE,
        "(objectClass=posixGroup)"
    ))
    if values["ldap_auth_method"] == "searchbind":
        setattr(settings, "AUTH_LDAP_BIND_DN", values["ldap_bind_dn"])
        setattr(
            settings, "AUTH_LDAP_BIND_PASSWORD",
            values["ldap_bind_password"]
        )
        search = LDAPSearch(
            values["ldap_search_base"], ldap.SCOPE_SUBTREE,
            values["ldap_search_filter"]
        )
        setattr(settings, "AUTH_LDAP_USER_SEARCH", search)
    else:
        setattr(
            settings, "AUTH_LDAP_USER_DN_TEMPLATE",
            values["ldap_user_dn_template"]
        )
    if values["ldap_is_active_directory"] == "yes":
        if not hasattr(settings, "AUTH_LDAP_GLOBAL_OPTIONS"):
            setattr(settings, "AUTH_LDAP_GLOBAL_OPTIONS", {
                ldap.OPT_REFERRALS: False
            })
        else:
            settings.AUTH_LDAP_GLOBAL_OPTIONS[ldap.OPT_REFERRALS] = False
|
ImportError
|
dataset/ETHPy150Open tonioo/modoboa/modoboa/core/app_settings.py/GeneralParametersForm.to_django_settings
|
4,259 |
def __init__(self, config, prefix=None):
    self.manager, self.Manager = None, None
    self.Transport = None
    self.running = False
    self.config = config = Bunch(config)
    if prefix is not None:
        self.config = config = Bunch.partial(prefix, config)
    if 'manager' in config and isinstance(config.manager, dict):
        self.manager_config = manager_config = config.manager
    elif 'manager' in config:
        self.manager_config = manager_config = dict(manager_config)
    else:
        try:
            self.manager_config = manager_config = Bunch.partial('manager', config)
        except ValueError:
            self.manager_config = manager_config = dict()
    if 'manager' in config and isinstance(config.manager, basestring):
        warnings.warn("Use of the manager directive is deprecated; use manager.use instead.", DeprecationWarning)
        manager_config.use = config.manager
    try:
        if 'transport' in config and isinstance(config.transport, dict):
            self.transport_config = transport_config = Bunch(config.transport)
        else:
            self.transport_config = transport_config = Bunch.partial('transport', config)
    except (AttributeError, ValueError):  # pragma: no cover
        self.transport_config = transport_config = Bunch()
    if 'transport' in config and isinstance(config.transport, basestring):
        warnings.warn("Use of the transport directive is deprecated; use transport.use instead.", DeprecationWarning)
        transport_config.use = config.transport
    try:
        if 'message' in config and isinstance(config.message, dict):
            self.message_config = Bunch(config.message)
        else:
            self.message_config = Bunch.partial('message', config)
    except (__HOLE__, ValueError):
        self.message_config = Bunch()
    self.Manager = Manager = self._load(manager_config.use if 'use' in manager_config else 'immediate', 'marrow.mailer.manager')
    if not Manager:
        raise LookupError("Unable to determine manager from specification: %r" % (config.manager, ))
    # Removed until marrow.interface is updated to use marrow.schema.
    #if not isinstance(Manager, IManager):
    #    raise TypeError("Chosen manager does not conform to the manager API.")
    self.Transport = Transport = self._load(transport_config.use, 'marrow.mailer.transport')
    if not Transport:
        raise LookupError("Unable to determine transport from specification: %r" % (config.transport, ))
    # Removed until marrow.interface is updated to use marrow.schema.
    #if not isinstance(Transport, ITransport):
    #    raise TypeError("Chosen transport does not conform to the transport API.")
    self.manager = Manager(manager_config, partial(Transport, transport_config))
|
AttributeError
|
dataset/ETHPy150Open marrow/mailer/marrow/mailer/__init__.py/Mailer.__init__
|
4,260 |
def isNumeric(num):
    '''
    Returns True if the string representation of num is numeric
    Inputs:
    =======
    num : A string representation of a number.
    Outputs:
    ========
    True if num is numeric, False otherwise
    '''
    try:
        float(num)
    except __HOLE__, typError:
        print 'valueError :', ValueError
        print 'typeError :', typError
        return False
    else:
        return True
|
ValueError
|
dataset/ETHPy150Open pivotalsoftware/pymadlib/pymadlib/utils.py/isNumeric
|
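A side note, independent of the dataset record: float() accepts more than plain digits (signs, exponents, even 'inf'), which is why the try/except above is more robust than a digit regex. A small demonstration:

for s in ("42", "-3.5", "1e-3", "inf", "abc", "12,5"):
    try:
        float(s)
        print("%r is numeric" % (s,))
    except ValueError:
        print("%r is not numeric" % (s,))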
4,261 |
def textile(value):
    """
    Textile processing.
    """
    try:
        import textile
    except __HOLE__:
        warnings.warn("The Python textile library isn't installed.",
                      RuntimeWarning)
        return value
    return textile.textile(force_text(value),
                           encoding='utf-8', output='utf-8')
|
ImportError
|
dataset/ETHPy150Open Fantomas42/django-blog-zinnia/zinnia/markups.py/textile
|
4,262 |
def markdown(value, extensions=MARKDOWN_EXTENSIONS):
    """
    Markdown processing with optionally using various extensions
    that python-markdown supports.
    `extensions` is an iterable of either markdown.Extension instances
    or extension paths.
    """
    try:
        import markdown
    except __HOLE__:
        warnings.warn("The Python markdown library isn't installed.",
                      RuntimeWarning)
        return value
    return markdown.markdown(force_text(value), extensions=extensions)
|
ImportError
|
dataset/ETHPy150Open Fantomas42/django-blog-zinnia/zinnia/markups.py/markdown
|
4,263 |
def restructuredtext(value, settings=RESTRUCTUREDTEXT_SETTINGS):
    """
    RestructuredText processing with optionally custom settings.
    """
    try:
        from docutils.core import publish_parts
    except __HOLE__:
        warnings.warn("The Python docutils library isn't installed.",
                      RuntimeWarning)
        return value
    parts = publish_parts(source=force_bytes(value),
                          writer_name='html4css1',
                          settings_overrides=settings)
    return force_text(parts['fragment'])
|
ImportError
|
dataset/ETHPy150Open Fantomas42/django-blog-zinnia/zinnia/markups.py/restructuredtext
|
4,264 |
def vcard(self, qs, out):
    try:
        import vobject
    except __HOLE__:
        print(self.style.ERROR("Please install python-vobject to use the vcard export format."))
        import sys
        sys.exit(1)
    for ent in qs:
        card = vobject.vCard()
        card.add('fn').value = full_name(**ent)
        if not ent['last_name'] and not ent['first_name']:
            # fallback to fullname, if both first and lastname are not declared
            card.add('n').value = vobject.vcard.Name(full_name(**ent))
        else:
            card.add('n').value = vobject.vcard.Name(ent['last_name'], ent['first_name'])
        emailpart = card.add('email')
        emailpart.value = ent['email']
        emailpart.type_param = 'INTERNET'
        out.write(card.serialize().encode(self.encoding))
|
ImportError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/export_emails.py/Command.vcard
|
4,265 |
def ast_walk(root, dispatcher, debug=False):
    """Walk the given tree and for each node apply the corresponding function
    as defined by the dispatcher.
    If one node type does not have any function defined for it, it simply
    returns the node unchanged.
    Parameters
    ----------
    root : Node
        top of the tree to walk into.
    dispatcher : Dispatcher
        defines the action for each node type.
    """
    def _walker(par):
        children = []
        for c in par.children:
            children.append(_walker(c))
        par.children = [c for c in children if c is not None]
        try:
            func = dispatcher.action_dict[par.type]
            return func(par)
        except __HOLE__:
            if debug:
                print("no action for type %s" % par.type)
            return par
    # FIXME: we need to copy the dict because the dispatcher modifies the dict
    # in place ATM, and we call this often. This is very expensive, and should
    # be removed as it has a huge cost in starting time (~30 % in hot case)
    return _walker(__copy(root))
|
KeyError
|
dataset/ETHPy150Open cournape/Bento/bento/parser/nodes.py/ast_walk
|
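A minimal, self-contained sketch of the walk-with-dispatcher pattern above; Node and the actions dict are hypothetical stand-ins, not Bento's classes. Nodes whose type has no registered action fall through the KeyError handler unchanged:

class Node(object):
    def __init__(self, type, children=()):
        self.type = type
        self.children = list(children)

def walk(node, actions):
    # Children first (bottom-up), then the node itself.
    node.children = [walk(c, actions) for c in node.children]
    try:
        return actions[node.type](node)  # KeyError: no action registered
    except KeyError:
        return node

tree = Node("root", [Node("literal"), Node("comment")])
walk(tree, {"literal": lambda n: n})  # "comment" and "root" pass through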
4,266 |
def __contains__(self, key):
    try:
        value = self[key]
    except __HOLE__:
        return False
    return True
|
KeyError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.3/django/test/utils.py/ContextList.__contains__
|
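The record above is the standard EAFP way to derive membership from item access; a tiny, independent sketch of the same protocol:

class LazyMap(object):
    def __getitem__(self, key):
        if key.startswith("x"):
            raise KeyError(key)
        return key.upper()
    def __contains__(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True

m = LazyMap()
print("spam" in m)  # True
print("xyz" in m)   # False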
4,267 |
def setup_test_template_loader(templates_dict, use_cached_loader=False):
    """
    Changes Django to only find templates from within a dictionary (where each
    key is the template name and each value is the corresponding template
    content to return).
    Use :meth:`restore_template_loaders` to restore the original loaders.
    """
    if hasattr(loader, RESTORE_LOADERS_ATTR):
        raise Exception("loader.%s already exists" % RESTORE_LOADERS_ATTR)
    def test_template_loader(template_name, template_dirs=None):
        "A custom template loader that loads templates from a dictionary."
        try:
            return (templates_dict[template_name], "test:%s" % template_name)
        except __HOLE__:
            raise TemplateDoesNotExist(template_name)
    if use_cached_loader:
        template_loader = cached.Loader(('test_template_loader',))
        template_loader._cached_loaders = (test_template_loader,)
    else:
        template_loader = test_template_loader
    setattr(loader, RESTORE_LOADERS_ATTR, loader.template_source_loaders)
    loader.template_source_loaders = (template_loader,)
    return template_loader
|
KeyError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.3/django/test/utils.py/setup_test_template_loader
|
4,268 |
def isNetflixInternal():
    try:
        open("/etc/profile.d/netflix_environment.sh", "r")
        return True
    except __HOLE__:
        return False
|
IOError
|
dataset/ETHPy150Open Netflix/gcviz/root/apps/apache/htdocs/AdminGCViz/visualize-gc.py/isNetflixInternal
|
4,269 |
def try_to_draw_vms_cache_refresh_lines():
    if isNetflixInternal():
        try:
            fp = open(vmsGCReportDirectory + os.path.sep + 'vms-cache-refresh-overall-events-milliseconds')
        except IOError:
            return
        for line in fp:
            line = line.rstrip('\r\n')
            try:
                (finish_time_ms_str, duration_ms_str) = line.split()
            except __HOLE__:
                continue
            finish_time_ms = long(finish_time_ms_str)
            duration_ms = long(duration_ms_str)
            start_time_ms = finish_time_ms - duration_ms
            start_time_secs = start_time_ms / 1000.0
            start_time_days = mdates.epoch2num(start_time_secs)
            start_time_line = lines.Line2D([start_time_days, start_time_days], [0, maxGCEventDuration], color='r')
            ax.add_line(start_time_line)
            finish_time_secs = finish_time_ms / 1000.0
            finish_time_days = mdates.epoch2num(finish_time_secs)
            finish_time_line = lines.Line2D([finish_time_days, finish_time_days], [0, maxGCEventDuration], color='c')
            ax.add_line(finish_time_line)
        fp.close()
        # draw some fake lines just to get them into the legend
        fake_vms_start_line = lines.Line2D([jvmBootDays, 0], [jvmBootDays, 0], label='VMS cache refresh start', color='r')
        fake_vms_end_line = lines.Line2D([jvmBootDays, 0], [jvmBootDays, 0], label='VMS cache refresh end', color='c')
        ax.add_line(fake_vms_start_line)
        ax.add_line(fake_vms_end_line)
|
ValueError
|
dataset/ETHPy150Open Netflix/gcviz/root/apps/apache/htdocs/AdminGCViz/visualize-gc.py/try_to_draw_vms_cache_refresh_lines
|
4,270 |
@listens_for(File, 'after_delete')
def del_file(mapper, connection, target):
    if target.path:
        try:
            os.remove(op.join(file_path, target.path))
        except __HOLE__:
            # Don't care if it was not deleted because it does not exist
            pass
|
OSError
|
dataset/ETHPy150Open flask-admin/flask-admin/examples/forms/app.py/del_file
|
4,271 |
@listens_for(Image, 'after_delete')
def del_image(mapper, connection, target):
    if target.path:
        # Delete image
        try:
            os.remove(op.join(file_path, target.path))
        except OSError:
            pass
        # Delete thumbnail
        try:
            os.remove(op.join(file_path,
                              form.thumbgen_filename(target.path)))
        except __HOLE__:
            pass
# Administrative views
|
OSError
|
dataset/ETHPy150Open flask-admin/flask-admin/examples/forms/app.py/del_image
|
4,272 |
def targets(self, tgt, tgt_type):
    '''
    Return a dict of {'id': {'ipv4': <ipaddr>}} data sets to be used as
    targets given the passed tgt and tgt_type
    '''
    targets = {}
    for back in self._gen_back():
        f_str = '{0}.targets'.format(back)
        if f_str not in self.rosters:
            continue
        try:
            targets.update(self.rosters[f_str](tgt, tgt_type))
        except salt.exceptions.SaltRenderError as exc:
            log.error('Unable to render roster file: {0}'.format(exc))
        except __HOLE__ as exc:
            pass
    if not targets:
        raise salt.exceptions.SaltSystemExit(
            'No hosts found with target {0} of type {1}'.format(
                tgt,
                tgt_type)
        )
    log.debug('Matched minions: {0}'.format(targets))
    return targets
|
IOError
|
dataset/ETHPy150Open saltstack/salt/salt/roster/__init__.py/Roster.targets
|
4,273 |
def GetAppInfo(self, user_info):
    """Grabs the application info from the user_info dictionary.
    Args:
        user_info: dictionary of application info
    Returns:
        tuple of bundle_id, app_version, app_path
    """
    bundle_id, app_version, app_path = None, None, None
    try:
        bundle_id = user_info['NSApplicationBundleIdentifier']
    except __HOLE__:
        # Malformed applications may not have NSApplicationBundleIdentifier
        # Return NSApplicationName instead
        logging.error('Error reading bundle identifier: %s', user_info)
        bundle_id = user_info['NSApplicationName']
    try:
        app_path = user_info['NSApplicationPath']
    except KeyError:
        # Malformed applications may not have NSApplicationPath
        logging.error('Error reading application path: %s', user_info)
    if app_path:
        try:
            app_info_plist = NSDictionary.dictionaryWithContentsOfFile_(
                '%s/Contents/Info.plist' % app_path)
            if app_info_plist:
                app_version = app_info_plist['CFBundleVersion']
        except KeyError:
            logging.error('Error reading application version from %s', app_path)
    return bundle_id, app_version, app_path
|
KeyError
|
dataset/ETHPy150Open google/macops/crankd/ApplicationUsage.py/ApplicationUsage.GetAppInfo
|
4,274 |
def logtail(fh, date_format, start_date, field, parser=None, delimiter=None,
            **kwargs):
    """Tail rows from logfile, based on complex expressions such as a
    date range."""
    dt_start = dateutil.parser.parse(start_date)
    _is_match = partial(_is_match_full, date_format=date_format, dt_start=dt_start)
    _is_match_func = _is_match
    if parser:
        # Custom parser specified, use field-based matching
        parser = eval(parser, vars(logtools.parsers), {})()
        is_indices = field.isdigit()
        if is_indices:
            # Field index based matching
            def _is_match_func(line):
                parsed_line = parser(line)
                return _is_match(parsed_line.by_index(field))
        else:
            # Named field based matching
            def _is_match_func(line):
                parsed_line = parser(line)
                return _is_match(parsed_line.by_index(field))
    else:
        # No custom parser, field/delimiter-based extraction
        def _is_match_func(line):
            val = line.split(delimiter)[int(field) - 1]
            return _is_match(val)
    num_lines = 0
    num_filtered = 0
    num_nomatch = 0
    for line in imap(lambda x: x.strip(), fh):
        try:
            is_match = _is_match_func(line)
        except (KeyError, __HOLE__):
            # Parsing error
            logging.warn("No match for line: %s", line)
            num_nomatch += 1
            continue
        else:
            if not is_match:
                logging.debug("Filtering line: %s", line)
                num_filtered += 1
                continue
        num_lines += 1
        yield line
    logging.info("Number of lines after filtering: %s", num_lines)
    logging.info("Number of lines filtered: %s", num_filtered)
    if num_nomatch:
        logging.info("Number of lines could not match on: %s", num_nomatch)
    return
|
ValueError
|
dataset/ETHPy150Open adamhadani/logtools/logtools/_tail.py/logtail
|
4,275 |
def add_datapoints(self, stats):
    """Add all of the data points for a node
    :param str stats: The stats content from Apache as a string
    """
    matches = PATTERN.findall(stats or '')
    for key, value in matches:
        try:
            value = int(value)
        except __HOLE__:
            try:
                value = float(value)
            except ValueError:
                value = 0
        if key in self.KEYS:
            if self.KEYS[key].get('type') == 'gauge':
                self.add_gauge_value(self.KEYS[key]['label'],
                                     self.KEYS[key].get('suffix', ''),
                                     value)
            else:
                self.add_derive_value(self.KEYS[key]['label'],
                                      self.KEYS[key].get('suffix', ''),
                                      value)
        else:
            LOGGER.debug('Found unmapped key/value pair: %s = %s',
                         key, value)
    score_data = self.get_scoreboard(stats)
    for key, value in score_data.iteritems():
        if key in self.KEYS:
            if self.KEYS[key].get('type') == 'gauge':
                self.add_gauge_value(self.KEYS[key]['label'],
                                     self.KEYS[key].get('suffix', ''),
                                     value)
            else:
                self.add_derive_value(self.KEYS[key]['label'],
                                      self.KEYS[key].get('suffix', ''),
                                      value)
        else:
            LOGGER.debug('Found unmapped key/value pair: %s = %s',
                         key, value)
|
ValueError
|
dataset/ETHPy150Open MeetMe/newrelic-plugin-agent/newrelic_plugin_agent/plugins/apache_httpd.py/ApacheHTTPD.add_datapoints
|
4,276 |
def __validate_date_arg(self, cmd_arg):
    try:
        date = dateutil.parser.parse(self.command_args[cmd_arg])
        return date
    except __HOLE__:
        raise AzureInvalidCommand(
            cmd_arg + '=' + self.command_args[cmd_arg]
        )
|
ValueError
|
dataset/ETHPy150Open SUSE/azurectl/azurectl/commands/storage_container.py/StorageContainerTask.__validate_date_arg
|
4,277 |
def _ProcessHost(self, d):
    """Retrieves recovery data from an LDAP host and escrows to CauliflowerVest.
    Args:
        d: a single ldap.conn.result3() result dictionary.
    Raises:
        InvalidDistinguishedName: the given host had an invalid DN.
        InvalidRecoveryGuid: the given host had an invalid Recovery GUID.
        PreviouslyEscrowed: the given host has already been escrowed.
    """
    dn = d['distinguishedName'][0]
    # Parse the hostname out of the distinguishedName, which is in this format:
    #   CN=<timestamp>{<recovery_guid>},CN=<hostname>,OU=Workstations,...
    hostname = dn.split(',')[1][len('CN='):]
    # Ignore records with legacy DNs, as they have invalid RecoveryGUIDs,
    # and all have separate valid records.
    if INVALID_DN_REGEX.search(dn):
        raise InvalidDistinguishedName(dn)
    # Some msFVE-RecoveryGuid values may be invalid, so carefully attempt to
    # construct the recovery_guid, and skip over objects which are invalid.
    try:
        recovery_guid = str(
            uuid.UUID(bytes_le=d['msFVE-RecoveryGuid'][0])).upper()
    except __HOLE__:
        raise InvalidRecoveryGuid(
            '%s: %s' % (hostname, d['msFVE-RecoveryGuid']))
    if FLAGS.redact_recovery_passwords:
        recovery_password = '--redacted--'
    else:
        recovery_password = d['msFVE-RecoveryPassword'][0]
    when_created = d['whenCreated'][0]
    try:
        datetime.datetime.strptime(when_created, '%Y%m%d%H%M%S.0Z')
    except ValueError:
        logging.error('Unknown whenCreated format: %r', when_created)
        when_created = ''
    parent_guid = None
    # msFVE-RecoveryObject distinguishedName is in the form of:
    #   CN=<TIMESTAMP>{<UUID>},CN=<HOSTNAME>,DC=example,DC=com
    # where CN=<HOSTNAME>,.* is the parent's distinguishedName.
    # Given that the msFVE-RecoveryObject is a child of the parent host,
    # split off the child to obtain the parent's DN.
    parent_dn = dn.split(',', 1)[1]
    # Alternatively: parent_dn = dn.replace('CN=%s,' % d['name'][0], '')
    ldap_filter = '(&(objectCategory=computer))'
    for host in self._QueryLdap(parent_dn, ldap_filter, scope=ldap.SCOPE_BASE):
        parent_guid = str(uuid.UUID(bytes_le=host['objectGUID'][0])).upper()
    # TODO(user): consider other parent data; os/os_version/sid?
    metadata = {
        'hostname': hostname,
        'dn': dn,
        'when_created': when_created,
        'parent_guid': parent_guid,
    }
    self.client.UploadPassphrase(recovery_guid, recovery_password, metadata)
    logging.info('Escrowed recovery password: %r', recovery_guid)
|
ValueError
|
dataset/ETHPy150Open google/cauliflowervest/src/cauliflowervest/client/win/bitlocker_ad_sync.py/BitLockerAdSync._ProcessHost
|
4,278 |
def get_output(self):
    try:
        command = [self.config['bin'], 'sourcestats']
        if self.config['use_sudo']:
            command.insert(0, self.config['sudo_cmd'])
        return subprocess.Popen(command,
                                stdout=subprocess.PIPE).communicate()[0]
    except __HOLE__:
        return ""
|
OSError
|
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/chronyd/chronyd.py/ChronydCollector.get_output
|
4,279 |
def collect(self):
    output = self.get_output()
    for line in output.strip().split("\n"):
        m = LINE_PATTERN.search(line)
        if m is None:
            continue
        source = cleanup_source(m.group('source'))
        offset = float(m.group('offset'))
        unit = m.group('unit')
        try:
            value = diamond.convertor.time.convert(offset, unit, 'ms')
        except __HOLE__, e:
            self.log.error('Unable to convert %s%s: %s', offset, unit, e)
            continue
        self.publish('%s.offset_ms' % source, value)
|
NotImplementedError
|
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/chronyd/chronyd.py/ChronydCollector.collect
|
4,280 |
@api.login_required(oauth_scopes=['teams:write'])
@api.parameters(parameters.CreateTeamParameters())
@api.response(schemas.DetailedTeamSchema())
@api.response(code=http_exceptions.Conflict.code)
def post(self, args):
    """
    Create a new team.
    """
    try:
        try:
            team = Team(**args)
        except __HOLE__ as exception:
            abort(code=http_exceptions.Conflict.code, message=str(exception))
        db.session.add(team)
        try:
            db.session.commit()
        except sqlalchemy.exc.IntegrityError:
            abort(code=http_exceptions.Conflict.code, message="Could not create a new team.")
    finally:
        db.session.rollback()
    return team
|
ValueError
|
dataset/ETHPy150Open frol/flask-restplus-server-example/app/modules/teams/resources.py/Teams.post
|
4,281 |
@api.login_required(oauth_scopes=['teams:write'])
@api.resolve_object_by_model(Team, 'team')
@api.permission_required(
    permissions.OwnerRolePermission,
    kwargs_on_request=lambda kwargs: {'obj': kwargs['team']}
)
@api.permission_required(permissions.WriteAccessPermission())
@api.parameters(parameters.PatchTeamDetailsParameters())
@api.response(schemas.DetailedTeamSchema())
@api.response(code=http_exceptions.Conflict.code)
def patch(self, args, team):
    """
    Patch team details by ID.
    """
    try:
        for operation in args:
            try:
                if not self._process_patch_operation(operation, team=team):
                    log.info("Team patching has ignored unknown operation %s", operation)
            except __HOLE__ as exception:
                abort(code=http_exceptions.Conflict.code, message=str(exception))
        db.session.merge(team)
        try:
            db.session.commit()
        except sqlalchemy.exc.IntegrityError:
            abort(
                code=http_exceptions.Conflict.code,
                message="Could not update team details."
            )
    finally:
        db.session.rollback()
    return team
|
ValueError
|
dataset/ETHPy150Open frol/flask-restplus-server-example/app/modules/teams/resources.py/TeamByID.patch
|
4,282 |
@api.login_required(oauth_scopes=['teams:write'])
@api.resolve_object_by_model(Team, 'team')
@api.permission_required(
    permissions.OwnerRolePermission,
    kwargs_on_request=lambda kwargs: {'obj': kwargs['team']}
)
@api.permission_required(permissions.WriteAccessPermission())
@api.parameters(parameters.AddTeamMemberParameters())
@api.response(schemas.BaseTeamMemberSchema())
@api.response(code=http_exceptions.Conflict.code)
def post(self, args, team):
    """
    Add a new member to a team.
    """
    try:
        user_id = args.pop('user_id')
        user = User.query.get(user_id)
        if user is None:
            abort(
                code=http_exceptions.NotFound.code,
                message="User with id %d does not exist" % user_id
            )
        try:
            team_member = TeamMember(team=team, user=user, **args)
        except __HOLE__ as exception:
            abort(code=http_exceptions.Conflict.code, message=str(exception))
        db.session.add(team_member)
        try:
            db.session.commit()
        except sqlalchemy.exc.IntegrityError:
            abort(
                code=http_exceptions.Conflict.code,
                message="Could not update team details."
            )
    finally:
        db.session.rollback()
    return team_member
|
ValueError
|
dataset/ETHPy150Open frol/flask-restplus-server-example/app/modules/teams/resources.py/TeamMembers.post
|
4,283 |
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as XML and returns the resulting data.
    """
    assert etree, 'XMLParser requires defusedxml to be installed'
    parser_context = parser_context or {}
    encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
    parser = etree.DefusedXMLParser(encoding=encoding)
    try:
        tree = etree.parse(stream, parser=parser, forbid_dtd=True)
    except (etree.ParseError, __HOLE__) as exc:
        raise ParseError('XML parse error - %s' % six.text_type(exc))
    data = self._xml_convert(tree.getroot())
    return data
|
ValueError
|
dataset/ETHPy150Open jpadilla/django-rest-framework-xml/rest_framework_xml/parsers.py/XMLParser.parse
|
4,284 |
def _type_convert(self, value):
    """
    Converts the value returned by the XML parser into the equivalent
    Python type
    """
    if value is None:
        return value
    try:
        return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
    except ValueError:
        pass
    try:
        return int(value)
    except __HOLE__:
        pass
    try:
        return decimal.Decimal(value)
    except decimal.InvalidOperation:
        pass
    return value
|
ValueError
|
dataset/ETHPy150Open jpadilla/django-rest-framework-xml/rest_framework_xml/parsers.py/XMLParser._type_convert
|
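Illustration only, not from the DRF-XML source: the same try-one-type-at-a-time ladder collapsed into a loop, with the resulting type noted per input:

import datetime
import decimal

def coerce_value(value):
    attempts = (
        (lambda v: datetime.datetime.strptime(v, '%Y-%m-%d %H:%M:%S'), ValueError),
        (int, ValueError),
        (decimal.Decimal, decimal.InvalidOperation),
    )
    for convert, error in attempts:
        try:
            return convert(value)
        except error:
            pass
    return value

print(coerce_value('2016-04-01 12:00:00'))  # datetime.datetime
print(coerce_value('42'))                   # int
print(coerce_value('3.14'))                 # Decimal
print(coerce_value('hello'))                # unchanged string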
4,285 |
def get(self, key, default=None):
    try:
        return self[key]  # <4>
    except __HOLE__:
        return default  # <5>
|
KeyError
|
dataset/ETHPy150Open fluentpython/example-code/03-dict-set/strkeydict0.py/StrKeyDict0.get
|
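For context, this record comes from the Fluent Python examples, where get() delegates to self[key] so that all fallback logic lives in __missing__. A reduced sketch assuming a __missing__ that retries with the key converted to str:

class StrKeyDict0(dict):
    def __missing__(self, key):
        if isinstance(key, str):
            raise KeyError(key)
        return self[str(key)]  # retry with the key converted to str

    def get(self, key, default=None):
        try:
            return self[key]  # __getitem__ falls back to __missing__
        except KeyError:
            return default

d = StrKeyDict0({'2': 'two'})
print(d.get(2))          # 'two': the int key is retried as '2'
print(d.get('nope', 0))  # 0: the default after KeyError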
4,286 |
def _construct_tree(path):
    if not op.exists(path):
        try:
            os.makedirs(op.dirname(path))
        except __HOLE__:
            pass
|
OSError
|
dataset/ETHPy150Open rossant/ipymd/ipymd/core/scripts.py/_construct_tree
|
4,287 |
def safe_rm(tgt):
    '''
    Safely remove a file
    '''
    try:
        os.remove(tgt)
    except (__HOLE__, OSError):
        pass
|
IOError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/safe_rm
|
4,288 |
def is_empty(filename):
    '''
    Is a file empty?
    '''
    try:
        return os.stat(filename).st_size == 0
    except __HOLE__:
        # Non-existent file or permission denied to the parent dir
        return False
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/is_empty
|
4,289 |
def get_uid(user=None):
    """
    Get the uid for a given user name. If no user given,
    the current euid will be returned. If the user
    does not exist, None will be returned. On
    systems which do not support pwd or os.geteuid
    it will return None.
    """
    if not HAS_PWD:
        result = None
    elif user is None:
        try:
            result = os.geteuid()
        except __HOLE__:
            result = None
    else:
        try:
            u_struct = pwd.getpwnam(user)
        except KeyError:
            result = None
        else:
            result = u_struct.pw_uid
    return result
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/get_uid
|
4,290 |
def get_gid(group=None):
    """
    Get the gid for a given group name. If no group given,
    the current egid will be returned. If the group
    does not exist, None will be returned. On
    systems which do not support grp or os.getegid
    it will return None.
    """
    if grp is None:
        result = None
    elif group is None:
        try:
            result = os.getegid()
        except __HOLE__:
            result = None
    else:
        try:
            g_struct = grp.getgrnam(group)
        except KeyError:
            result = None
        else:
            result = g_struct.gr_gid
    return result
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/get_gid
|
4,291 |
def daemonize(redirect_out=True):
    '''
    Daemonize a process
    '''
    try:
        pid = os.fork()
        if pid > 0:
            # exit first parent
            reinit_crypto()
            sys.exit(salt.defaults.exitcodes.EX_OK)
    except OSError as exc:
        log.error(
            'fork #1 failed: {0} ({1})'.format(exc.errno, exc.strerror)
        )
        sys.exit(salt.defaults.exitcodes.EX_GENERIC)
    # decouple from parent environment
    os.chdir('/')
    # noinspection PyArgumentList
    os.setsid()
    os.umask(18)
    # do second fork
    try:
        pid = os.fork()
        if pid > 0:
            reinit_crypto()
            sys.exit(salt.defaults.exitcodes.EX_OK)
    except __HOLE__ as exc:
        log.error(
            'fork #2 failed: {0} ({1})'.format(
                exc.errno, exc.strerror
            )
        )
        sys.exit(salt.defaults.exitcodes.EX_GENERIC)
    reinit_crypto()
    # A normal daemonization redirects the process output to /dev/null.
    # Unfortunately when a python multiprocess is called the output is
    # not cleanly redirected and the parent process dies when the
    # multiprocessing process attempts to access stdout or err.
    if redirect_out:
        with fopen('/dev/null', 'r+') as dev_null:
            os.dup2(dev_null.fileno(), sys.stdin.fileno())
            os.dup2(dev_null.fileno(), sys.stdout.fileno())
            os.dup2(dev_null.fileno(), sys.stderr.fileno())
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/daemonize
|
4,292 |
def profile_func(filename=None):
    '''
    Decorator for adding profiling to a nested function in Salt
    '''
    def proffunc(fun):
        def profiled_func(*args, **kwargs):
            logging.info('Profiling function {0}'.format(fun.__name__))
            try:
                profiler = cProfile.Profile()
                retval = profiler.runcall(fun, *args, **kwargs)
                profiler.dump_stats((filename or '{0}_func.profile'
                                     .format(fun.__name__)))
            except __HOLE__:
                logging.exception(
                    'Could not open profile file {0}'.format(filename)
                )
            return retval
        return profiled_func
    return proffunc
|
IOError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/profile_func
|
4,293 |
def which(exe=None):
    '''
    Python clone of /usr/bin/which
    '''
    def _is_executable_file_or_link(exe):
        # checking os.X_OK alone doesn't suffice because a directory may be
        # executable
        return (os.access(exe, os.X_OK) and
                (os.path.isfile(exe) or os.path.islink(exe)))
    if exe:
        if _is_executable_file_or_link(exe):
            # executable in cwd or fullpath
            return exe
        ext_list = os.environ.get('PATHEXT', '.EXE').split(';')
        @real_memoize
        def _exe_has_ext():
            '''
            Do a case insensitive test if exe has a file extension match in
            PATHEXT
            '''
            for ext in ext_list:
                try:
                    pattern = r'.*\.' + ext.lstrip('.') + r'$'
                    re.match(pattern, exe, re.I).groups()
                    return True
                except __HOLE__:
                    continue
            return False
        # Enhance the POSIX path for reliability in environments where $PATH
        # may change. This also preserves order, so the first match wins when
        # searching for optional alternatives.
        search_path = os.environ.get('PATH') and os.environ['PATH'].split(os.pathsep) or list()
        for default_path in ['/bin', '/sbin', '/usr/bin', '/usr/sbin', '/usr/local/bin']:
            if default_path not in search_path:
                search_path.append(default_path)
        os.environ['PATH'] = os.pathsep.join(search_path)
        for path in search_path:
            full_path = os.path.join(path, exe)
            if _is_executable_file_or_link(full_path):
                return full_path
            elif is_windows() and not _exe_has_ext():
                # On Windows, check for any extensions in PATHEXT.
                # Allows both 'cmd' and 'cmd.exe' to be matched.
                for ext in ext_list:
                    # Windows filesystem is case insensitive so we
                    # safely rely on that behavior
                    if _is_executable_file_or_link(full_path + ext):
                        return full_path + ext
        log.trace('\'{0}\' could not be found in the following search path: \'{1}\''.format(exe, search_path))
    else:
        log.error('No executable was passed to be searched by salt.utils.which()')
    return None
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/which
|
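A compact sketch, standard library only and much simpler than Salt's version, of the core PATH scan that which() performs:

import os

def simple_which(exe):
    for path in os.environ.get('PATH', '').split(os.pathsep):
        candidate = os.path.join(path, exe)
        # The executable bit alone is not enough: directories can be executable.
        if os.access(candidate, os.X_OK) and os.path.isfile(candidate):
            return candidate
    return None

print(simple_which('ls'))  # e.g. /bin/ls on most POSIX systems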
4,294 |
def output_profile(pr, stats_path='/tmp/stats', stop=False, id_=None):
    if pr is not None and HAS_CPROFILE:
        try:
            pr.disable()
            if not os.path.isdir(stats_path):
                os.makedirs(stats_path)
            date = datetime.datetime.now().isoformat()
            if id_ is None:
                id_ = rand_str(size=32)
            ficp = os.path.join(stats_path, '{0}.{1}.pstats'.format(id_, date))
            fico = os.path.join(stats_path, '{0}.{1}.dot'.format(id_, date))
            ficn = os.path.join(stats_path, '{0}.{1}.stats'.format(id_, date))
            if not os.path.exists(ficp):
                pr.dump_stats(ficp)
                with open(ficn, 'w') as fic:
                    pstats.Stats(pr, stream=fic).sort_stats('cumulative')
            log.info('PROFILING: {0} generated'.format(ficp))
            log.info('PROFILING (cumulative): {0} generated'.format(ficn))
            pyprof = which('pyprof2calltree')
            cmd = [pyprof, '-i', ficp, '-o', fico]
            if pyprof:
                failed = False
                try:
                    pro = subprocess.Popen(
                        cmd, shell=False,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                except __HOLE__:
                    failed = True
                if pro.returncode:
                    failed = True
                if failed:
                    log.error('PROFILING (dot) problem')
                else:
                    log.info('PROFILING (dot): {0} generated'.format(fico))
                    log.trace('pyprof2calltree output:')
                    log.trace(to_str(pro.stdout.read()).strip() +
                              to_str(pro.stderr.read()).strip())
            else:
                log.info('You can run {0} for additional stats.'.format(cmd))
        finally:
            if not stop:
                pr.enable()
    return pr
|
OSError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/output_profile
|
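The record above wraps this standard-library pattern; a minimal sketch of profiling one call and dumping both binary and text stats (file paths are illustrative):

import cProfile
import pstats

def work():
    return sum(i * i for i in range(10000))

profiler = cProfile.Profile()
profiler.enable()
work()
profiler.disable()
profiler.dump_stats('/tmp/work.pstats')  # binary stats, usable by pyprof2calltree
with open('/tmp/work.stats', 'w') as fh:
    pstats.Stats(profiler, stream=fh).sort_stats('cumulative').print_stats(10)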
4,295 |
def dns_check(addr, safe=False, ipv6=False):
    '''
    Return the ip resolved by dns, but do not exit on failure, only raise an
    exception. Obeys system preference for IPv4/6 address resolution.
    '''
    error = False
    try:
        # issue #21397: force glibc to re-read resolv.conf
        if HAS_RESINIT:
            res_init()
        hostnames = socket.getaddrinfo(
            addr, None, socket.AF_UNSPEC, socket.SOCK_STREAM
        )
        if not hostnames:
            error = True
        else:
            addr = False
            for h in hostnames:
                if h[0] == socket.AF_INET or (h[0] == socket.AF_INET6 and ipv6):
                    addr = ip_bracket(h[4][0])
                    break
            if not addr:
                error = True
    except __HOLE__:
        err = ('Attempt to resolve address \'{0}\' failed. Invalid or unresolvable address').format(addr)
        raise SaltSystemExit(code=42, msg=err)
    except socket.error:
        error = True
    if error:
        err = ('DNS lookup of \'{0}\' failed.').format(addr)
        if safe:
            if salt.log.is_console_configured():
                # If logging is not configured it also means that either
                # the master or minion instance calling this hasn't even
                # started running
                log.error(err)
            raise SaltClientError()
        raise SaltSystemExit(code=42, msg=err)
    return addr
|
TypeError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/dns_check
|
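For reference, independent of Salt: socket.getaddrinfo() returns (family, type, proto, canonname, sockaddr) tuples, and the resolved address is sockaddr[0]:

import socket

try:
    infos = socket.getaddrinfo('localhost', None, socket.AF_UNSPEC,
                               socket.SOCK_STREAM)
    for family, _, _, _, sockaddr in infos:
        if family in (socket.AF_INET, socket.AF_INET6):
            print(sockaddr[0])  # '127.0.0.1' and/or '::1'
except socket.error as exc:
    print('lookup failed: %s' % exc)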
4,296 |
def required_module_list(docstring=None):
    '''
    Return a list of python modules required by a salt module that aren't
    in stdlib and don't exist on the current pythonpath.
    '''
    if not docstring:
        return []
    ret = []
    modules = parse_docstring(docstring).get('deps', [])
    for mod in modules:
        try:
            imp.find_module(mod)
        except __HOLE__:
            ret.append(mod)
    return ret
|
ImportError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/required_module_list
|
4,297 |
def format_call(fun,
                data,
                initial_ret=None,
                expected_extra_kws=()):
    '''
    Build the required arguments and keyword arguments required for the passed
    function.
    :param fun: The function to get the argspec from
    :param data: A dictionary containing the required data to build the
                 arguments and keyword arguments.
    :param initial_ret: The initial return data pre-populated as dictionary or
                        None
    :param expected_extra_kws: Any expected extra keyword argument names which
                               should not trigger a :ref:`SaltInvocationError`
    :returns: A dictionary with the function required arguments and keyword
              arguments.
    '''
    ret = initial_ret is not None and initial_ret or {}
    ret['args'] = []
    ret['kwargs'] = {}
    aspec = salt.utils.args.get_function_argspec(fun)
    arg_data = arg_lookup(fun)
    args = arg_data['args']
    kwargs = arg_data['kwargs']
    # Since we WILL be changing the data dictionary, let's change a copy of it
    data = data.copy()
    missing_args = []
    for key in kwargs:
        try:
            kwargs[key] = data.pop(key)
        except __HOLE__:
            # Let's leave the default value in place
            pass
    while args:
        arg = args.pop(0)
        try:
            ret['args'].append(data.pop(arg))
        except KeyError:
            missing_args.append(arg)
    if missing_args:
        used_args_count = len(ret['args']) + len(args)
        args_count = used_args_count + len(missing_args)
        raise SaltInvocationError(
            '{0} takes at least {1} argument{2} ({3} given)'.format(
                fun.__name__,
                args_count,
                args_count > 1 and 's' or '',
                used_args_count
            )
        )
    ret['kwargs'].update(kwargs)
    if aspec.keywords:
        # The function accepts **kwargs, so any non-expected extra keyword
        # arguments will be made available.
        for key, value in six.iteritems(data):
            if key in expected_extra_kws:
                continue
            ret['kwargs'][key] = value
        # No need to check for extra keyword arguments since they are all
        # **kwargs now. Return
        return ret
    # Did not return yet? Let's gather any remaining and unexpected keyword
    # arguments
    extra = {}
    for key, value in six.iteritems(data):
        if key in expected_extra_kws:
            continue
        extra[key] = copy.deepcopy(value)
    # We'll be showing errors to the users until Salt Carbon comes out, after
    # which, errors will be raised instead.
    warn_until(
        'Carbon',
        'It\'s time to start raising `SaltInvocationError` instead of '
        'returning warnings',
        # Let's not show the deprecation warning on the console, there's no
        # need.
        _dont_call_warnings=True
    )
    if extra:
        # Found unexpected keyword arguments, raise an error to the user
        if len(extra) == 1:
            msg = '\'{0[0]}\' is an invalid keyword argument for \'{1}\''.format(
                list(extra.keys()),
                ret.get(
                    # In case this is being called for a state module
                    'full',
                    # Not a state module, build the name
                    '{0}.{1}'.format(fun.__module__, fun.__name__)
                )
            )
        else:
            msg = '{0} and \'{1}\' are invalid keyword arguments for \'{2}\''.format(
                ', '.join(['\'{0}\''.format(e) for e in extra][:-1]),
                list(extra.keys())[-1],
                ret.get(
                    # In case this is being called for a state module
                    'full',
                    # Not a state module, build the name
                    '{0}.{1}'.format(fun.__module__, fun.__name__)
                )
            )
        # Return a warning to the user explaining what's going on
        ret.setdefault('warnings', []).append(
            '{0}. If you were trying to pass additional data to be used '
            'in a template context, please populate \'context\' with '
            '\'key: value\' pairs. Your approach will work until Salt '
            'Carbon is out.{1}'.format(
                msg,
                '' if 'full' not in ret else ' Please update your state files.'
            )
        )
        # Let's pack the current extra kwargs as template context
        ret.setdefault('context', {}).update(extra)
    return ret
|
KeyError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/format_call
|
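A standalone sketch of the arg/kwarg partitioning idea, using Python 3's inspect.getfullargspec rather than Salt's helpers; partition_call and target are hypothetical names:

import inspect

def partition_call(fun, data):
    data = dict(data)  # work on a copy, as format_call does
    spec = inspect.getfullargspec(fun)
    n_required = len(spec.args) - len(spec.defaults or ())
    args = [data.pop(name) for name in spec.args[:n_required]]
    kwargs = {name: data.pop(name)
              for name in spec.args[n_required:] if name in data}
    kwargs.update(data)  # leftovers become extra keyword arguments
    return args, kwargs

def target(host, port=22, timeout=10):
    return host, port, timeout

print(partition_call(target, {'host': 'web1', 'timeout': 5, 'retries': 3}))
# (['web1'], {'timeout': 5, 'retries': 3})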
4,298 |
def istextfile(fp_, blocksize=512):
    '''
    Uses heuristics to guess whether the given file is text or binary,
    by reading a single block of bytes from the file.
    If more than 30% of the chars in the block are non-text, or there
    are NUL ('\x00') bytes in the block, assume this is a binary file.
    '''
    int2byte = (lambda x: bytes((x,))) if six.PY3 else chr
    text_characters = (
        b''.join(int2byte(i) for i in range(32, 127)) +
        b'\n\r\t\f\b')
    try:
        block = fp_.read(blocksize)
    except AttributeError:
        # This wasn't an open filehandle, so treat it as a file path and try to
        # open the file
        try:
            with fopen(fp_, 'rb') as fp2_:
                block = fp2_.read(blocksize)
        except __HOLE__:
            # Unable to open file, bail out and return false
            return False
    if b'\x00' in block:
        # Files with null bytes are binary
        return False
    elif not block:
        # An empty file is considered a valid text file
        return True
    try:
        block.decode('utf-8')
        return True
    except UnicodeDecodeError:
        pass
    nontext = block.translate(None, text_characters)
    return float(len(nontext)) / len(block) <= 0.30
|
IOError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/istextfile
|
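The 30% heuristic above in isolation, with illustrative inputs (Python 3 bytes, without Salt's fopen fallback):

text_characters = bytes(range(32, 127)) + b'\n\r\t\f\b'

def looks_textual(block):
    if b'\x00' in block:
        return False  # NUL bytes mean binary
    if not block:
        return True   # an empty block counts as text
    nontext = block.translate(None, text_characters)
    return float(len(nontext)) / len(block) <= 0.30

print(looks_textual(b'hello world\n'))     # True
print(looks_textual(b'\x00\x01\x02\x03'))  # False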
4,299 |
def str_to_num(text):
    '''
    Convert a string to a number.
    Returns an integer if the string represents an integer, a floating
    point number if the string is a real number, or the string unchanged
    otherwise.
    '''
    try:
        return int(text)
    except __HOLE__:
        try:
            return float(text)
        except ValueError:
            return text
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/__init__.py/str_to_num
|
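Usage of the int-then-float ladder above, with the resulting values noted inline:

for text in ('7', '7.5', '1e3', 'seven'):
    try:
        value = int(text)
    except ValueError:
        try:
            value = float(text)
        except ValueError:
            value = text
    print(repr(value))  # 7, then 7.5, then 1000.0, then 'seven'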