Each row pairs a Python function containing a masked exception name (the `__HOLE__` token) with the exception class that fills the mask and the file the function came from.

Unnamed: 0 (int64, 0–10k) | function (string, 79–138k chars) | label (string, 20 classes) | info (string, 42–261 chars)
---|---|---|---|
2,400 |
def parse_index(value):
try:
return int(value)
except (ValueError, __HOLE__):
return value
|
TypeError
|
dataset/ETHPy150Open ombre42/robotframework-sudslibrary/src/SudsLibrary/utils.py/parse_index
|
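Reading the rows: the label column names the exception class that completes the `__HOLE__` token in the function column. Filling row 2,400's hole with its label, `TypeError`, yields a converter that falls back to the raw value whenever conversion fails. A minimal usage sketch (the inputs below are illustrative, not from the dataset):

    def parse_index(value):
        try:
            return int(value)
        except (ValueError, TypeError):  # label fills __HOLE__
            return value

    print(parse_index("3"))    # 3
    print(parse_index("abc"))  # 'abc' -- int() raises ValueError
    print(parse_index(None))   # None -- int(None) raises TypeError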
2,401 |
def get_cache(self, instance, translation=None,
language=None, field_name=None,
field_value=None):
"""
Returns translation from cache.
"""
is_new = bool(instance.pk is None)
try:
cached_obj = instance._linguist_translations[field_name][language]
if not cached_obj.field_name:
cached_obj.field_name = field_name
if not cached_obj.language:
cached_obj.language = language
if not cached_obj.identifier:
cached_obj.identifier = self.instance.linguist_identifier
except __HOLE__:
cached_obj = None
if not is_new:
if translation is None:
try:
translation = self.decider.objects.get(identifier=self.instance.linguist_identifier,
object_id=self.instance.pk,
language=language,
field_name=field_name)
except self.decider.DoesNotExist:
pass
if cached_obj is None:
if translation is not None:
cached_obj = CachedTranslation.from_object(translation)
else:
cached_obj = CachedTranslation(instance=instance,
language=language,
field_name=field_name,
field_value=field_value)
instance._linguist_translations[cached_obj.field_name][cached_obj.language] = cached_obj
return cached_obj
|
KeyError
|
dataset/ETHPy150Open ulule/django-linguist/linguist/fields.py/Linguist.get_cache
|
2,402 |
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
return getattr(instance, '_linguist_cache')
except __HOLE__:
linguist = Linguist(instance=instance,
identifier=self.identifier,
default_language=self.default_language,
default_language_field=self.default_language_field,
fields=self.fields,
decider=self.decider)
setattr(instance, '_linguist_cache', linguist)
setattr(instance, '_linguist_translations', defaultdict(dict))
return instance._linguist_cache
|
AttributeError
|
dataset/ETHPy150Open ulule/django-linguist/linguist/fields.py/CacheDescriptor.__get__
|
2,403 |
def GenerateOutput(target_list, target_dicts, data, params):
# Update target_dicts for iOS device builds.
target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
target_dicts)
user_config = params.get('generator_flags', {}).get('config', None)
if gyp.common.GetFlavor(params) == 'win':
target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
arglists = []
for config_name in config_names:
arglists.append(
(target_list, target_dicts, data, params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
except __HOLE__, e:
pool.terminate()
raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name)
|
KeyboardInterrupt
|
dataset/ETHPy150Open adblockplus/gyp/pylib/gyp/generator/ninja.py/GenerateOutput
|
2,404 |
@wand.setter
def wand(self, wand):
try:
self.resource = wand
except __HOLE__:
raise TypeError(repr(wand) + ' is not a MagickWand instance')
|
TypeError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.wand
|
2,405 |
def __getitem__(self, idx):
if (not isinstance(idx, string_type) and
isinstance(idx, collections.Iterable)):
idx = tuple(idx)
d = len(idx)
if not (1 <= d <= 2):
raise ValueError('index cannot be {0}-dimensional'.format(d))
elif d == 2:
x, y = idx
x_slice = isinstance(x, slice)
y_slice = isinstance(y, slice)
if x_slice and not y_slice:
y = slice(y, y + 1)
elif not x_slice and y_slice:
x = slice(x, x + 1)
elif not (x_slice or y_slice):
if not (isinstance(x, numbers.Integral) and
isinstance(y, numbers.Integral)):
raise TypeError('x and y must be integral, not ' +
repr((x, y)))
if x < 0:
x += self.width
if y < 0:
y += self.height
if x >= self.width:
raise IndexError('x must be less than width')
elif y >= self.height:
raise IndexError('y must be less than height')
elif x < 0:
raise IndexError('x cannot be less than 0')
elif y < 0:
raise IndexError('y cannot be less than 0')
with iter(self) as iterator:
iterator.seek(y)
return iterator.next(x)
if not (x.step is None and y.step is None):
raise ValueError('slicing with step is unsupported')
elif (x.start is None and x.stop is None and
y.start is None and y.stop is None):
return self.clone()
cloned = self.clone()
try:
cloned.crop(x.start, y.start, x.stop, y.stop)
except __HOLE__ as e:
raise IndexError(str(e))
return cloned
else:
return self[idx[0]]
elif isinstance(idx, numbers.Integral):
if idx < 0:
idx += self.height
elif idx >= self.height:
raise IndexError('index must be less than height, but got ' +
repr(idx))
elif idx < 0:
raise IndexError('index cannot be less than zero, but got ' +
repr(idx))
with iter(self) as iterator:
iterator.seek(idx)
return iterator.next()
elif isinstance(idx, slice):
return self[:, idx]
raise TypeError('unsupported index type: ' + repr(idx))
|
ValueError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.__getitem__
|
2,406 |
@manipulative
def caption(self, text, left=0, top=0, width=None, height=None, font=None,
gravity=None):
"""Writes a caption ``text`` into the position.
:param text: text to write
:type text: :class:`basestring`
:param left: x offset in pixels
:type left: :class:`numbers.Integral`
:param top: y offset in pixels
:type top: :class:`numbers.Integral`
:param width: width of caption in pixels.
default is :attr:`width` of the image
:type width: :class:`numbers.Integral`
:param height: height of caption in pixels.
default is :attr:`height` of the image
:type height: :class:`numbers.Integral`
:param font: font to use. default is :attr:`font` of the image
:type font: :class:`wand.font.Font`
:param gravity: text placement gravity.
uses the current :attr:`gravity` setting of the image
by default
:type gravity: :class:`basestring`
.. versionadded:: 0.3.0
"""
if not isinstance(left, numbers.Integral):
raise TypeError('left must be an integer, not ' + repr(left))
elif not isinstance(top, numbers.Integral):
raise TypeError('top must be an integer, not ' + repr(top))
elif width is not None and not isinstance(width, numbers.Integral):
raise TypeError('width must be an integer, not ' + repr(width))
elif height is not None and not isinstance(height, numbers.Integral):
raise TypeError('height must be an integer, not ' + repr(height))
elif font is not None and not isinstance(font, Font):
raise TypeError('font must be a wand.font.Font, not ' + repr(font))
elif gravity is not None and compat.text(gravity) not in GRAVITY_TYPES:
raise ValueError('invalid gravity value')
if width is None:
width = self.width - left
if height is None:
height = self.height - top
if not font:
try:
font = self.font
except __HOLE__:
raise TypeError('font must be specified or existing in image')
with Image() as textboard:
library.MagickSetSize(textboard.wand, width, height)
textboard.font = font
textboard.gravity = gravity or self.gravity
with Color('transparent') as background_color:
library.MagickSetBackgroundColor(textboard.wand,
background_color.resource)
textboard.read(filename=b'caption:' + text.encode('utf-8'))
self.composite(textboard, left, top)
|
TypeError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.caption
|
2,407 |
@manipulative
def resize(self, width=None, height=None, filter='undefined', blur=1):
"""Resizes the image.
:param width: the width in the scaled image. default is the original
width
:type width: :class:`numbers.Integral`
:param height: the height in the scaled image. default is the original
height
:type height: :class:`numbers.Integral`
:param filter: a filter type to use for resizing. choose one in
:const:`FILTER_TYPES`. default is ``'undefined'``
which means IM will try to guess best one to use
:type filter: :class:`basestring`, :class:`numbers.Integral`
:param blur: the blur factor where > 1 is blurry, < 1 is sharp.
default is 1
:type blur: :class:`numbers.Real`
.. versionchanged:: 0.2.1
The default value of ``filter`` has changed from ``'triangle'``
to ``'undefined'`` instead.
.. versionchanged:: 0.1.8
The ``blur`` parameter changed to take :class:`numbers.Real`
instead of :class:`numbers.Rational`.
.. versionadded:: 0.1.1
"""
if width is None:
width = self.width
if height is None:
height = self.height
if not isinstance(width, numbers.Integral):
raise TypeError('width must be a natural number, not ' +
repr(width))
elif not isinstance(height, numbers.Integral):
raise TypeError('height must be a natural number, not ' +
repr(height))
elif width < 1:
raise ValueError('width must be a natural number, not ' +
repr(width))
elif height < 1:
raise ValueError('height must be a natural number, not ' +
repr(height))
elif not isinstance(blur, numbers.Real):
raise TypeError('blur must be numbers.Real , not ' + repr(blur))
elif not isinstance(filter, (string_type, numbers.Integral)):
raise TypeError('filter must be one string defined in wand.image.'
'FILTER_TYPES or an integer, not ' + repr(filter))
if isinstance(filter, string_type):
try:
filter = FILTER_TYPES.index(filter)
except __HOLE__:
raise ValueError(repr(filter) + ' is an invalid filter type; '
'choose on in ' + repr(FILTER_TYPES))
elif (isinstance(filter, numbers.Integral) and
not (0 <= filter < len(FILTER_TYPES))):
raise ValueError(repr(filter) + ' is an invalid filter type')
blur = ctypes.c_double(float(blur))
if self.animation:
self.wand = library.MagickCoalesceImages(self.wand)
library.MagickSetLastIterator(self.wand)
n = library.MagickGetIteratorIndex(self.wand)
library.MagickResetIterator(self.wand)
for i in xrange(n + 1):
library.MagickSetIteratorIndex(self.wand, i)
library.MagickResizeImage(self.wand, width, height,
filter, blur)
library.MagickSetSize(self.wand, width, height)
else:
r = library.MagickResizeImage(self.wand, width, height,
filter, blur)
library.MagickSetSize(self.wand, width, height)
if not r:
self.raise_exception()
|
IndexError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.resize
|
2,408 |
@manipulative
def composite_channel(self, channel, image, operator, left=0, top=0):
"""Composite two images using the particular ``channel``.
:param channel: the channel type. available values can be found
in the :const:`CHANNELS` mapping
:param image: the composited source image.
(the receiver image becomes the destination)
:type image: :class:`Image`
:param operator: the operator that affects how the composite
is applied to the image. available values
can be found in the :const:`COMPOSITE_OPERATORS`
list
:param left: the column offset of the composited source image
:type left: :class:`numbers.Integral`
:param top: the row offset of the composited source image
:type top: :class:`numbers.Integral`
:raises ValueError: when the given ``channel`` or
``operator`` is invalid
.. versionadded:: 0.3.0
"""
if not isinstance(channel, string_type):
raise TypeError('channel must be a string, not ' +
repr(channel))
elif not isinstance(operator, string_type):
raise TypeError('operator must be a string, not ' +
repr(operator))
elif not isinstance(left, numbers.Integral):
raise TypeError('left must be an integer, not ' + repr(left))
elif not isinstance(top, numbers.Integral):
raise TypeError('top must be an integer, not ' + repr(left))
try:
ch_const = CHANNELS[channel]
except KeyError:
raise ValueError(repr(channel) + ' is an invalid channel type'
'; see wand.image.CHANNELS dictionary')
try:
op = COMPOSITE_OPERATORS.index(operator)
except __HOLE__:
raise IndexError(repr(operator) + ' is an invalid composite '
'operator type; see wand.image.COMPOSITE_'
'OPERATORS dictionary')
library.MagickCompositeImageChannel(self.wand, ch_const, image.wand,
op, int(left), int(top))
self.raise_exception()
|
IndexError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.composite_channel
|
2,409 |
@manipulative
def threshold(self, threshold=0.5, channel=None):
"""Changes the value of individual pixels based on the intensity
of each pixel compared to threshold. The result is a high-contrast,
two color image. It manipulates the image in place.
:param threshold: threshold as a factor of quantum
:type threshold: :class:`numbers.Real`
:param channel: the channel type. available values can be found
in the :const:`CHANNELS` mapping. If ``None``,
threshold all channels.
:type channel: :class:`basestring`
.. versionadded:: 0.3.10
"""
if not isinstance(threshold, numbers.Real):
raise TypeError('threshold has to be a numbers.Real, not ' +
repr(threshold))
if channel:
try:
ch_const = CHANNELS[channel]
except __HOLE__:
raise ValueError(repr(channel) + ' is an invalid channel type'
'; see wand.image.CHANNELS dictionary')
r = library.MagickThresholdImageChannel(
self.wand, ch_const,
threshold * self.quantum_range
)
else:
r = library.MagickThresholdImage(self.wand,
threshold * self.quantum_range)
if not r:
self.raise_exception()
|
KeyError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.threshold
|
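Several of the wand rows above and below share one idiom: a lookup into a constants dict whose KeyError is re-raised as a ValueError pointing the caller at the valid keys. The pattern in isolation (the CHANNELS subset here is hypothetical, not wand's real table):

    CHANNELS = {'red': 1, 'green': 2, 'blue': 4}  # illustrative subset

    def channel_constant(channel):
        try:
            return CHANNELS[channel]
        except KeyError:  # the label fills __HOLE__ in the rows above
            raise ValueError(repr(channel) + ' is an invalid channel type'
                             '; see the CHANNELS dictionary')

    print(channel_constant('red'))  # 1
    try:
        channel_constant('alpha')
    except ValueError as e:
        print(e)  # 'alpha' is an invalid channel type; see the CHANNELS dictionary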
2,410 |
def negate(self, grayscale=False, channel=None):
"""Negate the colors in the reference image.
:param grayscale: if set, only negate grayscale pixels in the image.
:type grayscale: :class:`bool`
:param channel: the channel type. available values can be found
in the :const:`CHANNELS` mapping. If ``None``,
negate all channels.
:type channel: :class:`basestring`
.. versionadded:: 0.3.8
"""
if channel:
try:
ch_const = CHANNELS[channel]
except __HOLE__:
raise ValueError(repr(channel) + ' is an invalid channel type'
'; see wand.image.CHANNELS dictionary')
r = library.MagickNegateImageChannel(self.wand, ch_const,
grayscale)
else:
r = library.MagickNegateImage(self.wand, grayscale)
if not r:
self.raise_exception()
|
KeyError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/BaseImage.negate
|
2,411 |
def level(self, black=0.0, white=None, gamma=1.0, channel=None):
"""Adjusts the levels of an image by scaling the colors falling
between specified black and white points to the full available
quantum range.
If only ``black`` is given, ``white`` will be adjusted inward.
:param black: Black point, as a percentage of the system's quantum
range. Defaults to 0.
:type black: :class:`numbers.Real`
:param white: White point, as a percentage of the system's quantum
range. Defaults to 1.0.
:type white: :class:`numbers.Real`
:param gamma: Optional gamma adjustment. Values > 1.0 lighten the
image's midtones while values < 1.0 darken them.
:type gamma: :class:`numbers.Real`
:param channel: The channel type. Available values can be found
in the :const:`CHANNELS` mapping. If ``None``,
normalize all channels.
:type channel: :const:`CHANNELS`
.. versionadded:: 0.4.1
"""
if not isinstance(black, numbers.Real):
raise TypeError('expecting real number, not' + repr(black))
# If white is not given, mimic CLI behavior by reducing top point
if white is None:
white = 1.0 - black
if not isinstance(white, numbers.Real):
raise TypeError('expecting real number, not' + repr(white))
if not isinstance(gamma, numbers.Real):
raise TypeError('expecting real number, not' + repr(gamma))
bp = float(self.quantum_range * black)
wp = float(self.quantum_range * white)
if channel:
try:
ch_const = CHANNELS[channel]
except __HOLE__:
raise ValueError(repr(channel) + ' is an invalid channel type'
'; see wand.image.CHANNELS dictionary')
library.MagickLevelImageChannel(self.wand, ch_const, bp, gamma, wp)
else:
library.MagickLevelImage(self.wand, bp, gamma, wp)
self.raise_exception()
|
KeyError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/Image.level
|
2,412 |
@manipulative
def auto_orient(self):
"""Adjusts an image so that its orientation is suitable
for viewing (i.e. top-left orientation). If available it uses
:c:func:`MagickAutoOrientImage` (was added in ImageMagick 6.8.9+)
if you have an older magick library,
it will use :attr:`_auto_orient()` method for fallback.
.. versionadded:: 0.4.1
"""
try:
result = library.MagickAutoOrientImage(self.wand)
if not result:
self.raise_exception()
except __HOLE__:
self._auto_orient()
|
AttributeError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/Image.auto_orient
|
2,413 |
def normalize(self, channel=None):
"""Normalize color channels.
:param channel: the channel type. available values can be found
in the :const:`CHANNELS` mapping. If ``None``,
normalize all channels.
:type channel: :class:`basestring`
"""
if channel:
try:
ch_const = CHANNELS[channel]
except __HOLE__:
raise ValueError(repr(channel) + ' is an invalid channel type'
'; see wand.image.CHANNELS dictionary')
r = library.MagickNormalizeImageChannel(self.wand, ch_const)
else:
r = library.MagickNormalizeImage(self.wand)
if not r:
self.raise_exception()
|
KeyError
|
dataset/ETHPy150Open dahlia/wand/wand/image.py/Image.normalize
|
2,414 |
def __setitem__(self, key, value):
path = self._get_path(key)
pickled = self.encode(value)
try:
f = open(path, 'w')
try:
f.write(pickled)
finally:
f.close()
except __HOLE__:
pass
|
IOError
|
dataset/ETHPy150Open wecatch/app-turbo/turbo/session.py/DiskStore.__setitem__
|
2,415 |
def download_data_file(href):
destination_path = data_file_destination_path(href)
try:
url = url_for_href(href)
response = urllib2.urlopen(url)
if response.code == 200:
destination_dir = os.path.dirname(destination_path)
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
with file(destination_path, 'w') as destination_file:
destination_file.write(response.read())
else:
raise Exception("Unexpected response code %s for data file: %s"
% (str(response.code), destination_path))
except __HOLE__:
os.remove(destination_path)
raise
|
KeyboardInterrupt
|
dataset/ETHPy150Open sunlightlabs/clearspending/timeliness/crawler.py/download_data_file
|
2,416 |
def crawl_main(startdate=None, enddate=None):
download_schedule = []
try:
for fy in FISCAL_YEARS:
listing = request_listing(fy, startdate, enddate)
update_download_schedule(download_schedule, listing)
log_inaccessible_files(download_schedule)
if confirm_download_schedule(download_schedule):
exec_download_schedule(download_schedule)
except __HOLE__:
print
print "Exiting due to CTRL-C"
finally:
save_schedule(download_schedule)
|
KeyboardInterrupt
|
dataset/ETHPy150Open sunlightlabs/clearspending/timeliness/crawler.py/crawl_main
|
2,417 |
def resume_main():
download_schedule = restore_schedule()
try:
if download_schedule and confirm_download_schedule(download_schedule):
exec_download_schedule(download_schedule)
except __HOLE__:
print
print "Exiting due to CTRL-C"
finally:
save_schedule(download_schedule)
|
KeyboardInterrupt
|
dataset/ETHPy150Open sunlightlabs/clearspending/timeliness/crawler.py/resume_main
|
2,418 |
def perform(self):
""" Performs the action. """
# Hack! Works tho.
try:
meth = getattr(self._window.scene, self.view)
meth()
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open enthought/mayavi/examples/tvtk/scene.py/SpecialViewAction.perform
|
2,419 |
def to_internal_value(self, value):
if value == '' or value is None:
return value
if isinstance(value, GEOSGeometry):
# value already has the correct representation
return value
if isinstance(value, dict):
value = json.dumps(value)
try:
return GEOSGeometry(value)
except (__HOLE__, GEOSException, OGRException, TypeError):
raise ValidationError(_('Invalid format: string or unicode input unrecognized as GeoJSON, WKT EWKT or HEXEWKB.'))
|
ValueError
|
dataset/ETHPy150Open djangonauts/django-rest-framework-gis/rest_framework_gis/fields.py/GeometryField.to_internal_value
|
2,420 |
def parse(curl_command):
method = "get"
tokens = shlex.split(curl_command)
parsed_args = parser.parse_args(tokens)
base_indent = " " * 4
data_token = ''
post_data = parsed_args.data or parsed_args.data_binary
if post_data:
method = 'post'
try:
post_data_json = json.loads(post_data)
except __HOLE__:
post_data_json = None
# If we found JSON and it is a dict, pull it apart. Otherwise, just leave as a string
if post_data_json and isinstance(post_data_json, dict):
post_data = dict_to_pretty_string(post_data_json)
else:
post_data = "'{}',\n".format(post_data)
data_token = '{}data={}'.format(base_indent, post_data)
cookie_dict = OrderedDict()
quoted_headers = OrderedDict()
for curl_header in parsed_args.header:
header_key, header_value = curl_header.split(":", 1)
if header_key.lower() == 'cookie':
cookie = Cookie.SimpleCookie(header_value)
for key in cookie:
cookie_dict[key] = cookie[key].value
else:
quoted_headers[header_key] = header_value.strip()
result = """requests.{method}("{url}",\n{data_token}{headers_token}{cookies_token})""".format(
method=method,
url=parsed_args.url,
data_token=data_token,
headers_token="{}headers={}".format(base_indent, dict_to_pretty_string(quoted_headers)),
cookies_token="{}cookies={}".format(base_indent, dict_to_pretty_string(cookie_dict)),
)
return result
|
ValueError
|
dataset/ETHPy150Open spulec/uncurl/uncurl/api.py/parse
|
2,421 |
def acquire(self, blocking=False):
import fcntl
self.fd = os.open(self.filename, os.O_CREAT | os.O_WRONLY)
mode = fcntl.LOCK_EX
if not blocking: mode |= fcntl.LOCK_NB
try:
fcntl.flock(self.fd, mode)
self.locked = True
return True
except __HOLE__, e:
if e.errno not in (errno.EAGAIN, errno.EACCES):
raise
return False
|
IOError
|
dataset/ETHPy150Open dokipen/whoosh/src/whoosh/support/filelock.py/FcntlLock.acquire
|
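Row 2,421 is the standard non-blocking fcntl lock: EAGAIN or EACCES means another process holds the lock, while any other errno is a genuine failure that should propagate. A self-contained sketch of the same pattern (POSIX only; the path is hypothetical):

    import errno, fcntl, os

    def try_lock(path):
        fd = os.open(path, os.O_CREAT | os.O_WRONLY)
        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return fd         # hold the fd; closing it releases the lock
        except IOError as e:  # the label IOError fills __HOLE__ above
            if e.errno not in (errno.EAGAIN, errno.EACCES):
                raise         # unexpected failure: don't swallow it
            os.close(fd)
            return None       # lock is held elsewhere

    fd = try_lock('/tmp/example.lock')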
2,422 |
def acquire(self, blocking=False):
import msvcrt
self.fd = os.open(self.filename, os.O_CREAT | os.O_WRONLY)
mode = msvcrt.LK_NBLCK
if blocking: mode = msvcrt.LK_LOCK
try:
msvcrt.locking(self.fd, mode, 1)
return True
except __HOLE__, e:
if e.errno not in (errno.EAGAIN, errno.EACCES, errno.EDEADLK):
raise
return False
|
IOError
|
dataset/ETHPy150Open dokipen/whoosh/src/whoosh/support/filelock.py/MsvcrtLock.acquire
|
2,423 |
def has_next_election(self):
try:
if self.not_seeking_reelection or len(self.candidate_status) > 0:
return False
except __HOLE__:
pass
return True
|
TypeError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/summary_data/models.py/Candidate_Overlay.has_next_election
|
2,424 |
def show_candidate_status(self):
if self.cand_is_gen_winner and self.is_general_candidate:
return "Won General"
if self.cand_is_gen_winner == False and self.is_general_candidate:
return "Lost General"
if self.candidate_status:
try:
return CANDIDATE_STATUS_DICT[self.candidate_status]
except __HOLE__:
return ""
return ""
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/summary_data/models.py/Candidate_Overlay.show_candidate_status
|
2,425 |
def display_type(self):
key = self.ctype
returnval = ''
try:
returnval = type_hash[key]
except __HOLE__:
pass
if self.designation == 'D':
returnval += " (Leadership PAC)"
elif self.designation == 'J':
returnval += " (Joint Fundraising PAC)"
return returnval
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/summary_data/models.py/Committee_Overlay.display_type
|
2,426 |
def display_designation(self):
key = self.designation
try:
return committee_designation_hash[key]
except __HOLE__:
return ''
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/summary_data/models.py/Committee_Overlay.display_designation
|
2,427 |
@group_only
def add_quote(self, msg, matches):
try:
quote = matches.group(1)
except __HOLE__:
quote = matches
if hasattr(msg.src, 'username'):
username = msg.src.username
self.insert(timestamp=msg.date,
uid=msg.src.id, username=username,
full_name="{0} {1}".format(msg.src.first_name or '', msg.src.last_name or ''),
chat_id=msg.dest.id, quote=quote)
return "Done!"
|
AttributeError
|
dataset/ETHPy150Open datamachine/telex/plugins/quotes.py/QuotesPlugin.add_quote
|
2,428 |
def process(fp, outfp, env = {}):
lineno = 0
while 1:
line = fp.readline()
if not line: break
lineno = lineno + 1
match = p_define.match(line)
if match:
# gobble up continuation lines
while line[-2:] == '\\\n':
nextline = fp.readline()
if not nextline: break
lineno = lineno + 1
line = line + nextline
name = match.group(1)
body = line[match.end():]
body = pytify(body)
ok = 0
stmt = '%s = %s\n' % (name, body.strip())
try:
exec stmt in env
except:
sys.stderr.write('Skipping: %s' % stmt)
else:
outfp.write(stmt)
match = p_macro.match(line)
if match:
macro, arg = match.group(1, 2)
body = line[match.end():]
body = pytify(body)
stmt = 'def %s(%s): return %s\n' % (macro, arg, body)
try:
exec stmt in env
except:
sys.stderr.write('Skipping: %s' % stmt)
else:
outfp.write(stmt)
match = p_include.match(line)
if match:
regs = match.regs
a, b = regs[1]
filename = line[a:b]
if importable.has_key(filename):
outfp.write('from %s import *\n' % importable[filename])
elif not filedict.has_key(filename):
filedict[filename] = None
inclfp = None
for dir in searchdirs:
try:
inclfp = open(dir + '/' + filename)
break
except __HOLE__:
pass
if inclfp:
outfp.write(
'\n# Included from %s\n' % filename)
process(inclfp, outfp, env)
else:
sys.stderr.write('Warning - could not find file %s\n' %
filename)
|
IOError
|
dataset/ETHPy150Open francelabs/datafari/windows/python/Tools/Scripts/h2py.py/process
|
2,429 |
def set_rot_preset(self, preset_name):
self.init_rot_matrix()
try:
r = self.rot_presets[preset_name]
except AttributeError:
raise ValueError(
"%s is not a valid rotation preset." % preset_name)
try:
self.euler_rotate(r[0], 1, 0, 0)
self.euler_rotate(r[1], 0, 1, 0)
self.euler_rotate(r[2], 0, 0, 1)
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/plot_camera.py/PlotCamera.set_rot_preset
|
2,430 |
def _parse_robots(self, response, netloc):
rp = robotparser.RobotFileParser(response.url)
body = ''
if hasattr(response, 'text'):
body = response.text
else: # last effort try
try:
body = response.body.decode('utf-8')
except __HOLE__:
# If we found garbage, disregard it:,
# but keep the lookup cached (in self._parsers)
# Running rp.parse() will set rp state from
# 'disallow all' to 'allow any'.
pass
rp.parse(body.splitlines())
rp_dfd = self._parsers[netloc]
self._parsers[netloc] = rp
rp_dfd.callback(rp)
|
UnicodeDecodeError
|
dataset/ETHPy150Open scrapy/scrapy/scrapy/downloadermiddlewares/robotstxt.py/RobotsTxtMiddleware._parse_robots
|
2,431 |
def get_object(self, request, object_id, from_field=None):
queryset = self.get_queryset(request)
if isinstance(queryset, TranslationQueryset): # will always be true once Django 1.9 is required
model = queryset.shared_model
if from_field is None:
field = model._meta.pk
else:
try:
field = model._meta.get_field(from_field)
except FieldDoesNotExist:
field = model._meta.translations_model._meta.get_field(from_field)
else:
model = queryset.model
field = model._meta.pk if from_field is None else model._meta.get_field(from_field)
try:
object_id = field.to_python(object_id)
obj = queryset.get(**{field.name: object_id})
except (model.DoesNotExist, ValidationError, __HOLE__):
return None
# object was in queryset - need to make sure we got the right translation
# we use getattr to trigger a load if instance exists but translation was
# not cached yet. Should not happen with current code, but is correct,
# future-proof behavior.
language_code = getattr(obj, 'language_code', None)
request_lang = self._language(request)
if language_code is None or language_code != request_lang:
# if language does not match that of request, we know request_lang
# does not exist, because it was the first language in the use_fallbacks
# list. We prepare it as a new translation.
obj.translate(request_lang)
return obj
|
ValueError
|
dataset/ETHPy150Open KristianOellegaard/django-hvad/hvad/admin.py/TranslatableAdmin.get_object
|
2,432 |
def get_request(url, headers={}):
"""
Make a HTTP GET request to a url
"""
MAX_TRIES_ALLOWED = current.MAX_TRIES_ALLOWED
i = 0
while i < MAX_TRIES_ALLOWED:
try:
response = requests.get(url,
headers=headers,
proxies=current.PROXY,
timeout=current.TIMEOUT)
except __HOLE__:
return -1
except:
return {}
if response.status_code == 200:
return response
i += 1
if response.status_code == 404 or response.status_code == 400:
return {}
return -1
|
RuntimeError
|
dataset/ETHPy150Open stopstalk/stopstalk-deployment/modules/sites/init.py/get_request
|
2,433 |
def test_bitfield():
try:
x = Bitfield(7, 'ab')
assert False
except ValueError:
pass
try:
x = Bitfield(7, 'ab')
assert False
except ValueError:
pass
try:
x = Bitfield(9, 'abc')
assert False
except ValueError:
pass
try:
x = Bitfield(0, 'a')
assert False
except ValueError:
pass
try:
x = Bitfield(1, '')
assert False
except ValueError:
pass
try:
x = Bitfield(7, '')
assert False
except ValueError:
pass
try:
x = Bitfield(8, '')
assert False
except ValueError:
pass
try:
x = Bitfield(9, 'a')
assert False
except __HOLE__:
pass
try:
x = Bitfield(7, chr(1))
assert False
except ValueError:
pass
try:
x = Bitfield(9, chr(0) + chr(0x40))
assert False
except ValueError:
pass
assert Bitfield(0, '').tostring() == ''
assert Bitfield(1, chr(0x80)).tostring() == chr(0x80)
assert Bitfield(7, chr(0x02)).tostring() == chr(0x02)
assert Bitfield(8, chr(0xFF)).tostring() == chr(0xFF)
assert Bitfield(9, chr(0) + chr(0x80)).tostring() == chr(0) + chr(0x80)
x = Bitfield(1)
assert x.numfalse == 1
x[0] = 1
assert x.numfalse == 0
x[0] = 1
assert x.numfalse == 0
assert x.tostring() == chr(0x80)
x = Bitfield(7)
assert len(x) == 7
x[6] = 1
assert x.numfalse == 6
assert x.tostring() == chr(0x02)
x = Bitfield(8)
x[7] = 1
assert x.tostring() == chr(1)
x = Bitfield(9)
x[8] = 1
assert x.numfalse == 8
assert x.tostring() == chr(0) + chr(0x80)
x = Bitfield(8, chr(0xC4))
assert len(x) == 8
assert x.numfalse == 5
assert x.tostring() == chr(0xC4)
|
ValueError
|
dataset/ETHPy150Open Cclleemm/FriendlyTorrent/src/tornado/BitTornado/bitfield.py/test_bitfield
|
2,434 |
def parse_defines(args):
"""
This parses a list of define argument in the form of -DNAME=VALUE or -DNAME (
which is treated as -DNAME=1).
"""
macros = {}
for arg in args:
try:
var, val = arg.split('=', 1)
except __HOLE__:
var = arg
val = '1'
macros[var] = val
return macros
|
ValueError
|
dataset/ETHPy150Open tonyfischetti/sake/sakelib/acts.py/parse_defines
|
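Worked example for row 2,434: `arg.split('=', 1)` unpacks into two names only when an '=' is present; a bare -DNAME argument raises ValueError on unpacking and is treated as NAME=1.

    for arg in ['DEBUG=0', 'VERBOSE']:
        try:
            var, val = arg.split('=', 1)
        except ValueError:  # no '=': a one-element list cannot unpack to two names
            var, val = arg, '1'
        print((var, val))
    # ('DEBUG', '0')
    # ('VERBOSE', '1')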
2,435 |
def expand_macros(raw_text, macros):
"""
this gets called before the sakefile is parsed. it looks for
macros defined anywhere in the sakefile (the start of the line
is '#!') and then replaces all occurences of '$variable' with the
value defined in the macro. it then returns the contents of the
file with the macros expanded.
"""
includes = {}
result = []
pattern = re.compile("#!\s*(\w+)\s*(?:(\??\s*)=\s*(.*$)|or\s*(.*))", re.UNICODE)
ipattern = re.compile("#<\s*(\S+)\s*(optional|or\s+(.+))?$", re.UNICODE)
for line in raw_text.split("\n"):
line = string.Template(line).safe_substitute(macros)
# note that the line is appended to result before it is checked for macros
# this prevents macros expanding into themselves
result.append(line)
if line.startswith("#!"):
match = pattern.match(line)
try:
var, opt, val, or_ = match.group(1, 2, 3, 4)
except:
raise InvalidMacroError("Failed to parse macro {}\n".format(line))
if or_:
if var not in macros:
raise InvalidMacroError("Macro {} is not defined: {}\n".format(var, or_))
elif not (opt and var in macros):
macros[var] = val
elif line.startswith("#<"):
match = ipattern.match(line)
try:
filename = match.group(1)
except:
error("Failed to parse include {}\n".format(line))
sys.exit(1)
try:
with io.open(filename, 'r') as f:
includes[filename] = expand_macros(f.read(), macros)
except __HOLE__:
if match.group(2):
if match.group(2).startswith('or '):
sprint(match.group(3))
else:
error("Nonexistent include {}\n".format(filename))
sys.exit(1)
return "\n".join(result), includes
|
IOError
|
dataset/ETHPy150Open tonyfischetti/sake/sakelib/acts.py/expand_macros
|
2,436 |
def find_fastq_pairs(fq1, fq2, out1, out2, tmpdir=None, quiet=False):
tmp1 = tempfile.NamedTemporaryFile(delete=False, prefix='.tmp', suffix='.gz', dir=tmpdir if tmpdir else os.path.dirname(fq1.fname))
tmp1_fname = tmp1.name
tmp1_out = gzip.GzipFile(fileobj=tmp1)
ngsutils.fastq.sort.fastq_sort(fq1, out=tmp1_out, tmpdir=tmpdir if tmpdir else os.path.dirname(fq1.fname))
tmp1_out.close()
tmp1.close()
tmp2 = tempfile.NamedTemporaryFile(delete=False, prefix='.tmp', suffix='.gz', dir=tmpdir if tmpdir else os.path.dirname(fq2.fname))
tmp2_fname = tmp2.name
tmp2_out = gzip.GzipFile(fileobj=tmp2)
ngsutils.fastq.sort.fastq_sort(fq2, out=tmp2_out, tmpdir=tmpdir if tmpdir else os.path.dirname(fq2.fname))
tmp2_out.close()
tmp2.close()
sys.stderr.write('Finding properly paired FASTQ reads...\n')
fq_tmp1 = FASTQ(tmp1_fname)
fq_tmp2 = FASTQ(tmp2_fname)
reader1 = fq_tmp1.fetch(quiet=quiet)
reader2 = fq_tmp2.fetch(quiet=True)
read1 = reader1.next()
read2 = reader2.next()
pairs = 0
discarded_1 = 0
discarded_2 = 0
while read1 and read2:
if read1.name == read2.name:
read1.write(out1)
read2.write(out2)
try:
read1 = reader1.next()
read2 = reader2.next()
except __HOLE__:
break
pairs += 1
elif read1.name < read2.name:
discarded_1 += 1
try:
read1 = reader1.next()
except StopIteration:
break
else:
discarded_2 += 1
try:
read2 = reader2.next()
except StopIteration:
break
fq_tmp1.close()
fq_tmp2.close()
os.unlink(tmp1_fname)
os.unlink(tmp2_fname)
return pairs, discarded_1, discarded_2
|
StopIteration
|
dataset/ETHPy150Open ngsutils/ngsutils/ngsutils/fastq/properpairs.py/find_fastq_pairs
|
2,437 |
def __set_baudrate(self, baud):
"""setting baudrate if supported"""
log.info('Changing communication to %s baud', baud)
self.__writeln(UART_SETUP.format(baud=baud))
# Wait for the string to be sent before switching baud
time.sleep(0.1)
try:
self._port.setBaudrate(baud)
except __HOLE__:
#pySerial 2.7
self._port.baudrate = baud
|
AttributeError
|
dataset/ETHPy150Open kmpm/nodemcu-uploader/nodemcu_uploader/uploader.py/Uploader.__set_baudrate
|
2,438 |
def __clear_buffers(self):
"""Clears the input and output buffers"""
try:
self._port.reset_input_buffer()
self._port.reset_output_buffer()
except __HOLE__:
#pySerial 2.7
self._port.flushInput()
self._port.flushOutput()
|
AttributeError
|
dataset/ETHPy150Open kmpm/nodemcu-uploader/nodemcu_uploader/uploader.py/Uploader.__clear_buffers
|
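Rows 2,437 and 2,438 probe for the pySerial 3.x method names and fall back to the 2.7 API when AttributeError fires. The same feature-detection idiom in isolation (hypothetical stand-in object, no pySerial required):

    class LegacyPort(object):
        def flushInput(self):
            print('flushed via the pySerial 2.7 name')

    port = LegacyPort()
    try:
        port.reset_input_buffer()  # pySerial >= 3.0 name
    except AttributeError:         # the label fills __HOLE__ above
        port.flushInput()          # pySerial 2.7 fallback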
2,439 |
@register.filter
def highlight(value, language):
try:
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter
except __HOLE__:
return value
# Can't use class-based colouring because the debug toolbar's css rules
# are more specific so take precedence
formatter = HtmlFormatter(style='friendly', nowrap=True, noclasses=True)
return highlight(value, get_lexer_by_name(language), formatter)
|
ImportError
|
dataset/ETHPy150Open hmarr/django-debug-toolbar-mongo/debug_toolbar_mongo/templatetags/mongo_debug_tags.py/highlight
|
2,440 |
def copy_except(self, src, exceptions):
dst = self.copy_none(src)
for name in dir(src):
setattr(dst, name, getattr(src, name))
for name in exceptions:
try:
delattr(dst, name)
except __HOLE__:
pass
return dst
|
AttributeError
|
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/rexec.py/RExec.copy_except
|
2,441 |
def copy_only(self, src, names):
dst = self.copy_none(src)
for name in names:
try:
value = getattr(src, name)
except __HOLE__:
continue
setattr(dst, name, value)
return dst
|
AttributeError
|
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/rexec.py/RExec.copy_only
|
2,442 |
def test():
import getopt, traceback
opts, args = getopt.getopt(sys.argv[1:], 'vt:')
verbose = 0
trusted = []
for o, a in opts:
if o == '-v':
verbose = verbose+1
if o == '-t':
trusted.append(a)
r = RExec(verbose=verbose)
if trusted:
r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted)
if args:
r.modules['sys'].argv = args
r.modules['sys'].path.insert(0, os.path.dirname(args[0]))
else:
r.modules['sys'].path.insert(0, "")
fp = sys.stdin
if args and args[0] != '-':
try:
fp = open(args[0])
except __HOLE__, msg:
print "%s: can't open file %r" % (sys.argv[0], args[0])
return 1
if fp.isatty():
try:
import readline
except ImportError:
pass
import code
class RestrictedConsole(code.InteractiveConsole):
def runcode(self, co):
self.locals['__builtins__'] = r.modules['__builtin__']
r.s_apply(code.InteractiveConsole.runcode, (self, co))
try:
RestrictedConsole(r.modules['__main__'].__dict__).interact()
except SystemExit, n:
return n
else:
text = fp.read()
fp.close()
c = compile(text, fp.name, 'exec')
try:
r.s_exec(c)
except SystemExit, n:
return n
except:
traceback.print_exc()
return 1
|
IOError
|
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/rexec.py/test
|
2,443 |
def urlencode(query):
"""Encode a sequence of two-element tuples or dictionary into a URL query string.
This version is adapted from the standard library to understand operators in the
pyesgf.search.constraints module.
If the query arg is a sequence of two-element tuples, the order of the
parameters in the output will match the order of parameters in the
input.
"""
if hasattr(query,"items"):
# mapping objects
query = query.items()
else:
# it's a bother at times that strings and string-like objects are
# sequences...
try:
# non-sequence items should not work with len()
# non-empty strings will fail this
if len(query) and not isinstance(query[0], tuple):
raise TypeError
# zero-length sequences of all types will get here and succeed,
# but that's a minor nit - since the original implementation
# allowed empty dicts that type of behavior probably should be
# preserved for consistency
except TypeError:
ty,va,tb = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", tb
def append(k, v, tag, l):
from .search.consts import OPERATOR_NEQ
if tag == OPERATOR_NEQ:
l.append('%s!=%s' % (k, v))
elif tag is None:
l.append('%s=%s' % (k, v))
else:
raise ValueError('Unknown operator tag %s' % tag)
def strip_tag(v):
if type(v) == tuple:
tag, v = v
else:
tag = None
return tag, v
l = []
for k, v in query:
tag, v = strip_tag(v)
k = quote_plus(str(k))
if isinstance(v, str):
v = quote_plus(v)
append(k, v, tag, l)
elif _is_unicode(v):
# is there a reasonable way to convert to ASCII?
# encode generates a string, but "replace" or "ignore"
# lose information and "strict" can raise UnicodeError
v = quote_plus(v.encode("ASCII","replace"))
append(k, v, tag, l)
else:
try:
# is this a sufficient test for sequence-ness?
len(v)
except __HOLE__:
# not a sequence
v = quote_plus(str(v))
append(k, v, tag, l)
else:
# loop over the sequence
for elt in v:
append(k, quote_plus(str(elt)), tag, l)
return '&'.join(l)
|
TypeError
|
dataset/ETHPy150Open stephenpascoe/esgf-pyclient/pyesgf/util.py/urlencode
|
2,444 |
def _iter_convert_to_object(self, iterable):
"""Iterable yields tuples of (binsha, mode, name), which will be converted
to the respective object representation"""
for binsha, mode, name in iterable:
path = join_path(self.path, name)
try:
yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
except __HOLE__:
raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path))
# END for each item
|
KeyError
|
dataset/ETHPy150Open gitpython-developers/GitPython/git/objects/tree.py/Tree._iter_convert_to_object
|
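Background for row 2,444: in a git tree entry the object type lives in the top octal digits of the mode, so `mode >> 12` reduces each mode to the small key that `_map_id_to_type` indexes. A quick check with the usual git mode constants:

    for mode in (0o100644, 0o100755, 0o040000, 0o120000):
        print('%s -> %d' % (oct(mode), mode >> 12))
    # blob and executable blob both map to 8; tree to 4; symlink to 10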
2,445 |
def start(self):
"""Starts the registry server (blocks)"""
if self.active:
raise ValueError("server is already running")
if self.sock is None:
raise ValueError("object disposed")
self.logger.debug("server started on %s:%s", *self.sock.getsockname())
try:
try:
self.active = True
self._work()
except __HOLE__:
self.logger.warn("User interrupt!")
finally:
self.active = False
self.logger.debug("server closed")
self.sock.close()
self.sock = None
|
KeyboardInterrupt
|
dataset/ETHPy150Open sccn/SNAP/src/rpyc/utils/registry.py/RegistryServer.start
|
2,446 |
def get_content(response):
"Handle the incompatibilities between Django <=1.4 and 1.5+"
try:
return ''.join(chunk.decode() for chunk in response.streaming_content)
except __HOLE__:
return response.content
|
AttributeError
|
dataset/ETHPy150Open smartfile/django-transfer/django_transfer/tests.py/get_content
|
2,447 |
def _get_by_field(self, recs, k, v, return_singleton):
result = []
for ref, rec in recs.iteritems():
if rec.get(k) == v:
result.append(ref)
if return_singleton:
try:
return result[0]
except __HOLE__:
raise Failure(['UUID_INVALID', v, result, recs, k])
return result
# Based upon _Method from xmlrpclib.
|
IndexError
|
dataset/ETHPy150Open nii-cloud/dodai-compute/nova/virt/xenapi/fake.py/SessionBase._get_by_field
|
2,448 |
@PostOnly
def fetch_org_by_centroid(request):
try:
lat = float(request.POST.get('lat'))
lon = float(request.POST.get('lon'))
limit = float(request.POST.get('limit', 20))
except __HOLE__:
json_error(INVALID_CENTROID_ERROR)
orgs = Org.objects.filter(location__latitude__range = (lat - limit, lat + limit)).filter(location__longitude__range = (lon - limit, lon + limit))[0:limit]
return json_response(json_encode(orgs))
|
AttributeError
|
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/org/ajax/views.py/fetch_org_by_centroid
|
2,449 |
@PostOnly
def update_org(request):
try:
org = json.loads(request.POST.get('org', {}))
org_id = int(org['id'])
except __HOLE__:
json_error(INVALID_ORG_ID_ERROR)
str_fields = [
'name', 'email', 'phone_number', 'url', 'img_url',
'revenue', 'size', 'vision_statement', 'mission_statement',
'blog_url', 'twitter_id', 'flickr_id', 'vimeo_id', 'youtube_id',
]
int_fields = [
'year_founded', 'facebook_id', # changed 'year' to 'year_founded' here -b
]
bool_fields = [
'fb_fetch_enabled', 'twitter_fetch_enabled',
]
original = Org.objects.get(id = org_id)
if 'parent_orgs' in org:
if org['parent_orgs']:
original.parent_org = Org.objects.get(id = org['parent_orgs'][0])
if 'ein' in org and org['ein'] != original.ein:
original.donation_enabled = False
if org['ein'] == '':
original.ein = ''
else:
original.ein = org['ein']
try:
original.donation_enable = False
# if nfg_api.npo_is_donation_enabled(org['ein']):
# original.donation_enabled = True
except Exception, inst:
logging.exception("Error checking donation status with nfs")
if 'child_orgs' in org:
org['child_orgs'] = [int(o) for o in org['child_orgs']]
for o in org['child_orgs']:
if o not in [l.id for l in original.parentorg.all()]:
original.parentorg.add(Org.objects.get(id = o))
for o in original.parentorg.all():
if o.id not in org['child_orgs']:
original.parentorg.remove(Org.objects.get(id = o.id))
# this probably needs to change down the road because i can't imagine this is very sustainable.
for i in org['tags']['context']:
iss = Issue.objects.get(name__iexact = i['name'])
try:
r = OrgIssueRelationship.objects.get(issue = iss, org = original)
r.rank = i['tag_rank']
r.date_updated = datetime.datetime.now()
r.save()
except:
r = OrgIssueRelationship()
r.issue = iss
r.org = original
r.date_created = datetime.datetime.now()
r.date_updated = datetime.datetime.now()
r.rank = i['tag_rank']
r.save()
'''
{u'locality': u'New York', u'region': u'Brooklyn', u'longitude': u'-73.948883', u'country_name': u'United States', u'postal_code': u'', u'address': u'', u'latitude': u'40.655071', u'type': u'County', u'raw_geodata': {u'lang': u'en-US', u'popRank': u'0', u'name': u'Brooklyn', u'woeid': u'12589335', u'uri': u'http://where.yahooapis.com/v1/place/12589335', u'admin1': {u'content': u'New York', u'code': u'US-NY', u'type': u'State'}, u'admin3': None, u'admin2': {u'content': u'Brooklyn', u'code': u'', u'type': u'County'}, u'centroid': {u'latitude': u'40.655071', u'longitude': u'-73.948883'}, u'locality1': {u'content': u'New York', u'type': u'Town'}, u'locality2': None, u'country': {u'content': u'United States', u'code': u'US', u'type': u'Country'}, u'boundingBox': {u'northEast': {u'latitude': u'40.739471', u'longitude': u'-73.833359'}, u'southWest': {u'latitude': u'40.570679', u'longitude': u'-74.042068'}}, u'areaRank': u'5', u'postal': None, u'placeTypeName': {u'content': u'County', u'code': u'9'}}}
'''
if 'location' in org and org['location']:
loc = org['location']
raw_geodata = json.dumps(loc["raw_geodata"]) if isinstance(loc.get("raw_geodata"), dict) else loc.get("raw_geodata")
#Until we fix duplicate locations we have to do the following...lame.
_locs = Location.objects.filter(raw_geodata = raw_geodata,
longitude = loc.get('longitude', None),
latitude = loc.get('latitude', None),
address = loc.get('address', ' '),
region = loc.get('region', ' '),
locality = loc.get('locality', ' '),
postal_code = loc.get('postal_code', ' '),
country_name = loc.get('country_name', ' '))
if len(_locs) > 0:
_loc = _locs[0]
else:
_loc = Location(raw_geodata = raw_geodata,
longitude = loc.get('longitude', None),
latitude = loc.get('latitude', None),
address = loc.get('address', ' '),
region = loc.get('region', ' '),
locality = loc.get('locality', ' '),
postal_code = loc.get('postal_code', ' '),
country_name = loc.get('country_name', ' '),)
_loc.save()
original.location = _loc
else:
original.location = None
if 'working_locations' in org:
for loc in org['working_locations']:
raw_geodata = json.dumps(loc["raw_geodata"]) if isinstance(loc.get("raw_geodata"), dict) else loc.get("raw_geodata")
if raw_geodata not in [l.raw_geodata for l in original.working_locations.all()]:
locs = Location.objects.filter(raw_geodata = raw_geodata,
longitude = loc.get('longitude', None),
latitude = loc.get('latitude', None),
address = loc.get('address', ' '),
region = loc.get('region', ' '),
locality = loc.get('locality', ' '),
postal_code = loc.get('postal_code', ' '),
country_name = loc.get('country_name', ' '),
)
if len(locs) > 0:
new_l = locs[0]
else:
new_l = Location(raw_geodata = raw_geodata,
longitude = loc.get('longitude', None),
latitude = loc.get('latitude', None),
address = loc.get('address', ' '),
region = loc.get('region', ' '),
locality = loc.get('locality', ' '),
postal_code = loc.get('postal_code', ' '),
country_name = loc.get('country_name', ' '),
)
new_l.save()
#Until we clean up the location DB we can't use get.
"""
new_l, created = Location.objects.get_or_create(
raw_geodata = json.dumps(loc["raw_geodata"]) if isinstance(loc.get("raw_geodata"), dict) else loc.get("raw_geodata"),
longitude = loc.get('longitude', None),
latitude = loc.get('latitude', None),
address = loc.get('address', ' '),
region = loc.get('region', ' '),
locality = loc.get('locality', ' '),
postal_code = loc.get('postal_code', ' '),
country_name = loc.get('country_name', ' '),
)
"""
original.working_locations.add(new_l)
original.save()
raw_geos = []
for new_loc in org['working_locations']:
raw_geodata = json.dumps(new_loc["raw_geodata"]) if isinstance(new_loc.get("raw_geodata"), dict) else new_loc.get("raw_geodata")
raw_geos.append(raw_geodata)
for old_loc in original.working_locations.all():
if old_loc.raw_geodata not in raw_geos:
original.working_locations.remove(old_loc)
for issue in original.issues.all():
if issue.name.lower() not in [l['name'].lower() for l in org['tags']['context']]:
r = OrgIssueRelationship.objects.get(issue = issue, org = original)
r.delete()
for f in str_fields:
if f in org and org[f] != getattr(original, f):
setattr(original, f, smart_str(org[f], encoding='utf-8'))
for f in int_fields:
if f in org and org[f] != getattr(original, f):
if org[f]:
setattr(original, f, int(org[f]))
else:
setattr(original, f, None)
for f in bool_fields:
if f in org and org[f] != getattr(original, f):
setattr(original, f, org[f])
if 'handle' in org and org['handle'] != original.handle:
_handle = original.handle
original.handle = create_handle(org['handle'])
cache.bust_on_handle(original, _handle, False)
if 'methods' in org:
for method in org['methods']:
if method not in [l.method for l in original.method_set.all()]:
m = Method()
m.method = method
m.date_created = datetime.datetime.now()
m.date_updated = datetime.datetime.now()
m.org = original
m.save()
for method in original.method_set.all():
if method.method not in org['methods']:
method.delete()
if 'accomplishments' in org:
for acc in org['accomplishments']:
if acc['text'] not in [l.description for l in original.accomplishment_set.all()]:
a = Accomplishment()
a.org = original
a.header = acc.get('year', '')
a.description = acc.get('text', '')
a.save()
for acc in original.accomplishment_set.all():
acc_header = acc.header
acc_description = acc.description
delete = True
for new_acc in org["accomplishments"]:
if new_acc["year"] == acc_header and new_acc["text"] == acc_description:
delete = False
if delete:
acc.delete()
original.save()
try:
cache.bust_on_handle(original, original.handle)
except:
pass
return json_response({'result' : original.handle})
|
AttributeError
|
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/org/ajax/views.py/update_org
|
2,450 |
@PostOnly
def remove_org(request):
try:
id = getattr(request.POST, 'id')
org = Org.objects.get(id = id)
except __HOLE__, ObjectDoesNotExist:
return json_error(INVALID_ORG_ID_ERROR)
# TODO: so, uh, we need to figure out if the current user is authorized to do this?
org.delete()
cache.bust_on_handle(org, org.handle, False)
return json_response({'result' : 1})
|
AttributeError
|
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/org/ajax/views.py/remove_org
|
2,451 |
@PostOnly
def flag_org(request):
try:
org_id = getattr(request.POST, 'org_id')
org = Org.objects.get(id = org_id)
except __HOLE__, ObjectDoesNotExist:
return json_error(CANNOT_FLAG_ERROR)
# TODO: so, uh, we need to figure out if the current user is authorized to do this?
org.flagged = True
org.save()
cache.bust_on_handle(org, org.handle, False)
return json_response({'result' : True})
|
AttributeError
|
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/org/ajax/views.py/flag_org
|
2,452 |
def __call__(self, *args):
if not self.called:
self.called = True
cb, args, kw = self.tpl
try:
cb(*args, **kw)
finally:
try:
del self.tpl
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open veegee/guv/guv/hubs/timer.py/Timer.__call__
|
2,453 |
def __init__(self, cardinal, config):
"""Registers a callback for URL detection."""
# Initialize logger
self.logger = logging.getLogger(__name__)
try:
self._max_description_length = config['max_description_length']
except KeyError:
self.logger.warning("No max description length in config -- using"
" defaults: %d" % DEFAULT_MAX_DESCRIPTION_LENGTH)
self._max_description_length = DEFAULT_MAX_DESCRIPTION_LENGTH
try:
self._language_code = config['language_code']
except __HOLE__:
self.logger.warning("No language in config -- using defaults: %s" %
DEFAULT_LANGUAGE_CODE)
self._language_code = DEFAULT_LANGUAGE_CODE
self._callback_id = cardinal.event_manager.register_callback(
'urls.detection', self.url_callback)
|
KeyError
|
dataset/ETHPy150Open JohnMaguire/Cardinal/plugins/wikipedia/plugin.py/WikipediaPlugin.__init__
|
2,454 |
def get_signature(self, tip):
try:
with open(self.tip_path(tip), 'r') as fh:
data = fh.read()
if not data:
return ""
return (int(data),)
except __HOLE__:
return ""
|
IOError
|
dataset/ETHPy150Open richo/groundstation/groundstation/gref.py/Gref.get_signature
|
2,455 |
def get_obj(self, context):
if self.model and self.lookup:
if isinstance(self.lookup[1], template.Variable):
try:
lookup_val = self.lookup[1].resolve(context)
except template.VariableDoesNotExist, e:
log.warning('BoxNode: Template variable does not exist. var_name=%s', self.lookup[1].var)
raise ObjectNotFoundOrInvalid()
else:
lookup_val = self.lookup[1]
try:
obj = get_cached_object(self.model, **{self.lookup[0] : lookup_val})
except (models.ObjectDoesNotExist, __HOLE__), e:
log.warning('BoxNode: %s (%s : %s)', str(e), self.lookup[0], lookup_val)
raise ObjectNotFoundOrInvalid()
else:
try:
obj = self.var.resolve(context)
except template.VariableDoesNotExist, e:
log.warning('BoxNode: Template variable does not exist. var_name=%s', self.var.var)
raise ObjectNotFoundOrInvalid()
if not obj:
raise ObjectNotFoundOrInvalid()
return obj
|
AssertionError
|
dataset/ETHPy150Open ella/ella/ella/core/templatetags/core.py/BoxNode.get_obj
|
2,456 |
def catch_notimplementederror(f):
"""Decorator to simplify catching drivers raising NotImplementedError
If a particular call makes a driver raise NotImplementedError, we
log it so that we can extract this information afterwards to
automatically generate a hypervisor/feature support matrix."""
def wrapped_func(self, *args, **kwargs):
try:
return f(self, *args, **kwargs)
except __HOLE__:
frame = traceback.extract_tb(sys.exc_info()[2])[-1]
LOG.error('%(driver)s does not implement %(method)s' % {
'driver': type(self.connection),
'method': frame[2]})
wrapped_func.__name__ = f.__name__
wrapped_func.__doc__ = f.__doc__
return wrapped_func
|
NotImplementedError
|
dataset/ETHPy150Open nii-cloud/dodai-compute/nova/tests/test_virt_drivers.py/catch_notimplementederror
|
2,457 |
def extractWord(text):
if not text:
return None
l = text.split(None, 1)
word = l[0]
try:
text = l[1]
except __HOLE__:
text = ''
return word, text
|
IndexError
|
dataset/ETHPy150Open twisted/ldaptor/ldaptor/schema.py/extractWord
|
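Row 2,457 splits off the first whitespace-delimited word; for single-word input `l[1]` raises IndexError and the remainder defaults to the empty string. The core of the same logic, renamed for a standalone demo (the empty-text guard is omitted):

    def extract_word(text):
        l = text.split(None, 1)
        word = l[0]
        try:
            rest = l[1]
        except IndexError:  # single word: no second element
            rest = ''
        return word, rest

    print(extract_word('objectclass top person'))  # ('objectclass', 'top person')
    print(extract_word('top'))                     # ('top', '')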
2,458 |
def setup_module(module):
from nose import SkipTest
try:
import numpy
except __HOLE__:
raise SkipTest("segmentation.doctest requires numpy")
|
ImportError
|
dataset/ETHPy150Open nltk/nltk/nltk/test/segmentation_fixt.py/setup_module
|
2,459 |
def read_images(path, sz=None):
"""Reads the images in a given folder, resizes images on the fly if size is given.
Args:
path: Path to a folder with subfolders representing the subjects (persons).
sz: A tuple with the size Resizes
Returns:
A list [X,y]
X: The images, which is a Python list of numpy arrays.
y: The corresponding labels (the unique number of the subject, person) in a Python list.
"""
c = 0
X,y = [], []
for dirname, dirnames, filenames in os.walk(path):
for subdirname in dirnames:
subject_path = os.path.join(dirname, subdirname)
for filename in os.listdir(subject_path):
try:
im = cv2.imread(os.path.join(subject_path, filename), cv2.IMREAD_GRAYSCALE)
# resize to given size (if given)
if (sz is not None):
im = cv2.resize(im, sz)
X.append(np.asarray(im, dtype=np.uint8))
y.append(c)
except __HOLE__, (errno, strerror):
print "I/O error({0}): {1}".format(errno, strerror)
except:
print "Unexpected error:", sys.exc_info()[0]
raise
c = c+1
return [X,y]
|
IOError
|
dataset/ETHPy150Open fatcloud/PyCV-time/opencv-official-samples/2.4.9/facerec_demo.py/read_images
|
2,460 |
def float_list_to_int_dict(lst, bucket_size):
"""Takes a list of floats and a bucket size and return a dict of
{ n : count, ...} where the count is the number of floats that drop into
the range n * bucket_size to (n + 1) * bucket_size."""
ret = {}
for val in lst:
k = int(round(val / bucket_size, 0))
try:
ret[k] += 1
except __HOLE__:
ret[k] = 1
return ret
|
KeyError
|
dataset/ETHPy150Open manahl/PythonTrainingExercises/Intermediate/Histogram/solution/DictHistogram.py/float_list_to_int_dict
|
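Worked example for row 2,460: each value is divided by the bucket size and rounded to the nearest integer bucket; the KeyError branch initializes a bucket's count on first sight.

    def float_list_to_int_dict(lst, bucket_size):  # same body as the row above
        ret = {}
        for val in lst:
            k = int(round(val / bucket_size, 0))
            try:
                ret[k] += 1
            except KeyError:  # first value landing in this bucket
                ret[k] = 1
        return ret

    print(float_list_to_int_dict([0.1, 0.2, 1.9], 1.0))  # {0: 2, 2: 1}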
2,461 |
def __get(self, name, obj, default=None):
try:
attr = getattr(self, name)
except __HOLE__:
return default
if callable(attr):
return attr(obj)
return attr
|
AttributeError
|
dataset/ETHPy150Open django/django/django/contrib/sitemaps/__init__.py/Sitemap.__get
|
2,462 |
def add_infos(self, *keyvals, **kwargs):
"""Adds the given info and returns a dict composed of just this added info."""
infos = dict(keyvals)
kv_pairs = []
for key, val in infos.items():
key = key.strip()
val = str(val).strip()
if ':' in key:
raise ValueError('info key "{}" must not contain a colon.'.format(key))
kv_pairs.append((key, val))
for k, v in kv_pairs:
if k in self._info:
raise ValueError('info key "{}" already exists with value {}. '
'Cannot add it again with value {}.'.format(k, self._info[k], v))
self._info[k] = v
try:
with open(self._info_file, 'a') as outfile:
for k, v in kv_pairs:
outfile.write('{}: {}\n'.format(k, v))
except __HOLE__:
if not kwargs.get('ignore_errors', False):
raise
|
IOError
|
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/base/run_info.py/RunInfo.add_infos
|
2,463 |
def get_parsers(parsers_path):
parsers = {}
# For each directory in parsers path
for component_directory in parsers_path.split(os.path.pathsep):
# Add component directory to path
sys.path.insert(0, component_directory)
components_list = glob.glob(component_directory + '/*.py')
# For each *.py file in directory
for component_file in components_list:
component = os.path.splitext(os.path.basename(component_file))[0]
try:
current_module = importlib.import_module(component)
for name in dir(current_module):
if not name.startswith('_'):
obj = getattr(current_module, name)
try:
if obj != PythonectInputFileFormatParser and issubclass(obj, PythonectInputFileFormatParser):
for ext in obj.FILE_EXTS:
parsers[ext] = obj()
except __HOLE__:
pass
except Exception:
pass
return parsers
|
TypeError
|
dataset/ETHPy150Open ikotler/pythonect/pythonect/internal/parsers/__init__.py/get_parsers
|
2,464 |
def add_view(self, request, **kwargs):
kwargs['form_url'] = request.get_full_path() # Preserve GET parameters
if 'translation_of' in request.GET and 'language' in request.GET:
try:
original = self.model._tree_manager.get(
pk=request.GET.get('translation_of'))
except (__HOLE__, self.model.DoesNotExist):
pass
else:
language_code = request.GET['language']
language = dict(
django_settings.LANGUAGES).get(language_code, '')
kwargs['extra_context'] = {
'adding_translation': True,
'title': _(
'Add %(language)s translation of "%(page)s"') % {
'language': language,
'page': original,
},
'language_name': language,
'translation_of': original,
}
return super(PageAdmin, self).add_view(request, **kwargs)
|
AttributeError
|
dataset/ETHPy150Open feincms/feincms/feincms/module/page/modeladmins.py/PageAdmin.add_view
|
2,465 |
def response_add(self, request, obj, *args, **kwargs):
response = super(PageAdmin, self).response_add(
request, obj, *args, **kwargs)
if ('parent' in request.GET
and '_addanother' in request.POST
and response.status_code in (301, 302)):
# Preserve GET parameters if we are about to add another page
response['Location'] += '?parent=%s' % request.GET['parent']
if ('translation_of' in request.GET
and '_copy_content_from_original' in request.POST):
# Copy all contents
for content_type in obj._feincms_content_types:
if content_type.objects.filter(parent=obj).exists():
# Short-circuit processing -- don't copy any contents if
# newly added object already has some
return response
try:
original = self.model._tree_manager.get(
pk=request.GET.get('translation_of'))
original = original.original_translation
obj.copy_content_from(original)
obj.save()
self.message_user(request, _(
'The content from the original translation has been copied'
' to the newly created page.'))
except (__HOLE__, self.model.DoesNotExist):
pass
return response
|
AttributeError
|
dataset/ETHPy150Open feincms/feincms/feincms/module/page/modeladmins.py/PageAdmin.response_add
|
2,466 |
def __getattr__(self, key):
try:
return self.__getitem__(key)
except __HOLE__:
# This lets you use dict-type attributes that aren't keys
return getattr(super(AttrDict, self), key)
|
KeyError
|
dataset/ETHPy150Open memsql/memsql-loader/memsql_loader/util/attr_dict.py/AttrDict.__getattr__
|
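A minimal sketch of the pattern in the row above, assuming `AttrDict` subclasses `dict` (the base class is not visible in this row); the fallback here is simplified to raise AttributeError directly for missing keys:

class AttrDict(dict):
    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            # Simplified fallback: surface the usual AttributeError.
            raise AttributeError(key)

d = AttrDict(host="localhost", port=3306)
print("%s:%s" % (d.host, d["port"]))  # localhost:3306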
2,467 |
def on_select_remote(self, remote_index):
"""
After the user selects a remote, display a panel of branches that are
present on that remote, then proceed to `on_select_branch`.
"""
# If the user pressed `esc` or otherwise cancelled.
if remote_index == -1:
return
self.selected_remote = self.remotes[remote_index]
self.branches_on_selected_remote = self.list_remote_branches(self.selected_remote)
self.current_local_branch = self.get_current_branch_name()
try:
pre_selected_index = self.branches_on_selected_remote.index(
self.selected_remote + "/" + self.current_local_branch)
except __HOLE__:
pre_selected_index = 0
self.view.window().show_quick_panel(
self.branches_on_selected_remote,
self.on_select_branch,
flags=sublime.MONOSPACE_FONT,
selected_index=pre_selected_index
)
|
ValueError
|
dataset/ETHPy150Open divmain/GitSavvy/github/commands/pull_request.py/GsCreatePullRequestCommand.on_select_remote
|
2,468 |
def changed(self, originally_changed):
Specification.changed(self, originally_changed)
try:
del self._v_attrs
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/declarations.py/Declaration.changed
|
2,469 |
def implementedByFallback(cls):
"""Return the interfaces implemented for a class' instances
The value returned is an IDeclaration.
"""
try:
spec = cls.__dict__.get('__implemented__')
except AttributeError:
# we can't get the class dict. This is probably due to a
# security proxy. If this is the case, then probably no
# descriptor was installed for the class.
# We don't want to depend directly on zope.security in
# zope.interface, but we'll try to make reasonable
# accommodations in an indirect way.
# We'll check to see if there's an implements:
spec = getattr(cls, '__implemented__', None)
if spec is None:
# There's no spec stored in the class. Maybe it's a builtin:
spec = BuiltinImplementationSpecifications.get(cls)
if spec is not None:
return spec
return _empty
if spec.__class__ == Implements:
# we defaulted to _empty or there was a spec. Good enough.
# Return it.
return spec
# TODO: need old style __implements__ compatibility?
# Hm, there's an __implemented__, but it's not a spec. Must be
# an old-style declaration. Just compute a spec for it
return Declaration(*_normalizeargs((spec, )))
if isinstance(spec, Implements):
return spec
if spec is None:
spec = BuiltinImplementationSpecifications.get(cls)
if spec is not None:
return spec
# TODO: need old style __implements__ compatibility?
if spec is not None:
# old-style __implemented__ = foo declaration
spec = (spec, ) # tuplefy, as it might be just an int
spec = Implements(*_normalizeargs(spec))
spec.inherit = None # old-style implies no inherit
del cls.__implemented__ # get rid of the old-style declaration
else:
try:
bases = cls.__bases__
except __HOLE__:
if not callable(cls):
raise TypeError("ImplementedBy called for non-factory", cls)
bases = ()
spec = Implements(*[implementedBy(c) for c in bases])
spec.inherit = cls
spec.__name__ = (getattr(cls, '__module__', '?') or '?') + \
'.' + (getattr(cls, '__name__', '?') or '?')
try:
cls.__implemented__ = spec
if not hasattr(cls, '__providedBy__'):
cls.__providedBy__ = objectSpecificationDescriptor
if (isinstance(cls, DescriptorAwareMetaClasses)
and
'__provides__' not in cls.__dict__):
# Make sure we get a __provides__ descriptor
cls.__provides__ = ClassProvides(
cls,
getattr(cls, '__class__', type(cls)),
)
except TypeError:
if not isinstance(cls, type):
raise TypeError("ImplementedBy called for non-type", cls)
BuiltinImplementationSpecifications[cls] = spec
return spec
|
AttributeError
|
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/declarations.py/implementedByFallback
|
2,470 |
def __call__(self, ob):
if isinstance(ob, DescriptorAwareMetaClasses):
classImplements(ob, *self.interfaces)
return ob
spec = Implements(*self.interfaces)
try:
ob.__implemented__ = spec
except __HOLE__:
raise TypeError("Can't declare implements", ob)
return ob
|
AttributeError
|
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/declarations.py/implementer.__call__
|
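For reference, the decorator above is normally applied through the public zope.interface API; a sketch with a hypothetical marker interface:

from zope.interface import Interface, implementer

class IGreeter(Interface):
    """Hypothetical marker interface for this example."""

@implementer(IGreeter)
class Greeter(object):
    pass

print(IGreeter.providedBy(Greeter()))  # True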
2,471 |
def getObjectSpecificationFallback(ob):
provides = getattr(ob, '__provides__', None)
if provides is not None:
if isinstance(provides, SpecificationBase):
return provides
try:
cls = ob.__class__
except __HOLE__:
# We can't get the class, so just consider provides
return _empty
return implementedBy(cls)
|
AttributeError
|
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/declarations.py/getObjectSpecificationFallback
|
2,472 |
def providedByFallback(ob):
# Here we have either a special object, an old-style declaration
# or a descriptor
# Try to get __providedBy__
try:
r = ob.__providedBy__
except __HOLE__:
# Not set yet. Fall back to lower-level thing that computes it
return getObjectSpecification(ob)
try:
# We might have gotten a descriptor from an instance of a
# class (like an ExtensionClass) that doesn't support
# descriptors. We'll make sure we got one by trying to get
# the only attribute, which all specs have.
r.extends
except AttributeError:
# The object's class doesn't understand descriptors.
# Sigh. We need to get an object descriptor, but we have to be
# careful. We want to use the instance's __provides__, if
# there is one, but only if it didn't come from the class.
try:
r = ob.__provides__
except AttributeError:
# No __provides__, so just fall back to implementedBy
return implementedBy(ob.__class__)
# We need to make sure we got the __provides__ from the
# instance. We'll do this by making sure we don't get the same
# thing from the class:
try:
cp = ob.__class__.__provides__
except AttributeError:
# The ob doesn't have a class or the class has no
# provides, assume we're done:
return r
if r is cp:
# Oops, we got the provides from the class. This means
# the object doesn't have its own. We should use implementedBy
return implementedBy(ob.__class__)
return r
|
AttributeError
|
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/declarations.py/providedByFallback
|
2,473 |
def __new__(cls, name, bases, attrs):
"""
Field shortcut creation:
Takes field names `db_*` and creates property wrappers named
without the `db_` prefix. So db_key -> key
This wrapper happens on the class level, so there is no
overhead when creating objects. If a class already has a
wrapper of the given name, the automatic creation is skipped.
Notes:
Remember to document this auto-wrapping in the class
header, this could seem very much like magic to the user
otherwise.
"""
attrs["typename"] = cls.__name__
attrs["path"] = "%s.%s" % (attrs["__module__"], name)
attrs["_is_deleted"] = False
# set up the typeclass handling only if a variable _is_typeclass is set on the class
def create_wrapper(cls, fieldname, wrappername, editable=True, foreignkey=False):
"Helper method to create property wrappers with unique names (must be in separate call)"
def _get(cls, fname):
"Wrapper for getting database field"
if _GA(cls, "_is_deleted"):
raise ObjectDoesNotExist("Cannot access %s: Hosting object was already deleted." % fname)
return _GA(cls, fieldname)
def _get_foreign(cls, fname):
"Wrapper for returning foreignkey fields"
if _GA(cls, "_is_deleted"):
raise ObjectDoesNotExist("Cannot access %s: Hosting object was already deleted." % fname)
return _GA(cls, fieldname)
def _set_nonedit(cls, fname, value):
"Wrapper for blocking editing of field"
raise FieldError("Field %s cannot be edited." % fname)
def _set(cls, fname, value):
"Wrapper for setting database field"
if _GA(cls, "_is_deleted"):
raise ObjectDoesNotExist("Cannot set %s to %s: Hosting object was already deleted!" % (fname, value))
_SA(cls, fname, value)
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be set when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _set_foreign(cls, fname, value):
"Setter only used on foreign key relations, allows setting with #dbref"
if _GA(cls, "_is_deleted"):
raise ObjectDoesNotExist("Cannot set %s to %s: Hosting object was already deleted!" % (fname, value))
try:
value = _GA(value, "dbobj")
except AttributeError:
pass
if isinstance(value, (basestring, int)):
value = to_str(value, force_string=True)
if (value.isdigit() or value.startswith("#")):
# we also allow setting using dbrefs, if so we try to load the matching object.
# (we assume the object is of the same type as the class holding the field, if
# not a custom handler must be used for that field)
dbid = dbref(value, reqhash=False)
if dbid:
model = _GA(cls, "_meta").get_field(fname).model
try:
value = model._default_manager.get(id=dbid)
except __HOLE__:
# maybe it is just a name that happens to look like a dbid
pass
_SA(cls, fname, value)
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be set when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _del_nonedit(cls, fname):
"wrapper for not allowing deletion"
raise FieldError("Field %s cannot be edited." % fname)
def _del(cls, fname):
"Wrapper for clearing database field - sets it to None"
_SA(cls, fname, None)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
# wrapper factories
fget = lambda cls: _get(cls, fieldname)
if not editable:
fset = lambda cls, val: _set_nonedit(cls, fieldname, val)
elif foreignkey:
fget = lambda cls: _get_foreign(cls, fieldname)
fset = lambda cls, val: _set_foreign(cls, fieldname, val)
else:
fset = lambda cls, val: _set(cls, fieldname, val)
fdel = lambda cls: _del(cls, fieldname) if editable else _del_nonedit(cls,fieldname)
# set docstrings for auto-doc
fget.__doc__ = "A wrapper for getting database field `%s`." % fieldname
fset.__doc__ = "A wrapper for setting (and saving) database field `%s`." % fieldname
fdel.__doc__ = "A wrapper for deleting database field `%s`." % fieldname
# assigning
attrs[wrappername] = property(fget, fset, fdel)
#type(cls).__setattr__(cls, wrappername, property(fget, fset, fdel))#, doc))
# exclude some models that should not auto-create wrapper fields
if cls.__name__ in ("ServerConfig", "TypeNick"):
return
# dynamically create the wrapper properties for all fields not already handled (manytomanyfields are always handlers)
for fieldname, field in ((fname, field) for fname, field in listitems(attrs)
if fname.startswith("db_") and type(field).__name__ != "ManyToManyField"):
foreignkey = type(field).__name__ == "ForeignKey"
wrappername = "dbid" if fieldname == "id" else fieldname.replace("db_", "", 1)
if wrappername not in attrs:
# makes sure not to overload manually created wrappers on the model
create_wrapper(cls, fieldname, wrappername, editable=field.editable, foreignkey=foreignkey)
return super(SharedMemoryModelBase, cls).__new__(cls, name, bases, attrs)
|
ObjectDoesNotExist
|
dataset/ETHPy150Open evennia/evennia/evennia/utils/idmapper/models.py/SharedMemoryModelBase.__new__
|
2,474 |
@classmethod
def cache_instance(cls, instance, new=False):
"""
Method to store an instance in the cache.
Args:
instance (Class instance): the instance to cache.
new (bool, optional): this is the first time this instance is
cached (i.e. this is not an update operation like after a
db save).
"""
pk = instance._get_pk_val()
if pk is not None:
cls.__dbclass__.__instance_cache__[pk] = instance
if new:
try:
# trigger the at_init hook only
# at first initialization
instance.at_init()
except __HOLE__:
pass
|
AttributeError
|
dataset/ETHPy150Open evennia/evennia/evennia/utils/idmapper/models.py/SharedMemoryModel.cache_instance
|
2,475 |
@classmethod
def _flush_cached_by_key(cls, key, force=True):
"""
Remove the cached reference.
"""
try:
if force or not cls._idmapper_recache_protection:
del cls.__dbclass__.__instance_cache__[key]
except __HOLE__:
pass
|
KeyError
|
dataset/ETHPy150Open evennia/evennia/evennia/utils/idmapper/models.py/SharedMemoryModel._flush_cached_by_key
|
2,476 |
def import_model(self, name, path="floyd.db.models"):
"""imports a model of name from path, returning from local model
cache if it has been previously loaded otherwise importing"""
if name in self._model_cache:
return self._model_cache[name]
try:
model = getattr(__import__(path, None, None, [name]), name)
self._model_cache[name] = model
except __HOLE__:
return False
return model
|
ImportError
|
dataset/ETHPy150Open nikcub/floyd/floyd/db/__init__.py/DataStore.import_model
|
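The same lazy import-and-cache pattern written against importlib as a standalone sketch (the module path is taken from the row's default; catching AttributeError for a missing attribute is an addition here, since the original only guards the import):

import importlib

_model_cache = {}

def import_model(name, path="floyd.db.models"):
    # Return a cached class if we've imported it before.
    if name in _model_cache:
        return _model_cache[name]
    try:
        model = getattr(importlib.import_module(path), name)
    except (ImportError, AttributeError):
        return False
    _model_cache[name] = model
    return model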
2,477 |
def jsonify(fnc):
def wrapper(*args, **kwargs):
ret = fnc(*args, **kwargs)
try:
json.loads(ret)
except __HOLE__:
# Value should already be JSON-encoded, but some operations
# may write raw string values; this will catch those and
# properly encode them.
ret = json.dumps(ret)
return ret
return wrapper
|
ValueError
|
dataset/ETHPy150Open nii-cloud/dodai-compute/plugins/xenserver/xenapi/etc/xapi.d/plugins/xenstore.py/jsonify
|
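Usage sketch for the decorator above; the wrapped function is a hypothetical stand-in returning a raw, non-JSON string, which the decorator re-encodes:

import json

@jsonify
def read_value_stub():
    # Raw string, not valid JSON; json.loads fails, so it gets dumps()'d.
    return "some raw value"

print(read_value_stub())  # "some raw value"  (now JSON-encoded, with quotes)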
2,478 |
@jsonify
def list_records(self, arg_dict):
"""Returns all the stored data at or below the given path for the
given dom_id. The data is returned as a json-ified dict, with the
path as the key and the stored value as the value. If the path
doesn't exist, an empty dict is returned.
"""
dirpath = "/local/domain/%(dom_id)s/%(path)s" % arg_dict
cmd = ["xenstore-ls", dirpath.rstrip("/")]
try:
ret, recs = _run_command(cmd)
except pluginlib.PluginError, e:
if "No such file or directory" in "%s" % e:
# Path doesn't exist.
return {}
return str(e)
raise
base_path = arg_dict["path"]
paths = _paths_from_ls(recs)
ret = {}
for path in paths:
if base_path:
arg_dict["path"] = "%s/%s" % (base_path, path)
else:
arg_dict["path"] = path
rec = read_record(self, arg_dict)
try:
val = json.loads(rec)
except __HOLE__:
val = rec
ret[path] = val
return ret
|
ValueError
|
dataset/ETHPy150Open nii-cloud/dodai-compute/plugins/xenserver/xenapi/etc/xapi.d/plugins/xenstore.py/list_records
|
2,479 |
@view_config(route_name='admin_badge',
request_method='POST',
request_param='add',
renderer='h:templates/admin/badge.html.jinja2',
permission='admin_badge')
def badge_add(request):
try:
request.db.add(models.Blocklist(uri=request.params['add']))
except __HOLE__ as err:
request.session.flash(err.message, 'error')
return badge_index(request)
|
ValueError
|
dataset/ETHPy150Open hypothesis/h/h/admin/views/badge.py/badge_add
|
2,480 |
def compat_urllib_parse_unquote(string, encoding='utf-8', errors='replace'):
if string == '':
return string
res = string.split('%')
if len(res) == 1:
return string
if encoding is None:
encoding = 'utf-8'
if errors is None:
errors = 'replace'
# pct_sequence: contiguous sequence of percent-encoded bytes, decoded
pct_sequence = b''
string = res[0]
for item in res[1:]:
try:
if not item:
raise ValueError
pct_sequence += item[:2].decode('hex')
rest = item[2:]
if not rest:
# This segment was just a single percent-encoded character.
# May be part of a sequence of code units, so delay decoding.
# (Stored in pct_sequence).
continue
except __HOLE__:
rest = '%' + item
# Encountered non-percent-encoded characters. Flush the current
# pct_sequence.
string += pct_sequence.decode(encoding, errors) + rest
pct_sequence = b''
if pct_sequence:
# Flush the final pct_sequence
string += pct_sequence.decode(encoding, errors)
return string
|
ValueError
|
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/compat.py/compat_urllib_parse_unquote
|
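On Python 3 the standard library provides this behaviour directly, with the same utf-8/replace defaults; a sketch of the equivalent call:

from urllib.parse import unquote

# Decodes percent-encoded UTF-8 byte sequences, replacing malformed ones.
print(unquote("caf%C3%A9%20au%20lait"))  # café au lait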
2,481 |
def compat_expanduser(path):
"""Expand ~ and ~user constructions. If user or $HOME is unknown,
do nothing."""
if not path.startswith('~'):
return path
i = path.find('/', 1)
if i < 0:
i = len(path)
if i == 1:
if 'HOME' not in os.environ:
import pwd
userhome = pwd.getpwuid(os.getuid()).pw_dir
else:
userhome = compat_getenv('HOME')
else:
import pwd
try:
pwent = pwd.getpwnam(path[1:i])
except __HOLE__:
return path
userhome = pwent.pw_dir
userhome = userhome.rstrip('/')
return (userhome + path[i:]) or '/'
|
KeyError
|
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/compat.py/compat_expanduser
|
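The compat shim mirrors os.path.expanduser; on a platform where $HOME is set, the stock call gives the same result (paths below are illustrative):

import os.path

print(os.path.expanduser("~/projects"))   # e.g. /home/alice/projects
print(os.path.expanduser("~root/.ssh"))   # e.g. /root/.ssh on POSIX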
2,482 |
def compat_expanduser(path):
"""Expand ~ and ~user constructs.
If user or $HOME is unknown, do nothing."""
if path[:1] != '~':
return path
i, n = 1, len(path)
while i < n and path[i] not in '/\\':
i = i + 1
if 'HOME' in os.environ:
userhome = compat_getenv('HOME')
elif 'USERPROFILE' in os.environ:
userhome = compat_getenv('USERPROFILE')
elif 'HOMEPATH' not in os.environ:
return path
else:
try:
drive = compat_getenv('HOMEDRIVE')
except __HOLE__:
drive = ''
userhome = os.path.join(drive, compat_getenv('HOMEPATH'))
if i != 1: # ~user
userhome = os.path.join(os.path.dirname(userhome), path[1:i])
return userhome + path[i:]
|
KeyError
|
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/compat.py/compat_expanduser
|
2,483 |
def workaround_optparse_bug9161():
op = optparse.OptionParser()
og = optparse.OptionGroup(op, 'foo')
try:
og.add_option('-t')
except __HOLE__:
real_add_option = optparse.OptionGroup.add_option
def _compat_add_option(self, *args, **kwargs):
enc = lambda v: (
v.encode('ascii', 'replace') if isinstance(v, compat_str)
else v)
bargs = [enc(a) for a in args]
bkwargs = dict(
(k, enc(v)) for k, v in kwargs.items())
return real_add_option(self, *bargs, **bkwargs)
optparse.OptionGroup.add_option = _compat_add_option
|
TypeError
|
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/compat.py/workaround_optparse_bug9161
|
2,484 |
def _mock_delete_addon(name, *args, **kwargs):
try:
active_addons.remove(name)
except __HOLE__:
pass
|
ValueError
|
dataset/ETHPy150Open CenterForOpenScience/osf.io/tests/test_registrations/test_archiver.py/_mock_delete_addon
|
2,485 |
def rm(path):
debg('Deleting %r' % path)
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except __HOLE__:
pass
|
OSError
|
dataset/ETHPy150Open kuri65536/python-for-android/python-build/build.py/rm
|
2,486 |
def settrace_and_raise(tracefunc):
try:
_settrace_and_raise(tracefunc)
except __HOLE__ as exc:
pass
|
RuntimeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/settrace_and_raise
|
2,487 |
def tightloop_example():
items = range(0, 3)
try:
i = 0
while 1:
b = items[i]; i+=1
except __HOLE__:
pass
|
IndexError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/tightloop_example
|
2,488 |
def tighterloop_example():
items = range(1, 4)
try:
i = 0
while 1: i = items[i]
except __HOLE__:
pass
|
IndexError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/tighterloop_example
|
2,489 |
def run_test_for_event(self, event):
"""Tests that an exception raised in response to the given event is
handled OK."""
self.raiseOnEvent = event
try:
for i in range(sys.getrecursionlimit() + 1):
sys.settrace(self.trace)
try:
self.f()
except ValueError:
pass
else:
self.fail("exception not raised!")
except __HOLE__:
self.fail("recursion counter not reset")
# Test the handling of exceptions raised by each kind of trace event.
|
RuntimeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/RaisingTraceFuncTestCase.run_test_for_event
|
2,490 |
def test_trash_stack(self):
def f():
for i in range(5):
print(i) # line tracing will raise an exception at this line
def g(frame, why, extra):
if (why == 'line' and
frame.f_lineno == f.__code__.co_firstlineno + 2):
raise RuntimeError("i am crashing")
return g
sys.settrace(g)
try:
f()
except __HOLE__:
# the test is really that this doesn't segfault:
import gc
gc.collect()
else:
self.fail("exception not propagated")
|
RuntimeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/RaisingTraceFuncTestCase.test_trash_stack
|
2,491 |
def test_exception_arguments(self):
def f():
x = 0
# this should raise an error
x.no_such_attr
def g(frame, event, arg):
if (event == 'exception'):
type, exception, trace = arg
self.assertIsInstance(exception, Exception)
return g
existing = sys.gettrace()
try:
sys.settrace(g)
try:
f()
except __HOLE__:
# this is expected
pass
finally:
sys.settrace(existing)
# 'Jump' tests: assigning to frame.f_lineno within a trace function
# moves the execution position - it's how debuggers implement a Jump
# command (aka. "Set next statement").
|
AttributeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/RaisingTraceFuncTestCase.test_exception_arguments
|
2,492 |
def trace(self, frame, event, arg):
if not self.done and frame.f_code == self.function.__code__:
firstLine = frame.f_code.co_firstlineno
if event == 'line' and frame.f_lineno == firstLine + self.jumpFrom:
# Cope with non-integer self.jumpTo (because of
# no_jump_to_non_integers below).
try:
frame.f_lineno = firstLine + self.jumpTo
except __HOLE__:
frame.f_lineno = self.jumpTo
self.done = True
return self.trace
# The first set of 'jump' tests are for things that are allowed:
|
TypeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/JumpTracer.trace
|
2,493 |
def no_jump_too_far_forwards(output):
try:
output.append(2)
output.append(3)
except __HOLE__ as e:
output.append('after' in str(e))
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_too_far_forwards
|
2,494 |
def no_jump_too_far_backwards(output):
try:
output.append(2)
output.append(3)
except __HOLE__ as e:
output.append('before' in str(e))
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_too_far_backwards
|
2,495 |
def no_jump_to_except_2(output):
try:
output.append(2)
except __HOLE__:
e = sys.exc_info()[1]
output.append('except' in str(e))
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_to_except_2
|
2,496 |
def no_jump_to_except_3(output):
try:
output.append(2)
except __HOLE__ as e:
output.append('except' in str(e))
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_to_except_3
|
2,497 |
def no_jump_to_except_4(output):
try:
output.append(2)
except (ValueError, __HOLE__) as e:
output.append('except' in str(e))
|
RuntimeError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_to_except_4
|
2,498 |
def no_jump_forwards_into_block(output):
try:
output.append(2)
for i in 1, 2:
output.append(4)
except __HOLE__ as e:
output.append('into' in str(e))
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_forwards_into_block
|
2,499 |
def no_jump_backwards_into_block(output):
try:
for i in 1, 2:
output.append(3)
output.append(4)
except __HOLE__ as e:
output.append('into' in str(e))
|
ValueError
|
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_sys_settrace.py/no_jump_backwards_into_block
|