repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url | partition
---|---|---|---|---|---|---|---|---|---|---|---
jahuth/litus | __init__.py | PDContainerList.param | def param(self,key,default=None):
"""for accessing global parameters"""
if key in self.parameters:
return self.parameters[key]
return default | python | def param(self,key,default=None):
"""for accessing global parameters"""
if key in self.parameters:
return self.parameters[key]
return default | [
"def",
"param",
"(",
"self",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"if",
"key",
"in",
"self",
".",
"parameters",
":",
"return",
"self",
".",
"parameters",
"[",
"key",
"]",
"return",
"default"
] | for accessing global parameters | [
"for",
"accessing",
"global",
"parameters"
] | 712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e | https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L811-L815 | train |
jahuth/litus | __init__.py | Lists.generator | def generator(self,gen,*args,**kwargs):
"""
Use this function to enter and exit the context at the beginning and end of a generator.
Example::
li = litus.Lists()
for i in li.generator(range(100)):
li.append(i)
"""
with self(*args,**kwargs):
for i in gen:
yield i | python | def generator(self,gen,*args,**kwargs):
"""
Use this function to enter and exit the context at the beginning and end of a generator.
Example::
li = litus.Lists()
for i in li.generator(range(100)):
li.append(i)
"""
with self(*args,**kwargs):
for i in gen:
yield i | [
"def",
"generator",
"(",
"self",
",",
"gen",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"self",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"i",
"in",
"gen",
":",
"yield",
"i"
] | Use this function to enter and exit the context at the beginning and end of a generator.
Example::
li = litus.Lists()
for i in li.generator(range(100)):
li.append(i) | [
"Use",
"this",
"function",
"to",
"enter",
"and",
"exit",
"the",
"context",
"at",
"the",
"beginning",
"and",
"end",
"of",
"a",
"generator",
"."
] | 712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e | https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L962-L975 | train |
Loudr/pale | pale/arguments/url.py | URLArgument.validate_url | def validate_url(self, original_string):
"""Returns the original string if it was valid, raises an argument
error if it's not.
"""
# nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python
# I preferred this to the thorough regex approach for simplicity and
# readability
pieces = urlparse.urlparse(original_string)
try:
if self.path_only:
assert not any([pieces.scheme, pieces.netloc])
assert pieces.path
else:
assert all([pieces.scheme, pieces.netloc])
valid_chars = set(string.letters + string.digits + ":-_.")
assert set(pieces.netloc) <= valid_chars
assert pieces.scheme in ['http', 'https']
except AssertionError as e:
raise ArgumentError(self.item_name,
"The input you've provided is not a valid URL.")
return pieces | python | def validate_url(self, original_string):
"""Returns the original string if it was valid, raises an argument
error if it's not.
"""
# nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python
# I preferred this to the thorough regex approach for simplicity and
# readability
pieces = urlparse.urlparse(original_string)
try:
if self.path_only:
assert not any([pieces.scheme, pieces.netloc])
assert pieces.path
else:
assert all([pieces.scheme, pieces.netloc])
valid_chars = set(string.letters + string.digits + ":-_.")
assert set(pieces.netloc) <= valid_chars
assert pieces.scheme in ['http', 'https']
except AssertionError as e:
raise ArgumentError(self.item_name,
"The input you've provided is not a valid URL.")
return pieces | [
"def",
"validate_url",
"(",
"self",
",",
"original_string",
")",
":",
"# nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python",
"# I preferred this to the thorough regex approach for simplicity and",
"# readability",
"pieces",
"=",
"urlparse",
".",
"urlparse",
"(",
"original_string",
")",
"try",
":",
"if",
"self",
".",
"path_only",
":",
"assert",
"not",
"any",
"(",
"[",
"pieces",
".",
"scheme",
",",
"pieces",
".",
"netloc",
"]",
")",
"assert",
"pieces",
".",
"path",
"else",
":",
"assert",
"all",
"(",
"[",
"pieces",
".",
"scheme",
",",
"pieces",
".",
"netloc",
"]",
")",
"valid_chars",
"=",
"set",
"(",
"string",
".",
"letters",
"+",
"string",
".",
"digits",
"+",
"\":-_.\"",
")",
"assert",
"set",
"(",
"pieces",
".",
"netloc",
")",
"<=",
"valid_chars",
"assert",
"pieces",
".",
"scheme",
"in",
"[",
"'http'",
",",
"'https'",
"]",
"except",
"AssertionError",
"as",
"e",
":",
"raise",
"ArgumentError",
"(",
"self",
".",
"item_name",
",",
"\"The input you've provided is not a valid URL.\"",
")",
"return",
"pieces"
] | Returns the original string if it was valid, raises an argument
error if it's not. | [
"Returns",
"the",
"original",
"string",
"if",
"it",
"was",
"valid",
"raises",
"an",
"argument",
"error",
"if",
"it",
"s",
"not",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/arguments/url.py#L15-L37 | train |
alextricity25/dwell_in_you_richly | diyr/utils/bible.py | Bible.get_chapter | def get_chapter(self, book_name, book_chapter, cache_chapter = True):
"""
Returns a chapter of the bible, first checking to see if that
chapter is on disk. If not, hen it attempts to fetch it from
the internet.
NOTE: This is public facing method. If the method signature changes,
then it needs to be documented and backwards-compatablity
needs to be preserved.
"""
try:
logging.debug("Attempting to read chapter from disk")
verses_list = self._get_ondisk_chapter(book_name, book_chapter)
except Exception as e:
logging.debug("Could not read file from disk. Attempting the internet..")
logging.debug(e.message)
verses_list = self._get_online_chapter(book_name, book_chapter,
cache_chapter = cache_chapter)
return verses_list | python | def get_chapter(self, book_name, book_chapter, cache_chapter = True):
"""
Returns a chapter of the bible, first checking to see if that
chapter is on disk. If not, hen it attempts to fetch it from
the internet.
NOTE: This is public facing method. If the method signature changes,
then it needs to be documented and backwards-compatablity
needs to be preserved.
"""
try:
logging.debug("Attempting to read chapter from disk")
verses_list = self._get_ondisk_chapter(book_name, book_chapter)
except Exception as e:
logging.debug("Could not read file from disk. Attempting the internet..")
logging.debug(e.message)
verses_list = self._get_online_chapter(book_name, book_chapter,
cache_chapter = cache_chapter)
return verses_list | [
"def",
"get_chapter",
"(",
"self",
",",
"book_name",
",",
"book_chapter",
",",
"cache_chapter",
"=",
"True",
")",
":",
"try",
":",
"logging",
".",
"debug",
"(",
"\"Attempting to read chapter from disk\"",
")",
"verses_list",
"=",
"self",
".",
"_get_ondisk_chapter",
"(",
"book_name",
",",
"book_chapter",
")",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"debug",
"(",
"\"Could not read file from disk. Attempting the internet..\"",
")",
"logging",
".",
"debug",
"(",
"e",
".",
"message",
")",
"verses_list",
"=",
"self",
".",
"_get_online_chapter",
"(",
"book_name",
",",
"book_chapter",
",",
"cache_chapter",
"=",
"cache_chapter",
")",
"return",
"verses_list"
] | Returns a chapter of the bible, first checking to see if that
chapter is on disk. If not, hen it attempts to fetch it from
the internet.
NOTE: This is public facing method. If the method signature changes,
then it needs to be documented and backwards-compatablity
needs to be preserved. | [
"Returns",
"a",
"chapter",
"of",
"the",
"bible",
"first",
"checking",
"to",
"see",
"if",
"that",
"chapter",
"is",
"on",
"disk",
".",
"If",
"not",
"hen",
"it",
"attempts",
"to",
"fetch",
"it",
"from",
"the",
"internet",
"."
] | e705e1bc4fc0b8d2aa25680dfc432762b361c783 | https://github.com/alextricity25/dwell_in_you_richly/blob/e705e1bc4fc0b8d2aa25680dfc432762b361c783/diyr/utils/bible.py#L122-L141 | train |
alextricity25/dwell_in_you_richly | diyr/utils/bible.py | Bible.verse_lookup | def verse_lookup(self, book_name, book_chapter, verse, cache_chapter = True):
"""
Looks up a verse from online.recoveryversion.bible, then returns it.
"""
verses_list = self.get_chapter(
book_name,
str(book_chapter),
cache_chapter = cache_chapter)
return verses_list[int(verse) - 1] | python | def verse_lookup(self, book_name, book_chapter, verse, cache_chapter = True):
"""
Looks up a verse from online.recoveryversion.bible, then returns it.
"""
verses_list = self.get_chapter(
book_name,
str(book_chapter),
cache_chapter = cache_chapter)
return verses_list[int(verse) - 1] | [
"def",
"verse_lookup",
"(",
"self",
",",
"book_name",
",",
"book_chapter",
",",
"verse",
",",
"cache_chapter",
"=",
"True",
")",
":",
"verses_list",
"=",
"self",
".",
"get_chapter",
"(",
"book_name",
",",
"str",
"(",
"book_chapter",
")",
",",
"cache_chapter",
"=",
"cache_chapter",
")",
"return",
"verses_list",
"[",
"int",
"(",
"verse",
")",
"-",
"1",
"]"
] | Looks up a verse from online.recoveryversion.bible, then returns it. | [
"Looks",
"up",
"a",
"verse",
"from",
"online",
".",
"recoveryversion",
".",
"bible",
"then",
"returns",
"it",
"."
] | e705e1bc4fc0b8d2aa25680dfc432762b361c783 | https://github.com/alextricity25/dwell_in_you_richly/blob/e705e1bc4fc0b8d2aa25680dfc432762b361c783/diyr/utils/bible.py#L230-L238 | train |
projectshift/shift-schema | shiftschema/ext/flask_wtf.py | WtfSchemaMixin.validate_on_submit | def validate_on_submit(self):
""" Extend validate on submit to allow validation with schema """
# validate form
valid = FlaskWtf.validate_on_submit(self)
# return in case no schema or not submitted
if not self._schema or not self.is_submitted():
return valid
# validate data with schema if got one and form was submitted
data = dict()
for field in self._fields:
data[field] = self._fields[field].data
result = self.schema.process(data, context=self._force_context)
self.set_errors(result)
# set filtered data back to form
for field in data:
self._fields[field].data = data[field]
return valid and not bool(self.errors) | python | def validate_on_submit(self):
""" Extend validate on submit to allow validation with schema """
# validate form
valid = FlaskWtf.validate_on_submit(self)
# return in case no schema or not submitted
if not self._schema or not self.is_submitted():
return valid
# validate data with schema if got one and form was submitted
data = dict()
for field in self._fields:
data[field] = self._fields[field].data
result = self.schema.process(data, context=self._force_context)
self.set_errors(result)
# set filtered data back to form
for field in data:
self._fields[field].data = data[field]
return valid and not bool(self.errors) | [
"def",
"validate_on_submit",
"(",
"self",
")",
":",
"# validate form",
"valid",
"=",
"FlaskWtf",
".",
"validate_on_submit",
"(",
"self",
")",
"# return in case no schema or not submitted",
"if",
"not",
"self",
".",
"_schema",
"or",
"not",
"self",
".",
"is_submitted",
"(",
")",
":",
"return",
"valid",
"# validate data with schema if got one and form was submitted",
"data",
"=",
"dict",
"(",
")",
"for",
"field",
"in",
"self",
".",
"_fields",
":",
"data",
"[",
"field",
"]",
"=",
"self",
".",
"_fields",
"[",
"field",
"]",
".",
"data",
"result",
"=",
"self",
".",
"schema",
".",
"process",
"(",
"data",
",",
"context",
"=",
"self",
".",
"_force_context",
")",
"self",
".",
"set_errors",
"(",
"result",
")",
"# set filtered data back to form",
"for",
"field",
"in",
"data",
":",
"self",
".",
"_fields",
"[",
"field",
"]",
".",
"data",
"=",
"data",
"[",
"field",
"]",
"return",
"valid",
"and",
"not",
"bool",
"(",
"self",
".",
"errors",
")"
] | Extend validate on submit to allow validation with schema | [
"Extend",
"validate",
"on",
"submit",
"to",
"allow",
"validation",
"with",
"schema"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/ext/flask_wtf.py#L13-L35 | train |
projectshift/shift-schema | shiftschema/ext/flask_wtf.py | WtfSchemaMixin.set_errors | def set_errors(self, result):
""" Populate field errors with errors from schema validation """
# todo: use wtf locale
errors = result.get_messages()
for property_name in errors:
if not hasattr(self, property_name):
continue # ignore errors for missing fields
prop_errors = errors[property_name]
if type(prop_errors) is not list:
prop_errors = ['<Nested schema result following...>']
if property_name in self.errors:
self.errors[property_name].extend(prop_errors)
else:
self.errors[property_name] = prop_errors | python | def set_errors(self, result):
""" Populate field errors with errors from schema validation """
# todo: use wtf locale
errors = result.get_messages()
for property_name in errors:
if not hasattr(self, property_name):
continue # ignore errors for missing fields
prop_errors = errors[property_name]
if type(prop_errors) is not list:
prop_errors = ['<Nested schema result following...>']
if property_name in self.errors:
self.errors[property_name].extend(prop_errors)
else:
self.errors[property_name] = prop_errors | [
"def",
"set_errors",
"(",
"self",
",",
"result",
")",
":",
"# todo: use wtf locale",
"errors",
"=",
"result",
".",
"get_messages",
"(",
")",
"for",
"property_name",
"in",
"errors",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"property_name",
")",
":",
"continue",
"# ignore errors for missing fields",
"prop_errors",
"=",
"errors",
"[",
"property_name",
"]",
"if",
"type",
"(",
"prop_errors",
")",
"is",
"not",
"list",
":",
"prop_errors",
"=",
"[",
"'<Nested schema result following...>'",
"]",
"if",
"property_name",
"in",
"self",
".",
"errors",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
".",
"extend",
"(",
"prop_errors",
")",
"else",
":",
"self",
".",
"errors",
"[",
"property_name",
"]",
"=",
"prop_errors"
] | Populate field errors with errors from schema validation | [
"Populate",
"field",
"errors",
"with",
"errors",
"from",
"schema",
"validation"
] | 07787b540d3369bb37217ffbfbe629118edaf0eb | https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/ext/flask_wtf.py#L40-L56 | train |
mediawiki-utilities/python-mwpersistence | mwpersistence/utilities/diffs2persistence.py | diffs2persistence | def diffs2persistence(rev_docs, window_size=50, revert_radius=15, sunset=None,
verbose=False):
"""
Processes a sorted and page-partitioned sequence of revision documents into
and adds a 'persistence' field to them containing statistics about how each
token "added" in the revision persisted through future revisions.
:Parameters:
rev_docs : `iterable` ( `dict` )
JSON documents of revision data containing a 'diff' field as
generated by ``dump2diffs``. It's assumed that rev_docs are
partitioned by page and otherwise in chronological order.
window_size : `int`
The size of the window of revisions from which persistence data
will be generated.
revert_radius : `int`
The number of revisions back that a revert can reference.
sunset : :class:`mwtypes.Timestamp`
The date of the database dump we are generating from. This is
used to apply a 'time visible' statistic. If not set, now() will
be assumed.
keep_diff : `bool`
Do not drop the `diff` field from the revision document after
processing is complete.
verbose : `bool`
Prints out dots and stuff to stderr
:Returns:
A generator of rev_docs with a 'persistence' field containing
statistics about individual tokens.
"""
rev_docs = mwxml.utilities.normalize(rev_docs)
window_size = int(window_size)
revert_radius = int(revert_radius)
sunset = Timestamp(sunset) if sunset is not None \
else Timestamp(time.time())
# Group the docs by page
page_docs = groupby(rev_docs, key=lambda d: d['page']['title'])
for page_title, rev_docs in page_docs:
if verbose:
sys.stderr.write(page_title + ": ")
# We need a look-ahead to know how long this revision was visible
rev_docs = peekable(rev_docs)
# The window allows us to manage memory
window = deque(maxlen=window_size)
# The state does the actual processing work
state = DiffState(revert_radius=revert_radius)
while rev_docs:
rev_doc = next(rev_docs)
next_doc = rev_docs.peek(None)
if next_doc is not None:
seconds_visible = Timestamp(next_doc['timestamp']) - \
Timestamp(rev_doc['timestamp'])
else:
seconds_visible = sunset - Timestamp(rev_doc['timestamp'])
if seconds_visible < 0:
logger.warn("Seconds visible {0} is less than zero."
.format(seconds_visible))
seconds_visible = 0
_, tokens_added, _ = \
state.update_opdocs(rev_doc['sha1'], rev_doc['diff']['ops'],
(rev_doc['user'], seconds_visible))
if len(window) == window_size:
# Time to start writing some stats
old_doc, old_added = window[0]
window.append((rev_doc, tokens_added))
persistence = token_persistence(old_doc, old_added, window,
None)
old_doc['persistence'] = persistence
yield old_doc
if verbose:
sys.stderr.write(".")
sys.stderr.flush()
else:
window.append((rev_doc, tokens_added))
while len(window) > 0:
old_doc, old_added = window.popleft()
persistence = token_persistence(old_doc, old_added, window, sunset)
old_doc['persistence'] = persistence
yield old_doc
if verbose:
sys.stderr.write("_")
sys.stderr.flush()
if verbose:
sys.stderr.write("\n") | python | def diffs2persistence(rev_docs, window_size=50, revert_radius=15, sunset=None,
verbose=False):
"""
Processes a sorted and page-partitioned sequence of revision documents into
and adds a 'persistence' field to them containing statistics about how each
token "added" in the revision persisted through future revisions.
:Parameters:
rev_docs : `iterable` ( `dict` )
JSON documents of revision data containing a 'diff' field as
generated by ``dump2diffs``. It's assumed that rev_docs are
partitioned by page and otherwise in chronological order.
window_size : `int`
The size of the window of revisions from which persistence data
will be generated.
revert_radius : `int`
The number of revisions back that a revert can reference.
sunset : :class:`mwtypes.Timestamp`
The date of the database dump we are generating from. This is
used to apply a 'time visible' statistic. If not set, now() will
be assumed.
keep_diff : `bool`
Do not drop the `diff` field from the revision document after
processing is complete.
verbose : `bool`
Prints out dots and stuff to stderr
:Returns:
A generator of rev_docs with a 'persistence' field containing
statistics about individual tokens.
"""
rev_docs = mwxml.utilities.normalize(rev_docs)
window_size = int(window_size)
revert_radius = int(revert_radius)
sunset = Timestamp(sunset) if sunset is not None \
else Timestamp(time.time())
# Group the docs by page
page_docs = groupby(rev_docs, key=lambda d: d['page']['title'])
for page_title, rev_docs in page_docs:
if verbose:
sys.stderr.write(page_title + ": ")
# We need a look-ahead to know how long this revision was visible
rev_docs = peekable(rev_docs)
# The window allows us to manage memory
window = deque(maxlen=window_size)
# The state does the actual processing work
state = DiffState(revert_radius=revert_radius)
while rev_docs:
rev_doc = next(rev_docs)
next_doc = rev_docs.peek(None)
if next_doc is not None:
seconds_visible = Timestamp(next_doc['timestamp']) - \
Timestamp(rev_doc['timestamp'])
else:
seconds_visible = sunset - Timestamp(rev_doc['timestamp'])
if seconds_visible < 0:
logger.warn("Seconds visible {0} is less than zero."
.format(seconds_visible))
seconds_visible = 0
_, tokens_added, _ = \
state.update_opdocs(rev_doc['sha1'], rev_doc['diff']['ops'],
(rev_doc['user'], seconds_visible))
if len(window) == window_size:
# Time to start writing some stats
old_doc, old_added = window[0]
window.append((rev_doc, tokens_added))
persistence = token_persistence(old_doc, old_added, window,
None)
old_doc['persistence'] = persistence
yield old_doc
if verbose:
sys.stderr.write(".")
sys.stderr.flush()
else:
window.append((rev_doc, tokens_added))
while len(window) > 0:
old_doc, old_added = window.popleft()
persistence = token_persistence(old_doc, old_added, window, sunset)
old_doc['persistence'] = persistence
yield old_doc
if verbose:
sys.stderr.write("_")
sys.stderr.flush()
if verbose:
sys.stderr.write("\n") | [
"def",
"diffs2persistence",
"(",
"rev_docs",
",",
"window_size",
"=",
"50",
",",
"revert_radius",
"=",
"15",
",",
"sunset",
"=",
"None",
",",
"verbose",
"=",
"False",
")",
":",
"rev_docs",
"=",
"mwxml",
".",
"utilities",
".",
"normalize",
"(",
"rev_docs",
")",
"window_size",
"=",
"int",
"(",
"window_size",
")",
"revert_radius",
"=",
"int",
"(",
"revert_radius",
")",
"sunset",
"=",
"Timestamp",
"(",
"sunset",
")",
"if",
"sunset",
"is",
"not",
"None",
"else",
"Timestamp",
"(",
"time",
".",
"time",
"(",
")",
")",
"# Group the docs by page",
"page_docs",
"=",
"groupby",
"(",
"rev_docs",
",",
"key",
"=",
"lambda",
"d",
":",
"d",
"[",
"'page'",
"]",
"[",
"'title'",
"]",
")",
"for",
"page_title",
",",
"rev_docs",
"in",
"page_docs",
":",
"if",
"verbose",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"page_title",
"+",
"\": \"",
")",
"# We need a look-ahead to know how long this revision was visible",
"rev_docs",
"=",
"peekable",
"(",
"rev_docs",
")",
"# The window allows us to manage memory",
"window",
"=",
"deque",
"(",
"maxlen",
"=",
"window_size",
")",
"# The state does the actual processing work",
"state",
"=",
"DiffState",
"(",
"revert_radius",
"=",
"revert_radius",
")",
"while",
"rev_docs",
":",
"rev_doc",
"=",
"next",
"(",
"rev_docs",
")",
"next_doc",
"=",
"rev_docs",
".",
"peek",
"(",
"None",
")",
"if",
"next_doc",
"is",
"not",
"None",
":",
"seconds_visible",
"=",
"Timestamp",
"(",
"next_doc",
"[",
"'timestamp'",
"]",
")",
"-",
"Timestamp",
"(",
"rev_doc",
"[",
"'timestamp'",
"]",
")",
"else",
":",
"seconds_visible",
"=",
"sunset",
"-",
"Timestamp",
"(",
"rev_doc",
"[",
"'timestamp'",
"]",
")",
"if",
"seconds_visible",
"<",
"0",
":",
"logger",
".",
"warn",
"(",
"\"Seconds visible {0} is less than zero.\"",
".",
"format",
"(",
"seconds_visible",
")",
")",
"seconds_visible",
"=",
"0",
"_",
",",
"tokens_added",
",",
"_",
"=",
"state",
".",
"update_opdocs",
"(",
"rev_doc",
"[",
"'sha1'",
"]",
",",
"rev_doc",
"[",
"'diff'",
"]",
"[",
"'ops'",
"]",
",",
"(",
"rev_doc",
"[",
"'user'",
"]",
",",
"seconds_visible",
")",
")",
"if",
"len",
"(",
"window",
")",
"==",
"window_size",
":",
"# Time to start writing some stats",
"old_doc",
",",
"old_added",
"=",
"window",
"[",
"0",
"]",
"window",
".",
"append",
"(",
"(",
"rev_doc",
",",
"tokens_added",
")",
")",
"persistence",
"=",
"token_persistence",
"(",
"old_doc",
",",
"old_added",
",",
"window",
",",
"None",
")",
"old_doc",
"[",
"'persistence'",
"]",
"=",
"persistence",
"yield",
"old_doc",
"if",
"verbose",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\".\"",
")",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"else",
":",
"window",
".",
"append",
"(",
"(",
"rev_doc",
",",
"tokens_added",
")",
")",
"while",
"len",
"(",
"window",
")",
">",
"0",
":",
"old_doc",
",",
"old_added",
"=",
"window",
".",
"popleft",
"(",
")",
"persistence",
"=",
"token_persistence",
"(",
"old_doc",
",",
"old_added",
",",
"window",
",",
"sunset",
")",
"old_doc",
"[",
"'persistence'",
"]",
"=",
"persistence",
"yield",
"old_doc",
"if",
"verbose",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"_\"",
")",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"if",
"verbose",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"\\n\"",
")"
] | Processes a sorted and page-partitioned sequence of revision documents into
and adds a 'persistence' field to them containing statistics about how each
token "added" in the revision persisted through future revisions.
:Parameters:
rev_docs : `iterable` ( `dict` )
JSON documents of revision data containing a 'diff' field as
generated by ``dump2diffs``. It's assumed that rev_docs are
partitioned by page and otherwise in chronological order.
window_size : `int`
The size of the window of revisions from which persistence data
will be generated.
revert_radius : `int`
The number of revisions back that a revert can reference.
sunset : :class:`mwtypes.Timestamp`
The date of the database dump we are generating from. This is
used to apply a 'time visible' statistic. If not set, now() will
be assumed.
keep_diff : `bool`
Do not drop the `diff` field from the revision document after
processing is complete.
verbose : `bool`
Prints out dots and stuff to stderr
:Returns:
A generator of rev_docs with a 'persistence' field containing
statistics about individual tokens. | [
"Processes",
"a",
"sorted",
"and",
"page",
"-",
"partitioned",
"sequence",
"of",
"revision",
"documents",
"into",
"and",
"adds",
"a",
"persistence",
"field",
"to",
"them",
"containing",
"statistics",
"about",
"how",
"each",
"token",
"added",
"in",
"the",
"revision",
"persisted",
"through",
"future",
"revisions",
"."
] | 2b98847fb8acaca38b3cbf94bde3fd7e27d2b67d | https://github.com/mediawiki-utilities/python-mwpersistence/blob/2b98847fb8acaca38b3cbf94bde3fd7e27d2b67d/mwpersistence/utilities/diffs2persistence.py#L100-L197 | train |
a1ezzz/wasp-general | wasp_general/crypto/hash.py | WHash.generator | def generator(name):
""" Return generator by its name
:param name: name of hash-generator
:return: WHashGeneratorProto class
"""
name = name.upper()
if name not in WHash.__hash_map__.keys():
raise ValueError('Hash generator "%s" not available' % name)
return WHash.__hash_map__[name] | python | def generator(name):
""" Return generator by its name
:param name: name of hash-generator
:return: WHashGeneratorProto class
"""
name = name.upper()
if name not in WHash.__hash_map__.keys():
raise ValueError('Hash generator "%s" not available' % name)
return WHash.__hash_map__[name] | [
"def",
"generator",
"(",
"name",
")",
":",
"name",
"=",
"name",
".",
"upper",
"(",
")",
"if",
"name",
"not",
"in",
"WHash",
".",
"__hash_map__",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'Hash generator \"%s\" not available'",
"%",
"name",
")",
"return",
"WHash",
".",
"__hash_map__",
"[",
"name",
"]"
] | Return generator by its name
:param name: name of hash-generator
:return: WHashGeneratorProto class | [
"Return",
"generator",
"by",
"its",
"name"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/crypto/hash.py#L241-L251 | train |
a1ezzz/wasp-general | wasp_general/crypto/hash.py | WHash.generator_by_digest | def generator_by_digest(family, digest_size):
""" Return generator by hash generator family name and digest size
:param family: name of hash-generator family
:return: WHashGeneratorProto class
"""
for generator_name in WHash.available_generators(family=family):
generator = WHash.generator(generator_name)
if generator.generator_digest_size() == digest_size:
return generator
raise ValueError('Hash generator is not available') | python | def generator_by_digest(family, digest_size):
""" Return generator by hash generator family name and digest size
:param family: name of hash-generator family
:return: WHashGeneratorProto class
"""
for generator_name in WHash.available_generators(family=family):
generator = WHash.generator(generator_name)
if generator.generator_digest_size() == digest_size:
return generator
raise ValueError('Hash generator is not available') | [
"def",
"generator_by_digest",
"(",
"family",
",",
"digest_size",
")",
":",
"for",
"generator_name",
"in",
"WHash",
".",
"available_generators",
"(",
"family",
"=",
"family",
")",
":",
"generator",
"=",
"WHash",
".",
"generator",
"(",
"generator_name",
")",
"if",
"generator",
".",
"generator_digest_size",
"(",
")",
"==",
"digest_size",
":",
"return",
"generator",
"raise",
"ValueError",
"(",
"'Hash generator is not available'",
")"
] | Return generator by hash generator family name and digest size
:param family: name of hash-generator family
:return: WHashGeneratorProto class | [
"Return",
"generator",
"by",
"hash",
"generator",
"family",
"name",
"and",
"digest",
"size"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/crypto/hash.py#L254-L265 | train |
a1ezzz/wasp-general | wasp_general/network/messenger/session.py | WMessengerOnionSessionFlow.sequence | def sequence(cls, *info):
""" Useful method to generate iterator. It is generated by chaining the given info. If no info is
specified, then None is returned
:param info: iterator info sequence
:return: WMessengerOnionSessionFlowProto.Iterator or None
"""
if len(info) == 0:
return
info = list(info)
info.reverse()
result = WMessengerOnionSessionFlowProto.Iterator(
info[0].layer_name(), **info[0].layer_args()
)
for i in range(1, len(info)):
result = WMessengerOnionSessionFlowProto.Iterator(
info[i].layer_name(), next_iterator=result, **info[i].layer_args()
)
return result | python | def sequence(cls, *info):
""" Useful method to generate iterator. It is generated by chaining the given info. If no info is
specified, then None is returned
:param info: iterator info sequence
:return: WMessengerOnionSessionFlowProto.Iterator or None
"""
if len(info) == 0:
return
info = list(info)
info.reverse()
result = WMessengerOnionSessionFlowProto.Iterator(
info[0].layer_name(), **info[0].layer_args()
)
for i in range(1, len(info)):
result = WMessengerOnionSessionFlowProto.Iterator(
info[i].layer_name(), next_iterator=result, **info[i].layer_args()
)
return result | [
"def",
"sequence",
"(",
"cls",
",",
"*",
"info",
")",
":",
"if",
"len",
"(",
"info",
")",
"==",
"0",
":",
"return",
"info",
"=",
"list",
"(",
"info",
")",
"info",
".",
"reverse",
"(",
")",
"result",
"=",
"WMessengerOnionSessionFlowProto",
".",
"Iterator",
"(",
"info",
"[",
"0",
"]",
".",
"layer_name",
"(",
")",
",",
"*",
"*",
"info",
"[",
"0",
"]",
".",
"layer_args",
"(",
")",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"info",
")",
")",
":",
"result",
"=",
"WMessengerOnionSessionFlowProto",
".",
"Iterator",
"(",
"info",
"[",
"i",
"]",
".",
"layer_name",
"(",
")",
",",
"next_iterator",
"=",
"result",
",",
"*",
"*",
"info",
"[",
"i",
"]",
".",
"layer_args",
"(",
")",
")",
"return",
"result"
] | Useful method to generate iterator. It is generated by chaining the given info. If no info is
specified, then None is returned
:param info: iterator info sequence
:return: WMessengerOnionSessionFlowProto.Iterator or None | [
"Useful",
"method",
"to",
"generate",
"iterator",
".",
"It",
"is",
"generated",
"by",
"chaining",
"the",
"given",
"info",
".",
"If",
"no",
"info",
"is",
"specified",
"then",
"None",
"is",
"returned"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/messenger/session.py#L58-L80 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WMACAddress.from_string | def from_string(address):
""" Return new object by the given MAC-address
:param address: address to convert
:return: WMACAddress
"""
str_address = None
if WMACAddress.re_dash_format.match(address):
str_address = "".join(address.split("-"))
elif WMACAddress.re_colon_format.match(address):
str_address = "".join(address.split(":"))
elif WMACAddress.re_cisco_format.match(address):
str_address = "".join(address.split("."))
elif WMACAddress.re_spaceless_format.match(address):
str_address = address
if str_address is None:
raise ValueError("Invalid MAC address format: " + address)
result = WMACAddress()
for octet_index in range(WMACAddress.octet_count):
octet = str_address[:2]
result.__address[octet_index] = int(octet, 16)
str_address = str_address[2:]
return result | python | def from_string(address):
""" Return new object by the given MAC-address
:param address: address to convert
:return: WMACAddress
"""
str_address = None
if WMACAddress.re_dash_format.match(address):
str_address = "".join(address.split("-"))
elif WMACAddress.re_colon_format.match(address):
str_address = "".join(address.split(":"))
elif WMACAddress.re_cisco_format.match(address):
str_address = "".join(address.split("."))
elif WMACAddress.re_spaceless_format.match(address):
str_address = address
if str_address is None:
raise ValueError("Invalid MAC address format: " + address)
result = WMACAddress()
for octet_index in range(WMACAddress.octet_count):
octet = str_address[:2]
result.__address[octet_index] = int(octet, 16)
str_address = str_address[2:]
return result | [
"def",
"from_string",
"(",
"address",
")",
":",
"str_address",
"=",
"None",
"if",
"WMACAddress",
".",
"re_dash_format",
".",
"match",
"(",
"address",
")",
":",
"str_address",
"=",
"\"\"",
".",
"join",
"(",
"address",
".",
"split",
"(",
"\"-\"",
")",
")",
"elif",
"WMACAddress",
".",
"re_colon_format",
".",
"match",
"(",
"address",
")",
":",
"str_address",
"=",
"\"\"",
".",
"join",
"(",
"address",
".",
"split",
"(",
"\":\"",
")",
")",
"elif",
"WMACAddress",
".",
"re_cisco_format",
".",
"match",
"(",
"address",
")",
":",
"str_address",
"=",
"\"\"",
".",
"join",
"(",
"address",
".",
"split",
"(",
"\".\"",
")",
")",
"elif",
"WMACAddress",
".",
"re_spaceless_format",
".",
"match",
"(",
"address",
")",
":",
"str_address",
"=",
"address",
"if",
"str_address",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Invalid MAC address format: \"",
"+",
"address",
")",
"result",
"=",
"WMACAddress",
"(",
")",
"for",
"octet_index",
"in",
"range",
"(",
"WMACAddress",
".",
"octet_count",
")",
":",
"octet",
"=",
"str_address",
"[",
":",
"2",
"]",
"result",
".",
"__address",
"[",
"octet_index",
"]",
"=",
"int",
"(",
"octet",
",",
"16",
")",
"str_address",
"=",
"str_address",
"[",
"2",
":",
"]",
"return",
"result"
] | Return new object by the given MAC-address
:param address: address to convert
:return: WMACAddress | [
"Return",
"new",
"object",
"by",
"the",
"given",
"MAC",
"-",
"address"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L87-L113 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WIPV4Address.from_string | def from_string(address):
""" Parse string for IPv4 address
:param address: address to parse
:return:
"""
address = address.split('.')
if len(address) != WIPV4Address.octet_count:
raise ValueError('Invalid ip address: %s' % address)
result = WIPV4Address()
for i in range(WIPV4Address.octet_count):
result.__address[i] = WBinArray(int(address[i]), WFixedSizeByteArray.byte_size)
return result | python | def from_string(address):
""" Parse string for IPv4 address
:param address: address to parse
:return:
"""
address = address.split('.')
if len(address) != WIPV4Address.octet_count:
raise ValueError('Invalid ip address: %s' % address)
result = WIPV4Address()
for i in range(WIPV4Address.octet_count):
result.__address[i] = WBinArray(int(address[i]), WFixedSizeByteArray.byte_size)
return result | [
"def",
"from_string",
"(",
"address",
")",
":",
"address",
"=",
"address",
".",
"split",
"(",
"'.'",
")",
"if",
"len",
"(",
"address",
")",
"!=",
"WIPV4Address",
".",
"octet_count",
":",
"raise",
"ValueError",
"(",
"'Invalid ip address: %s'",
"%",
"address",
")",
"result",
"=",
"WIPV4Address",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"WIPV4Address",
".",
"octet_count",
")",
":",
"result",
".",
"__address",
"[",
"i",
"]",
"=",
"WBinArray",
"(",
"int",
"(",
"address",
"[",
"i",
"]",
")",
",",
"WFixedSizeByteArray",
".",
"byte_size",
")",
"return",
"result"
] | Parse string for IPv4 address
:param address: address to parse
:return: | [
"Parse",
"string",
"for",
"IPv4",
"address"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L173-L186 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WIPV4Address.to_string | def to_string(address, dns_format=False):
""" Convert address to string
:param address: WIPV4Address to convert
:param dns_format: whether to use arpa-format or not
:return:
"""
if isinstance(address, WIPV4Address) is False:
raise TypeError('Invalid address type')
address = [str(int(x)) for x in address.__address]
if dns_format is False:
return '.'.join(address)
address.reverse()
return ('.'.join(address) + '.in-addr.arpa') | python | def to_string(address, dns_format=False):
""" Convert address to string
:param address: WIPV4Address to convert
:param dns_format: whether to use arpa-format or not
:return:
"""
if isinstance(address, WIPV4Address) is False:
raise TypeError('Invalid address type')
address = [str(int(x)) for x in address.__address]
if dns_format is False:
return '.'.join(address)
address.reverse()
return ('.'.join(address) + '.in-addr.arpa') | [
"def",
"to_string",
"(",
"address",
",",
"dns_format",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"address",
",",
"WIPV4Address",
")",
"is",
"False",
":",
"raise",
"TypeError",
"(",
"'Invalid address type'",
")",
"address",
"=",
"[",
"str",
"(",
"int",
"(",
"x",
")",
")",
"for",
"x",
"in",
"address",
".",
"__address",
"]",
"if",
"dns_format",
"is",
"False",
":",
"return",
"'.'",
".",
"join",
"(",
"address",
")",
"address",
".",
"reverse",
"(",
")",
"return",
"(",
"'.'",
".",
"join",
"(",
"address",
")",
"+",
"'.in-addr.arpa'",
")"
] | Convert address to string
:param address: WIPV4Address to convert
:param dns_format: whether to use arpa-format or not
:return: | [
"Convert",
"address",
"to",
"string"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L190-L204 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WNetworkIPV4.first_address | def first_address(self, skip_network_address=True):
""" Return the first IP address of this network
:param skip_network_address: this flag specifies whether this function returns address of the network \
or returns address that follows address of the network (address, that a host could have)
:return: WIPV4Address
"""
bin_address = self.__address.bin_address()
bin_address_length = len(bin_address)
if self.__mask > (bin_address_length - 2):
skip_network_address = False
for i in range(bin_address_length - self.__mask):
bin_address[self.__mask + i] = 0
if skip_network_address:
bin_address[bin_address_length - 1] = 1
return WIPV4Address(bin_address) | python | def first_address(self, skip_network_address=True):
""" Return the first IP address of this network
:param skip_network_address: this flag specifies whether this function returns address of the network \
or returns address that follows address of the network (address, that a host could have)
:return: WIPV4Address
"""
bin_address = self.__address.bin_address()
bin_address_length = len(bin_address)
if self.__mask > (bin_address_length - 2):
skip_network_address = False
for i in range(bin_address_length - self.__mask):
bin_address[self.__mask + i] = 0
if skip_network_address:
bin_address[bin_address_length - 1] = 1
return WIPV4Address(bin_address) | [
"def",
"first_address",
"(",
"self",
",",
"skip_network_address",
"=",
"True",
")",
":",
"bin_address",
"=",
"self",
".",
"__address",
".",
"bin_address",
"(",
")",
"bin_address_length",
"=",
"len",
"(",
"bin_address",
")",
"if",
"self",
".",
"__mask",
">",
"(",
"bin_address_length",
"-",
"2",
")",
":",
"skip_network_address",
"=",
"False",
"for",
"i",
"in",
"range",
"(",
"bin_address_length",
"-",
"self",
".",
"__mask",
")",
":",
"bin_address",
"[",
"self",
".",
"__mask",
"+",
"i",
"]",
"=",
"0",
"if",
"skip_network_address",
":",
"bin_address",
"[",
"bin_address_length",
"-",
"1",
"]",
"=",
"1",
"return",
"WIPV4Address",
"(",
"bin_address",
")"
] | Return the first IP address of this network
:param skip_network_address: this flag specifies whether this function returns address of the network \
or returns address that follows address of the network (address, that a host could have)
:return: WIPV4Address | [
"Return",
"the",
"first",
"IP",
"address",
"of",
"this",
"network"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L269-L287 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WNetworkIPV4.last_address | def last_address(self, skip_broadcast_address=True):
""" Return the last IP address of this network
:param skip_broadcast_address: this flag specifies whether to skip the very last address (that is \
usually used as broadcast address) or not.
:return: WIPV4Address
"""
bin_address = self.__address.bin_address()
bin_address_length = len(bin_address)
if self.__mask > (bin_address_length - 2):
skip_broadcast_address = False
for i in range(bin_address_length - self.__mask):
bin_address[self.__mask + i] = 1
if skip_broadcast_address:
bin_address[bin_address_length - 1] = 0
return WIPV4Address(bin_address) | python | def last_address(self, skip_broadcast_address=True):
""" Return the last IP address of this network
:param skip_broadcast_address: this flag specifies whether to skip the very last address (that is \
usually used as broadcast address) or not.
:return: WIPV4Address
"""
bin_address = self.__address.bin_address()
bin_address_length = len(bin_address)
if self.__mask > (bin_address_length - 2):
skip_broadcast_address = False
for i in range(bin_address_length - self.__mask):
bin_address[self.__mask + i] = 1
if skip_broadcast_address:
bin_address[bin_address_length - 1] = 0
return WIPV4Address(bin_address) | [
"def",
"last_address",
"(",
"self",
",",
"skip_broadcast_address",
"=",
"True",
")",
":",
"bin_address",
"=",
"self",
".",
"__address",
".",
"bin_address",
"(",
")",
"bin_address_length",
"=",
"len",
"(",
"bin_address",
")",
"if",
"self",
".",
"__mask",
">",
"(",
"bin_address_length",
"-",
"2",
")",
":",
"skip_broadcast_address",
"=",
"False",
"for",
"i",
"in",
"range",
"(",
"bin_address_length",
"-",
"self",
".",
"__mask",
")",
":",
"bin_address",
"[",
"self",
".",
"__mask",
"+",
"i",
"]",
"=",
"1",
"if",
"skip_broadcast_address",
":",
"bin_address",
"[",
"bin_address_length",
"-",
"1",
"]",
"=",
"0",
"return",
"WIPV4Address",
"(",
"bin_address",
")"
] | Return the last IP address of this network
:param skip_broadcast_address: this flag specifies whether to skip the very last address (that is \
usually used as broadcast address) or not.
:return: WIPV4Address | [
"Return",
"the",
"last",
"IP",
"address",
"of",
"this",
"network"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L290-L308 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WNetworkIPV4.iterator | def iterator(self, skip_network_address=True, skip_broadcast_address=True):
""" Return iterator, that can iterate over network addresses
:param skip_network_address: same as skip_network_address in :meth:`.NetworkIPV4.first_address` method
:param skip_broadcast_address: same as skip_broadcast_address in :meth:`.NetworkIPV4.last_address` \
method
:return: NetworkIPV4Iterator
"""
return WNetworkIPV4Iterator(self, skip_network_address, skip_broadcast_address) | python | def iterator(self, skip_network_address=True, skip_broadcast_address=True):
""" Return iterator, that can iterate over network addresses
:param skip_network_address: same as skip_network_address in :meth:`.NetworkIPV4.first_address` method
:param skip_broadcast_address: same as skip_broadcast_address in :meth:`.NetworkIPV4.last_address` \
method
:return: NetworkIPV4Iterator
"""
return WNetworkIPV4Iterator(self, skip_network_address, skip_broadcast_address) | [
"def",
"iterator",
"(",
"self",
",",
"skip_network_address",
"=",
"True",
",",
"skip_broadcast_address",
"=",
"True",
")",
":",
"return",
"WNetworkIPV4Iterator",
"(",
"self",
",",
"skip_network_address",
",",
"skip_broadcast_address",
")"
] | Return iterator, that can iterate over network addresses
:param skip_network_address: same as skip_network_address in :meth:`.NetworkIPV4.first_address` method
:param skip_broadcast_address: same as skip_broadcast_address in :meth:`.NetworkIPV4.last_address` \
method
:return: NetworkIPV4Iterator | [
"Return",
"iterator",
"that",
"can",
"iterate",
"over",
"network",
"addresses"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L310-L318 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WFQDN.from_string | def from_string(address):
""" Convert doted-written FQDN address to WFQDN object
:param address: address to convert
:return: WFQDN
"""
if len(address) == 0:
return WFQDN()
if address[-1] == '.':
address = address[:-1]
if len(address) > WFQDN.maximum_fqdn_length:
raise ValueError('Invalid address')
result = WFQDN()
for label in address.split('.'):
if isinstance(label, str) and WFQDN.re_label.match(label):
result._labels.append(label)
else:
raise ValueError('Invalid address')
return result | python | def from_string(address):
""" Convert doted-written FQDN address to WFQDN object
:param address: address to convert
:return: WFQDN
"""
if len(address) == 0:
return WFQDN()
if address[-1] == '.':
address = address[:-1]
if len(address) > WFQDN.maximum_fqdn_length:
raise ValueError('Invalid address')
result = WFQDN()
for label in address.split('.'):
if isinstance(label, str) and WFQDN.re_label.match(label):
result._labels.append(label)
else:
raise ValueError('Invalid address')
return result | [
"def",
"from_string",
"(",
"address",
")",
":",
"if",
"len",
"(",
"address",
")",
"==",
"0",
":",
"return",
"WFQDN",
"(",
")",
"if",
"address",
"[",
"-",
"1",
"]",
"==",
"'.'",
":",
"address",
"=",
"address",
"[",
":",
"-",
"1",
"]",
"if",
"len",
"(",
"address",
")",
">",
"WFQDN",
".",
"maximum_fqdn_length",
":",
"raise",
"ValueError",
"(",
"'Invalid address'",
")",
"result",
"=",
"WFQDN",
"(",
")",
"for",
"label",
"in",
"address",
".",
"split",
"(",
"'.'",
")",
":",
"if",
"isinstance",
"(",
"label",
",",
"str",
")",
"and",
"WFQDN",
".",
"re_label",
".",
"match",
"(",
"label",
")",
":",
"result",
".",
"_labels",
".",
"append",
"(",
"label",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Invalid address'",
")",
"return",
"result"
] | Convert doted-written FQDN address to WFQDN object
:param address: address to convert
:return: WFQDN | [
"Convert",
"doted",
"-",
"written",
"FQDN",
"address",
"to",
"WFQDN",
"object"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L443-L465 | train |
a1ezzz/wasp-general | wasp_general/network/primitives.py | WFQDN.to_string | def to_string(address, leading_dot=False):
""" Return doted-written address by the given WFQDN object
:param address: address to convert
:param leading_dot: whether this function place leading dot to the result or not
:return: str
"""
if isinstance(address, WFQDN) is False:
raise TypeError('Invalid type for FQDN address')
result = '.'.join(address._labels)
return result if leading_dot is False else (result + '.') | python | def to_string(address, leading_dot=False):
""" Return doted-written address by the given WFQDN object
:param address: address to convert
:param leading_dot: whether this function place leading dot to the result or not
:return: str
"""
if isinstance(address, WFQDN) is False:
raise TypeError('Invalid type for FQDN address')
result = '.'.join(address._labels)
return result if leading_dot is False else (result + '.') | [
"def",
"to_string",
"(",
"address",
",",
"leading_dot",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"address",
",",
"WFQDN",
")",
"is",
"False",
":",
"raise",
"TypeError",
"(",
"'Invalid type for FQDN address'",
")",
"result",
"=",
"'.'",
".",
"join",
"(",
"address",
".",
"_labels",
")",
"return",
"result",
"if",
"leading_dot",
"is",
"False",
"else",
"(",
"result",
"+",
"'.'",
")"
] | Return doted-written address by the given WFQDN object
:param address: address to convert
:param leading_dot: whether this function place leading dot to the result or not
:return: str | [
"Return",
"doted",
"-",
"written",
"address",
"by",
"the",
"given",
"WFQDN",
"object"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/primitives.py#L469-L481 | train |
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteReparent | def qteReparent(self, parent):
"""
Re-parent the applet.
This is little more then calling Qt's native ``setParent()``
method but also updates the ``qteParentWindow`` handle. This
method is usually called when the applet is added/removed from
a splitter and thus requires re-parenting.
|Args|
* ``parent`` (**QWidget**): the new parent of this object.
|Returns|
* **None**
|Raises|
* **None**
"""
# Set the new parent.
self.setParent(parent)
# If this parent has a Qtmacs structure then query it for the
# parent window, otherwise set the parent to None.
try:
self._qteAdmin.parentWindow = parent.qteParentWindow()
except AttributeError:
self._qteAdmin.parentWindow = None
# Sanity check:
if parent:
msg = 'Parent is neither None, nor does it have a'
msg += 'qteParentWindow field --> bug'
print(msg) | python | def qteReparent(self, parent):
"""
Re-parent the applet.
This is little more then calling Qt's native ``setParent()``
method but also updates the ``qteParentWindow`` handle. This
method is usually called when the applet is added/removed from
a splitter and thus requires re-parenting.
|Args|
* ``parent`` (**QWidget**): the new parent of this object.
|Returns|
* **None**
|Raises|
* **None**
"""
# Set the new parent.
self.setParent(parent)
# If this parent has a Qtmacs structure then query it for the
# parent window, otherwise set the parent to None.
try:
self._qteAdmin.parentWindow = parent.qteParentWindow()
except AttributeError:
self._qteAdmin.parentWindow = None
# Sanity check:
if parent:
msg = 'Parent is neither None, nor does it have a'
msg += 'qteParentWindow field --> bug'
print(msg) | [
"def",
"qteReparent",
"(",
"self",
",",
"parent",
")",
":",
"# Set the new parent.",
"self",
".",
"setParent",
"(",
"parent",
")",
"# If this parent has a Qtmacs structure then query it for the",
"# parent window, otherwise set the parent to None.",
"try",
":",
"self",
".",
"_qteAdmin",
".",
"parentWindow",
"=",
"parent",
".",
"qteParentWindow",
"(",
")",
"except",
"AttributeError",
":",
"self",
".",
"_qteAdmin",
".",
"parentWindow",
"=",
"None",
"# Sanity check:",
"if",
"parent",
":",
"msg",
"=",
"'Parent is neither None, nor does it have a'",
"msg",
"+=",
"'qteParentWindow field --> bug'",
"print",
"(",
"msg",
")"
] | Re-parent the applet.
This is little more then calling Qt's native ``setParent()``
method but also updates the ``qteParentWindow`` handle. This
method is usually called when the applet is added/removed from
a splitter and thus requires re-parenting.
|Args|
* ``parent`` (**QWidget**): the new parent of this object.
|Returns|
* **None**
|Raises|
* **None** | [
"Re",
"-",
"parent",
"the",
"applet",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L222-L257 | train |
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteAddWidget | def qteAddWidget(self, widgetObj: QtGui.QWidget, isFocusable: bool=True,
widgetSignature: str=None, autoBind: bool=True):
"""
Augment the standard Qt ``widgetObj`` with Qtmacs specific fields.
Example: from a programmers perspective there is no difference
between::
wid = QtGui.QTextEdit(self)
and::
wid = self.qteAddWidget(QtGui.QTextEdit(self))
Both return a handle to a Qt widget (a ``QTextEdit`` in this
case). However, the ``qteAddWidget`` adds the following fields
to the object:
* ``_qteAdmin``: this is an instance of the ``QtmacsAdminStructure``
to tell Qtmacs how to treat the widget.
* ``qteSignature``: an attribute that returns the signature of the
widget and equals ``widgetSignature``. If no such signature was
specified it defaults to the Qt internal name as a string, eg.
for a push button this would be 'QPushButton'.
* ``qteSetKeyFilterPolicy``: this points directly to the equally
named method inside the _qteAdmin object. This is a convenience
shortcut to avoid using the _qteAdmin structure directly in
macro/applet code, because only Qtmacs itself should temper
with it.
|Args|
* ``widgetObj`` (**QWidget**): any widget from the QtGui library.
* ``isFocusable`` (**bool**): whether or not the widget can
receive the focus.
* ``widgetSignature`` (**str**): specify the widget signature
(defaults to class name)
* ``autoBind`` (**bool**): if **True** and ``widgetSignature``
is a recognisable name (eg. **QTextEdit**) then automatically
load the appropriate key-bindings for this widget.
|Returns|
* **QWidget**: handle to widget object (or **None** if it could
not be added).
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
"""
# Add a Qtmacs data structure to the widget to allow their
# event administration. Note that, in all likelihood, the
# widget is an arbitrary Qt widget (eg. QLineEdit,
# QPushButton, etc).
widgetObj._qteAdmin = QtmacsAdminStructure(
self, isFocusable=isFocusable)
widgetObj._qteAdmin.appletID = self._qteAdmin.appletID
# Specify that this widget is not a QtmacsApplet.
widgetObj._qteAdmin.isQtmacsApplet = False
# Remember the signature of the applet containing this widget.
widgetObj._qteAdmin.appletSignature = self.qteAppletSignature()
# Set the widget signature. If none was specified, use the
# class name (eg. QLineEdit).
if widgetSignature is None:
widgetObj._qteAdmin.widgetSignature = widgetObj.__class__.__name__
else:
widgetObj._qteAdmin.widgetSignature = widgetSignature
# For convenience, as it is otherwise difficult for the macro
# programmer to determine the widget signature used by Qtmacs.
# Note: the "wo" is only a shorthand to avoid too long lines.
wo = widgetObj
wo.qteSignature = wo._qteAdmin.widgetSignature
wo.qteSetKeyFilterPolicy = wo._qteAdmin.qteSetKeyFilterPolicy
del wo
# Add the widget to the widgetList of this QtmacsApplet.
# Important: this MUST happen before macros and key-bindings are loaded
# and bound automatically (see code below) because the method to
# bind the keys will verify that the widget exists in ``widgetList``.
self._qteAdmin.widgetList.append(widgetObj)
# If a widget has a default key-bindings file then the global
# dictionary ``default_widget_keybindings`` will contain its
# name.
default_bind = qte_global.default_widget_keybindings
if autoBind and (widgetObj.qteSignature in default_bind):
# Shorthand.
module_name = default_bind[widgetObj.qteSignature]
# Import the module with the default key-bindings for the
# current widget type.
try:
mod = importlib.import_module(module_name)
except ImportError:
msg = ('Module <b>{}</b> could not be imported.'.format(
module_name))
self.qteLogger.exception(msg, stack_info=True)
return
if hasattr(mod, 'install_macros_and_bindings'):
# By convention, the module has an
# install_macros_and_bindings method. If an error
# occurs intercept it, but do not abort the method
# since the error only relates to a failed attempt to
# apply default key-bindings, not to register the
# widget (the main purpose of this method).
try:
mod.install_macros_and_bindings(widgetObj)
except Exception:
msg = ('<b>install_macros_and_bindings</b> function'
' in <b>{}</b> did not execute properly.')
msg = msg.format(module_name)
self.qteLogger.error(msg, stack_info=True)
else:
msg = ('Module <b>{}</b> has no '
'<b>install_macros_and_bindings</b>'
' method'.format(module_name))
self.qteLogger.error(msg)
return widgetObj | python | def qteAddWidget(self, widgetObj: QtGui.QWidget, isFocusable: bool=True,
widgetSignature: str=None, autoBind: bool=True):
"""
Augment the standard Qt ``widgetObj`` with Qtmacs specific fields.
Example: from a programmers perspective there is no difference
between::
wid = QtGui.QTextEdit(self)
and::
wid = self.qteAddWidget(QtGui.QTextEdit(self))
Both return a handle to a Qt widget (a ``QTextEdit`` in this
case). However, the ``qteAddWidget`` adds the following fields
to the object:
* ``_qteAdmin``: this is an instance of the ``QtmacsAdminStructure``
to tell Qtmacs how to treat the widget.
* ``qteSignature``: an attribute that returns the signature of the
widget and equals ``widgetSignature``. If no such signature was
specified it defaults to the Qt internal name as a string, eg.
for a push button this would be 'QPushButton'.
* ``qteSetKeyFilterPolicy``: this points directly to the equally
named method inside the _qteAdmin object. This is a convenience
shortcut to avoid using the _qteAdmin structure directly in
macro/applet code, because only Qtmacs itself should tamper
with it.
|Args|
* ``widgetObj`` (**QWidget**): any widget from the QtGui library.
* ``isFocusable`` (**bool**): whether or not the widget can
receive the focus.
* ``widgetSignature`` (**str**): specify the widget signature
(defaults to class name)
* ``autoBind`` (**bool**): if **True** and ``widgetSignature``
is a recognisable name (eg. **QTextEdit**) then automatically
load the appropriate key-bindings for this widget.
|Returns|
* **QWidget**: handle to widget object (or **None** if it could
not be added).
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
"""
# Add a Qtmacs data structure to the widget to allow their
# event administration. Note that, in all likelihood, the
# widget is an arbitrary Qt widget (eg. QLineEdit,
# QPushButton, etc).
widgetObj._qteAdmin = QtmacsAdminStructure(
self, isFocusable=isFocusable)
widgetObj._qteAdmin.appletID = self._qteAdmin.appletID
# Specify that this widget is not a QtmacsApplet.
widgetObj._qteAdmin.isQtmacsApplet = False
# Remember the signature of the applet containing this widget.
widgetObj._qteAdmin.appletSignature = self.qteAppletSignature()
# Set the widget signature. If none was specified, use the
# class name (eg. QLineEdit).
if widgetSignature is None:
widgetObj._qteAdmin.widgetSignature = widgetObj.__class__.__name__
else:
widgetObj._qteAdmin.widgetSignature = widgetSignature
# For convenience, as it is otherwise difficult for the macro
# programmer to determine the widget signature used by Qtmacs.
# Note: the "wo" is only a shorthand to avoid too long lines.
wo = widgetObj
wo.qteSignature = wo._qteAdmin.widgetSignature
wo.qteSetKeyFilterPolicy = wo._qteAdmin.qteSetKeyFilterPolicy
del wo
# Add the widget to the widgetList of this QtmacsApplet.
# Important: this MUST happen before macros and key-bindings are loaded
# and bound automatically (see code below) because the method to
# bind the keys will verify that the widget exists in ``widgetList``.
self._qteAdmin.widgetList.append(widgetObj)
# If a widget has a default key-bindings file then the global
# dictionary ``default_widget_keybindings`` will contain its
# name.
default_bind = qte_global.default_widget_keybindings
if autoBind and (widgetObj.qteSignature in default_bind):
# Shorthand.
module_name = default_bind[widgetObj.qteSignature]
# Import the module with the default key-bindings for the
# current widget type.
try:
mod = importlib.import_module(module_name)
except ImportError:
msg = ('Module <b>{}</b> could not be imported.'.format(
module_name))
self.qteLogger.exception(msg, stack_info=True)
return
if hasattr(mod, 'install_macros_and_bindings'):
# By convention, the module has an
# install_macros_and_bindings method. If an error
# occurs intercept it, but do not abort the method
# since the error only relates to a failed attempt to
# apply default key-bindings, not to register the
# widget (the main purpose of this method).
try:
mod.install_macros_and_bindings(widgetObj)
except Exception:
msg = ('<b>install_macros_and_bindings</b> function'
' in <b>{}</b> did not execute properly.')
msg = msg.format(module_name)
self.qteLogger.error(msg, stack_info=True)
else:
msg = ('Module <b>{}</b> has no '
'<b>install_macros_and_bindings</b>'
' method'.format(module_name))
self.qteLogger.error(msg)
return widgetObj | [
"def",
"qteAddWidget",
"(",
"self",
",",
"widgetObj",
":",
"QtGui",
".",
"QWidget",
",",
"isFocusable",
":",
"bool",
"=",
"True",
",",
"widgetSignature",
":",
"str",
"=",
"None",
",",
"autoBind",
":",
"bool",
"=",
"True",
")",
":",
"# Add a Qtmacs data structure to the widget to allow their",
"# event administration. Note that, in all likelihood, the",
"# widget is an arbitrary Qt widget (eg. QLineEdit,",
"# QPushButton, etc).",
"widgetObj",
".",
"_qteAdmin",
"=",
"QtmacsAdminStructure",
"(",
"self",
",",
"isFocusable",
"=",
"isFocusable",
")",
"widgetObj",
".",
"_qteAdmin",
".",
"appletID",
"=",
"self",
".",
"_qteAdmin",
".",
"appletID",
"# Specify that this widget is not a QtmacsApplet.",
"widgetObj",
".",
"_qteAdmin",
".",
"isQtmacsApplet",
"=",
"False",
"# Remember the signature of the applet containing this widget.",
"widgetObj",
".",
"_qteAdmin",
".",
"appletSignature",
"=",
"self",
".",
"qteAppletSignature",
"(",
")",
"# Set the widget signature. If none was specified, use the",
"# class name (eg. QLineEdit).",
"if",
"widgetSignature",
"is",
"None",
":",
"widgetObj",
".",
"_qteAdmin",
".",
"widgetSignature",
"=",
"widgetObj",
".",
"__class__",
".",
"__name__",
"else",
":",
"widgetObj",
".",
"_qteAdmin",
".",
"widgetSignature",
"=",
"widgetSignature",
"# For convenience, as it is otherwise difficult for the macro",
"# programmer to determine the widget signature used by Qtmacs.",
"# Note: the \"wo\" is only a shorthand to avoid too long lines.",
"wo",
"=",
"widgetObj",
"wo",
".",
"qteSignature",
"=",
"wo",
".",
"_qteAdmin",
".",
"widgetSignature",
"wo",
".",
"qteSetKeyFilterPolicy",
"=",
"wo",
".",
"_qteAdmin",
".",
"qteSetKeyFilterPolicy",
"del",
"wo",
"# Add the widget to the widgetList of this QtmacsApplet.",
"# Important: this MUST happen before macros and key-bindings are loaded",
"# and bound automatically (see code below) because the method to",
"# bind the keys will verify that the widget exists in ``widgetList``.",
"self",
".",
"_qteAdmin",
".",
"widgetList",
".",
"append",
"(",
"widgetObj",
")",
"# If a widget has a default key-bindings file then the global",
"# dictionary ``default_widget_keybindings`` will contain its",
"# name.",
"default_bind",
"=",
"qte_global",
".",
"default_widget_keybindings",
"if",
"autoBind",
"and",
"(",
"widgetObj",
".",
"qteSignature",
"in",
"default_bind",
")",
":",
"# Shorthand.",
"module_name",
"=",
"default_bind",
"[",
"widgetObj",
".",
"qteSignature",
"]",
"# Import the module with the default key-bindings for the",
"# current widget type.",
"try",
":",
"mod",
"=",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"except",
"ImportError",
":",
"msg",
"=",
"(",
"'Module <b>{}</b> could not be imported.'",
".",
"format",
"(",
"module_name",
")",
")",
"self",
".",
"qteLogger",
".",
"exception",
"(",
"msg",
",",
"stack_info",
"=",
"True",
")",
"return",
"if",
"hasattr",
"(",
"mod",
",",
"'install_macros_and_bindings'",
")",
":",
"# By convention, the module has an",
"# install_macros_and_bindings method. If an error",
"# occurs intercept it, but do not abort the method",
"# since the error only relates to a failed attempt to",
"# apply default key-bindings, not to register the",
"# widget (the main purpose of this method).",
"try",
":",
"mod",
".",
"install_macros_and_bindings",
"(",
"widgetObj",
")",
"except",
"Exception",
":",
"msg",
"=",
"(",
"'<b>install_macros_and_bindings</b> function'",
"' in <b>{}</b> did not execute properly.'",
")",
"msg",
"=",
"msg",
".",
"format",
"(",
"module_name",
")",
"self",
".",
"qteLogger",
".",
"error",
"(",
"msg",
",",
"stack_info",
"=",
"True",
")",
"else",
":",
"msg",
"=",
"(",
"'Module <b>{}</b> has no '",
"'<b>install_macros_and_bindings</b>'",
"' method'",
".",
"format",
"(",
"module_name",
")",
")",
"self",
".",
"qteLogger",
".",
"error",
"(",
"msg",
")",
"return",
"widgetObj"
] | Augment the standard Qt ``widgetObj`` with Qtmacs specific fields.
Example: from a programmer's perspective there is no difference
between::
wid = QtGui.QTextEdit(self)
and::
wid = self.qteAddWidget(QtGui.QTextEdit(self))
Both return a handle to a Qt widget (a ``QTextEdit`` in this
case). However, the ``qteAddWidget`` adds the following fields
to the object:
* ``_qteAdmin``: this is an instance of the ``QtmacsAdminStructure``
to tell Qtmacs how to treat the widget.
* ``qteSignature``: an attribute that returns the signature of the
widget and equals ``widgetSignature``. If no such signature was
specified it defaults to the Qt internal name as a string, eg.
for a push button this would be 'QPushButton'.
* ``qteSetKeyFilterPolicy``: this points directly to the equally
named method inside the _qteAdmin object. This is a convenience
shortcut to avoid using the _qteAdmin structure directly in
macro/applet code, because only Qtmacs itself should tamper
with it.
|Args|
* ``widgetObj`` (**QWidget**): any widget from the QtGui library.
* ``isFocusable`` (**bool**): whether or not the widget can
receive the focus.
* ``widgetSignature`` (**str**): specify the widget signature
(defaults to class name)
* ``autoBind`` (**bool**): if **True** and ``widgetSignature``
is a recognisable name (eg. **QTextEdit**) then automatically
load the appropriate key-bindings for this widget.
|Returns|
* **QWidget**: handle to widget object (or **None** if it could
not be added).
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type. | [
"Augment",
"the",
"standard",
"Qt",
"widgetObj",
"with",
"Qtmacs",
"specific",
"fields",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L260-L383 | train |
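The entry above describes ``qteAddWidget``; a minimal usage sketch, assuming a ``QtmacsApplet`` subclass whose base constructor takes the applet ID and assuming the PyQt4 ``QtGui`` module (both assumptions, not part of the entry), could look like::

    from PyQt4 import QtGui

    class DemoApplet(QtmacsApplet):
        def __init__(self, appletID):
            super().__init__(appletID)
            # Register the widget so Qtmacs can administer its focus and key-bindings.
            self.edit = self.qteAddWidget(QtGui.QTextEdit(self))
            # The returned handle now carries the Qtmacs specific attributes.
            print(self.edit.qteSignature)   # -> 'QTextEdit'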
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteSetAppletSignature | def qteSetAppletSignature(self, signature: str):
"""
Specify the applet signature.
This signature is used by Qtmacs at run time to determine
which macros are compatible with the applet. Macros have an
identically called method so that Qtmacs can determine which
macros are compatible with which applets. This method is
typically called only in the applet constructor, but changing
the applet signature at run time is possible.
The signature must be a non-empty string and not contain the
'*' symbol.
Note: the default signature is the class name as a string,
eg. if the applet class is called MyAppClass, then the initial
macro signature is the string 'MyAppClass'.
|Args|
* ``signature`` (**str**): the signature of this applet to
determine compatible macros at run time.
|Returns|
* **None**
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
* **QtmacsOtherError** if the signature is empty or contains
the '*' wildcard symbol.
"""
if '*' in signature:
raise QtmacsOtherError('The applet signature must not contain "*"')
if signature == '':
raise QtmacsOtherError('The applet signature must be non-empty')
self._qteAdmin.appletSignature = signature | python | def qteSetAppletSignature(self, signature: str):
"""
Specify the applet signature.
This signature is used by Qtmacs at run time to determine
which macros are compatible with the applet. Macros have an
identically called method so that Qtmacs can determine which
macros are compatible with which applets. This method is
typically called only in the applet constructor, but changing
the applet signature at run time is possible.
The signature must be a non-empty string and not contain the
'*' symbol.
Note: the default signature is the class name as a string,
eg. if the applet class is called MyAppClass, then the initial
macro signature is the string 'MyAppClass'.
|Args|
* ``signature`` (**str**): the signature of this applet to
determine compatible macros at run time.
|Returns|
* **None**
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
* **QtmacsOtherError** if the signature is empty or contains
the '*' wildcard symbol.
"""
if '*' in signature:
raise QtmacsOtherError('The applet signature must not contain "*"')
if signature == '':
raise QtmacsOtherError('The applet signature must be non-empty')
self._qteAdmin.appletSignature = signature | [
"def",
"qteSetAppletSignature",
"(",
"self",
",",
"signature",
":",
"str",
")",
":",
"if",
"'*'",
"in",
"signature",
":",
"raise",
"QtmacsOtherError",
"(",
"'The applet signature must not contain \"*\"'",
")",
"if",
"signature",
"==",
"''",
":",
"raise",
"QtmacsOtherError",
"(",
"'The applet signature must be non-empty'",
")",
"self",
".",
"_qteAdmin",
".",
"appletSignature",
"=",
"signature"
] | Specify the applet signature.
This signature is used by Qtmacs at run time to determine
which macros are compatible with the applet. Macros have an
identically called method so that Qtmacs can determine which
macros are compatible with which applets. This method is
typically called only in the applet constructor, but changing
the applet signature at run time is possible.
The signature must be a non-empty string and not contain the
'*' symbol.
Note: the default signature is the class name as a string,
eg. if the applet class is called MyAppClass, then the initial
macro signature is the string 'MyAppClass'.
|Args|
* ``signature`` (**str**): the signature of this applet to
determine compatible macros at run time.
|Returns|
* **None**
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
* **QtmacsOtherError** if the signature is empty or contains
the '*' wildcard symbol. | [
"Specify",
"the",
"applet",
"signature",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L386-L425 | train |
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteAutoremoveDeletedWidgets | def qteAutoremoveDeletedWidgets(self):
"""
Remove all widgets from the internal widget list that do not
exist anymore according to SIP.
|Args|
* **None**
|Returns|
* **None**
|Raises|
* **None**
"""
widget_list = self._qteAdmin.widgetList
deleted_widgets = [_ for _ in widget_list if sip.isdeleted(_)]
for widgetObj in deleted_widgets:
self._qteAdmin.widgetList.remove(widgetObj) | python | def qteAutoremoveDeletedWidgets(self):
"""
Remove all widgets from the internal widget list that do not
exist anymore according to SIP.
|Args|
* **None**
|Returns|
* **None**
|Raises|
* **None**
"""
widget_list = self._qteAdmin.widgetList
deleted_widgets = [_ for _ in widget_list if sip.isdeleted(_)]
for widgetObj in deleted_widgets:
self._qteAdmin.widgetList.remove(widgetObj) | [
"def",
"qteAutoremoveDeletedWidgets",
"(",
"self",
")",
":",
"widget_list",
"=",
"self",
".",
"_qteAdmin",
".",
"widgetList",
"deleted_widgets",
"=",
"[",
"_",
"for",
"_",
"in",
"widget_list",
"if",
"sip",
".",
"isdeleted",
"(",
"_",
")",
"]",
"for",
"widgetObj",
"in",
"deleted_widgets",
":",
"self",
".",
"_qteAdmin",
".",
"widgetList",
".",
"remove",
"(",
"widgetObj",
")"
] | Remove all widgets from the internal widget list that do not
exist anymore according to SIP.
|Args|
* **None**
|Returns|
* **None**
|Raises|
* **None** | [
"Remove",
"all",
"widgets",
"from",
"the",
"internal",
"widget",
"list",
"that",
"do",
"not",
"exist",
"anymore",
"according",
"to",
"SIP",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L446-L466 | train |
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteSetWidgetFocusOrder | def qteSetWidgetFocusOrder(self, widList: tuple):
"""
Change the focus order of the widgets in this applet.
This method re-arranges the internal (cyclic) widget list so
that all widgets specified in ``widList`` will be focused in
the given order.
|Args|
* ``widList`` (**tuple**): a tuple of widget objects.
|Returns|
* **None**
|Raises|
* **None**
"""
# A list with less than two entries cannot be re-ordered.
if len(widList) < 2:
return
# Housekeeping: remove non-existing widgets from the admin structure.
self.qteAutoremoveDeletedWidgets()
# Remove all **None** widgets.
widList = [_ for _ in widList if _ is not None]
# Ensure that all widgets exist in the current applet.
for wid in widList:
if wid not in self._qteAdmin.widgetList:
msg = 'Cannot change focus order because some '
msg += 'widgets do not exist.'
self.qteLogger.warning(msg)
return
# Remove all duplicates from the user supplied list.
newList = [widList[0]]
for wid in widList[1:]:
if wid not in newList:
newList.append(wid)
# If the duplicate free list has only one entry then there is
# nothing left to reorder.
if len(newList) < 2:
return
# The purpose of the code is the following: suppose
# _qteAdmin.widgetList = [0,1,2,3,4,5] and newList=[2,5,1].
# Then change _qteAdmin.widgetList to [0,2,5,1,3,4]. Step
# 1: remove all but the first widget in newList from
# _qteAdmin.widgetList.
for wid in newList[1:]:
self._qteAdmin.widgetList.remove(wid)
# 2: re-insert the removed elements as a sequence again.
startIdx = self._qteAdmin.widgetList.index(newList[0]) + 1
for idx, wid in enumerate(newList[1:]):
self._qteAdmin.widgetList.insert(startIdx + idx, wid) | python | def qteSetWidgetFocusOrder(self, widList: tuple):
"""
Change the focus order of the widgets in this applet.
This method re-arranges the internal (cyclic) widget list so
that all widgets specified in ``widList`` will be focused in
the given order.
|Args|
* ``widList`` (**tuple**): a tuple of widget objects.
|Returns|
* **None**
|Raises|
* **None**
"""
# A list with less than two entries cannot be re-ordered.
if len(widList) < 2:
return
# Housekeeping: remove non-existing widgets from the admin structure.
self.qteAutoremoveDeletedWidgets()
# Remove all **None** widgets.
widList = [_ for _ in widList if _ is not None]
# Ensure that all widgets exist in the current applet.
for wid in widList:
if wid not in self._qteAdmin.widgetList:
msg = 'Cannot change focus order because some '
msg += 'widgets do not exist.'
self.qteLogger.warning(msg)
return
# Remove all duplicates from the user supplied list.
newList = [widList[0]]
for wid in widList[1:]:
if wid not in newList:
newList.append(wid)
# If the duplicate free list has only one entry then there is
# nothing left to reorder.
if len(newList) < 2:
return
# The purpose of the code is the following: suppose
# _qteAdmin.widgetList = [0,1,2,3,4,5] and newList=[2,5,1].
# Then change _qteAdmin.widgetList to [0,2,5,1,3,4]. Step
# 1: remove all but the first widget in newList from
# _qteAdmin.widgetList.
for wid in newList[1:]:
self._qteAdmin.widgetList.remove(wid)
# 2: re-insert the removed elements as a sequence again.
startIdx = self._qteAdmin.widgetList.index(newList[0]) + 1
for idx, wid in enumerate(newList[1:]):
self._qteAdmin.widgetList.insert(startIdx + idx, wid) | [
"def",
"qteSetWidgetFocusOrder",
"(",
"self",
",",
"widList",
":",
"tuple",
")",
":",
"# A list with less than two entries cannot be re-ordered.",
"if",
"len",
"(",
"widList",
")",
"<",
"2",
":",
"return",
"# Housekeeping: remove non-existing widgets from the admin structure.",
"self",
".",
"qteAutoremoveDeletedWidgets",
"(",
")",
"# Remove all **None** widgets.",
"widList",
"=",
"[",
"_",
"for",
"_",
"in",
"widList",
"if",
"_",
"is",
"not",
"None",
"]",
"# Ensure that all widgets exist in the current applet.",
"for",
"wid",
"in",
"widList",
":",
"if",
"wid",
"not",
"in",
"self",
".",
"_qteAdmin",
".",
"widgetList",
":",
"msg",
"=",
"'Cannot change focus order because some '",
"msg",
"+=",
"'widgets do not exist.'",
"self",
".",
"qteLogger",
".",
"warning",
"(",
"msg",
")",
"return",
"# Remove all duplicates from the user supplied list.",
"newList",
"=",
"[",
"widList",
"[",
"0",
"]",
"]",
"for",
"wid",
"in",
"widList",
"[",
"1",
":",
"]",
":",
"if",
"wid",
"not",
"in",
"newList",
":",
"newList",
".",
"append",
"(",
"wid",
")",
"# If the duplicate free list has only one entry then there is",
"# nothing left to reorder.",
"if",
"len",
"(",
"newList",
")",
"<",
"2",
":",
"return",
"# The purpose of the code is the following: suppose",
"# _qteAdmin.widgetList = [0,1,2,3,4,5] and newList=[2,5,1].",
"# Then change _qteAdmin.widgetList to [0,1,2,5,1,3,4]. Step",
"# 1: remove all but the first widget in newList from",
"# _qteAdmin.widgetList.",
"for",
"wid",
"in",
"newList",
"[",
"1",
":",
"]",
":",
"self",
".",
"_qteAdmin",
".",
"widgetList",
".",
"remove",
"(",
"wid",
")",
"# 2: re-insert the removed elements as a sequence again.",
"startIdx",
"=",
"self",
".",
"_qteAdmin",
".",
"widgetList",
".",
"index",
"(",
"newList",
"[",
"0",
"]",
")",
"+",
"1",
"for",
"idx",
",",
"wid",
"in",
"enumerate",
"(",
"newList",
"[",
"1",
":",
"]",
")",
":",
"self",
".",
"_qteAdmin",
".",
"widgetList",
".",
"insert",
"(",
"startIdx",
"+",
"idx",
",",
"wid",
")"
] | Change the focus order of the widgets in this applet.
This method re-arranges the internal (cyclic) widget list so
that all widgets specified in ``widList`` will be focused in
the given order.
|Args|
* ``widList`` (**tuple**): a tuple of widget objects.
|Returns|
* **None**
|Raises|
* **None** | [
"Change",
"the",
"focus",
"order",
"of",
"the",
"widgets",
"in",
"this",
"applet",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L469-L529 | train |
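The reordering performed by ``qteSetWidgetFocusOrder`` can be reproduced in plain Python, without any Qt objects, to show the intended result for the example used in the comments::

    widget_list = [0, 1, 2, 3, 4, 5]
    new_order = [2, 5, 1]
    # Step 1: drop all but the first requested widget.
    for wid in new_order[1:]:
        widget_list.remove(wid)
    # Step 2: re-insert them directly after the first requested widget.
    start = widget_list.index(new_order[0]) + 1
    for idx, wid in enumerate(new_order[1:]):
        widget_list.insert(start + idx, wid)
    print(widget_list)   # [0, 2, 5, 1, 3, 4]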
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteNextWidget | def qteNextWidget(self, numSkip: int=1, ofsWidget: QtGui.QWidget=None,
skipVisible: bool=False, skipInvisible: bool=True,
skipFocusable: bool=False,
skipUnfocusable: bool=True):
"""
Return the next widget in cyclic order.
If ``ofsWidget`` is **None** then start counting at the
currently active widget and return the widget ``numSkip``
items away in cyclic order in the internal widget list. If
``numSkip`` is positive traverse the widget list forwards,
otherwise backwards. The method supports the following
selection criteria:
* ``skipVisible``: only invisible widgets are considered.
* ``skipInvisible``: only visible widgets are considered.
* ``skipFocusable``: only unfocusable widgets are considered.
* ``skipUnfocusable``: only focusable widgets are considered.
|Args|
* ``numSkip`` (**int**): number of applets to skip.
* ``ofsWidget`` (**QWidget**): widget from where to start counting.
* ``skipVisible`` (**bool**): whether or not to skip currently
shown widgets.
* ``skipInvisible`` (**bool**): whether or not to skip currently
not shown widgets.
* ``skipFocusable`` (**bool**): whether or not to skip focusable
widgets.
* ``skipUnfocusable`` (**bool**): whether or not to skip unfocusable
widgets.
|Returns|
* **QWidget**: either the next widget that fits the criteria, or
**None** if no such widget exists.
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
* **QtmacsOtherError** if ``ofsWidget`` was not added with
``qteAddWidget``.
"""
# Check type of input arguments.
if not hasattr(ofsWidget, '_qteAdmin') and (ofsWidget is not None):
msg = '<ofsWidget> was probably not added with <qteAddWidget>'
msg += ' method because it lacks the <_qteAdmin> attribute.'
raise QtmacsOtherError(msg)
# Housekeeping: remove non-existing widgets from the admin structure.
self.qteAutoremoveDeletedWidgets()
# Make a copy of the widget list.
widList = list(self._qteAdmin.widgetList)
# Return immediately if the widget list is empty. The actual
# return value is either self._qteActiveWidget (if it points
# to a child widget of the current applet), or None.
if not len(widList):
if qteGetAppletFromWidget(self._qteActiveWidget) is self:
return self._qteActiveWidget
else:
return None
if skipInvisible:
# Remove all invisible widgets.
widList = [wid for wid in widList if wid.isVisible()]
if skipVisible:
# Remove all visible widgets.
widList = [wid for wid in widList if not wid.isVisible()]
if skipFocusable:
# Remove all focusable widgets.
widList = [wid for wid in widList if not wid._qteAdmin.isFocusable]
if skipUnfocusable:
# Remove all unfocusable widgets.
widList = [wid for wid in widList if wid._qteAdmin.isFocusable]
# Return immediately if the list is empty. This is typically
# the case at startup before any applet has been added.
if not len(widList):
return None
# If no offset widget was given then use the currently active one.
if ofsWidget is None:
ofsWidget = self._qteActiveWidget
if (ofsWidget is not None) and (numSkip == 0):
if qteIsQtmacsWidget(ofsWidget):
return ofsWidget
# Determine the index of the offset widget; assume it is zero
# if the widget does not exist, eg. if the currently active
# applet is not part of the pruned widList list.
try:
ofsIdx = widList.index(ofsWidget)
except ValueError:
ofsIdx = 0
# Compute the index of the next widget and wrap around the
# list if necessary.
ofsIdx = (ofsIdx + numSkip) % len(widList)
# Return the widget.
return widList[ofsIdx] | python | def qteNextWidget(self, numSkip: int=1, ofsWidget: QtGui.QWidget=None,
skipVisible: bool=False, skipInvisible: bool=True,
skipFocusable: bool=False,
skipUnfocusable: bool=True):
"""
Return the next widget in cyclic order.
If ``ofsWidget`` is **None** then start counting at the
currently active widget and return the widget ``numSkip``
items away in cyclic order in the internal widget list. If
``numSkip`` is positive traverse the widget list forwards,
otherwise backwards. The method supports the following
selection criteria:
* ``skipVisible``: only invisible widgets are considered.
* ``skipInvisible``: only visible widgets are considered.
* ``skipFocusable``: only unfocusable widgets are considered.
* ``skipUnfocusable``: only focusable widgets are considered.
|Args|
* ``numSkip`` (**int**): number of applets to skip.
* ``ofsWidget`` (**QWidget**): widget from where to start counting.
* ``skipVisible`` (**bool**): whether or not to skip currently
shown widgets.
* ``skipInvisible`` (**bool**): whether or not to skip currently
not shown widgets.
* ``skipFocusable`` (**bool**): whether or not to skip focusable
widgets.
* ``skipUnfocusable`` (**bool**): whether or not to skip unfocusable
widgets.
|Returns|
* **QWidget**: either the next widget that fits the criteria, or
**None** if no such widget exists.
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
* **QtmacsOtherError** if ``ofsWidget`` was not added with
``qteAddWidget``.
"""
# Check type of input arguments.
if not hasattr(ofsWidget, '_qteAdmin') and (ofsWidget is not None):
msg = '<ofsWidget> was probably not added with <qteAddWidget>'
msg += ' method because it lacks the <_qteAdmin> attribute.'
raise QtmacsOtherError(msg)
# Housekeeping: remove non-existing widgets from the admin structure.
self.qteAutoremoveDeletedWidgets()
# Make a copy of the widget list.
widList = list(self._qteAdmin.widgetList)
# Return immediately if the widget list is empty. The actual
# return value is either self._qteActiveWidget (if it points
# to a child widget of the current applet), or None.
if not len(widList):
if qteGetAppletFromWidget(self._qteActiveWidget) is self:
return self._qteActiveWidget
else:
return None
if skipInvisible:
# Remove all invisible widgets.
widList = [wid for wid in widList if wid.isVisible()]
if skipVisible:
# Remove all visible widgets.
widList = [wid for wid in widList if not wid.isVisible()]
if skipFocusable:
# Remove all focusable widgets.
widList = [wid for wid in widList if not wid._qteAdmin.isFocusable]
if skipUnfocusable:
# Remove all unfocusable widgets.
widList = [wid for wid in widList if wid._qteAdmin.isFocusable]
# Return immediately if the list is empty. This is typically
# the case at startup before any applet has been added.
if not len(widList):
return None
# If no offset widget was given then use the currently active one.
if ofsWidget is None:
ofsWidget = self._qteActiveWidget
if (ofsWidget is not None) and (numSkip == 0):
if qteIsQtmacsWidget(ofsWidget):
return ofsWidget
# Determine the index of the offset widget; assume it is zero
# if the widget does not exist, eg. if the currently active
# applet is not part of the pruned widList list.
try:
ofsIdx = widList.index(ofsWidget)
except ValueError:
ofsIdx = 0
# Compute the index of the next widget and wrap around the
# list if necessary.
ofsIdx = (ofsIdx + numSkip) % len(widList)
# Return the widget.
return widList[ofsIdx] | [
"def",
"qteNextWidget",
"(",
"self",
",",
"numSkip",
":",
"int",
"=",
"1",
",",
"ofsWidget",
":",
"QtGui",
".",
"QWidget",
"=",
"None",
",",
"skipVisible",
":",
"bool",
"=",
"False",
",",
"skipInvisible",
":",
"bool",
"=",
"True",
",",
"skipFocusable",
":",
"bool",
"=",
"False",
",",
"skipUnfocusable",
":",
"bool",
"=",
"True",
")",
":",
"# Check type of input arguments.",
"if",
"not",
"hasattr",
"(",
"ofsWidget",
",",
"'_qteAdmin'",
")",
"and",
"(",
"ofsWidget",
"is",
"not",
"None",
")",
":",
"msg",
"=",
"'<ofsWidget> was probably not added with <qteAddWidget>'",
"msg",
"+=",
"' method because it lacks the <_qteAdmin> attribute.'",
"raise",
"QtmacsOtherError",
"(",
"msg",
")",
"# Housekeeping: remove non-existing widgets from the admin structure.",
"self",
".",
"qteAutoremoveDeletedWidgets",
"(",
")",
"# Make a copy of the widget list.",
"widList",
"=",
"list",
"(",
"self",
".",
"_qteAdmin",
".",
"widgetList",
")",
"# Return immediately if the widget list is empty. The actual",
"# return value is either self._qteActiveWidget (if it points",
"# to a child widget of the current applet), or None.",
"if",
"not",
"len",
"(",
"widList",
")",
":",
"if",
"qteGetAppletFromWidget",
"(",
"self",
".",
"_qteActiveWidget",
")",
"is",
"self",
":",
"return",
"self",
".",
"_qteActiveWidget",
"else",
":",
"return",
"None",
"if",
"skipInvisible",
":",
"# Remove all invisible widgets.",
"widList",
"=",
"[",
"wid",
"for",
"wid",
"in",
"widList",
"if",
"wid",
".",
"isVisible",
"(",
")",
"]",
"if",
"skipVisible",
":",
"# Remove all visible widgets.",
"widList",
"=",
"[",
"wid",
"for",
"wid",
"in",
"widList",
"if",
"not",
"wid",
".",
"isVisible",
"(",
")",
"]",
"if",
"skipFocusable",
":",
"# Remove all visible widgets.",
"widList",
"=",
"[",
"wid",
"for",
"wid",
"in",
"widList",
"if",
"not",
"wid",
".",
"_qteAdmin",
".",
"isFocusable",
"]",
"if",
"skipUnfocusable",
":",
"# Remove all unfocusable widgets.",
"widList",
"=",
"[",
"wid",
"for",
"wid",
"in",
"widList",
"if",
"wid",
".",
"_qteAdmin",
".",
"isFocusable",
"]",
"# Return immediately if the list is empty. This is typically",
"# the case at startup before any applet has been added.",
"if",
"not",
"len",
"(",
"widList",
")",
":",
"return",
"None",
"# If no offset widget was given then use the currently active one.",
"if",
"ofsWidget",
"is",
"None",
":",
"ofsWidget",
"=",
"self",
".",
"_qteActiveWidget",
"if",
"(",
"ofsWidget",
"is",
"not",
"None",
")",
"and",
"(",
"numSkip",
"==",
"0",
")",
":",
"if",
"qteIsQtmacsWidget",
"(",
"ofsWidget",
")",
":",
"return",
"ofsWidget",
"# Determine the index of the offset widget; assume it is zero",
"# if the widget does not exist, eg. if the currently active",
"# applet is not part of the pruned widList list.",
"try",
":",
"ofsIdx",
"=",
"widList",
".",
"index",
"(",
"ofsWidget",
")",
"except",
"ValueError",
":",
"ofsIdx",
"=",
"0",
"# Compute the index of the next widget and wrap around the",
"# list if necessary.",
"ofsIdx",
"=",
"(",
"ofsIdx",
"+",
"numSkip",
")",
"%",
"len",
"(",
"widList",
")",
"# Return the widget.",
"return",
"widList",
"[",
"ofsIdx",
"]"
] | Return the next widget in cyclic order.
If ``ofsWidget`` is **None** then start counting at the
currently active widget and return the widget ``numSkip``
items away in cyclic order in the internal widget list. If
``numSkip`` is positive traverse the widget list forwards,
otherwise backwards. The method supports the following
selection criteria:
* ``skipVisible``: only invisible widgets are considered.
* ``skipInvisible``: only visible widgets are considered.
* ``skipFocusable``: only unfocusable widgets are considered.
* ``skipUnfocusable``: only focusable widgets are considered.
|Args|
* ``numSkip`` (**int**): number of applets to skip.
* ``ofsWidget`` (**QWidget**): widget from where to start counting.
* ``skipVisible`` (**bool**): whether or not to skip currently
shown widgets.
* ``skipInvisible`` (**bool**): whether or not to skip currently
not shown widgets.
* ``skipFocusable`` (**bool**): whether or not to skip focusable
widgets.
* ``skipUnfocusable`` (**bool**): whether or not to skip unfocusable
widgets.
|Returns|
* **QWidget**: either the next widget that fits the criteria, or
**None** if no such widget exists.
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
* **QtmacsOtherError** if ``ofsWidget`` was not added with
``qteAddWidget``. | [
"Return",
"the",
"next",
"widget",
"in",
"cyclic",
"order",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L532-L638 | train |
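A sketch of how a macro might use ``qteNextWidget``; ``applet`` is assumed to be a ``QtmacsApplet`` with several registered widgets (an assumption, not part of the entry above)::

    nxt = applet.qteNextWidget()                    # next visible, focusable widget
    prv = applet.qteNextWidget(numSkip=-1)          # traverse backwards
    hidden = applet.qteNextWidget(skipVisible=True,
                                  skipInvisible=False)   # next widget that is not shown
    if nxt is not None:
        print(nxt.qteSignature)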
olitheolix/qtmacs | qtmacs/base_applet.py | QtmacsApplet.qteMakeWidgetActive | def qteMakeWidgetActive(self, widgetObj: QtGui.QWidget):
"""
Give keyboard focus to ``widgetObj``.
If ``widgetObj`` is **None** then the internal focus state
is reset, but the focus manager will automatically
activate the first available widget again.
|Args|
* ``widgetObj`` (**QWidget**): the widget to focus on.
|Returns|
* **None**
|Raises|
* **QtmacsOtherError** if ``widgetObj`` was not added with
``qteAddWidget``.
"""
# Void the active widget information.
if widgetObj is None:
self._qteActiveWidget = None
return
# Ensure that this applet is an ancestor of ``widgetObj``
# inside the Qt hierarchy.
if qteGetAppletFromWidget(widgetObj) is not self:
msg = 'The specified widget is not inside the current applet.'
raise QtmacsOtherError(msg)
# If widgetObj is not registered with Qtmacs then simply declare
# it active and return.
if not hasattr(widgetObj, '_qteAdmin'):
self._qteActiveWidget = widgetObj
return
# Do nothing if widgetObj refers to an applet.
if widgetObj._qteAdmin.isQtmacsApplet:
self._qteActiveWidget = None
return
# Housekeeping: remove non-existing widgets from the admin structure.
self.qteAutoremoveDeletedWidgets()
# Verify the widget is registered for this applet.
if widgetObj not in self._qteAdmin.widgetList:
msg = 'Widget is not registered for this applet.'
self.qteLogger.error(msg, stack_info=True)
self._qteActiveWidget = None
return
# The focus manager in QtmacsMain will hand the focus to
# whatever the _qteActiveWidget variable of the active applet
# points to.
self.qteSetWidgetFocusOrder((self._qteActiveWidget, widgetObj))
self._qteActiveWidget = widgetObj | python | def qteMakeWidgetActive(self, widgetObj: QtGui.QWidget):
"""
Give keyboard focus to ``widgetObj``.
If ``widgetObj`` is **None** then the internal focus state
is reset, but the focus manager will automatically
activate the first available widget again.
|Args|
* ``widgetObj`` (**QWidget**): the widget to focus on.
|Returns|
* **None**
|Raises|
* **QtmacsOtherError** if ``widgetObj`` was not added with
``qteAddWidget``.
"""
# Void the active widget information.
if widgetObj is None:
self._qteActiveWidget = None
return
# Ensure that this applet is an ancestor of ``widgetObj``
# inside the Qt hierarchy.
if qteGetAppletFromWidget(widgetObj) is not self:
msg = 'The specified widget is not inside the current applet.'
raise QtmacsOtherError(msg)
# If widgetObj is not registered with Qtmacs then simply declare
# it active and return.
if not hasattr(widgetObj, '_qteAdmin'):
self._qteActiveWidget = widgetObj
return
# Do nothing if widgetObj refers to an applet.
if widgetObj._qteAdmin.isQtmacsApplet:
self._qteActiveWidget = None
return
# Housekeeping: remove non-existing widgets from the admin structure.
self.qteAutoremoveDeletedWidgets()
# Verify the widget is registered for this applet.
if widgetObj not in self._qteAdmin.widgetList:
msg = 'Widget is not registered for this applet.'
self.qteLogger.error(msg, stack_info=True)
self._qteActiveWidget = None
return
# The focus manager in QtmacsMain will hand the focus to
# whatever the _qteActiveWidget variable of the active applet
# points to.
self.qteSetWidgetFocusOrder((self._qteActiveWidget, widgetObj))
self._qteActiveWidget = widgetObj | [
"def",
"qteMakeWidgetActive",
"(",
"self",
",",
"widgetObj",
":",
"QtGui",
".",
"QWidget",
")",
":",
"# Void the active widget information.",
"if",
"widgetObj",
"is",
"None",
":",
"self",
".",
"_qteActiveWidget",
"=",
"None",
"return",
"# Ensure that this applet is an ancestor of ``widgetObj``",
"# inside the Qt hierarchy.",
"if",
"qteGetAppletFromWidget",
"(",
"widgetObj",
")",
"is",
"not",
"self",
":",
"msg",
"=",
"'The specified widget is not inside the current applet.'",
"raise",
"QtmacsOtherError",
"(",
"msg",
")",
"# If widgetObj is not registered with Qtmacs then simply declare",
"# it active and return.",
"if",
"not",
"hasattr",
"(",
"widgetObj",
",",
"'_qteAdmin'",
")",
":",
"self",
".",
"_qteActiveWidget",
"=",
"widgetObj",
"return",
"# Do nothing if widgetObj refers to an applet.",
"if",
"widgetObj",
".",
"_qteAdmin",
".",
"isQtmacsApplet",
":",
"self",
".",
"_qteActiveWidget",
"=",
"None",
"return",
"# Housekeeping: remove non-existing widgets from the admin structure.",
"self",
".",
"qteAutoremoveDeletedWidgets",
"(",
")",
"# Verify the widget is registered for this applet.",
"if",
"widgetObj",
"not",
"in",
"self",
".",
"_qteAdmin",
".",
"widgetList",
":",
"msg",
"=",
"'Widget is not registered for this applet.'",
"self",
".",
"qteLogger",
".",
"error",
"(",
"msg",
",",
"stack_info",
"=",
"True",
")",
"self",
".",
"_qteActiveWidget",
"=",
"None",
"return",
"# The focus manager in QtmacsMain will hand the focus to",
"# whatever the _qteActiveWidget variable of the active applet",
"# points to.",
"self",
".",
"qteSetWidgetFocusOrder",
"(",
"(",
"self",
".",
"_qteActiveWidget",
",",
"widgetObj",
")",
")",
"self",
".",
"_qteActiveWidget",
"=",
"widgetObj"
] | Give keyboard focus to ``widgetObj``.
If ``widgetObj`` is **None** then the internal focus state
is reset, but the focus manager will automatically
activate the first available widget again.
|Args|
* ``widgetObj`` (**QWidget**): the widget to focus on.
|Returns|
* **None**
|Raises|
* **QtmacsOtherError** if ``widgetObj`` was not added with
``qteAddWidget``. | [
"Give",
"keyboard",
"focus",
"to",
"widgetObj",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/base_applet.py#L641-L698 | train |
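Together with ``qteNextWidget`` this yields a typical focus-cycling pattern; the snippet below is only a sketch and assumes ``applet`` is a ``QtmacsApplet`` managed by a running Qtmacs instance::

    nxt = applet.qteNextWidget()
    if nxt is not None:
        applet.qteMakeWidgetActive(nxt)   # the focus manager hands it the keyboard focus
    applet.qteMakeWidgetActive(None)      # reset; the first available widget is re-activated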
sublee/etc | etc/adapters/mock.py | split_key | def split_key(key):
"""Splits a node key."""
if key == KEY_SEP:
return ()
key_chunks = tuple(key.strip(KEY_SEP).split(KEY_SEP))
if key_chunks[0].startswith(KEY_SEP):
return (key_chunks[0][len(KEY_SEP):],) + key_chunks[1:]
else:
return key_chunks | python | def split_key(key):
"""Splits a node key."""
if key == KEY_SEP:
return ()
key_chunks = tuple(key.strip(KEY_SEP).split(KEY_SEP))
if key_chunks[0].startswith(KEY_SEP):
return (key_chunks[0][len(KEY_SEP):],) + key_chunks[1:]
else:
return key_chunks | [
"def",
"split_key",
"(",
"key",
")",
":",
"if",
"key",
"==",
"KEY_SEP",
":",
"return",
"(",
")",
"key_chunks",
"=",
"tuple",
"(",
"key",
".",
"strip",
"(",
"KEY_SEP",
")",
".",
"split",
"(",
"KEY_SEP",
")",
")",
"if",
"key_chunks",
"[",
"0",
"]",
".",
"startswith",
"(",
"KEY_SEP",
")",
":",
"return",
"(",
"key_chunks",
"[",
"0",
"]",
"[",
"len",
"(",
"KEY_SEP",
")",
":",
"]",
",",
")",
"+",
"key_chunks",
"[",
"1",
":",
"]",
"else",
":",
"return",
"key_chunks"
] | Splits a node key. | [
"Splits",
"a",
"node",
"key",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/mock.py#L32-L40 | train |
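Assuming ``KEY_SEP`` is the usual etcd separator ``'/'`` (an assumption; the constant is defined elsewhere in the module), ``split_key`` behaves as follows::

    split_key('/')           # ()
    split_key('/foo/bar')    # ('foo', 'bar')
    split_key('foo/bar/')    # ('foo', 'bar')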
sublee/etc | etc/adapters/mock.py | MockNode.set | def set(self, index, value=None, dir=False, ttl=None, expiration=None):
"""Updates the node data."""
if bool(dir) is (value is not None):
raise TypeError('Choose one of value or directory')
if (ttl is not None) is (expiration is None):
raise TypeError('Both of ttl and expiration required')
self.value = value
if self.dir != dir:
self.dir = dir
self.nodes = {} if dir else None
self.ttl = ttl
self.expiration = expiration
self.modified_index = index | python | def set(self, index, value=None, dir=False, ttl=None, expiration=None):
"""Updates the node data."""
if bool(dir) is (value is not None):
raise TypeError('Choose one of value or directory')
if (ttl is not None) is (expiration is None):
raise TypeError('Both of ttl and expiration required')
self.value = value
if self.dir != dir:
self.dir = dir
self.nodes = {} if dir else None
self.ttl = ttl
self.expiration = expiration
self.modified_index = index | [
"def",
"set",
"(",
"self",
",",
"index",
",",
"value",
"=",
"None",
",",
"dir",
"=",
"False",
",",
"ttl",
"=",
"None",
",",
"expiration",
"=",
"None",
")",
":",
"if",
"bool",
"(",
"dir",
")",
"is",
"(",
"value",
"is",
"not",
"None",
")",
":",
"raise",
"TypeError",
"(",
"'Choose one of value or directory'",
")",
"if",
"(",
"ttl",
"is",
"not",
"None",
")",
"is",
"(",
"expiration",
"is",
"None",
")",
":",
"raise",
"TypeError",
"(",
"'Both of ttl and expiration required'",
")",
"self",
".",
"value",
"=",
"value",
"if",
"self",
".",
"dir",
"!=",
"dir",
":",
"self",
".",
"dir",
"=",
"dir",
"self",
".",
"nodes",
"=",
"{",
"}",
"if",
"dir",
"else",
"None",
"self",
".",
"ttl",
"=",
"ttl",
"self",
".",
"expiration",
"=",
"expiration",
"self",
".",
"modified_index",
"=",
"index"
] | Updates the node data. | [
"Updates",
"the",
"node",
"data",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/mock.py#L54-L66 | train |
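The argument contract at the top of ``set`` requires exactly one of ``value``/``dir`` and both or neither of ``ttl``/``expiration``. A sketch, assuming the ``MockNode`` constructor accepts a key (its real signature is not shown here)::

    node = MockNode('/foo')
    node.set(1, value='bar')              # ok: plain value node
    node.set(2, dir=True)                 # ok: becomes a directory
    node.set(3, value='bar', dir=True)    # TypeError: choose one of value or directory
    node.set(4, value='bar', ttl=10)      # TypeError: expiration required with ttl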
sublee/etc | etc/adapters/mock.py | MockAdapter.make_result | def make_result(self, result_class, node=None, prev_node=None,
remember=True, key_chunks=None, notify=True, **kwargs):
"""Makes an etcd result.
If `remember` is ``True``, it keeps the result in the history and
triggers events if waiting. `key_chunks` is the result of
:func:`split_key` of the `node.key`. It is not required if `remember`
is ``False``. Otherwise, it is optional but recommended to eliminate
waste if the key chunks are already supplied.
"""
def canonicalize(node, **kwargs):
return None if node is None else node.canonicalize(**kwargs)
index = self.index
result = result_class(canonicalize(node, **kwargs),
canonicalize(prev_node, **kwargs), index)
if not remember:
return result
self.history[index] = result_class(
canonicalize(node, include_nodes=False),
canonicalize(prev_node, include_nodes=False), index)
key_chunks = key_chunks or split_key(node.key)
asymptotic_key_chunks = (key_chunks[:x + 1]
for x in xrange(len(key_chunks)))
event_keys = [(False, key_chunks)]
for _key_chunks in asymptotic_key_chunks:
exact = _key_chunks == key_chunks
self.indices.setdefault(_key_chunks, []).append((index, exact))
event_keys.append((True, _key_chunks))
if notify:
for event_key in event_keys:
try:
event = self.events.pop(event_key)
except KeyError:
pass
else:
event.set()
return result | python | def make_result(self, result_class, node=None, prev_node=None,
remember=True, key_chunks=None, notify=True, **kwargs):
"""Makes an etcd result.
If `remember` is ``True``, it keeps the result in the history and
triggers events if waiting. `key_chunks` is the result of
:func:`split_key` of the `node.key`. It is not required if `remember`
is ``False``. Otherwise, it is optional but recommended to eliminate
waste if the key chunks are already supplied.
"""
def canonicalize(node, **kwargs):
return None if node is None else node.canonicalize(**kwargs)
index = self.index
result = result_class(canonicalize(node, **kwargs),
canonicalize(prev_node, **kwargs), index)
if not remember:
return result
self.history[index] = result_class(
canonicalize(node, include_nodes=False),
canonicalize(prev_node, include_nodes=False), index)
key_chunks = key_chunks or split_key(node.key)
asymptotic_key_chunks = (key_chunks[:x + 1]
for x in xrange(len(key_chunks)))
event_keys = [(False, key_chunks)]
for _key_chunks in asymptotic_key_chunks:
exact = _key_chunks == key_chunks
self.indices.setdefault(_key_chunks, []).append((index, exact))
event_keys.append((True, _key_chunks))
if notify:
for event_key in event_keys:
try:
event = self.events.pop(event_key)
except KeyError:
pass
else:
event.set()
return result | [
"def",
"make_result",
"(",
"self",
",",
"result_class",
",",
"node",
"=",
"None",
",",
"prev_node",
"=",
"None",
",",
"remember",
"=",
"True",
",",
"key_chunks",
"=",
"None",
",",
"notify",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"canonicalize",
"(",
"node",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"None",
"if",
"node",
"is",
"None",
"else",
"node",
".",
"canonicalize",
"(",
"*",
"*",
"kwargs",
")",
"index",
"=",
"self",
".",
"index",
"result",
"=",
"result_class",
"(",
"canonicalize",
"(",
"node",
",",
"*",
"*",
"kwargs",
")",
",",
"canonicalize",
"(",
"prev_node",
",",
"*",
"*",
"kwargs",
")",
",",
"index",
")",
"if",
"not",
"remember",
":",
"return",
"result",
"self",
".",
"history",
"[",
"index",
"]",
"=",
"result_class",
"(",
"canonicalize",
"(",
"node",
",",
"include_nodes",
"=",
"False",
")",
",",
"canonicalize",
"(",
"prev_node",
",",
"include_nodes",
"=",
"False",
")",
",",
"index",
")",
"key_chunks",
"=",
"key_chunks",
"or",
"split_key",
"(",
"node",
".",
"key",
")",
"asymptotic_key_chunks",
"=",
"(",
"key_chunks",
"[",
":",
"x",
"+",
"1",
"]",
"for",
"x",
"in",
"xrange",
"(",
"len",
"(",
"key_chunks",
")",
")",
")",
"event_keys",
"=",
"[",
"(",
"False",
",",
"key_chunks",
")",
"]",
"for",
"_key_chunks",
"in",
"asymptotic_key_chunks",
":",
"exact",
"=",
"_key_chunks",
"==",
"key_chunks",
"self",
".",
"indices",
".",
"setdefault",
"(",
"_key_chunks",
",",
"[",
"]",
")",
".",
"append",
"(",
"(",
"index",
",",
"exact",
")",
")",
"event_keys",
".",
"append",
"(",
"(",
"True",
",",
"_key_chunks",
")",
")",
"if",
"notify",
":",
"for",
"event_key",
"in",
"event_keys",
":",
"try",
":",
"event",
"=",
"self",
".",
"events",
".",
"pop",
"(",
"event_key",
")",
"except",
"KeyError",
":",
"pass",
"else",
":",
"event",
".",
"set",
"(",
")",
"return",
"result"
] | Makes an etcd result.
If `remember` is ``True``, it keeps the result in the history and
triggers events if waiting. `key_chunks` is the result of
:func:`split_key` of the `node.key`. It is not required if `remember`
is ``False``. Otherwise, it is optional but recommended to eliminate
waste if the key chunks are already supplied. | [
"Makes",
"an",
"etcd",
"result",
"."
] | f2be64604da5af0d7739cfacf36f55712f0fc5cb | https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/adapters/mock.py#L124-L161 | train |
haum/hms_base | hms_base/client.py | Client.connect | def connect(self, host='localhost'):
"""Connect to the server and set everything up.
Args:
host: hostname to connect to
"""
# Connect
get_logger().info("Connecting to RabbitMQ server...")
self._conn = pika.BlockingConnection(
pika.ConnectionParameters(host=host))
self._channel = self._conn.channel()
# Exchanger
get_logger().info("Declaring topic exchanger {}...".format(
self.exchange))
self._channel.exchange_declare(exchange=self.exchange, type='topic')
# Create queue
get_logger().info("Creating RabbitMQ queue...")
result = self._channel.queue_declare(exclusive=True)
self._queue_name = result.method.queue
# Binding
if self.listen_all:
get_logger().info(
"Binding queue to exchanger {} (listen all)...".format(
self.exchange
)
)
self._channel.queue_bind(
exchange=self.exchange,
queue=self._queue_name,
routing_key='*'
)
else:
for routing_key in self.topics:
get_logger().info(
"Binding queue to exchanger {} "
"with routing key {}...".format(
self.exchange, routing_key)
)
self._channel.queue_bind(
exchange=self.exchange,
queue=self._queue_name,
routing_key=routing_key
)
# Callback
get_logger().info("Binding callback...")
self._channel.basic_consume(
self._callback, queue=self._queue_name, no_ack=True) | python | def connect(self, host='localhost'):
"""Connect to the server and set everything up.
Args:
host: hostname to connect to
"""
# Connect
get_logger().info("Connecting to RabbitMQ server...")
self._conn = pika.BlockingConnection(
pika.ConnectionParameters(host=host))
self._channel = self._conn.channel()
# Exchanger
get_logger().info("Declaring topic exchanger {}...".format(
self.exchange))
self._channel.exchange_declare(exchange=self.exchange, type='topic')
# Create queue
get_logger().info("Creating RabbitMQ queue...")
result = self._channel.queue_declare(exclusive=True)
self._queue_name = result.method.queue
# Binding
if self.listen_all:
get_logger().info(
"Binding queue to exchanger {} (listen all)...".format(
self.exchange
)
)
self._channel.queue_bind(
exchange=self.exchange,
queue=self._queue_name,
routing_key='*'
)
else:
for routing_key in self.topics:
get_logger().info(
"Binding queue to exchanger {} "
"with routing key {}...".format(
self.exchange, routing_key)
)
self._channel.queue_bind(
exchange=self.exchange,
queue=self._queue_name,
routing_key=routing_key
)
# Callback
get_logger().info("Binding callback...")
self._channel.basic_consume(
self._callback, queue=self._queue_name, no_ack=True) | [
"def",
"connect",
"(",
"self",
",",
"host",
"=",
"'localhost'",
")",
":",
"# Connect",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Connecting to RabbitMQ server...\"",
")",
"self",
".",
"_conn",
"=",
"pika",
".",
"BlockingConnection",
"(",
"pika",
".",
"ConnectionParameters",
"(",
"host",
"=",
"host",
")",
")",
"self",
".",
"_channel",
"=",
"self",
".",
"_conn",
".",
"channel",
"(",
")",
"# Exchanger",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Declaring topic exchanger {}...\"",
".",
"format",
"(",
"self",
".",
"exchange",
")",
")",
"self",
".",
"_channel",
".",
"exchange_declare",
"(",
"exchange",
"=",
"self",
".",
"exchange",
",",
"type",
"=",
"'topic'",
")",
"# Create queue",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Creating RabbitMQ queue...\"",
")",
"result",
"=",
"self",
".",
"_channel",
".",
"queue_declare",
"(",
"exclusive",
"=",
"True",
")",
"self",
".",
"_queue_name",
"=",
"result",
".",
"method",
".",
"queue",
"# Binding",
"if",
"self",
".",
"listen_all",
":",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Binding queue to exchanger {} (listen all)...\"",
".",
"format",
"(",
"self",
".",
"exchange",
")",
")",
"self",
".",
"_channel",
".",
"queue_bind",
"(",
"exchange",
"=",
"self",
".",
"exchange",
",",
"queue",
"=",
"self",
".",
"_queue_name",
",",
"routing_key",
"=",
"'*'",
")",
"else",
":",
"for",
"routing_key",
"in",
"self",
".",
"topics",
":",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Binding queue to exchanger {} \"",
"\"with routing key {}...\"",
".",
"format",
"(",
"self",
".",
"exchange",
",",
"routing_key",
")",
")",
"self",
".",
"_channel",
".",
"queue_bind",
"(",
"exchange",
"=",
"self",
".",
"exchange",
",",
"queue",
"=",
"self",
".",
"_queue_name",
",",
"routing_key",
"=",
"routing_key",
")",
"# Callback",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Binding callback...\"",
")",
"self",
".",
"_channel",
".",
"basic_consume",
"(",
"self",
".",
"_callback",
",",
"queue",
"=",
"self",
".",
"_queue_name",
",",
"no_ack",
"=",
"True",
")"
] | Connect to the server and set everything up.
Args:
host: hostname to connect to | [
"Connect",
"to",
"the",
"server",
"and",
"set",
"everything",
"up",
"."
] | 7c0aed961b43cba043c703102e503cb40db81f58 | https://github.com/haum/hms_base/blob/7c0aed961b43cba043c703102e503cb40db81f58/hms_base/client.py#L45-L106 | train |
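A hedged end-to-end sketch of the client; the constructor arguments are inferred from the attributes used above (``name``, ``exchange``, ``topics``) and may not match the real signature::

    client = Client(name='demo', exchange='haum', topics=['ping', 'chat'])
    client.connect(host='localhost')

    def on_message(client, topic, dct):
        print(topic, dct)

    client.listeners.append(on_message)
    client.publish('chat', {'text': 'hello'})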
haum/hms_base | hms_base/client.py | Client.publish | def publish(self, topic, dct):
"""Send a dict with internal routing key to the exchange.
Args:
topic: topic to publish the message to
dct: dict object to send
"""
get_logger().info("Publishing message {} on routing key "
"{}...".format(dct, topic))
self._channel.basic_publish(
exchange=self.exchange,
routing_key=topic,
body=json.dumps(dct)
) | python | def publish(self, topic, dct):
"""Send a dict with internal routing key to the exchange.
Args:
topic: topic to publish the message to
dct: dict object to send
"""
get_logger().info("Publishing message {} on routing key "
"{}...".format(dct, topic))
self._channel.basic_publish(
exchange=self.exchange,
routing_key=topic,
body=json.dumps(dct)
) | [
"def",
"publish",
"(",
"self",
",",
"topic",
",",
"dct",
")",
":",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Publishing message {} on routing key \"",
"\"{}...\"",
".",
"format",
"(",
"dct",
",",
"topic",
")",
")",
"self",
".",
"_channel",
".",
"basic_publish",
"(",
"exchange",
"=",
"self",
".",
"exchange",
",",
"routing_key",
"=",
"topic",
",",
"body",
"=",
"json",
".",
"dumps",
"(",
"dct",
")",
")"
] | Send a dict with internal routing key to the exchange.
Args:
topic: topic to publish the message to
dct: dict object to send | [
"Send",
"a",
"dict",
"with",
"internal",
"routing",
"key",
"to",
"the",
"exchange",
"."
] | 7c0aed961b43cba043c703102e503cb40db81f58 | https://github.com/haum/hms_base/blob/7c0aed961b43cba043c703102e503cb40db81f58/hms_base/client.py#L108-L123 | train |
haum/hms_base | hms_base/client.py | Client._callback | def _callback(self, ch, method, properties, body):
"""Internal method that will be called when receiving message."""
get_logger().info("Message received! Calling listeners...")
topic = method.routing_key
dct = json.loads(body.decode('utf-8'))
for listener in self.listeners:
listener(self, topic, dct) | python | def _callback(self, ch, method, properties, body):
"""Internal method that will be called when receiving message."""
get_logger().info("Message received! Calling listeners...")
topic = method.routing_key
dct = json.loads(body.decode('utf-8'))
for listener in self.listeners:
listener(self, topic, dct) | [
"def",
"_callback",
"(",
"self",
",",
"ch",
",",
"method",
",",
"properties",
",",
"body",
")",
":",
"get_logger",
"(",
")",
".",
"info",
"(",
"\"Message received! Calling listeners...\"",
")",
"topic",
"=",
"method",
".",
"routing_key",
"dct",
"=",
"json",
".",
"loads",
"(",
"body",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"for",
"listener",
"in",
"self",
".",
"listeners",
":",
"listener",
"(",
"self",
",",
"topic",
",",
"dct",
")"
] | Internal method that will be called when receiving message. | [
"Internal",
"method",
"that",
"will",
"be",
"called",
"when",
"receiving",
"message",
"."
] | 7c0aed961b43cba043c703102e503cb40db81f58 | https://github.com/haum/hms_base/blob/7c0aed961b43cba043c703102e503cb40db81f58/hms_base/client.py#L140-L149 | train |
haum/hms_base | hms_base/client.py | Client._handle_ping | def _handle_ping(client, topic, dct):
"""Internal method that will be called when receiving ping message."""
if dct['type'] == 'request':
resp = {
'type': 'answer',
'name': client.name,
'source': dct
}
client.publish('ping', resp) | python | def _handle_ping(client, topic, dct):
"""Internal method that will be called when receiving ping message."""
if dct['type'] == 'request':
resp = {
'type': 'answer',
'name': client.name,
'source': dct
}
client.publish('ping', resp) | [
"def",
"_handle_ping",
"(",
"client",
",",
"topic",
",",
"dct",
")",
":",
"if",
"dct",
"[",
"'type'",
"]",
"==",
"'request'",
":",
"resp",
"=",
"{",
"'type'",
":",
"'answer'",
",",
"'name'",
":",
"client",
".",
"name",
",",
"'source'",
":",
"dct",
"}",
"client",
".",
"publish",
"(",
"'ping'",
",",
"resp",
")"
] | Internal method that will be called when receiving ping message. | [
"Internal",
"method",
"that",
"will",
"be",
"called",
"when",
"receiving",
"ping",
"message",
"."
] | 7c0aed961b43cba043c703102e503cb40db81f58 | https://github.com/haum/hms_base/blob/7c0aed961b43cba043c703102e503cb40db81f58/hms_base/client.py#L153-L162 | train |
pmacosta/pexdoc | pexdoc/pcontracts.py | _create_argument_value_pairs | def _create_argument_value_pairs(func, *args, **kwargs):
"""
Create dictionary with argument names as keys and their passed values as values.
An empty dictionary is returned if an error is detected, such as more
arguments than in the function definition, argument(s) defined by position
and keyword, etc.
"""
# Capture parameters that have been explicitly specified in function call
try:
arg_dict = signature(func).bind_partial(*args, **kwargs).arguments
except TypeError:
return dict()
# Capture parameters that have not been explicitly specified
# but have default values
arguments = signature(func).parameters
for arg_name in arguments:
if (arguments[arg_name].default != Parameter.empty) and (
arguments[arg_name].name not in arg_dict
):
arg_dict[arguments[arg_name].name] = arguments[arg_name].default
return arg_dict | python | def _create_argument_value_pairs(func, *args, **kwargs):
"""
Create dictionary with argument names as keys and their passed values as values.
An empty dictionary is returned if an error is detected, such as more
arguments than in the function definition, argument(s) defined by position
and keyword, etc.
"""
# Capture parameters that have been explicitly specified in function call
try:
arg_dict = signature(func).bind_partial(*args, **kwargs).arguments
except TypeError:
return dict()
# Capture parameters that have not been explicitly specified
# but have default values
arguments = signature(func).parameters
for arg_name in arguments:
if (arguments[arg_name].default != Parameter.empty) and (
arguments[arg_name].name not in arg_dict
):
arg_dict[arguments[arg_name].name] = arguments[arg_name].default
return arg_dict | [
"def",
"_create_argument_value_pairs",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Capture parameters that have been explicitly specified in function call",
"try",
":",
"arg_dict",
"=",
"signature",
"(",
"func",
")",
".",
"bind_partial",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
".",
"arguments",
"except",
"TypeError",
":",
"return",
"dict",
"(",
")",
"# Capture parameters that have not been explicitly specified",
"# but have default values",
"arguments",
"=",
"signature",
"(",
"func",
")",
".",
"parameters",
"for",
"arg_name",
"in",
"arguments",
":",
"if",
"(",
"arguments",
"[",
"arg_name",
"]",
".",
"default",
"!=",
"Parameter",
".",
"empty",
")",
"and",
"(",
"arguments",
"[",
"arg_name",
"]",
".",
"name",
"not",
"in",
"arg_dict",
")",
":",
"arg_dict",
"[",
"arguments",
"[",
"arg_name",
"]",
".",
"name",
"]",
"=",
"arguments",
"[",
"arg_name",
"]",
".",
"default",
"return",
"arg_dict"
] | Create dictionary with argument names as keys and their passed values as values.
An empty dictionary is returned if an error is detected, such as more
arguments than in the function definition, argument(s) defined by position
and keyword, etc. | [
"Create",
"dictionary",
"with",
"argument",
"names",
"as",
"keys",
"and",
"their",
"passed",
"values",
"as",
"values",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pcontracts.py#L48-L69 | train |
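For illustration, the same name-to-value mapping built above can be reproduced directly with inspect.signature; the sample function is invented and only the standard library is used.

from inspect import Parameter, signature

def sample(a, b, c=3):
    return a + b + c

# Bind the explicitly passed arguments to their parameter names.
arg_dict = signature(sample).bind_partial(1, b=2).arguments
for param in signature(sample).parameters.values():
    # Fill in defaults for parameters that were not passed explicitly.
    if param.default is not Parameter.empty and param.name not in arg_dict:
        arg_dict[param.name] = param.default

print(dict(arg_dict))  # {'a': 1, 'b': 2, 'c': 3}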
pmacosta/pexdoc | pexdoc/pcontracts.py | _get_contract_exception_dict | def _get_contract_exception_dict(contract_msg):
"""Generate message for exception."""
# A pcontract-defined custom exception message is wrapped in a string
# that starts with '[START CONTRACT MSG:' and ends with
# '[STOP CONTRACT MSG]'. This is done to easily detect if an
# exception raised is from a custom contract and thus be able
# to easily retrieve the actual exception message
start_token = "[START CONTRACT MSG: "
stop_token = "[STOP CONTRACT MSG]"
# No custom contract
if contract_msg.find(start_token) == -1:
return {
"num": 0,
"msg": "Argument `*[argument_name]*` is not valid",
"type": RuntimeError,
"field": "argument_name",
}
# Custom contract
msg_start = contract_msg.find(start_token) + len(start_token)
contract_msg = contract_msg[msg_start:]
contract_name = contract_msg[: contract_msg.find("]")]
contract_msg = contract_msg[
contract_msg.find("]") + 1 : contract_msg.find(stop_token)
]
exdict = _CUSTOM_CONTRACTS[contract_name]
for exvalue in exdict.values(): # pragma: no branch
if exvalue["msg"] == contract_msg:
return exvalue | python | def _get_contract_exception_dict(contract_msg):
"""Generate message for exception."""
# A pcontract-defined custom exception message is wrapped in a string
# that starts with '[START CONTRACT MSG:' and ends with
# '[STOP CONTRACT MSG]'. This is done to easily detect if an
# exception raised is from a custom contract and thus be able
# to easily retrieve the actual exception message
start_token = "[START CONTRACT MSG: "
stop_token = "[STOP CONTRACT MSG]"
# No custom contract
if contract_msg.find(start_token) == -1:
return {
"num": 0,
"msg": "Argument `*[argument_name]*` is not valid",
"type": RuntimeError,
"field": "argument_name",
}
# Custom contract
msg_start = contract_msg.find(start_token) + len(start_token)
contract_msg = contract_msg[msg_start:]
contract_name = contract_msg[: contract_msg.find("]")]
contract_msg = contract_msg[
contract_msg.find("]") + 1 : contract_msg.find(stop_token)
]
exdict = _CUSTOM_CONTRACTS[contract_name]
for exvalue in exdict.values(): # pragma: no branch
if exvalue["msg"] == contract_msg:
return exvalue | [
"def",
"_get_contract_exception_dict",
"(",
"contract_msg",
")",
":",
"# A pcontract-defined custom exception message is wrapped in a string",
"# that starts with '[START CONTRACT MSG:' and ends with",
"# '[STOP CONTRACT MSG]'. This is done to easily detect if an",
"# exception raised is from a custom contract and thus be able",
"# to easily retrieve the actual exception message",
"start_token",
"=",
"\"[START CONTRACT MSG: \"",
"stop_token",
"=",
"\"[STOP CONTRACT MSG]\"",
"# No custom contract",
"if",
"contract_msg",
".",
"find",
"(",
"start_token",
")",
"==",
"-",
"1",
":",
"return",
"{",
"\"num\"",
":",
"0",
",",
"\"msg\"",
":",
"\"Argument `*[argument_name]*` is not valid\"",
",",
"\"type\"",
":",
"RuntimeError",
",",
"\"field\"",
":",
"\"argument_name\"",
",",
"}",
"# Custom contract",
"msg_start",
"=",
"contract_msg",
".",
"find",
"(",
"start_token",
")",
"+",
"len",
"(",
"start_token",
")",
"contract_msg",
"=",
"contract_msg",
"[",
"msg_start",
":",
"]",
"contract_name",
"=",
"contract_msg",
"[",
":",
"contract_msg",
".",
"find",
"(",
"\"]\"",
")",
"]",
"contract_msg",
"=",
"contract_msg",
"[",
"contract_msg",
".",
"find",
"(",
"\"]\"",
")",
"+",
"1",
":",
"contract_msg",
".",
"find",
"(",
"stop_token",
")",
"]",
"exdict",
"=",
"_CUSTOM_CONTRACTS",
"[",
"contract_name",
"]",
"for",
"exvalue",
"in",
"exdict",
".",
"values",
"(",
")",
":",
"# pragma: no branch",
"if",
"exvalue",
"[",
"\"msg\"",
"]",
"==",
"contract_msg",
":",
"return",
"exvalue"
] | Generate message for exception. | [
"Generate",
"message",
"for",
"exception",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pcontracts.py#L134-L161 | train |
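For illustration, the start/stop token convention described above can be unpacked with plain string slicing; the contract name and message in this sketch are made up.

start_token = "[START CONTRACT MSG: "
stop_token = "[STOP CONTRACT MSG]"
msg = (
    "[START CONTRACT MSG: file_name]"
    "File `*[fname]*` could not be found"
    "[STOP CONTRACT MSG]"
)

# Slice out the contract name and the wrapped message text.
body = msg[msg.find(start_token) + len(start_token):]
contract_name = body[:body.find("]")]
contract_msg = body[body.find("]") + 1:body.find(stop_token)]

print(contract_name)  # file_name
print(contract_msg)   # File `*[fname]*` could not be found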
pmacosta/pexdoc | pexdoc/pcontracts.py | _get_custom_contract | def _get_custom_contract(param_contract):
"""Return True if parameter contract is a custom contract, False otherwise."""
if not isinstance(param_contract, str):
return None
for custom_contract in _CUSTOM_CONTRACTS:
if re.search(r"\b{0}\b".format(custom_contract), param_contract):
return custom_contract
return None | python | def _get_custom_contract(param_contract):
"""Return True if parameter contract is a custom contract, False otherwise."""
if not isinstance(param_contract, str):
return None
for custom_contract in _CUSTOM_CONTRACTS:
if re.search(r"\b{0}\b".format(custom_contract), param_contract):
return custom_contract
return None | [
"def",
"_get_custom_contract",
"(",
"param_contract",
")",
":",
"if",
"not",
"isinstance",
"(",
"param_contract",
",",
"str",
")",
":",
"return",
"None",
"for",
"custom_contract",
"in",
"_CUSTOM_CONTRACTS",
":",
"if",
"re",
".",
"search",
"(",
"r\"\\b{0}\\b\"",
".",
"format",
"(",
"custom_contract",
")",
",",
"param_contract",
")",
":",
"return",
"custom_contract",
"return",
"None"
] | Return True if parameter contract is a custom contract, False otherwise. | [
"Return",
"True",
"if",
"parameter",
"contract",
"is",
"a",
"custom",
"contract",
"False",
"otherwise",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pcontracts.py#L164-L171 | train |
pmacosta/pexdoc | pexdoc/pcontracts.py | _get_replacement_token | def _get_replacement_token(msg):
"""Extract replacement token from exception message."""
return (
None
if not re.search(r"\*\[[\w|\W]+\]\*", msg)
else re.search(r"\*\[[\w|\W]+\]\*", msg).group()[2:-2]
) | python | def _get_replacement_token(msg):
"""Extract replacement token from exception message."""
return (
None
if not re.search(r"\*\[[\w|\W]+\]\*", msg)
else re.search(r"\*\[[\w|\W]+\]\*", msg).group()[2:-2]
) | [
"def",
"_get_replacement_token",
"(",
"msg",
")",
":",
"return",
"(",
"None",
"if",
"not",
"re",
".",
"search",
"(",
"r\"\\*\\[[\\w|\\W]+\\]\\*\"",
",",
"msg",
")",
"else",
"re",
".",
"search",
"(",
"r\"\\*\\[[\\w|\\W]+\\]\\*\"",
",",
"msg",
")",
".",
"group",
"(",
")",
"[",
"2",
":",
"-",
"2",
"]",
")"
] | Extract replacement token from exception message. | [
"Extract",
"replacement",
"token",
"from",
"exception",
"message",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pcontracts.py#L304-L310 | train |
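For illustration, the replacement token is whatever sits between *[ and ]* in an exception message; the message below is a sample value.

import re

msg = "Argument `*[argument_name]*` is not valid"
match = re.search(r"\*\[[\w|\W]+\]\*", msg)
# Strip the *[ and ]* delimiters from the matched span.
print(match.group()[2:-2] if match else None)  # argument_name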
contains-io/typet | typet/objects.py | _get_type_name | def _get_type_name(type_):
# type: (type) -> str
"""Return a displayable name for the type.
Args:
type_: A class object.
Returns:
A string value describing the class name that can be used in a natural
language sentence.
"""
name = repr(type_)
if name.startswith("<"):
name = getattr(type_, "__qualname__", getattr(type_, "__name__", ""))
return name.rsplit(".", 1)[-1] or repr(type_) | python | def _get_type_name(type_):
# type: (type) -> str
"""Return a displayable name for the type.
Args:
type_: A class object.
Returns:
A string value describing the class name that can be used in a natural
language sentence.
"""
name = repr(type_)
if name.startswith("<"):
name = getattr(type_, "__qualname__", getattr(type_, "__name__", ""))
return name.rsplit(".", 1)[-1] or repr(type_) | [
"def",
"_get_type_name",
"(",
"type_",
")",
":",
"# type: (type) -> str",
"name",
"=",
"repr",
"(",
"type_",
")",
"if",
"name",
".",
"startswith",
"(",
"\"<\"",
")",
":",
"name",
"=",
"getattr",
"(",
"type_",
",",
"\"__qualname__\"",
",",
"getattr",
"(",
"type_",
",",
"\"__name__\"",
",",
"\"\"",
")",
")",
"return",
"name",
".",
"rsplit",
"(",
"\".\"",
",",
"1",
")",
"[",
"-",
"1",
"]",
"or",
"repr",
"(",
"type_",
")"
] | Return a displayable name for the type.
Args:
type_: A class object.
Returns:
A string value describing the class name that can be used in a natural
language sentence. | [
"Return",
"a",
"displayable",
"name",
"for",
"the",
"type",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/objects.py#L46-L60 | train |
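For illustration, repr() of a plain class starts with '<', so the fallback path picks up __qualname__; the Widget class here is invented for the sketch.

class Widget(object):
    pass

name = repr(Widget)
if name.startswith("<"):
    name = getattr(Widget, "__qualname__", getattr(Widget, "__name__", ""))
print(name.rsplit(".", 1)[-1] or repr(Widget))  # Widget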
contains-io/typet | typet/objects.py | _get_class_frame_source | def _get_class_frame_source(class_name):
# type: (str) -> Optional[str]
"""Return the source code for a class by checking the frame stack.
This is necessary because it is not possible to get the source of a class
being created by a metaclass directly.
Args:
class_name: The class to look for on the stack.
Returns:
The source code for the requested class if the class was found and the
source was accessible.
"""
for frame_info in inspect.stack():
try:
with open(frame_info[1]) as fp:
src = "".join(fp.readlines()[frame_info[2] - 1 :])
except IOError:
continue
if re.search(r"\bclass\b\s+\b{}\b".format(class_name), src):
reader = six.StringIO(src).readline
tokens = tokenize.generate_tokens(reader)
source_tokens = []
indent_level = 0
base_indent_level = 0
has_base_level = False
for token, value, _, _, _ in tokens: # type: ignore
source_tokens.append((token, value))
if token == tokenize.INDENT:
indent_level += 1
elif token == tokenize.DEDENT:
indent_level -= 1
if has_base_level and indent_level <= base_indent_level:
return (
tokenize.untokenize(source_tokens),
frame_info[0].f_globals,
frame_info[0].f_locals,
)
elif not has_base_level:
has_base_level = True
base_indent_level = indent_level
raise TypeError(
'Unable to retrieve source for class "{}"'.format(class_name)
) | python | def _get_class_frame_source(class_name):
# type: (str) -> Optional[str]
"""Return the source code for a class by checking the frame stack.
This is necessary because it is not possible to get the source of a class
being created by a metaclass directly.
Args:
class_name: The class to look for on the stack.
Returns:
The source code for the requested class if the class was found and the
source was accessible.
"""
for frame_info in inspect.stack():
try:
with open(frame_info[1]) as fp:
src = "".join(fp.readlines()[frame_info[2] - 1 :])
except IOError:
continue
if re.search(r"\bclass\b\s+\b{}\b".format(class_name), src):
reader = six.StringIO(src).readline
tokens = tokenize.generate_tokens(reader)
source_tokens = []
indent_level = 0
base_indent_level = 0
has_base_level = False
for token, value, _, _, _ in tokens: # type: ignore
source_tokens.append((token, value))
if token == tokenize.INDENT:
indent_level += 1
elif token == tokenize.DEDENT:
indent_level -= 1
if has_base_level and indent_level <= base_indent_level:
return (
tokenize.untokenize(source_tokens),
frame_info[0].f_globals,
frame_info[0].f_locals,
)
elif not has_base_level:
has_base_level = True
base_indent_level = indent_level
raise TypeError(
'Unable to retrieve source for class "{}"'.format(class_name)
) | [
"def",
"_get_class_frame_source",
"(",
"class_name",
")",
":",
"# type: (str) -> Optional[str]",
"for",
"frame_info",
"in",
"inspect",
".",
"stack",
"(",
")",
":",
"try",
":",
"with",
"open",
"(",
"frame_info",
"[",
"1",
"]",
")",
"as",
"fp",
":",
"src",
"=",
"\"\"",
".",
"join",
"(",
"fp",
".",
"readlines",
"(",
")",
"[",
"frame_info",
"[",
"2",
"]",
"-",
"1",
":",
"]",
")",
"except",
"IOError",
":",
"continue",
"if",
"re",
".",
"search",
"(",
"r\"\\bclass\\b\\s+\\b{}\\b\"",
".",
"format",
"(",
"class_name",
")",
",",
"src",
")",
":",
"reader",
"=",
"six",
".",
"StringIO",
"(",
"src",
")",
".",
"readline",
"tokens",
"=",
"tokenize",
".",
"generate_tokens",
"(",
"reader",
")",
"source_tokens",
"=",
"[",
"]",
"indent_level",
"=",
"0",
"base_indent_level",
"=",
"0",
"has_base_level",
"=",
"False",
"for",
"token",
",",
"value",
",",
"_",
",",
"_",
",",
"_",
"in",
"tokens",
":",
"# type: ignore",
"source_tokens",
".",
"append",
"(",
"(",
"token",
",",
"value",
")",
")",
"if",
"token",
"==",
"tokenize",
".",
"INDENT",
":",
"indent_level",
"+=",
"1",
"elif",
"token",
"==",
"tokenize",
".",
"DEDENT",
":",
"indent_level",
"-=",
"1",
"if",
"has_base_level",
"and",
"indent_level",
"<=",
"base_indent_level",
":",
"return",
"(",
"tokenize",
".",
"untokenize",
"(",
"source_tokens",
")",
",",
"frame_info",
"[",
"0",
"]",
".",
"f_globals",
",",
"frame_info",
"[",
"0",
"]",
".",
"f_locals",
",",
")",
"elif",
"not",
"has_base_level",
":",
"has_base_level",
"=",
"True",
"base_indent_level",
"=",
"indent_level",
"raise",
"TypeError",
"(",
"'Unable to retrieve source for class \"{}\"'",
".",
"format",
"(",
"class_name",
")",
")"
] | Return the source code for a class by checking the frame stack.
This is necessary because it is not possible to get the source of a class
being created by a metaclass directly.
Args:
class_name: The class to look for on the stack.
Returns:
The source code for the requested class if the class was found and the
source was accessible. | [
"Return",
"the",
"source",
"code",
"for",
"a",
"class",
"by",
"checking",
"the",
"frame",
"stack",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/objects.py#L63-L107 | train |
contains-io/typet | typet/objects.py | _is_propertyable | def _is_propertyable(
names, # type: List[str]
attrs, # type: Dict[str, Any]
annotations, # type: Dict[str, type]
attr, # Dict[str, Any]
):
# type: (...) -> bool
"""Determine if an attribute can be replaced with a property.
Args:
names: The complete list of all attribute names for the class.
attrs: The attribute dict returned by __prepare__.
annotations: A mapping of all defined annotations for the class.
attr: The attribute to test.
Returns:
True if the attribute can be replaced with a property; else False.
"""
return (
attr in annotations
and not attr.startswith("_")
and not attr.isupper()
and "__{}".format(attr) not in names
and not isinstance(getattr(attrs, attr, None), types.MethodType)
) | python | def _is_propertyable(
names, # type: List[str]
attrs, # type: Dict[str, Any]
annotations, # type: Dict[str, type]
attr, # Dict[str, Any]
):
# type: (...) -> bool
"""Determine if an attribute can be replaced with a property.
Args:
names: The complete list of all attribute names for the class.
attrs: The attribute dict returned by __prepare__.
annotations: A mapping of all defined annotations for the class.
attr: The attribute to test.
Returns:
True if the attribute can be replaced with a property; else False.
"""
return (
attr in annotations
and not attr.startswith("_")
and not attr.isupper()
and "__{}".format(attr) not in names
and not isinstance(getattr(attrs, attr, None), types.MethodType)
) | [
"def",
"_is_propertyable",
"(",
"names",
",",
"# type: List[str]",
"attrs",
",",
"# type: Dict[str, Any]",
"annotations",
",",
"# type: Dict[str, type]",
"attr",
",",
"# Dict[str, Any]",
")",
":",
"# type: (...) -> bool",
"return",
"(",
"attr",
"in",
"annotations",
"and",
"not",
"attr",
".",
"startswith",
"(",
"\"_\"",
")",
"and",
"not",
"attr",
".",
"isupper",
"(",
")",
"and",
"\"__{}\"",
".",
"format",
"(",
"attr",
")",
"not",
"in",
"names",
"and",
"not",
"isinstance",
"(",
"getattr",
"(",
"attrs",
",",
"attr",
",",
"None",
")",
",",
"types",
".",
"MethodType",
")",
")"
] | Determine if an attribute can be replaced with a property.
Args:
names: The complete list of all attribute names for the class.
attrs: The attribute dict returned by __prepare__.
annotations: A mapping of all defined annotations for the class.
attr: The attribute to test.
Returns:
True if the attribute can be replaced with a property; else False. | [
"Determine",
"if",
"an",
"attribute",
"can",
"be",
"replaced",
"with",
"a",
"property",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/objects.py#L110-L134 | train |
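For illustration, the same filtering rules can be applied to a toy attribute set; the annotations and attrs dictionaries below are invented, and only the public, non-constant, annotated name survives.

import types

annotations = {"name": str, "MAX_SIZE": int, "_hidden": int}
attrs = {"name": None, "MAX_SIZE": 10, "_hidden": 0}
names = list(attrs) + list(annotations)

def is_propertyable(attr):
    # Mirrors the checks in the record above.
    return (
        attr in annotations
        and not attr.startswith("_")
        and not attr.isupper()
        and "__{}".format(attr) not in names
        and not isinstance(getattr(attrs, attr, None), types.MethodType)
    )

print([a for a in annotations if is_propertyable(a)])  # ['name']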
contains-io/typet | typet/objects.py | _create_typed_object_meta | def _create_typed_object_meta(get_fset):
# type: (Callable[[str, str, Type[_T]], Callable[[_T], None]]) -> type
"""Create a metaclass for typed objects.
Args:
get_fset: A function that takes three parameters: the name of an
attribute, the name of the private attribute that holds the
    property data, and a type. This function must return an object method that
accepts a value.
Returns:
A metaclass that reads annotations from a class definition and creates
properties for annotated, public, non-constant, non-method attributes
that will guarantee the type of the stored value matches the
annotation.
"""
def _get_fget(attr, private_attr, type_):
# type: (str, str, Type[_T]) -> Callable[[], Any]
"""Create a property getter method for an attribute.
Args:
attr: The name of the attribute that will be retrieved.
private_attr: The name of the attribute that will store any data
related to the attribute.
type_: The annotated type defining what values can be stored in the
attribute.
Returns:
A function that takes self and retrieves the private attribute from
self.
"""
def _fget(self):
# type: (...) -> Any
"""Get attribute from self without revealing the private name."""
try:
return getattr(self, private_attr)
except AttributeError:
raise AttributeError(
"'{}' object has no attribute '{}'".format(
_get_type_name(type_), attr
)
)
return _fget
class _AnnotatedObjectMeta(type):
"""A metaclass that reads annotations from a class definition."""
def __new__(
mcs, # type: Type[_AnnotatedObjectMeta]
name, # type: str
bases, # type: List[type]
attrs, # type: Dict[str, Any]
**kwargs # type: Dict[str, Any]
):
# type: (...) -> type
"""Create class objs that replaces annotated attrs with properties.
Args:
mcs: The class object being created.
name: The name of the class to create.
bases: The list of all base classes for the new class.
attrs: The list of all attributes for the new class from the
definition.
Returns:
A new class instance with the expected base classes and
attributes, but with annotated, public, non-constant,
non-method attributes replaced by property objects that
validate against the annotated type.
"""
annotations = attrs.get("__annotations__", {})
use_comment_type_hints = (
not annotations and attrs.get("__module__") != __name__
)
if use_comment_type_hints:
frame_source = _get_class_frame_source(name)
annotations = get_type_hints(*frame_source)
names = list(attrs) + list(annotations)
typed_attrs = {}
for attr in names:
typed_attrs[attr] = attrs.get(attr)
if _is_propertyable(names, attrs, annotations, attr):
private_attr = "__{}".format(attr)
if attr in attrs:
typed_attrs[private_attr] = attrs[attr]
type_ = (
Optional[annotations[attr]]
if not use_comment_type_hints
and attr in attrs
and attrs[attr] is None
else annotations[attr]
)
typed_attrs[attr] = property(
_get_fget(attr, private_attr, type_),
get_fset(attr, private_attr, type_),
)
properties = [
attr
for attr in annotations
if _is_propertyable(names, attrs, annotations, attr)
]
typed_attrs["_tp__typed_properties"] = properties
typed_attrs["_tp__required_typed_properties"] = [
attr
for attr in properties
if (
attr not in attrs
or attrs[attr] is None
and use_comment_type_hints
)
and NoneType not in getattr(annotations[attr], "__args__", ())
]
return super(_AnnotatedObjectMeta, mcs).__new__( # type: ignore
mcs, name, bases, typed_attrs, **kwargs
)
return _AnnotatedObjectMeta | python | def _create_typed_object_meta(get_fset):
# type: (Callable[[str, str, Type[_T]], Callable[[_T], None]]) -> type
"""Create a metaclass for typed objects.
Args:
get_fset: A function that takes three parameters: the name of an
attribute, the name of the private attribute that holds the
    property data, and a type. This function must return an object method that
accepts a value.
Returns:
A metaclass that reads annotations from a class definition and creates
properties for annotated, public, non-constant, non-method attributes
that will guarantee the type of the stored value matches the
annotation.
"""
def _get_fget(attr, private_attr, type_):
# type: (str, str, Type[_T]) -> Callable[[], Any]
"""Create a property getter method for an attribute.
Args:
attr: The name of the attribute that will be retrieved.
private_attr: The name of the attribute that will store any data
related to the attribute.
type_: The annotated type defining what values can be stored in the
attribute.
Returns:
A function that takes self and retrieves the private attribute from
self.
"""
def _fget(self):
# type: (...) -> Any
"""Get attribute from self without revealing the private name."""
try:
return getattr(self, private_attr)
except AttributeError:
raise AttributeError(
"'{}' object has no attribute '{}'".format(
_get_type_name(type_), attr
)
)
return _fget
class _AnnotatedObjectMeta(type):
"""A metaclass that reads annotations from a class definition."""
def __new__(
mcs, # type: Type[_AnnotatedObjectMeta]
name, # type: str
bases, # type: List[type]
attrs, # type: Dict[str, Any]
**kwargs # type: Dict[str, Any]
):
# type: (...) -> type
"""Create class objs that replaces annotated attrs with properties.
Args:
mcs: The class object being created.
name: The name of the class to create.
bases: The list of all base classes for the new class.
attrs: The list of all attributes for the new class from the
definition.
Returns:
A new class instance with the expected base classes and
attributes, but with annotated, public, non-constant,
non-method attributes replaced by property objects that
validate against the annotated type.
"""
annotations = attrs.get("__annotations__", {})
use_comment_type_hints = (
not annotations and attrs.get("__module__") != __name__
)
if use_comment_type_hints:
frame_source = _get_class_frame_source(name)
annotations = get_type_hints(*frame_source)
names = list(attrs) + list(annotations)
typed_attrs = {}
for attr in names:
typed_attrs[attr] = attrs.get(attr)
if _is_propertyable(names, attrs, annotations, attr):
private_attr = "__{}".format(attr)
if attr in attrs:
typed_attrs[private_attr] = attrs[attr]
type_ = (
Optional[annotations[attr]]
if not use_comment_type_hints
and attr in attrs
and attrs[attr] is None
else annotations[attr]
)
typed_attrs[attr] = property(
_get_fget(attr, private_attr, type_),
get_fset(attr, private_attr, type_),
)
properties = [
attr
for attr in annotations
if _is_propertyable(names, attrs, annotations, attr)
]
typed_attrs["_tp__typed_properties"] = properties
typed_attrs["_tp__required_typed_properties"] = [
attr
for attr in properties
if (
attr not in attrs
or attrs[attr] is None
and use_comment_type_hints
)
and NoneType not in getattr(annotations[attr], "__args__", ())
]
return super(_AnnotatedObjectMeta, mcs).__new__( # type: ignore
mcs, name, bases, typed_attrs, **kwargs
)
return _AnnotatedObjectMeta | [
"def",
"_create_typed_object_meta",
"(",
"get_fset",
")",
":",
"# type: (Callable[[str, str, Type[_T]], Callable[[_T], None]]) -> type",
"def",
"_get_fget",
"(",
"attr",
",",
"private_attr",
",",
"type_",
")",
":",
"# type: (str, str, Type[_T]) -> Callable[[], Any]",
"\"\"\"Create a property getter method for an attribute.\n\n Args:\n attr: The name of the attribute that will be retrieved.\n private_attr: The name of the attribute that will store any data\n related to the attribute.\n type_: The annotated type defining what values can be stored in the\n attribute.\n\n Returns:\n A function that takes self and retrieves the private attribute from\n self.\n \"\"\"",
"def",
"_fget",
"(",
"self",
")",
":",
"# type: (...) -> Any",
"\"\"\"Get attribute from self without revealing the private name.\"\"\"",
"try",
":",
"return",
"getattr",
"(",
"self",
",",
"private_attr",
")",
"except",
"AttributeError",
":",
"raise",
"AttributeError",
"(",
"\"'{}' object has no attribute '{}'\"",
".",
"format",
"(",
"_get_type_name",
"(",
"type_",
")",
",",
"attr",
")",
")",
"return",
"_fget",
"class",
"_AnnotatedObjectMeta",
"(",
"type",
")",
":",
"\"\"\"A metaclass that reads annotations from a class definition.\"\"\"",
"def",
"__new__",
"(",
"mcs",
",",
"# type: Type[_AnnotatedObjectMeta]",
"name",
",",
"# type: str",
"bases",
",",
"# type: List[type]",
"attrs",
",",
"# type: Dict[str, Any]",
"*",
"*",
"kwargs",
"# type: Dict[str, Any]",
")",
":",
"# type: (...) -> type",
"\"\"\"Create class objs that replaces annotated attrs with properties.\n\n Args:\n mcs: The class object being created.\n name: The name of the class to create.\n bases: The list of all base classes for the new class.\n attrs: The list of all attributes for the new class from the\n definition.\n\n Returns:\n A new class instance with the expected base classes and\n attributes, but with annotated, public, non-constant,\n non-method attributes replaced by property objects that\n validate against the annotated type.\n \"\"\"",
"annotations",
"=",
"attrs",
".",
"get",
"(",
"\"__annotations__\"",
",",
"{",
"}",
")",
"use_comment_type_hints",
"=",
"(",
"not",
"annotations",
"and",
"attrs",
".",
"get",
"(",
"\"__module__\"",
")",
"!=",
"__name__",
")",
"if",
"use_comment_type_hints",
":",
"frame_source",
"=",
"_get_class_frame_source",
"(",
"name",
")",
"annotations",
"=",
"get_type_hints",
"(",
"*",
"frame_source",
")",
"names",
"=",
"list",
"(",
"attrs",
")",
"+",
"list",
"(",
"annotations",
")",
"typed_attrs",
"=",
"{",
"}",
"for",
"attr",
"in",
"names",
":",
"typed_attrs",
"[",
"attr",
"]",
"=",
"attrs",
".",
"get",
"(",
"attr",
")",
"if",
"_is_propertyable",
"(",
"names",
",",
"attrs",
",",
"annotations",
",",
"attr",
")",
":",
"private_attr",
"=",
"\"__{}\"",
".",
"format",
"(",
"attr",
")",
"if",
"attr",
"in",
"attrs",
":",
"typed_attrs",
"[",
"private_attr",
"]",
"=",
"attrs",
"[",
"attr",
"]",
"type_",
"=",
"(",
"Optional",
"[",
"annotations",
"[",
"attr",
"]",
"]",
"if",
"not",
"use_comment_type_hints",
"and",
"attr",
"in",
"attrs",
"and",
"attrs",
"[",
"attr",
"]",
"is",
"None",
"else",
"annotations",
"[",
"attr",
"]",
")",
"typed_attrs",
"[",
"attr",
"]",
"=",
"property",
"(",
"_get_fget",
"(",
"attr",
",",
"private_attr",
",",
"type_",
")",
",",
"get_fset",
"(",
"attr",
",",
"private_attr",
",",
"type_",
")",
",",
")",
"properties",
"=",
"[",
"attr",
"for",
"attr",
"in",
"annotations",
"if",
"_is_propertyable",
"(",
"names",
",",
"attrs",
",",
"annotations",
",",
"attr",
")",
"]",
"typed_attrs",
"[",
"\"_tp__typed_properties\"",
"]",
"=",
"properties",
"typed_attrs",
"[",
"\"_tp__required_typed_properties\"",
"]",
"=",
"[",
"attr",
"for",
"attr",
"in",
"properties",
"if",
"(",
"attr",
"not",
"in",
"attrs",
"or",
"attrs",
"[",
"attr",
"]",
"is",
"None",
"and",
"use_comment_type_hints",
")",
"and",
"NoneType",
"not",
"in",
"getattr",
"(",
"annotations",
"[",
"attr",
"]",
",",
"\"__args__\"",
",",
"(",
")",
")",
"]",
"return",
"super",
"(",
"_AnnotatedObjectMeta",
",",
"mcs",
")",
".",
"__new__",
"(",
"# type: ignore",
"mcs",
",",
"name",
",",
"bases",
",",
"typed_attrs",
",",
"*",
"*",
"kwargs",
")",
"return",
"_AnnotatedObjectMeta"
] | Create a metaclass for typed objects.
Args:
get_fset: A function that takes three parameters: the name of an
attribute, the name of the private attribute that holds the
property data, and a type. This function must return an object method that
accepts a value.
Returns:
A metaclass that reads annotations from a class definition and creates
properties for annotated, public, non-constant, non-method attributes
that will guarantee the type of the stored value matches the
annotation. | [
"Create",
"a",
"metaclass",
"for",
"typed",
"objects",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/objects.py#L137-L256 | train |
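For illustration, this sketch shows the shape of the get_fset callback that the factory above expects: it receives the public attribute name, the private storage name and the annotated type, and must return a setter. The simple_fset and Holder names are invented, and the validation is a stand-in rather than the library's real type checking.

def simple_fset(attr, private_attr, type_):
    def _fset(self, value):
        # A real implementation would do richer validation against type_.
        if not isinstance(value, type_):
            raise TypeError("{} must be {}".format(attr, type_.__name__))
        setattr(self, private_attr, value)
    return _fset


class Holder(object):
    pass


setter = simple_fset("count", "__count", int)
obj = Holder()
setter(obj, 5)
print(getattr(obj, "__count"))  # 5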
contains-io/typet | typet/objects.py | _AnnotatedObjectComparisonMixin._tp__get_typed_properties | def _tp__get_typed_properties(self):
"""Return a tuple of typed attrs that can be used for comparisons.
Raises:
NotImplementedError: Raised if this class was mixed into a class
that was not created by _AnnotatedObjectMeta.
"""
try:
return tuple(getattr(self, p) for p in self._tp__typed_properties)
except AttributeError:
raise NotImplementedError | python | def _tp__get_typed_properties(self):
"""Return a tuple of typed attrs that can be used for comparisons.
Raises:
NotImplementedError: Raised if this class was mixed into a class
that was not created by _AnnotatedObjectMeta.
"""
try:
return tuple(getattr(self, p) for p in self._tp__typed_properties)
except AttributeError:
raise NotImplementedError | [
"def",
"_tp__get_typed_properties",
"(",
"self",
")",
":",
"try",
":",
"return",
"tuple",
"(",
"getattr",
"(",
"self",
",",
"p",
")",
"for",
"p",
"in",
"self",
".",
"_tp__typed_properties",
")",
"except",
"AttributeError",
":",
"raise",
"NotImplementedError"
] | Return a tuple of typed attrs that can be used for comparisons.
Raises:
NotImplementedError: Raised if this class was mixed into a class
that was not created by _AnnotatedObjectMeta. | [
"Return",
"a",
"tuple",
"of",
"typed",
"attrs",
"that",
"can",
"be",
"used",
"for",
"comparisons",
"."
] | ad5087c567af84db299eca186776e1cee228e442 | https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/objects.py#L398-L408 | train |
dmonroy/chilero | chilero/web/__init__.py | run | def run(cls, routes, *args, **kwargs): # pragma: no cover
"""
Run a web application.
:param cls: Application class
:param routes: list of routes
:param args: additional arguments
:param kwargs: additional keyword arguments
:return: None
"""
app = init(cls, routes, *args, **kwargs)
HOST = os.getenv('HOST', '0.0.0.0')
PORT = int(os.getenv('PORT', 8000))
aiohttp.web.run_app(app, port=PORT, host=HOST) | python | def run(cls, routes, *args, **kwargs): # pragma: no cover
"""
Run a web application.
:param cls: Application class
:param routes: list of routes
:param args: additional arguments
:param kwargs: additional keyword arguments
:return: None
"""
app = init(cls, routes, *args, **kwargs)
HOST = os.getenv('HOST', '0.0.0.0')
PORT = int(os.getenv('PORT', 8000))
aiohttp.web.run_app(app, port=PORT, host=HOST) | [
"def",
"run",
"(",
"cls",
",",
"routes",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# pragma: no cover",
"app",
"=",
"init",
"(",
"cls",
",",
"routes",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"HOST",
"=",
"os",
".",
"getenv",
"(",
"'HOST'",
",",
"'0.0.0.0'",
")",
"PORT",
"=",
"int",
"(",
"os",
".",
"getenv",
"(",
"'PORT'",
",",
"8000",
")",
")",
"aiohttp",
".",
"web",
".",
"run_app",
"(",
"app",
",",
"port",
"=",
"PORT",
",",
"host",
"=",
"HOST",
")"
] | Run a web application.
:param cls: Application class
:param routes: list of routes
:param args: additional arguments
:param kwargs: additional keyword arguments
:return: None | [
"Run",
"a",
"web",
"application",
"."
] | 8f1118a60cb7eab3f9ad31cb8a14b30bc102893d | https://github.com/dmonroy/chilero/blob/8f1118a60cb7eab3f9ad31cb8a14b30bc102893d/chilero/web/__init__.py#L25-L41 | train |
vecnet/vecnet.openmalaria | vecnet/openmalaria/scenario/entomology.py | Vectors.add | def add(self, vector, InterventionAnophelesParams=None):
"""
Add a vector to entomology section.
vector is either ElementTree or xml snippet
InterventionAnophelesParams is an anophelesParams section for every GVI, ITN and IRS intervention
already defined in the scenario.xml
"""
# TODO
# 1. If there are GVI interventions, for every GVI, add anophelesParams section.
# (gvi_anophelesParams field in AnophelesSnippets models)
# 2. If there are ITN interventions, for every ITN, add anophelesParams section
# (itn_anophelesParams field in AnophelesSnippets models)
# 3. If there are IRS interventions, for every IRS section add anophelesParams section
# (irs_anophelesParams field in AnophelesSnippets models)
assert isinstance(vector, six.string_types)
et = ElementTree.fromstring(vector)
# check if it is valid vector
mosquito = Vector(et)
assert isinstance(mosquito.mosquito, str)
assert isinstance(mosquito.propInfected, float)
assert len(mosquito.seasonality.monthlyValues) == 12
index = len(self.et.findall("anopheles"))
self.et.insert(index, et) | python | def add(self, vector, InterventionAnophelesParams=None):
"""
Add a vector to entomology section.
vector is either ElementTree or xml snippet
InterventionAnophelesParams is an anophelesParams section for every GVI, ITN and IRS intervention
already defined in the scenario.xml
"""
# TODO
# 1. If there are GVI interventions, for every GVI, add anophelesParams section.
# (gvi_anophelesParams field in AnophelesSnippets models)
# 2. If there are ITN interventions, for every ITN, add anophelesParams section
# (itn_anophelesParams field in AnophelesSnippets models)
# 3. If there are IRS interventions, for every IRS section add anophelesParams section
# (irs_anophelesParams field in AnophelesSnippets models)
assert isinstance(vector, six.string_types)
et = ElementTree.fromstring(vector)
# check if it is valid vector
mosquito = Vector(et)
assert isinstance(mosquito.mosquito, str)
assert isinstance(mosquito.propInfected, float)
assert len(mosquito.seasonality.monthlyValues) == 12
index = len(self.et.findall("anopheles"))
self.et.insert(index, et) | [
"def",
"add",
"(",
"self",
",",
"vector",
",",
"InterventionAnophelesParams",
"=",
"None",
")",
":",
"# TODO",
"# 1. If there are GVI interventions, for every GVI, add anophelesParams section.",
"# (gvi_anophelesParams field in AnophelesSnippets models)",
"# 2. If there are ITN interventions, for every ITN, add anophelesParams section",
"# (itn_anophelesParams field in AnophelesSnippets models)",
"# 3. If there are IRS interventions, for every IRS section add anophelesParams section",
"# (irs_anophelesParams field in AnophelesSnippets models)",
"assert",
"isinstance",
"(",
"vector",
",",
"six",
".",
"string_types",
")",
"et",
"=",
"ElementTree",
".",
"fromstring",
"(",
"vector",
")",
"# check if it is valid vector",
"mosquito",
"=",
"Vector",
"(",
"et",
")",
"assert",
"isinstance",
"(",
"mosquito",
".",
"mosquito",
",",
"str",
")",
"assert",
"isinstance",
"(",
"mosquito",
".",
"propInfected",
",",
"float",
")",
"assert",
"len",
"(",
"mosquito",
".",
"seasonality",
".",
"monthlyValues",
")",
"==",
"12",
"index",
"=",
"len",
"(",
"self",
".",
"et",
".",
"findall",
"(",
"\"anopheles\"",
")",
")",
"self",
".",
"et",
".",
"insert",
"(",
"index",
",",
"et",
")"
] | Add a vector to entomology section.
vector is either ElementTree or xml snippet
InterventionAnophelesParams is an anophelesParams section for every GVI, ITN and IRS intervention
already defined in the scenario.xml | [
"Add",
"a",
"vector",
"to",
"entomology",
"section",
".",
"vector",
"is",
"either",
"ElementTree",
"or",
"xml",
"snippet"
] | 795bc9d1b81a6c664f14879edda7a7c41188e95a | https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/entomology.py#L197-L222 | train |
pmacosta/pexdoc | pexdoc/exdoc.py | _format_msg | def _format_msg(text, width, indent=0, prefix=""):
r"""
Format exception message.
Replace newline characters \n with ``\n``, ` with \` and then wrap text as
needed
"""
text = repr(text).replace("`", "\\`").replace("\\n", " ``\\n`` ")
sindent = " " * indent if not prefix else prefix
wrapped_text = textwrap.wrap(text, width, subsequent_indent=sindent)
# [1:-1] eliminates quotes generated by repr in first line
return ("\n".join(wrapped_text))[1:-1].rstrip() | python | def _format_msg(text, width, indent=0, prefix=""):
r"""
Format exception message.
Replace newline characters \n with ``\n``, ` with \` and then wrap text as
needed
"""
text = repr(text).replace("`", "\\`").replace("\\n", " ``\\n`` ")
sindent = " " * indent if not prefix else prefix
wrapped_text = textwrap.wrap(text, width, subsequent_indent=sindent)
# [1:-1] eliminates quotes generated by repr in first line
return ("\n".join(wrapped_text))[1:-1].rstrip() | [
"def",
"_format_msg",
"(",
"text",
",",
"width",
",",
"indent",
"=",
"0",
",",
"prefix",
"=",
"\"\"",
")",
":",
"text",
"=",
"repr",
"(",
"text",
")",
".",
"replace",
"(",
"\"`\"",
",",
"\"\\\\`\"",
")",
".",
"replace",
"(",
"\"\\\\n\"",
",",
"\" ``\\\\n`` \"",
")",
"sindent",
"=",
"\" \"",
"*",
"indent",
"if",
"not",
"prefix",
"else",
"prefix",
"wrapped_text",
"=",
"textwrap",
".",
"wrap",
"(",
"text",
",",
"width",
",",
"subsequent_indent",
"=",
"sindent",
")",
"# [1:-1] eliminates quotes generated by repr in first line",
"return",
"(",
"\"\\n\"",
".",
"join",
"(",
"wrapped_text",
")",
")",
"[",
"1",
":",
"-",
"1",
"]",
".",
"rstrip",
"(",
")"
] | r"""
Format exception message.
Replace newline characters \n with ``\n``, ` with \` and then wrap text as
needed | [
"r",
"Format",
"exception",
"message",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L44-L55 | train |
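For illustration, the escape-then-wrap treatment above applied to a short sample message; the message text is invented and only the standard library is used.

import textwrap

text = "Argument `value` is not valid\nsecond line"
# Escape backticks, mark literal newlines, then wrap to the target width.
text = repr(text).replace("`", "\\`").replace("\\n", " ``\\n`` ")
wrapped = textwrap.wrap(text, 40, subsequent_indent="   ")
# [1:-1] drops the quotes introduced by repr().
print(("\n".join(wrapped))[1:-1].rstrip())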
pmacosta/pexdoc | pexdoc/exdoc.py | _validate_fname | def _validate_fname(fname, arg_name):
"""Validate that a string is a valid file name."""
if fname is not None:
msg = "Argument `{0}` is not valid".format(arg_name)
if (not isinstance(fname, str)) or (isinstance(fname, str) and ("\0" in fname)):
raise RuntimeError(msg)
try:
if not os.path.exists(fname):
os.access(fname, os.W_OK)
except (TypeError, ValueError): # pragma: no cover
raise RuntimeError(msg) | python | def _validate_fname(fname, arg_name):
"""Validate that a string is a valid file name."""
if fname is not None:
msg = "Argument `{0}` is not valid".format(arg_name)
if (not isinstance(fname, str)) or (isinstance(fname, str) and ("\0" in fname)):
raise RuntimeError(msg)
try:
if not os.path.exists(fname):
os.access(fname, os.W_OK)
except (TypeError, ValueError): # pragma: no cover
raise RuntimeError(msg) | [
"def",
"_validate_fname",
"(",
"fname",
",",
"arg_name",
")",
":",
"if",
"fname",
"is",
"not",
"None",
":",
"msg",
"=",
"\"Argument `{0}` is not valid\"",
".",
"format",
"(",
"arg_name",
")",
"if",
"(",
"not",
"isinstance",
"(",
"fname",
",",
"str",
")",
")",
"or",
"(",
"isinstance",
"(",
"fname",
",",
"str",
")",
"and",
"(",
"\"\\0\"",
"in",
"fname",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"msg",
")",
"try",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"fname",
")",
":",
"os",
".",
"access",
"(",
"fname",
",",
"os",
".",
"W_OK",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"# pragma: no cover",
"raise",
"RuntimeError",
"(",
"msg",
")"
] | Validate that a string is a valid file name. | [
"Validate",
"that",
"a",
"string",
"is",
"a",
"valid",
"file",
"name",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L58-L68 | train |
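For illustration, the two failure modes the check above guards against are a non-string value and an embedded NUL character; looks_valid is an invented helper that returns a boolean instead of raising.

import os

def looks_valid(fname):
    if not isinstance(fname, str) or "\0" in fname:
        return False
    try:
        if not os.path.exists(fname):
            # Probe writability without creating the file.
            os.access(fname, os.W_OK)
    except (TypeError, ValueError):
        return False
    return True

print(looks_valid("notes.txt"))   # True
print(looks_valid("bad\0name"))   # False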
pmacosta/pexdoc | pexdoc/exdoc.py | ExDoc._build_ex_tree | def _build_ex_tree(self):
"""Construct exception tree from trace."""
# Load exception data into tree structure
sep = self._exh_obj.callables_separator
data = self._exh_obj.exceptions_db
if not data:
raise RuntimeError("Exceptions database is empty")
# Add root node to exceptions, needed when tracing done
# through test runner which is excluded from callable path
for item in data:
item["name"] = "root{sep}{name}".format(sep=sep, name=item["name"])
self._tobj = ptrie.Trie(sep)
try:
self._tobj.add_nodes(data)
except ValueError as eobj:
if str(eobj).startswith("Illegal node name"):
raise RuntimeError("Exceptions do not have a common callable")
raise
# Find closest root node to first multi-leaf branching or first
# callable with exceptions and make that the root node
node = self._tobj.root_name
while (len(self._tobj.get_children(node)) == 1) and (
not self._tobj.get_data(node)
):
node = self._tobj.get_children(node)[0]
if not self._tobj.is_root(node): # pragma: no branch
self._tobj.make_root(node)
nsep = self._tobj.node_separator
prefix = nsep.join(node.split(self._tobj.node_separator)[:-1])
self._tobj.delete_prefix(prefix)
self._print_ex_tree() | python | def _build_ex_tree(self):
"""Construct exception tree from trace."""
# Load exception data into tree structure
sep = self._exh_obj.callables_separator
data = self._exh_obj.exceptions_db
if not data:
raise RuntimeError("Exceptions database is empty")
# Add root node to exceptions, needed when tracing done
# through test runner which is excluded from callable path
for item in data:
item["name"] = "root{sep}{name}".format(sep=sep, name=item["name"])
self._tobj = ptrie.Trie(sep)
try:
self._tobj.add_nodes(data)
except ValueError as eobj:
if str(eobj).startswith("Illegal node name"):
raise RuntimeError("Exceptions do not have a common callable")
raise
# Find closest root node to first multi-leaf branching or first
# callable with exceptions and make that the root node
node = self._tobj.root_name
while (len(self._tobj.get_children(node)) == 1) and (
not self._tobj.get_data(node)
):
node = self._tobj.get_children(node)[0]
if not self._tobj.is_root(node): # pragma: no branch
self._tobj.make_root(node)
nsep = self._tobj.node_separator
prefix = nsep.join(node.split(self._tobj.node_separator)[:-1])
self._tobj.delete_prefix(prefix)
self._print_ex_tree() | [
"def",
"_build_ex_tree",
"(",
"self",
")",
":",
"# Load exception data into tree structure",
"sep",
"=",
"self",
".",
"_exh_obj",
".",
"callables_separator",
"data",
"=",
"self",
".",
"_exh_obj",
".",
"exceptions_db",
"if",
"not",
"data",
":",
"raise",
"RuntimeError",
"(",
"\"Exceptions database is empty\"",
")",
"# Add root node to exceptions, needed when tracing done",
"# through test runner which is excluded from callable path",
"for",
"item",
"in",
"data",
":",
"item",
"[",
"\"name\"",
"]",
"=",
"\"root{sep}{name}\"",
".",
"format",
"(",
"sep",
"=",
"sep",
",",
"name",
"=",
"item",
"[",
"\"name\"",
"]",
")",
"self",
".",
"_tobj",
"=",
"ptrie",
".",
"Trie",
"(",
"sep",
")",
"try",
":",
"self",
".",
"_tobj",
".",
"add_nodes",
"(",
"data",
")",
"except",
"ValueError",
"as",
"eobj",
":",
"if",
"str",
"(",
"eobj",
")",
".",
"startswith",
"(",
"\"Illegal node name\"",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Exceptions do not have a common callable\"",
")",
"raise",
"# Find closest root node to first multi-leaf branching or first",
"# callable with exceptions and make that the root node",
"node",
"=",
"self",
".",
"_tobj",
".",
"root_name",
"while",
"(",
"len",
"(",
"self",
".",
"_tobj",
".",
"get_children",
"(",
"node",
")",
")",
"==",
"1",
")",
"and",
"(",
"not",
"self",
".",
"_tobj",
".",
"get_data",
"(",
"node",
")",
")",
":",
"node",
"=",
"self",
".",
"_tobj",
".",
"get_children",
"(",
"node",
")",
"[",
"0",
"]",
"if",
"not",
"self",
".",
"_tobj",
".",
"is_root",
"(",
"node",
")",
":",
"# pragma: no branch",
"self",
".",
"_tobj",
".",
"make_root",
"(",
"node",
")",
"nsep",
"=",
"self",
".",
"_tobj",
".",
"node_separator",
"prefix",
"=",
"nsep",
".",
"join",
"(",
"node",
".",
"split",
"(",
"self",
".",
"_tobj",
".",
"node_separator",
")",
"[",
":",
"-",
"1",
"]",
")",
"self",
".",
"_tobj",
".",
"delete_prefix",
"(",
"prefix",
")",
"self",
".",
"_print_ex_tree",
"(",
")"
] | Construct exception tree from trace. | [
"Construct",
"exception",
"tree",
"from",
"trace",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L324-L354 | train |
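For illustration, the re-rooting step above walks past single-child, data-free ancestors until it reaches the first interesting node; the toy children/data dictionaries below stand in for the ptrie tree object and are invented for the sketch.

children = {
    "root": ["root/mod"],
    "root/mod": ["root/mod/func"],
    "root/mod/func": ["root/mod/func/a", "root/mod/func/b"],
    "root/mod/func/a": [],
    "root/mod/func/b": [],
}
data = {name: [] for name in children}

node = "root"
# Skip nodes with exactly one child and no exception data attached.
while len(children[node]) == 1 and not data[node]:
    node = children[node][0]

print(node)  # root/mod/func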
pmacosta/pexdoc | pexdoc/exdoc.py | ExDoc._build_module_db | def _build_module_db(self):
"""
Build database of module callables sorted by line number.
The database is a dictionary whose keys are module file names and
whose values are lists of dictionaries containing name and line
number of callables in that module
"""
tdict = collections.defaultdict(lambda: [])
for callable_name, callable_dict in self._exh_obj.callables_db.items():
fname, line_no = callable_dict["code_id"]
cname = (
"{cls_name}.__init__".format(cls_name=callable_name)
if callable_dict["type"] == "class"
else callable_name
)
tdict[fname].append({"name": cname, "line": line_no})
for fname in tdict.keys():
self._module_obj_db[fname] = sorted(
tdict[fname], key=lambda idict: idict["line"]
) | python | def _build_module_db(self):
"""
Build database of module callables sorted by line number.
The database is a dictionary whose keys are module file names and
whose values are lists of dictionaries containing name and line
number of callables in that module
"""
tdict = collections.defaultdict(lambda: [])
for callable_name, callable_dict in self._exh_obj.callables_db.items():
fname, line_no = callable_dict["code_id"]
cname = (
"{cls_name}.__init__".format(cls_name=callable_name)
if callable_dict["type"] == "class"
else callable_name
)
tdict[fname].append({"name": cname, "line": line_no})
for fname in tdict.keys():
self._module_obj_db[fname] = sorted(
tdict[fname], key=lambda idict: idict["line"]
) | [
"def",
"_build_module_db",
"(",
"self",
")",
":",
"tdict",
"=",
"collections",
".",
"defaultdict",
"(",
"lambda",
":",
"[",
"]",
")",
"for",
"callable_name",
",",
"callable_dict",
"in",
"self",
".",
"_exh_obj",
".",
"callables_db",
".",
"items",
"(",
")",
":",
"fname",
",",
"line_no",
"=",
"callable_dict",
"[",
"\"code_id\"",
"]",
"cname",
"=",
"(",
"\"{cls_name}.__init__\"",
".",
"format",
"(",
"cls_name",
"=",
"callable_name",
")",
"if",
"callable_dict",
"[",
"\"type\"",
"]",
"==",
"\"class\"",
"else",
"callable_name",
")",
"tdict",
"[",
"fname",
"]",
".",
"append",
"(",
"{",
"\"name\"",
":",
"cname",
",",
"\"line\"",
":",
"line_no",
"}",
")",
"for",
"fname",
"in",
"tdict",
".",
"keys",
"(",
")",
":",
"self",
".",
"_module_obj_db",
"[",
"fname",
"]",
"=",
"sorted",
"(",
"tdict",
"[",
"fname",
"]",
",",
"key",
"=",
"lambda",
"idict",
":",
"idict",
"[",
"\"line\"",
"]",
")"
] | Build database of module callables sorted by line number.
The database is a dictionary whose keys are module file names and
whose values are lists of dictionaries containing name and line
number of callables in that module | [
"Build",
"database",
"of",
"module",
"callables",
"sorted",
"by",
"line",
"number",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L356-L376 | train |
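For illustration, the grouping-and-sorting step above applied to a toy callables database; the module and callable names are invented.

import collections

callables_db = {
    "mod.func_b": {"code_id": ("mod.py", 40), "type": "func"},
    "mod.MyClass": {"code_id": ("mod.py", 10), "type": "class"},
}

tdict = collections.defaultdict(list)
for name, info in callables_db.items():
    fname, line_no = info["code_id"]
    # Classes are keyed by their constructor, as in the record above.
    cname = "{}.__init__".format(name) if info["type"] == "class" else name
    tdict[fname].append({"name": cname, "line": line_no})

module_db = {
    fname: sorted(entries, key=lambda d: d["line"])
    for fname, entries in tdict.items()
}
print([d["name"] for d in module_db["mod.py"]])
# ['mod.MyClass.__init__', 'mod.func_b']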
pmacosta/pexdoc | pexdoc/exdoc.py | ExDoc._process_exlist | def _process_exlist(self, exc, raised):
"""Remove raised info from exception message and create separate list for it."""
if (not raised) or (raised and exc.endswith("*")):
return exc[:-1] if exc.endswith("*") else exc
return None | python | def _process_exlist(self, exc, raised):
"""Remove raised info from exception message and create separate list for it."""
if (not raised) or (raised and exc.endswith("*")):
return exc[:-1] if exc.endswith("*") else exc
return None | [
"def",
"_process_exlist",
"(",
"self",
",",
"exc",
",",
"raised",
")",
":",
"if",
"(",
"not",
"raised",
")",
"or",
"(",
"raised",
"and",
"exc",
".",
"endswith",
"(",
"\"*\"",
")",
")",
":",
"return",
"exc",
"[",
":",
"-",
"1",
"]",
"if",
"exc",
".",
"endswith",
"(",
"\"*\"",
")",
"else",
"exc",
"return",
"None"
] | Remove raised info from exception message and create separate list for it. | [
"Remove",
"raised",
"info",
"from",
"exception",
"message",
"and",
"create",
"separate",
"list",
"for",
"it",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L391-L395 | train |
pmacosta/pexdoc | pexdoc/exdoc.py | ExDoc._set_depth | def _set_depth(self, depth):
"""Depth setter."""
if depth and (
(not isinstance(depth, int)) or (isinstance(depth, int) and (depth < 0))
):
raise RuntimeError("Argument `depth` is not valid")
self._depth = depth | python | def _set_depth(self, depth):
"""Depth setter."""
if depth and (
(not isinstance(depth, int)) or (isinstance(depth, int) and (depth < 0))
):
raise RuntimeError("Argument `depth` is not valid")
self._depth = depth | [
"def",
"_set_depth",
"(",
"self",
",",
"depth",
")",
":",
"if",
"depth",
"and",
"(",
"(",
"not",
"isinstance",
"(",
"depth",
",",
"int",
")",
")",
"or",
"(",
"isinstance",
"(",
"depth",
",",
"int",
")",
"and",
"(",
"depth",
"<",
"0",
")",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `depth` is not valid\"",
")",
"self",
".",
"_depth",
"=",
"depth"
] | Depth setter. | [
"Depth",
"setter",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L397-L403 | train |
pmacosta/pexdoc | pexdoc/exdoc.py | ExDoc._set_exclude | def _set_exclude(self, exclude):
"""Exclude setter."""
if exclude and (
(not isinstance(exclude, list))
or (
isinstance(exclude, list)
and any([not isinstance(item, str) for item in exclude])
)
):
raise RuntimeError("Argument `exclude` is not valid")
self._exclude = exclude | python | def _set_exclude(self, exclude):
"""Exclude setter."""
if exclude and (
(not isinstance(exclude, list))
or (
isinstance(exclude, list)
and any([not isinstance(item, str) for item in exclude])
)
):
raise RuntimeError("Argument `exclude` is not valid")
self._exclude = exclude | [
"def",
"_set_exclude",
"(",
"self",
",",
"exclude",
")",
":",
"if",
"exclude",
"and",
"(",
"(",
"not",
"isinstance",
"(",
"exclude",
",",
"list",
")",
")",
"or",
"(",
"isinstance",
"(",
"exclude",
",",
"list",
")",
"and",
"any",
"(",
"[",
"not",
"isinstance",
"(",
"item",
",",
"str",
")",
"for",
"item",
"in",
"exclude",
"]",
")",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `exclude` is not valid\"",
")",
"self",
".",
"_exclude",
"=",
"exclude"
] | Exclude setter. | [
"Exclude",
"setter",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L405-L415 | train |
pmacosta/pexdoc | pexdoc/exdoc.py | ExDoc.get_sphinx_autodoc | def get_sphinx_autodoc(
self,
depth=None,
exclude=None,
width=72,
error=False,
raised=False,
no_comment=False,
):
r"""
Return exception list in `reStructuredText`_ auto-determining callable name.
:param depth: Hierarchy levels to include in the exceptions list
(overrides default **depth** argument; see
:py:attr:`pexdoc.ExDoc.depth`). If None exceptions
at all depths are included
:type depth: non-negative integer or None
:param exclude: List of (potentially partial) module and callable
names to exclude from exceptions list (overrides
default **exclude** argument, see
:py:attr:`pexdoc.ExDoc.exclude`). If None all
callables are included
:type exclude: list of strings or None
:param width: Maximum width of the lines of text (minimum 40)
:type width: integer
:param error: Flag that indicates whether an exception should be
raised if the callable is not found in the callables
exceptions database (True) or not (False)
:type error: boolean
:param raised: Flag that indicates whether only exceptions that
were raised (and presumably caught) should be
documented (True) or all registered exceptions should
be documented (False)
:type raised: boolean
:param no_comment: Flag that indicates whether a `reStructuredText`_
comment labeling the callable (method, function or
class property) should be printed (False) or not
(True) before the exceptions documentation
:type no_comment: boolean
:raises:
* RuntimeError (Argument \\`depth\\` is not valid)
* RuntimeError (Argument \\`error\\` is not valid)
* RuntimeError (Argument \\`exclude\\` is not valid)
* RuntimeError (Argument \\`no_comment\\` is not valid)
* RuntimeError (Argument \\`raised\\` is not valid)
* RuntimeError (Argument \\`width\\` is not valid)
* RuntimeError (Callable not found in exception list: *[name]*)
* RuntimeError (Unable to determine callable name)
"""
# This code is cog-specific: cog code file name is the module
# file name, a plus (+), and then the line number where the
# cog function is
frame = sys._getframe(1)
index = frame.f_code.co_filename.rfind("+")
fname = os.path.abspath(frame.f_code.co_filename[:index])
# Find name of callable based on module name and line number
# within that module, then get the exceptions by using the
# get_sphinx_doc() method with this information
line_num = int(frame.f_code.co_filename[index + 1 :])
module_db = self._module_obj_db[fname]
names = [callable_dict["name"] for callable_dict in module_db]
line_nums = [callable_dict["line"] for callable_dict in module_db]
name = names[bisect.bisect(line_nums, line_num) - 1]
return self.get_sphinx_doc(
name=name,
depth=depth,
exclude=exclude,
width=width,
error=error,
raised=raised,
no_comment=no_comment,
) | python | def get_sphinx_autodoc(
self,
depth=None,
exclude=None,
width=72,
error=False,
raised=False,
no_comment=False,
):
r"""
Return exception list in `reStructuredText`_ auto-determining callable name.
:param depth: Hierarchy levels to include in the exceptions list
(overrides default **depth** argument; see
:py:attr:`pexdoc.ExDoc.depth`). If None exceptions
at all depths are included
:type depth: non-negative integer or None
:param exclude: List of (potentially partial) module and callable
names to exclude from exceptions list (overrides
default **exclude** argument, see
:py:attr:`pexdoc.ExDoc.exclude`). If None all
callables are included
:type exclude: list of strings or None
:param width: Maximum width of the lines of text (minimum 40)
:type width: integer
:param error: Flag that indicates whether an exception should be
raised if the callable is not found in the callables
exceptions database (True) or not (False)
:type error: boolean
:param raised: Flag that indicates whether only exceptions that
were raised (and presumably caught) should be
documented (True) or all registered exceptions should
be documented (False)
:type raised: boolean
:param no_comment: Flag that indicates whether a `reStructuredText`_
comment labeling the callable (method, function or
class property) should be printed (False) or not
(True) before the exceptions documentation
:type no_comment: boolean
:raises:
* RuntimeError (Argument \\`depth\\` is not valid)
* RuntimeError (Argument \\`error\\` is not valid)
* RuntimeError (Argument \\`exclude\\` is not valid)
* RuntimeError (Argument \\`no_comment\\` is not valid)
* RuntimeError (Argument \\`raised\\` is not valid)
* RuntimeError (Argument \\`width\\` is not valid)
* RuntimeError (Callable not found in exception list: *[name]*)
* RuntimeError (Unable to determine callable name)
"""
# This code is cog-specific: cog code file name is the module
# file name, a plus (+), and then the line number where the
# cog function is
frame = sys._getframe(1)
index = frame.f_code.co_filename.rfind("+")
fname = os.path.abspath(frame.f_code.co_filename[:index])
# Find name of callable based on module name and line number
# within that module, then get the exceptions by using the
# get_sphinx_doc() method with this information
line_num = int(frame.f_code.co_filename[index + 1 :])
module_db = self._module_obj_db[fname]
names = [callable_dict["name"] for callable_dict in module_db]
line_nums = [callable_dict["line"] for callable_dict in module_db]
name = names[bisect.bisect(line_nums, line_num) - 1]
return self.get_sphinx_doc(
name=name,
depth=depth,
exclude=exclude,
width=width,
error=error,
raised=raised,
no_comment=no_comment,
) | [
"def",
"get_sphinx_autodoc",
"(",
"self",
",",
"depth",
"=",
"None",
",",
"exclude",
"=",
"None",
",",
"width",
"=",
"72",
",",
"error",
"=",
"False",
",",
"raised",
"=",
"False",
",",
"no_comment",
"=",
"False",
",",
")",
":",
"# This code is cog-specific: cog code file name is the module",
"# file name, a plus (+), and then the line number where the",
"# cog function is",
"frame",
"=",
"sys",
".",
"_getframe",
"(",
"1",
")",
"index",
"=",
"frame",
".",
"f_code",
".",
"co_filename",
".",
"rfind",
"(",
"\"+\"",
")",
"fname",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"frame",
".",
"f_code",
".",
"co_filename",
"[",
":",
"index",
"]",
")",
"# Find name of callable based on module name and line number",
"# within that module, then get the exceptions by using the",
"# get_sphinx_doc() method with this information",
"line_num",
"=",
"int",
"(",
"frame",
".",
"f_code",
".",
"co_filename",
"[",
"index",
"+",
"1",
":",
"]",
")",
"module_db",
"=",
"self",
".",
"_module_obj_db",
"[",
"fname",
"]",
"names",
"=",
"[",
"callable_dict",
"[",
"\"name\"",
"]",
"for",
"callable_dict",
"in",
"module_db",
"]",
"line_nums",
"=",
"[",
"callable_dict",
"[",
"\"line\"",
"]",
"for",
"callable_dict",
"in",
"module_db",
"]",
"name",
"=",
"names",
"[",
"bisect",
".",
"bisect",
"(",
"line_nums",
",",
"line_num",
")",
"-",
"1",
"]",
"return",
"self",
".",
"get_sphinx_doc",
"(",
"name",
"=",
"name",
",",
"depth",
"=",
"depth",
",",
"exclude",
"=",
"exclude",
",",
"width",
"=",
"width",
",",
"error",
"=",
"error",
",",
"raised",
"=",
"raised",
",",
"no_comment",
"=",
"no_comment",
",",
")"
] | r"""
Return exception list in `reStructuredText`_ auto-determining callable name.
:param depth: Hierarchy levels to include in the exceptions list
(overrides default **depth** argument; see
:py:attr:`pexdoc.ExDoc.depth`). If None exceptions
at all depths are included
:type depth: non-negative integer or None
:param exclude: List of (potentially partial) module and callable
names to exclude from exceptions list (overrides
default **exclude** argument, see
:py:attr:`pexdoc.ExDoc.exclude`). If None all
callables are included
:type exclude: list of strings or None
:param width: Maximum width of the lines of text (minimum 40)
:type width: integer
:param error: Flag that indicates whether an exception should be
raised if the callable is not found in the callables
exceptions database (True) or not (False)
:type error: boolean
:param raised: Flag that indicates whether only exceptions that
were raised (and presumably caught) should be
documented (True) or all registered exceptions should
be documented (False)
:type raised: boolean
:param no_comment: Flag that indicates whether a `reStructuredText`_
comment labeling the callable (method, function or
class property) should be printed (False) or not
(True) before the exceptions documentation
:type no_comment: boolean
:raises:
* RuntimeError (Argument \\`depth\\` is not valid)
* RuntimeError (Argument \\`error\\` is not valid)
* RuntimeError (Argument \\`exclude\\` is not valid)
* RuntimeError (Argument \\`no_comment\\` is not valid)
* RuntimeError (Argument \\`raised\\` is not valid)
* RuntimeError (Argument \\`width\\` is not valid)
* RuntimeError (Callable not found in exception list: *[name]*)
* RuntimeError (Unable to determine callable name) | [
"r",
"Return",
"exception",
"list",
"in",
"reStructuredText",
"_",
"auto",
"-",
"determining",
"callable",
"name",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exdoc.py#L417-L503 | train |
a1ezzz/wasp-general | wasp_general/types/bytearray.py | WFixedSizeByteArray.resize | def resize(self, size):
""" Grow this array to specified length. This array can't be shrinked
:param size: new length
:return: None
"""
if size < len(self):
raise ValueError("Value is out of bound. Array can't be shrinked")
current_size = self.__size
for i in range(size - current_size):
self.__array.append(WBinArray(0, self.__class__.byte_size))
self.__size = size | python | def resize(self, size):
""" Grow this array to specified length. This array can't be shrinked
:param size: new length
:return: None
"""
if size < len(self):
raise ValueError("Value is out of bound. Array can't be shrinked")
current_size = self.__size
for i in range(size - current_size):
self.__array.append(WBinArray(0, self.__class__.byte_size))
self.__size = size | [
"def",
"resize",
"(",
"self",
",",
"size",
")",
":",
"if",
"size",
"<",
"len",
"(",
"self",
")",
":",
"raise",
"ValueError",
"(",
"\"Value is out of bound. Array can't be shrinked\"",
")",
"current_size",
"=",
"self",
".",
"__size",
"for",
"i",
"in",
"range",
"(",
"size",
"-",
"current_size",
")",
":",
"self",
".",
"__array",
".",
"append",
"(",
"WBinArray",
"(",
"0",
",",
"self",
".",
"__class__",
".",
"byte_size",
")",
")",
"self",
".",
"__size",
"=",
"size"
] | Grow this array to specified length. This array can't be shrinked
:param size: new length
:return: None | [
"Grow",
"this",
"array",
"to",
"specified",
"length",
".",
"This",
"array",
"can",
"t",
"be",
"shrinked"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/types/bytearray.py#L83-L94 | train |
a1ezzz/wasp-general | wasp_general/types/bytearray.py | WFixedSizeByteArray.swipe | def swipe(self):
""" Mirror current array value in reverse. Bytes that had greater index will have lesser index, and
	vice-versa. This method doesn't change this array. It creates a new one and returns it as a result.
:return: WFixedSizeByteArray
"""
result = WFixedSizeByteArray(len(self))
for i in range(len(self)):
result[len(self) - i - 1] = self[i]
return result | python | def swipe(self):
""" Mirror current array value in reverse. Bytes that had greater index will have lesser index, and
	vice-versa. This method doesn't change this array. It creates a new one and returns it as a result.
:return: WFixedSizeByteArray
"""
result = WFixedSizeByteArray(len(self))
for i in range(len(self)):
result[len(self) - i - 1] = self[i]
return result | [
"def",
"swipe",
"(",
"self",
")",
":",
"result",
"=",
"WFixedSizeByteArray",
"(",
"len",
"(",
"self",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
")",
")",
":",
"result",
"[",
"len",
"(",
"self",
")",
"-",
"i",
"-",
"1",
"]",
"=",
"self",
"[",
"i",
"]",
"return",
"result"
] | Mirror current array value in reverse. Bytes that had greater index will have lesser index, and
	vice-versa. This method doesn't change this array. It creates a new one and returns it as a result.
:return: WFixedSizeByteArray | [
"Mirror",
"current",
"array",
"value",
"in",
"reverse",
".",
"Bytes",
"that",
"had",
"greater",
"index",
"will",
"have",
"lesser",
"index",
"and",
"vice",
"-",
"versa",
".",
"This",
"method",
"doesn",
"t",
"change",
"this",
"array",
".",
"It",
"creates",
"a",
"new",
"one",
"and",
"return",
"it",
"as",
"a",
"result",
"."
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/types/bytearray.py#L140-L149 | train |
a1ezzz/wasp-general | wasp_general/mime.py | mime_type | def mime_type(filename):
""" Guess mime type for the given file name
	Note: this implementation uses the python_magic package, which is not thread-safe; as a workaround, a global lock is
	used so that it can work in a threaded environment
:param filename: file name to guess
:return: str
"""
# TODO: write lock-free mime_type function
try:
__mime_lock.acquire()
extension = filename.split(".")
extension = extension[len(extension) - 1]
if extension == "woff2":
return "application/font-woff2"
if extension == "css":
return "text/css"
m = magic.from_file(filename, mime=True)
m = m.decode() if isinstance(m, bytes) else m # compatibility fix, some versions return bytes some - str
if m == "text/plain":
guessed_type = mimetypes.guess_type(filename)[0] # for js-detection
if guessed_type:
return guessed_type
return m
finally:
__mime_lock.release() | python | def mime_type(filename):
""" Guess mime type for the given file name
	Note: this implementation uses the python_magic package, which is not thread-safe; as a workaround, a global lock is
	used so that it can work in a threaded environment
:param filename: file name to guess
:return: str
"""
# TODO: write lock-free mime_type function
try:
__mime_lock.acquire()
extension = filename.split(".")
extension = extension[len(extension) - 1]
if extension == "woff2":
return "application/font-woff2"
if extension == "css":
return "text/css"
m = magic.from_file(filename, mime=True)
m = m.decode() if isinstance(m, bytes) else m # compatibility fix, some versions return bytes some - str
if m == "text/plain":
guessed_type = mimetypes.guess_type(filename)[0] # for js-detection
if guessed_type:
return guessed_type
return m
finally:
__mime_lock.release() | [
"def",
"mime_type",
"(",
"filename",
")",
":",
"# TODO: write lock-free mime_type function",
"try",
":",
"__mime_lock",
".",
"acquire",
"(",
")",
"extension",
"=",
"filename",
".",
"split",
"(",
"\".\"",
")",
"extension",
"=",
"extension",
"[",
"len",
"(",
"extension",
")",
"-",
"1",
"]",
"if",
"extension",
"==",
"\"woff2\"",
":",
"return",
"\"application/font-woff2\"",
"if",
"extension",
"==",
"\"css\"",
":",
"return",
"\"text/css\"",
"m",
"=",
"magic",
".",
"from_file",
"(",
"filename",
",",
"mime",
"=",
"True",
")",
"m",
"=",
"m",
".",
"decode",
"(",
")",
"if",
"isinstance",
"(",
"m",
",",
"bytes",
")",
"else",
"m",
"# compatibility fix, some versions return bytes some - str",
"if",
"m",
"==",
"\"text/plain\"",
":",
"guessed_type",
"=",
"mimetypes",
".",
"guess_type",
"(",
"filename",
")",
"[",
"0",
"]",
"# for js-detection",
"if",
"guessed_type",
":",
"return",
"guessed_type",
"return",
"m",
"finally",
":",
"__mime_lock",
".",
"release",
"(",
")"
] | Guess mime type for the given file name
	Note: this implementation uses the python_magic package, which is not thread-safe; as a workaround, a global lock is
	used so that it can work in a threaded environment
:param filename: file name to guess
:return: str | [
"Guess",
"mime",
"type",
"for",
"the",
"given",
"file",
"name"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/mime.py#L36-L65 | train |
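A minimal usage sketch for the mime_type() helper above; the import path follows this repository's layout, the file paths are illustrative, and the file must actually exist for the python-magic branch to run:

    from wasp_general.mime import mime_type

    print(mime_type('/srv/site/static/style.css'))    # 'text/css' (extension short-circuit, file is not read)
    print(mime_type('/srv/site/static/font.woff2'))   # 'application/font-woff2'
    print(mime_type('/srv/site/static/app.js'))       # sniffed by python-magic; plain-text files fall back to mimetypes.guess_type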
Loudr/pale | pale/arguments/base.py | BaseArgument._validate_type | def _validate_type(self, item, name):
"""Validate the item against `allowed_types`."""
if item is None:
# don't validate None items, since they'll be caught by the portion
# of the validator responsible for handling `required`ness
return
if not isinstance(item, self.allowed_types):
item_class_name = item.__class__.__name__
raise ArgumentError(name,
"Expected one of %s, but got `%s`" % (
self.allowed_types, item_class_name)) | python | def _validate_type(self, item, name):
"""Validate the item against `allowed_types`."""
if item is None:
# don't validate None items, since they'll be caught by the portion
# of the validator responsible for handling `required`ness
return
if not isinstance(item, self.allowed_types):
item_class_name = item.__class__.__name__
raise ArgumentError(name,
"Expected one of %s, but got `%s`" % (
self.allowed_types, item_class_name)) | [
"def",
"_validate_type",
"(",
"self",
",",
"item",
",",
"name",
")",
":",
"if",
"item",
"is",
"None",
":",
"# don't validate None items, since they'll be caught by the portion",
"# of the validator responsible for handling `required`ness",
"return",
"if",
"not",
"isinstance",
"(",
"item",
",",
"self",
".",
"allowed_types",
")",
":",
"item_class_name",
"=",
"item",
".",
"__class__",
".",
"__name__",
"raise",
"ArgumentError",
"(",
"name",
",",
"\"Expected one of %s, but got `%s`\"",
"%",
"(",
"self",
".",
"allowed_types",
",",
"item_class_name",
")",
")"
] | Validate the item against `allowed_types`. | [
"Validate",
"the",
"item",
"against",
"allowed_types",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/arguments/base.py#L59-L70 | train |
Loudr/pale | pale/arguments/base.py | BaseArgument._validate_required | def _validate_required(self, item, name):
"""Validate that the item is present if it's required."""
if self.required is True and item is None:
raise ArgumentError(name, "This argument is required.") | python | def _validate_required(self, item, name):
"""Validate that the item is present if it's required."""
if self.required is True and item is None:
raise ArgumentError(name, "This argument is required.") | [
"def",
"_validate_required",
"(",
"self",
",",
"item",
",",
"name",
")",
":",
"if",
"self",
".",
"required",
"is",
"True",
"and",
"item",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"name",
",",
"\"This argument is required.\"",
")"
] | Validate that the item is present if it's required. | [
"Validate",
"that",
"the",
"item",
"is",
"present",
"if",
"it",
"s",
"required",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/arguments/base.py#L73-L76 | train |
Loudr/pale | pale/arguments/base.py | BaseArgument.doc_dict | def doc_dict(self):
"""Returns the documentation dictionary for this argument."""
doc = {
'type': self.__class__.__name__,
'description': self.description,
'default': self.default,
'required': self.required
}
if hasattr(self, 'details'):
doc['detailed_description'] = self.details
return doc | python | def doc_dict(self):
"""Returns the documentation dictionary for this argument."""
doc = {
'type': self.__class__.__name__,
'description': self.description,
'default': self.default,
'required': self.required
}
if hasattr(self, 'details'):
doc['detailed_description'] = self.details
return doc | [
"def",
"doc_dict",
"(",
"self",
")",
":",
"doc",
"=",
"{",
"'type'",
":",
"self",
".",
"__class__",
".",
"__name__",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'default'",
":",
"self",
".",
"default",
",",
"'required'",
":",
"self",
".",
"required",
"}",
"if",
"hasattr",
"(",
"self",
",",
"'details'",
")",
":",
"doc",
"[",
"'detailed_description'",
"]",
"=",
"self",
".",
"details",
"return",
"doc"
] | Returns the documentation dictionary for this argument. | [
"Returns",
"the",
"documentation",
"dictionary",
"for",
"this",
"argument",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/arguments/base.py#L79-L89 | train |
Loudr/pale | pale/arguments/base.py | ListArgument.validate_items | def validate_items(self, input_list):
"""Validates that items in the list are of the type specified.
Returns the input list if it's valid, or raises an ArgumentError if
it's not."""
output_list = []
for item in input_list:
valid = self.list_item_type.validate(item, self.item_name)
output_list.append(valid)
# this might lead to confusing error messages. tbh, we need to
# figure out a better way to do validation and error handling here,
# but i'm brute forcing this a bit so that we have something
# workable
return output_list | python | def validate_items(self, input_list):
"""Validates that items in the list are of the type specified.
Returns the input list if it's valid, or raises an ArgumentError if
it's not."""
output_list = []
for item in input_list:
valid = self.list_item_type.validate(item, self.item_name)
output_list.append(valid)
# this might lead to confusing error messages. tbh, we need to
# figure out a better way to do validation and error handling here,
# but i'm brute forcing this a bit so that we have something
# workable
return output_list | [
"def",
"validate_items",
"(",
"self",
",",
"input_list",
")",
":",
"output_list",
"=",
"[",
"]",
"for",
"item",
"in",
"input_list",
":",
"valid",
"=",
"self",
".",
"list_item_type",
".",
"validate",
"(",
"item",
",",
"self",
".",
"item_name",
")",
"output_list",
".",
"append",
"(",
"valid",
")",
"# this might lead to confusing error messages. tbh, we need to",
"# figure out a better way to do validation and error handling here,",
"# but i'm brute forcing this a bit so that we have something",
"# workable",
"return",
"output_list"
] | Validates that items in the list are of the type specified.
Returns the input list if it's valid, or raises an ArgumentError if
it's not. | [
"Validates",
"that",
"items",
"in",
"the",
"list",
"are",
"of",
"the",
"type",
"specified",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/arguments/base.py#L114-L128 | train |
F483/apigen | apigen/apigen.py | Definition.startserver | def startserver(self, hostname="localhost", port=8080,
daemon=False, handle_sigint=True):
"""Start json-rpc service."""
if daemon:
print("Sorry daemon server not supported just yet.")
# TODO start as daemon similar to bitcoind
else:
print("Starting %s json-rpc service at http://%s:%s" % (
self.__class__.__name__, hostname, port
))
self._http_server = HTTPServer(
server_address=(hostname, int(port)),
RequestHandlerClass=self.get_http_request_handler()
)
if handle_sigint:
def sigint_handler(signum, frame):
self._post_shutdown()
sys.exit(0)
signal.signal(signal.SIGINT, sigint_handler)
self._http_server.serve_forever() | python | def startserver(self, hostname="localhost", port=8080,
daemon=False, handle_sigint=True):
"""Start json-rpc service."""
if daemon:
print("Sorry daemon server not supported just yet.")
# TODO start as daemon similar to bitcoind
else:
print("Starting %s json-rpc service at http://%s:%s" % (
self.__class__.__name__, hostname, port
))
self._http_server = HTTPServer(
server_address=(hostname, int(port)),
RequestHandlerClass=self.get_http_request_handler()
)
if handle_sigint:
def sigint_handler(signum, frame):
self._post_shutdown()
sys.exit(0)
signal.signal(signal.SIGINT, sigint_handler)
self._http_server.serve_forever() | [
"def",
"startserver",
"(",
"self",
",",
"hostname",
"=",
"\"localhost\"",
",",
"port",
"=",
"8080",
",",
"daemon",
"=",
"False",
",",
"handle_sigint",
"=",
"True",
")",
":",
"if",
"daemon",
":",
"print",
"(",
"\"Sorry daemon server not supported just yet.\"",
")",
"# TODO start as daemon similar to bitcoind",
"else",
":",
"print",
"(",
"\"Starting %s json-rpc service at http://%s:%s\"",
"%",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"hostname",
",",
"port",
")",
")",
"self",
".",
"_http_server",
"=",
"HTTPServer",
"(",
"server_address",
"=",
"(",
"hostname",
",",
"int",
"(",
"port",
")",
")",
",",
"RequestHandlerClass",
"=",
"self",
".",
"get_http_request_handler",
"(",
")",
")",
"if",
"handle_sigint",
":",
"def",
"sigint_handler",
"(",
"signum",
",",
"frame",
")",
":",
"self",
".",
"_post_shutdown",
"(",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"sigint_handler",
")",
"self",
".",
"_http_server",
".",
"serve_forever",
"(",
")"
] | Start json-rpc service. | [
"Start",
"json",
"-",
"rpc",
"service",
"."
] | f05ce1509030764721cc3393410fa12b609e88f2 | https://github.com/F483/apigen/blob/f05ce1509030764721cc3393410fa12b609e88f2/apigen/apigen.py#L118-L138 | train |
dfujim/bdata | bdata/bdata.py | bdata._get_asym_hel | def _get_asym_hel(self,d):
"""
Find the asymmetry of each helicity.
"""
# get data 1+ 2+ 1- 2-
d0 = d[0]; d1 = d[2]; d2 = d[1]; d3 = d[3]
# pre-calcs
denom1 = d0+d1;
denom2 = d2+d3
# check for div by zero
denom1[denom1==0] = np.nan
denom2[denom2==0] = np.nan
# asymmetries in both helicities
asym_hel = [(d0-d1)/denom1,
(d2-d3)/denom2]
# errors
# https://www.wolframalpha.com/input/?i=%E2%88%9A(F*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+F)%5E2+%2B+B*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+B)%5E2)
asym_hel_err = [2*np.sqrt(d0*d1/np.power(denom1,3)),
2*np.sqrt(d2*d3/np.power(denom2,3))]
# remove nan
for i in range(2):
asym_hel[i][np.isnan(asym_hel[i])] = 0.
asym_hel_err[i][np.isnan(asym_hel_err[i])] = 0.
# exit
return [[asym_hel[1],asym_hel_err[1]], # something wrong with file?
[asym_hel[0],asym_hel_err[0]]] | python | def _get_asym_hel(self,d):
"""
Find the asymmetry of each helicity.
"""
# get data 1+ 2+ 1- 2-
d0 = d[0]; d1 = d[2]; d2 = d[1]; d3 = d[3]
# pre-calcs
denom1 = d0+d1;
denom2 = d2+d3
# check for div by zero
denom1[denom1==0] = np.nan
denom2[denom2==0] = np.nan
# asymmetries in both helicities
asym_hel = [(d0-d1)/denom1,
(d2-d3)/denom2]
# errors
# https://www.wolframalpha.com/input/?i=%E2%88%9A(F*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+F)%5E2+%2B+B*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+B)%5E2)
asym_hel_err = [2*np.sqrt(d0*d1/np.power(denom1,3)),
2*np.sqrt(d2*d3/np.power(denom2,3))]
# remove nan
for i in range(2):
asym_hel[i][np.isnan(asym_hel[i])] = 0.
asym_hel_err[i][np.isnan(asym_hel_err[i])] = 0.
# exit
return [[asym_hel[1],asym_hel_err[1]], # something wrong with file?
[asym_hel[0],asym_hel_err[0]]] | [
"def",
"_get_asym_hel",
"(",
"self",
",",
"d",
")",
":",
"# get data 1+ 2+ 1- 2-",
"d0",
"=",
"d",
"[",
"0",
"]",
"d1",
"=",
"d",
"[",
"2",
"]",
"d2",
"=",
"d",
"[",
"1",
"]",
"d3",
"=",
"d",
"[",
"3",
"]",
"# pre-calcs",
"denom1",
"=",
"d0",
"+",
"d1",
"denom2",
"=",
"d2",
"+",
"d3",
"# check for div by zero",
"denom1",
"[",
"denom1",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"denom2",
"[",
"denom2",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"# asymmetries in both helicities",
"asym_hel",
"=",
"[",
"(",
"d0",
"-",
"d1",
")",
"/",
"denom1",
",",
"(",
"d2",
"-",
"d3",
")",
"/",
"denom2",
"]",
"# errors ",
"# https://www.wolframalpha.com/input/?i=%E2%88%9A(F*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+F)%5E2+%2B+B*(derivative+of+((F-B)%2F(F%2BB))+with+respect+to+B)%5E2)",
"asym_hel_err",
"=",
"[",
"2",
"*",
"np",
".",
"sqrt",
"(",
"d0",
"*",
"d1",
"/",
"np",
".",
"power",
"(",
"denom1",
",",
"3",
")",
")",
",",
"2",
"*",
"np",
".",
"sqrt",
"(",
"d2",
"*",
"d3",
"/",
"np",
".",
"power",
"(",
"denom2",
",",
"3",
")",
")",
"]",
"# remove nan ",
"for",
"i",
"in",
"range",
"(",
"2",
")",
":",
"asym_hel",
"[",
"i",
"]",
"[",
"np",
".",
"isnan",
"(",
"asym_hel",
"[",
"i",
"]",
")",
"]",
"=",
"0.",
"asym_hel_err",
"[",
"i",
"]",
"[",
"np",
".",
"isnan",
"(",
"asym_hel_err",
"[",
"i",
"]",
")",
"]",
"=",
"0.",
"# exit",
"return",
"[",
"[",
"asym_hel",
"[",
"1",
"]",
",",
"asym_hel_err",
"[",
"1",
"]",
"]",
",",
"# something wrong with file?",
"[",
"asym_hel",
"[",
"0",
"]",
",",
"asym_hel_err",
"[",
"0",
"]",
"]",
"]"
] | Find the asymmetry of each helicity. | [
"Find",
"the",
"asymmetry",
"of",
"each",
"helicity",
"."
] | 86af7b091e5cc167d2b9a3146953da347cc38614 | https://github.com/dfujim/bdata/blob/86af7b091e5cc167d2b9a3146953da347cc38614/bdata/bdata.py#L545-L577 | train |
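Writing F and B for the two counters of a single helicity (d0/d1 and d2/d3 above), the quantity computed is the usual counter asymmetry with Poisson uncertainties (sigma_F = sqrt(F), sigma_B = sqrt(B)) propagated through the ratio, which reproduces the error expression in the code:

    A = \frac{F - B}{F + B}, \qquad
    \sigma_A = \sqrt{\Big(\tfrac{2B}{(F+B)^2}\Big)^2 F + \Big(\tfrac{2F}{(F+B)^2}\Big)^2 B}
             = 2\sqrt{\frac{F B}{(F+B)^3}}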
dfujim/bdata | bdata/bdata.py | bdata._get_asym_comb | def _get_asym_comb(self,d):
"""
Find the combined asymmetry for slr runs. Elegant 4-counter method.
"""
# get data
d0 = d[0]; d1 = d[2]; d2 = d[1]; d3 = d[3]
# pre-calcs
r_denom = d0*d3
r_denom[r_denom==0] = np.nan
r = np.sqrt((d1*d2/r_denom))
r[r==-1] = np.nan
# combined asymmetry
asym_comb = (r-1)/(r+1)
# check for div by zero
d0[d0==0] = np.nan
d1[d1==0] = np.nan
d2[d2==0] = np.nan
d3[d3==0] = np.nan
# error in combined asymmetry
asym_comb_err = r*np.sqrt(1/d1 + 1/d0 + 1/d3 + 1/d2)/np.square(r+1)
# replace nan with zero
asym_comb[np.isnan(asym_comb)] = 0.
asym_comb_err[np.isnan(asym_comb_err)] = 0.
return [asym_comb,asym_comb_err] | python | def _get_asym_comb(self,d):
"""
Find the combined asymmetry for slr runs. Elegant 4-counter method.
"""
# get data
d0 = d[0]; d1 = d[2]; d2 = d[1]; d3 = d[3]
# pre-calcs
r_denom = d0*d3
r_denom[r_denom==0] = np.nan
r = np.sqrt((d1*d2/r_denom))
r[r==-1] = np.nan
# combined asymmetry
asym_comb = (r-1)/(r+1)
# check for div by zero
d0[d0==0] = np.nan
d1[d1==0] = np.nan
d2[d2==0] = np.nan
d3[d3==0] = np.nan
# error in combined asymmetry
asym_comb_err = r*np.sqrt(1/d1 + 1/d0 + 1/d3 + 1/d2)/np.square(r+1)
# replace nan with zero
asym_comb[np.isnan(asym_comb)] = 0.
asym_comb_err[np.isnan(asym_comb_err)] = 0.
return [asym_comb,asym_comb_err] | [
"def",
"_get_asym_comb",
"(",
"self",
",",
"d",
")",
":",
"# get data",
"d0",
"=",
"d",
"[",
"0",
"]",
"d1",
"=",
"d",
"[",
"2",
"]",
"d2",
"=",
"d",
"[",
"1",
"]",
"d3",
"=",
"d",
"[",
"3",
"]",
"# pre-calcs",
"r_denom",
"=",
"d0",
"*",
"d3",
"r_denom",
"[",
"r_denom",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"r",
"=",
"np",
".",
"sqrt",
"(",
"(",
"d1",
"*",
"d2",
"/",
"r_denom",
")",
")",
"r",
"[",
"r",
"==",
"-",
"1",
"]",
"=",
"np",
".",
"nan",
"# combined asymmetry",
"asym_comb",
"=",
"(",
"r",
"-",
"1",
")",
"/",
"(",
"r",
"+",
"1",
")",
"# check for div by zero",
"d0",
"[",
"d0",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"d1",
"[",
"d1",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"d2",
"[",
"d2",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"d3",
"[",
"d3",
"==",
"0",
"]",
"=",
"np",
".",
"nan",
"# error in combined asymmetry",
"asym_comb_err",
"=",
"r",
"*",
"np",
".",
"sqrt",
"(",
"1",
"/",
"d1",
"+",
"1",
"/",
"d0",
"+",
"1",
"/",
"d3",
"+",
"1",
"/",
"d2",
")",
"/",
"np",
".",
"square",
"(",
"r",
"+",
"1",
")",
"# replace nan with zero ",
"asym_comb",
"[",
"np",
".",
"isnan",
"(",
"asym_comb",
")",
"]",
"=",
"0.",
"asym_comb_err",
"[",
"np",
".",
"isnan",
"(",
"asym_comb_err",
")",
"]",
"=",
"0.",
"return",
"[",
"asym_comb",
",",
"asym_comb_err",
"]"
] | Find the combined asymmetry for slr runs. Elegant 4-counter method. | [
"Find",
"the",
"combined",
"asymmetry",
"for",
"slr",
"runs",
".",
"Elegant",
"4",
"-",
"counter",
"method",
"."
] | 86af7b091e5cc167d2b9a3146953da347cc38614 | https://github.com/dfujim/bdata/blob/86af7b091e5cc167d2b9a3146953da347cc38614/bdata/bdata.py#L580-L610 | train |
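The four-counter combination above can be summarized as follows; forming the ratio of the two counter products is what makes this the "elegant 4-counter method" of the docstring (it suppresses counter-efficiency differences to first order), and the error term is plain propagation of Poisson uncertainties through r:

    r = \sqrt{\frac{d_1 d_2}{d_0 d_3}}, \qquad
    A_{\mathrm{comb}} = \frac{r - 1}{r + 1}, \qquad
    \sigma_A = \frac{r}{(r+1)^2}\sqrt{\frac{1}{d_0} + \frac{1}{d_1} + \frac{1}{d_2} + \frac{1}{d_3}}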
dfujim/bdata | bdata/bdata.py | bdata._get_1f_sum_scans | def _get_1f_sum_scans(self,d,freq):
"""
Sum counts in each frequency bin over 1f scans.
"""
# combine scans: values with same frequency
unique_freq = np.unique(freq)
sum_scans = [[] for i in range(len(d))]
for f in unique_freq:
tag = freq==f
for i in range(len(d)):
sum_scans[i].append(np.sum(d[i][tag]))
return (np.array(unique_freq),np.array(sum_scans)) | python | def _get_1f_sum_scans(self,d,freq):
"""
Sum counts in each frequency bin over 1f scans.
"""
# combine scans: values with same frequency
unique_freq = np.unique(freq)
sum_scans = [[] for i in range(len(d))]
for f in unique_freq:
tag = freq==f
for i in range(len(d)):
sum_scans[i].append(np.sum(d[i][tag]))
return (np.array(unique_freq),np.array(sum_scans)) | [
"def",
"_get_1f_sum_scans",
"(",
"self",
",",
"d",
",",
"freq",
")",
":",
"# combine scans: values with same frequency ",
"unique_freq",
"=",
"np",
".",
"unique",
"(",
"freq",
")",
"sum_scans",
"=",
"[",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"d",
")",
")",
"]",
"for",
"f",
"in",
"unique_freq",
":",
"tag",
"=",
"freq",
"==",
"f",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"d",
")",
")",
":",
"sum_scans",
"[",
"i",
"]",
".",
"append",
"(",
"np",
".",
"sum",
"(",
"d",
"[",
"i",
"]",
"[",
"tag",
"]",
")",
")",
"return",
"(",
"np",
".",
"array",
"(",
"unique_freq",
")",
",",
"np",
".",
"array",
"(",
"sum_scans",
")",
")"
] | Sum counts in each frequency bin over 1f scans. | [
"Sum",
"counts",
"in",
"each",
"frequency",
"bin",
"over",
"1f",
"scans",
"."
] | 86af7b091e5cc167d2b9a3146953da347cc38614 | https://github.com/dfujim/bdata/blob/86af7b091e5cc167d2b9a3146953da347cc38614/bdata/bdata.py#L672-L687 | train |
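A small worked example of the scan summation above; the method does not touch self, so the logic can be illustrated standalone (the numbers are made up and b is assumed to be a bdata instance):

    import numpy as np

    freq = np.array([41.2, 41.3, 41.2, 41.3])   # two scans over the same two frequencies
    d    = [np.array([10, 20, 30, 40])]          # counts from one histogram
    unique_freq, sum_scans = b._get_1f_sum_scans(d, freq)
    # unique_freq -> array([41.2, 41.3])
    # sum_scans   -> array([[40, 60]])           # 10+30 and 20+40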
dfujim/bdata | bdata/bdata.py | bdata.get_pulse_s | def get_pulse_s(self):
"""Get pulse duration in seconds, for pulsed measurements."""
try:
dwelltime = self.ppg.dwelltime.mean
beam_on = self.ppg.beam_on.mean
except AttributeError:
raise AttributeError("Missing logged ppg parameter: dwelltime "+\
"or beam_on")
return dwelltime*beam_on/1000. | python | def get_pulse_s(self):
"""Get pulse duration in seconds, for pulsed measurements."""
try:
dwelltime = self.ppg.dwelltime.mean
beam_on = self.ppg.beam_on.mean
except AttributeError:
raise AttributeError("Missing logged ppg parameter: dwelltime "+\
"or beam_on")
return dwelltime*beam_on/1000. | [
"def",
"get_pulse_s",
"(",
"self",
")",
":",
"try",
":",
"dwelltime",
"=",
"self",
".",
"ppg",
".",
"dwelltime",
".",
"mean",
"beam_on",
"=",
"self",
".",
"ppg",
".",
"beam_on",
".",
"mean",
"except",
"AttributeError",
":",
"raise",
"AttributeError",
"(",
"\"Missing logged ppg parameter: dwelltime \"",
"+",
"\"or beam_on\"",
")",
"return",
"dwelltime",
"*",
"beam_on",
"/",
"1000."
] | Get pulse duration in seconds, for pulsed measurements. | [
"Get",
"pulse",
"duration",
"in",
"seconds",
"for",
"pulsed",
"measurements",
"."
] | 86af7b091e5cc167d2b9a3146953da347cc38614 | https://github.com/dfujim/bdata/blob/86af7b091e5cc167d2b9a3146953da347cc38614/bdata/bdata.py#L1334-L1343 | train |
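As a worked example of the formula above: if the logged dwelltime.mean is 10 (interpreted as milliseconds per bin, which the division by 1000 implies) and beam_on.mean is 400 dwell bins, the returned pulse length is 10 * 400 / 1000 = 4.0 s.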
Loudr/pale | pale/__init__.py | extract_endpoints | def extract_endpoints(api_module):
"""Return the endpoints from an API implementation module.
The results returned by this are used to populate your HTTP layer's
route handler, as well as by the documentation generator.
"""
if not hasattr(api_module, 'endpoints'):
raise ValueError(("pale.extract_endpoints expected the passed in "
"api_module to have an `endpoints` attribute, but it didn't!"))
endpoints = api_module.endpoints
if isinstance(endpoints, types.ModuleType):
classes = [v for (k,v) in inspect.getmembers(endpoints,
inspect.isclass)]
elif isinstance(endpoints, (list, tuple)):
classes = endpoints
else:
raise ValueError("Endpoints is not a module or list type!")
instances = []
for cls in classes:
if cls not in (Endpoint, PatchEndpoint, PutResourceEndpoint) and \
Endpoint in inspect.getmro(cls):
source_code = inspect.getsource(cls)
if "@requires_permission" in source_code:
permission_match = re.search(r"@requires_permission\(\[?[\'\"]+(\w+)[\'\"]+", source_code)
if permission_match != None:
cls._requires_permission = permission_match.group(1)
instances.append(cls())
return instances | python | def extract_endpoints(api_module):
"""Return the endpoints from an API implementation module.
The results returned by this are used to populate your HTTP layer's
route handler, as well as by the documentation generator.
"""
if not hasattr(api_module, 'endpoints'):
raise ValueError(("pale.extract_endpoints expected the passed in "
"api_module to have an `endpoints` attribute, but it didn't!"))
endpoints = api_module.endpoints
if isinstance(endpoints, types.ModuleType):
classes = [v for (k,v) in inspect.getmembers(endpoints,
inspect.isclass)]
elif isinstance(endpoints, (list, tuple)):
classes = endpoints
else:
raise ValueError("Endpoints is not a module or list type!")
instances = []
for cls in classes:
if cls not in (Endpoint, PatchEndpoint, PutResourceEndpoint) and \
Endpoint in inspect.getmro(cls):
source_code = inspect.getsource(cls)
if "@requires_permission" in source_code:
permission_match = re.search(r"@requires_permission\(\[?[\'\"]+(\w+)[\'\"]+", source_code)
if permission_match != None:
cls._requires_permission = permission_match.group(1)
instances.append(cls())
return instances | [
"def",
"extract_endpoints",
"(",
"api_module",
")",
":",
"if",
"not",
"hasattr",
"(",
"api_module",
",",
"'endpoints'",
")",
":",
"raise",
"ValueError",
"(",
"(",
"\"pale.extract_endpoints expected the passed in \"",
"\"api_module to have an `endpoints` attribute, but it didn't!\"",
")",
")",
"endpoints",
"=",
"api_module",
".",
"endpoints",
"if",
"isinstance",
"(",
"endpoints",
",",
"types",
".",
"ModuleType",
")",
":",
"classes",
"=",
"[",
"v",
"for",
"(",
"k",
",",
"v",
")",
"in",
"inspect",
".",
"getmembers",
"(",
"endpoints",
",",
"inspect",
".",
"isclass",
")",
"]",
"elif",
"isinstance",
"(",
"endpoints",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"classes",
"=",
"endpoints",
"else",
":",
"raise",
"ValueError",
"(",
"\"Endpoints is not a module or list type!\"",
")",
"instances",
"=",
"[",
"]",
"for",
"cls",
"in",
"classes",
":",
"if",
"cls",
"not",
"in",
"(",
"Endpoint",
",",
"PatchEndpoint",
",",
"PutResourceEndpoint",
")",
"and",
"Endpoint",
"in",
"inspect",
".",
"getmro",
"(",
"cls",
")",
":",
"source_code",
"=",
"inspect",
".",
"getsource",
"(",
"cls",
")",
"if",
"\"@requires_permission\"",
"in",
"source_code",
":",
"permission_match",
"=",
"re",
".",
"search",
"(",
"r\"@requires_permission\\(\\[?[\\'\\\"]+(\\w+)[\\'\\\"]+\"",
",",
"source_code",
")",
"if",
"permission_match",
"!=",
"None",
":",
"cls",
".",
"_requires_permission",
"=",
"permission_match",
".",
"group",
"(",
"1",
")",
"instances",
".",
"append",
"(",
"cls",
"(",
")",
")",
"return",
"instances"
] | Return the endpoints from an API implementation module.
The results returned by this are used to populate your HTTP layer's
route handler, as well as by the documentation generator. | [
"Return",
"the",
"endpoints",
"from",
"an",
"API",
"implementation",
"module",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/__init__.py#L44-L74 | train |
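A minimal sketch of inspecting the extracted endpoints; my_api is a hypothetical pale implementation module that defines the endpoints attribute required above, and the underscored attributes printed here are the ones read by the Flask adapter further down:

    import pale
    import my_api   # hypothetical module exposing an `endpoints` module or list

    for endpoint in pale.extract_endpoints(my_api):
        print(endpoint._http_method, endpoint._uri, endpoint._route_name)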
Loudr/pale | pale/__init__.py | extract_resources | def extract_resources(api_module):
"""Return the resources from an API implementation module.
The results returned here are used to generate documentation. They aren't
currently used for anything in production.
"""
endpoints = extract_endpoints(api_module)
resource_classes = [ e._returns.__class__ for e in endpoints ]
return list(set(resource_classes)) | python | def extract_resources(api_module):
"""Return the resources from an API implementation module.
The results returned here are used to generate documentation. They aren't
currently used for anything in production.
"""
endpoints = extract_endpoints(api_module)
resource_classes = [ e._returns.__class__ for e in endpoints ]
return list(set(resource_classes)) | [
"def",
"extract_resources",
"(",
"api_module",
")",
":",
"endpoints",
"=",
"extract_endpoints",
"(",
"api_module",
")",
"resource_classes",
"=",
"[",
"e",
".",
"_returns",
".",
"__class__",
"for",
"e",
"in",
"endpoints",
"]",
"return",
"list",
"(",
"set",
"(",
"resource_classes",
")",
")"
] | Return the resources from an API implementation module.
The results returned here are used to generate documentation. They aren't
currently used for anything in production. | [
"Return",
"the",
"resources",
"from",
"an",
"API",
"implementation",
"module",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/__init__.py#L76-L85 | train |
weijia/djangoautoconf | djangoautoconf/django_adv_zip_template_loader.py | Loader.load_template_source | def load_template_source(self, template_name, template_dirs=None):
"""Template loader that loads templates from zipped modules."""
#Get every app's folder
log.error("Calling zip loader")
for folder in app_template_dirs:
if ".zip/" in folder.replace("\\", "/"):
lib_file, relative_folder = get_zip_file_and_relative_path(folder)
log.error(lib_file, relative_folder)
try:
z = zipfile.ZipFile(lib_file)
log.error(relative_folder + template_name)
template_path_in_zip = os.path.join(relative_folder, template_name).replace("\\", "/")
source = z.read(template_path_in_zip)
except (IOError, KeyError) as e:
import traceback
log.error(traceback.format_exc())
try:
z.close()
except:
pass
continue
z.close()
# We found a template, so return the source.
template_path = "%s:%s" % (lib_file, template_path_in_zip)
return (source, template_path)
# If we reach here, the template couldn't be loaded
raise TemplateDoesNotExist(template_name) | python | def load_template_source(self, template_name, template_dirs=None):
"""Template loader that loads templates from zipped modules."""
#Get every app's folder
log.error("Calling zip loader")
for folder in app_template_dirs:
if ".zip/" in folder.replace("\\", "/"):
lib_file, relative_folder = get_zip_file_and_relative_path(folder)
log.error(lib_file, relative_folder)
try:
z = zipfile.ZipFile(lib_file)
log.error(relative_folder + template_name)
template_path_in_zip = os.path.join(relative_folder, template_name).replace("\\", "/")
source = z.read(template_path_in_zip)
except (IOError, KeyError) as e:
import traceback
log.error(traceback.format_exc())
try:
z.close()
except:
pass
continue
z.close()
# We found a template, so return the source.
template_path = "%s:%s" % (lib_file, template_path_in_zip)
return (source, template_path)
# If we reach here, the template couldn't be loaded
raise TemplateDoesNotExist(template_name) | [
"def",
"load_template_source",
"(",
"self",
",",
"template_name",
",",
"template_dirs",
"=",
"None",
")",
":",
"#Get every app's folder",
"log",
".",
"error",
"(",
"\"Calling zip loader\"",
")",
"for",
"folder",
"in",
"app_template_dirs",
":",
"if",
"\".zip/\"",
"in",
"folder",
".",
"replace",
"(",
"\"\\\\\"",
",",
"\"/\"",
")",
":",
"lib_file",
",",
"relative_folder",
"=",
"get_zip_file_and_relative_path",
"(",
"folder",
")",
"log",
".",
"error",
"(",
"lib_file",
",",
"relative_folder",
")",
"try",
":",
"z",
"=",
"zipfile",
".",
"ZipFile",
"(",
"lib_file",
")",
"log",
".",
"error",
"(",
"relative_folder",
"+",
"template_name",
")",
"template_path_in_zip",
"=",
"os",
".",
"path",
".",
"join",
"(",
"relative_folder",
",",
"template_name",
")",
".",
"replace",
"(",
"\"\\\\\"",
",",
"\"/\"",
")",
"source",
"=",
"z",
".",
"read",
"(",
"template_path_in_zip",
")",
"except",
"(",
"IOError",
",",
"KeyError",
")",
"as",
"e",
":",
"import",
"traceback",
"log",
".",
"error",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"try",
":",
"z",
".",
"close",
"(",
")",
"except",
":",
"pass",
"continue",
"z",
".",
"close",
"(",
")",
"# We found a template, so return the source.",
"template_path",
"=",
"\"%s:%s\"",
"%",
"(",
"lib_file",
",",
"template_path_in_zip",
")",
"return",
"(",
"source",
",",
"template_path",
")",
"# If we reach here, the template couldn't be loaded",
"raise",
"TemplateDoesNotExist",
"(",
"template_name",
")"
] | Template loader that loads templates from zipped modules. | [
"Template",
"loader",
"that",
"loads",
"templates",
"from",
"zipped",
"modules",
"."
] | b7dbda2287ed8cb9de6d02cb3abaaa1c36b1ced0 | https://github.com/weijia/djangoautoconf/blob/b7dbda2287ed8cb9de6d02cb3abaaa1c36b1ced0/djangoautoconf/django_adv_zip_template_loader.py#L44-L71 | train |
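For this zip-aware loader to be consulted, it would typically be listed ahead of the stock loaders in a Django settings module; the dotted path follows this repository's layout, and the older TEMPLATE_LOADERS-style setting shown here is an assumption about the target Django version:

    TEMPLATE_LOADERS = (
        'djangoautoconf.django_adv_zip_template_loader.Loader',
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    )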
olitheolix/qtmacs | qtmacs/logging_handler.py | QtmacsLoggingHandler.fetch | def fetch(self, start=None, stop=None):
"""
Fetch log records and return them as a list.
|Args|
* ``start`` (**int**): non-negative index of the first log
record to return.
* ``stop`` (**int**): non-negative index of the last log
record to return.
|Returns|
* **list**: list of log records (see ``logger`` module for
definition of log record).
|Raises|
* **None**
"""
# Set defaults if no explicit indices were provided.
if not start:
start = 0
if not stop:
stop = len(self.log)
# Sanity check: indices must be valid.
if start < 0:
start = 0
if stop > len(self.log):
stop = len(self.log)
# Clear the fetch flag. It will be set again in the emit()
# method once new data arrives.
self.waitForFetch = False
# Return the specified range of log records.
return self.log[start:stop] | python | def fetch(self, start=None, stop=None):
"""
Fetch log records and return them as a list.
|Args|
* ``start`` (**int**): non-negative index of the first log
record to return.
* ``stop`` (**int**): non-negative index of the last log
record to return.
|Returns|
* **list**: list of log records (see ``logger`` module for
definition of log record).
|Raises|
* **None**
"""
# Set defaults if no explicit indices were provided.
if not start:
start = 0
if not stop:
stop = len(self.log)
# Sanity check: indices must be valid.
if start < 0:
start = 0
if stop > len(self.log):
stop = len(self.log)
# Clear the fetch flag. It will be set again in the emit()
# method once new data arrives.
self.waitForFetch = False
# Return the specified range of log records.
return self.log[start:stop] | [
"def",
"fetch",
"(",
"self",
",",
"start",
"=",
"None",
",",
"stop",
"=",
"None",
")",
":",
"# Set defaults if no explicit indices were provided.",
"if",
"not",
"start",
":",
"start",
"=",
"0",
"if",
"not",
"stop",
":",
"stop",
"=",
"len",
"(",
"self",
".",
"log",
")",
"# Sanity check: indices must be valid.",
"if",
"start",
"<",
"0",
":",
"start",
"=",
"0",
"if",
"stop",
">",
"len",
"(",
"self",
".",
"log",
")",
":",
"stop",
"=",
"len",
"(",
"self",
".",
"log",
")",
"# Clear the fetch flag. It will be set again in the emit()",
"# method once new data arrives.",
"self",
".",
"waitForFetch",
"=",
"False",
"# Return the specified range of log records.",
"return",
"self",
".",
"log",
"[",
"start",
":",
"stop",
"]"
] | Fetch log records and return them as a list.
|Args|
* ``start`` (**int**): non-negative index of the first log
record to return.
* ``stop`` (**int**): non-negative index of the last log
record to return.
|Returns|
* **list**: list of log records (see ``logger`` module for
definition of log record).
|Raises|
* **None** | [
"Fetch",
"log",
"records",
"and",
"return",
"them",
"as",
"a",
"list",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/logging_handler.py#L127-L165 | train |
Loudr/pale | pale/adapters/flask.py | bind_blueprint | def bind_blueprint(pale_api_module, flask_blueprint):
"""Binds an implemented pale API module to a Flask Blueprint."""
if not isinstance(flask_blueprint, Blueprint):
raise TypeError(("pale.flask_adapter.bind_blueprint expected the "
"passed in flask_blueprint to be an instance of "
"Blueprint, but it was an instance of %s instead.")
% (type(flask_blueprint),))
if not pale.is_pale_module(pale_api_module):
raise TypeError(("pale.flask_adapter.bind_blueprint expected the "
"passed in pale_api_module to be a module, and to "
"have a _module_type defined to equal "
"pale.ImplementationModule, but it was an instance of "
"%s instead.")
% (type(pale_api_module),))
endpoints = pale.extract_endpoints(pale_api_module)
for endpoint in endpoints:
endpoint._set_response_class(RESPONSE_CLASS)
method = [endpoint._http_method]
name = endpoint._route_name
handler = endpoint._execute
flask_blueprint.add_url_rule(
endpoint._uri,
name,
view_func=ContextualizedHandler(handler),
methods=method) | python | def bind_blueprint(pale_api_module, flask_blueprint):
"""Binds an implemented pale API module to a Flask Blueprint."""
if not isinstance(flask_blueprint, Blueprint):
raise TypeError(("pale.flask_adapter.bind_blueprint expected the "
"passed in flask_blueprint to be an instance of "
"Blueprint, but it was an instance of %s instead.")
% (type(flask_blueprint),))
if not pale.is_pale_module(pale_api_module):
raise TypeError(("pale.flask_adapter.bind_blueprint expected the "
"passed in pale_api_module to be a module, and to "
"have a _module_type defined to equal "
"pale.ImplementationModule, but it was an instance of "
"%s instead.")
% (type(pale_api_module),))
endpoints = pale.extract_endpoints(pale_api_module)
for endpoint in endpoints:
endpoint._set_response_class(RESPONSE_CLASS)
method = [endpoint._http_method]
name = endpoint._route_name
handler = endpoint._execute
flask_blueprint.add_url_rule(
endpoint._uri,
name,
view_func=ContextualizedHandler(handler),
methods=method) | [
"def",
"bind_blueprint",
"(",
"pale_api_module",
",",
"flask_blueprint",
")",
":",
"if",
"not",
"isinstance",
"(",
"flask_blueprint",
",",
"Blueprint",
")",
":",
"raise",
"TypeError",
"(",
"(",
"\"pale.flask_adapter.bind_blueprint expected the \"",
"\"passed in flask_blueprint to be an instance of \"",
"\"Blueprint, but it was an instance of %s instead.\"",
")",
"%",
"(",
"type",
"(",
"flask_blueprint",
")",
",",
")",
")",
"if",
"not",
"pale",
".",
"is_pale_module",
"(",
"pale_api_module",
")",
":",
"raise",
"TypeError",
"(",
"(",
"\"pale.flask_adapter.bind_blueprint expected the \"",
"\"passed in pale_api_module to be a module, and to \"",
"\"have a _module_type defined to equal \"",
"\"pale.ImplementationModule, but it was an instance of \"",
"\"%s instead.\"",
")",
"%",
"(",
"type",
"(",
"pale_api_module",
")",
",",
")",
")",
"endpoints",
"=",
"pale",
".",
"extract_endpoints",
"(",
"pale_api_module",
")",
"for",
"endpoint",
"in",
"endpoints",
":",
"endpoint",
".",
"_set_response_class",
"(",
"RESPONSE_CLASS",
")",
"method",
"=",
"[",
"endpoint",
".",
"_http_method",
"]",
"name",
"=",
"endpoint",
".",
"_route_name",
"handler",
"=",
"endpoint",
".",
"_execute",
"flask_blueprint",
".",
"add_url_rule",
"(",
"endpoint",
".",
"_uri",
",",
"name",
",",
"view_func",
"=",
"ContextualizedHandler",
"(",
"handler",
")",
",",
"methods",
"=",
"method",
")"
] | Binds an implemented pale API module to a Flask Blueprint. | [
"Binds",
"an",
"implemented",
"pale",
"API",
"module",
"to",
"a",
"Flask",
"Blueprint",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/adapters/flask.py#L29-L58 | train |
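A minimal wiring sketch for the adapter above; my_api is again a hypothetical pale implementation module, and the blueprint name and URL prefix are arbitrary:

    from flask import Flask, Blueprint
    from pale.adapters.flask import bind_blueprint
    import my_api   # hypothetical pale implementation module

    app = Flask(__name__)
    api = Blueprint('api', __name__)
    bind_blueprint(my_api, api)                      # adds one URL rule per pale endpoint
    app.register_blueprint(api, url_prefix='/api')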
a1ezzz/wasp-general | wasp_general/network/web/cookies.py | WHTTPCookie.cookie_name_check | def cookie_name_check(cookie_name):
""" Check cookie name for validity. Return True if name is valid
:param cookie_name: name to check
:return: bool
"""
cookie_match = WHTTPCookie.cookie_name_non_compliance_re.match(cookie_name.encode('us-ascii'))
return len(cookie_name) > 0 and cookie_match is None | python | def cookie_name_check(cookie_name):
""" Check cookie name for validity. Return True if name is valid
:param cookie_name: name to check
:return: bool
"""
cookie_match = WHTTPCookie.cookie_name_non_compliance_re.match(cookie_name.encode('us-ascii'))
return len(cookie_name) > 0 and cookie_match is None | [
"def",
"cookie_name_check",
"(",
"cookie_name",
")",
":",
"cookie_match",
"=",
"WHTTPCookie",
".",
"cookie_name_non_compliance_re",
".",
"match",
"(",
"cookie_name",
".",
"encode",
"(",
"'us-ascii'",
")",
")",
"return",
"len",
"(",
"cookie_name",
")",
">",
"0",
"and",
"cookie_match",
"is",
"None"
] | Check cookie name for validity. Return True if name is valid
:param cookie_name: name to check
:return: bool | [
"Check",
"cookie",
"name",
"for",
"validity",
".",
"Return",
"True",
"if",
"name",
"is",
"valid"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/cookies.py#L66-L73 | train |
a1ezzz/wasp-general | wasp_general/network/web/cookies.py | WHTTPCookie.cookie_attr_value_check | def cookie_attr_value_check(attr_name, attr_value):
""" Check cookie attribute value for validity. Return True if value is valid
:param attr_name: attribute name to check
:param attr_value: attribute value to check
:return: bool
"""
attr_value.encode('us-ascii')
return WHTTPCookie.cookie_attr_value_compliance[attr_name].match(attr_value) is not None | python | def cookie_attr_value_check(attr_name, attr_value):
""" Check cookie attribute value for validity. Return True if value is valid
:param attr_name: attribute name to check
:param attr_value: attribute value to check
:return: bool
"""
attr_value.encode('us-ascii')
return WHTTPCookie.cookie_attr_value_compliance[attr_name].match(attr_value) is not None | [
"def",
"cookie_attr_value_check",
"(",
"attr_name",
",",
"attr_value",
")",
":",
"attr_value",
".",
"encode",
"(",
"'us-ascii'",
")",
"return",
"WHTTPCookie",
".",
"cookie_attr_value_compliance",
"[",
"attr_name",
"]",
".",
"match",
"(",
"attr_value",
")",
"is",
"not",
"None"
] | Check cookie attribute value for validity. Return True if value is valid
:param attr_name: attribute name to check
:param attr_value: attribute value to check
:return: bool | [
"Check",
"cookie",
"attribute",
"value",
"for",
"validity",
".",
"Return",
"True",
"if",
"value",
"is",
"valid"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/cookies.py#L85-L93 | train |
a1ezzz/wasp-general | wasp_general/network/web/cookies.py | WHTTPCookie.__attr_name | def __attr_name(self, name):
""" Return suitable and valid attribute name. This method replaces dash char to underscore. If name
is invalid ValueError exception is raised
:param name: cookie attribute name
:return: str
"""
if name not in self.cookie_attr_value_compliance.keys():
suggested_name = name.replace('_', '-').lower()
if suggested_name not in self.cookie_attr_value_compliance.keys():
raise ValueError('Invalid attribute name is specified')
name = suggested_name
return name | python | def __attr_name(self, name):
""" Return suitable and valid attribute name. This method replaces dash char to underscore. If name
is invalid ValueError exception is raised
:param name: cookie attribute name
:return: str
"""
if name not in self.cookie_attr_value_compliance.keys():
suggested_name = name.replace('_', '-').lower()
if suggested_name not in self.cookie_attr_value_compliance.keys():
raise ValueError('Invalid attribute name is specified')
name = suggested_name
return name | [
"def",
"__attr_name",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"cookie_attr_value_compliance",
".",
"keys",
"(",
")",
":",
"suggested_name",
"=",
"name",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
".",
"lower",
"(",
")",
"if",
"suggested_name",
"not",
"in",
"self",
".",
"cookie_attr_value_compliance",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid attribute name is specified'",
")",
"name",
"=",
"suggested_name",
"return",
"name"
] | Return suitable and valid attribute name. This method replaces underscore char with dash. If name
is invalid ValueError exception is raised
:param name: cookie attribute name
:return: str | [
"Return",
"suitable",
"and",
"valid",
"attribute",
"name",
".",
"This",
"method",
"replaces",
"dash",
"char",
"to",
"underscore",
".",
"If",
"name",
"is",
"invalid",
"ValueError",
"exception",
"is",
"raised"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/cookies.py#L145-L157 | train |
a1ezzz/wasp-general | wasp_general/network/web/cookies.py | WHTTPCookieJar.remove_cookie | def remove_cookie(self, cookie_name):
""" Remove cookie by its name
:param cookie_name: cookie name
:return:
"""
if self.__ro_flag:
raise RuntimeError('Read-only cookie-jar changing attempt')
if cookie_name in self.__cookies.keys():
self.__cookies.pop(cookie_name) | python | def remove_cookie(self, cookie_name):
""" Remove cookie by its name
:param cookie_name: cookie name
:return:
"""
if self.__ro_flag:
raise RuntimeError('Read-only cookie-jar changing attempt')
if cookie_name in self.__cookies.keys():
self.__cookies.pop(cookie_name) | [
"def",
"remove_cookie",
"(",
"self",
",",
"cookie_name",
")",
":",
"if",
"self",
".",
"__ro_flag",
":",
"raise",
"RuntimeError",
"(",
"'Read-only cookie-jar changing attempt'",
")",
"if",
"cookie_name",
"in",
"self",
".",
"__cookies",
".",
"keys",
"(",
")",
":",
"self",
".",
"__cookies",
".",
"pop",
"(",
"cookie_name",
")"
] | Remove cookie by its name
:param cookie_name: cookie name
:return: | [
"Remove",
"cookie",
"by",
"its",
"name"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/cookies.py#L254-L263 | train |
a1ezzz/wasp-general | wasp_general/network/web/cookies.py | WHTTPCookieJar.ro | def ro(self):
""" Return read-only copy
:return: WHTTPCookieJar
"""
ro_jar = WHTTPCookieJar()
for cookie in self.__cookies.values():
ro_jar.add_cookie(cookie.ro())
ro_jar.__ro_flag = True
return ro_jar | python | def ro(self):
""" Return read-only copy
:return: WHTTPCookieJar
"""
ro_jar = WHTTPCookieJar()
for cookie in self.__cookies.values():
ro_jar.add_cookie(cookie.ro())
ro_jar.__ro_flag = True
return ro_jar | [
"def",
"ro",
"(",
"self",
")",
":",
"ro_jar",
"=",
"WHTTPCookieJar",
"(",
")",
"for",
"cookie",
"in",
"self",
".",
"__cookies",
".",
"values",
"(",
")",
":",
"ro_jar",
".",
"add_cookie",
"(",
"cookie",
".",
"ro",
"(",
")",
")",
"ro_jar",
".",
"__ro_flag",
"=",
"True",
"return",
"ro_jar"
] | Return read-only copy
:return: WHTTPCookieJar | [
"Return",
"read",
"-",
"only",
"copy"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/cookies.py#L282-L291 | train |
a1ezzz/wasp-general | wasp_general/network/web/cookies.py | WHTTPCookieJar.import_simple_cookie | def import_simple_cookie(cls, simple_cookie):
""" Create cookie jar from SimpleCookie object
:param simple_cookie: cookies to import
:return: WHTTPCookieJar
"""
cookie_jar = WHTTPCookieJar()
for cookie_name in simple_cookie.keys():
cookie_attrs = {}
for attr_name in WHTTPCookie.cookie_attr_value_compliance.keys():
attr_value = simple_cookie[cookie_name][attr_name]
if attr_value != '':
cookie_attrs[attr_name] = attr_value
cookie_jar.add_cookie(WHTTPCookie(
cookie_name, simple_cookie[cookie_name].value, **cookie_attrs
))
return cookie_jar | python | def import_simple_cookie(cls, simple_cookie):
""" Create cookie jar from SimpleCookie object
:param simple_cookie: cookies to import
:return: WHTTPCookieJar
"""
cookie_jar = WHTTPCookieJar()
for cookie_name in simple_cookie.keys():
cookie_attrs = {}
for attr_name in WHTTPCookie.cookie_attr_value_compliance.keys():
attr_value = simple_cookie[cookie_name][attr_name]
if attr_value != '':
cookie_attrs[attr_name] = attr_value
cookie_jar.add_cookie(WHTTPCookie(
cookie_name, simple_cookie[cookie_name].value, **cookie_attrs
))
return cookie_jar | [
"def",
"import_simple_cookie",
"(",
"cls",
",",
"simple_cookie",
")",
":",
"cookie_jar",
"=",
"WHTTPCookieJar",
"(",
")",
"for",
"cookie_name",
"in",
"simple_cookie",
".",
"keys",
"(",
")",
":",
"cookie_attrs",
"=",
"{",
"}",
"for",
"attr_name",
"in",
"WHTTPCookie",
".",
"cookie_attr_value_compliance",
".",
"keys",
"(",
")",
":",
"attr_value",
"=",
"simple_cookie",
"[",
"cookie_name",
"]",
"[",
"attr_name",
"]",
"if",
"attr_value",
"!=",
"''",
":",
"cookie_attrs",
"[",
"attr_name",
"]",
"=",
"attr_value",
"cookie_jar",
".",
"add_cookie",
"(",
"WHTTPCookie",
"(",
"cookie_name",
",",
"simple_cookie",
"[",
"cookie_name",
"]",
".",
"value",
",",
"*",
"*",
"cookie_attrs",
")",
")",
"return",
"cookie_jar"
] | Create cookie jar from SimpleCookie object
:param simple_cookie: cookies to import
:return: WHTTPCookieJar | [
"Create",
"cookie",
"jar",
"from",
"SimpleCookie",
"object"
] | 1029839d33eb663f8dec76c1c46754d53c1de4a9 | https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/cookies.py#L295-L312 | train |
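A usage sketch for the importer above, building the source cookies with the standard-library SimpleCookie; whether a given attribute such as 'path' survives the import depends on the keys in WHTTPCookie.cookie_attr_value_compliance, so treat the attribute handling here as an assumption:

    from http.cookies import SimpleCookie
    from wasp_general.network.web.cookies import WHTTPCookieJar

    simple = SimpleCookie()
    simple['session'] = 'abc123'
    simple['session']['path'] = '/'

    jar = WHTTPCookieJar.import_simple_cookie(simple)   # read-write jar with one cookie
    ro_jar = jar.ro()                                    # frozen copy, see ro() above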
vecnet/vecnet.openmalaria | vecnet/openmalaria/helpers.py | is_prime | def is_prime(n):
"""
Check if n is a prime number
"""
if n % 2 == 0 and n > 2:
return False
return all(n % i for i in range(3, int(math.sqrt(n)) + 1, 2)) | python | def is_prime(n):
"""
Check if n is a prime number
"""
if n % 2 == 0 and n > 2:
return False
return all(n % i for i in range(3, int(math.sqrt(n)) + 1, 2)) | [
"def",
"is_prime",
"(",
"n",
")",
":",
"if",
"n",
"%",
"2",
"==",
"0",
"and",
"n",
">",
"2",
":",
"return",
"False",
"return",
"all",
"(",
"n",
"%",
"i",
"for",
"i",
"in",
"range",
"(",
"3",
",",
"int",
"(",
"math",
".",
"sqrt",
"(",
"n",
")",
")",
"+",
"1",
",",
"2",
")",
")"
] | Check if n is a prime number | [
"Check",
"if",
"n",
"is",
"a",
"prime",
"number"
] | 795bc9d1b81a6c664f14879edda7a7c41188e95a | https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/helpers.py#L15-L21 | train |
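Trial division only has to test odd divisors up to sqrt(n), which is what the generator expression above does. A quick check (note that, as written, values below 2 fall through to an empty all(...) and come back truthy, so callers are expected to pass n >= 2):

    print([n for n in range(2, 30) if is_prime(n)])
    # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]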
olitheolix/qtmacs | qtmacs/applets/richeditor.py | RichEditor.loadFile | def loadFile(self, fileName):
"""
Display the file associated with the appletID.
"""
# Assign QFile object with the current name.
self.file = QtCore.QFile(fileName)
if self.file.exists():
self.qteText.append(open(fileName).read())
else:
msg = "File <b>{}</b> does not exist".format(self.qteAppletID())
self.qteLogger.info(msg) | python | def loadFile(self, fileName):
"""
Display the file associated with the appletID.
"""
# Assign QFile object with the current name.
self.file = QtCore.QFile(fileName)
if self.file.exists():
self.qteText.append(open(fileName).read())
else:
msg = "File <b>{}</b> does not exist".format(self.qteAppletID())
self.qteLogger.info(msg) | [
"def",
"loadFile",
"(",
"self",
",",
"fileName",
")",
":",
"# Assign QFile object with the current name.",
"self",
".",
"file",
"=",
"QtCore",
".",
"QFile",
"(",
"fileName",
")",
"if",
"self",
".",
"file",
".",
"exists",
"(",
")",
":",
"self",
".",
"qteText",
".",
"append",
"(",
"open",
"(",
"fileName",
")",
".",
"read",
"(",
")",
")",
"else",
":",
"msg",
"=",
"\"File <b>{}</b> does not exist\"",
".",
"format",
"(",
"self",
".",
"qteAppletID",
"(",
")",
")",
"self",
".",
"qteLogger",
".",
"info",
"(",
"msg",
")"
] | Display the file associated with the appletID. | [
"Display",
"the",
"file",
"associated",
"with",
"the",
"appletID",
"."
] | 36253b082b82590f183fe154b053eb3a1e741be2 | https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/applets/richeditor.py#L67-L78 | train |
jreese/ent | ent/ent.py | Ent._encode | def _encode(self):
"""Generate a recursive JSON representation of the ent."""
obj = {k: v for k, v in self.__dict__.items()
if not k.startswith('_') and type(v) in SAFE_TYPES}
obj.update({k: v._encode() for k, v in self.__dict__.items()
if isinstance(v, Ent)})
return obj | python | def _encode(self):
"""Generate a recursive JSON representation of the ent."""
obj = {k: v for k, v in self.__dict__.items()
if not k.startswith('_') and type(v) in SAFE_TYPES}
obj.update({k: v._encode() for k, v in self.__dict__.items()
if isinstance(v, Ent)})
return obj | [
"def",
"_encode",
"(",
"self",
")",
":",
"obj",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"__dict__",
".",
"items",
"(",
")",
"if",
"not",
"k",
".",
"startswith",
"(",
"'_'",
")",
"and",
"type",
"(",
"v",
")",
"in",
"SAFE_TYPES",
"}",
"obj",
".",
"update",
"(",
"{",
"k",
":",
"v",
".",
"_encode",
"(",
")",
"for",
"k",
",",
"v",
"in",
"self",
".",
"__dict__",
".",
"items",
"(",
")",
"if",
"isinstance",
"(",
"v",
",",
"Ent",
")",
"}",
")",
"return",
"obj"
] | Generate a recursive JSON representation of the ent. | [
"Generate",
"a",
"recursive",
"JSON",
"representation",
"of",
"the",
"ent",
"."
] | 65f7c6498536c551ee1fdb43c3c429f24aa0f755 | https://github.com/jreese/ent/blob/65f7c6498536c551ee1fdb43c3c429f24aa0f755/ent/ent.py#L81-L87 | train |
jreese/ent | ent/ent.py | Ent.merge | def merge(cls, *args, **kwargs):
"""Create a new Ent from one or more existing Ents. Keys in the
later Ent objects will overwrite the keys of the previous Ents.
Later keys of different type than in earlier Ents will be bravely
ignored.
The following keyword arguments are recognized:
newkeys: boolean value to determine whether keys from later Ents
should be included if they do not exist in earlier Ents.
ignore: list of strings of key names that should not be overridden by
later Ent keys.
"""
newkeys = bool(kwargs.get('newkeys', False))
ignore = kwargs.get('ignore', list())
if len(args) < 1:
raise ValueError('no ents given to Ent.merge()')
elif not all(isinstance(s, Ent) for s in args):
raise ValueError('all positional arguments to Ent.merge() must '
'be instances of Ent')
ent = args[0]
data = cls.load(ent)
for ent in args[1:]:
for key, value in ent.__dict__.items():
if key in ignore:
continue
if key in data.__dict__:
v1 = data.__dict__[key]
if type(value) == type(v1):
if isinstance(v1, Ent):
data.__dict__[key] = cls.merge(v1, value, **kwargs)
else:
data.__dict__[key] = cls.load(value)
elif newkeys:
data.__dict__[key] = value
return data | python | def merge(cls, *args, **kwargs):
"""Create a new Ent from one or more existing Ents. Keys in the
later Ent objects will overwrite the keys of the previous Ents.
Later keys of different type than in earlier Ents will be bravely
ignored.
The following keyword arguments are recognized:
newkeys: boolean value to determine whether keys from later Ents
should be included if they do not exist in earlier Ents.
ignore: list of strings of key names that should not be overridden by
later Ent keys.
"""
newkeys = bool(kwargs.get('newkeys', False))
ignore = kwargs.get('ignore', list())
if len(args) < 1:
raise ValueError('no ents given to Ent.merge()')
elif not all(isinstance(s, Ent) for s in args):
raise ValueError('all positional arguments to Ent.merge() must '
'be instances of Ent')
ent = args[0]
data = cls.load(ent)
for ent in args[1:]:
for key, value in ent.__dict__.items():
if key in ignore:
continue
if key in data.__dict__:
v1 = data.__dict__[key]
if type(value) == type(v1):
if isinstance(v1, Ent):
data.__dict__[key] = cls.merge(v1, value, **kwargs)
else:
data.__dict__[key] = cls.load(value)
elif newkeys:
data.__dict__[key] = value
return data | [
"def",
"merge",
"(",
"cls",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"newkeys",
"=",
"bool",
"(",
"kwargs",
".",
"get",
"(",
"'newkeys'",
",",
"False",
")",
")",
"ignore",
"=",
"kwargs",
".",
"get",
"(",
"'ignore'",
",",
"list",
"(",
")",
")",
"if",
"len",
"(",
"args",
")",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'no ents given to Ent.merge()'",
")",
"elif",
"not",
"all",
"(",
"isinstance",
"(",
"s",
",",
"Ent",
")",
"for",
"s",
"in",
"args",
")",
":",
"raise",
"ValueError",
"(",
"'all positional arguments to Ent.merge() must '",
"'be instances of Ent'",
")",
"ent",
"=",
"args",
"[",
"0",
"]",
"data",
"=",
"cls",
".",
"load",
"(",
"ent",
")",
"for",
"ent",
"in",
"args",
"[",
"1",
":",
"]",
":",
"for",
"key",
",",
"value",
"in",
"ent",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"ignore",
":",
"continue",
"if",
"key",
"in",
"data",
".",
"__dict__",
":",
"v1",
"=",
"data",
".",
"__dict__",
"[",
"key",
"]",
"if",
"type",
"(",
"value",
")",
"==",
"type",
"(",
"v1",
")",
":",
"if",
"isinstance",
"(",
"v1",
",",
"Ent",
")",
":",
"data",
".",
"__dict__",
"[",
"key",
"]",
"=",
"cls",
".",
"merge",
"(",
"v1",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"data",
".",
"__dict__",
"[",
"key",
"]",
"=",
"cls",
".",
"load",
"(",
"value",
")",
"elif",
"newkeys",
":",
"data",
".",
"__dict__",
"[",
"key",
"]",
"=",
"value",
"return",
"data"
] | Create a new Ent from one or more existing Ents. Keys in the
later Ent objects will overwrite the keys of the previous Ents.
Later keys of different type than in earlier Ents will be bravely
ignored.
The following keyword arguments are recognized:
newkeys: boolean value to determine whether keys from later Ents
should be included if they do not exist in earlier Ents.
ignore: list of strings of key names that should not be overridden by
later Ent keys. | [
"Create",
"a",
"new",
"Ent",
"from",
"one",
"or",
"more",
"existing",
"Ents",
".",
"Keys",
"in",
"the",
"later",
"Ent",
"objects",
"will",
"overwrite",
"the",
"keys",
"of",
"the",
"previous",
"Ents",
".",
"Later",
"keys",
"of",
"different",
"type",
"than",
"in",
"earlier",
"Ents",
"will",
"be",
"bravely",
"ignored",
"."
] | 65f7c6498536c551ee1fdb43c3c429f24aa0f755 | https://github.com/jreese/ent/blob/65f7c6498536c551ee1fdb43c3c429f24aa0f755/ent/ent.py#L133-L176 | train |
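A hypothetical usage sketch for Ent.merge based on the logic shown in the entry; the `from ent import Ent` import path and the Ent.load(dict) constructor are assumptions, not part of the entry.

from ent import Ent  # assumed import path

defaults = Ent.load({'host': 'localhost', 'port': 8080, 'debug': False})  # assumes load() accepts a dict
overrides = Ent.load({'port': 9090, 'debug': 'yes', 'extra': True})

merged = Ent.merge(defaults, overrides)
# port is overwritten (same type), debug keeps False ('yes' is a str, so the
# type mismatch is ignored), and extra is dropped unless newkeys=True.
print(merged.port)                                          # 9090
print(Ent.merge(defaults, overrides, newkeys=True).extra)   # True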
jreese/ent | ent/ent.py | Ent.diff | def diff(cls, *args, **kwargs):
"""Create a new Ent representing the differences in two or more
existing Ents. Keys in the later Ents with values that differ
from the earlier Ents will be present in the final Ent with the
latest value seen for that key. Later keys of different type than in
earlier Ents will be bravely ignored.
The following keyword arguments are recognized:
newkeys: boolean value to determine whether keys from later Ents
should be included if they do not exist in earlier Ents.
ignore: list of strings of key names that will not be included.
"""
newkeys = bool(kwargs.get('newkeys', False))
ignore = kwargs.get('ignore', list())
if len(args) < 2:
raise ValueError('less than two ents given to Ent.diff()')
elif not all(isinstance(s, Ent) for s in args):
raise ValueError('all positional arguments to Ent.diff() must '
'be instances of Ent')
s1 = args[0]
differences = Ent()
for s2 in args[1:]:
for key, value in s2.__dict__.items():
if key in ignore:
continue
if key in s1.__dict__:
v1 = s1.__dict__[key]
if type(value) == type(v1):
if isinstance(v1, Ent):
delta = cls.diff(v1, value, **kwargs)
if len(delta.__dict__):
differences.__dict__[key] = delta
elif v1 != value:
differences.__dict__[key] = cls.load(value)
elif newkeys:
differences.__dict__[key] = cls.load(value)
s1 = s2
return differences | python | def diff(cls, *args, **kwargs):
"""Create a new Ent representing the differences in two or more
existing Ents. Keys in the later Ents with values that differ
from the earlier Ents will be present in the final Ent with the
latest value seen for that key. Later keys of different type than in
earlier Ents will be bravely ignored.
The following keyword arguments are recognized:
newkeys: boolean value to determine whether keys from later Ents
should be included if they do not exist in earlier Ents.
ignore: list of strings of key names that will not be included.
"""
newkeys = bool(kwargs.get('newkeys', False))
ignore = kwargs.get('ignore', list())
if len(args) < 2:
raise ValueError('less than two ents given to Ent.diff()')
elif not all(isinstance(s, Ent) for s in args):
raise ValueError('all positional arguments to Ent.diff() must '
'be instances of Ent')
s1 = args[0]
differences = Ent()
for s2 in args[1:]:
for key, value in s2.__dict__.items():
if key in ignore:
continue
if key in s1.__dict__:
v1 = s1.__dict__[key]
if type(value) == type(v1):
if isinstance(v1, Ent):
delta = cls.diff(v1, value, **kwargs)
if len(delta.__dict__):
differences.__dict__[key] = delta
elif v1 != value:
differences.__dict__[key] = cls.load(value)
elif newkeys:
differences.__dict__[key] = cls.load(value)
s1 = s2
return differences | [
"def",
"diff",
"(",
"cls",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"newkeys",
"=",
"bool",
"(",
"kwargs",
".",
"get",
"(",
"'newkeys'",
",",
"False",
")",
")",
"ignore",
"=",
"kwargs",
".",
"get",
"(",
"'ignore'",
",",
"list",
"(",
")",
")",
"if",
"len",
"(",
"args",
")",
"<",
"2",
":",
"raise",
"ValueError",
"(",
"'less than two ents given to Ent.diff()'",
")",
"elif",
"not",
"all",
"(",
"isinstance",
"(",
"s",
",",
"Ent",
")",
"for",
"s",
"in",
"args",
")",
":",
"raise",
"ValueError",
"(",
"'all positional arguments to Ent.diff() must '",
"'be instances of Ent'",
")",
"s1",
"=",
"args",
"[",
"0",
"]",
"differences",
"=",
"Ent",
"(",
")",
"for",
"s2",
"in",
"args",
"[",
"1",
":",
"]",
":",
"for",
"key",
",",
"value",
"in",
"s2",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"ignore",
":",
"continue",
"if",
"key",
"in",
"s1",
".",
"__dict__",
":",
"v1",
"=",
"s1",
".",
"__dict__",
"[",
"key",
"]",
"if",
"type",
"(",
"value",
")",
"==",
"type",
"(",
"v1",
")",
":",
"if",
"isinstance",
"(",
"v1",
",",
"Ent",
")",
":",
"delta",
"=",
"cls",
".",
"diff",
"(",
"v1",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
"if",
"len",
"(",
"delta",
".",
"__dict__",
")",
":",
"differences",
".",
"__dict__",
"[",
"key",
"]",
"=",
"delta",
"elif",
"v1",
"!=",
"value",
":",
"differences",
".",
"__dict__",
"[",
"key",
"]",
"=",
"cls",
".",
"load",
"(",
"value",
")",
"elif",
"newkeys",
":",
"differences",
".",
"__dict__",
"[",
"key",
"]",
"=",
"cls",
".",
"load",
"(",
"value",
")",
"s1",
"=",
"s2",
"return",
"differences"
] | Create a new Ent representing the differences in two or more
existing Ents. Keys in the later Ents with values that differ
from the earlier Ents will be present in the final Ent with the
latest value seen for that key. Later keys of different type than in
earlier Ents will be bravely ignored.
The following keyword arguments are recognized:
newkeys: boolean value to determine whether keys from later Ents
should be included if they do not exist in earlier Ents.
ignore: list of strings of key names that will not be included. | [
"Create",
"a",
"new",
"Ent",
"representing",
"the",
"differences",
"in",
"two",
"or",
"more",
"existing",
"Ents",
".",
"Keys",
"in",
"the",
"later",
"Ents",
"with",
"values",
"that",
"differ",
"from",
"the",
"earlier",
"Ents",
"will",
"be",
"present",
"in",
"the",
"final",
"Ent",
"with",
"the",
"latest",
"value",
"seen",
"for",
"that",
"key",
".",
"Later",
"keys",
"of",
"different",
"type",
"than",
"in",
"earlier",
"Ents",
"will",
"be",
"bravely",
"ignored",
"."
] | 65f7c6498536c551ee1fdb43c3c429f24aa0f755 | https://github.com/jreese/ent/blob/65f7c6498536c551ee1fdb43c3c429f24aa0f755/ent/ent.py#L179-L226 | train |
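A hypothetical sketch for Ent.diff under the same assumptions as the Ent.merge example above (assumed import path and Ent.load(dict) support).

from ent import Ent  # assumed import path

old = Ent.load({'host': 'localhost', 'port': 8080})
new = Ent.load({'host': 'localhost', 'port': 9090, 'tls': True})

delta = Ent.diff(old, new)
print(delta.port)      # 9090 -- unchanged keys such as host are omitted
# keys that only exist in the later Ent appear only with newkeys=True
print('tls' in Ent.diff(old, new, newkeys=True).__dict__)  # True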
jreese/ent | ent/ent.py | Ent.subclasses | def subclasses(cls):
"""Return a set of all Ent subclasses, recursively."""
seen = set()
queue = set([cls])
while queue:
c = queue.pop()
seen.add(c)
sc = c.__subclasses__()
for c in sc:
if c not in seen:
queue.add(c)
seen.remove(cls)
return seen | python | def subclasses(cls):
"""Return a set of all Ent subclasses, recursively."""
seen = set()
queue = set([cls])
while queue:
c = queue.pop()
seen.add(c)
sc = c.__subclasses__()
for c in sc:
if c not in seen:
queue.add(c)
seen.remove(cls)
return seen | [
"def",
"subclasses",
"(",
"cls",
")",
":",
"seen",
"=",
"set",
"(",
")",
"queue",
"=",
"set",
"(",
"[",
"cls",
"]",
")",
"while",
"queue",
":",
"c",
"=",
"queue",
".",
"pop",
"(",
")",
"seen",
".",
"add",
"(",
"c",
")",
"sc",
"=",
"c",
".",
"__subclasses__",
"(",
")",
"for",
"c",
"in",
"sc",
":",
"if",
"c",
"not",
"in",
"seen",
":",
"queue",
".",
"add",
"(",
"c",
")",
"seen",
".",
"remove",
"(",
"cls",
")",
"return",
"seen"
] | Return a set of all Ent subclasses, recursively. | [
"Return",
"a",
"set",
"of",
"all",
"Ent",
"subclasses",
"recursively",
"."
] | 65f7c6498536c551ee1fdb43c3c429f24aa0f755 | https://github.com/jreese/ent/blob/65f7c6498536c551ee1fdb43c3c429f24aa0f755/ent/ent.py#L229-L244 | train |
atl/py-smartdc | smartdc/datacenter.py | DataCenter.base_url | def base_url(self):
"""Protocol + hostname"""
if self.location in self.known_locations:
return self.known_locations[self.location]
elif '.' in self.location or self.location == 'localhost':
return 'https://' + self.location
else:
return 'https://' + self.location + API_HOST_SUFFIX | python | def base_url(self):
"""Protocol + hostname"""
if self.location in self.known_locations:
return self.known_locations[self.location]
elif '.' in self.location or self.location == 'localhost':
return 'https://' + self.location
else:
return 'https://' + self.location + API_HOST_SUFFIX | [
"def",
"base_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"location",
"in",
"self",
".",
"known_locations",
":",
"return",
"self",
".",
"known_locations",
"[",
"self",
".",
"location",
"]",
"elif",
"'.'",
"in",
"self",
".",
"location",
"or",
"self",
".",
"location",
"==",
"'localhost'",
":",
"return",
"'https://'",
"+",
"self",
".",
"location",
"else",
":",
"return",
"'https://'",
"+",
"self",
".",
"location",
"+",
"API_HOST_SUFFIX"
] | Protocol + hostname | [
"Protocol",
"+",
"hostname"
] | cc5cd5910e19004cc46e376ce035affe28fc798e | https://github.com/atl/py-smartdc/blob/cc5cd5910e19004cc46e376ce035affe28fc798e/smartdc/datacenter.py#L201-L208 | train |
pmacosta/pexdoc | pexdoc/exh.py | _build_exclusion_list | def _build_exclusion_list(exclude):
"""Build file names list of modules to exclude from exception handling."""
mod_files = []
if exclude:
for mod in exclude:
mdir = None
mod_file = None
for token in mod.split("."):
try:
mfile, mdir, _ = imp.find_module(token, mdir and [mdir])
if mfile:
mod_file = mfile.name
mfile.close()
except ImportError:
msg = "Source for module {mod_name} could not be found"
raise ValueError(msg.format(mod_name=mod))
if mod_file:
mod_files.append(mod_file.replace(".pyc", ".py"))
return mod_files | python | def _build_exclusion_list(exclude):
"""Build file names list of modules to exclude from exception handling."""
mod_files = []
if exclude:
for mod in exclude:
mdir = None
mod_file = None
for token in mod.split("."):
try:
mfile, mdir, _ = imp.find_module(token, mdir and [mdir])
if mfile:
mod_file = mfile.name
mfile.close()
except ImportError:
msg = "Source for module {mod_name} could not be found"
raise ValueError(msg.format(mod_name=mod))
if mod_file:
mod_files.append(mod_file.replace(".pyc", ".py"))
return mod_files | [
"def",
"_build_exclusion_list",
"(",
"exclude",
")",
":",
"mod_files",
"=",
"[",
"]",
"if",
"exclude",
":",
"for",
"mod",
"in",
"exclude",
":",
"mdir",
"=",
"None",
"mod_file",
"=",
"None",
"for",
"token",
"in",
"mod",
".",
"split",
"(",
"\".\"",
")",
":",
"try",
":",
"mfile",
",",
"mdir",
",",
"_",
"=",
"imp",
".",
"find_module",
"(",
"token",
",",
"mdir",
"and",
"[",
"mdir",
"]",
")",
"if",
"mfile",
":",
"mod_file",
"=",
"mfile",
".",
"name",
"mfile",
".",
"close",
"(",
")",
"except",
"ImportError",
":",
"msg",
"=",
"\"Source for module {mod_name} could not be found\"",
"raise",
"ValueError",
"(",
"msg",
".",
"format",
"(",
"mod_name",
"=",
"mod",
")",
")",
"if",
"mod_file",
":",
"mod_files",
".",
"append",
"(",
"mod_file",
".",
"replace",
"(",
"\".pyc\"",
",",
"\".py\"",
")",
")",
"return",
"mod_files"
] | Build file names list of modules to exclude from exception handling. | [
"Build",
"file",
"names",
"list",
"of",
"modules",
"to",
"exclude",
"from",
"exception",
"handling",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L42-L60 | train |
pmacosta/pexdoc | pexdoc/exh.py | _invalid_frame | def _invalid_frame(fobj):
"""Select valid stack frame to process."""
fin = fobj.f_code.co_filename
invalid_module = any([fin.endswith(item) for item in _INVALID_MODULES_LIST])
return invalid_module or (not os.path.isfile(fin)) | python | def _invalid_frame(fobj):
"""Select valid stack frame to process."""
fin = fobj.f_code.co_filename
invalid_module = any([fin.endswith(item) for item in _INVALID_MODULES_LIST])
return invalid_module or (not os.path.isfile(fin)) | [
"def",
"_invalid_frame",
"(",
"fobj",
")",
":",
"fin",
"=",
"fobj",
".",
"f_code",
".",
"co_filename",
"invalid_module",
"=",
"any",
"(",
"[",
"fin",
".",
"endswith",
"(",
"item",
")",
"for",
"item",
"in",
"_INVALID_MODULES_LIST",
"]",
")",
"return",
"invalid_module",
"or",
"(",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"fin",
")",
")"
] | Select valid stack frame to process. | [
"Select",
"valid",
"stack",
"frame",
"to",
"process",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L63-L67 | train |
pmacosta/pexdoc | pexdoc/exh.py | _sorted_keys_items | def _sorted_keys_items(dobj):
"""Return dictionary items sorted by key."""
keys = sorted(dobj.keys())
for key in keys:
yield key, dobj[key] | python | def _sorted_keys_items(dobj):
"""Return dictionary items sorted by key."""
keys = sorted(dobj.keys())
for key in keys:
yield key, dobj[key] | [
"def",
"_sorted_keys_items",
"(",
"dobj",
")",
":",
"keys",
"=",
"sorted",
"(",
"dobj",
".",
"keys",
"(",
")",
")",
"for",
"key",
"in",
"keys",
":",
"yield",
"key",
",",
"dobj",
"[",
"key",
"]"
] | Return dictionary items sorted by key. | [
"Return",
"dictionary",
"items",
"sorted",
"by",
"key",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L150-L154 | train |
pmacosta/pexdoc | pexdoc/exh.py | addex | def addex(extype, exmsg, condition=None, edata=None):
r"""
Add an exception in the global exception handler.
:param extype: Exception type; *must* be derived from the `Exception
<https://docs.python.org/2/library/exceptions.html#
exceptions.Exception>`_ class
:type extype: Exception type object, i.e. RuntimeError, TypeError,
etc.
:param exmsg: Exception message; it can contain fields to be replaced
when the exception is raised via
:py:meth:`pexdoc.ExHandle.raise_exception_if`.
A field starts with the characters :code:`'\*['` and
ends with the characters :code:`']\*'`, the field name
follows the same rules as variable names and is between
these two sets of characters. For example,
:code:`'\*[fname]\*'` defines the fname field
:type exmsg: string
:param condition: Flag that indicates whether the exception is
raised *(True)* or not *(False)*. If None the
flag is not used an no exception is raised
:type condition: boolean or None
:param edata: Replacement values for fields in the exception message
(see :py:meth:`pexdoc.ExHandle.add_exception` for how
to define fields). Each dictionary entry can only have
these two keys:
* **field** *(string)* -- Field name
* **value** *(any)* -- Field value, to be converted into
a string with the `format
<https://docs.python.org/2/library/stdtypes.html#
str.format>`_ string method
If None no field replacement is done
:rtype: (if condition is not given or None) function
:raises:
* RuntimeError (Argument \`condition\` is not valid)
* RuntimeError (Argument \`edata\` is not valid)
* RuntimeError (Argument \`exmsg\` is not valid)
* RuntimeError (Argument \`extype\` is not valid)
"""
return _ExObj(extype, exmsg, condition, edata).craise | python | def addex(extype, exmsg, condition=None, edata=None):
r"""
Add an exception in the global exception handler.
:param extype: Exception type; *must* be derived from the `Exception
<https://docs.python.org/2/library/exceptions.html#
exceptions.Exception>`_ class
:type extype: Exception type object, i.e. RuntimeError, TypeError,
etc.
:param exmsg: Exception message; it can contain fields to be replaced
when the exception is raised via
:py:meth:`pexdoc.ExHandle.raise_exception_if`.
A field starts with the characters :code:`'\*['` and
ends with the characters :code:`']\*'`, the field name
follows the same rules as variable names and is between
these two sets of characters. For example,
:code:`'\*[fname]\*'` defines the fname field
:type exmsg: string
:param condition: Flag that indicates whether the exception is
raised *(True)* or not *(False)*. If None the
flag is not used and no exception is raised
:type condition: boolean or None
:param edata: Replacement values for fields in the exception message
(see :py:meth:`pexdoc.ExHandle.add_exception` for how
to define fields). Each dictionary entry can only have
these two keys:
* **field** *(string)* -- Field name
* **value** *(any)* -- Field value, to be converted into
a string with the `format
<https://docs.python.org/2/library/stdtypes.html#
str.format>`_ string method
If None no field replacement is done
:rtype: (if condition is not given or None) function
:raises:
* RuntimeError (Argument \`condition\` is not valid)
* RuntimeError (Argument \`edata\` is not valid)
* RuntimeError (Argument \`exmsg\` is not valid)
* RuntimeError (Argument \`extype\` is not valid)
"""
return _ExObj(extype, exmsg, condition, edata).craise | [
"def",
"addex",
"(",
"extype",
",",
"exmsg",
",",
"condition",
"=",
"None",
",",
"edata",
"=",
"None",
")",
":",
"return",
"_ExObj",
"(",
"extype",
",",
"exmsg",
",",
"condition",
",",
"edata",
")",
".",
"craise"
] | r"""
Add an exception in the global exception handler.
:param extype: Exception type; *must* be derived from the `Exception
<https://docs.python.org/2/library/exceptions.html#
exceptions.Exception>`_ class
:type extype: Exception type object, i.e. RuntimeError, TypeError,
etc.
:param exmsg: Exception message; it can contain fields to be replaced
when the exception is raised via
:py:meth:`pexdoc.ExHandle.raise_exception_if`.
A field starts with the characters :code:`'\*['` and
ends with the characters :code:`']\*'`, the field name
follows the same rules as variable names and is between
these two sets of characters. For example,
:code:`'\*[fname]\*'` defines the fname field
:type exmsg: string
:param condition: Flag that indicates whether the exception is
raised *(True)* or not *(False)*. If None the
flag is not used an no exception is raised
:type condition: boolean or None
:param edata: Replacement values for fields in the exception message
(see :py:meth:`pexdoc.ExHandle.add_exception` for how
to define fields). Each dictionary entry can only have
these two keys:
* **field** *(string)* -- Field name
* **value** *(any)* -- Field value, to be converted into
a string with the `format
<https://docs.python.org/2/library/stdtypes.html#
str.format>`_ string method
If None no field replacement is done
:rtype: (if condition is not given or None) function
:raises:
* RuntimeError (Argument \`condition\` is not valid)
* RuntimeError (Argument \`edata\` is not valid)
* RuntimeError (Argument \`exmsg\` is not valid)
* RuntimeError (Argument \`extype\` is not valid) | [
"r",
"Add",
"an",
"exception",
"in",
"the",
"global",
"exception",
"handler",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L157-L207 | train |
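A hypothetical usage sketch for addex; the `from pexdoc.exh import addex` import is inferred from the entry's file path, and invoking the returned callable with the boolean condition is an assumption about the craise interface.

import math
from pexdoc.exh import addex  # import path inferred from the entry's file path

def safe_log(value):
    # Register the exception once; the returned callable is assumed to raise
    # it when the condition passed to it is True.
    badval = addex(ValueError, 'Argument `value` must be positive')
    badval(value <= 0)
    return math.log(value)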
pmacosta/pexdoc | pexdoc/exh.py | addai | def addai(argname, condition=None):
r"""
Add an "AI" exception in the global exception handler.
An "AI" exception is of the type :code:`RuntimeError('Argument
\`*[argname]*\` is not valid')` where :code:`*[argname]*` is the value of
the **argname** argument
:param argname: Argument name
:type argname: string
:param condition: Flag that indicates whether the exception is
raised *(True)* or not *(False)*. If None the flag is not
used and no exception is raised
:type condition: boolean or None
:rtype: (if condition is not given or None) function
:raises:
* RuntimeError (Argument \`argname\` is not valid)
* RuntimeError (Argument \`condition\` is not valid)
"""
# pylint: disable=C0123
if not isinstance(argname, str):
raise RuntimeError("Argument `argname` is not valid")
if (condition is not None) and (type(condition) != bool):
raise RuntimeError("Argument `condition` is not valid")
obj = _ExObj(RuntimeError, "Argument `{0}` is not valid".format(argname), condition)
return obj.craise | python | def addai(argname, condition=None):
r"""
Add an "AI" exception in the global exception handler.
An "AI" exception is of the type :code:`RuntimeError('Argument
\`*[argname]*\` is not valid')` where :code:`*[argname]*` is the value of
the **argname** argument
:param argname: Argument name
:type argname: string
:param condition: Flag that indicates whether the exception is
raised *(True)* or not *(False)*. If None the flag is not
used and no exception is raised
:type condition: boolean or None
:rtype: (if condition is not given or None) function
:raises:
* RuntimeError (Argument \`argname\` is not valid)
* RuntimeError (Argument \`condition\` is not valid)
"""
# pylint: disable=C0123
if not isinstance(argname, str):
raise RuntimeError("Argument `argname` is not valid")
if (condition is not None) and (type(condition) != bool):
raise RuntimeError("Argument `condition` is not valid")
obj = _ExObj(RuntimeError, "Argument `{0}` is not valid".format(argname), condition)
return obj.craise | [
"def",
"addai",
"(",
"argname",
",",
"condition",
"=",
"None",
")",
":",
"# pylint: disable=C0123",
"if",
"not",
"isinstance",
"(",
"argname",
",",
"str",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `argname` is not valid\"",
")",
"if",
"(",
"condition",
"is",
"not",
"None",
")",
"and",
"(",
"type",
"(",
"condition",
")",
"!=",
"bool",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `condition` is not valid\"",
")",
"obj",
"=",
"_ExObj",
"(",
"RuntimeError",
",",
"\"Argument `{0}` is not valid\"",
".",
"format",
"(",
"argname",
")",
",",
"condition",
")",
"return",
"obj",
".",
"craise"
] | r"""
Add an "AI" exception in the global exception handler.
An "AI" exception is of the type :code:`RuntimeError('Argument
\`*[argname]*\` is not valid')` where :code:`*[argname]*` is the value of
the **argname** argument
:param argname: Argument name
:type argname: string
:param condition: Flag that indicates whether the exception is
raised *(True)* or not *(False)*. If None the flag is not
used and no exception is raised
:type condition: boolean or None
:rtype: (if condition is not given or None) function
:raises:
* RuntimeError (Argument \`argname\` is not valid)
* RuntimeError (Argument \`condition\` is not valid) | [
"r",
"Add",
"an",
"AI",
"exception",
"in",
"the",
"global",
"exception",
"handler",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L210-L239 | train |
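A hypothetical sketch for addai under the same import and calling-convention assumptions as the addex example above.

from pexdoc.exh import addai  # assumed import path

def set_port(port):
    check = addai('port')                 # RuntimeError('Argument `port` is not valid')
    check(not isinstance(port, int))      # raised only when the condition is True (assumed convention)
    return port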
pmacosta/pexdoc | pexdoc/exh.py | get_or_create_exh_obj | def get_or_create_exh_obj(full_cname=False, exclude=None, callables_fname=None):
r"""
Return global exception handler if set, otherwise create a new one and return it.
:param full_cname: Flag that indicates whether fully qualified
function/method/class property names are obtained for
functions/methods/class properties that use the
exception manager (True) or not (False).
There is a performance penalty if the flag is True as
the call stack needs to be traced. This argument is
only relevant if the global exception handler is not
set and a new one is created
:type full_cname: boolean
:param exclude: Module exclusion list. A particular callable in an
otherwise fully qualified name is omitted if it belongs
to a module in this list. If None all callables are
included
:type exclude: list of strings or None
:param callables_fname: File name that contains traced modules information.
File can be produced by either the
:py:meth:`pexdoc.pinspect.Callables.save` or
:py:meth:`pexdoc.ExHandle.save_callables`
methods
:type callables_fname: :ref:`FileNameExists` or None
:rtype: :py:class:`pexdoc.ExHandle`
:raises:
* OSError (File *[callables_fname]* could not be found)
* RuntimeError (Argument \\`exclude\\` is not valid)
* RuntimeError (Argument \\`callables_fname\\` is not valid)
* RuntimeError (Argument \\`full_cname\\` is not valid)
"""
if not hasattr(__builtin__, "_EXH"):
set_exh_obj(
ExHandle(
full_cname=full_cname, exclude=exclude, callables_fname=callables_fname
)
)
return get_exh_obj() | python | def get_or_create_exh_obj(full_cname=False, exclude=None, callables_fname=None):
r"""
Return global exception handler if set, otherwise create a new one and return it.
:param full_cname: Flag that indicates whether fully qualified
function/method/class property names are obtained for
functions/methods/class properties that use the
exception manager (True) or not (False).
There is a performance penalty if the flag is True as
the call stack needs to be traced. This argument is
only relevant if the global exception handler is not
set and a new one is created
:type full_cname: boolean
:param exclude: Module exclusion list. A particular callable in an
otherwise fully qualified name is omitted if it belongs
to a module in this list. If None all callables are
included
:type exclude: list of strings or None
:param callables_fname: File name that contains traced modules information.
File can be produced by either the
:py:meth:`pexdoc.pinspect.Callables.save` or
:py:meth:`pexdoc.ExHandle.save_callables`
methods
:type callables_fname: :ref:`FileNameExists` or None
:rtype: :py:class:`pexdoc.ExHandle`
:raises:
* OSError (File *[callables_fname]* could not be found)
* RuntimeError (Argument \\`exclude\\` is not valid)
* RuntimeError (Argument \\`callables_fname\\` is not valid)
* RuntimeError (Argument \\`full_cname\\` is not valid)
"""
if not hasattr(__builtin__, "_EXH"):
set_exh_obj(
ExHandle(
full_cname=full_cname, exclude=exclude, callables_fname=callables_fname
)
)
return get_exh_obj() | [
"def",
"get_or_create_exh_obj",
"(",
"full_cname",
"=",
"False",
",",
"exclude",
"=",
"None",
",",
"callables_fname",
"=",
"None",
")",
":",
"if",
"not",
"hasattr",
"(",
"__builtin__",
",",
"\"_EXH\"",
")",
":",
"set_exh_obj",
"(",
"ExHandle",
"(",
"full_cname",
"=",
"full_cname",
",",
"exclude",
"=",
"exclude",
",",
"callables_fname",
"=",
"callables_fname",
")",
")",
"return",
"get_exh_obj",
"(",
")"
] | r"""
Return global exception handler if set, otherwise create a new one and return it.
:param full_cname: Flag that indicates whether fully qualified
function/method/class property names are obtained for
functions/methods/class properties that use the
exception manager (True) or not (False).
There is a performance penalty if the flag is True as
the call stack needs to be traced. This argument is
only relevant if the global exception handler is not
set and a new one is created
:type full_cname: boolean
:param exclude: Module exclusion list. A particular callable in an
otherwise fully qualified name is omitted if it belongs
to a module in this list. If None all callables are
included
:type exclude: list of strings or None
:param callables_fname: File name that contains traced modules information.
File can be produced by either the
:py:meth:`pexdoc.pinspect.Callables.save` or
:py:meth:`pexdoc.ExHandle.save_callables`
methods
:type callables_fname: :ref:`FileNameExists` or None
:rtype: :py:class:`pexdoc.ExHandle`
:raises:
* OSError (File *[callables_fname]* could not be found)
* RuntimeError (Argument \\`exclude\\` is not valid)
* RuntimeError (Argument \\`callables_fname\\` is not valid)
* RuntimeError (Argument \\`full_cname\\` is not valid) | [
"r",
"Return",
"global",
"exception",
"handler",
"if",
"set",
"otherwise",
"create",
"a",
"new",
"one",
"and",
"return",
"it",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L260-L305 | train |
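A short sketch of the global-handler workflow; get_or_create_exh_obj and add_exception are both defined in this file, while the exact module import is an assumption.

import pexdoc.exh  # assumed module import

exhobj = pexdoc.exh.get_or_create_exh_obj(full_cname=True)
# add_exception (defined later in this file) registers an exception whose
# message carries a replaceable *[fname]* field.
exhobj.add_exception('bad_fname', OSError, 'File *[fname]* could not be found')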
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._flatten_ex_dict | def _flatten_ex_dict(self):
"""Flatten structure of exceptions dictionary."""
odict = {}
for _, fdict in self._ex_dict.items():
for (extype, exmsg), value in fdict.items():
key = value["name"]
odict[key] = copy.deepcopy(value)
del odict[key]["name"]
odict[key]["type"] = extype
odict[key]["msg"] = exmsg
return odict | python | def _flatten_ex_dict(self):
"""Flatten structure of exceptions dictionary."""
odict = {}
for _, fdict in self._ex_dict.items():
for (extype, exmsg), value in fdict.items():
key = value["name"]
odict[key] = copy.deepcopy(value)
del odict[key]["name"]
odict[key]["type"] = extype
odict[key]["msg"] = exmsg
return odict | [
"def",
"_flatten_ex_dict",
"(",
"self",
")",
":",
"odict",
"=",
"{",
"}",
"for",
"_",
",",
"fdict",
"in",
"self",
".",
"_ex_dict",
".",
"items",
"(",
")",
":",
"for",
"(",
"extype",
",",
"exmsg",
")",
",",
"value",
"in",
"fdict",
".",
"items",
"(",
")",
":",
"key",
"=",
"value",
"[",
"\"name\"",
"]",
"odict",
"[",
"key",
"]",
"=",
"copy",
".",
"deepcopy",
"(",
"value",
")",
"del",
"odict",
"[",
"key",
"]",
"[",
"\"name\"",
"]",
"odict",
"[",
"key",
"]",
"[",
"\"type\"",
"]",
"=",
"extype",
"odict",
"[",
"key",
"]",
"[",
"\"msg\"",
"]",
"=",
"exmsg",
"return",
"odict"
] | Flatten structure of exceptions dictionary. | [
"Flatten",
"structure",
"of",
"exceptions",
"dictionary",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L804-L814 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._format_msg | def _format_msg(self, msg, edata):
"""Substitute parameters in exception message."""
edata = edata if isinstance(edata, list) else [edata]
for fdict in edata:
if "*[{token}]*".format(token=fdict["field"]) not in msg:
raise RuntimeError(
"Field {token} not in exception message".format(
token=fdict["field"]
)
)
msg = msg.replace(
"*[{token}]*".format(token=fdict["field"]), "{value}"
).format(value=fdict["value"])
return msg | python | def _format_msg(self, msg, edata):
"""Substitute parameters in exception message."""
edata = edata if isinstance(edata, list) else [edata]
for fdict in edata:
if "*[{token}]*".format(token=fdict["field"]) not in msg:
raise RuntimeError(
"Field {token} not in exception message".format(
token=fdict["field"]
)
)
msg = msg.replace(
"*[{token}]*".format(token=fdict["field"]), "{value}"
).format(value=fdict["value"])
return msg | [
"def",
"_format_msg",
"(",
"self",
",",
"msg",
",",
"edata",
")",
":",
"edata",
"=",
"edata",
"if",
"isinstance",
"(",
"edata",
",",
"list",
")",
"else",
"[",
"edata",
"]",
"for",
"fdict",
"in",
"edata",
":",
"if",
"\"*[{token}]*\"",
".",
"format",
"(",
"token",
"=",
"fdict",
"[",
"\"field\"",
"]",
")",
"not",
"in",
"msg",
":",
"raise",
"RuntimeError",
"(",
"\"Field {token} not in exception message\"",
".",
"format",
"(",
"token",
"=",
"fdict",
"[",
"\"field\"",
"]",
")",
")",
"msg",
"=",
"msg",
".",
"replace",
"(",
"\"*[{token}]*\"",
".",
"format",
"(",
"token",
"=",
"fdict",
"[",
"\"field\"",
"]",
")",
",",
"\"{value}\"",
")",
".",
"format",
"(",
"value",
"=",
"fdict",
"[",
"\"value\"",
"]",
")",
"return",
"msg"
] | Substitute parameters in exception message. | [
"Substitute",
"parameters",
"in",
"exception",
"message",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L816-L829 | train |
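Standalone sketch of the *[field]* substitution that _format_msg performs, kept independent of pexdoc so it runs as-is; it mirrors the replace/format steps from the entry but omits the missing-field check.

def format_msg(msg, edata):
    # Same substitution as ExHandle._format_msg above, minus the error path.
    edata = edata if isinstance(edata, list) else [edata]
    for fdict in edata:
        msg = msg.replace('*[{0}]*'.format(fdict['field']), '{value}').format(value=fdict['value'])
    return msg

print(format_msg('File *[fname]* could not be found', {'field': 'fname', 'value': 'a.txt'}))
# File a.txt could not be found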
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._get_exceptions_db | def _get_exceptions_db(self):
"""Return a list of dictionaries suitable to be used with ptrie module."""
template = "{extype} ({exmsg}){raised}"
if not self._full_cname:
# When full callable name is not used the calling path is
# irrelevant and there is no function associated with an
# exception
ret = []
for _, fdict in self._ex_dict.items():
for key in fdict.keys():
ret.append(
{
"name": fdict[key]["name"],
"data": template.format(
extype=_ex_type_str(key[0]),
exmsg=key[1],
raised="*" if fdict[key]["raised"][0] else "",
),
}
)
return ret
# When full callable name is used, all calling paths are saved
ret = []
for fdict in self._ex_dict.values():
for key in fdict.keys():
for func_name in fdict[key]["function"]:
rindex = fdict[key]["function"].index(func_name)
raised = fdict[key]["raised"][rindex]
ret.append(
{
"name": self.decode_call(func_name),
"data": template.format(
extype=_ex_type_str(key[0]),
exmsg=key[1],
raised="*" if raised else "",
),
}
)
return ret | python | def _get_exceptions_db(self):
"""Return a list of dictionaries suitable to be used with ptrie module."""
template = "{extype} ({exmsg}){raised}"
if not self._full_cname:
# When full callable name is not used the calling path is
# irrelevant and there is no function associated with an
# exception
ret = []
for _, fdict in self._ex_dict.items():
for key in fdict.keys():
ret.append(
{
"name": fdict[key]["name"],
"data": template.format(
extype=_ex_type_str(key[0]),
exmsg=key[1],
raised="*" if fdict[key]["raised"][0] else "",
),
}
)
return ret
# When full callable name is used, all calling paths are saved
ret = []
for fdict in self._ex_dict.values():
for key in fdict.keys():
for func_name in fdict[key]["function"]:
rindex = fdict[key]["function"].index(func_name)
raised = fdict[key]["raised"][rindex]
ret.append(
{
"name": self.decode_call(func_name),
"data": template.format(
extype=_ex_type_str(key[0]),
exmsg=key[1],
raised="*" if raised else "",
),
}
)
return ret | [
"def",
"_get_exceptions_db",
"(",
"self",
")",
":",
"template",
"=",
"\"{extype} ({exmsg}){raised}\"",
"if",
"not",
"self",
".",
"_full_cname",
":",
"# When full callable name is not used the calling path is",
"# irrelevant and there is no function associated with an",
"# exception",
"ret",
"=",
"[",
"]",
"for",
"_",
",",
"fdict",
"in",
"self",
".",
"_ex_dict",
".",
"items",
"(",
")",
":",
"for",
"key",
"in",
"fdict",
".",
"keys",
"(",
")",
":",
"ret",
".",
"append",
"(",
"{",
"\"name\"",
":",
"fdict",
"[",
"key",
"]",
"[",
"\"name\"",
"]",
",",
"\"data\"",
":",
"template",
".",
"format",
"(",
"extype",
"=",
"_ex_type_str",
"(",
"key",
"[",
"0",
"]",
")",
",",
"exmsg",
"=",
"key",
"[",
"1",
"]",
",",
"raised",
"=",
"\"*\"",
"if",
"fdict",
"[",
"key",
"]",
"[",
"\"raised\"",
"]",
"[",
"0",
"]",
"else",
"\"\"",
",",
")",
",",
"}",
")",
"return",
"ret",
"# When full callable name is used, all calling paths are saved",
"ret",
"=",
"[",
"]",
"for",
"fdict",
"in",
"self",
".",
"_ex_dict",
".",
"values",
"(",
")",
":",
"for",
"key",
"in",
"fdict",
".",
"keys",
"(",
")",
":",
"for",
"func_name",
"in",
"fdict",
"[",
"key",
"]",
"[",
"\"function\"",
"]",
":",
"rindex",
"=",
"fdict",
"[",
"key",
"]",
"[",
"\"function\"",
"]",
".",
"index",
"(",
"func_name",
")",
"raised",
"=",
"fdict",
"[",
"key",
"]",
"[",
"\"raised\"",
"]",
"[",
"rindex",
"]",
"ret",
".",
"append",
"(",
"{",
"\"name\"",
":",
"self",
".",
"decode_call",
"(",
"func_name",
")",
",",
"\"data\"",
":",
"template",
".",
"format",
"(",
"extype",
"=",
"_ex_type_str",
"(",
"key",
"[",
"0",
"]",
")",
",",
"exmsg",
"=",
"key",
"[",
"1",
"]",
",",
"raised",
"=",
"\"*\"",
"if",
"raised",
"else",
"\"\"",
",",
")",
",",
"}",
")",
"return",
"ret"
] | Return a list of dictionaries suitable to be used with ptrie module. | [
"Return",
"a",
"list",
"of",
"dictionaries",
"suitable",
"to",
"be",
"used",
"with",
"ptrie",
"module",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1025-L1063 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._get_ex_data | def _get_ex_data(self):
"""Return hierarchical function name."""
func_id, func_name = self._get_callable_path()
if self._full_cname:
func_name = self.encode_call(func_name)
return func_id, func_name | python | def _get_ex_data(self):
"""Return hierarchical function name."""
func_id, func_name = self._get_callable_path()
if self._full_cname:
func_name = self.encode_call(func_name)
return func_id, func_name | [
"def",
"_get_ex_data",
"(",
"self",
")",
":",
"func_id",
",",
"func_name",
"=",
"self",
".",
"_get_callable_path",
"(",
")",
"if",
"self",
".",
"_full_cname",
":",
"func_name",
"=",
"self",
".",
"encode_call",
"(",
"func_name",
")",
"return",
"func_id",
",",
"func_name"
] | Return hierarchical function name. | [
"Return",
"hierarchical",
"function",
"name",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1065-L1070 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._property_search | def _property_search(self, fobj):
"""Return full name if object is a class property, otherwise return None."""
# Get class object
scontext = fobj.f_locals.get("self", None)
class_obj = scontext.__class__ if scontext is not None else None
if not class_obj:
del fobj, scontext, class_obj
return None
# Get class properties objects
class_props = [
(member_name, member_obj)
for member_name, member_obj in inspect.getmembers(class_obj)
if isinstance(member_obj, property)
]
if not class_props:
del fobj, scontext, class_obj
return None
class_file = inspect.getfile(class_obj).replace(".pyc", ".py")
class_name = self._callables_obj.get_callable_from_line(
class_file, inspect.getsourcelines(class_obj)[1]
)
# Get properties actions
prop_actions_dicts = {}
for prop_name, prop_obj in class_props:
prop_dict = {"fdel": None, "fget": None, "fset": None}
for action in prop_dict:
action_obj = getattr(prop_obj, action)
if action_obj:
# Unwrap action object. Contracts match the wrapped
# code object while exceptions registered in the
# body of the function/method which has decorators
# match the unwrapped object
prev_func_obj, next_func_obj = (
action_obj,
getattr(action_obj, "__wrapped__", None),
)
while next_func_obj:
prev_func_obj, next_func_obj = (
next_func_obj,
getattr(next_func_obj, "__wrapped__", None),
)
prop_dict[action] = [
id(_get_func_code(action_obj)),
id(_get_func_code(prev_func_obj)),
]
prop_actions_dicts[prop_name] = prop_dict
# Create properties directory
func_id = id(fobj.f_code)
desc_dict = {"fget": "getter", "fset": "setter", "fdel": "deleter"}
for prop_name, prop_actions_dict in prop_actions_dicts.items():
for action_name, action_id_list in prop_actions_dict.items():
if action_id_list and (func_id in action_id_list):
prop_name = ".".join([class_name, prop_name])
del fobj, scontext, class_obj, class_props
return "{prop_name}({prop_action})".format(
prop_name=prop_name, prop_action=desc_dict[action_name]
)
return None | python | def _property_search(self, fobj):
"""Return full name if object is a class property, otherwise return None."""
# Get class object
scontext = fobj.f_locals.get("self", None)
class_obj = scontext.__class__ if scontext is not None else None
if not class_obj:
del fobj, scontext, class_obj
return None
# Get class properties objects
class_props = [
(member_name, member_obj)
for member_name, member_obj in inspect.getmembers(class_obj)
if isinstance(member_obj, property)
]
if not class_props:
del fobj, scontext, class_obj
return None
class_file = inspect.getfile(class_obj).replace(".pyc", ".py")
class_name = self._callables_obj.get_callable_from_line(
class_file, inspect.getsourcelines(class_obj)[1]
)
# Get properties actions
prop_actions_dicts = {}
for prop_name, prop_obj in class_props:
prop_dict = {"fdel": None, "fget": None, "fset": None}
for action in prop_dict:
action_obj = getattr(prop_obj, action)
if action_obj:
# Unwrap action object. Contracts match the wrapped
# code object while exceptions registered in the
# body of the function/method which has decorators
# match the unwrapped object
prev_func_obj, next_func_obj = (
action_obj,
getattr(action_obj, "__wrapped__", None),
)
while next_func_obj:
prev_func_obj, next_func_obj = (
next_func_obj,
getattr(next_func_obj, "__wrapped__", None),
)
prop_dict[action] = [
id(_get_func_code(action_obj)),
id(_get_func_code(prev_func_obj)),
]
prop_actions_dicts[prop_name] = prop_dict
# Create properties directory
func_id = id(fobj.f_code)
desc_dict = {"fget": "getter", "fset": "setter", "fdel": "deleter"}
for prop_name, prop_actions_dict in prop_actions_dicts.items():
for action_name, action_id_list in prop_actions_dict.items():
if action_id_list and (func_id in action_id_list):
prop_name = ".".join([class_name, prop_name])
del fobj, scontext, class_obj, class_props
return "{prop_name}({prop_action})".format(
prop_name=prop_name, prop_action=desc_dict[action_name]
)
return None | [
"def",
"_property_search",
"(",
"self",
",",
"fobj",
")",
":",
"# Get class object",
"scontext",
"=",
"fobj",
".",
"f_locals",
".",
"get",
"(",
"\"self\"",
",",
"None",
")",
"class_obj",
"=",
"scontext",
".",
"__class__",
"if",
"scontext",
"is",
"not",
"None",
"else",
"None",
"if",
"not",
"class_obj",
":",
"del",
"fobj",
",",
"scontext",
",",
"class_obj",
"return",
"None",
"# Get class properties objects",
"class_props",
"=",
"[",
"(",
"member_name",
",",
"member_obj",
")",
"for",
"member_name",
",",
"member_obj",
"in",
"inspect",
".",
"getmembers",
"(",
"class_obj",
")",
"if",
"isinstance",
"(",
"member_obj",
",",
"property",
")",
"]",
"if",
"not",
"class_props",
":",
"del",
"fobj",
",",
"scontext",
",",
"class_obj",
"return",
"None",
"class_file",
"=",
"inspect",
".",
"getfile",
"(",
"class_obj",
")",
".",
"replace",
"(",
"\".pyc\"",
",",
"\".py\"",
")",
"class_name",
"=",
"self",
".",
"_callables_obj",
".",
"get_callable_from_line",
"(",
"class_file",
",",
"inspect",
".",
"getsourcelines",
"(",
"class_obj",
")",
"[",
"1",
"]",
")",
"# Get properties actions",
"prop_actions_dicts",
"=",
"{",
"}",
"for",
"prop_name",
",",
"prop_obj",
"in",
"class_props",
":",
"prop_dict",
"=",
"{",
"\"fdel\"",
":",
"None",
",",
"\"fget\"",
":",
"None",
",",
"\"fset\"",
":",
"None",
"}",
"for",
"action",
"in",
"prop_dict",
":",
"action_obj",
"=",
"getattr",
"(",
"prop_obj",
",",
"action",
")",
"if",
"action_obj",
":",
"# Unwrap action object. Contracts match the wrapped",
"# code object while exceptions registered in the",
"# body of the function/method which has decorators",
"# match the unwrapped object",
"prev_func_obj",
",",
"next_func_obj",
"=",
"(",
"action_obj",
",",
"getattr",
"(",
"action_obj",
",",
"\"__wrapped__\"",
",",
"None",
")",
",",
")",
"while",
"next_func_obj",
":",
"prev_func_obj",
",",
"next_func_obj",
"=",
"(",
"next_func_obj",
",",
"getattr",
"(",
"next_func_obj",
",",
"\"__wrapped__\"",
",",
"None",
")",
",",
")",
"prop_dict",
"[",
"action",
"]",
"=",
"[",
"id",
"(",
"_get_func_code",
"(",
"action_obj",
")",
")",
",",
"id",
"(",
"_get_func_code",
"(",
"prev_func_obj",
")",
")",
",",
"]",
"prop_actions_dicts",
"[",
"prop_name",
"]",
"=",
"prop_dict",
"# Create properties directory",
"func_id",
"=",
"id",
"(",
"fobj",
".",
"f_code",
")",
"desc_dict",
"=",
"{",
"\"fget\"",
":",
"\"getter\"",
",",
"\"fset\"",
":",
"\"setter\"",
",",
"\"fdel\"",
":",
"\"deleter\"",
"}",
"for",
"prop_name",
",",
"prop_actions_dict",
"in",
"prop_actions_dicts",
".",
"items",
"(",
")",
":",
"for",
"action_name",
",",
"action_id_list",
"in",
"prop_actions_dict",
".",
"items",
"(",
")",
":",
"if",
"action_id_list",
"and",
"(",
"func_id",
"in",
"action_id_list",
")",
":",
"prop_name",
"=",
"\".\"",
".",
"join",
"(",
"[",
"class_name",
",",
"prop_name",
"]",
")",
"del",
"fobj",
",",
"scontext",
",",
"class_obj",
",",
"class_props",
"return",
"\"{prop_name}({prop_action})\"",
".",
"format",
"(",
"prop_name",
"=",
"prop_name",
",",
"prop_action",
"=",
"desc_dict",
"[",
"action_name",
"]",
")",
"return",
"None"
] | Return full name if object is a class property, otherwise return None. | [
"Return",
"full",
"name",
"if",
"object",
"is",
"a",
"class",
"property",
"otherwise",
"return",
"None",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1072-L1129 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._raise_exception | def _raise_exception(self, eobj, edata=None):
"""Raise exception by name."""
_, _, tbobj = sys.exc_info()
if edata:
emsg = self._format_msg(eobj["msg"], edata)
_rwtb(eobj["type"], emsg, tbobj)
else:
_rwtb(eobj["type"], eobj["msg"], tbobj) | python | def _raise_exception(self, eobj, edata=None):
"""Raise exception by name."""
_, _, tbobj = sys.exc_info()
if edata:
emsg = self._format_msg(eobj["msg"], edata)
_rwtb(eobj["type"], emsg, tbobj)
else:
_rwtb(eobj["type"], eobj["msg"], tbobj) | [
"def",
"_raise_exception",
"(",
"self",
",",
"eobj",
",",
"edata",
"=",
"None",
")",
":",
"_",
",",
"_",
",",
"tbobj",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"edata",
":",
"emsg",
"=",
"self",
".",
"_format_msg",
"(",
"eobj",
"[",
"\"msg\"",
"]",
",",
"edata",
")",
"_rwtb",
"(",
"eobj",
"[",
"\"type\"",
"]",
",",
"emsg",
",",
"tbobj",
")",
"else",
":",
"_rwtb",
"(",
"eobj",
"[",
"\"type\"",
"]",
",",
"eobj",
"[",
"\"msg\"",
"]",
",",
"tbobj",
")"
] | Raise exception by name. | [
"Raise",
"exception",
"by",
"name",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1131-L1138 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._unwrap_obj | def _unwrap_obj(self, fobj, fun):
"""Unwrap decorators."""
try:
prev_func_obj, next_func_obj = (
fobj.f_globals[fun],
getattr(fobj.f_globals[fun], "__wrapped__", None),
)
while next_func_obj:
prev_func_obj, next_func_obj = (
next_func_obj,
getattr(next_func_obj, "__wrapped__", None),
)
return (prev_func_obj, inspect.getfile(prev_func_obj).replace(".pyc", "py"))
except (KeyError, AttributeError, TypeError):
# KeyErrror: fun not in fobj.f_globals
# AttributeError: fobj.f_globals does not have
# a __wrapped__ attribute
# TypeError: pref_func_obj does not have a file associated with it
return None, None | python | def _unwrap_obj(self, fobj, fun):
"""Unwrap decorators."""
try:
prev_func_obj, next_func_obj = (
fobj.f_globals[fun],
getattr(fobj.f_globals[fun], "__wrapped__", None),
)
while next_func_obj:
prev_func_obj, next_func_obj = (
next_func_obj,
getattr(next_func_obj, "__wrapped__", None),
)
return (prev_func_obj, inspect.getfile(prev_func_obj).replace(".pyc", "py"))
except (KeyError, AttributeError, TypeError):
# KeyErrror: fun not in fobj.f_globals
# AttributeError: fobj.f_globals does not have
# a __wrapped__ attribute
# TypeError: pref_func_obj does not have a file associated with it
return None, None | [
"def",
"_unwrap_obj",
"(",
"self",
",",
"fobj",
",",
"fun",
")",
":",
"try",
":",
"prev_func_obj",
",",
"next_func_obj",
"=",
"(",
"fobj",
".",
"f_globals",
"[",
"fun",
"]",
",",
"getattr",
"(",
"fobj",
".",
"f_globals",
"[",
"fun",
"]",
",",
"\"__wrapped__\"",
",",
"None",
")",
",",
")",
"while",
"next_func_obj",
":",
"prev_func_obj",
",",
"next_func_obj",
"=",
"(",
"next_func_obj",
",",
"getattr",
"(",
"next_func_obj",
",",
"\"__wrapped__\"",
",",
"None",
")",
",",
")",
"return",
"(",
"prev_func_obj",
",",
"inspect",
".",
"getfile",
"(",
"prev_func_obj",
")",
".",
"replace",
"(",
"\".pyc\"",
",",
"\"py\"",
")",
")",
"except",
"(",
"KeyError",
",",
"AttributeError",
",",
"TypeError",
")",
":",
"# KeyErrror: fun not in fobj.f_globals",
"# AttributeError: fobj.f_globals does not have",
"# a __wrapped__ attribute",
"# TypeError: pref_func_obj does not have a file associated with it",
"return",
"None",
",",
"None"
] | Unwrap decorators. | [
"Unwrap",
"decorators",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1140-L1158 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle._validate_edata | def _validate_edata(self, edata):
"""Validate edata argument of raise_exception_if method."""
# pylint: disable=R0916
if edata is None:
return True
if not (isinstance(edata, dict) or _isiterable(edata)):
return False
edata = [edata] if isinstance(edata, dict) else edata
for edict in edata:
if (not isinstance(edict, dict)) or (
isinstance(edict, dict)
and (
("field" not in edict)
or ("field" in edict and (not isinstance(edict["field"], str)))
or ("value" not in edict)
)
):
return False
return True | python | def _validate_edata(self, edata):
"""Validate edata argument of raise_exception_if method."""
# pylint: disable=R0916
if edata is None:
return True
if not (isinstance(edata, dict) or _isiterable(edata)):
return False
edata = [edata] if isinstance(edata, dict) else edata
for edict in edata:
if (not isinstance(edict, dict)) or (
isinstance(edict, dict)
and (
("field" not in edict)
or ("field" in edict and (not isinstance(edict["field"], str)))
or ("value" not in edict)
)
):
return False
return True | [
"def",
"_validate_edata",
"(",
"self",
",",
"edata",
")",
":",
"# pylint: disable=R0916",
"if",
"edata",
"is",
"None",
":",
"return",
"True",
"if",
"not",
"(",
"isinstance",
"(",
"edata",
",",
"dict",
")",
"or",
"_isiterable",
"(",
"edata",
")",
")",
":",
"return",
"False",
"edata",
"=",
"[",
"edata",
"]",
"if",
"isinstance",
"(",
"edata",
",",
"dict",
")",
"else",
"edata",
"for",
"edict",
"in",
"edata",
":",
"if",
"(",
"not",
"isinstance",
"(",
"edict",
",",
"dict",
")",
")",
"or",
"(",
"isinstance",
"(",
"edict",
",",
"dict",
")",
"and",
"(",
"(",
"\"field\"",
"not",
"in",
"edict",
")",
"or",
"(",
"\"field\"",
"in",
"edict",
"and",
"(",
"not",
"isinstance",
"(",
"edict",
"[",
"\"field\"",
"]",
",",
"str",
")",
")",
")",
"or",
"(",
"\"value\"",
"not",
"in",
"edict",
")",
")",
")",
":",
"return",
"False",
"return",
"True"
] | Validate edata argument of raise_exception_if method. | [
"Validate",
"edata",
"argument",
"of",
"raise_exception_if",
"method",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1160-L1178 | train |
pmacosta/pexdoc | pexdoc/exh.py | ExHandle.add_exception | def add_exception(self, exname, extype, exmsg):
r"""
Add an exception to the handler.
:param exname: Exception name; has to be unique within the namespace,
duplicates are eliminated
:type exname: non-numeric string
:param extype: Exception type; *must* be derived from the `Exception
<https://docs.python.org/2/library/exceptions.html#
exceptions.Exception>`_ class
:type extype: Exception type object, i.e. RuntimeError, TypeError,
etc.
:param exmsg: Exception message; it can contain fields to be replaced
when the exception is raised via
:py:meth:`pexdoc.ExHandle.raise_exception_if`.
A field starts with the characters :code:`'\*['` and
ends with the characters :code:`']\*'`, the field name
follows the same rules as variable names and is between
these two sets of characters. For example,
:code:`'\*[fname]\*'` defines the fname field
:type exmsg: string
:rtype: tuple
The returned tuple has the following items:
* **callable id** (string) first returned item, identification (as
reported by the `id
<https://docs.python.org/2/library/functions.html#id>`_ built-in
function) of the callable where the exception was added
* **exception definition** (tuple), second returned item, first item
is the exception type and the second item is the exception message
* **callable name** (string), third returned item, callable full
name (encoded with the :py:meth:`ExHandle.encode_call` method
:raises:
* RuntimeError (Argument \`exmsg\` is not valid)
* RuntimeError (Argument \`exname\` is not valid)
* RuntimeError (Argument \`extype\` is not valid)
"""
if not isinstance(exname, str):
raise RuntimeError("Argument `exname` is not valid")
number = True
try:
int(exname)
except ValueError:
number = False
if number:
raise RuntimeError("Argument `exname` is not valid")
if not isinstance(exmsg, str):
raise RuntimeError("Argument `exmsg` is not valid")
msg = ""
try:
raise extype(exmsg)
except Exception as eobj:
msg = _get_ex_msg(eobj)
if msg != exmsg:
raise RuntimeError("Argument `extype` is not valid")
# A callable that defines an exception can be accessed by
# multiple functions or paths, therefore the callable
# dictionary key 'function' is a list
func_id, func_name = self._get_ex_data()
if func_id not in self._ex_dict:
self._ex_dict[func_id] = {}
key = (extype, exmsg)
exname = "{0}{1}{2}".format(func_id, self._callables_separator, exname)
entry = self._ex_dict[func_id].get(
key, {"function": [], "name": exname, "raised": []}
)
if func_name not in entry["function"]:
entry["function"].append(func_name)
entry["raised"].append(False)
self._ex_dict[func_id][key] = entry
return (func_id, key, func_name) | python | def add_exception(self, exname, extype, exmsg):
r"""
Add an exception to the handler.
:param exname: Exception name; has to be unique within the namespace,
duplicates are eliminated
:type exname: non-numeric string
:param extype: Exception type; *must* be derived from the `Exception
<https://docs.python.org/2/library/exceptions.html#
exceptions.Exception>`_ class
:type extype: Exception type object, i.e. RuntimeError, TypeError,
etc.
:param exmsg: Exception message; it can contain fields to be replaced
when the exception is raised via
:py:meth:`pexdoc.ExHandle.raise_exception_if`.
A field starts with the characters :code:`'\*['` and
ends with the characters :code:`']\*'`, the field name
follows the same rules as variable names and is between
these two sets of characters. For example,
:code:`'\*[fname]\*'` defines the fname field
:type exmsg: string
:rtype: tuple
The returned tuple has the following items:
* **callable id** (string) first returned item, identification (as
reported by the `id
<https://docs.python.org/2/library/functions.html#id>`_ built-in
function) of the callable where the exception was added
* **exception definition** (tuple), second returned item, first item
is the exception type and the second item is the exception message
* **callable name** (string), third returned item, callable full
name (encoded with the :py:meth:`ExHandle.encode_call` method
:raises:
* RuntimeError (Argument \`exmsg\` is not valid)
* RuntimeError (Argument \`exname\` is not valid)
* RuntimeError (Argument \`extype\` is not valid)
"""
if not isinstance(exname, str):
raise RuntimeError("Argument `exname` is not valid")
number = True
try:
int(exname)
except ValueError:
number = False
if number:
raise RuntimeError("Argument `exname` is not valid")
if not isinstance(exmsg, str):
raise RuntimeError("Argument `exmsg` is not valid")
msg = ""
try:
raise extype(exmsg)
except Exception as eobj:
msg = _get_ex_msg(eobj)
if msg != exmsg:
raise RuntimeError("Argument `extype` is not valid")
# A callable that defines an exception can be accessed by
# multiple functions or paths, therefore the callable
# dictionary key 'function' is a list
func_id, func_name = self._get_ex_data()
if func_id not in self._ex_dict:
self._ex_dict[func_id] = {}
key = (extype, exmsg)
exname = "{0}{1}{2}".format(func_id, self._callables_separator, exname)
entry = self._ex_dict[func_id].get(
key, {"function": [], "name": exname, "raised": []}
)
if func_name not in entry["function"]:
entry["function"].append(func_name)
entry["raised"].append(False)
self._ex_dict[func_id][key] = entry
return (func_id, key, func_name) | [
"def",
"add_exception",
"(",
"self",
",",
"exname",
",",
"extype",
",",
"exmsg",
")",
":",
"if",
"not",
"isinstance",
"(",
"exname",
",",
"str",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `exname` is not valid\"",
")",
"number",
"=",
"True",
"try",
":",
"int",
"(",
"exname",
")",
"except",
"ValueError",
":",
"number",
"=",
"False",
"if",
"number",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `exname` is not valid\"",
")",
"if",
"not",
"isinstance",
"(",
"exmsg",
",",
"str",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `exmsg` is not valid\"",
")",
"msg",
"=",
"\"\"",
"try",
":",
"raise",
"extype",
"(",
"exmsg",
")",
"except",
"Exception",
"as",
"eobj",
":",
"msg",
"=",
"_get_ex_msg",
"(",
"eobj",
")",
"if",
"msg",
"!=",
"exmsg",
":",
"raise",
"RuntimeError",
"(",
"\"Argument `extype` is not valid\"",
")",
"# A callable that defines an exception can be accessed by",
"# multiple functions or paths, therefore the callable",
"# dictionary key 'function' is a list",
"func_id",
",",
"func_name",
"=",
"self",
".",
"_get_ex_data",
"(",
")",
"if",
"func_id",
"not",
"in",
"self",
".",
"_ex_dict",
":",
"self",
".",
"_ex_dict",
"[",
"func_id",
"]",
"=",
"{",
"}",
"key",
"=",
"(",
"extype",
",",
"exmsg",
")",
"exname",
"=",
"\"{0}{1}{2}\"",
".",
"format",
"(",
"func_id",
",",
"self",
".",
"_callables_separator",
",",
"exname",
")",
"entry",
"=",
"self",
".",
"_ex_dict",
"[",
"func_id",
"]",
".",
"get",
"(",
"key",
",",
"{",
"\"function\"",
":",
"[",
"]",
",",
"\"name\"",
":",
"exname",
",",
"\"raised\"",
":",
"[",
"]",
"}",
")",
"if",
"func_name",
"not",
"in",
"entry",
"[",
"\"function\"",
"]",
":",
"entry",
"[",
"\"function\"",
"]",
".",
"append",
"(",
"func_name",
")",
"entry",
"[",
"\"raised\"",
"]",
".",
"append",
"(",
"False",
")",
"self",
".",
"_ex_dict",
"[",
"func_id",
"]",
"[",
"key",
"]",
"=",
"entry",
"return",
"(",
"func_id",
",",
"key",
",",
"func_name",
")"
] | r"""
Add an exception to the handler.
:param exname: Exception name; has to be unique within the namespace,
duplicates are eliminated
:type exname: non-numeric string
:param extype: Exception type; *must* be derived from the `Exception
<https://docs.python.org/2/library/exceptions.html#
exceptions.Exception>`_ class
:type extype: Exception type object, i.e. RuntimeError, TypeError,
etc.
:param exmsg: Exception message; it can contain fields to be replaced
when the exception is raised via
:py:meth:`pexdoc.ExHandle.raise_exception_if`.
A field starts with the characters :code:`'\*['` and
ends with the characters :code:`']\*'`, the field name
follows the same rules as variable names and is between
these two sets of characters. For example,
:code:`'\*[fname]\*'` defines the fname field
:type exmsg: string
:rtype: tuple
The returned tuple has the following items:
* **callable id** (string) first returned item, identification (as
reported by the `id
<https://docs.python.org/2/library/functions.html#id>`_ built-in
function) of the callable where the exception was added
* **exception definition** (tuple), second returned item, first item
is the exception type and the second item is the exception message
* **callable name** (string), third returned item, callable full
name (encoded with the :py:meth:`ExHandle.encode_call` method
:raises:
* RuntimeError (Argument \`exmsg\` is not valid)
* RuntimeError (Argument \`exname\` is not valid)
* RuntimeError (Argument \`extype\` is not valid) | [
"r",
"Add",
"an",
"exception",
"to",
"the",
"handler",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1180-L1259 | train |
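A small standalone sketch of the message-field convention described in the docstring above: a field such as *[fname]* is replaced with a concrete value when the exception is eventually raised. The substitution helper is hypothetical and only illustrates the *[...]* template format, not pexdoc's internal implementation.

import re

def fill_exception_fields(exmsg, **fields):
    # Replace each '*[name]*' placeholder with the matching keyword value.
    return re.sub(
        r"\*\[(\w+)\]\*",
        lambda m: str(fields.get(m.group(1), m.group(0))),
        exmsg,
    )

template = "File `*[fname]*` could not be found"
print(fill_exception_fields(template, fname="data.csv"))
# File `data.csv` could not be found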
pmacosta/pexdoc | pexdoc/exh.py | ExHandle.decode_call | def decode_call(self, call):
"""
Replace callable tokens with callable names.
:param call: Encoded callable name
:type call: string
:rtype: string
"""
# Callable name is None when callable is part of exclude list
if call is None:
return None
itokens = call.split(self._callables_separator)
odict = {}
for key, value in self._clut.items():
if value in itokens:
odict[itokens[itokens.index(value)]] = key
return self._callables_separator.join([odict[itoken] for itoken in itokens]) | python | def decode_call(self, call):
"""
Replace callable tokens with callable names.
:param call: Encoded callable name
:type call: string
:rtype: string
"""
# Callable name is None when callable is part of exclude list
if call is None:
return None
itokens = call.split(self._callables_separator)
odict = {}
for key, value in self._clut.items():
if value in itokens:
odict[itokens[itokens.index(value)]] = key
return self._callables_separator.join([odict[itoken] for itoken in itokens]) | [
"def",
"decode_call",
"(",
"self",
",",
"call",
")",
":",
"# Callable name is None when callable is part of exclude list",
"if",
"call",
"is",
"None",
":",
"return",
"None",
"itokens",
"=",
"call",
".",
"split",
"(",
"self",
".",
"_callables_separator",
")",
"odict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"self",
".",
"_clut",
".",
"items",
"(",
")",
":",
"if",
"value",
"in",
"itokens",
":",
"odict",
"[",
"itokens",
"[",
"itokens",
".",
"index",
"(",
"value",
")",
"]",
"]",
"=",
"key",
"return",
"self",
".",
"_callables_separator",
".",
"join",
"(",
"[",
"odict",
"[",
"itoken",
"]",
"for",
"itoken",
"in",
"itokens",
"]",
")"
] | Replace callable tokens with callable names.
:param call: Encoded callable name
:type call: string
:rtype: string | [
"Replace",
"callable",
"tokens",
"with",
"callable",
"names",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1261-L1278 | train |
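An illustrative decoding sketch in the spirit of the record above, with a made-up lookup table and separator: the table maps callable names to tokens (as _clut does), so decoding reverses it and maps each token in the encoded call back to its name.

SEP = "/"
clut = {"mymodule.MyClass": "0", "mymodule.MyClass.my_method": "1"}  # name -> token

def decode_call_sketch(call, clut, sep=SEP):
    token_to_name = {token: name for name, token in clut.items()}
    return sep.join(token_to_name[token] for token in call.split(sep))

print(decode_call_sketch("0/1", clut))
# mymodule.MyClass/mymodule.MyClass.my_method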
pmacosta/pexdoc | pexdoc/exh.py | ExHandle.encode_call | def encode_call(self, call):
"""
Replace callables with tokens to reduce object memory footprint.
A callable token is an integer that denotes the order in which the
callable was encountered by the encoder, i.e. the first callable
encoded is assigned token 0, the second callable encoded is assigned
token 1, etc.
:param call: Callable name
:type call: string
:rtype: string
"""
# Callable name is None when callable is part of exclude list
if call is None:
return None
itokens = call.split(self._callables_separator)
otokens = []
for itoken in itokens:
otoken = self._clut.get(itoken, None)
if not otoken:
otoken = str(len(self._clut))
self._clut[itoken] = otoken
otokens.append(otoken)
return self._callables_separator.join(otokens) | python | def encode_call(self, call):
"""
Replace callables with tokens to reduce object memory footprint.
A callable token is an integer that denotes the order in which the
callable was encountered by the encoder, i.e. the first callable
encoded is assigned token 0, the second callable encoded is assigned
token 1, etc.
:param call: Callable name
:type call: string
:rtype: string
"""
# Callable name is None when callable is part of exclude list
if call is None:
return None
itokens = call.split(self._callables_separator)
otokens = []
for itoken in itokens:
otoken = self._clut.get(itoken, None)
if not otoken:
otoken = str(len(self._clut))
self._clut[itoken] = otoken
otokens.append(otoken)
return self._callables_separator.join(otokens) | [
"def",
"encode_call",
"(",
"self",
",",
"call",
")",
":",
"# Callable name is None when callable is part of exclude list",
"if",
"call",
"is",
"None",
":",
"return",
"None",
"itokens",
"=",
"call",
".",
"split",
"(",
"self",
".",
"_callables_separator",
")",
"otokens",
"=",
"[",
"]",
"for",
"itoken",
"in",
"itokens",
":",
"otoken",
"=",
"self",
".",
"_clut",
".",
"get",
"(",
"itoken",
",",
"None",
")",
"if",
"not",
"otoken",
":",
"otoken",
"=",
"str",
"(",
"len",
"(",
"self",
".",
"_clut",
")",
")",
"self",
".",
"_clut",
"[",
"itoken",
"]",
"=",
"otoken",
"otokens",
".",
"append",
"(",
"otoken",
")",
"return",
"self",
".",
"_callables_separator",
".",
"join",
"(",
"otokens",
")"
] | Replace callables with tokens to reduce object memory footprint.
A callable token is an integer that denotes the order in which the
callable was encountered by the encoder, i.e. the first callable
encoded is assigned token 0, the second callable encoded is assigned
token 1, etc.
:param call: Callable name
:type call: string
:rtype: string | [
"Replace",
"callables",
"with",
"tokens",
"to",
"reduce",
"object",
"memory",
"footprint",
"."
] | 201ac243e5781347feb75896a4231429fe6da4b1 | https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/exh.py#L1280-L1305 | train |
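Completing the previous sketch, a hypothetical encoder matching the behaviour the docstring above describes: each previously unseen callable name is assigned the next integer token, so the lookup table grows in encounter order and repeated names reuse their token.

def encode_call_sketch(call, clut, sep="/"):
    tokens = []
    for name in call.split(sep):
        if name not in clut:
            clut[name] = str(len(clut))  # first unseen name gets "0", the next "1", ...
        tokens.append(clut[name])
    return sep.join(tokens)

clut = {}
print(encode_call_sketch("pkg.mod.func/pkg.mod.func.closure", clut))  # 0/1
print(encode_call_sketch("pkg.mod.func/pkg.mod.other", clut))         # 0/2
print(clut)  # {'pkg.mod.func': '0', 'pkg.mod.func.closure': '1', 'pkg.mod.other': '2'}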
Loudr/pale | pale/endpoint.py | PaleDefaultJSONEncoder.default | def default(self, obj):
"""Default JSON encoding."""
try:
if isinstance(obj, datetime.datetime):
# do the datetime thing, or
encoded = arrow.get(obj).isoformat()
else:
# try the normal encoder
encoded = json.JSONEncoder.default(self, obj)
except TypeError as e:
# if that fails, check for the to_dict method,
if hasattr(obj, 'to_dict') and callable(obj.to_dict):
# and use it!
encoded = obj.to_dict()
else:
raise e
return encoded | python | def default(self, obj):
"""Default JSON encoding."""
try:
if isinstance(obj, datetime.datetime):
# do the datetime thing, or
encoded = arrow.get(obj).isoformat()
else:
# try the normal encoder
encoded = json.JSONEncoder.default(self, obj)
except TypeError as e:
# if that fails, check for the to_dict method,
if hasattr(obj, 'to_dict') and callable(obj.to_dict):
# and use it!
encoded = obj.to_dict()
else:
raise e
return encoded | [
"def",
"default",
"(",
"self",
",",
"obj",
")",
":",
"try",
":",
"if",
"isinstance",
"(",
"obj",
",",
"datetime",
".",
"datetime",
")",
":",
"# do the datetime thing, or",
"encoded",
"=",
"arrow",
".",
"get",
"(",
"obj",
")",
".",
"isoformat",
"(",
")",
"else",
":",
"# try the normal encoder",
"encoded",
"=",
"json",
".",
"JSONEncoder",
".",
"default",
"(",
"self",
",",
"obj",
")",
"except",
"TypeError",
"as",
"e",
":",
"# if that fails, check for the to_dict method,",
"if",
"hasattr",
"(",
"obj",
",",
"'to_dict'",
")",
"and",
"callable",
"(",
"obj",
".",
"to_dict",
")",
":",
"# and use it!",
"encoded",
"=",
"obj",
".",
"to_dict",
"(",
")",
"else",
":",
"raise",
"e",
"return",
"encoded"
] | Default JSON encoding. | [
"Default",
"JSON",
"encoding",
"."
] | dc002ee6032c856551143af222ff8f71ed9853fe | https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/endpoint.py#L36-L52 | train |
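A standalone usage sketch, not taken from the pale codebase: json.dumps accepts a custom encoder through its cls argument, and any object exposing a callable to_dict is serialised through that fallback. The record's class delegates datetimes to arrow; the sketch uses datetime.isoformat directly so it runs without extra dependencies.

import datetime
import json

class SketchJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()                  # the record routes this through arrow
        if hasattr(obj, "to_dict") and callable(obj.to_dict):
            return obj.to_dict()                    # duck-typed fallback, as in the record
        return json.JSONEncoder.default(self, obj)  # unchanged: raises TypeError otherwise

class Widget(object):
    def to_dict(self):
        return {"name": "widget", "created": datetime.datetime(2020, 1, 2, 3, 4, 5)}

print(json.dumps(Widget(), cls=SketchJSONEncoder))
# {"name": "widget", "created": "2020-01-02T03:04:05"}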