Dataset columns and value statistics; each record below lists these fields in this order:
  repository_name            string, length 5 to 67
  func_path_in_repository    string, length 4 to 234
  func_name                  string, length 0 to 314
  whole_func_string          string, length 52 to 3.87M
  language                   categorical, 6 values
  func_code_string           string, length 52 to 3.87M
  func_documentation_string  string, length 1 to 47.2k
  func_code_url              string, length 85 to 339
luckydonald/pytgbot
pytgbot/api_types/receivable/inline.py
InlineQuery.to_array
def to_array(self): """ Serializes this InlineQuery to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQuery, self).to_array() array['id'] = u(self.id) # py2: type unicode, py3: type str array['from'] = self.from_peer.to_array() # type User array['query'] = u(self.query) # py2: type unicode, py3: type str array['offset'] = u(self.offset) # py2: type unicode, py3: type str if self.location is not None: array['location'] = self.location.to_array() # type Location return array
python
def to_array(self): """ Serializes this InlineQuery to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQuery, self).to_array() array['id'] = u(self.id) # py2: type unicode, py3: type str array['from'] = self.from_peer.to_array() # type User array['query'] = u(self.query) # py2: type unicode, py3: type str array['offset'] = u(self.offset) # py2: type unicode, py3: type str if self.location is not None: array['location'] = self.location.to_array() # type Location return array
Serializes this InlineQuery to a dictionary. :return: dictionary representation of this object. :rtype: dict
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/inline.py#L95-L109
luckydonald/pytgbot
pytgbot/api_types/receivable/inline.py
InlineQuery.from_array
def from_array(array): """ Deserialize a new InlineQuery from a given dictionary. :return: new InlineQuery instance. :rtype: InlineQuery """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.media import Location from pytgbot.api_types.receivable.peer import User data = {} data['id'] = u(array.get('id')) data['from_peer'] = User.from_array(array.get('from')) data['query'] = u(array.get('query')) data['offset'] = u(array.get('offset')) data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None data['_raw'] = array return InlineQuery(**data)
python
def from_array(array): """ Deserialize a new InlineQuery from a given dictionary. :return: new InlineQuery instance. :rtype: InlineQuery """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.media import Location from pytgbot.api_types.receivable.peer import User data = {} data['id'] = u(array.get('id')) data['from_peer'] = User.from_array(array.get('from')) data['query'] = u(array.get('query')) data['offset'] = u(array.get('offset')) data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None data['_raw'] = array return InlineQuery(**data)
Deserialize a new InlineQuery from a given dictionary. :return: new InlineQuery instance. :rtype: InlineQuery
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/inline.py#L113-L134
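The two InlineQuery rows above describe a serialization round trip. A minimal sketch of using them, assuming the import path from the file column; the nested "from" User fields are an assumption (this excerpt does not show what User.from_array() requires):
```python
from pytgbot.api_types.receivable.inline import InlineQuery

raw = {
    'id': '424238335',
    'from': {'id': 12345, 'is_bot': False, 'first_name': 'Alice'},  # assumed User fields
    'query': 'cat pictures',
    'offset': '',
}
query = InlineQuery.from_array(raw)   # 'from' is mapped to the from_peer attribute
print(query.to_array()['query'])      # -> 'cat pictures'
```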
luckydonald/pytgbot
pytgbot/api_types/receivable/inline.py
ChosenInlineResult.to_array
def to_array(self): """ Serializes this ChosenInlineResult to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(ChosenInlineResult, self).to_array() array['result_id'] = u(self.result_id) # py2: type unicode, py3: type str array['from'] = self.from_peer.to_array() # type User array['query'] = u(self.query) # py2: type unicode, py3: type str if self.location is not None: array['location'] = self.location.to_array() # type Location if self.inline_message_id is not None: array['inline_message_id'] = u(self.inline_message_id) # py2: type unicode, py3: type str return array
python
def to_array(self): """ Serializes this ChosenInlineResult to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(ChosenInlineResult, self).to_array() array['result_id'] = u(self.result_id) # py2: type unicode, py3: type str array['from'] = self.from_peer.to_array() # type User array['query'] = u(self.query) # py2: type unicode, py3: type str if self.location is not None: array['location'] = self.location.to_array() # type Location if self.inline_message_id is not None: array['inline_message_id'] = u(self.inline_message_id) # py2: type unicode, py3: type str return array
Serializes this ChosenInlineResult to a dictionary. :return: dictionary representation of this object. :rtype: dict
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/inline.py#L248-L263
luckydonald/pytgbot
pytgbot/api_types/receivable/inline.py
ChosenInlineResult.from_array
def from_array(array): """ Deserialize a new ChosenInlineResult from a given dictionary. :return: new ChosenInlineResult instance. :rtype: ChosenInlineResult """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from ..receivable.media import Location from ..receivable.peer import User data = {} data['result_id'] = u(array.get('result_id')) data['from_peer'] = User.from_array(array.get('from')) data['query'] = u(array.get('query')) data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None data['inline_message_id'] = u(array.get('inline_message_id')) if array.get('inline_message_id') is not None else None data['_raw'] = array return ChosenInlineResult(**data)
python
def from_array(array): """ Deserialize a new ChosenInlineResult from a given dictionary. :return: new ChosenInlineResult instance. :rtype: ChosenInlineResult """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from ..receivable.media import Location from ..receivable.peer import User data = {} data['result_id'] = u(array.get('result_id')) data['from_peer'] = User.from_array(array.get('from')) data['query'] = u(array.get('query')) data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None data['inline_message_id'] = u(array.get('inline_message_id')) if array.get('inline_message_id') is not None else None data['_raw'] = array return ChosenInlineResult(**data)
Deserialize a new ChosenInlineResult from a given dictionary. :return: new ChosenInlineResult instance. :rtype: ChosenInlineResult
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/inline.py#L267-L289
luckydonald/pytgbot
code_generation/output/pytgbot/api_types/receivable/updates.py
Update.from_array
def from_array(array): """ Deserialize a new Update from a given dictionary. :return: new Update instance. :rtype: Update """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.inline import ChosenInlineResult from pytgbot.api_types.receivable.inline import InlineQuery from pytgbot.api_types.receivable.payments import PreCheckoutQuery from pytgbot.api_types.receivable.payments import ShippingQuery from pytgbot.api_types.receivable.updates import CallbackQuery from pytgbot.api_types.receivable.updates import Message data = {} data['update_id'] = int(array.get('update_id')) data['message'] = Message.from_array(array.get('message')) if array.get('message') is not None else None data['edited_message'] = Message.from_array(array.get('edited_message')) if array.get('edited_message') is not None else None data['channel_post'] = Message.from_array(array.get('channel_post')) if array.get('channel_post') is not None else None data['edited_channel_post'] = Message.from_array(array.get('edited_channel_post')) if array.get('edited_channel_post') is not None else None data['inline_query'] = InlineQuery.from_array(array.get('inline_query')) if array.get('inline_query') is not None else None data['chosen_inline_result'] = ChosenInlineResult.from_array(array.get('chosen_inline_result')) if array.get('chosen_inline_result') is not None else None data['callback_query'] = CallbackQuery.from_array(array.get('callback_query')) if array.get('callback_query') is not None else None data['shipping_query'] = ShippingQuery.from_array(array.get('shipping_query')) if array.get('shipping_query') is not None else None data['pre_checkout_query'] = PreCheckoutQuery.from_array(array.get('pre_checkout_query')) if array.get('pre_checkout_query') is not None else None data['_raw'] = array return Update(**data)
python
def from_array(array): """ Deserialize a new Update from a given dictionary. :return: new Update instance. :rtype: Update """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.inline import ChosenInlineResult from pytgbot.api_types.receivable.inline import InlineQuery from pytgbot.api_types.receivable.payments import PreCheckoutQuery from pytgbot.api_types.receivable.payments import ShippingQuery from pytgbot.api_types.receivable.updates import CallbackQuery from pytgbot.api_types.receivable.updates import Message data = {} data['update_id'] = int(array.get('update_id')) data['message'] = Message.from_array(array.get('message')) if array.get('message') is not None else None data['edited_message'] = Message.from_array(array.get('edited_message')) if array.get('edited_message') is not None else None data['channel_post'] = Message.from_array(array.get('channel_post')) if array.get('channel_post') is not None else None data['edited_channel_post'] = Message.from_array(array.get('edited_channel_post')) if array.get('edited_channel_post') is not None else None data['inline_query'] = InlineQuery.from_array(array.get('inline_query')) if array.get('inline_query') is not None else None data['chosen_inline_result'] = ChosenInlineResult.from_array(array.get('chosen_inline_result')) if array.get('chosen_inline_result') is not None else None data['callback_query'] = CallbackQuery.from_array(array.get('callback_query')) if array.get('callback_query') is not None else None data['shipping_query'] = ShippingQuery.from_array(array.get('shipping_query')) if array.get('shipping_query') is not None else None data['pre_checkout_query'] = PreCheckoutQuery.from_array(array.get('pre_checkout_query')) if array.get('pre_checkout_query') is not None else None data['_raw'] = array return Update(**data)
Deserialize a new Update from a given dictionary. :return: new Update instance. :rtype: Update
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/output/pytgbot/api_types/receivable/updates.py#L181-L212
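A short sketch of feeding a webhook payload through the Update.from_array row above; only update_id is required, and every optional key simply deserializes to None. The import path is the one the function itself uses:
```python
from pytgbot.api_types.receivable.updates import Update

payload = {'update_id': 736492}           # minimal getUpdates/webhook payload
update = Update.from_array(payload)
print(update.update_id, update.message)   # -> 736492 None
```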
delph-in/pydelphin
delphin/tokens.py
YyToken.from_dict
def from_dict(cls, d): """ Decode from a dictionary as from :meth:`to_dict`. """ return cls( d['id'], d['start'], d['end'], Lnk.charspan(d['from'], d['to']) if 'from' in d else None, # d.get('paths', [1]), form=d['form'], surface=d.get('surface'), # ipos= # lrules= pos=zip(d.get('tags', []), d.get('probabilities', [])) )
python
def from_dict(cls, d): """ Decode from a dictionary as from :meth:`to_dict`. """ return cls( d['id'], d['start'], d['end'], Lnk.charspan(d['from'], d['to']) if 'from' in d else None, # d.get('paths', [1]), form=d['form'], surface=d.get('surface'), # ipos= # lrules= pos=zip(d.get('tags', []), d.get('probabilities', [])) )
Decode from a dictionary as from :meth:`to_dict`.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tokens.py#L68-L83
delph-in/pydelphin
delphin/tokens.py
YyToken.to_dict
def to_dict(self): """ Encode the token as a dictionary suitable for JSON serialization. """ d = { 'id': self.id, 'start': self.start, 'end': self.end, 'form': self.form } if self.lnk is not None: cfrom, cto = self.lnk.data d['from'] = cfrom d['to'] = cto # d['paths'] = self.paths if self.surface is not None: d['surface'] = self.surface # d['ipos'] = self.ipos # d['lrules'] = self.lrules if self.pos: d['tags'] = [ps[0] for ps in self.pos] d['probabilities'] = [ps[1] for ps in self.pos] return d
python
def to_dict(self): """ Encode the token as a dictionary suitable for JSON serialization. """ d = { 'id': self.id, 'start': self.start, 'end': self.end, 'form': self.form } if self.lnk is not None: cfrom, cto = self.lnk.data d['from'] = cfrom d['to'] = cto # d['paths'] = self.paths if self.surface is not None: d['surface'] = self.surface # d['ipos'] = self.ipos # d['lrules'] = self.lrules if self.pos: d['tags'] = [ps[0] for ps in self.pos] d['probabilities'] = [ps[1] for ps in self.pos] return d
Encode the token as a dictionary suitable for JSON serialization.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tokens.py#L85-L107
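The YyToken.from_dict/to_dict pair above round-trips JSON-style token dicts. A minimal sketch, assuming YyToken is importable from delphin.tokens (the file these rows point at):
```python
from delphin.tokens import YyToken

d = {'id': 1, 'start': 0, 'end': 1, 'from': 0, 'to': 4, 'form': 'dogs',
     'tags': ['NNS'], 'probabilities': [1.0]}
tok = YyToken.from_dict(d)
print(tok.to_dict())   # the 'from'/'to' span comes back via the Lnk.charspan link
```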
delph-in/pydelphin
delphin/tokens.py
YyTokenLattice.from_string
def from_string(cls, s): """ Decode from the YY token lattice format. """ def _qstrip(s): return s[1:-1] # remove assumed quote characters tokens = [] for match in _yy_re.finditer(s): d = match.groupdict() lnk, pos = None, [] if d['lnkfrom'] is not None: lnk = Lnk.charspan(d['lnkfrom'], d['lnkto']) if d['pos'] is not None: ps = d['pos'].strip().split() pos = list(zip(map(_qstrip, ps[::2]), map(float, ps[1::2]))) tokens.append( YyToken( int(d['id']), int(d['start']), int(d['end']), lnk, list(map(int, d['paths'].strip().split())), _qstrip(d['form']), None if d['surface'] is None else _qstrip(d['surface']), int(d['ipos']), list(map(_qstrip, d['lrules'].strip().split())), pos ) ) return cls(tokens)
python
def from_string(cls, s): """ Decode from the YY token lattice format. """ def _qstrip(s): return s[1:-1] # remove assumed quote characters tokens = [] for match in _yy_re.finditer(s): d = match.groupdict() lnk, pos = None, [] if d['lnkfrom'] is not None: lnk = Lnk.charspan(d['lnkfrom'], d['lnkto']) if d['pos'] is not None: ps = d['pos'].strip().split() pos = list(zip(map(_qstrip, ps[::2]), map(float, ps[1::2]))) tokens.append( YyToken( int(d['id']), int(d['start']), int(d['end']), lnk, list(map(int, d['paths'].strip().split())), _qstrip(d['form']), None if d['surface'] is None else _qstrip(d['surface']), int(d['ipos']), list(map(_qstrip, d['lrules'].strip().split())), pos ) ) return cls(tokens)
Decode from the YY token lattice format.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tokens.py#L145-L174
delph-in/pydelphin
delphin/mrs/simplemrs.py
load
def load(fh, single=False, version=_default_version, strict=False, errors='warn'): """ Deserialize SimpleMRSs from a file (handle or filename) Args: fh (str, file): input filename or file object single: if `True`, only return the first read Xmrs object strict: deprecated; a `True` value is the same as `errors='strict'`, and a `False` value is the same as `errors='warn'` errors: if `'strict'`, ill-formed MRSs raise an error; if `'warn'`, raise a warning instead; if `'ignore'`, do not warn or raise errors for ill-formed MRSs Returns: a generator of Xmrs objects (unless the *single* option is `True`) """ if isinstance(fh, stringtypes): s = open(fh, 'r').read() else: s = fh.read() return loads(s, single=single, version=version, strict=strict, errors=errors)
python
def load(fh, single=False, version=_default_version, strict=False, errors='warn'): """ Deserialize SimpleMRSs from a file (handle or filename) Args: fh (str, file): input filename or file object single: if `True`, only return the first read Xmrs object strict: deprecated; a `True` value is the same as `errors='strict'`, and a `False` value is the same as `errors='warn'` errors: if `'strict'`, ill-formed MRSs raise an error; if `'warn'`, raise a warning instead; if `'ignore'`, do not warn or raise errors for ill-formed MRSs Returns: a generator of Xmrs objects (unless the *single* option is `True`) """ if isinstance(fh, stringtypes): s = open(fh, 'r').read() else: s = fh.read() return loads(s, single=single, version=version, strict=strict, errors=errors)
Deserialize SimpleMRSs from a file (handle or filename) Args: fh (str, file): input filename or file object single: if `True`, only return the first read Xmrs object strict: deprecated; a `True` value is the same as `errors='strict'`, and a `False` value is the same as `errors='warn'` errors: if `'strict'`, ill-formed MRSs raise an error; if `'warn'`, raise a warning instead; if `'ignore'`, do not warn or raise errors for ill-formed MRSs Returns: a generator of Xmrs objects (unless the *single* option is `True`)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L58-L81
delph-in/pydelphin
delphin/mrs/simplemrs.py
loads
def loads(s, single=False, version=_default_version, strict=False, errors='warn'): """ Deserialize SimpleMRS string representations Args: s (str): a SimpleMRS string single (bool): if `True`, only return the first Xmrs object Returns: a generator of Xmrs objects (unless *single* is `True`) """ ms = deserialize(s, version=version, strict=strict, errors=errors) if single: return next(ms) else: return ms
python
def loads(s, single=False, version=_default_version, strict=False, errors='warn'): """ Deserialize SimpleMRS string representations Args: s (str): a SimpleMRS string single (bool): if `True`, only return the first Xmrs object Returns: a generator of Xmrs objects (unless *single* is `True`) """ ms = deserialize(s, version=version, strict=strict, errors=errors) if single: return next(ms) else: return ms
Deserialize SimpleMRS string representations Args: s (str): a SimpleMRS string single (bool): if `True`, only return the first Xmrs object Returns: a generator of Xmrs objects (unless *single* is `True`)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L84-L99
delph-in/pydelphin
delphin/mrs/simplemrs.py
dumps
def dumps(ms, single=False, version=_default_version, properties=True, pretty_print=False, color=False, **kwargs): """ Serialize an Xmrs object to a SimpleMRS representation Args: ms: an iterator of Xmrs objects to serialize (unless the *single* option is `True`) single: if `True`, treat *ms* as a single Xmrs object instead of as an iterator properties: if `False`, suppress variable properties pretty_print: if `True`, add newlines and indentation color: if `True`, colorize the output with ANSI color codes Returns: a SimpleMrs string representation of a corpus of Xmrs """ if not pretty_print and kwargs.get('indent'): pretty_print = True if single: ms = [ms] return serialize(ms, version=version, properties=properties, pretty_print=pretty_print, color=color)
python
def dumps(ms, single=False, version=_default_version, properties=True, pretty_print=False, color=False, **kwargs): """ Serialize an Xmrs object to a SimpleMRS representation Args: ms: an iterator of Xmrs objects to serialize (unless the *single* option is `True`) single: if `True`, treat *ms* as a single Xmrs object instead of as an iterator properties: if `False`, suppress variable properties pretty_print: if `True`, add newlines and indentation color: if `True`, colorize the output with ANSI color codes Returns: a SimpleMrs string representation of a corpus of Xmrs """ if not pretty_print and kwargs.get('indent'): pretty_print = True if single: ms = [ms] return serialize(ms, version=version, properties=properties, pretty_print=pretty_print, color=color)
Serialize an Xmrs object to a SimpleMRS representation Args: ms: an iterator of Xmrs objects to serialize (unless the *single* option is `True`) single: if `True`, treat *ms* as a single Xmrs object instead of as an iterator properties: if `False`, suppress variable properties pretty_print: if `True`, add newlines and indentation color: if `True`, colorize the output with ANSI color codes Returns: a SimpleMrs string representation of a corpus of Xmrs
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L132-L153
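Taken together, the simplemrs load/loads/dumps rows above form a small read/write API: load and loads return a generator of Xmrs objects unless single=True, and dumps re-serializes them. A usage sketch with a placeholder filename:
```python
from delphin.mrs import simplemrs

xs = list(simplemrs.load('sentences.mrs'))      # placeholder file of SimpleMRS strings
print(simplemrs.dumps(xs, pretty_print=True))   # re-serialize with indentation
```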
delph-in/pydelphin
delphin/mrs/simplemrs.py
_read_lnk
def _read_lnk(tokens): """Read and return a tuple of the pred's lnk type and lnk value, if a pred lnk is specified.""" # < FROM : TO > or < FROM # TO > or < TOK... > or < @ EDGE > lnk = None if tokens[0] == '<': tokens.popleft() # we just checked this is a left angle if tokens[0] == '>': pass # empty <> brackets the same as no lnk specified # edge lnk: ['@', EDGE, ...] elif tokens[0] == '@': tokens.popleft() # remove the @ lnk = Lnk.edge(tokens.popleft()) # edge lnks only have one number # character span lnk: [FROM, ':', TO, ...] elif tokens[1] == ':': lnk = Lnk.charspan(tokens.popleft(), tokens[1]) tokens.popleft() # this should be the colon tokens.popleft() # and this is the cto # chart vertex range lnk: [FROM, '#', TO, ...] elif tokens[1] == '#': lnk = Lnk.chartspan(tokens.popleft(), tokens[1]) tokens.popleft() # this should be the hash tokens.popleft() # and this is the to vertex # tokens lnk: [(TOK,)+ ...] else: lnkdata = [] while tokens[0] != '>': lnkdata.append(int(tokens.popleft())) lnk = Lnk.tokens(lnkdata) _read_literals(tokens, '>') return lnk
python
def _read_lnk(tokens): """Read and return a tuple of the pred's lnk type and lnk value, if a pred lnk is specified.""" # < FROM : TO > or < FROM # TO > or < TOK... > or < @ EDGE > lnk = None if tokens[0] == '<': tokens.popleft() # we just checked this is a left angle if tokens[0] == '>': pass # empty <> brackets the same as no lnk specified # edge lnk: ['@', EDGE, ...] elif tokens[0] == '@': tokens.popleft() # remove the @ lnk = Lnk.edge(tokens.popleft()) # edge lnks only have one number # character span lnk: [FROM, ':', TO, ...] elif tokens[1] == ':': lnk = Lnk.charspan(tokens.popleft(), tokens[1]) tokens.popleft() # this should be the colon tokens.popleft() # and this is the cto # chart vertex range lnk: [FROM, '#', TO, ...] elif tokens[1] == '#': lnk = Lnk.chartspan(tokens.popleft(), tokens[1]) tokens.popleft() # this should be the hash tokens.popleft() # and this is the to vertex # tokens lnk: [(TOK,)+ ...] else: lnkdata = [] while tokens[0] != '>': lnkdata.append(int(tokens.popleft())) lnk = Lnk.tokens(lnkdata) _read_literals(tokens, '>') return lnk
Read and return a tuple of the pred's lnk type and lnk value, if a pred lnk is specified.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L338-L368
delph-in/pydelphin
delphin/mrs/simplemrs.py
serialize
def serialize(ms, version=_default_version, properties=True, pretty_print=False, color=False): """Serialize an MRS structure into a SimpleMRS string.""" delim = '\n' if pretty_print else _default_mrs_delim output = delim.join( _serialize_mrs(m, properties=properties, version=version, pretty_print=pretty_print) for m in ms ) if color: output = highlight(output) return output
python
def serialize(ms, version=_default_version, properties=True, pretty_print=False, color=False): """Serialize an MRS structure into a SimpleMRS string.""" delim = '\n' if pretty_print else _default_mrs_delim output = delim.join( _serialize_mrs(m, properties=properties, version=version, pretty_print=pretty_print) for m in ms ) if color: output = highlight(output) return output
Serialize an MRS structure into a SimpleMRS string.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L379-L390
delph-in/pydelphin
delphin/mrs/simplemrs.py
_serialize_argument
def _serialize_argument(rargname, value, varprops): """Serialize an MRS argument into the SimpleMRS format.""" _argument = '{rargname}: {value}{props}' if rargname == CONSTARG_ROLE: value = '"{}"'.format(value) props = '' if value in varprops: props = ' [ {} ]'.format( ' '.join( [var_sort(value)] + list(map('{0[0]}: {0[1]}'.format, [(k.upper(), v) for k, v in varprops[value]])) ) ) del varprops[value] # only print props once return _argument.format( rargname=rargname, value=str(value), props=props )
python
def _serialize_argument(rargname, value, varprops): """Serialize an MRS argument into the SimpleMRS format.""" _argument = '{rargname}: {value}{props}' if rargname == CONSTARG_ROLE: value = '"{}"'.format(value) props = '' if value in varprops: props = ' [ {} ]'.format( ' '.join( [var_sort(value)] + list(map('{0[0]}: {0[1]}'.format, [(k.upper(), v) for k, v in varprops[value]])) ) ) del varprops[value] # only print props once return _argument.format( rargname=rargname, value=str(value), props=props )
Serialize an MRS argument into the SimpleMRS format.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L430-L449
delph-in/pydelphin
delphin/mrs/simplemrs.py
_serialize_ep
def _serialize_ep(ep, varprops, version=_default_version): """Serialize an Elementary Predication into the SimpleMRS encoding.""" # ('nodeid', 'pred', 'label', 'args', 'lnk', 'surface', 'base') args = ep[3] arglist = ' '.join([_serialize_argument(rarg, args[rarg], varprops) for rarg in sorted(args, key=rargname_sortkey)]) if version < 1.1 or len(ep) < 6 or ep[5] is None: surface = '' else: surface = ' "%s"' % ep[5] lnk = None if len(ep) < 5 else ep[4] pred = ep[1] predstr = pred.string return '[ {pred}{lnk}{surface} LBL: {label}{s}{args} ]'.format( pred=predstr, lnk=_serialize_lnk(lnk), surface=surface, label=str(ep[2]), s=' ' if arglist else '', args=arglist )
python
def _serialize_ep(ep, varprops, version=_default_version): """Serialize an Elementary Predication into the SimpleMRS encoding.""" # ('nodeid', 'pred', 'label', 'args', 'lnk', 'surface', 'base') args = ep[3] arglist = ' '.join([_serialize_argument(rarg, args[rarg], varprops) for rarg in sorted(args, key=rargname_sortkey)]) if version < 1.1 or len(ep) < 6 or ep[5] is None: surface = '' else: surface = ' "%s"' % ep[5] lnk = None if len(ep) < 5 else ep[4] pred = ep[1] predstr = pred.string return '[ {pred}{lnk}{surface} LBL: {label}{s}{args} ]'.format( pred=predstr, lnk=_serialize_lnk(lnk), surface=surface, label=str(ep[2]), s=' ' if arglist else '', args=arglist )
Serialize an Elementary Predication into the SimpleMRS encoding.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L452-L472
delph-in/pydelphin
delphin/mrs/simplemrs.py
_serialize_lnk
def _serialize_lnk(lnk): """Serialize a predication lnk to surface form into the SimpleMRS encoding.""" s = "" if lnk is not None: s = '<' if lnk.type == Lnk.CHARSPAN: cfrom, cto = lnk.data s += ''.join([str(cfrom), ':', str(cto)]) elif lnk.type == Lnk.CHARTSPAN: cfrom, cto = lnk.data s += ''.join([str(cfrom), '#', str(cto)]) elif lnk.type == Lnk.TOKENS: s += ' '.join([str(t) for t in lnk.data]) elif lnk.type == Lnk.EDGE: s += ''.join(['@', str(lnk.data)]) s += '>' return s
python
def _serialize_lnk(lnk): """Serialize a predication lnk to surface form into the SimpleMRS encoding.""" s = "" if lnk is not None: s = '<' if lnk.type == Lnk.CHARSPAN: cfrom, cto = lnk.data s += ''.join([str(cfrom), ':', str(cto)]) elif lnk.type == Lnk.CHARTSPAN: cfrom, cto = lnk.data s += ''.join([str(cfrom), '#', str(cto)]) elif lnk.type == Lnk.TOKENS: s += ' '.join([str(t) for t in lnk.data]) elif lnk.type == Lnk.EDGE: s += ''.join(['@', str(lnk.data)]) s += '>' return s
Serialize a predication lnk to surface form into the SimpleMRS encoding.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L475-L492
delph-in/pydelphin
delphin/mrs/simplemrs.py
_serialize_hcons
def _serialize_hcons(hcons): """Serialize [HandleConstraints] into the SimpleMRS encoding.""" toks = ['HCONS:', '<'] for hc in hcons: toks.extend(hc) # reln = hcon[1] # toks += [hcon[0], rel, str(hcon.lo)] toks += ['>'] return ' '.join(toks)
python
def _serialize_hcons(hcons): """Serialize [HandleConstraints] into the SimpleMRS encoding.""" toks = ['HCONS:', '<'] for hc in hcons: toks.extend(hc) # reln = hcon[1] # toks += [hcon[0], rel, str(hcon.lo)] toks += ['>'] return ' '.join(toks)
Serialize [HandleConstraints] into the SimpleMRS encoding.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L495-L503
delph-in/pydelphin
delphin/mrs/simplemrs.py
_serialize_icons
def _serialize_icons(icons): """Serialize [IndividualConstraints] into the SimpleMRS encoding.""" toks = ['ICONS:', '<'] for ic in icons: toks.extend(ic) # toks += [str(icon.left), # icon.relation, # str(icon.right)] toks += ['>'] return ' '.join(toks)
python
def _serialize_icons(icons): """Serialize [IndividualConstraints] into the SimpleMRS encoding.""" toks = ['ICONS:', '<'] for ic in icons: toks.extend(ic) # toks += [str(icon.left), # icon.relation, # str(icon.right)] toks += ['>'] return ' '.join(toks)
Serialize [IndividualConstraints] into the SimpleMRS encoding.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/simplemrs.py#L506-L515
delph-in/pydelphin
delphin/derivation.py
_UdfNodeBase.to_dict
def to_dict(self, fields=_all_fields, labels=None): """ Encode the node as a dictionary suitable for JSON serialization. Args: fields: if given, this is a whitelist of fields to include on nodes (`daughters` and `form` are always shown) labels: optional label annotations to embed in the derivation dict; the value is a list of lists matching the structure of the derivation (e.g., `["S" ["NP" ["NNS" ["Dogs"]]] ["VP" ["VBZ" ["bark"]]]]`) Returns: dict: the dictionary representation of the structure """ fields = set(fields) diff = fields.difference(_all_fields) if isinstance(labels, Sequence): labels = _map_labels(self, labels) elif labels is None: labels = {} if diff: raise ValueError( 'Invalid field(s): {}'.format(', '.join(diff)) ) return _to_dict(self, fields, labels)
python
def to_dict(self, fields=_all_fields, labels=None): """ Encode the node as a dictionary suitable for JSON serialization. Args: fields: if given, this is a whitelist of fields to include on nodes (`daughters` and `form` are always shown) labels: optional label annotations to embed in the derivation dict; the value is a list of lists matching the structure of the derivation (e.g., `["S" ["NP" ["NNS" ["Dogs"]]] ["VP" ["VBZ" ["bark"]]]]`) Returns: dict: the dictionary representation of the structure """ fields = set(fields) diff = fields.difference(_all_fields) if isinstance(labels, Sequence): labels = _map_labels(self, labels) elif labels is None: labels = {} if diff: raise ValueError( 'Invalid field(s): {}'.format(', '.join(diff)) ) return _to_dict(self, fields, labels)
Encode the node as a dictionary suitable for JSON serialization. Args: fields: if given, this is a whitelist of fields to include on nodes (`daughters` and `form` are always shown) labels: optional label annotations to embed in the derivation dict; the value is a list of lists matching the structure of the derivation (e.g., `["S" ["NP" ["NNS" ["Dogs"]]] ["VP" ["VBZ" ["bark"]]]]`) Returns: dict: the dictionary representation of the structure
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/derivation.py#L138-L162
delph-in/pydelphin
delphin/derivation.py
UdfNode.is_head
def is_head(self): """ Return `True` if the node is a head. A node is a head if it is marked as a head in the UDX format or it has no siblings. `False` is returned if the node is known to not be a head (has a sibling that is a head). Otherwise it is indeterminate whether the node is a head, and `None` is returned. """ if (self._head or self.is_root() or len(getattr(self._parent, 'daughters', [None])) == 1): return True elif any(dtr._head for dtr in self._parent.daughters): return False return None
python
def is_head(self): """ Return `True` if the node is a head. A node is a head if it is marked as a head in the UDX format or it has no siblings. `False` is returned if the node is known to not be a head (has a sibling that is a head). Otherwise it is indeterminate whether the node is a head, and `None` is returned. """ if (self._head or self.is_root() or len(getattr(self._parent, 'daughters', [None])) == 1): return True elif any(dtr._head for dtr in self._parent.daughters): return False return None
Return `True` if the node is a head. A node is a head if it is marked as a head in the UDX format or it has no siblings. `False` is returned if the node is known to not be a head (has a sibling that is a head). Otherwise it is indeterminate whether the node is a head, and `None` is returned.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/derivation.py#L420-L436
delph-in/pydelphin
delphin/derivation.py
UdfNode.preterminals
def preterminals(self): """ Return the list of preterminals (i.e. lexical grammar-entities). """ nodes = [] for dtr in self.daughters: if isinstance(dtr, UdfTerminal): nodes.append(self) else: nodes.extend(dtr.preterminals()) return nodes
python
def preterminals(self): """ Return the list of preterminals (i.e. lexical grammar-entities). """ nodes = [] for dtr in self.daughters: if isinstance(dtr, UdfTerminal): nodes.append(self) else: nodes.extend(dtr.preterminals()) return nodes
Return the list of preterminals (i.e. lexical grammar-entities).
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/derivation.py#L469-L479
delph-in/pydelphin
delphin/derivation.py
Derivation.from_string
def from_string(cls, s): """ Instantiate a `Derivation` from a UDF or UDX string representation. The UDF/UDX representations are as output by a processor like the `LKB <http://moin.delph-in.net/LkbTop>`_ or `ACE <http://sweaglesw.org/linguistics/ace/>`_, or from the :meth:`UdfNode.to_udf` or :meth:`UdfNode.to_udx` methods. Args: s (str): UDF or UDX serialization """ if not (s.startswith('(') and s.endswith(')')): raise ValueError( 'Derivations must begin and end with parentheses: ( )' ) s_ = s[1:] # get rid of initial open-parenthesis stack = [] deriv = None try: matches = cls.udf_re.finditer(s_) for match in matches: if match.group('done'): node = stack.pop() if len(stack) == 0: deriv = node break else: stack[-1].daughters.append(node) elif match.group('form'): if len(stack) == 0: raise ValueError('Possible leaf node with no parent.') gd = match.groupdict() # ignore LKB-style start/end data if it exists on gd term = UdfTerminal( _unquote(gd['form']), tokens=_udf_tokens(gd.get('tokens')), parent=stack[-1] if stack else None ) stack[-1].daughters.append(term) elif match.group('id'): gd = match.groupdict() head = None entity, _, type = gd['entity'].partition('@') if entity[0] == '^': entity = entity[1:] head = True if type == '': type = None udf = UdfNode(gd['id'], entity, gd['score'], gd['start'], gd['end'], head=head, type=type, parent=stack[-1] if stack else None) stack.append(udf) elif match.group('root'): udf = UdfNode(None, match.group('root')) stack.append(udf) except (ValueError, AttributeError): raise ValueError('Invalid derivation: %s' % s) if stack or deriv is None: raise ValueError('Invalid derivation; possibly unbalanced ' 'parentheses: %s' % s) return cls(*deriv, head=deriv._head, type=deriv.type)
python
def from_string(cls, s): """ Instantiate a `Derivation` from a UDF or UDX string representation. The UDF/UDX representations are as output by a processor like the `LKB <http://moin.delph-in.net/LkbTop>`_ or `ACE <http://sweaglesw.org/linguistics/ace/>`_, or from the :meth:`UdfNode.to_udf` or :meth:`UdfNode.to_udx` methods. Args: s (str): UDF or UDX serialization """ if not (s.startswith('(') and s.endswith(')')): raise ValueError( 'Derivations must begin and end with parentheses: ( )' ) s_ = s[1:] # get rid of initial open-parenthesis stack = [] deriv = None try: matches = cls.udf_re.finditer(s_) for match in matches: if match.group('done'): node = stack.pop() if len(stack) == 0: deriv = node break else: stack[-1].daughters.append(node) elif match.group('form'): if len(stack) == 0: raise ValueError('Possible leaf node with no parent.') gd = match.groupdict() # ignore LKB-style start/end data if it exists on gd term = UdfTerminal( _unquote(gd['form']), tokens=_udf_tokens(gd.get('tokens')), parent=stack[-1] if stack else None ) stack[-1].daughters.append(term) elif match.group('id'): gd = match.groupdict() head = None entity, _, type = gd['entity'].partition('@') if entity[0] == '^': entity = entity[1:] head = True if type == '': type = None udf = UdfNode(gd['id'], entity, gd['score'], gd['start'], gd['end'], head=head, type=type, parent=stack[-1] if stack else None) stack.append(udf) elif match.group('root'): udf = UdfNode(None, match.group('root')) stack.append(udf) except (ValueError, AttributeError): raise ValueError('Invalid derivation: %s' % s) if stack or deriv is None: raise ValueError('Invalid derivation; possibly unbalanced ' 'parentheses: %s' % s) return cls(*deriv, head=deriv._head, type=deriv.type)
Instantiate a `Derivation` from a UDF or UDX string representation. The UDF/UDX representations are as output by a processor like the `LKB <http://moin.delph-in.net/LkbTop>`_ or `ACE <http://sweaglesw.org/linguistics/ace/>`_, or from the :meth:`UdfNode.to_udf` or :meth:`UdfNode.to_udx` methods. Args: s (str): UDF or UDX serialization
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/derivation.py#L549-L611
delph-in/pydelphin
delphin/mrs/penman.py
load
def load(fh, model): """ Deserialize PENMAN graphs from a file (handle or filename) Args: fh: filename or file object model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*) """ graphs = penman.load(fh, cls=XMRSCodec) xs = [model.from_triples(g.triples()) for g in graphs] return xs
python
def load(fh, model): """ Deserialize PENMAN graphs from a file (handle or filename) Args: fh: filename or file object model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*) """ graphs = penman.load(fh, cls=XMRSCodec) xs = [model.from_triples(g.triples()) for g in graphs] return xs
Deserialize PENMAN graphs from a file (handle or filename) Args: fh: filename or file object model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/penman.py#L29-L41
delph-in/pydelphin
delphin/mrs/penman.py
loads
def loads(s, model): """ Deserialize PENMAN graphs from a string Args: s (str): serialized PENMAN graphs model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*) """ graphs = penman.loads(s, cls=XMRSCodec) xs = [model.from_triples(g.triples()) for g in graphs] return xs
python
def loads(s, model): """ Deserialize PENMAN graphs from a string Args: s (str): serialized PENMAN graphs model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*) """ graphs = penman.loads(s, cls=XMRSCodec) xs = [model.from_triples(g.triples()) for g in graphs] return xs
Deserialize PENMAN graphs from a string Args: s (str): serialized PENMAN graphs model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/penman.py#L44-L56
delph-in/pydelphin
delphin/mrs/penman.py
dump
def dump(destination, xs, model=None, properties=False, indent=True, **kwargs): """ Serialize Xmrs (or subclass) objects to PENMAN and write to a file. Args: destination: filename or file object xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to serialize model: Xmrs subclass used to get triples properties: if `True`, encode variable properties indent: if `True`, adaptively indent; if `False` or `None`, don't indent; if a non-negative integer N, indent N spaces per level """ text = dumps( xs, model=model, properties=properties, indent=indent, **kwargs ) if hasattr(destination, 'write'): print(text, file=destination) else: with open(destination, 'w') as fh: print(text, file=fh)
python
def dump(destination, xs, model=None, properties=False, indent=True, **kwargs): """ Serialize Xmrs (or subclass) objects to PENMAN and write to a file. Args: destination: filename or file object xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to serialize model: Xmrs subclass used to get triples properties: if `True`, encode variable properties indent: if `True`, adaptively indent; if `False` or `None`, don't indent; if a non-negative integer N, indent N spaces per level """ text = dumps( xs, model=model, properties=properties, indent=indent, **kwargs ) if hasattr(destination, 'write'): print(text, file=destination) else: with open(destination, 'w') as fh: print(text, file=fh)
Serialize Xmrs (or subclass) objects to PENMAN and write to a file. Args: destination: filename or file object xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to serialize model: Xmrs subclass used to get triples properties: if `True`, encode variable properties indent: if `True`, adaptively indent; if `False` or `None`, don't indent; if a non-negative integer N, indent N spaces per level
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/penman.py#L59-L80
delph-in/pydelphin
delphin/mrs/penman.py
dumps
def dumps(xs, model=None, properties=False, indent=True, **kwargs): """ Serialize Xmrs (or subclass) objects to PENMAN notation Args: xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to serialize model: Xmrs subclass used to get triples properties: if `True`, encode variable properties indent: if `True`, adaptively indent; if `False` or `None`, don't indent; if a non-negative integer N, indent N spaces per level Returns: the PENMAN serialization of *xs* """ xs = list(xs) if not xs: return '' given_class = xs[0].__class__ # assume they are all the same if model is None: model = xs[0].__class__ if not hasattr(model, 'to_triples'): raise TypeError( '{} class does not implement to_triples()'.format(model.__name__) ) # convert MRS to DMRS if necessary; EDS cannot convert if given_class.__name__ in ('Mrs', 'Xmrs'): xs = [model.from_xmrs(x, **kwargs) for x in xs] elif given_class.__name__ == 'Eds' and model.__name__ != 'Eds': raise ValueError('Cannot convert EDS to non-EDS') codec = XMRSCodec() graphs = [ codec.triples_to_graph(model.to_triples(x, properties=properties)) for x in xs ] if 'pretty_print' in kwargs: indent = kwargs['pretty_print'] return penman.dumps(graphs, cls=XMRSCodec, indent=indent)
python
def dumps(xs, model=None, properties=False, indent=True, **kwargs): """ Serialize Xmrs (or subclass) objects to PENMAN notation Args: xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to serialize model: Xmrs subclass used to get triples properties: if `True`, encode variable properties indent: if `True`, adaptively indent; if `False` or `None`, don't indent; if a non-negative integer N, indent N spaces per level Returns: the PENMAN serialization of *xs* """ xs = list(xs) if not xs: return '' given_class = xs[0].__class__ # assume they are all the same if model is None: model = xs[0].__class__ if not hasattr(model, 'to_triples'): raise TypeError( '{} class does not implement to_triples()'.format(model.__name__) ) # convert MRS to DMRS if necessary; EDS cannot convert if given_class.__name__ in ('Mrs', 'Xmrs'): xs = [model.from_xmrs(x, **kwargs) for x in xs] elif given_class.__name__ == 'Eds' and model.__name__ != 'Eds': raise ValueError('Cannot convert EDS to non-EDS') codec = XMRSCodec() graphs = [ codec.triples_to_graph(model.to_triples(x, properties=properties)) for x in xs ] if 'pretty_print' in kwargs: indent = kwargs['pretty_print'] return penman.dumps(graphs, cls=XMRSCodec, indent=indent)
Serialize Xmrs (or subclass) objects to PENMAN notation Args: xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to serialize model: Xmrs subclass used to get triples properties: if `True`, encode variable properties indent: if `True`, adaptively indent; if `False` or `None`, don't indent; if a non-negative integer N, indent N spaces per level Returns: the PENMAN serialization of *xs*
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/penman.py#L83-L127
luckydonald/pytgbot
pytgbot/api_types/sendable/reply_markup.py
ReplyKeyboardMarkup.from_array
def from_array(array): """ Deserialize a new ReplyKeyboardMarkup from a given dictionary. :return: new ReplyKeyboardMarkup instance. :rtype: ReplyKeyboardMarkup """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['keyboard'] = KeyboardButton.from_array_list(array.get('keyboard'), list_level=2) data['resize_keyboard'] = bool(array.get('resize_keyboard')) if array.get('resize_keyboard') is not None else None data['one_time_keyboard'] = bool(array.get('one_time_keyboard')) if array.get('one_time_keyboard') is not None else None data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None instance = ReplyKeyboardMarkup(**data) instance._raw = array return instance
python
def from_array(array): """ Deserialize a new ReplyKeyboardMarkup from a given dictionary. :return: new ReplyKeyboardMarkup instance. :rtype: ReplyKeyboardMarkup """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['keyboard'] = KeyboardButton.from_array_list(array.get('keyboard'), list_level=2) data['resize_keyboard'] = bool(array.get('resize_keyboard')) if array.get('resize_keyboard') is not None else None data['one_time_keyboard'] = bool(array.get('one_time_keyboard')) if array.get('one_time_keyboard') is not None else None data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None instance = ReplyKeyboardMarkup(**data) instance._raw = array return instance
Deserialize a new ReplyKeyboardMarkup from a given dictionary. :return: new ReplyKeyboardMarkup instance. :rtype: ReplyKeyboardMarkup
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/sendable/reply_markup.py#L122-L142
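A sketch of the ReplyKeyboardMarkup.from_array row above. The two-level keyboard list mirrors the Telegram Bot API; {'text': ...} as the only button field is an assumption, not something shown in this excerpt:
```python
from pytgbot.api_types.sendable.reply_markup import ReplyKeyboardMarkup

raw = {'keyboard': [[{'text': 'Yes'}, {'text': 'No'}]], 'one_time_keyboard': True}
markup = ReplyKeyboardMarkup.from_array(raw)
print(markup.one_time_keyboard)   # True; resize_keyboard and selective stay None
```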
luckydonald/pytgbot
pytgbot/api_types/sendable/reply_markup.py
InlineKeyboardMarkup.from_array
def from_array(array): """ Deserialize a new InlineKeyboardMarkup from a given dictionary. :return: new InlineKeyboardMarkup instance. :rtype: InlineKeyboardMarkup """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['inline_keyboard'] = InlineKeyboardButton.from_array_list(array.get('inline_keyboard'), list_level=2) instance = InlineKeyboardMarkup(**data) instance._raw = array return instance
python
def from_array(array): """ Deserialize a new InlineKeyboardMarkup from a given dictionary. :return: new InlineKeyboardMarkup instance. :rtype: InlineKeyboardMarkup """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['inline_keyboard'] = InlineKeyboardButton.from_array_list(array.get('inline_keyboard'), list_level=2) instance = InlineKeyboardMarkup(**data) instance._raw = array return instance
Deserialize a new InlineKeyboardMarkup from a given dictionary. :return: new InlineKeyboardMarkup instance. :rtype: InlineKeyboardMarkup
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/sendable/reply_markup.py#L459-L476
luckydonald/pytgbot
pytgbot/webhook.py
Webhook._prepare_request
def _prepare_request(self, command, query): """ :param command: The Url command parameter :type command: str :param query: will get json encoded. :type query: dict :return: """ from luckydonaldUtils.encoding import to_native as n from pytgbot.api_types.sendable import Sendable from pytgbot.api_types import as_array from DictObject import DictObject import json params = {} for key in query.keys(): element = query[key] if element is not None: if isinstance(element, Sendable): params[key] = json.dumps(as_array(element)) else: params[key] = element url = self._base_url.format(api_key=n(self.api_key), command=n(command)) return DictObject(url=url, params=params)
python
def _prepare_request(self, command, query): """ :param command: The Url command parameter :type command: str :param query: will get json encoded. :type query: dict :return: """ from luckydonaldUtils.encoding import to_native as n from pytgbot.api_types.sendable import Sendable from pytgbot.api_types import as_array from DictObject import DictObject import json params = {} for key in query.keys(): element = query[key] if element is not None: if isinstance(element, Sendable): params[key] = json.dumps(as_array(element)) else: params[key] = element url = self._base_url.format(api_key=n(self.api_key), command=n(command)) return DictObject(url=url, params=params)
:param command: The Url command parameter :type command: str :param query: will get json encoded. :type query: dict :return:
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/webhook.py#L20-L45
luckydonald/pytgbot
pytgbot/webhook.py
Webhook.do
def do(self, command, files=None, use_long_polling=False, request_timeout=None, **query): """ Send a request to the api. If the bot is set to return the json objects, it will look like this: ```json { "ok": bool, "result": {...}, # optionally present: "description": "human-readable description of the result", "error_code": int } ``` :param command: The Url command parameter :type command: str :keyword request_timeout: When the request should time out. :type request_timeout: int :keyword files: if it needs to send files. :keyword use_long_polling: if it should use long polling. (see http://docs.python-requests.org/en/latest/api/#requests.Response.iter_content) :type use_long_polling: bool :param query: will get json encoded. :return: The json response from the server, or, if `self.return_python_objects` is `True`, a parsed return type. :rtype: DictObject.DictObject | pytgbot.api_types.receivable.Receivable """ params = self._prepare_request(command, query) r = self._do_request( params.url, params=params.params, files=files, stream=use_long_polling, timeout=request_timeout ) return self._process_response(r)
python
def do(self, command, files=None, use_long_polling=False, request_timeout=None, **query): """ Send a request to the api. If the bot is set to return the json objects, it will look like this: ```json { "ok": bool, "result": {...}, # optionally present: "description": "human-readable description of the result", "error_code": int } ``` :param command: The Url command parameter :type command: str :keyword request_timeout: When the request should time out. :type request_timeout: int :keyword files: if it needs to send files. :keyword use_long_polling: if it should use long polling. (see http://docs.python-requests.org/en/latest/api/#requests.Response.iter_content) :type use_long_polling: bool :param query: will get json encoded. :return: The json response from the server, or, if `self.return_python_objects` is `True`, a parsed return type. :rtype: DictObject.DictObject | pytgbot.api_types.receivable.Receivable """ params = self._prepare_request(command, query) r = self._do_request( params.url, params=params.params, files=files, stream=use_long_polling, timeout=request_timeout ) return self._process_response(r)
Send a request to the api. If the bot is set to return the json objects, it will look like this: ```json { "ok": bool, "result": {...}, # optionally present: "description": "human-readable description of the result", "error_code": int } ``` :param command: The Url command parameter :type command: str :keyword request_timeout: When the request should time out. :type request_timeout: int :keyword files: if it needs to send files. :keyword use_long_polling: if it should use long polling. (see http://docs.python-requests.org/en/latest/api/#requests.Response.iter_content) :type use_long_polling: bool :param query: will get json encoded. :return: The json response from the server, or, if `self.return_python_objects` is `True`, a parsed return type. :rtype: DictObject.DictObject | pytgbot.api_types.receivable.Receivable
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/webhook.py#L103-L141
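A heavily hedged sketch of calling Webhook.do() from the two rows above: keyword arguments become request parameters, with Sendable values JSON-encoded by _prepare_request(). The Webhook constructor arguments are not shown in this excerpt and are assumed here:
```python
from pytgbot.webhook import Webhook

bot = Webhook('123456:ABC-EXAMPLE')   # assumed constructor: the bot API token
resp = bot.do('sendMessage', chat_id=12345, text='hello', request_timeout=10)
print(resp)   # DictObject or a parsed Receivable, depending on return_python_objects
```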
luckydonald/pytgbot
code_generation/code_generator_online.py
parse_table
def parse_table(tag): """ returns tuple of type ("class"/"func") and list of param strings. :param tag: :return: """ first = True table_header = None table_type = 'unknown' param_strings = [] thead = tag.find('thead', recursive=False) theads = None # list (items in <tr> row) of <th>/<tr> elements. if thead: theads = thead.find_all(["th", "td"]) # end if tbody = tag.find('tbody', recursive=False) if tbody: tbody_rows = tbody.find_all("tr") else: tbody_rows = tag.find_all("tr") # end if tbodys = [ # list (rows) of list (items in <tr> row) of <tr> elements. row.find_all(["td" ,"th"]) for row in tbody_rows ] if not thead: # so first row = header theads = tbody_rows[0] tbodys = tbody_rows[1:] # end if # TABLE HEADER found_columns = [] for column in theads: # Either (a) `<td><strong> ... </strong></td>` # or new (b) `<th> ... </th>` col = column.find("strong") if col: # (a) `<td><strong> ... </strong></td>` col_text = col.text else: # (b) `<th> ... </th>` col_text = column.text # end if found_columns.append(col_text) # end def # if TABLE is func for test_columns in func_fields: if found_columns == test_columns: table_header = test_columns table_type = 'func' break # end if # end for # if TABLE is class if not table_header: # only check if we don't have a result yet # search class now for test_columns in class_fields: if found_columns == test_columns: if table_header is not None: raise AssertionError("Table detected as func and class: {!r}".format(found_columns)) table_header = test_columns table_type = 'class' break # end if # end for # end if # TABLE is none of the above if not table_header: # we don't have a result yet raise AssertionError("Unknown table, {!r}".format(found_columns)) # end if # TABLE BODY for tds in tbodys: string = "\t".join([col.text for col in tds]) logger.debug("t: " + string) param_strings.append(string) pass # end for row return table_type, param_strings
python
def parse_table(tag): """ returns tuple of type ("class"/"func") and list of param strings. :param tag: :return: """ first = True table_header = None table_type = 'unknown' param_strings = [] thead = tag.find('thead', recursive=False) theads = None # list (items in <tr> row) of <th>/<tr> elements. if thead: theads = thead.find_all(["th", "td"]) # end if tbody = tag.find('tbody', recursive=False) if tbody: tbody_rows = tbody.find_all("tr") else: tbody_rows = tag.find_all("tr") # end if tbodys = [ # list (rows) of list (items in <tr> row) of <tr> elements. row.find_all(["td" ,"th"]) for row in tbody_rows ] if not thead: # so first row = header theads = tbody_rows[0] tbodys = tbody_rows[1:] # end if # TABLE HEADER found_columns = [] for column in theads: # Either (a) `<td><strong> ... </strong></td>` # or new (b) `<th> ... </th>` col = column.find("strong") if col: # (a) `<td><strong> ... </strong></td>` col_text = col.text else: # (b) `<th> ... </th>` col_text = column.text # end if found_columns.append(col_text) # end def # if TABLE is func for test_columns in func_fields: if found_columns == test_columns: table_header = test_columns table_type = 'func' break # end if # end for # if TABLE is class if not table_header: # only check if we don't have a result yet # search class now for test_columns in class_fields: if found_columns == test_columns: if table_header is not None: raise AssertionError("Table detected as func and class: {!r}".format(found_columns)) table_header = test_columns table_type = 'class' break # end if # end for # end if # TABLE is none of the above if not table_header: # we don't have a result yet raise AssertionError("Unknown table, {!r}".format(found_columns)) # end if # TABLE BODY for tds in tbodys: string = "\t".join([col.text for col in tds]) logger.debug("t: " + string) param_strings.append(string) pass # end for row return table_type, param_strings
returns tuple of type ("class"/"func") and list of param strings. :param tag: :return:
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/code_generator_online.py#L40-L123
luckydonald/pytgbot
code_generation/code_generator_online.py
safe_to_file
def safe_to_file(folder, results): """ Receives a list of results (type :class:`Clazz` or :class:`Function`), and put them into the right files in :var:`folder` :param folder: Where the files should be in. :type folder: str :param results: A list of :class:`Clazz` or :class:`Function` objects, which will be used to calculate the source code. :type results: Union(Clazz, Function) """ functions = [] message_send_functions = [] clazzes = {} # "filepath": [Class, Class, ...] # split results into functions and classes for result in results: assert isinstance(result, (Clazz, Function)) if isinstance(result, Clazz): import_path = get_type_path(result.clazz) import_path = import_path.rstrip(".") file_path = calc_path_and_create_folders(folder, import_path) result.filepath = file_path if file_path not in clazzes: clazzes[file_path] = [] clazzes[file_path].append(result) else: assert isinstance(result, Function) import_path = "pytgbot.bot." file_path = calc_path_and_create_folders(folder, import_path) result.filepath = file_path functions.append(result) if result.name.startswith('send_'): import_path = "teleflask_messages." file_path = calc_path_and_create_folders(folder, import_path) result2 = safe_eval(repr(result), SAVE_VALUES) # serialize + unserialize = deepcopy result2.filepath = file_path message_send_functions.append(result2) # end if # end if # end for bot_template = get_template("bot.template") clazzfile_template = get_template("classfile.template") teleflask_messages_template = get_template("teleflask_messages_file.template") for path, clazz_list in clazzes.items(): clazz_imports = set() for clazz_ in clazz_list: assert isinstance(clazz_, Clazz) assert isinstance(clazz_.parent_clazz, Type) clazz_imports.add(clazz_.parent_clazz.as_import) # end for clazz_imports = list(clazz_imports) clazz_imports.sort() is_sendable = ("sendable" in path) try: with open(path, "w") as f: result = clazzfile_template.render(clazzes=clazz_list, imports=clazz_imports, is_sendable=is_sendable) result = result.replace("\t", " ") f.write(result) # end with except IOError: raise # lol # end try # end for classes if functions: txt = bot_template.render(functions=functions) with open(functions[0].filepath, "w") as f: f.write(txt) # end with # end if if message_send_functions: txt = teleflask_messages_template.render(functions=message_send_functions) with open(message_send_functions[0].filepath, "w") as f: f.write(txt)
python
def safe_to_file(folder, results): """ Receives a list of results (type :class:`Clazz` or :class:`Function`), and put them into the right files in :var:`folder` :param folder: Where the files should be in. :type folder: str :param results: A list of :class:`Clazz` or :class:`Function` objects, which will be used to calculate the source code. :type results: Union(Clazz, Function) """ functions = [] message_send_functions = [] clazzes = {} # "filepath": [Class, Class, ...] # split results into functions and classes for result in results: assert isinstance(result, (Clazz, Function)) if isinstance(result, Clazz): import_path = get_type_path(result.clazz) import_path = import_path.rstrip(".") file_path = calc_path_and_create_folders(folder, import_path) result.filepath = file_path if file_path not in clazzes: clazzes[file_path] = [] clazzes[file_path].append(result) else: assert isinstance(result, Function) import_path = "pytgbot.bot." file_path = calc_path_and_create_folders(folder, import_path) result.filepath = file_path functions.append(result) if result.name.startswith('send_'): import_path = "teleflask_messages." file_path = calc_path_and_create_folders(folder, import_path) result2 = safe_eval(repr(result), SAVE_VALUES) # serialize + unserialize = deepcopy result2.filepath = file_path message_send_functions.append(result2) # end if # end if # end for bot_template = get_template("bot.template") clazzfile_template = get_template("classfile.template") teleflask_messages_template = get_template("teleflask_messages_file.template") for path, clazz_list in clazzes.items(): clazz_imports = set() for clazz_ in clazz_list: assert isinstance(clazz_, Clazz) assert isinstance(clazz_.parent_clazz, Type) clazz_imports.add(clazz_.parent_clazz.as_import) # end for clazz_imports = list(clazz_imports) clazz_imports.sort() is_sendable = ("sendable" in path) try: with open(path, "w") as f: result = clazzfile_template.render(clazzes=clazz_list, imports=clazz_imports, is_sendable=is_sendable) result = result.replace("\t", " ") f.write(result) # end with except IOError: raise # lol # end try # end for classes if functions: txt = bot_template.render(functions=functions) with open(functions[0].filepath, "w") as f: f.write(txt) # end with # end if if message_send_functions: txt = teleflask_messages_template.render(functions=message_send_functions) with open(message_send_functions[0].filepath, "w") as f: f.write(txt)
Receives a list of results (type :class:`Clazz` or :class:`Function`) and puts them into the right files in :var:`folder`. :param folder: The folder the files should be written to. :type folder: str :param results: A list of :class:`Clazz` or :class:`Function` objects, which will be used to generate the source code. :type results: List[Union[Clazz, Function]]
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/code_generator_online.py#L427-L502
luckydonald/pytgbot
code_generation/code_generator_online.py
calc_path_and_create_folders
def calc_path_and_create_folders(folder, import_path): """ calculate the path and create the needed folders """ file_path = abspath(path_join(folder, import_path[:import_path.rfind(".")].replace(".", folder_seperator) + ".py")) mkdir_p(dirname(file_path)) return file_path
python
def calc_path_and_create_folders(folder, import_path): """ calculate the path and create the needed folders """ file_path = abspath(path_join(folder, import_path[:import_path.rfind(".")].replace(".", folder_seperator) + ".py")) mkdir_p(dirname(file_path)) return file_path
calculate the path and create the needed folders
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/code_generator_online.py#L508-L512
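A short illustration of the path computation above; the folder and import path are made up, and '/' stands in for folder_seperator:

# calc_path_and_create_folders('/tmp/out', 'pytgbot.api_types.receivable.media.')
# drops everything after the last '.', turns the remaining dots into path separators,
# appends '.py', creates the directories, and returns:
#   '/tmp/out/pytgbot/api_types/receivable/media.py'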
luckydonald/pytgbot
examples/cli.py
read_file_to_buffer
def read_file_to_buffer(filename): """ Reads a file to string buffer :param filename: :return: """ f = open(filename, "r") buf = BytesIO(f.read()) f.close() return buf
python
def read_file_to_buffer(filename): """ Reads a file to string buffer :param filename: :return: """ f = open(filename, "r") buf = BytesIO(f.read()) f.close() return buf
Reads a file into a string buffer. :param filename: path of the file to read. :return: the buffer with the file's contents.
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/examples/cli.py#L38-L47
luckydonald/pytgbot
examples/cli.py
parse_args
def parse_args(string): """ `"yada hoa" yupi yeah 12 "" None "None"` -> `["yada hoa", "yupi", "yeah", 12, "", None, "None"]` :param str: :return: """ import ast is_quoted = False result_parts = [] current_str = "" while len(string) > 0: if string[0] == "\"": is_quoted = not is_quoted current_str += string[0] elif string[0].isspace(): if is_quoted: current_str += string[0] else: result_parts.append(current_str) current_str = "" # end if else: current_str += string[0] # end if string = string[1:] # end while if current_str: # last part of the array result_parts.append(current_str) # end if for i in range(len(result_parts)): # Will try for each element if it is something pythonic. Parsed type will replace original list element. try: part = ast.literal_eval(result_parts[i]) result_parts[i] = part # write it back. except ValueError: # could not parse -> is string pass # because already is str. # end try # end for return result_parts
python
def parse_args(string): """ `"yada hoa" yupi yeah 12 "" None "None"` -> `["yada hoa", "yupi", "yeah", 12, "", None, "None"]` :param str: :return: """ import ast is_quoted = False result_parts = [] current_str = "" while len(string) > 0: if string[0] == "\"": is_quoted = not is_quoted current_str += string[0] elif string[0].isspace(): if is_quoted: current_str += string[0] else: result_parts.append(current_str) current_str = "" # end if else: current_str += string[0] # end if string = string[1:] # end while if current_str: # last part of the array result_parts.append(current_str) # end if for i in range(len(result_parts)): # Will try for each element if it is something pythonic. Parsed type will replace original list element. try: part = ast.literal_eval(result_parts[i]) result_parts[i] = part # write it back. except ValueError: # could not parse -> is string pass # because already is str. # end try # end for return result_parts
`"yada hoa" yupi yeah 12 "" None "None"` -> `["yada hoa", "yupi", "yeah", 12, "", None, "None"]` :param str: :return:
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/examples/cli.py#L746-L785
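A quick usage sketch matching the transformation shown in the parse_args docstring; the import path is an assumption:

# from examples.cli import parse_args  # assumed import path

parts = parse_args('"yada hoa" yupi yeah 12 "" None "None"')
assert parts == ["yada hoa", "yupi", "yeah", 12, "", None, "None"]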
luckydonald/pytgbot
examples/cli.py
Color.overwrite_color
def overwrite_color(self, string, color, prefix=False, reset=False): """ :param string: input :param color: new color :param prefix: if it also should start the color to at the beginning. :param reset: if it also should end the color at the ending. :type reset: bool | int | str :return: """ if isinstance(color, int): color = self.prepare_color(color) # end if prefix = color if prefix else "" if isinstance(reset, int): reset = self.prepare_color(reset) elif isinstance(reset, bool): reset = self.formatter.color_off if reset else "" # end if return ( prefix + string.replace(self.formatter.color_off, self.formatter.color_off+color).replace(self.formatter.all_off, self.formatter.all_off + color) + reset )
python
def overwrite_color(self, string, color, prefix=False, reset=False): """ :param string: input :param color: new color :param prefix: if it also should start the color to at the beginning. :param reset: if it also should end the color at the ending. :type reset: bool | int | str :return: """ if isinstance(color, int): color = self.prepare_color(color) # end if prefix = color if prefix else "" if isinstance(reset, int): reset = self.prepare_color(reset) elif isinstance(reset, bool): reset = self.formatter.color_off if reset else "" # end if return ( prefix + string.replace(self.formatter.color_off, self.formatter.color_off+color).replace(self.formatter.all_off, self.formatter.all_off + color) + reset )
:param string: input :param color: new color :param prefix: whether the color should also be applied at the beginning. :param reset: whether the color should also be reset at the end. :type reset: bool | int | str :return:
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/examples/cli.py#L164-L186
luckydonald/pytgbot
examples/cli.py
CLI.print_peer
def print_peer(self, peer, show_id=True, id_prefix="", reply=True): """ :param id_prefix: Prefix of the #id thing. Set a string, or true to have it generated. :type id_prefix: str|bool """ if isinstance(id_prefix, bool): if id_prefix: # True if isinstance(peer, User): id_prefix = "user" elif isinstance(peer, Chat): id_prefix = peer.type else: id_prefix = "unknown" # end if else: # False id_prefix = "" # end if # end if peer_string = self.peer_to_string(peer) if show_id and "id" in peer: peer_string += " ({color_lightblue}{id_prefix}#{id}{color_off})".format(id_prefix=id_prefix, id=peer.id, **self.color.formatter) return peer_string
python
def print_peer(self, peer, show_id=True, id_prefix="", reply=True): """ :param id_prefix: Prefix of the #id thing. Set a string, or true to have it generated. :type id_prefix: str|bool """ if isinstance(id_prefix, bool): if id_prefix: # True if isinstance(peer, User): id_prefix = "user" elif isinstance(peer, Chat): id_prefix = peer.type else: id_prefix = "unknown" # end if else: # False id_prefix = "" # end if # end if peer_string = self.peer_to_string(peer) if show_id and "id" in peer: peer_string += " ({color_lightblue}{id_prefix}#{id}{color_off})".format(id_prefix=id_prefix, id=peer.id, **self.color.formatter) return peer_string
:param id_prefix: Prefix for the #id part. Set a string, or True to have it generated automatically. :type id_prefix: str|bool
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/examples/cli.py#L580-L601
delph-in/pydelphin
delphin/mrs/__init__.py
convert
def convert(txt, src_fmt, tgt_fmt, single=True, **kwargs): """ Convert a textual representation of \*MRS from one the src_fmt representation to the tgt_fmt representation. By default, only read and convert a single \*MRS object (e.g. for `mrx` this starts at <mrs> and not <mrs-list>), but changing the `mode` argument to `corpus` (alternatively: `list`) reads and converts multiple \*MRSs. Args: txt: A string of semantic data. src_fmt: The original representation format of txt. tgt_fmt: The representation format to convert to. single: If True, assume txt represents a single \*MRS, otherwise read it as a corpus (or list) of \*MRSs. kwargs: Any other keyword arguments to pass to the serializer of the target format. See Notes. Returns: A string in the target format. Notes: src_fmt and tgt_fmt may be one of the following: | format | description | | --------- | ---------------------------- | | simplemrs | The popular SimpleMRS format | | mrx | The XML format of MRS | | dmrx | The XML format of DMRS | Additional keyword arguments for the serializer may include: | option | description | | ------------ | ----------------------------------- | | pretty_print | print with newlines and indentation | | color | print with syntax highlighting | """ from importlib import import_module reader = import_module('{}.{}'.format('delphin.mrs', src_fmt.lower())) writer = import_module('{}.{}'.format('delphin.mrs', tgt_fmt.lower())) return writer.dumps( reader.loads(txt, single=single), single=single, **kwargs )
python
def convert(txt, src_fmt, tgt_fmt, single=True, **kwargs): """ Convert a textual representation of \*MRS from one the src_fmt representation to the tgt_fmt representation. By default, only read and convert a single \*MRS object (e.g. for `mrx` this starts at <mrs> and not <mrs-list>), but changing the `mode` argument to `corpus` (alternatively: `list`) reads and converts multiple \*MRSs. Args: txt: A string of semantic data. src_fmt: The original representation format of txt. tgt_fmt: The representation format to convert to. single: If True, assume txt represents a single \*MRS, otherwise read it as a corpus (or list) of \*MRSs. kwargs: Any other keyword arguments to pass to the serializer of the target format. See Notes. Returns: A string in the target format. Notes: src_fmt and tgt_fmt may be one of the following: | format | description | | --------- | ---------------------------- | | simplemrs | The popular SimpleMRS format | | mrx | The XML format of MRS | | dmrx | The XML format of DMRS | Additional keyword arguments for the serializer may include: | option | description | | ------------ | ----------------------------------- | | pretty_print | print with newlines and indentation | | color | print with syntax highlighting | """ from importlib import import_module reader = import_module('{}.{}'.format('delphin.mrs', src_fmt.lower())) writer = import_module('{}.{}'.format('delphin.mrs', tgt_fmt.lower())) return writer.dumps( reader.loads(txt, single=single), single=single, **kwargs )
Convert a textual representation of \*MRS from the src_fmt representation to the tgt_fmt representation. By default, only read and convert a single \*MRS object (e.g. for `mrx` this starts at <mrs> and not <mrs-list>), but setting the `single` argument to `False` reads and converts multiple \*MRSs. Args: txt: A string of semantic data. src_fmt: The original representation format of txt. tgt_fmt: The representation format to convert to. single: If True, assume txt represents a single \*MRS, otherwise read it as a corpus (or list) of \*MRSs. kwargs: Any other keyword arguments to pass to the serializer of the target format. See Notes. Returns: A string in the target format. Notes: src_fmt and tgt_fmt may be one of the following: | format | description | | --------- | ---------------------------- | | simplemrs | The popular SimpleMRS format | | mrx | The XML format of MRS | | dmrx | The XML format of DMRS | Additional keyword arguments for the serializer may include: | option | description | | ------------ | ----------------------------------- | | pretty_print | print with newlines and indentation | | color | print with syntax highlighting |
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/__init__.py#L53-L95
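A hedged usage sketch for convert; the input file name is hypothetical and its contents are assumed to be one valid SimpleMRS:

from delphin import mrs

with open('sentence.simplemrs') as f:   # hypothetical file holding one SimpleMRS
    simplemrs_text = f.read()
dmrx_text = mrs.convert(simplemrs_text, 'simplemrs', 'dmrx', pretty_print=True)
print(dmrx_text)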
delph-in/pydelphin
delphin/tdl.py
_peek
def _peek(tokens, n=0): """peek and drop comments""" return tokens.peek(n=n, skip=_is_comment, drop=True)
python
def _peek(tokens, n=0): """peek and drop comments""" return tokens.peek(n=n, skip=_is_comment, drop=True)
peek and drop comments
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1043-L1045
delph-in/pydelphin
delphin/tdl.py
_shift
def _shift(tokens): """pop the next token, then peek the gid of the following""" after = tokens.peek(n=1, skip=_is_comment, drop=True) tok = tokens._buffer.popleft() return tok[0], tok[1], tok[2], after[0]
python
def _shift(tokens): """pop the next token, then peek the gid of the following""" after = tokens.peek(n=1, skip=_is_comment, drop=True) tok = tokens._buffer.popleft() return tok[0], tok[1], tok[2], after[0]
pop the next token, then peek the gid of the following
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1053-L1057
delph-in/pydelphin
delphin/tdl.py
_accumulate
def _accumulate(lexitems): """ Yield lists of tokens based on very simple parsing that checks the level of nesting within a structure. This is probably much faster than the LookaheadIterator method, but it is less safe; an unclosed list or AVM may cause it to build a list including the rest of the file, or it may return a list that doesn't span a full definition. As PyDelphin's goals for TDL parsing do not include speed, this method is not currently used, although it is retained in the source code as an example if future priorities change. """ data = [] stack = [] break_on = 10 in_def = False for item in lexitems: gid = item[0] # only yield comments outside of definitions if gid in (2, 3): if len(data) == 0: yield [item] else: continue elif gid == 20: assert len(data) == 0 yield [item] # the following just checks if the previous definition was not # terminated when the next one is read in elif gid in (7, 8): if in_def: yield data[:-1] data = data[-1:] + [item] stack = [] break_on = 10 else: data.append(item) in_def = True else: data.append(item) if gid == break_on: if len(stack) == 0: yield data data = [] in_def = False else: break_on = stack.pop() elif gid in (13, 14, 15): stack.append(break_on) break_on = gid + 3 if data: yield data
python
def _accumulate(lexitems): """ Yield lists of tokens based on very simple parsing that checks the level of nesting within a structure. This is probably much faster than the LookaheadIterator method, but it is less safe; an unclosed list or AVM may cause it to build a list including the rest of the file, or it may return a list that doesn't span a full definition. As PyDelphin's goals for TDL parsing do not include speed, this method is not currently used, although it is retained in the source code as an example if future priorities change. """ data = [] stack = [] break_on = 10 in_def = False for item in lexitems: gid = item[0] # only yield comments outside of definitions if gid in (2, 3): if len(data) == 0: yield [item] else: continue elif gid == 20: assert len(data) == 0 yield [item] # the following just checks if the previous definition was not # terminated when the next one is read in elif gid in (7, 8): if in_def: yield data[:-1] data = data[-1:] + [item] stack = [] break_on = 10 else: data.append(item) in_def = True else: data.append(item) if gid == break_on: if len(stack) == 0: yield data data = [] in_def = False else: break_on = stack.pop() elif gid in (13, 14, 15): stack.append(break_on) break_on = gid + 3 if data: yield data
Yield lists of tokens based on very simple parsing that checks the level of nesting within a structure. This is probably much faster than the LookaheadIterator method, but it is less safe; an unclosed list or AVM may cause it to build a list including the rest of the file, or it may return a list that doesn't span a full definition. As PyDelphin's goals for TDL parsing do not include speed, this method is not currently used, although it is retained in the source code as an example if future priorities change.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1060-L1111
delph-in/pydelphin
delphin/tdl.py
_lex
def _lex(stream): """ Lex the input stream according to _tdl_lex_re. Yields (gid, token, line_number) """ lines = enumerate(stream, 1) line_no = pos = 0 try: while True: if pos == 0: line_no, line = next(lines) matches = _tdl_lex_re.finditer(line, pos) pos = 0 # reset; only used for multiline patterns for m in matches: gid = m.lastindex if gid <= 2: # potentially multiline patterns if gid == 1: # docstring s, start_line_no, line_no, line, pos = _bounded( '"""', '"""', line, m.end(), line_no, lines) elif gid == 2: # comment s, start_line_no, line_no, line, pos = _bounded( '#|', '|#', line, m.end(), line_no, lines) yield (gid, s, line_no) break elif gid == 30: raise TdlParsingError( ('Syntax error:\n {}\n {}^' .format(line, ' ' * m.start())), line_number=line_no) else: # token = None # if not (6 < gid < 20): # token = m.group(gid) token = m.group(gid) yield (gid, token, line_no) except StopIteration: pass
python
def _lex(stream): """ Lex the input stream according to _tdl_lex_re. Yields (gid, token, line_number) """ lines = enumerate(stream, 1) line_no = pos = 0 try: while True: if pos == 0: line_no, line = next(lines) matches = _tdl_lex_re.finditer(line, pos) pos = 0 # reset; only used for multiline patterns for m in matches: gid = m.lastindex if gid <= 2: # potentially multiline patterns if gid == 1: # docstring s, start_line_no, line_no, line, pos = _bounded( '"""', '"""', line, m.end(), line_no, lines) elif gid == 2: # comment s, start_line_no, line_no, line, pos = _bounded( '#|', '|#', line, m.end(), line_no, lines) yield (gid, s, line_no) break elif gid == 30: raise TdlParsingError( ('Syntax error:\n {}\n {}^' .format(line, ' ' * m.start())), line_number=line_no) else: # token = None # if not (6 < gid < 20): # token = m.group(gid) token = m.group(gid) yield (gid, token, line_no) except StopIteration: pass
Lex the input stream according to _tdl_lex_re. Yields (gid, token, line_number)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1114-L1152
delph-in/pydelphin
delphin/tdl.py
_bounded
def _bounded(p1, p2, line, pos, line_no, lines): """Collect the contents of a bounded multiline string""" substrings = [] start_line_no = line_no end = pos while not line.startswith(p2, end): if line[end] == '\\': end += 2 else: end += 1 if end >= len(line): substrings.append(line[pos:]) try: line_no, line = next(lines) except StopIteration: pattern = 'docstring' if p1 == '"""' else 'block comment' raise TdlParsingError('Unterminated {}'.format(pattern), line_number=start_line_no) pos = end = 0 substrings.append(line[pos:end]) end += len(p2) return ''.join(substrings), start_line_no, line_no, line, end
python
def _bounded(p1, p2, line, pos, line_no, lines): """Collect the contents of a bounded multiline string""" substrings = [] start_line_no = line_no end = pos while not line.startswith(p2, end): if line[end] == '\\': end += 2 else: end += 1 if end >= len(line): substrings.append(line[pos:]) try: line_no, line = next(lines) except StopIteration: pattern = 'docstring' if p1 == '"""' else 'block comment' raise TdlParsingError('Unterminated {}'.format(pattern), line_number=start_line_no) pos = end = 0 substrings.append(line[pos:end]) end += len(p2) return ''.join(substrings), start_line_no, line_no, line, end
Collect the contents of a bounded multiline string
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1155-L1176
delph-in/pydelphin
delphin/tdl.py
iterparse
def iterparse(source, encoding='utf-8'): """ Parse the TDL file *source* and iteratively yield parse events. If *source* is a filename, the file is opened and closed when the generator has finished, otherwise *source* is an open file object and will not be closed when the generator has finished. Parse events are `(event, object, lineno)` tuples, where `event` is a string (`"TypeDefinition"`, `"TypeAddendum"`, `"LexicalRuleDefinition"`, `"LetterSet"`, `"WildCard"`, `"LineComment"`, or `"BlockComment"`), `object` is the interpreted TDL object, and `lineno` is the line number where the entity began in *source*. Args: source (str, file): a filename or open file object encoding (str): the encoding of the file (default: `"utf-8"`; ignored if *source* is an open file) Yields: `(event, object, lineno)` tuples Example: >>> lex = {} >>> for event, obj, lineno in tdl.iterparse('erg/lexicon.tdl'): ... if event == 'TypeDefinition': ... lex[obj.identifier] = obj ... >>> lex['eucalyptus_n1']['SYNSEM.LKEYS.KEYREL.PRED'] <String object (_eucalyptus_n_1_rel) at 140625748595960> """ if hasattr(source, 'read'): for event in _parse2(source): yield event else: with io.open(source, encoding=encoding) as fh: for event in _parse2(fh): yield event
python
def iterparse(source, encoding='utf-8'): """ Parse the TDL file *source* and iteratively yield parse events. If *source* is a filename, the file is opened and closed when the generator has finished, otherwise *source* is an open file object and will not be closed when the generator has finished. Parse events are `(event, object, lineno)` tuples, where `event` is a string (`"TypeDefinition"`, `"TypeAddendum"`, `"LexicalRuleDefinition"`, `"LetterSet"`, `"WildCard"`, `"LineComment"`, or `"BlockComment"`), `object` is the interpreted TDL object, and `lineno` is the line number where the entity began in *source*. Args: source (str, file): a filename or open file object encoding (str): the encoding of the file (default: `"utf-8"`; ignored if *source* is an open file) Yields: `(event, object, lineno)` tuples Example: >>> lex = {} >>> for event, obj, lineno in tdl.iterparse('erg/lexicon.tdl'): ... if event == 'TypeDefinition': ... lex[obj.identifier] = obj ... >>> lex['eucalyptus_n1']['SYNSEM.LKEYS.KEYREL.PRED'] <String object (_eucalyptus_n_1_rel) at 140625748595960> """ if hasattr(source, 'read'): for event in _parse2(source): yield event else: with io.open(source, encoding=encoding) as fh: for event in _parse2(fh): yield event
Parse the TDL file *source* and iteratively yield parse events. If *source* is a filename, the file is opened and closed when the generator has finished, otherwise *source* is an open file object and will not be closed when the generator has finished. Parse events are `(event, object, lineno)` tuples, where `event` is a string (`"TypeDefinition"`, `"TypeAddendum"`, `"LexicalRuleDefinition"`, `"LetterSet"`, `"WildCard"`, `"LineComment"`, or `"BlockComment"`), `object` is the interpreted TDL object, and `lineno` is the line number where the entity began in *source*. Args: source (str, file): a filename or open file object encoding (str): the encoding of the file (default: `"utf-8"`; ignored if *source* is an open file) Yields: `(event, object, lineno)` tuples Example: >>> lex = {} >>> for event, obj, lineno in tdl.iterparse('erg/lexicon.tdl'): ... if event == 'TypeDefinition': ... lex[obj.identifier] = obj ... >>> lex['eucalyptus_n1']['SYNSEM.LKEYS.KEYREL.PRED'] <String object (_eucalyptus_n_1_rel) at 140625748595960>
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1180-L1216
delph-in/pydelphin
delphin/tdl.py
tokenize
def tokenize(s): """ Tokenize a string *s* of TDL code. """ return [m.group(m.lastindex) for m in _tdl_re.finditer(s)]
python
def tokenize(s): """ Tokenize a string *s* of TDL code. """ return [m.group(m.lastindex) for m in _tdl_re.finditer(s)]
Tokenize a string *s* of TDL code.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1544-L1548
delph-in/pydelphin
delphin/tdl.py
parse
def parse(f, encoding='utf-8'): """ Parse the TDL file *f* and yield the interpreted contents. If *f* is a filename, the file is opened and closed when the generator has finished, otherwise *f* is an open file object and will not be closed when the generator has finished. Args: f (str, file): a filename or open file object encoding (str): the encoding of the file (default: `"utf-8"`; ignored if *f* is an open file) """ if hasattr(f, 'read'): for event in _parse(f): yield event else: with io.open(f, encoding=encoding) as fh: for event in _parse(fh): yield event
python
def parse(f, encoding='utf-8'): """ Parse the TDL file *f* and yield the interpreted contents. If *f* is a filename, the file is opened and closed when the generator has finished, otherwise *f* is an open file object and will not be closed when the generator has finished. Args: f (str, file): a filename or open file object encoding (str): the encoding of the file (default: `"utf-8"`; ignored if *f* is an open file) """ if hasattr(f, 'read'): for event in _parse(f): yield event else: with io.open(f, encoding=encoding) as fh: for event in _parse(fh): yield event
Parse the TDL file *f* and yield the interpreted contents. If *f* is a filename, the file is opened and closed when the generator has finished, otherwise *f* is an open file object and will not be closed when the generator has finished. Args: f (str, file): a filename or open file object encoding (str): the encoding of the file (default: `"utf-8"`; ignored if *f* is an open file)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1614-L1633
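A small usage sketch for parse; the grammar file name is hypothetical:

from delphin import tdl

for obj in tdl.parse('grammar/letypes.tdl'):   # hypothetical TDL file
    print(type(obj).__name__)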
delph-in/pydelphin
delphin/tdl.py
format
def format(obj, indent=0): """ Serialize TDL objects to strings. Args: obj: instance of :class:`Term`, :class:`Conjunction`, or :class:`TypeDefinition` classes or subclasses indent (int): number of spaces to indent the formatted object Returns: str: serialized form of *obj* Example: >>> conj = tdl.Conjunction([ ... tdl.TypeIdentifier('lex-item'), ... tdl.AVM([('SYNSEM.LOCAL.CAT.HEAD.MOD', ... tdl.ConsList(end=tdl.EMPTY_LIST_TYPE))]) ... ]) >>> t = tdl.TypeDefinition('non-mod-lex-item', conj) >>> print(format(t)) non-mod-lex-item := lex-item & [ SYNSEM.LOCAL.CAT.HEAD.MOD < > ]. """ if isinstance(obj, TypeDefinition): return _format_typedef(obj, indent) elif isinstance(obj, Conjunction): return _format_conjunction(obj, indent) elif isinstance(obj, Term): return _format_term(obj, indent) elif isinstance(obj, _MorphSet): return _format_morphset(obj, indent) elif isinstance(obj, _Environment): return _format_environment(obj, indent) elif isinstance(obj, FileInclude): return _format_include(obj, indent) else: raise ValueError('cannot format object as TDL: {!r}'.format(obj))
python
def format(obj, indent=0): """ Serialize TDL objects to strings. Args: obj: instance of :class:`Term`, :class:`Conjunction`, or :class:`TypeDefinition` classes or subclasses indent (int): number of spaces to indent the formatted object Returns: str: serialized form of *obj* Example: >>> conj = tdl.Conjunction([ ... tdl.TypeIdentifier('lex-item'), ... tdl.AVM([('SYNSEM.LOCAL.CAT.HEAD.MOD', ... tdl.ConsList(end=tdl.EMPTY_LIST_TYPE))]) ... ]) >>> t = tdl.TypeDefinition('non-mod-lex-item', conj) >>> print(format(t)) non-mod-lex-item := lex-item & [ SYNSEM.LOCAL.CAT.HEAD.MOD < > ]. """ if isinstance(obj, TypeDefinition): return _format_typedef(obj, indent) elif isinstance(obj, Conjunction): return _format_conjunction(obj, indent) elif isinstance(obj, Term): return _format_term(obj, indent) elif isinstance(obj, _MorphSet): return _format_morphset(obj, indent) elif isinstance(obj, _Environment): return _format_environment(obj, indent) elif isinstance(obj, FileInclude): return _format_include(obj, indent) else: raise ValueError('cannot format object as TDL: {!r}'.format(obj))
Serialize TDL objects to strings. Args: obj: instance of :class:`Term`, :class:`Conjunction`, or :class:`TypeDefinition` classes or subclasses indent (int): number of spaces to indent the formatted object Returns: str: serialized form of *obj* Example: >>> conj = tdl.Conjunction([ ... tdl.TypeIdentifier('lex-item'), ... tdl.AVM([('SYNSEM.LOCAL.CAT.HEAD.MOD', ... tdl.ConsList(end=tdl.EMPTY_LIST_TYPE))]) ... ]) >>> t = tdl.TypeDefinition('non-mod-lex-item', conj) >>> print(format(t)) non-mod-lex-item := lex-item & [ SYNSEM.LOCAL.CAT.HEAD.MOD < > ].
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L1872-L1906
delph-in/pydelphin
delphin/tdl.py
AVM.normalize
def normalize(self): """ Reduce trivial AVM conjunctions to just the AVM. For example, in `[ ATTR1 [ ATTR2 val ] ]` the value of `ATTR1` could be a conjunction with the sub-AVM `[ ATTR2 val ]`. This method removes the conjunction so the sub-AVM nests directly (equivalent to `[ ATTR1.ATTR2 val ]` in TDL). """ for attr in self._avm: val = self._avm[attr] if isinstance(val, Conjunction): val.normalize() if len(val.terms) == 1 and isinstance(val.terms[0], AVM): self._avm[attr] = val.terms[0] elif isinstance(val, AVM): val.normalize()
python
def normalize(self): """ Reduce trivial AVM conjunctions to just the AVM. For example, in `[ ATTR1 [ ATTR2 val ] ]` the value of `ATTR1` could be a conjunction with the sub-AVM `[ ATTR2 val ]`. This method removes the conjunction so the sub-AVM nests directly (equivalent to `[ ATTR1.ATTR2 val ]` in TDL). """ for attr in self._avm: val = self._avm[attr] if isinstance(val, Conjunction): val.normalize() if len(val.terms) == 1 and isinstance(val.terms[0], AVM): self._avm[attr] = val.terms[0] elif isinstance(val, AVM): val.normalize()
Reduce trivial AVM conjunctions to just the AVM. For example, in `[ ATTR1 [ ATTR2 val ] ]` the value of `ATTR1` could be a conjunction with the sub-AVM `[ ATTR2 val ]`. This method removes the conjunction so the sub-AVM nests directly (equivalent to `[ ATTR1.ATTR2 val ]` in TDL).
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L230-L246
delph-in/pydelphin
delphin/tdl.py
AVM.features
def features(self, expand=False): """ Return the list of tuples of feature paths and feature values. Args: expand (bool): if `True`, expand all feature paths Example: >>> avm = AVM([('A.B', TypeIdentifier('1')), ... ('A.C', TypeIdentifier('2')]) >>> avm.features() [('A', <AVM object at ...>)] >>> avm.features(expand=True) [('A.B', <TypeIdentifier object (1) at ...>), ('A.C', <TypeIdentifier object (2) at ...>)] """ fs = [] for featpath, val in super(AVM, self).features(expand=expand): # don't juse Conjunction.features() here because we want to # include the non-AVM terms, too if expand and isinstance(val, Conjunction): for term in val.terms: if isinstance(term, AVM): for fp, v in term.features(True): fs.append(('{}.{}'.format(featpath, fp), v)) else: fs.append((featpath, term)) else: fs.append((featpath, val)) return fs
python
def features(self, expand=False): """ Return the list of tuples of feature paths and feature values. Args: expand (bool): if `True`, expand all feature paths Example: >>> avm = AVM([('A.B', TypeIdentifier('1')), ... ('A.C', TypeIdentifier('2')]) >>> avm.features() [('A', <AVM object at ...>)] >>> avm.features(expand=True) [('A.B', <TypeIdentifier object (1) at ...>), ('A.C', <TypeIdentifier object (2) at ...>)] """ fs = [] for featpath, val in super(AVM, self).features(expand=expand): # don't juse Conjunction.features() here because we want to # include the non-AVM terms, too if expand and isinstance(val, Conjunction): for term in val.terms: if isinstance(term, AVM): for fp, v in term.features(True): fs.append(('{}.{}'.format(featpath, fp), v)) else: fs.append((featpath, term)) else: fs.append((featpath, val)) return fs
Return the list of tuples of feature paths and feature values. Args: expand (bool): if `True`, expand all feature paths Example: >>> avm = AVM([('A.B', TypeIdentifier('1')), ... ('A.C', TypeIdentifier('2'))]) >>> avm.features() [('A', <AVM object at ...>)] >>> avm.features(expand=True) [('A.B', <TypeIdentifier object (1) at ...>), ('A.C', <TypeIdentifier object (2) at ...>)]
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L248-L276
delph-in/pydelphin
delphin/tdl.py
ConsList.values
def values(self): """ Return the list of values in the ConsList feature structure. """ if self._avm is None: return [] else: vals = [val for _, val in _collect_list_items(self)] # the < a . b > notation puts b on the last REST path, # which is not returned by _collect_list_items() if self.terminated and self[self._last_path] is not None: vals.append(self[self._last_path]) return vals
python
def values(self): """ Return the list of values in the ConsList feature structure. """ if self._avm is None: return [] else: vals = [val for _, val in _collect_list_items(self)] # the < a . b > notation puts b on the last REST path, # which is not returned by _collect_list_items() if self.terminated and self[self._last_path] is not None: vals.append(self[self._last_path]) return vals
Return the list of values in the ConsList feature structure.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L327-L339
delph-in/pydelphin
delphin/tdl.py
ConsList.append
def append(self, value): """ Append an item to the end of an open ConsList. Args: value (:class:`Conjunction`, :class:`Term`): item to add Raises: :class:`TdlError`: when appending to a closed list """ if self._avm is not None and not self.terminated: path = self._last_path if path: path += '.' self[path + LIST_HEAD] = value self._last_path = path + LIST_TAIL self[self._last_path] = AVM() else: raise TdlError('Cannot append to a closed list.')
python
def append(self, value): """ Append an item to the end of an open ConsList. Args: value (:class:`Conjunction`, :class:`Term`): item to add Raises: :class:`TdlError`: when appending to a closed list """ if self._avm is not None and not self.terminated: path = self._last_path if path: path += '.' self[path + LIST_HEAD] = value self._last_path = path + LIST_TAIL self[self._last_path] = AVM() else: raise TdlError('Cannot append to a closed list.')
Append an item to the end of an open ConsList. Args: value (:class:`Conjunction`, :class:`Term`): item to add Raises: :class:`TdlError`: when appending to a closed list
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L341-L358
delph-in/pydelphin
delphin/tdl.py
ConsList.terminate
def terminate(self, end): """ Set the value of the tail of the list. Adding values via :meth:`append` places them on the `FIRST` feature of some level of the feature structure (e.g., `REST.FIRST`), while :meth:`terminate` places them on the final `REST` feature (e.g., `REST.REST`). If *end* is a :class:`Conjunction` or :class:`Term`, it is typically a :class:`Coreference`, otherwise *end* is set to `tdl.EMPTY_LIST_TYPE` or `tdl.LIST_TYPE`. This method does not necessarily close the list; if *end* is `tdl.LIST_TYPE`, the list is left open, otherwise it is closed. Args: end (str, :class:`Conjunction`, :class:`Term`): value to use as the end of the list. """ if self.terminated: raise TdlError('Cannot terminate a closed list.') if end == LIST_TYPE: self.terminated = False elif end == EMPTY_LIST_TYPE: if self._last_path: self[self._last_path] = None else: self._avm = None self.terminated = True elif self._last_path: self[self._last_path] = end self.terminated = True else: raise TdlError('Empty list must be {} or {}'.format( LIST_TYPE, EMPTY_LIST_TYPE))
python
def terminate(self, end): """ Set the value of the tail of the list. Adding values via :meth:`append` places them on the `FIRST` feature of some level of the feature structure (e.g., `REST.FIRST`), while :meth:`terminate` places them on the final `REST` feature (e.g., `REST.REST`). If *end* is a :class:`Conjunction` or :class:`Term`, it is typically a :class:`Coreference`, otherwise *end* is set to `tdl.EMPTY_LIST_TYPE` or `tdl.LIST_TYPE`. This method does not necessarily close the list; if *end* is `tdl.LIST_TYPE`, the list is left open, otherwise it is closed. Args: end (str, :class:`Conjunction`, :class:`Term`): value to use as the end of the list. """ if self.terminated: raise TdlError('Cannot terminate a closed list.') if end == LIST_TYPE: self.terminated = False elif end == EMPTY_LIST_TYPE: if self._last_path: self[self._last_path] = None else: self._avm = None self.terminated = True elif self._last_path: self[self._last_path] = end self.terminated = True else: raise TdlError('Empty list must be {} or {}'.format( LIST_TYPE, EMPTY_LIST_TYPE))
Set the value of the tail of the list. Adding values via :meth:`append` places them on the `FIRST` feature of some level of the feature structure (e.g., `REST.FIRST`), while :meth:`terminate` places them on the final `REST` feature (e.g., `REST.REST`). If *end* is a :class:`Conjunction` or :class:`Term`, it is typically a :class:`Coreference`, otherwise *end* is set to `tdl.EMPTY_LIST_TYPE` or `tdl.LIST_TYPE`. This method does not necessarily close the list; if *end* is `tdl.LIST_TYPE`, the list is left open, otherwise it is closed. Args: end (str, :class:`Conjunction`, :class:`Term`): value to use as the end of the list.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L360-L393
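A hedged sketch of the append/terminate workflow described above; the no-argument ConsList constructor is an assumption:

from delphin import tdl

lst = tdl.ConsList()                        # assumed to create an open list
lst.append(tdl.TypeIdentifier('synsem'))
lst.terminate(tdl.EMPTY_LIST_TYPE)          # close the list: < synsem >
print(lst.values())                         # [<TypeIdentifier object (synsem) ...>]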
delph-in/pydelphin
delphin/tdl.py
Conjunction.normalize
def normalize(self): """ Rearrange the conjunction to a conventional form. This puts any coreference(s) first, followed by type terms, then followed by AVM(s) (including lists). AVMs are normalized via :meth:`AVM.normalize`. """ corefs = [] types = [] avms = [] for term in self._terms: if isinstance(term, TypeTerm): types.append(term) elif isinstance(term, AVM): term.normalize() avms.append(term) elif isinstance(term, Coreference): corefs.append(term) else: raise TdlError('unexpected term {}'.format(term)) self._terms = corefs + types + avms
python
def normalize(self): """ Rearrange the conjunction to a conventional form. This puts any coreference(s) first, followed by type terms, then followed by AVM(s) (including lists). AVMs are normalized via :meth:`AVM.normalize`. """ corefs = [] types = [] avms = [] for term in self._terms: if isinstance(term, TypeTerm): types.append(term) elif isinstance(term, AVM): term.normalize() avms.append(term) elif isinstance(term, Coreference): corefs.append(term) else: raise TdlError('unexpected term {}'.format(term)) self._terms = corefs + types + avms
Rearrange the conjunction to a conventional form. This puts any coreference(s) first, followed by type terms, then followed by AVM(s) (including lists). AVMs are normalized via :meth:`AVM.normalize`.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L547-L568
delph-in/pydelphin
delphin/tdl.py
Conjunction.add
def add(self, term): """ Add a term to the conjunction. Args: term (:class:`Term`, :class:`Conjunction`): term to add; if a :class:`Conjunction`, all of its terms are added to the current conjunction. Raises: :class:`TypeError`: when *term* is an invalid type """ if isinstance(term, Conjunction): for term_ in term.terms: self.add(term_) elif isinstance(term, Term): self._terms.append(term) else: raise TypeError('Not a Term or Conjunction')
python
def add(self, term): """ Add a term to the conjunction. Args: term (:class:`Term`, :class:`Conjunction`): term to add; if a :class:`Conjunction`, all of its terms are added to the current conjunction. Raises: :class:`TypeError`: when *term* is an invalid type """ if isinstance(term, Conjunction): for term_ in term.terms: self.add(term_) elif isinstance(term, Term): self._terms.append(term) else: raise TypeError('Not a Term or Conjunction')
Add a term to the conjunction. Args: term (:class:`Term`, :class:`Conjunction`): term to add; if a :class:`Conjunction`, all of its terms are added to the current conjunction. Raises: :class:`TypeError`: when *term* is an invalid type
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L575-L592
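A short sketch of building a Conjunction incrementally with add, mirroring the example shown in the format() docstring below:

from delphin import tdl

conj = tdl.Conjunction([tdl.TypeIdentifier('lex-item')])
conj.add(tdl.AVM([('SYNSEM.LOCAL.CAT.HEAD.MOD', tdl.ConsList(end=tdl.EMPTY_LIST_TYPE))]))
print(tdl.format(tdl.TypeDefinition('non-mod-lex-item', conj)))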
delph-in/pydelphin
delphin/tdl.py
Conjunction.types
def types(self): """Return the list of type terms in the conjunction.""" return [term for term in self._terms if isinstance(term, (TypeIdentifier, String, Regex))]
python
def types(self): """Return the list of type terms in the conjunction.""" return [term for term in self._terms if isinstance(term, (TypeIdentifier, String, Regex))]
Return the list of type terms in the conjunction.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L594-L597
delph-in/pydelphin
delphin/tdl.py
Conjunction.features
def features(self, expand=False): """Return the list of feature-value pairs in the conjunction.""" featvals = [] for term in self._terms: if isinstance(term, AVM): featvals.extend(term.features(expand=expand)) return featvals
python
def features(self, expand=False): """Return the list of feature-value pairs in the conjunction.""" featvals = [] for term in self._terms: if isinstance(term, AVM): featvals.extend(term.features(expand=expand)) return featvals
Return the list of feature-value pairs in the conjunction.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L599-L605
delph-in/pydelphin
delphin/tdl.py
Conjunction.string
def string(self): """ Return the first string term in the conjunction, or `None`. """ for term in self._terms: if isinstance(term, String): return str(term) return None
python
def string(self): """ Return the first string term in the conjunction, or `None`. """ for term in self._terms: if isinstance(term, String): return str(term) return None
Return the first string term in the conjunction, or `None`.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L607-L614
delph-in/pydelphin
delphin/tdl.py
TypeDefinition.documentation
def documentation(self, level='first'): """ Return the documentation of the type. By default, this is the first docstring on a top-level term. By setting *level* to `"top"`, the list of all docstrings on top-level terms is returned, including the type's `docstring` value, if not `None`, as the last item. The docstring for the type itself is available via :attr:`TypeDefinition.docstring`. Args: level (str): `"first"` or `"top"` Returns: a single docstring or a list of docstrings """ docs = (t.docstring for t in list(self.conjunction.terms) + [self] if t.docstring is not None) if level.lower() == 'first': doc = next(docs, None) elif level.lower() == 'top': doc = list(docs) return doc
python
def documentation(self, level='first'): """ Return the documentation of the type. By default, this is the first docstring on a top-level term. By setting *level* to `"top"`, the list of all docstrings on top-level terms is returned, including the type's `docstring` value, if not `None`, as the last item. The docstring for the type itself is available via :attr:`TypeDefinition.docstring`. Args: level (str): `"first"` or `"top"` Returns: a single docstring or a list of docstrings """ docs = (t.docstring for t in list(self.conjunction.terms) + [self] if t.docstring is not None) if level.lower() == 'first': doc = next(docs, None) elif level.lower() == 'top': doc = list(docs) return doc
Return the documentation of the type. By default, this is the first docstring on a top-level term. By setting *level* to `"top"`, the list of all docstrings on top-level terms is returned, including the type's `docstring` value, if not `None`, as the last item. The docstring for the type itself is available via :attr:`TypeDefinition.docstring`. Args: level (str): `"first"` or `"top"` Returns: a single docstring or a list of docstrings
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L668-L690
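A hedged sketch combining iterparse (documented earlier) with documentation(); the lexicon file name comes from the iterparse docstring's example:

from delphin import tdl

for event, obj, lineno in tdl.iterparse('erg/lexicon.tdl'):
    if event == 'TypeDefinition':
        print(obj.identifier, obj.documentation())     # first top-level docstring
        print(obj.documentation(level='top'))          # all top-level docstrings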
delph-in/pydelphin
delphin/tdl.py
TdlDefinition.local_constraints
def local_constraints(self): """ Return the constraints defined in the local AVM. """ cs = [] for feat, val in self._avm.items(): try: if val.supertypes and not val._avm: cs.append((feat, val)) else: for subfeat, subval in val.features(): cs.append(('{}.{}'.format(feat, subfeat), subval)) except AttributeError: cs.append((feat, val)) return cs
python
def local_constraints(self): """ Return the constraints defined in the local AVM. """ cs = [] for feat, val in self._avm.items(): try: if val.supertypes and not val._avm: cs.append((feat, val)) else: for subfeat, subval in val.features(): cs.append(('{}.{}'.format(feat, subfeat), subval)) except AttributeError: cs.append((feat, val)) return cs
Return the constraints defined in the local AVM.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L876-L890
delph-in/pydelphin
delphin/tdl.py
TdlConsList.values
def values(self): """ Return the list of values. """ def collect(d): if d is None or d.get('FIRST') is None: return [] vals = [d['FIRST']] vals.extend(collect(d.get('REST'))) return vals return collect(self)
python
def values(self): """ Return the list of values. """ def collect(d): if d is None or d.get('FIRST') is None: return [] vals = [d['FIRST']] vals.extend(collect(d.get('REST'))) return vals return collect(self)
Return the list of values.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/tdl.py#L905-L915
luckydonald/pytgbot
pytgbot/api_types/receivable/peer.py
ChatMember.from_array
def from_array(array): """ Deserialize a new ChatMember from a given dictionary. :return: new ChatMember instance. :rtype: ChatMember """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['user'] = User.from_array(array.get('user')) data['status'] = u(array.get('status')) data['until_date'] = int(array.get('until_date')) if array.get('until_date') is not None else None data['can_be_edited'] = bool(array.get('can_be_edited')) if array.get('can_be_edited') is not None else None data['can_change_info'] = bool(array.get('can_change_info')) if array.get('can_change_info') is not None else None data['can_post_messages'] = bool(array.get('can_post_messages')) if array.get('can_post_messages') is not None else None data['can_edit_messages'] = bool(array.get('can_edit_messages')) if array.get('can_edit_messages') is not None else None data['can_delete_messages'] = bool(array.get('can_delete_messages')) if array.get('can_delete_messages') is not None else None data['can_invite_users'] = bool(array.get('can_invite_users')) if array.get('can_invite_users') is not None else None data['can_restrict_members'] = bool(array.get('can_restrict_members')) if array.get('can_restrict_members') is not None else None data['can_pin_messages'] = bool(array.get('can_pin_messages')) if array.get('can_pin_messages') is not None else None data['can_promote_members'] = bool(array.get('can_promote_members')) if array.get('can_promote_members') is not None else None data['can_send_messages'] = bool(array.get('can_send_messages')) if array.get('can_send_messages') is not None else None data['can_send_media_messages'] = bool(array.get('can_send_media_messages')) if array.get('can_send_media_messages') is not None else None data['can_send_other_messages'] = bool(array.get('can_send_other_messages')) if array.get('can_send_other_messages') is not None else None data['can_add_web_page_previews'] = bool(array.get('can_add_web_page_previews')) if array.get('can_add_web_page_previews') is not None else None data['_raw'] = array return ChatMember(**data)
python
def from_array(array): """ Deserialize a new ChatMember from a given dictionary. :return: new ChatMember instance. :rtype: ChatMember """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['user'] = User.from_array(array.get('user')) data['status'] = u(array.get('status')) data['until_date'] = int(array.get('until_date')) if array.get('until_date') is not None else None data['can_be_edited'] = bool(array.get('can_be_edited')) if array.get('can_be_edited') is not None else None data['can_change_info'] = bool(array.get('can_change_info')) if array.get('can_change_info') is not None else None data['can_post_messages'] = bool(array.get('can_post_messages')) if array.get('can_post_messages') is not None else None data['can_edit_messages'] = bool(array.get('can_edit_messages')) if array.get('can_edit_messages') is not None else None data['can_delete_messages'] = bool(array.get('can_delete_messages')) if array.get('can_delete_messages') is not None else None data['can_invite_users'] = bool(array.get('can_invite_users')) if array.get('can_invite_users') is not None else None data['can_restrict_members'] = bool(array.get('can_restrict_members')) if array.get('can_restrict_members') is not None else None data['can_pin_messages'] = bool(array.get('can_pin_messages')) if array.get('can_pin_messages') is not None else None data['can_promote_members'] = bool(array.get('can_promote_members')) if array.get('can_promote_members') is not None else None data['can_send_messages'] = bool(array.get('can_send_messages')) if array.get('can_send_messages') is not None else None data['can_send_media_messages'] = bool(array.get('can_send_media_messages')) if array.get('can_send_media_messages') is not None else None data['can_send_other_messages'] = bool(array.get('can_send_other_messages')) if array.get('can_send_other_messages') is not None else None data['can_add_web_page_previews'] = bool(array.get('can_add_web_page_previews')) if array.get('can_add_web_page_previews') is not None else None data['_raw'] = array return ChatMember(**data)
Deserialize a new ChatMember from a given dictionary. :return: new ChatMember instance. :rtype: ChatMember
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/peer.py#L647-L677
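A hedged sketch of deserializing a minimal payload with ChatMember.from_array; the field values are made up:

from pytgbot.api_types.receivable.peer import ChatMember

member = ChatMember.from_array({
    "user": {"id": 1234, "is_bot": False, "first_name": "Alice"},
    "status": "administrator",
    "can_delete_messages": True,
})
print(member.status, member.can_delete_messages)   # administrator True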
delph-in/pydelphin
delphin/mrs/semi.py
Variable.from_dict
def from_dict(cls, d): """Instantiate a Variable from a dictionary representation.""" return cls( d['type'], tuple(d['parents']), list(d['properties'].items()) )
python
def from_dict(cls, d): """Instantiate a Variable from a dictionary representation.""" return cls( d['type'], tuple(d['parents']), list(d['properties'].items()) )
Instantiate a Variable from a dictionary representation.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L77-L81
delph-in/pydelphin
delphin/mrs/semi.py
Role.from_dict
def from_dict(cls, d): """Instantiate a Role from a dictionary representation.""" return cls( d['rargname'], d['value'], list(d.get('properties', {}).items()), d.get('optional', False) )
python
def from_dict(cls, d): """Instantiate a Role from a dictionary representation.""" return cls( d['rargname'], d['value'], list(d.get('properties', {}).items()), d.get('optional', False) )
Instantiate a Role from a dictionary representation.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L107-L114
delph-in/pydelphin
delphin/mrs/semi.py
Role.to_dict
def to_dict(self): """Return a dictionary representation of the Role.""" d = {'rargname': self.rargname, 'value': self.value} if self.properties: d['properties'] = self.properties if self.optional: d['optional'] = self.optional return d
python
def to_dict(self): """Return a dictionary representation of the Role.""" d = {'rargname': self.rargname, 'value': self.value} if self.properties: d['properties'] = self.properties if self.optional: d['optional'] = self.optional return d
Return a dictionary representation of the Role.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L116-L123
delph-in/pydelphin
delphin/mrs/semi.py
Predicate.from_dict
def from_dict(cls, d): """Instantiate a Predicate from a dictionary representation.""" synopses = [tuple(map(Role.from_dict, synopsis)) for synopsis in d.get('synopses', [])] return cls(d['predicate'], tuple(d['parents']), synopses)
python
def from_dict(cls, d): """Instantiate a Predicate from a dictionary representation.""" synopses = [tuple(map(Role.from_dict, synopsis)) for synopsis in d.get('synopses', [])] return cls(d['predicate'], tuple(d['parents']), synopses)
Instantiate a Predicate from a dictionary representation.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L144-L148
delph-in/pydelphin
delphin/mrs/semi.py
Predicate.to_dict
def to_dict(self):
    """Return a dictionary representation of the Predicate."""
    return {
        'predicate': self.predicate,
        'parents': list(self.supertypes),
        'synopses': [[role.to_dict() for role in synopsis]
                     for synopsis in self.synopses]
    }
python
def to_dict(self):
    """Return a dictionary representation of the Predicate."""
    return {
        'predicate': self.predicate,
        'parents': list(self.supertypes),
        'synopses': [[role.to_dict() for role in synopsis]
                     for synopsis in self.synopses]
    }
Return a dictionary representation of the Predicate.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L150-L157
delph-in/pydelphin
delphin/mrs/semi.py
SemI.from_dict
def from_dict(cls, d):
    """Instantiate a SemI from a dictionary representation."""
    read = lambda cls: (lambda pair: (pair[0], cls.from_dict(pair[1])))
    return cls(
        variables=map(read(Variable), d.get('variables', {}).items()),
        properties=map(read(Property), d.get('properties', {}).items()),
        roles=map(read(Role), d.get('roles', {}).items()),
        predicates=map(read(Predicate), d.get('predicates', {}).items())
    )
python
def from_dict(cls, d):
    """Instantiate a SemI from a dictionary representation."""
    read = lambda cls: (lambda pair: (pair[0], cls.from_dict(pair[1])))
    return cls(
        variables=map(read(Variable), d.get('variables', {}).items()),
        properties=map(read(Property), d.get('properties', {}).items()),
        roles=map(read(Role), d.get('roles', {}).items()),
        predicates=map(read(Predicate), d.get('predicates', {}).items())
    )
Instantiate a SemI from a dictionary representation.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L322-L330
delph-in/pydelphin
delphin/mrs/semi.py
SemI.to_dict
def to_dict(self):
    """Return a dictionary representation of the SemI."""
    make = lambda pair: (pair[0], pair[1].to_dict())
    return dict(
        variables=dict(make(v) for v in self.variables.items()),
        properties=dict(make(p) for p in self.properties.items()),
        roles=dict(make(r) for r in self.roles.items()),
        predicates=dict(make(p) for p in self.predicates.items())
    )
python
def to_dict(self):
    """Return a dictionary representation of the SemI."""
    make = lambda pair: (pair[0], pair[1].to_dict())
    return dict(
        variables=dict(make(v) for v in self.variables.items()),
        properties=dict(make(p) for p in self.properties.items()),
        roles=dict(make(r) for r in self.roles.items()),
        predicates=dict(make(p) for p in self.predicates.items())
    )
Return a dictionary representation of the SemI.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/semi.py#L332-L340
delph-in/pydelphin
delphin/mrs/components.py
sort_vid_split
def sort_vid_split(vs):
    """
    Split a valid variable string into its variable sort and id.

    Examples:
        >>> sort_vid_split('h3')
        ('h', '3')
        >>> sort_vid_split('ref-ind12')
        ('ref-ind', '12')
    """
    match = var_re.match(vs)
    if match is None:
        raise ValueError('Invalid variable string: {}'.format(str(vs)))
    else:
        return match.groups()
python
def sort_vid_split(vs):
    """
    Split a valid variable string into its variable sort and id.

    Examples:
        >>> sort_vid_split('h3')
        ('h', '3')
        >>> sort_vid_split('ref-ind12')
        ('ref-ind', '12')
    """
    match = var_re.match(vs)
    if match is None:
        raise ValueError('Invalid variable string: {}'.format(str(vs)))
    else:
        return match.groups()
Split a valid variable string into its variable sort and id.

Examples:
    >>> sort_vid_split('h3')
    ('h', '3')
    >>> sort_vid_split('ref-ind12')
    ('ref-ind', '12')
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L31-L45
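Besides the doctest above, the ValueError branch matters when splitting untrusted variable strings. A small sketch (the exact pattern accepted by `var_re` is defined elsewhere in components.py, so the last input is only assumed to be rejected):

from delphin.mrs.components import sort_vid_split

for vs in ('h3', 'ref-ind12', 'not a variable!'):
    try:
        print(vs, '->', sort_vid_split(vs))
    except ValueError as exc:
        # invalid strings raise instead of returning None
        print(vs, '->', exc)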
delph-in/pydelphin
delphin/mrs/components.py
links
def links(xmrs):
    """Return the list of Links for the *xmrs*."""
    # Links exist for every non-intrinsic argument that has a variable
    # that is the intrinsic variable of some other predicate, as well
    # as for label equalities when no argument link exists (even
    # considering transitivity).
    links = []
    prelinks = []

    _eps = xmrs._eps
    _hcons = xmrs._hcons
    _vars = xmrs._vars
    lsh = xmrs.labelset_heads
    lblheads = {v: lsh(v) for v, vd in _vars.items() if 'LBL' in vd['refs']}

    top = xmrs.top
    if top is not None:
        prelinks.append((0, top, None, top, _vars[top]))

    for nid, ep in _eps.items():
        for role, val in ep[3].items():
            if role == IVARG_ROLE or val not in _vars:
                continue
            prelinks.append((nid, ep[2], role, val, _vars[val]))

    for src, srclbl, role, val, vd in prelinks:
        if IVARG_ROLE in vd['refs']:
            tgtnids = [n for n in vd['refs'][IVARG_ROLE]
                       if not _eps[n].is_quantifier()]
            if len(tgtnids) == 0:
                continue  # maybe some bad MRS with a lonely quantifier
            tgt = tgtnids[0]  # what do we do if len > 1?
            tgtlbl = _eps[tgt][2]
            post = EQ_POST if srclbl == tgtlbl else NEQ_POST
        elif val in _hcons:
            lbl = _hcons[val][2]
            if lbl not in lblheads or len(lblheads[lbl]) == 0:
                continue  # broken MRS; log this?
            tgt = lblheads[lbl][0]  # sorted list; first item is most "heady"
            post = H_POST
        elif 'LBL' in vd['refs']:
            if val not in lblheads or len(lblheads[val]) == 0:
                continue  # broken MRS; log this?
            tgt = lblheads[val][0]  # again, should be sorted already
            post = HEQ_POST
        else:
            continue  # CARGs, maybe?
        links.append(Link(src, tgt, role, post))

    # now EQ links unattested by arg links
    for lbl, heads in lblheads.items():
        # I'm pretty sure this does what we want
        if len(heads) > 1:
            first = heads[0]
            for other in heads[1:]:
                links.append(Link(other, first, BARE_EQ_ROLE, EQ_POST))
        # If not, something like this is more explicit
        # lblset = self.labelset(lbl)
        # sg = g.subgraph(lblset)
        # ns = [nid for nid, deg in sg.degree(lblset).items() if deg == 0]
        # head = self.labelset_head(lbl)
        # for n in ns:
        #     links.append(Link(head, n, post=EQ_POST))

    def _int(x):
        try:
            return int(x)
        except ValueError:
            return 0

    return sorted(
        links,
        key=lambda link: (_int(link.start), _int(link.end), link.rargname)
    )
python
def links(xmrs):
    """Return the list of Links for the *xmrs*."""
    # Links exist for every non-intrinsic argument that has a variable
    # that is the intrinsic variable of some other predicate, as well
    # as for label equalities when no argument link exists (even
    # considering transitivity).
    links = []
    prelinks = []

    _eps = xmrs._eps
    _hcons = xmrs._hcons
    _vars = xmrs._vars
    lsh = xmrs.labelset_heads
    lblheads = {v: lsh(v) for v, vd in _vars.items() if 'LBL' in vd['refs']}

    top = xmrs.top
    if top is not None:
        prelinks.append((0, top, None, top, _vars[top]))

    for nid, ep in _eps.items():
        for role, val in ep[3].items():
            if role == IVARG_ROLE or val not in _vars:
                continue
            prelinks.append((nid, ep[2], role, val, _vars[val]))

    for src, srclbl, role, val, vd in prelinks:
        if IVARG_ROLE in vd['refs']:
            tgtnids = [n for n in vd['refs'][IVARG_ROLE]
                       if not _eps[n].is_quantifier()]
            if len(tgtnids) == 0:
                continue  # maybe some bad MRS with a lonely quantifier
            tgt = tgtnids[0]  # what do we do if len > 1?
            tgtlbl = _eps[tgt][2]
            post = EQ_POST if srclbl == tgtlbl else NEQ_POST
        elif val in _hcons:
            lbl = _hcons[val][2]
            if lbl not in lblheads or len(lblheads[lbl]) == 0:
                continue  # broken MRS; log this?
            tgt = lblheads[lbl][0]  # sorted list; first item is most "heady"
            post = H_POST
        elif 'LBL' in vd['refs']:
            if val not in lblheads or len(lblheads[val]) == 0:
                continue  # broken MRS; log this?
            tgt = lblheads[val][0]  # again, should be sorted already
            post = HEQ_POST
        else:
            continue  # CARGs, maybe?
        links.append(Link(src, tgt, role, post))

    # now EQ links unattested by arg links
    for lbl, heads in lblheads.items():
        # I'm pretty sure this does what we want
        if len(heads) > 1:
            first = heads[0]
            for other in heads[1:]:
                links.append(Link(other, first, BARE_EQ_ROLE, EQ_POST))
        # If not, something like this is more explicit
        # lblset = self.labelset(lbl)
        # sg = g.subgraph(lblset)
        # ns = [nid for nid, deg in sg.degree(lblset).items() if deg == 0]
        # head = self.labelset_head(lbl)
        # for n in ns:
        #     links.append(Link(head, n, post=EQ_POST))

    def _int(x):
        try:
            return int(x)
        except ValueError:
            return 0

    return sorted(
        links,
        key=lambda link: (_int(link.start), _int(link.end), link.rargname)
    )
Return the list of Links for the *xmrs*.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L298-L371
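Because `links` returns plain Link objects already sorted by their endpoints, downstream code usually just iterates them. A minimal sketch (assuming `xmrs` is an existing Xmrs instance and that Link exposes `start`, `end`, and `rargname` as used in the sort key above, plus a `post` attribute matching the constructor argument):

from delphin.mrs.components import links

def dump_links(xmrs):
    # Print one line per dependency, e.g. "10 -ARG1/NEQ-> 20".
    for link in links(xmrs):
        print('{} -{}/{}-> {}'.format(
            link.start, link.rargname, link.post, link.end))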
delph-in/pydelphin
delphin/mrs/components.py
hcons
def hcons(xmrs):
    """Return the list of all HandleConstraints in *xmrs*."""
    return [
        HandleConstraint(hi, reln, lo)
        for hi, reln, lo in sorted(xmrs.hcons(), key=lambda hc: var_id(hc[0]))
    ]
python
def hcons(xmrs):
    """Return the list of all HandleConstraints in *xmrs*."""
    return [
        HandleConstraint(hi, reln, lo)
        for hi, reln, lo in sorted(xmrs.hcons(), key=lambda hc: var_id(hc[0]))
    ]
Return the list of all HandleConstraints in *xmrs*.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L404-L409
delph-in/pydelphin
delphin/mrs/components.py
icons
def icons(xmrs):
    """Return the list of all IndividualConstraints in *xmrs*."""
    return [
        IndividualConstraint(left, reln, right)
        for left, reln, right in sorted(xmrs.icons(),
                                        key=lambda ic: var_id(ic[0]))
    ]
python
def icons(xmrs):
    """Return the list of all IndividualConstraints in *xmrs*."""
    return [
        IndividualConstraint(left, reln, right)
        for left, reln, right in sorted(xmrs.icons(),
                                        key=lambda ic: var_id(ic[0]))
    ]
Return the list of all IndividualConstraints in *xmrs*.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L427-L433
delph-in/pydelphin
delphin/mrs/components.py
split_pred_string
def split_pred_string(predstr):
    """
    Split *predstr* and return the (lemma, pos, sense, suffix) components.

    Examples:
        >>> Pred.split_pred_string('_dog_n_1_rel')
        ('dog', 'n', '1', 'rel')
        >>> Pred.split_pred_string('quant_rel')
        ('quant', None, None, 'rel')
    """
    predstr = predstr.strip('"\'')  # surrounding quotes don't matter
    rel_added = False
    if not predstr.lower().endswith('_rel'):
        logging.debug('Predicate does not end in "_rel": {}'
                      .format(predstr))
        rel_added = True
        predstr += '_rel'
    match = Pred.pred_re.search(predstr)
    if match is None:
        logging.debug('Unexpected predicate string: {}'.format(predstr))
        return (predstr, None, None, None)
    # _lemma_pos(_sense)?_end
    return (match.group('lemma'), match.group('pos'),
            match.group('sense'), None if rel_added else match.group('end'))
python
def split_pred_string(predstr):
    """
    Split *predstr* and return the (lemma, pos, sense, suffix) components.

    Examples:
        >>> Pred.split_pred_string('_dog_n_1_rel')
        ('dog', 'n', '1', 'rel')
        >>> Pred.split_pred_string('quant_rel')
        ('quant', None, None, 'rel')
    """
    predstr = predstr.strip('"\'')  # surrounding quotes don't matter
    rel_added = False
    if not predstr.lower().endswith('_rel'):
        logging.debug('Predicate does not end in "_rel": {}'
                      .format(predstr))
        rel_added = True
        predstr += '_rel'
    match = Pred.pred_re.search(predstr)
    if match is None:
        logging.debug('Unexpected predicate string: {}'.format(predstr))
        return (predstr, None, None, None)
    # _lemma_pos(_sense)?_end
    return (match.group('lemma'), match.group('pos'),
            match.group('sense'), None if rel_added else match.group('end'))
Split *predstr* and return the (lemma, pos, sense, suffix) components.

Examples:
    >>> Pred.split_pred_string('_dog_n_1_rel')
    ('dog', 'n', '1', 'rel')
    >>> Pred.split_pred_string('quant_rel')
    ('quant', None, None, 'rel')
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L595-L618
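One behaviour worth calling out: when the input lacks the `_rel` suffix, the function appends it only for matching purposes and then reports the suffix component as None, so callers can tell the two cases apart. A short sketch based on the code above (the second expected value is inferred from the `rel_added` logic rather than taken from the docstring):

from delphin.mrs.components import split_pred_string

print(split_pred_string('_dog_n_1_rel'))  # ('dog', 'n', '1', 'rel')
print(split_pred_string('_dog_n_1'))      # ('dog', 'n', '1', None) -- '_rel' was only added internally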
delph-in/pydelphin
delphin/mrs/components.py
is_valid_pred_string
def is_valid_pred_string(predstr):
    """
    Return `True` if *predstr* is a valid predicate string.

    Examples:
        >>> is_valid_pred_string('"_dog_n_1_rel"')
        True
        >>> is_valid_pred_string('_dog_n_1')
        True
        >>> is_valid_pred_string('_dog_noun_1')
        False
        >>> is_valid_pred_string('dog_noun_1')
        True
    """
    predstr = predstr.strip('"').lstrip("'")
    # this is a stricter regex than in Pred, but doesn't check POS
    return re.match(
        r'_([^ _\\]|\\.)+_[a-z](_([^ _\\]|\\.)+)?(_rel)?$'
        r'|[^_]([^ \\]|\\.)+(_rel)?$',
        predstr
    ) is not None
python
def is_valid_pred_string(predstr):
    """
    Return `True` if *predstr* is a valid predicate string.

    Examples:
        >>> is_valid_pred_string('"_dog_n_1_rel"')
        True
        >>> is_valid_pred_string('_dog_n_1')
        True
        >>> is_valid_pred_string('_dog_noun_1')
        False
        >>> is_valid_pred_string('dog_noun_1')
        True
    """
    predstr = predstr.strip('"').lstrip("'")
    # this is a stricter regex than in Pred, but doesn't check POS
    return re.match(
        r'_([^ _\\]|\\.)+_[a-z](_([^ _\\]|\\.)+)?(_rel)?$'
        r'|[^_]([^ \\]|\\.)+(_rel)?$',
        predstr
    ) is not None
Return `True` if *predstr* is a valid predicate string.

Examples:
    >>> is_valid_pred_string('"_dog_n_1_rel"')
    True
    >>> is_valid_pred_string('_dog_n_1')
    True
    >>> is_valid_pred_string('_dog_noun_1')
    False
    >>> is_valid_pred_string('dog_noun_1')
    True
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L621-L641
delph-in/pydelphin
delphin/mrs/components.py
normalize_pred_string
def normalize_pred_string(predstr):
    """
    Normalize the predicate string *predstr* to a conventional form.

    This makes predicate strings more consistent by removing quotes and
    the `_rel` suffix, and by lowercasing them.

    Examples:
        >>> normalize_pred_string('"_dog_n_1_rel"')
        '_dog_n_1'
        >>> normalize_pred_string('_dog_n_1')
        '_dog_n_1'
    """
    tokens = [t for t in split_pred_string(predstr)[:3] if t is not None]
    if predstr.lstrip('\'"')[:1] == '_':
        tokens = [''] + tokens
    return '_'.join(tokens).lower()
python
def normalize_pred_string(predstr):
    """
    Normalize the predicate string *predstr* to a conventional form.

    This makes predicate strings more consistent by removing quotes and
    the `_rel` suffix, and by lowercasing them.

    Examples:
        >>> normalize_pred_string('"_dog_n_1_rel"')
        '_dog_n_1'
        >>> normalize_pred_string('_dog_n_1')
        '_dog_n_1'
    """
    tokens = [t for t in split_pred_string(predstr)[:3] if t is not None]
    if predstr.lstrip('\'"')[:1] == '_':
        tokens = [''] + tokens
    return '_'.join(tokens).lower()
Normalize the predicate string *predstr* to a conventional form.

This makes predicate strings more consistent by removing quotes and
the `_rel` suffix, and by lowercasing them.

Examples:
    >>> normalize_pred_string('"_dog_n_1_rel"')
    '_dog_n_1'
    >>> normalize_pred_string('_dog_n_1')
    '_dog_n_1'
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L644-L660
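The same normalization applies to abstract (grammar) predicates, which keep no leading underscore. A short sketch following the code above (the 'pron_rel' result is derived from the token-joining logic, not from the docstring):

from delphin.mrs.components import normalize_pred_string

print(normalize_pred_string('"_dog_n_1_rel"'))  # '_dog_n_1'
print(normalize_pred_string('pron_rel'))        # 'pron' -- no leading underscore is re-added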
delph-in/pydelphin
delphin/mrs/components.py
nodes
def nodes(xmrs):
    """Return the list of Nodes for *xmrs*."""
    nodes = []
    _props = xmrs.properties
    varsplit = sort_vid_split
    for p in xmrs.eps():
        sortinfo = None
        iv = p.intrinsic_variable
        if iv is not None:
            sort, _ = varsplit(iv)
            sortinfo = _props(iv)
            sortinfo[CVARSORT] = sort
        nodes.append(
            Node(p.nodeid, p.pred, sortinfo, p.lnk, p.surface, p.base, p.carg)
        )
    return nodes
python
def nodes(xmrs):
    """Return the list of Nodes for *xmrs*."""
    nodes = []
    _props = xmrs.properties
    varsplit = sort_vid_split
    for p in xmrs.eps():
        sortinfo = None
        iv = p.intrinsic_variable
        if iv is not None:
            sort, _ = varsplit(iv)
            sortinfo = _props(iv)
            sortinfo[CVARSORT] = sort
        nodes.append(
            Node(p.nodeid, p.pred, sortinfo, p.lnk, p.surface, p.base, p.carg)
        )
    return nodes
Return the list of Nodes for *xmrs*.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L786-L801
delph-in/pydelphin
delphin/mrs/components.py
_VarGenerator.new
def new(self, sort, properties=None):
    """
    Create a new variable for the given *sort*.
    """
    if sort is None:
        sort = UNKNOWNSORT
    # find next available vid
    vid, index = self.vid, self.index
    while vid in index:
        vid += 1
    varstring = '{}{}'.format(sort, vid)
    index[vid] = varstring
    if properties is None:
        properties = []
    self.store[varstring] = properties
    self.vid = vid + 1
    return (varstring, properties)
python
def new(self, sort, properties=None):
    """
    Create a new variable for the given *sort*.
    """
    if sort is None:
        sort = UNKNOWNSORT
    # find next available vid
    vid, index = self.vid, self.index
    while vid in index:
        vid += 1
    varstring = '{}{}'.format(sort, vid)
    index[vid] = varstring
    if properties is None:
        properties = []
    self.store[varstring] = properties
    self.vid = vid + 1
    return (varstring, properties)
Create a new variable for the given *sort*.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L85-L101
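A sketch of how the generator hands out ids, assuming a `_VarGenerator` that can be constructed with defaults and whose `vid` counter starts at 1 with an empty `index` and `store` (the constructor is not shown in this record):

from delphin.mrs.components import _VarGenerator

vg = _VarGenerator()                  # hypothetical default construction
print(vg.new('h'))                    # e.g. ('h1', []) if the counter starts at 1
print(vg.new('e', [('SF', 'prop')]))  # e.g. ('e2', [('SF', 'prop')])
print(vg.new(None))                   # sort falls back to UNKNOWNSORT for the prefix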
delph-in/pydelphin
delphin/mrs/components.py
Lnk.charspan
def charspan(cls, start, end):
    """
    Create a Lnk object for a character span.

    Args:
        start: the initial character position (cfrom)
        end: the final character position (cto)
    """
    return cls(Lnk.CHARSPAN, (int(start), int(end)))
python
def charspan(cls, start, end):
    """
    Create a Lnk object for a character span.

    Args:
        start: the initial character position (cfrom)
        end: the final character position (cto)
    """
    return cls(Lnk.CHARSPAN, (int(start), int(end)))
Create a Lnk object for a character span.

Args:
    start: the initial character position (cfrom)
    end: the final character position (cto)
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L160-L168
delph-in/pydelphin
delphin/mrs/components.py
Lnk.chartspan
def chartspan(cls, start, end):
    """
    Create a Lnk object for a chart span.

    Args:
        start: the initial chart vertex
        end: the final chart vertex
    """
    return cls(Lnk.CHARTSPAN, (int(start), int(end)))
python
def chartspan(cls, start, end):
    """
    Create a Lnk object for a chart span.

    Args:
        start: the initial chart vertex
        end: the final chart vertex
    """
    return cls(Lnk.CHARTSPAN, (int(start), int(end)))
Create a Lnk object for a chart span.

Args:
    start: the initial chart vertex
    end: the final chart vertex
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L171-L179
delph-in/pydelphin
delphin/mrs/components.py
Lnk.tokens
def tokens(cls, tokens):
    """
    Create a Lnk object for a token range.

    Args:
        tokens: a list of token identifiers
    """
    return cls(Lnk.TOKENS, tuple(map(int, tokens)))
python
def tokens(cls, tokens):
    """
    Create a Lnk object for a token range.

    Args:
        tokens: a list of token identifiers
    """
    return cls(Lnk.TOKENS, tuple(map(int, tokens)))
Create a Lnk object for a token range.

Args:
    tokens: a list of token identifiers
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L182-L189
delph-in/pydelphin
delphin/mrs/components.py
_LnkMixin.cfrom
def cfrom(self):
    """
    The initial character position in the surface string.

    Defaults to -1 if there is no valid cfrom value.
    """
    cfrom = -1
    try:
        if self.lnk.type == Lnk.CHARSPAN:
            cfrom = self.lnk.data[0]
    except AttributeError:
        pass  # use default cfrom of -1
    return cfrom
python
def cfrom(self):
    """
    The initial character position in the surface string.

    Defaults to -1 if there is no valid cfrom value.
    """
    cfrom = -1
    try:
        if self.lnk.type == Lnk.CHARSPAN:
            cfrom = self.lnk.data[0]
    except AttributeError:
        pass  # use default cfrom of -1
    return cfrom
The initial character position in the surface string.

Defaults to -1 if there is no valid cfrom value.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L232-L244
delph-in/pydelphin
delphin/mrs/components.py
_LnkMixin.cto
def cto(self):
    """
    The final character position in the surface string.

    Defaults to -1 if there is no valid cto value.
    """
    cto = -1
    try:
        if self.lnk.type == Lnk.CHARSPAN:
            cto = self.lnk.data[1]
    except AttributeError:
        pass  # use default cto of -1
    return cto
python
def cto(self):
    """
    The final character position in the surface string.

    Defaults to -1 if there is no valid cto value.
    """
    cto = -1
    try:
        if self.lnk.type == Lnk.CHARSPAN:
            cto = self.lnk.data[1]
    except AttributeError:
        pass  # use default cto of -1
    return cto
The final character position in the surface string.

Defaults to -1 if there is no valid cto value.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L247-L259
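Putting the Lnk constructors and the _LnkMixin accessors together: a character-span lnk surfaces its offsets through cfrom/cto, and anything without a lnk falls back to -1. A hedged sketch, assuming cfrom/cto are exposed as properties on the mixin and that the mixin only needs a `lnk` attribute on the instance:

from delphin.mrs.components import Lnk, _LnkMixin

class Span(_LnkMixin):
    """Hypothetical stand-in that just stores the lnk the mixin reads."""
    def __init__(self, lnk=None):
        self.lnk = lnk

print(Span(Lnk.charspan(0, 4)).cfrom, Span(Lnk.charspan(0, 4)).cto)  # 0 4
print(Span().cfrom, Span().cto)                                      # -1 -1 (AttributeError path)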
delph-in/pydelphin
delphin/mrs/components.py
Pred.surface
def surface(cls, predstr):
    """Instantiate a Pred from its quoted string representation."""
    lemma, pos, sense, _ = split_pred_string(predstr)
    return cls(Pred.SURFACE, lemma, pos, sense, predstr)
python
def surface(cls, predstr):
    """Instantiate a Pred from its quoted string representation."""
    lemma, pos, sense, _ = split_pred_string(predstr)
    return cls(Pred.SURFACE, lemma, pos, sense, predstr)
Instantiate a Pred from its quoted string representation.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L524-L527
delph-in/pydelphin
delphin/mrs/components.py
Pred.abstract
def abstract(cls, predstr):
    """Instantiate a Pred from its symbol string."""
    lemma, pos, sense, _ = split_pred_string(predstr)
    return cls(Pred.ABSTRACT, lemma, pos, sense, predstr)
python
def abstract(cls, predstr):
    """Instantiate a Pred from its symbol string."""
    lemma, pos, sense, _ = split_pred_string(predstr)
    return cls(Pred.ABSTRACT, lemma, pos, sense, predstr)
Instantiate a Pred from its symbol string.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L536-L539
delph-in/pydelphin
delphin/mrs/components.py
Pred.surface_or_abstract
def surface_or_abstract(cls, predstr):
    """Instantiate a Pred from either its surface or abstract symbol."""
    if predstr.strip('"').lstrip("'").startswith('_'):
        return cls.surface(predstr)
    else:
        return cls.abstract(predstr)
python
def surface_or_abstract(cls, predstr):
    """Instantiate a Pred from either its surface or abstract symbol."""
    if predstr.strip('"').lstrip("'").startswith('_'):
        return cls.surface(predstr)
    else:
        return cls.abstract(predstr)
Instantiate a Pred from either its surface or abstract symbol.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L548-L553
delph-in/pydelphin
delphin/mrs/components.py
Pred.realpred
def realpred(cls, lemma, pos, sense=None):
    """Instantiate a Pred from its components."""
    string_tokens = [lemma]
    if pos is not None:
        string_tokens.append(pos)
    if sense is not None:
        sense = str(sense)
        string_tokens.append(sense)
    predstr = '_'.join([''] + string_tokens + ['rel'])
    return cls(Pred.REALPRED, lemma, pos, sense, predstr)
python
def realpred(cls, lemma, pos, sense=None):
    """Instantiate a Pred from its components."""
    string_tokens = [lemma]
    if pos is not None:
        string_tokens.append(pos)
    if sense is not None:
        sense = str(sense)
        string_tokens.append(sense)
    predstr = '_'.join([''] + string_tokens + ['rel'])
    return cls(Pred.REALPRED, lemma, pos, sense, predstr)
Instantiate a Pred from its components.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L556-L565
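Following the string assembly above, the three-part form yields the familiar `_lemma_pos_sense_rel` shape, and the sense token is simply skipped when it is None. A quick sketch that just restates what the method computes:

from delphin.mrs.components import Pred

# The predstr assembled inside realpred:
#   '_'.join([''] + ['dog', 'n', '1'] + ['rel'])  ->  '_dog_n_1_rel'
p = Pred.realpred('dog', 'n', 1)

# With no sense:
#   '_'.join([''] + ['and', 'c'] + ['rel'])       ->  '_and_c_rel'
q = Pred.realpred('and', 'c')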
delph-in/pydelphin
delphin/mrs/components.py
Node.properties
def properties(self):
    """
    Morphosemantic property mapping.

    Unlike :attr:`sortinfo`, this does not include `cvarsort`.
    """
    d = dict(self.sortinfo)
    if CVARSORT in d:
        del d[CVARSORT]
    return d
python
def properties(self):
    """
    Morphosemantic property mapping.

    Unlike :attr:`sortinfo`, this does not include `cvarsort`.
    """
    d = dict(self.sortinfo)
    if CVARSORT in d:
        del d[CVARSORT]
    return d
Morphosemantic property mapping.

Unlike :attr:`sortinfo`, this does not include `cvarsort`.
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/components.py#L762-L771
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/api_meta_data.py
EntityRepresentation.update_get_params
def update_get_params(self):
    """Update HTTP GET params with the given fields that the user wants to fetch."""
    if isinstance(self._fields, (tuple, list)):  # tuples & lists > x,y,z
        self.get_params["fields"] = ",".join([str(_) for _ in self._fields])
    elif isinstance(self._fields, str):
        self.get_params["fields"] = self._fields
python
def update_get_params(self):
    """Update HTTP GET params with the given fields that the user wants to fetch."""
    if isinstance(self._fields, (tuple, list)):  # tuples & lists > x,y,z
        self.get_params["fields"] = ",".join([str(_) for _ in self._fields])
    elif isinstance(self._fields, str):
        self.get_params["fields"] = self._fields
Update HTTP GET params with the given fields that the user wants to fetch.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/api_meta_data.py#L54-L59
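A sketch of the two accepted shapes for `_fields` (a tuple/list joined with commas, or a pre-joined string), driven through a hypothetical stand-in class that only exposes the attributes the method reads, rather than a fully constructed EntityRepresentation:

from ripe.atlas.cousteau.api_meta_data import EntityRepresentation

class Dummy(object):
    """Hypothetical stand-in exposing only _fields and get_params."""
    update_get_params = EntityRepresentation.update_get_params

    def __init__(self, fields):
        self._fields = fields
        self.get_params = {}

d = Dummy(("id", "status", "tags"))
d.update_get_params()
print(d.get_params)   # {'fields': 'id,status,tags'}

d = Dummy("id,status")
d.update_get_params()
print(d.get_params)   # {'fields': 'id,status'}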
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/api_meta_data.py
EntityRepresentation._fetch_meta_data
def _fetch_meta_data(self):
    """Makes an API call to fetch meta data for the given probe and stores the raw data."""
    is_success, meta_data = AtlasRequest(
        url_path=self.API_META_URL.format(self.id),
        key=self.api_key,
        server=self.server,
        verify=self.verify,
        user_agent=self._user_agent
    ).get(**self.get_params)

    self.meta_data = meta_data

    if not is_success:
        return False
    return True
python
def _fetch_meta_data(self):
    """Makes an API call to fetch meta data for the given probe and stores the raw data."""
    is_success, meta_data = AtlasRequest(
        url_path=self.API_META_URL.format(self.id),
        key=self.api_key,
        server=self.server,
        verify=self.verify,
        user_agent=self._user_agent
    ).get(**self.get_params)

    self.meta_data = meta_data

    if not is_success:
        return False
    return True
Makes an API call to fetch meta data for the given probe and stores the raw data.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/api_meta_data.py#L61-L75
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/api_meta_data.py
Probe._populate_data
def _populate_data(self):
    """Assign some of the probe's raw meta data from the API response to instance properties."""
    if self.id is None:
        self.id = self.meta_data.get("id")
    self.is_anchor = self.meta_data.get("is_anchor")
    self.country_code = self.meta_data.get("country_code")
    self.description = self.meta_data.get("description")
    self.is_public = self.meta_data.get("is_public")
    self.asn_v4 = self.meta_data.get("asn_v4")
    self.asn_v6 = self.meta_data.get("asn_v6")
    self.address_v4 = self.meta_data.get("address_v4")
    self.address_v6 = self.meta_data.get("address_v6")
    self.prefix_v4 = self.meta_data.get("prefix_v4")
    self.prefix_v6 = self.meta_data.get("prefix_v6")
    self.geometry = self.meta_data.get("geometry")
    self.tags = self.meta_data.get("tags")
    self.status = self.meta_data.get("status", {}).get("name")
python
def _populate_data(self):
    """Assign some of the probe's raw meta data from the API response to instance properties."""
    if self.id is None:
        self.id = self.meta_data.get("id")
    self.is_anchor = self.meta_data.get("is_anchor")
    self.country_code = self.meta_data.get("country_code")
    self.description = self.meta_data.get("description")
    self.is_public = self.meta_data.get("is_public")
    self.asn_v4 = self.meta_data.get("asn_v4")
    self.asn_v6 = self.meta_data.get("asn_v6")
    self.address_v4 = self.meta_data.get("address_v4")
    self.address_v6 = self.meta_data.get("address_v6")
    self.prefix_v4 = self.meta_data.get("prefix_v4")
    self.prefix_v6 = self.meta_data.get("prefix_v6")
    self.geometry = self.meta_data.get("geometry")
    self.tags = self.meta_data.get("tags")
    self.status = self.meta_data.get("status", {}).get("name")
Assign some of the probe's raw meta data from the API response to instance properties.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/api_meta_data.py#L90-L106
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/api_meta_data.py
Measurement._populate_data
def _populate_data(self):
    """Assign some of the measurement's raw meta data from the API response to instance properties."""
    if self.id is None:
        self.id = self.meta_data.get("id")
    self.stop_time = None
    self.creation_time = None
    self.start_time = None
    self.populate_times()
    self.protocol = self.meta_data.get("af")
    self.target_ip = self.meta_data.get("target_ip")
    self.target_asn = self.meta_data.get("target_asn")
    self.target = self.meta_data.get("target")
    self.description = self.meta_data.get("description")
    self.is_oneoff = self.meta_data.get("is_oneoff")
    self.is_public = self.meta_data.get("is_public")
    self.interval = self.meta_data.get("interval")
    self.resolve_on_probe = self.meta_data.get("resolve_on_probe")
    self.status_id = self.meta_data.get("status", {}).get("id")
    self.status = self.meta_data.get("status", {}).get("name")
    self.type = self.get_type()
    self.result_url = self.meta_data.get("result")
python
def _populate_data(self):
    """Assign some of the measurement's raw meta data from the API response to instance properties."""
    if self.id is None:
        self.id = self.meta_data.get("id")
    self.stop_time = None
    self.creation_time = None
    self.start_time = None
    self.populate_times()
    self.protocol = self.meta_data.get("af")
    self.target_ip = self.meta_data.get("target_ip")
    self.target_asn = self.meta_data.get("target_asn")
    self.target = self.meta_data.get("target")
    self.description = self.meta_data.get("description")
    self.is_oneoff = self.meta_data.get("is_oneoff")
    self.is_public = self.meta_data.get("is_public")
    self.interval = self.meta_data.get("interval")
    self.resolve_on_probe = self.meta_data.get("resolve_on_probe")
    self.status_id = self.meta_data.get("status", {}).get("id")
    self.status = self.meta_data.get("status", {}).get("name")
    self.type = self.get_type()
    self.result_url = self.meta_data.get("result")
Assign some of the measurement's raw meta data from the API response to instance properties.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/api_meta_data.py#L121-L142
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/api_meta_data.py
Measurement.get_type
def get_type(self):
    """
    Get the type of the measurement, keeping backwards
    compatibility with v2 API output changes.
    """
    mtype = None
    if "type" not in self.meta_data:
        return mtype

    mtype = self.meta_data["type"]
    if isinstance(mtype, dict):
        mtype = self.meta_data.get("type", {}).get("name", "").upper()
    elif isinstance(mtype, str):
        mtype = mtype

    return mtype
python
def get_type(self):
    """
    Get the type of the measurement, keeping backwards
    compatibility with v2 API output changes.
    """
    mtype = None
    if "type" not in self.meta_data:
        return mtype

    mtype = self.meta_data["type"]
    if isinstance(mtype, dict):
        mtype = self.meta_data.get("type", {}).get("name", "").upper()
    elif isinstance(mtype, str):
        mtype = mtype

    return mtype
Get the type of the measurement, keeping backwards compatibility with v2 API output changes.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/api_meta_data.py#L144-L159
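The backwards-compatibility shim means the two API shapes come back slightly differently: the dict form is upper-cased while the plain string is passed through unchanged. A hedged sketch, driving the method through a hypothetical stand-in that only supplies the `meta_data` attribute it reads:

from ripe.atlas.cousteau.api_meta_data import Measurement

class FakeMeasurement(object):
    """Hypothetical stand-in exposing only meta_data for get_type()."""
    get_type = Measurement.get_type

    def __init__(self, meta_data):
        self.meta_data = meta_data

print(FakeMeasurement({"type": {"name": "ping"}}).get_type())  # 'PING'
print(FakeMeasurement({"type": "ping"}).get_type())            # 'ping' (passed through)
print(FakeMeasurement({}).get_type())                          # None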
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/api_meta_data.py
Measurement.populate_times
def populate_times(self):
    """
    Populate the various meta data times that come with the
    measurement, if they are present.
    """
    stop_time = self.meta_data.get("stop_time")
    if stop_time:
        stop_naive = datetime.utcfromtimestamp(stop_time)
        self.stop_time = stop_naive.replace(tzinfo=tzutc())

    creation_time = self.meta_data.get("creation_time")
    if creation_time:
        creation_naive = datetime.utcfromtimestamp(creation_time)
        self.creation_time = creation_naive.replace(tzinfo=tzutc())

    start_time = self.meta_data.get("start_time")
    if start_time:
        start_naive = datetime.utcfromtimestamp(start_time)
        self.start_time = start_naive.replace(tzinfo=tzutc())
python
def populate_times(self):
    """
    Populate the various meta data times that come with the
    measurement, if they are present.
    """
    stop_time = self.meta_data.get("stop_time")
    if stop_time:
        stop_naive = datetime.utcfromtimestamp(stop_time)
        self.stop_time = stop_naive.replace(tzinfo=tzutc())

    creation_time = self.meta_data.get("creation_time")
    if creation_time:
        creation_naive = datetime.utcfromtimestamp(creation_time)
        self.creation_time = creation_naive.replace(tzinfo=tzutc())

    start_time = self.meta_data.get("start_time")
    if start_time:
        start_naive = datetime.utcfromtimestamp(start_time)
        self.start_time = start_naive.replace(tzinfo=tzutc())
Populate the various meta data times that come with the measurement, if they are present.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/api_meta_data.py#L161-L179
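The timestamps arrive as UNIX epoch seconds and are converted into timezone-aware UTC datetimes. A small standalone sketch of the exact conversion the method applies to each field:

from datetime import datetime
from dateutil.tz import tzutc

stop_time = 1500000000  # epoch seconds, as delivered in the measurement meta data
stop_naive = datetime.utcfromtimestamp(stop_time)  # naive UTC datetime
print(stop_naive.replace(tzinfo=tzutc()))          # 2017-07-14 02:40:00+00:00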
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasSource.set_type
def set_type(self, value):
    """Setter for type attribute"""
    if value not in self.types_available:
        log = "Sources field 'type' should be in one of %s" % (
            self.types_available
        )
        raise MalFormattedSource(log)
    self._type = value
python
def set_type(self, value):
    """Setter for type attribute"""
    if value not in self.types_available:
        log = "Sources field 'type' should be in one of %s" % (
            self.types_available
        )
        raise MalFormattedSource(log)
    self._type = value
Setter for type attribute
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L83-L90
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasSource.set_tags
def set_tags(self, value):
    """Setter for tags attribute"""
    log = (
        'Sources fields "tags" should be a dict in the format '
        '{"include": [ "tag1", "tag2", "tagN" ],'
        '"exclude": [ "tag1", "tag2", "tagN" ] }'
    )
    if not isinstance(value, dict):
        raise MalFormattedSource(log)
    if not set(value.keys()).issubset(set(["include", "exclude"])):
        raise MalFormattedSource(log)
    for tag_list in value.values():
        if not isinstance(tag_list, list):
            raise MalFormattedSource(log)
        if [tag for tag in tag_list if not isinstance(tag, str)]:
            raise MalFormattedSource(log)
    self._tags = value
python
def set_tags(self, value):
    """Setter for tags attribute"""
    log = (
        'Sources fields "tags" should be a dict in the format '
        '{"include": [ "tag1", "tag2", "tagN" ],'
        '"exclude": [ "tag1", "tag2", "tagN" ] }'
    )
    if not isinstance(value, dict):
        raise MalFormattedSource(log)
    if not set(value.keys()).issubset(set(["include", "exclude"])):
        raise MalFormattedSource(log)
    for tag_list in value.values():
        if not isinstance(tag_list, list):
            raise MalFormattedSource(log)
        if [tag for tag in tag_list if not isinstance(tag, str)]:
            raise MalFormattedSource(log)
    self._tags = value
Setter for tags attribute
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L100-L120
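A sketch of a tags dict that passes the validation above; the keyword-argument constructor follows the library's usual documented form, so treat this as an illustration rather than a definitive call:

from ripe.atlas.cousteau import AtlasSource

# Accepted: only "include"/"exclude" keys, each holding a list of tag strings.
source = AtlasSource(
    type="area",
    value="WW",
    requested=5,
    tags={"include": ["system-ipv4-works"], "exclude": ["system-anchor"]},
)
# A non-dict value, an unknown key, or a non-string tag raises MalFormattedSource.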
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasSource.build_api_struct
def build_api_struct(self):
    """
    Calls the clean method of the class and returns the info in
    a structure that the Atlas API accepts.
    """
    self.clean()
    r = {
        "type": self._type,
        "requested": self._requested,
        "value": self._value
    }
    if self._tags:
        r["tags"] = self._tags

    return r
python
def build_api_struct(self):
    """
    Calls the clean method of the class and returns the info in
    a structure that the Atlas API accepts.
    """
    self.clean()
    r = {
        "type": self._type,
        "requested": self._requested,
        "value": self._value
    }
    if self._tags:
        r["tags"] = self._tags

    return r
Calls the clean method of the class and returns the info in a structure that the Atlas API accepts.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L135-L149
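Tying the setters together, `build_api_struct()` is what the request machinery ultimately serializes. A hedged sketch of the resulting payload, assuming "country" is among `types_available` and the constructor accepts the documented keyword form:

from ripe.atlas.cousteau import AtlasSource

source = AtlasSource(type="country", value="NL", requested=10)
print(source.build_api_struct())
# expected, given clean() passes and no tags were set:
# {'type': 'country', 'requested': 10, 'value': 'NL'}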
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasChangeSource.set_type
def set_type(self, value):
    """Setter for type attribute"""
    if self.action == "remove" and value != "probes":
        log = "Sources field 'type' when action is remove should always be 'probes'."
        raise MalFormattedSource(log)
    self._type = value
python
def set_type(self, value):
    """Setter for type attribute"""
    if self.action == "remove" and value != "probes":
        log = "Sources field 'type' when action is remove should always be 'probes'."
        raise MalFormattedSource(log)
    self._type = value
Setter for type attribute
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L173-L178
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasChangeSource.set_tags
def set_tags(self, value):
    """Setter for tags attribute"""
    if self.action == "remove":
        log = (
            "Tag-based filtering can only be used when adding "
            "participant probes for a measurement."
        )
        raise MalFormattedSource(log)
    super(AtlasChangeSource, self).set_tags(value)
python
def set_tags(self, value):
    """Setter for tags attribute"""
    if self.action == "remove":
        log = (
            "Tag-based filtering can only be used when adding "
            "participant probes for a measurement."
        )
        raise MalFormattedSource(log)
    super(AtlasChangeSource, self).set_tags(value)
Setter for tags attribute
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L188-L196
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasChangeSource.set_action
def set_action(self, value):
    """Setter for action attribute"""
    if value not in ("remove", "add"):
        log = "Sources field 'action' should be 'remove' or 'add'."
        raise MalFormattedSource(log)
    self._action = value
python
def set_action(self, value):
    """Setter for action attribute"""
    if value not in ("remove", "add"):
        log = "Sources field 'action' should be 'remove' or 'add'."
        raise MalFormattedSource(log)
    self._action = value
Setter for action attribute
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L206-L211
RIPE-NCC/ripe-atlas-cousteau
ripe/atlas/cousteau/source.py
AtlasChangeSource.clean
def clean(self):
    """
    Cleans/checks that the user has entered all required attributes.
    This might save some queries from being sent to the server
    if they are obviously malformed.
    """
    if not all([self._type, self._requested, self._value, self._action]):
        raise MalFormattedSource(
            "<type, requested, value, action> fields are required."
        )
python
def clean(self):
    """
    Cleans/checks that the user has entered all required attributes.
    This might save some queries from being sent to the server
    if they are obviously malformed.
    """
    if not all([self._type, self._requested, self._value, self._action]):
        raise MalFormattedSource(
            "<type, requested, value, action> fields are required."
        )
Cleans/checks that the user has entered all required attributes. This might save some queries from being sent to the server if they are obviously malformed.
https://github.com/RIPE-NCC/ripe-atlas-cousteau/blob/ffee2556aaa4df86525b88c269bb098de11678ec/ripe/atlas/cousteau/source.py#L216-L224