repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (string, 1 class) | code (string, 75-19.8k chars) | code_tokens (sequence) | docstring (string, 3-17.3k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (string, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.gen_schlumberger | def gen_schlumberger(self, M, N, a=None):
"""generate one Schlumberger sounding configuration, that is, one set
of configurations for one potential dipole MN.
Parameters
----------
M: int
electrode number for the first potential electrode
N: int
electrode number for the second potential electrode
a: int, optional
stepping between subsequent voltage electrodes. If not set,
determine it as a = abs(M - N)
Returns
-------
configs: Kx4 numpy.ndarray
array holding the configurations
Examples
--------
import reda.configs.configManager as CRconfig
config = CRconfig.ConfigManager(nr_of_electrodes=40)
config.gen_schlumberger(M=20, N=21)
"""
if a is None:
a = np.abs(M - N)
nr_of_steps_left = int((min(M, N) - 1) / a)
nr_of_steps_right = int((self.nr_electrodes - max(M, N)) / a)
configs = []
for i in range(0, min(nr_of_steps_left, nr_of_steps_right)):
A = min(M, N) - (i + 1) * a
B = max(M, N) + (i + 1) * a
configs.append((A, B, M, N))
configs = np.array(configs)
self.add_to_configs(configs)
return configs | python | def gen_schlumberger(self, M, N, a=None):
"""generate one Schlumberger sounding configuration, that is, one set
of configurations for one potential dipole MN.
Parameters
----------
M: int
electrode number for the first potential electrode
N: int
electrode number for the second potential electrode
a: int, optional
stepping between subsequent voltage electrodes. If not set,
determine it as a = abs(M - N)
Returns
-------
configs: Kx4 numpy.ndarray
array holding the configurations
Examples
--------
import reda.configs.configManager as CRconfig
config = CRconfig.ConfigManager(nr_of_electrodes=40)
config.gen_schlumberger(M=20, N=21)
"""
if a is None:
a = np.abs(M - N)
nr_of_steps_left = int((min(M, N) - 1) / a)
nr_of_steps_right = int((self.nr_electrodes - max(M, N)) / a)
configs = []
for i in range(0, min(nr_of_steps_left, nr_of_steps_right)):
A = min(M, N) - (i + 1) * a
B = max(M, N) + (i + 1) * a
configs.append((A, B, M, N))
configs = np.array(configs)
self.add_to_configs(configs)
return configs | [
"def",
"gen_schlumberger",
"(",
"self",
",",
"M",
",",
"N",
",",
"a",
"=",
"None",
")",
":",
"if",
"a",
"is",
"None",
":",
"a",
"=",
"np",
".",
"abs",
"(",
"M",
"-",
"N",
")",
"nr_of_steps_left",
"=",
"int",
"(",
"min",
"(",
"M",
",",
"N",
")",
"-",
"1",
"/",
"a",
")",
"nr_of_steps_right",
"=",
"int",
"(",
"(",
"self",
".",
"nr_electrodes",
"-",
"max",
"(",
"M",
",",
"N",
")",
")",
"/",
"a",
")",
"configs",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"min",
"(",
"nr_of_steps_left",
",",
"nr_of_steps_right",
")",
")",
":",
"A",
"=",
"min",
"(",
"M",
",",
"N",
")",
"-",
"(",
"i",
"+",
"1",
")",
"*",
"a",
"B",
"=",
"max",
"(",
"M",
",",
"N",
")",
"+",
"(",
"i",
"+",
"1",
")",
"*",
"a",
"configs",
".",
"append",
"(",
"(",
"A",
",",
"B",
",",
"M",
",",
"N",
")",
")",
"configs",
"=",
"np",
".",
"array",
"(",
"configs",
")",
"self",
".",
"add_to_configs",
"(",
"configs",
")",
"return",
"configs"
] | generate one Schlumberger sounding configuration, that is, one set
of configurations for one potential dipole MN.
Parameters
----------
M: int
electrode number for the first potential electrode
N: int
electrode number for the second potential electrode
a: int, optional
stepping between subsequent voltage electrodes. If not set,
determine it as a = abs(M - N)
Returns
-------
configs: Kx4 numpy.ndarray
array holding the configurations
Examples
--------
import reda.configs.configManager as CRconfig
config = CRconfig.ConfigManager(nr_of_electrodes=40)
config.gen_schlumberger(M=20, N=21) | [
"generate",
"one",
"Schlumberger",
"sounding",
"configuration",
"that",
"is",
"one",
"set",
"of",
"configurations",
"for",
"one",
"potential",
"dipole",
"MN",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L459-L498 | train |
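For orientation, a minimal usage sketch of gen_schlumberger as documented above (electrode numbers are illustrative; the number of generated rows depends on nr_of_electrodes and the step a):
import reda.configs.configManager as CRconfig
config = CRconfig.ConfigManager(nr_of_electrodes=40)
# MN stays fixed at 20-21; A and B move outwards in steps of a = abs(M - N) = 1
abmn = config.gen_schlumberger(M=20, N=21)
print(abmn.shape)  # (K, 4) array of (A, B, M, N) quadrupoles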
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.add_to_configs | def add_to_configs(self, configs):
"""Add one or more measurement configurations to the stored
configurations
Parameters
----------
configs: list or numpy.ndarray
list or array of configurations
Returns
-------
configs: Kx4 numpy.ndarray
array holding all configurations of this instance
"""
if len(configs) == 0:
return None
if self.configs is None:
self.configs = np.atleast_2d(configs)
else:
configs = np.atleast_2d(configs)
self.configs = np.vstack((self.configs, configs))
return self.configs | python | def add_to_configs(self, configs):
"""Add one or more measurement configurations to the stored
configurations
Parameters
----------
configs: list or numpy.ndarray
list or array of configurations
Returns
-------
configs: Kx4 numpy.ndarray
array holding all configurations of this instance
"""
if len(configs) == 0:
return None
if self.configs is None:
self.configs = np.atleast_2d(configs)
else:
configs = np.atleast_2d(configs)
self.configs = np.vstack((self.configs, configs))
return self.configs | [
"def",
"add_to_configs",
"(",
"self",
",",
"configs",
")",
":",
"if",
"len",
"(",
"configs",
")",
"==",
"0",
":",
"return",
"None",
"if",
"self",
".",
"configs",
"is",
"None",
":",
"self",
".",
"configs",
"=",
"np",
".",
"atleast_2d",
"(",
"configs",
")",
"else",
":",
"configs",
"=",
"np",
".",
"atleast_2d",
"(",
"configs",
")",
"self",
".",
"configs",
"=",
"np",
".",
"vstack",
"(",
"(",
"self",
".",
"configs",
",",
"configs",
")",
")",
"return",
"self",
".",
"configs"
] | Add one or more measurement configurations to the stored
configurations
Parameters
----------
configs: list or numpy.ndarray
list or array of configurations
Returns
-------
configs: Kx4 numpy.ndarray
array holding all configurations of this instance | [
"Add",
"one",
"or",
"more",
"measurement",
"configurations",
"to",
"the",
"stored",
"configurations"
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L521-L543 | train |
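A short sketch of adding hand-built quadrupoles to a manager (the ABMN values below are arbitrary examples):
from reda.configs.configManager import ConfigManager
cfg = ConfigManager(nr_of_electrodes=10)
cfg.add_to_configs([(1, 4, 2, 3)])                 # single configuration, stored as a 1x4 array
cfg.add_to_configs([(2, 5, 3, 4), (3, 6, 4, 5)])   # further configurations are stacked below
print(cfg.configs)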
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.split_into_normal_and_reciprocal | def split_into_normal_and_reciprocal(self, pad=False,
return_indices=False):
"""Split the stored configurations into normal and reciprocal
measurements
** *Rule 1: the normal configuration contains the smallest electrode
number of the four involved electrodes in the current dipole* **
Parameters
----------
pad: bool, optional
if True, add numpy.nan values to the reciprocals for non-existent
measurements
return_indices: bool, optional
if True, also return the indices of normal and reciprocal
measurements. This can be used to extract corresponding
measurements.
Returns
-------
normal: numpy.ndarray
Nnx4 array. If pad is True, then Nn == N (total number of
unique measurements). Otherwise Nn is the number of normal
measurements.
reciprocal: numpy.ndarray
Nrx4 array. If pad is True, then Nr == N (total number of
unique measurements). Otherwise Nr is the number of reciprocal
measurements.
nor_indices: numpy.ndarray, optional
Nnx1 array containing the indices of normal measurements. Only
returned if return_indices is True.
rec_indices: numpy.ndarray, optional
Nrx1 array containing the indices of reciprocal measurements. Only
returned if return_indices is True.
"""
# for simplicity, we create an array where AB and MN are sorted
configs = np.hstack((np.sort(self.configs[:, 0:2], axis=1),
np.sort(self.configs[:, 2:4], axis=1)))
ab_min = configs[:, 0]
mn_min = configs[:, 2]
# rule 1
indices_normal = np.where(ab_min < mn_min)[0]
# now look for reciprocals
indices_used = []
normal = []
normal_indices = []
reciprocal_indices = []
reciprocal = []
duplicates = []
for index in indices_normal:
indices_used.append(index)
normal.append(self.configs[index, :])
normal_indices.append(index)
# look for reciprocal configuration
index_rec = np.where(
# A == M, B == N, M == A, N == B
(configs[:, 0] == configs[index, 2]) &
(configs[:, 1] == configs[index, 3]) &
(configs[:, 2] == configs[index, 0]) &
(configs[:, 3] == configs[index, 1]))[0]
if len(index_rec) == 0 and pad:
reciprocal.append(np.ones(4) * np.nan)
elif len(index_rec) == 1:
reciprocal.append(self.configs[index_rec[0], :])
indices_used.append(index_rec[0])
reciprocal_indices.append(index_rec[0])
elif len(index_rec) > 1:
# take the first one
reciprocal.append(self.configs[index_rec[0], :])
reciprocal_indices.append(index_rec[0])
duplicates += list(index_rec[1:])
indices_used += list(index_rec)
# now determine all reciprocal-only parameters
set_all_indices = set(list(range(0, configs.shape[0])))
set_used_indices = set(indices_used)
reciprocal_only_indices = set_all_indices - set_used_indices
for index in reciprocal_only_indices:
if pad:
normal.append(np.ones(4) * np.nan)
reciprocal.append(self.configs[index, :])
normals = np.array(normal)
reciprocals = np.array(reciprocal)
if return_indices:
return normals, reciprocals, normal_indices, reciprocal_indices
else:
return normals, reciprocals | python | def split_into_normal_and_reciprocal(self, pad=False,
return_indices=False):
"""Split the stored configurations into normal and reciprocal
measurements
** *Rule 1: the normal configuration contains the smallest electrode
number of the four involved electrodes in the current dipole* **
Parameters
----------
pad: bool, optional
if True, add numpy.nan values to the reciprocals for non-existent
measurements
return_indices: bool, optional
if True, also return the indices of normal and reciprocal
measurements. This can be used to extract corresponding
measurements.
Returns
-------
normal: numpy.ndarray
Nnx4 array. If pad is True, then Nn == N (total number of
unique measurements). Otherwise Nn is the number of normal
measurements.
reciprocal: numpy.ndarray
Nrx4 array. If pad is True, then Nr == N (total number of
unique measurements). Otherwise Nr is the number of reciprocal
measurements.
nor_indices: numpy.ndarray, optional
Nnx1 array containing the indices of normal measurements. Only
returned if return_indices is True.
rec_indices: numpy.ndarray, optional
Nrx1 array containing the indices of reciprocal measurements. Only
returned if return_indices is True.
"""
# for simplicity, we create an array where AB and MN are sorted
configs = np.hstack((np.sort(self.configs[:, 0:2], axis=1),
np.sort(self.configs[:, 2:4], axis=1)))
ab_min = configs[:, 0]
mn_min = configs[:, 2]
# rule 1
indices_normal = np.where(ab_min < mn_min)[0]
# now look for reciprocals
indices_used = []
normal = []
normal_indices = []
reciprocal_indices = []
reciprocal = []
duplicates = []
for index in indices_normal:
indices_used.append(index)
normal.append(self.configs[index, :])
normal_indices.append(index)
# look for reciprocal configuration
index_rec = np.where(
# A == M, B == N, M == A, N == B
(configs[:, 0] == configs[index, 2]) &
(configs[:, 1] == configs[index, 3]) &
(configs[:, 2] == configs[index, 0]) &
(configs[:, 3] == configs[index, 1]))[0]
if len(index_rec) == 0 and pad:
reciprocal.append(np.ones(4) * np.nan)
elif len(index_rec) == 1:
reciprocal.append(self.configs[index_rec[0], :])
indices_used.append(index_rec[0])
reciprocal_indices.append(index_rec[0])
elif len(index_rec) > 1:
# take the first one
reciprocal.append(self.configs[index_rec[0], :])
reciprocal_indices.append(index_rec[0])
duplicates += list(index_rec[1:])
indices_used += list(index_rec)
# now determine all reciprocal-only parameters
set_all_indices = set(list(range(0, configs.shape[0])))
set_used_indices = set(indices_used)
reciprocal_only_indices = set_all_indices - set_used_indices
for index in reciprocal_only_indices:
if pad:
normal.append(np.ones(4) * np.nan)
reciprocal.append(self.configs[index, :])
normals = np.array(normal)
reciprocals = np.array(reciprocal)
if return_indices:
return normals, reciprocals, normal_indices, reciprocal_indices
else:
return normals, reciprocals | [
"def",
"split_into_normal_and_reciprocal",
"(",
"self",
",",
"pad",
"=",
"False",
",",
"return_indices",
"=",
"False",
")",
":",
"# for simplicity, we create an array where AB and MN are sorted",
"configs",
"=",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"sort",
"(",
"self",
".",
"configs",
"[",
":",
",",
"0",
":",
"2",
"]",
",",
"axis",
"=",
"1",
")",
",",
"np",
".",
"sort",
"(",
"self",
".",
"configs",
"[",
":",
",",
"2",
":",
"4",
"]",
",",
"axis",
"=",
"1",
")",
")",
")",
"ab_min",
"=",
"configs",
"[",
":",
",",
"0",
"]",
"mn_min",
"=",
"configs",
"[",
":",
",",
"2",
"]",
"# rule 1",
"indices_normal",
"=",
"np",
".",
"where",
"(",
"ab_min",
"<",
"mn_min",
")",
"[",
"0",
"]",
"# now look for reciprocals",
"indices_used",
"=",
"[",
"]",
"normal",
"=",
"[",
"]",
"normal_indices",
"=",
"[",
"]",
"reciprocal_indices",
"=",
"[",
"]",
"reciprocal",
"=",
"[",
"]",
"duplicates",
"=",
"[",
"]",
"for",
"index",
"in",
"indices_normal",
":",
"indices_used",
".",
"append",
"(",
"index",
")",
"normal",
".",
"append",
"(",
"self",
".",
"configs",
"[",
"index",
",",
":",
"]",
")",
"normal_indices",
".",
"append",
"(",
"index",
")",
"# look for reciprocal configuration",
"index_rec",
"=",
"np",
".",
"where",
"(",
"# A == M, B == N, M == A, N == B",
"(",
"configs",
"[",
":",
",",
"0",
"]",
"==",
"configs",
"[",
"index",
",",
"2",
"]",
")",
"&",
"(",
"configs",
"[",
":",
",",
"1",
"]",
"==",
"configs",
"[",
"index",
",",
"3",
"]",
")",
"&",
"(",
"configs",
"[",
":",
",",
"2",
"]",
"==",
"configs",
"[",
"index",
",",
"0",
"]",
")",
"&",
"(",
"configs",
"[",
":",
",",
"3",
"]",
"==",
"configs",
"[",
"index",
",",
"1",
"]",
")",
")",
"[",
"0",
"]",
"if",
"len",
"(",
"index_rec",
")",
"==",
"0",
"and",
"pad",
":",
"reciprocal",
".",
"append",
"(",
"np",
".",
"ones",
"(",
"4",
")",
"*",
"np",
".",
"nan",
")",
"elif",
"len",
"(",
"index_rec",
")",
"==",
"1",
":",
"reciprocal",
".",
"append",
"(",
"self",
".",
"configs",
"[",
"index_rec",
"[",
"0",
"]",
",",
":",
"]",
")",
"indices_used",
".",
"append",
"(",
"index_rec",
"[",
"0",
"]",
")",
"reciprocal_indices",
".",
"append",
"(",
"index_rec",
"[",
"0",
"]",
")",
"elif",
"len",
"(",
"index_rec",
">",
"1",
")",
":",
"# take the first one",
"reciprocal",
".",
"append",
"(",
"self",
".",
"configs",
"[",
"index_rec",
"[",
"0",
"]",
",",
":",
"]",
")",
"reciprocal_indices",
".",
"append",
"(",
"index_rec",
"[",
"0",
"]",
")",
"duplicates",
"+=",
"list",
"(",
"index_rec",
"[",
"1",
":",
"]",
")",
"indices_used",
"+=",
"list",
"(",
"index_rec",
")",
"# now determine all reciprocal-only parameters",
"set_all_indices",
"=",
"set",
"(",
"list",
"(",
"range",
"(",
"0",
",",
"configs",
".",
"shape",
"[",
"0",
"]",
")",
")",
")",
"set_used_indices",
"=",
"set",
"(",
"indices_used",
")",
"reciprocal_only_indices",
"=",
"set_all_indices",
"-",
"set_used_indices",
"for",
"index",
"in",
"reciprocal_only_indices",
":",
"if",
"pad",
":",
"normal",
".",
"append",
"(",
"np",
".",
"ones",
"(",
"4",
")",
"*",
"np",
".",
"nan",
")",
"reciprocal",
".",
"append",
"(",
"self",
".",
"configs",
"[",
"index",
",",
":",
"]",
")",
"normals",
"=",
"np",
".",
"array",
"(",
"normal",
")",
"reciprocals",
"=",
"np",
".",
"array",
"(",
"reciprocal",
")",
"if",
"return_indices",
":",
"return",
"normals",
",",
"reciprocals",
",",
"normal_indices",
",",
"reciprocal_indices",
"else",
":",
"return",
"normals",
",",
"reciprocals"
] | Split the stored configurations into normal and reciprocal
measurements
** *Rule 1: the normal configuration contains the smallest electrode
number of the four involved electrodes in the current dipole* **
Parameters
----------
pad: bool, optional
if True, add numpy.nan values to the reciprocals for non-existent
measurements
return_indices: bool, optional
if True, also return the indices of normal and reciprocal
measurements. This can be used to extract corresponding
measurements.
Returns
-------
normal: numpy.ndarray
Nnx4 array. If pad is True, then Nn == N (total number of
unique measurements). Otherwise Nn is the number of normal
measurements.
reciprocal: numpy.ndarray
Nrx4 array. If pad is True, then Nr == N (total number of
unique measurements). Otherwise Nr is the number of reciprocal
measurements.
nor_indices: numpy.ndarray, optional
Nnx1 array containing the indices of normal measurements. Only
returned if return_indices is True.
rec_indices: numpy.ndarray, optional
Nrx1 array containing the indices of reciprocal measurements. Only
returned if return_indices is True. | [
"Split",
"the",
"stored",
"configurations",
"into",
"normal",
"and",
"reciprocal",
"measurements"
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L545-L638 | train |
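A brief sketch of how this split is typically used together with gen_dipole_dipole and gen_reciprocals (both shown elsewhere in this file):
from reda.configs.configManager import ConfigManager
cfg = ConfigManager(nr_of_electrodes=5)
cfg.gen_dipole_dipole(skipc=0)
cfg.gen_reciprocals(append=True)
nor, rec, nor_idx, rec_idx = cfg.split_into_normal_and_reciprocal(
    pad=True, return_indices=True)
# with pad=True, row i of nor and row i of rec refer to the same quadrupole,
# with numpy.nan rows where a counterpart is missing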
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.gen_reciprocals | def gen_reciprocals(self, append=False):
""" Generate reciprocal configurations, sort by AB, and optionally
append to configurations.
Parameters
----------
append : bool
Append reciprocals to configs (the default is False).
Examples
--------
>>> cfgs = ConfigManager(nr_of_electrodes=5)
>>> nor = cfgs.gen_dipole_dipole(skipc=0)
>>> rec = cfgs.gen_reciprocals(append=True)
>>> print(cfgs.configs)
[[1 2 3 4]
[1 2 4 5]
[2 3 4 5]
[3 4 1 2]
[4 5 1 2]
[4 5 2 3]]
"""
# Switch AB and MN
reciprocals = self.configs.copy()[:, ::-1]
reciprocals[:, 0:2] = np.sort(reciprocals[:, 0:2], axis=1)
reciprocals[:, 2:4] = np.sort(reciprocals[:, 2:4], axis=1)
# # Sort by current dipoles
ind = np.lexsort((reciprocals[:, 3], reciprocals[:, 2],
reciprocals[:, 1], reciprocals[:, 0]))
reciprocals = reciprocals[ind]
if append:
self.configs = np.vstack((self.configs, reciprocals))
return reciprocals | python | def gen_reciprocals(self, append=False):
""" Generate reciprocal configurations, sort by AB, and optionally
append to configurations.
Parameters
----------
append : bool
Append reciprocals to configs (the default is False).
Examples
--------
>>> cfgs = ConfigManager(nr_of_electrodes=5)
>>> nor = cfgs.gen_dipole_dipole(skipc=0)
>>> rec = cfgs.gen_reciprocals(append=True)
>>> print(cfgs.configs)
[[1 2 3 4]
[1 2 4 5]
[2 3 4 5]
[3 4 1 2]
[4 5 1 2]
[4 5 2 3]]
"""
# Switch AB and MN
reciprocals = self.configs.copy()[:, ::-1]
reciprocals[:, 0:2] = np.sort(reciprocals[:, 0:2], axis=1)
reciprocals[:, 2:4] = np.sort(reciprocals[:, 2:4], axis=1)
# # Sort by current dipoles
ind = np.lexsort((reciprocals[:, 3], reciprocals[:, 2],
reciprocals[:, 1], reciprocals[:, 0]))
reciprocals = reciprocals[ind]
if append:
self.configs = np.vstack((self.configs, reciprocals))
return reciprocals | [
"def",
"gen_reciprocals",
"(",
"self",
",",
"append",
"=",
"False",
")",
":",
"# Switch AB and MN",
"reciprocals",
"=",
"self",
".",
"configs",
".",
"copy",
"(",
")",
"[",
":",
",",
":",
":",
"-",
"1",
"]",
"reciprocals",
"[",
":",
",",
"0",
":",
"2",
"]",
"=",
"np",
".",
"sort",
"(",
"reciprocals",
"[",
":",
",",
"0",
":",
"2",
"]",
",",
"axis",
"=",
"1",
")",
"reciprocals",
"[",
":",
",",
"2",
":",
"4",
"]",
"=",
"np",
".",
"sort",
"(",
"reciprocals",
"[",
":",
",",
"2",
":",
"4",
"]",
",",
"axis",
"=",
"1",
")",
"# # Sort by current dipoles",
"ind",
"=",
"np",
".",
"lexsort",
"(",
"(",
"reciprocals",
"[",
":",
",",
"3",
"]",
",",
"reciprocals",
"[",
":",
",",
"2",
"]",
",",
"reciprocals",
"[",
":",
",",
"1",
"]",
",",
"reciprocals",
"[",
":",
",",
"0",
"]",
")",
")",
"reciprocals",
"=",
"reciprocals",
"[",
"ind",
"]",
"if",
"append",
":",
"self",
".",
"configs",
"=",
"np",
".",
"vstack",
"(",
"(",
"self",
".",
"configs",
",",
"reciprocals",
")",
")",
"return",
"reciprocals"
] | Generate reciprocal configurations, sort by AB, and optionally
append to configurations.
Parameters
----------
append : bool
Append reciprocals to configs (the default is False).
Examples
--------
>>> cfgs = ConfigManager(nr_of_electrodes=5)
>>> nor = cfgs.gen_dipole_dipole(skipc=0)
>>> rec = cfgs.gen_reciprocals(append=True)
>>> print(cfgs.configs)
[[1 2 3 4]
[1 2 4 5]
[2 3 4 5]
[3 4 1 2]
[4 5 1 2]
[4 5 2 3]] | [
"Generate",
"reciprocal",
"configurations",
"sort",
"by",
"AB",
"and",
"optionally",
"append",
"to",
"configurations",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L640-L673 | train |
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.gen_configs_permutate | def gen_configs_permutate(self,
injections_raw,
only_same_dipole_length=False,
ignore_crossed_dipoles=False,
silent=False):
""" Create measurement configurations out of a pool of current
injections. Use only the provided dipoles for potential dipole
selection. This means that we always have reciprocal measurements.
Remove quadrupoles where electrodes are used both as current and voltage
dipoles.
Parameters
----------
injections_raw : Nx2 array
current injections
only_same_dipole_length : bool, optional
if True, only generate permutations for the same dipole length
ignore_crossed_dipoles : bool, optional
If True, potential dipoles that lie between current dipoles
(e.g. 1-4 3-5) are ignored. In this case it is possible to not have
full normal-reciprocal coverage.
silent: bool, optional
if True, do not print information on ignored configs (default:
False)
Returns
-------
configs : Nx4 array
quadrupoles generated out of the current injections
"""
injections = np.atleast_2d(injections_raw).astype(int)
N = injections.shape[0]
measurements = []
for injection in range(0, N):
dipole_length = np.abs(injections[injection][1] -
injections[injection][0])
# select all dipole EXCEPT for the injection dipole
for i in set(range(0, N)) - set([injection]):
test_dipole_length = np.abs(injections[i, :][1] -
injections[i, :][0])
if (only_same_dipole_length
and test_dipole_length != dipole_length):
continue
quadpole = np.array(
[injections[injection, :], injections[i, :]]).flatten()
if ignore_crossed_dipoles is True:
# check if we need to ignore this dipole
# Note: this could be wrong if electrode numbers are not
# ascending!
if (quadpole[2] > quadpole[0]
and quadpole[2] < quadpole[1]):
if not silent:
print('A - ignoring', quadpole)
elif (quadpole[3] > quadpole[0]
and quadpole[3] < quadpole[1]):
if not silent:
print('B - ignoring', quadpole)
else:
measurements.append(quadpole)
else:
# add every quadrupole
measurements.append(quadpole)
# check and remove double use of electrodes
filtered = []
for quadpole in measurements:
if (not set(quadpole[0:2]).isdisjoint(set(quadpole[2:4]))):
if not silent:
print('Ignoring quadrupole because of ',
'repeated electrode use:', quadpole)
else:
filtered.append(quadpole)
self.add_to_configs(filtered)
return np.array(filtered) | python | def gen_configs_permutate(self,
injections_raw,
only_same_dipole_length=False,
ignore_crossed_dipoles=False,
silent=False):
""" Create measurement configurations out of a pool of current
injections. Use only the provided dipoles for potential dipole
selection. This means that we always have reciprocal measurements.
Remove quadrupoles where electrodes are used both as current and voltage
dipoles.
Parameters
----------
injections_raw : Nx2 array
current injections
only_same_dipole_length : bool, optional
if True, only generate permutations for the same dipole length
ignore_crossed_dipoles : bool, optional
If True, potential dipoles that lie between current dipoles
(e.g. 1-4 3-5) are ignored. In this case it is possible to not have
full normal-reciprocal coverage.
silent: bool, optional
if True, do not print information on ignored configs (default:
False)
Returns
-------
configs : Nx4 array
quadrupoles generated out of the current injections
"""
injections = np.atleast_2d(injections_raw).astype(int)
N = injections.shape[0]
measurements = []
for injection in range(0, N):
dipole_length = np.abs(injections[injection][1] -
injections[injection][0])
# select all dipole EXCEPT for the injection dipole
for i in set(range(0, N)) - set([injection]):
test_dipole_length = np.abs(injections[i, :][1] -
injections[i, :][0])
if (only_same_dipole_length
and test_dipole_length != dipole_length):
continue
quadpole = np.array(
[injections[injection, :], injections[i, :]]).flatten()
if ignore_crossed_dipoles is True:
# check if we need to ignore this dipole
# Note: this could be wrong if electrode numbers are not
# ascending!
if (quadpole[2] > quadpole[0]
and quadpole[2] < quadpole[1]):
if not silent:
print('A - ignoring', quadpole)
elif (quadpole[3] > quadpole[0]
and quadpole[3] < quadpole[1]):
if not silent:
print('B - ignoring', quadpole)
else:
measurements.append(quadpole)
else:
# add every quadrupole
measurements.append(quadpole)
# check and remove double use of electrodes
filtered = []
for quadpole in measurements:
if (not set(quadpole[0:2]).isdisjoint(set(quadpole[2:4]))):
if not silent:
print('Ignoring quadrupole because of ',
'repeated electrode use:', quadpole)
else:
filtered.append(quadpole)
self.add_to_configs(filtered)
return np.array(filtered) | [
"def",
"gen_configs_permutate",
"(",
"self",
",",
"injections_raw",
",",
"only_same_dipole_length",
"=",
"False",
",",
"ignore_crossed_dipoles",
"=",
"False",
",",
"silent",
"=",
"False",
")",
":",
"injections",
"=",
"np",
".",
"atleast_2d",
"(",
"injections_raw",
")",
".",
"astype",
"(",
"int",
")",
"N",
"=",
"injections",
".",
"shape",
"[",
"0",
"]",
"measurements",
"=",
"[",
"]",
"for",
"injection",
"in",
"range",
"(",
"0",
",",
"N",
")",
":",
"dipole_length",
"=",
"np",
".",
"abs",
"(",
"injections",
"[",
"injection",
"]",
"[",
"1",
"]",
"-",
"injections",
"[",
"injection",
"]",
"[",
"0",
"]",
")",
"# select all dipole EXCEPT for the injection dipole",
"for",
"i",
"in",
"set",
"(",
"range",
"(",
"0",
",",
"N",
")",
")",
"-",
"set",
"(",
"[",
"injection",
"]",
")",
":",
"test_dipole_length",
"=",
"np",
".",
"abs",
"(",
"injections",
"[",
"i",
",",
":",
"]",
"[",
"1",
"]",
"-",
"injections",
"[",
"i",
",",
":",
"]",
"[",
"0",
"]",
")",
"if",
"(",
"only_same_dipole_length",
"and",
"test_dipole_length",
"!=",
"dipole_length",
")",
":",
"continue",
"quadpole",
"=",
"np",
".",
"array",
"(",
"[",
"injections",
"[",
"injection",
",",
":",
"]",
",",
"injections",
"[",
"i",
",",
":",
"]",
"]",
")",
".",
"flatten",
"(",
")",
"if",
"ignore_crossed_dipoles",
"is",
"True",
":",
"# check if we need to ignore this dipole",
"# Note: this could be wrong if electrode number are not",
"# ascending!",
"if",
"(",
"quadpole",
"[",
"2",
"]",
">",
"quadpole",
"[",
"0",
"]",
"and",
"quadpole",
"[",
"2",
"]",
"<",
"quadpole",
"[",
"1",
"]",
")",
":",
"if",
"not",
"silent",
":",
"print",
"(",
"'A - ignoring'",
",",
"quadpole",
")",
"elif",
"(",
"quadpole",
"[",
"3",
"]",
">",
"quadpole",
"[",
"0",
"]",
"and",
"quadpole",
"[",
"3",
"]",
"<",
"quadpole",
"[",
"1",
"]",
")",
":",
"if",
"not",
"silent",
":",
"print",
"(",
"'B - ignoring'",
",",
"quadpole",
")",
"else",
":",
"measurements",
".",
"append",
"(",
"quadpole",
")",
"else",
":",
"# add very quadpole",
"measurements",
".",
"append",
"(",
"quadpole",
")",
"# check and remove double use of electrodes",
"filtered",
"=",
"[",
"]",
"for",
"quadpole",
"in",
"measurements",
":",
"if",
"(",
"not",
"set",
"(",
"quadpole",
"[",
"0",
":",
"2",
"]",
")",
".",
"isdisjoint",
"(",
"set",
"(",
"quadpole",
"[",
"2",
":",
"4",
"]",
")",
")",
")",
":",
"if",
"not",
"silent",
":",
"print",
"(",
"'Ignoring quadrupole because of '",
",",
"'repeated electrode use:'",
",",
"quadpole",
")",
"else",
":",
"filtered",
".",
"append",
"(",
"quadpole",
")",
"self",
".",
"add_to_configs",
"(",
"filtered",
")",
"return",
"np",
".",
"array",
"(",
"filtered",
")"
] | Create measurement configurations out of a pool of current
injections. Use only the provided dipoles for potential dipole
selection. This means that we always have reciprocal measurements.
Remove quadrupoles where electrodes are used both as current and voltage
dipoles.
Parameters
----------
injections_raw : Nx2 array
current injections
only_same_dipole_length : bool, optional
if True, only generate permutations for the same dipole length
ignore_crossed_dipoles : bool, optional
If True, potential dipoles that lie between current dipoles
(e.g. 1-4 3-5) are ignored. In this case it is possible to not have
full normal-reciprocal coverage.
silent: bool, optional
if True, do not print information on ignored configs (default:
False)
Returns
-------
configs : Nx4 array
quadrupoles generated out of the current injections | [
"Create",
"measurement",
"configurations",
"out",
"of",
"a",
"pool",
"of",
"current",
"injections",
".",
"Use",
"only",
"the",
"provided",
"dipoles",
"for",
"potential",
"dipole",
"selection",
".",
"This",
"means",
"that",
"we",
"have",
"always",
"reciprocal",
"measurements",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L675-L753 | train |
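A minimal sketch with an invented pool of current injections; every AB pair is reused as a potential dipole, so each quadrupole has its reciprocal:
import numpy as np
from reda.configs.configManager import ConfigManager
cfg = ConfigManager(nr_of_electrodes=10)
injections = np.array([[1, 2], [3, 4], [5, 6]])
quads = cfg.gen_configs_permutate(
    injections, only_same_dipole_length=True, silent=True)
print(quads)  # Nx4 array of (A, B, M, N); quadrupoles with shared electrodes are removed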
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.remove_max_dipole_sep | def remove_max_dipole_sep(self, maxsep=10):
"""Remove configurations with dipole separations higher than `maxsep`.
Parameters
----------
maxsep : int
Maximum separation between both dipoles (the default is 10).
"""
sep = np.abs(self.configs[:, 1] - self.configs[:, 2])
self.configs = self.configs[sep <= maxsep] | python | def remove_max_dipole_sep(self, maxsep=10):
"""Remove configurations with dipole separations higher than `maxsep`.
Parameters
----------
maxsep : int
Maximum separation between both dipoles (the default is 10).
"""
sep = np.abs(self.configs[:, 1] - self.configs[:, 2])
self.configs = self.configs[sep <= maxsep] | [
"def",
"remove_max_dipole_sep",
"(",
"self",
",",
"maxsep",
"=",
"10",
")",
":",
"sep",
"=",
"np",
".",
"abs",
"(",
"self",
".",
"configs",
"[",
":",
",",
"1",
"]",
"-",
"self",
".",
"configs",
"[",
":",
",",
"2",
"]",
")",
"self",
".",
"configs",
"=",
"self",
".",
"configs",
"[",
"sep",
"<=",
"maxsep",
"]"
] | Remove configurations with dipole separations higher than `maxsep`.
Parameters
----------
maxsep : int
Maximum separation between both dipoles (the default is 10). | [
"Remove",
"configurations",
"with",
"dipole",
"separations",
"higher",
"than",
"maxsep",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L755-L764 | train |
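An illustrative filtering step (the maxsep value is chosen arbitrarily):
from reda.configs.configManager import ConfigManager
cfg = ConfigManager(nr_of_electrodes=30)
cfg.gen_dipole_dipole(skipc=0)
n_before = cfg.configs.shape[0]
cfg.remove_max_dipole_sep(maxsep=5)   # keeps only quadrupoles with abs(B - M) <= 5
print(n_before, cfg.configs.shape[0])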
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.to_pg_scheme | def to_pg_scheme(self, container=None, positions=None):
"""Convert the configuration to a pygimli measurement scheme
Parameters
----------
container: reda.containers.ERT.ERT
an ERT data container (we take the electrode positions from here)
positions: Nx3 numpy.ndarray, optional
electrode positions (x, y, z), used when no container is given
Returns
-------
data: pybert.DataContainerERT
Examples
--------
import numpy as np
from reda.configs.configManager import ConfigManager
configs = ConfigManager(nr_of_electrodes=48)
new_configs = configs.gen_dipole_dipole(skipc=2)
x = np.arange(0, 48, 1)
z = np.ones(48) * -1
y = np.zeros(48)
xyz = np.vstack((x, y, z)).T
scheme = configs.to_pg_scheme(positions=xyz)
print(scheme)
"""
if container is None and positions is None:
raise Exception('electrode positions are required for BERT export')
if container is not None and container.electrodes is None:
raise Exception('container does not contain electrode positions')
if container is not None and positions is not None:
raise Exception(
'only one of container OR positions must be provided')
if container is not None:
elec_positions = container.electrodes.values
elif positions is not None:
elec_positions = positions
opt_import("pybert", requiredFor="")
import pybert
# Initialize BERT DataContainer
data = pybert.DataContainerERT()
# Define electrodes from the provided positions
for nr, (x, y, z) in enumerate(elec_positions):
data.createSensor((x, y, z))
# Define number of measurements
data.resize(self.configs.shape[0])
for index, token in enumerate("abmn"):
data.set(token, self.configs[:, index].tolist())
# account for zero indexing
for token in "abmn":
data.set(token, data(token) - 1)
# np.vstack([data.get(x).array() for x in ("abmn")]).T
return data | python | def to_pg_scheme(self, container=None, positions=None):
"""Convert the configuration to a pygimli measurement scheme
Parameters
----------
container: reda.containers.ERT.ERT
an ERT data container (we take the electrode positions from here)
positions: Nx3 numpy.ndarray, optional
electrode positions (x, y, z), used when no container is given
Returns
-------
data: pybert.DataContainerERT
Examples
--------
import numpy as np
from reda.configs.configManager import ConfigManager
configs = ConfigManager(nr_of_electrodes=48)
new_configs = configs.gen_dipole_dipole(skipc=2)
x = np.arange(0, 48, 1)
z = np.ones(48) * -1
y = np.zeros(48)
xyz = np.vstack((x, y, z)).T
scheme = configs.to_pg_scheme(positions=xyz)
print(scheme)
"""
if container is None and positions is None:
raise Exception('electrode positions are required for BERT export')
if container is not None and container.electrodes is None:
raise Exception('container does not contain electrode positions')
if container is not None and positions is not None:
raise Exception(
'only one of container OR positions must be provided')
if container is not None:
elec_positions = container.electrodes.values
elif positions is not None:
elec_positions = positions
opt_import("pybert", requiredFor="")
import pybert
# Initialize BERT DataContainer
data = pybert.DataContainerERT()
# Define electrodes from the provided positions
for nr, (x, y, z) in enumerate(elec_positions):
data.createSensor((x, y, z))
# Define number of measurements
data.resize(self.configs.shape[0])
for index, token in enumerate("abmn"):
data.set(token, self.configs[:, index].tolist())
# account for zero indexing
for token in "abmn":
data.set(token, data(token) - 1)
# np.vstack([data.get(x).array() for x in ("abmn")]).T
return data | [
"def",
"to_pg_scheme",
"(",
"self",
",",
"container",
"=",
"None",
",",
"positions",
"=",
"None",
")",
":",
"if",
"container",
"is",
"None",
"and",
"positions",
"is",
"None",
":",
"raise",
"Exception",
"(",
"'electrode positions are required for BERT export'",
")",
"if",
"container",
"is",
"not",
"None",
"and",
"container",
".",
"electrodes",
"is",
"None",
":",
"raise",
"Exception",
"(",
"'container does not contain electrode positions'",
")",
"if",
"container",
"is",
"not",
"None",
"and",
"positions",
"is",
"not",
"None",
":",
"raise",
"Exception",
"(",
"'only one of container OR positions must be provided'",
")",
"if",
"container",
"is",
"not",
"None",
":",
"elec_positions",
"=",
"container",
".",
"electrodes",
".",
"values",
"elif",
"positions",
"is",
"not",
"None",
":",
"elec_positions",
"=",
"positions",
"opt_import",
"(",
"\"pybert\"",
",",
"requiredFor",
"=",
"\"\"",
")",
"import",
"pybert",
"# Initialize BERT DataContainer",
"data",
"=",
"pybert",
".",
"DataContainerERT",
"(",
")",
"# Define electrodes (48 electrodes spaced by 0.5 m)",
"for",
"nr",
",",
"(",
"x",
",",
"y",
",",
"z",
")",
"in",
"enumerate",
"(",
"elec_positions",
")",
":",
"data",
".",
"createSensor",
"(",
"(",
"x",
",",
"y",
",",
"z",
")",
")",
"# Define number of measurements",
"data",
".",
"resize",
"(",
"self",
".",
"configs",
".",
"shape",
"[",
"0",
"]",
")",
"for",
"index",
",",
"token",
"in",
"enumerate",
"(",
"\"abmn\"",
")",
":",
"data",
".",
"set",
"(",
"token",
",",
"self",
".",
"configs",
"[",
":",
",",
"index",
"]",
".",
"tolist",
"(",
")",
")",
"# account for zero indexing",
"for",
"token",
"in",
"\"abmn\"",
":",
"data",
".",
"set",
"(",
"token",
",",
"data",
"(",
"token",
")",
"-",
"1",
")",
"# np.vstack([data.get(x).array() for x in (\"abmn\")]).T",
"return",
"data"
] | Convert the configuration to a pygimli measurement scheme
Parameters
----------
container: reda.containers.ERT.ERT
an ERT data container (we take the electrode positions from here)
positions: Nx3 numpy.ndarray, optional
electrode positions (x, y, z), used when no container is given
Returns
-------
data: pybert.DataContainerERT
Examples
--------
import numpy as np
from reda.configs.configManager import ConfigManager
configs = ConfigManager(nr_of_electrodes=48)
new_configs = configs.gen_dipole_dipole(skipc=2)
x = np.arange(0, 48, 1)
z = np.ones(48) * -1
y = np.zeros(48)
xyz = np.vstack((x, y, z)).T
scheme = configs.to_pg_scheme(positions=xyz)
print(scheme) | [
"Convert",
"the",
"configuration",
"to",
"a",
"pygimli",
"measurement",
"scheme"
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L771-L836 | train |
geophysics-ubonn/reda | lib/reda/configs/configManager.py | ConfigManager.to_iris_syscal | def to_iris_syscal(self, filename):
"""Export to IRIS Instrument configuration file
Parameters
----------
filename : string
Path to output filename
"""
with open(filename, 'w') as fid:
# fprintf(fod, '#\t X\t Y\t Z\n');
fid.write('#\t X\t Y\t Z\n')
# fprintf(fod, '%d\t %.1f\t %d\t %d\n', D');
# loop over electrodes and assign increasing x-positions
# TODO: use proper electrode positions, if available
for nr in range(0, self.configs.max()):
fid.write('{} {} 0 0\n'.format(nr + 1, nr))
# fprintf(fod, '#\t A\t B\t M\t N\n');
fid.write('#\t A\t B\t M\t N\n')
# fprintf(fod, '%d\t %d\t %d\t %d\t %d\n', C');
for nr, config in enumerate(self.configs):
fid.write('{} {} {} {} {}\n'.format(nr + 1, *config)) | python | def to_iris_syscal(self, filename):
"""Export to IRIS Instrument configuration file
Parameters
----------
filename : string
Path to output filename
"""
with open(filename, 'w') as fid:
# fprintf(fod, '#\t X\t Y\t Z\n');
fid.write('#\t X\t Y\t Z\n')
# fprintf(fod, '%d\t %.1f\t %d\t %d\n', D');
# loop over electrodes and assign increasing x-positions
# TODO: use proper electrode positions, if available
for nr in range(0, self.configs.max()):
fid.write('{} {} 0 0\n'.format(nr + 1, nr))
# fprintf(fod, '#\t A\t B\t M\t N\n');
fid.write('#\t A\t B\t M\t N\n')
# fprintf(fod, '%d\t %d\t %d\t %d\t %d\n', C');
for nr, config in enumerate(self.configs):
fid.write('{} {} {} {} {}\n'.format(nr + 1, *config)) | [
"def",
"to_iris_syscal",
"(",
"self",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"fid",
":",
"# fprintf(fod, '#\\t X\\t Y\\t Z\\n');",
"fid",
".",
"write",
"(",
"'#\\t X\\t Y\\t Z\\n'",
")",
"# fprintf(fod, '%d\\t %.1f\\t %d\\t %d\\n', D');",
"# loop over electrodes and assign increasing x-positions",
"# TODO: use proper electrode positions, if available",
"for",
"nr",
"in",
"range",
"(",
"0",
",",
"self",
".",
"configs",
".",
"max",
"(",
")",
")",
":",
"fid",
".",
"write",
"(",
"'{} {} 0 0\\n'",
".",
"format",
"(",
"nr",
"+",
"1",
",",
"nr",
")",
")",
"# fprintf(fod, '#\\t A\\t B\\t M\\t N\\n');",
"fid",
".",
"write",
"(",
"'#\\t A\\t B\\t M\\t N\\n'",
")",
"# fprintf(fod, '%d\\t %d\\t %d\\t %d\\t %d\\n', C');",
"for",
"nr",
",",
"config",
"in",
"enumerate",
"(",
"self",
".",
"configs",
")",
":",
"fid",
".",
"write",
"(",
"'{} {} {} {} {}\\n'",
".",
"format",
"(",
"nr",
"+",
"1",
",",
"*",
"config",
")",
")"
] | Export to IRIS Instrument configuration file
Parameters
----------
filename : string
Path to output filename | [
"Export",
"to",
"IRIS",
"Instrument",
"configuration",
"file"
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L838-L860 | train |
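A short sketch of writing a configuration file for the Syscal (the output filename is arbitrary; the electrode coordinates are the placeholder values written by the function itself):
from reda.configs.configManager import ConfigManager
cfg = ConfigManager(nr_of_electrodes=24)
cfg.gen_dipole_dipole(skipc=0)
cfg.to_iris_syscal('dd_sequence.txt')
# the file lists one X/Y/Z line per electrode, followed by one A/B/M/N line per quadrupole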
GearPlug/payu-python | payu/recurring.py | Recurring.create_plan | def create_plan(self, *, plan_code, description, interval, interval_count, max_payments_allowed,
payment_attempts_delay, plan_value, plan_tax, plan_tax_return_base, currency,
max_payment_attempts=None, max_pending_payments=None, trial_days=None):
"""
Creating a new plan for subscriptions associated with the merchant.
Args:
plan_code: Unique code assigned by the merchant to the plan in order to identify it.
Alphanumeric. Min: 1 Max: 255.
description: Plan description.
Alphanumeric. Min: 1 Max: 255.
interval: Interval that defines how often the subscription payment is performed.
The possible values are: DAY, WEEK, MONTH and YEAR.
Alphanumeric. Min: 3 Max: 5.
interval_count: Interval count that defines how often the subscription payment is performed.
Numeric.
max_payments_allowed: Total amount of payments for the subscription.
Numeric.
payment_attempts_delay: Total amount of waiting days between the payment attempts of the subscription.
Numeric.
plan_value: total value of the plan.
Alphanumeric. Min: 1 Max: 255.
plan_tax: tax value associated to the value of the plan.
Alphanumeric. Min: 1 Max: 255.
plan_tax_return_base: tax return base value associated to the value of the plan.
Alphanumeric. Min: 1 Max: 255.
currency: The ISO currency code associated with the amount.
http://developers.payulatam.com/en/api/variables_table.html
max_payment_attempts: Total amount of payment attempts performed when a subscription payment is declined.
Numeric. Max: 3.
max_pending_payments: Total amount of pending payments that a subscription can have before it is cancelled.
Numeric.
trial_days: Total amount of trial days of the subscription.
Numeric.
Returns:
"""
payload = {
"accountId": self.client.account_id,
"planCode": plan_code,
"description": description,
"interval": interval,
"intervalCount": interval_count,
"maxPaymentsAllowed": max_payments_allowed,
"paymentAttemptsDelay": payment_attempts_delay,
"additionalValues": [
{
"name": "PLAN_VALUE",
"value": plan_value,
"currency": currency
},
{
"name": "PLAN_TAX",
"value": plan_tax,
"currency": currency
},
{
"name": "PLAN_TAX_RETURN_BASE",
"value": plan_tax_return_base,
"currency": currency
}
],
"maxPaymentAttempts": max_payment_attempts,
"maxPendingPayments": max_pending_payments,
"trialDays": trial_days
}
return self.client._post(self.url + 'plans', json=payload, headers=self.get_headers()) | python | def create_plan(self, *, plan_code, description, interval, interval_count, max_payments_allowed,
payment_attempts_delay, plan_value, plan_tax, plan_tax_return_base, currency,
max_payment_attempts=None, max_pending_payments=None, trial_days=None):
"""
Creating a new plan for subscriptions associated with the merchant.
Args:
plan_code: Unique code assigned by the merchant to the plan in order to identify it.
Alphanumeric. Min: 1 Max: 255.
description: Plan description.
Alphanumeric. Min: 1 Max: 255.
interval: Interval that defines how often the subscription payment is performed.
The possible values are: DAY, WEEK, MONTH and YEAR.
Alphanumeric. Min: 3 Max: 5.
interval_count: Interval count that defines how often the subscription payment is performed.
Numeric.
max_payments_allowed: Total amount of payments for the subscription.
Numeric.
payment_attempts_delay: Total amount of waiting days between the payment attempts of the subscription.
Numeric.
plan_value: total value of the plan.
Alphanumeric. Min: 1 Max: 255.
plan_tax: tax value associated to the value of the plan.
Alphanumeric. Min: 1 Max: 255.
plan_tax_return_base: tax return base value associated to the value of the plan.
Alphanumeric. Min: 1 Max: 255.
currency: The ISO currency code associated with the amount.
http://developers.payulatam.com/en/api/variables_table.html
max_payment_attempts: Total amount of payment attempts performed when a subscription payment is declined.
Numeric. Max: 3.
max_pending_payments: Total amount of pending payments that a subscription can have before it is cancelled.
Numeric.
trial_days: Total amount of trial days of the subscription.
Numeric.
Returns:
"""
payload = {
"accountId": self.client.account_id,
"planCode": plan_code,
"description": description,
"interval": interval,
"intervalCount": interval_count,
"maxPaymentsAllowed": max_payments_allowed,
"paymentAttemptsDelay": payment_attempts_delay,
"additionalValues": [
{
"name": "PLAN_VALUE",
"value": plan_value,
"currency": currency
},
{
"name": "PLAN_TAX",
"value": plan_tax,
"currency": currency
},
{
"name": "PLAN_TAX_RETURN_BASE",
"value": plan_tax_return_base,
"currency": currency
}
],
"maxPaymentAttempts": max_payment_attempts,
"maxPendingPayments": max_pending_payments,
"trialDays": trial_days
}
return self.client._post(self.url + 'plans', json=payload, headers=self.get_headers()) | [
"def",
"create_plan",
"(",
"self",
",",
"*",
",",
"plan_code",
",",
"description",
",",
"interval",
",",
"interval_count",
",",
"max_payments_allowed",
",",
"payment_attempts_delay",
",",
"plan_value",
",",
"plan_tax",
",",
"plan_tax_return_base",
",",
"currency",
",",
"max_payment_attempts",
"=",
"None",
",",
"max_pending_payments",
"=",
"None",
",",
"trial_days",
"=",
"None",
")",
":",
"payload",
"=",
"{",
"\"accountId\"",
":",
"self",
".",
"client",
".",
"account_id",
",",
"\"planCode\"",
":",
"plan_code",
",",
"\"description\"",
":",
"description",
",",
"\"interval\"",
":",
"interval",
",",
"\"intervalCount\"",
":",
"interval_count",
",",
"\"maxPaymentsAllowed\"",
":",
"max_payments_allowed",
",",
"\"paymentAttemptsDelay\"",
":",
"payment_attempts_delay",
",",
"\"additionalValues\"",
":",
"[",
"{",
"\"name\"",
":",
"\"PLAN_VALUE\"",
",",
"\"value\"",
":",
"plan_value",
",",
"\"currency\"",
":",
"currency",
"}",
",",
"{",
"\"name\"",
":",
"\"PLAN_TAX\"",
",",
"\"value\"",
":",
"plan_tax",
",",
"\"currency\"",
":",
"currency",
"}",
",",
"{",
"\"name\"",
":",
"\"PLAN_TAX_RETURN_BASE\"",
",",
"\"value\"",
":",
"plan_tax_return_base",
",",
"\"currency\"",
":",
"currency",
"}",
"]",
",",
"\"maxPaymentAttempts\"",
":",
"max_payment_attempts",
",",
"\"maxPendingPayments\"",
":",
"max_pending_payments",
",",
"\"trialDays\"",
":",
"trial_days",
"}",
"return",
"self",
".",
"client",
".",
"_post",
"(",
"self",
".",
"url",
"+",
"'plans'",
",",
"json",
"=",
"payload",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Creating a new plan for subscriptions associated with the merchant.
Args:
plan_code: Unique code assigned by the merchant to the plan in order to identify it.
Alphanumeric. Min: 1 Max: 255.
description: Plan description.
Alphanumeric. Min: 1 Max: 255.
interval: Interval that defines how often the subscription payment is performed.
The possible values are: DAY, WEEK, MONTH and YEAR.
Alphanumeric. Min: 3 Max: 5.
interval_count: Interval count that defines how often the subscription payment is performed.
Numeric.
max_payments_allowed: Total amount of payments for the subscription.
Numeric.
payment_attempts_delay: Total amount of waiting days between the payment attempts of the subscription.
Numeric.
plan_value: total value of the plan.
Alphanumeric. Min: 1 Max: 255.
plan_tax: tax value associated to the value of the plan.
Alphanumeric. Min: 1 Max: 255.
plan_tax_return_base: tax return base value associated to the value of the plan.
Alphanumeric. Min: 1 Max: 255.
currency: The ISO currency code associated with the amount.
http://developers.payulatam.com/en/api/variables_table.html
max_payment_attempts: Total amount of payment attempts performed when a subscription payment is declined.
Numeric. Max: 3.
max_pending_payments: Total amount of pending payments that a subscription can have before it is cancelled.
Numeric.
trial_days: Total amount of trial days of the subscription.
Numeric.
Returns: | [
"Creating",
"a",
"new",
"plan",
"for",
"subscriptions",
"associated",
"with",
"the",
"merchant",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L10-L90 | train |
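For orientation, a hypothetical call using the documented parameters (all values are invented; `recurring` stands for an initialized Recurring instance, whose construction is not shown in this excerpt):
response = recurring.create_plan(
    plan_code='gold-monthly',                 # merchant-defined identifier
    description='Gold plan, billed monthly',
    interval='MONTH',
    interval_count=1,
    max_payments_allowed=12,
    payment_attempts_delay=2,
    plan_value='30000',
    plan_tax='4790',
    plan_tax_return_base='25210',
    currency='COP',
    trial_days=7,
)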
GearPlug/payu-python | payu/recurring.py | Recurring.get_plan | def get_plan(self, plan_code):
"""
Check all the information of a plan for subscriptions associated with the merchant.
Args:
plan_code: Plan’s identification code for the merchant.
Returns:
"""
return self.client._get(self.url + 'plans/{}'.format(plan_code), headers=self.get_headers()) | python | def get_plan(self, plan_code):
"""
Check all the information of a plan for subscriptions associated with the merchant.
Args:
plan_code: Plan’s identification code for the merchant.
Returns:
"""
return self.client._get(self.url + 'plans/{}'.format(plan_code), headers=self.get_headers()) | [
"def",
"get_plan",
"(",
"self",
",",
"plan_code",
")",
":",
"return",
"self",
".",
"client",
".",
"_get",
"(",
"self",
".",
"url",
"+",
"'plans/{}'",
".",
"format",
"(",
"plan_code",
")",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Check all the information of a plan for subscriptions associated with the merchant.
Args:
plan_code: Plan’s identification code for the merchant.
Returns: | [
"Check",
"all",
"the",
"information",
"of",
"a",
"plan",
"for",
"subscriptions",
"associated",
"with",
"the",
"merchant",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L92-L102 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.delete_plan | def delete_plan(self, plan_code):
"""
Delete an entire subscription plan associated with the merchant.
Args:
plan_code: Plan’s identification code for the merchant.
Returns:
"""
return self.client._delete(self.url + 'plans/{}'.format(plan_code), headers=self.get_headers()) | python | def delete_plan(self, plan_code):
"""
Delete an entire subscription plan associated with the merchant.
Args:
plan_code: Plan’s identification code for the merchant.
Returns:
"""
return self.client._delete(self.url + 'plans/{}'.format(plan_code), headers=self.get_headers()) | [
"def",
"delete_plan",
"(",
"self",
",",
"plan_code",
")",
":",
"return",
"self",
".",
"client",
".",
"_delete",
"(",
"self",
".",
"url",
"+",
"'plans/{}'",
".",
"format",
"(",
"plan_code",
")",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Delete an entire subscription plan associated with the merchant.
Args:
plan_code: Plan’s identification code for the merchant.
Returns: | [
"Delete",
"an",
"entire",
"subscription",
"plan",
"associated",
"with",
"the",
"merchant",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L107-L117 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.create_customer | def create_customer(self, *, full_name, email):
"""
Creation of a customer in the system.
Args:
full_name: Customer's complete name.
Alphanumeric. Max: 255.
email: Customer's email address.
Alphanumeric. Max: 255.
Returns:
"""
payload = {
"fullName": full_name,
"email": email
}
return self.client._post(self.url + 'customers', json=payload, headers=self.get_headers()) | python | def create_customer(self, *, full_name, email):
"""
Creation of a customer in the system.
Args:
full_name: Customer's complete name.
Alphanumeric. Max: 255.
email: Customer's email address.
Alphanumeric. Max: 255.
Returns:
"""
payload = {
"fullName": full_name,
"email": email
}
return self.client._post(self.url + 'customers', json=payload, headers=self.get_headers()) | [
"def",
"create_customer",
"(",
"self",
",",
"*",
",",
"full_name",
",",
"email",
")",
":",
"payload",
"=",
"{",
"\"fullName\"",
":",
"full_name",
",",
"\"email\"",
":",
"email",
"}",
"return",
"self",
".",
"client",
".",
"_post",
"(",
"self",
".",
"url",
"+",
"'customers'",
",",
"json",
"=",
"payload",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Creation of a customer in the system.
Args:
full_name: Customer's complete name.
Alphanumeric. Max: 255.
email: Customer's email address.
Alphanumeric. Max: 255.
Returns: | [
"Creation",
"of",
"a",
"customer",
"in",
"the",
"system",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L119-L137 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.get_customer | def get_customer(self, customer_id):
"""
Queries the information related to the customer.
Args:
customer_id: Identifier of the client from which you want to find the associated information.
Returns:
"""
return self.client._get(self.url + 'customers/{}'.format(customer_id), headers=self.get_headers()) | python | def get_customer(self, customer_id):
"""
Queries the information related to the customer.
Args:
customer_id: Identifier of the client from which you want to find the associated information.
Returns:
"""
return self.client._get(self.url + 'customers/{}'.format(customer_id), headers=self.get_headers()) | [
"def",
"get_customer",
"(",
"self",
",",
"customer_id",
")",
":",
"return",
"self",
".",
"client",
".",
"_get",
"(",
"self",
".",
"url",
"+",
"'customers/{}'",
".",
"format",
"(",
"customer_id",
")",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Queries the information related to the customer.
Args:
customer_id: Identifier of the client from which you want to find the associated information.
Returns: | [
"Queries",
"the",
"information",
"related",
"to",
"the",
"customer",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L139-L149 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.delete_customer | def delete_customer(self, customer_id):
"""
Removes a user from the system.
Args:
customer_id: Identifier of the client to be deleted.
Returns:
"""
return self.client._delete(self.url + 'customers/{}'.format(customer_id), headers=self.get_headers()) | python | def delete_customer(self, customer_id):
"""
Removes a user from the system.
Args:
customer_id: Identifier of the client to be deleted.
Returns:
"""
return self.client._delete(self.url + 'customers/{}'.format(customer_id), headers=self.get_headers()) | [
"def",
"delete_customer",
"(",
"self",
",",
"customer_id",
")",
":",
"return",
"self",
".",
"client",
".",
"_delete",
"(",
"self",
".",
"url",
"+",
"'customers/{}'",
".",
"format",
"(",
"customer_id",
")",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Removes a user from the system.
Args:
customer_id: Identifier of the client to be deleted.
Returns: | [
"Removes",
"a",
"user",
"from",
"the",
"system",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L154-L164 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.create_subscription | def create_subscription(self, *, customer_id, credit_card_token, plan_code, quantity=None, installments=None,
trial_days=None, immediate_payment=None, extra1=None, extra2=None, delivery_address=None,
notify_url=None, recurring_bill_items=None):
"""
Creating a new subscription of a client to a plan.
Args:
customer_id: Customer that will be associated to the subscription.
You can find more information in the "Customer" section of this page.
credit_card_token: Customer's credit card that is selected to make the payment.
You can find more information in the "Credit card" section of this page.
plan_code: Plan that will be associated to the subscription.
You can find more information in the "Plan" section of this page.
quantity: Total amount of plans that will be acquired with the subscription.
Numeric.
installments: Total amount of installments to defer the payment.
Numeric.
trial_days: Total amount of trial days of the subscription.
This variable has preference over the plan's trial days.
Numeric.
immediate_payment:
extra1:
extra2:
delivery_address:
notify_url:
recurring_bill_items:
Returns:
"""
payload = {
"quantity": quantity,
"installments": installments,
"trialDays": trial_days,
"immediatePayment": immediate_payment,
"extra1": extra1,
"extra2": extra2,
"customer": {
"id": customer_id,
"creditCards": [
{
"token": credit_card_token
}
]
},
"plan": {
"planCode": plan_code
},
"deliveryAddress": delivery_address,
"notifyUrl": notify_url,
"recurringBillItems": recurring_bill_items
}
return self.client._post(self.url + 'subscriptions', json=payload, headers=self.get_headers()) | python | def create_subscription(self, *, customer_id, credit_card_token, plan_code, quantity=None, installments=None,
trial_days=None, immediate_payment=None, extra1=None, extra2=None, delivery_address=None,
notify_url=None, recurring_bill_items=None):
"""
Creating a new subscription of a client to a plan.
Args:
customer_id: Customer that will be associated to the subscription.
You can find more information in the "Customer" section of this page.
credit_card_token: Customer's credit card that is selected to make the payment.
You can find more information in the "Credit card" section of this page.
plan_code: Plan that will be associated to the subscription.
You can find more information in the "Plan" section of this page.
quantity: Total amount of plans that will be acquired with the subscription.
Numeric.
installments: Total amount of installments to defer the payment.
Numeric.
trial_days: Total amount of trial days of the subscription.
This variable has preference over the plan's trial days.
Numeric.
immediate_payment:
extra1:
extra2:
delivery_address:
notify_url:
recurring_bill_items:
Returns:
"""
payload = {
"quantity": quantity,
"installments": installments,
"trialDays": trial_days,
"immediatePayment": immediate_payment,
"extra1": extra1,
"extra2": extra2,
"customer": {
"id": customer_id,
"creditCards": [
{
"token": credit_card_token
}
]
},
"plan": {
"planCode": plan_code
},
"deliveryAddress": delivery_address,
"notifyUrl": notify_url,
"recurringBillItems": recurring_bill_items
}
return self.client._post(self.url + 'subscriptions', json=payload, headers=self.get_headers()) | [
"def",
"create_subscription",
"(",
"self",
",",
"*",
",",
"customer_id",
",",
"credit_card_token",
",",
"plan_code",
",",
"quantity",
"=",
"None",
",",
"installments",
"=",
"None",
",",
"trial_days",
"=",
"None",
",",
"immediate_payment",
"=",
"None",
",",
"extra1",
"=",
"None",
",",
"extra2",
"=",
"None",
",",
"delivery_address",
"=",
"None",
",",
"notify_url",
"=",
"None",
",",
"recurring_bill_items",
"=",
"None",
")",
":",
"payload",
"=",
"{",
"\"quantity\"",
":",
"quantity",
",",
"\"installments\"",
":",
"installments",
",",
"\"trialDays\"",
":",
"trial_days",
",",
"\"immediatePayment\"",
":",
"immediate_payment",
",",
"\"extra1\"",
":",
"extra1",
",",
"\"extra2\"",
":",
"extra2",
",",
"\"customer\"",
":",
"{",
"\"id\"",
":",
"customer_id",
",",
"\"creditCards\"",
":",
"[",
"{",
"\"token\"",
":",
"credit_card_token",
"}",
"]",
"}",
",",
"\"plan\"",
":",
"{",
"\"planCode\"",
":",
"plan_code",
"}",
",",
"\"deliveryAddress\"",
":",
"delivery_address",
",",
"\"notifyUrl\"",
":",
"notify_url",
",",
"\"recurringBillItems\"",
":",
"recurring_bill_items",
"}",
"return",
"self",
".",
"client",
".",
"_post",
"(",
"self",
".",
"url",
"+",
"'subscriptions'",
",",
"json",
"=",
"payload",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Creating a new subscription of a client to a plan.
Args:
customer_id: Customer that will be associated to the subscription.
You can find more information in the "Customer" section of this page.
credit_card_token: Customer's credit card that is selected to make the payment.
You can find more information in the "Credit card" section of this page.
plan_code: Plan that will be associated to the subscription.
You can find more information in the "Plan" section of this page.
quantity: Total amount of plans that will be acquired with the subscription.
Numeric.
installments: Total amount of installments to defer the payment.
Numeric.
trial_days: Total amount of trial days of the subscription.
This variable has preference over the plan's trial days.
Numeric.
immediate_payment:
extra1:
extra2:
delivery_address:
notify_url:
recurring_bill_items:
Returns: | [
"Creating",
"a",
"new",
"subscription",
"of",
"a",
"client",
"to",
"a",
"plan",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L240-L303 | train |
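The create_subscription record above mostly documents how the request body is assembled. The following self-contained sketch (not the library itself) rebuilds the same JSON structure with made-up identifiers; the helper name and every value are illustrative assumptions, not data from the record.

import json

def build_subscription_payload(customer_id, credit_card_token, plan_code,
                               quantity=None, installments=None, trial_days=None):
    # Mirrors the payload shape shown in the record: customer + stored card token + plan.
    return {
        "quantity": quantity,
        "installments": installments,
        "trialDays": trial_days,
        "customer": {
            "id": customer_id,
            "creditCards": [{"token": credit_card_token}],
        },
        "plan": {"planCode": plan_code},
    }

print(json.dumps(build_subscription_payload("cust-1", "card-token-1", "PLAN-A", quantity=1), indent=2))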
GearPlug/payu-python | payu/recurring.py | Recurring.get_subscription | def get_subscription(self, subscription_id):
"""
Check the basic information associated with the specified subscription.
Args:
subscription_id: Identification of the subscription.
Returns:
"""
return self.client._put(self.url + 'subscriptions/{}'.format(subscription_id), headers=self.get_headers()) | python | def get_subscription(self, subscription_id):
"""
Check the basic information associated with the specified subscription.
Args:
subscription_id: Identification of the subscription.
Returns:
"""
return self.client._put(self.url + 'subscriptions/{}'.format(subscription_id), headers=self.get_headers()) | [
"def",
"get_subscription",
"(",
"self",
",",
"subscription_id",
")",
":",
"return",
"self",
".",
"client",
".",
"_put",
"(",
"self",
".",
"url",
"+",
"'subscriptions/{}'",
".",
"format",
"(",
"subscription_id",
")",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Check the basic information associated with the specified subscription.
Args:
subscription_id: Identification of the subscription.
Returns: | [
"Check",
"the",
"basic",
"information",
"associated",
"with",
"the",
"specified",
"subscription",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L305-L315 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.update_subscription | def update_subscription(self, *, subscription_id, credit_card_token):
"""
Update information associated with the specified subscription. At the moment it is only possible
to update the token of the credit card to which the charge of the subscription is made.
Args:
subscription_id: Identification of the subscription.
credit_card_token:
Returns:
"""
payload = {
"creditCardToken": credit_card_token
}
fmt = 'subscriptions/{}'.format(subscription_id)
return self.client._put(self.url + fmt, json=payload, headers=self.get_headers()) | python | def update_subscription(self, *, subscription_id, credit_card_token):
"""
Update information associated with the specified subscription. At the moment it is only possible
to update the token of the credit card to which the charge of the subscription is made.
Args:
subscription_id: Identification of the subscription.
credit_card_token:
Returns:
"""
payload = {
"creditCardToken": credit_card_token
}
fmt = 'subscriptions/{}'.format(subscription_id)
return self.client._put(self.url + fmt, json=payload, headers=self.get_headers()) | [
"def",
"update_subscription",
"(",
"self",
",",
"*",
",",
"subscription_id",
",",
"credit_card_token",
")",
":",
"payload",
"=",
"{",
"\"creditCardToken\"",
":",
"credit_card_token",
"}",
"fmt",
"=",
"'subscriptions/{}'",
".",
"format",
"(",
"subscription_id",
")",
"return",
"self",
".",
"client",
".",
"_put",
"(",
"self",
".",
"url",
"+",
"fmt",
",",
"json",
"=",
"payload",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Update information associated with the specified subscription. At the moment it is only possible
to update the token of the credit card to which the charge of the subscription is made.
Args:
subscription_id: Identification of the subscription.
credit_card_token:
Returns: | [
"Update",
"information",
"associated",
"with",
"the",
"specified",
"subscription",
".",
"At",
"the",
"moment",
"it",
"is",
"only",
"possible",
"to",
"update",
"the",
"token",
"of",
"the",
"credit",
"card",
"to",
"which",
"the",
"charge",
"of",
"the",
"subscription",
"is",
"made",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L317-L333 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.delete_subscription | def delete_subscription(self, subscription_id):
"""
Unsubscribe, delete the relationship of the customer with the plan.
Args:
subscription_id: Identification of the subscription.
Returns:
"""
return self.client._delete(self.url + 'subscriptions/{}'.format(subscription_id), headers=self.get_headers()) | python | def delete_subscription(self, subscription_id):
"""
Unsubscribe, delete the relationship of the customer with the plan.
Args:
subscription_id: Identification of the subscription.
Returns:
"""
return self.client._delete(self.url + 'subscriptions/{}'.format(subscription_id), headers=self.get_headers()) | [
"def",
"delete_subscription",
"(",
"self",
",",
"subscription_id",
")",
":",
"return",
"self",
".",
"client",
".",
"_delete",
"(",
"self",
".",
"url",
"+",
"'subscriptions/{}'",
".",
"format",
"(",
"subscription_id",
")",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Unsubscribe, delete the relationship of the customer with the plan.
Args:
subscription_id: Identification of the subscription.
Returns: | [
"Unsubscribe",
"delete",
"the",
"relationship",
"of",
"the",
"customer",
"with",
"the",
"plan",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L335-L345 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.create_additional_charge | def create_additional_charge(self, *, subscription_id, description, plan_value, plan_tax, plan_tax_return_base,
currency):
"""
Adds extra charges to the respective invoice for the current period.
Args:
subscription_id: Identification of the subscription
description:
plan_value:
plan_tax:
plan_tax_return_base:
currency:
Returns:
"""
payload = {
"description": description,
"additionalValues": [
{
"name": "ITEM_VALUE",
"value": plan_value,
"currency": currency
},
{
"name": "ITEM_TAX",
"value": plan_tax,
"currency": currency
},
{
"name": "ITEM_TAX_RETURN_BASE",
"value": plan_tax_return_base,
"currency": currency
}
]
}
fmt = 'subscriptions/{}/recurringBillItems'.format(subscription_id)
return self.client._post(self.url + fmt, json=payload, headers=self.get_headers()) | python | def create_additional_charge(self, *, subscription_id, description, plan_value, plan_tax, plan_tax_return_base,
currency):
"""
Adds extra charges to the respective invoice for the current period.
Args:
subscription_id: Identification of the subscription
description:
plan_value:
plan_tax:
plan_tax_return_base:
currency:
Returns:
"""
payload = {
"description": description,
"additionalValues": [
{
"name": "ITEM_VALUE",
"value": plan_value,
"currency": currency
},
{
"name": "ITEM_TAX",
"value": plan_tax,
"currency": currency
},
{
"name": "ITEM_TAX_RETURN_BASE",
"value": plan_tax_return_base,
"currency": currency
}
]
}
fmt = 'subscriptions/{}/recurringBillItems'.format(subscription_id)
return self.client._post(self.url + fmt, json=payload, headers=self.get_headers()) | [
"def",
"create_additional_charge",
"(",
"self",
",",
"*",
",",
"subscription_id",
",",
"description",
",",
"plan_value",
",",
"plan_tax",
",",
"plan_tax_return_base",
",",
"currency",
")",
":",
"payload",
"=",
"{",
"\"description\"",
":",
"description",
",",
"\"additionalValues\"",
":",
"[",
"{",
"\"name\"",
":",
"\"ITEM_VALUE\"",
",",
"\"value\"",
":",
"plan_value",
",",
"\"currency\"",
":",
"currency",
"}",
",",
"{",
"\"name\"",
":",
"\"ITEM_TAX\"",
",",
"\"value\"",
":",
"plan_tax",
",",
"\"currency\"",
":",
"currency",
"}",
",",
"{",
"\"name\"",
":",
"\"ITEM_TAX_RETURN_BASE\"",
",",
"\"value\"",
":",
"plan_tax_return_base",
",",
"\"currency\"",
":",
"currency",
"}",
"]",
"}",
"fmt",
"=",
"'subscriptions/{}/recurringBillItems'",
".",
"format",
"(",
"subscription_id",
")",
"return",
"self",
".",
"client",
".",
"_post",
"(",
"self",
".",
"url",
"+",
"fmt",
",",
"json",
"=",
"payload",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Adds extra charges to the respective invoice for the current period.
Args:
subscription_id: Identification of the subscription
description:
plan_value:
plan_tax:
plan_tax_return_base:
currency:
Returns: | [
"Adds",
"extra",
"charges",
"to",
"the",
"respective",
"invoice",
"for",
"the",
"current",
"period",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L347-L384 | train |
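As a quick companion to the create_additional_charge record above, here is a minimal stand-alone sketch of the additionalValues list it builds (ITEM_VALUE, ITEM_TAX and ITEM_TAX_RETURN_BASE, all in one currency); the amounts and currency code are illustrative assumptions.

def build_additional_values(plan_value, plan_tax, plan_tax_return_base, currency):
    # One entry per named amount, all expressed in the same currency.
    return [
        {"name": "ITEM_VALUE", "value": plan_value, "currency": currency},
        {"name": "ITEM_TAX", "value": plan_tax, "currency": currency},
        {"name": "ITEM_TAX_RETURN_BASE", "value": plan_tax_return_base, "currency": currency},
    ]

print(build_additional_values(20000, 3193, 16807, "COP"))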
GearPlug/payu-python | payu/recurring.py | Recurring.get_additional_charge_by_identifier | def get_additional_charge_by_identifier(self, recurring_billing_id):
"""
Query extra charge information of an invoice from its identifier.
Args:
recurring_billing_id: Identifier of the additional charge.
Returns:
"""
fmt = 'recurringBillItems/{}'.format(recurring_billing_id)
return self.client._get(self.url + fmt, headers=self.get_headers()) | python | def get_additional_charge_by_identifier(self, recurring_billing_id):
"""
Query extra charge information of an invoice from its identifier.
Args:
recurring_billing_id: Identifier of the additional charge.
Returns:
"""
fmt = 'recurringBillItems/{}'.format(recurring_billing_id)
return self.client._get(self.url + fmt, headers=self.get_headers()) | [
"def",
"get_additional_charge_by_identifier",
"(",
"self",
",",
"recurring_billing_id",
")",
":",
"fmt",
"=",
"'recurringBillItems/{}'",
".",
"format",
"(",
"recurring_billing_id",
")",
"return",
"self",
".",
"client",
".",
"_get",
"(",
"self",
".",
"url",
"+",
"fmt",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Query extra charge information of an invoice from its identifier.
Args:
recurring_billing_id: Identifier of the additional charge.
Returns: | [
"Query",
"extra",
"charge",
"information",
"of",
"an",
"invoice",
"from",
"its",
"identifier",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L386-L397 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.update_additional_charge | def update_additional_charge(self, *, recurring_billing_id, description, plan_value, plan_tax, plan_tax_return_base,
currency):
"""
Updates the information from an additional charge in an invoice.
Args:
recurring_billing_id: Identifier of the additional charge.
description:
plan_value:
plan_tax:
plan_tax_return_base:
currency:
Returns:
"""
payload = {
"description": description,
"additionalValues": [
{
"name": "ITEM_VALUE",
"value": plan_value,
"currency": currency
},
{
"name": "ITEM_TAX",
"value": plan_tax,
"currency": currency
},
{
"name": "ITEM_TAX_RETURN_BASE",
"value": plan_tax_return_base,
"currency": currency
}
]
}
fmt = 'recurringBillItems/{}'.format(recurring_billing_id)
return self.client._put(self.url + fmt, payload=payload, headers=self.get_headers()) | python | def update_additional_charge(self, *, recurring_billing_id, description, plan_value, plan_tax, plan_tax_return_base,
currency):
"""
Updates the information from an additional charge in an invoice.
Args:
recurring_billing_id: Identifier of the additional charge.
description:
plan_value:
plan_tax:
plan_tax_return_base:
currency:
Returns:
"""
payload = {
"description": description,
"additionalValues": [
{
"name": "ITEM_VALUE",
"value": plan_value,
"currency": currency
},
{
"name": "ITEM_TAX",
"value": plan_tax,
"currency": currency
},
{
"name": "ITEM_TAX_RETURN_BASE",
"value": plan_tax_return_base,
"currency": currency
}
]
}
fmt = 'recurringBillItems/{}'.format(recurring_billing_id)
return self.client._put(self.url + fmt, payload=payload, headers=self.get_headers()) | [
"def",
"update_additional_charge",
"(",
"self",
",",
"*",
",",
"recurring_billing_id",
",",
"description",
",",
"plan_value",
",",
"plan_tax",
",",
"plan_tax_return_base",
",",
"currency",
")",
":",
"payload",
"=",
"{",
"\"description\"",
":",
"description",
",",
"\"additionalValues\"",
":",
"[",
"{",
"\"name\"",
":",
"\"ITEM_VALUE\"",
",",
"\"value\"",
":",
"plan_value",
",",
"\"currency\"",
":",
"currency",
"}",
",",
"{",
"\"name\"",
":",
"\"ITEM_TAX\"",
",",
"\"value\"",
":",
"plan_tax",
",",
"\"currency\"",
":",
"currency",
"}",
",",
"{",
"\"name\"",
":",
"\"ITEM_TAX_RETURN_BASE\"",
",",
"\"value\"",
":",
"plan_tax_return_base",
",",
"\"currency\"",
":",
"currency",
"}",
"]",
"}",
"fmt",
"=",
"'recurringBillItems/{}'",
".",
"format",
"(",
"recurring_billing_id",
")",
"return",
"self",
".",
"client",
".",
"_put",
"(",
"self",
".",
"url",
"+",
"fmt",
",",
"payload",
"=",
"payload",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Updates the information from an additional charge in an invoice.
Args:
recurring_billing_id: Identifier of the additional charge.
description:
plan_value:
plan_tax:
plan_tax_return_base:
currency:
Returns: | [
"Updates",
"the",
"information",
"from",
"an",
"additional",
"charge",
"in",
"an",
"invoice",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L429-L466 | train |
GearPlug/payu-python | payu/recurring.py | Recurring.delete_additional_charge | def delete_additional_charge(self, recurring_billing_id):
"""
Remove an extra charge from an invoice.
Args:
recurring_billing_id: Identifier of the additional charge.
Returns:
"""
fmt = 'recurringBillItems/{}'.format(recurring_billing_id)
return self.client._delete(self.url + fmt, headers=self.get_headers()) | python | def delete_additional_charge(self, recurring_billing_id):
"""
Remove an extra charge from an invoice.
Args:
recurring_billing_id: Identifier of the additional charge.
Returns:
"""
fmt = 'recurringBillItems/{}'.format(recurring_billing_id)
return self.client._delete(self.url + fmt, headers=self.get_headers()) | [
"def",
"delete_additional_charge",
"(",
"self",
",",
"recurring_billing_id",
")",
":",
"fmt",
"=",
"'recurringBillItems/{}'",
".",
"format",
"(",
"recurring_billing_id",
")",
"return",
"self",
".",
"client",
".",
"_delete",
"(",
"self",
".",
"url",
"+",
"fmt",
",",
"headers",
"=",
"self",
".",
"get_headers",
"(",
")",
")"
] | Remove an extra charge from an invoice.
Args:
recurring_billing_id: Identifier of the additional charge.
Returns: | [
"Remove",
"an",
"extra",
"charge",
"from",
"an",
"invoice",
"."
] | 47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e | https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L468-L479 | train |
gtaylor/django-athumb | athumb/templatetags/thumbnail.py | thumbnail | def thumbnail(parser, token):
"""
    Creates a thumbnail for an ImageField.
To just output the absolute url to the thumbnail::
{% thumbnail image 80x80 %}
After the image path and dimensions, you can put any options::
{% thumbnail image 80x80 force_ssl=True %}
To put the thumbnail URL on the context instead of just rendering
it, finish the tag with ``as [context_var_name]``::
{% thumbnail image 80x80 as thumb %}
<img src="{{thumb}}" />
"""
args = token.split_contents()
tag = args[0]
# Check to see if we're setting to a context variable.
if len(args) > 4 and args[-2] == 'as':
context_name = args[-1]
args = args[:-2]
else:
context_name = None
if len(args) < 3:
raise TemplateSyntaxError("Invalid syntax. Expected "
"'{%% %s source size [option1 option2 ...] %%}' or "
"'{%% %s source size [option1 option2 ...] as variable %%}'" %
(tag, tag))
# Get the source image path and requested size.
source_var = args[1]
# If the size argument was a correct static format, wrap it in quotes so
# that it is compiled correctly.
m = REGEXP_THUMB_SIZES.match(args[2])
if m:
args[2] = '"%s"' % args[2]
size_var = args[2]
# Get the options.
args_list = split_args(args[3:]).items()
# Check the options.
opts = {}
kwargs = {} # key,values here override settings and defaults
for arg, value in args_list:
value = value and parser.compile_filter(value)
if arg in TAG_SETTINGS and value is not None:
kwargs[str(arg)] = value
continue
else:
raise TemplateSyntaxError("'%s' tag received a bad argument: "
"'%s'" % (tag, arg))
return ThumbnailNode(source_var, size_var, opts=opts,
context_name=context_name, **kwargs) | python | def thumbnail(parser, token):
"""
    Creates a thumbnail for an ImageField.
To just output the absolute url to the thumbnail::
{% thumbnail image 80x80 %}
After the image path and dimensions, you can put any options::
{% thumbnail image 80x80 force_ssl=True %}
To put the thumbnail URL on the context instead of just rendering
it, finish the tag with ``as [context_var_name]``::
{% thumbnail image 80x80 as thumb %}
<img src="{{thumb}}" />
"""
args = token.split_contents()
tag = args[0]
# Check to see if we're setting to a context variable.
if len(args) > 4 and args[-2] == 'as':
context_name = args[-1]
args = args[:-2]
else:
context_name = None
if len(args) < 3:
raise TemplateSyntaxError("Invalid syntax. Expected "
"'{%% %s source size [option1 option2 ...] %%}' or "
"'{%% %s source size [option1 option2 ...] as variable %%}'" %
(tag, tag))
# Get the source image path and requested size.
source_var = args[1]
# If the size argument was a correct static format, wrap it in quotes so
# that it is compiled correctly.
m = REGEXP_THUMB_SIZES.match(args[2])
if m:
args[2] = '"%s"' % args[2]
size_var = args[2]
# Get the options.
args_list = split_args(args[3:]).items()
# Check the options.
opts = {}
kwargs = {} # key,values here override settings and defaults
for arg, value in args_list:
value = value and parser.compile_filter(value)
if arg in TAG_SETTINGS and value is not None:
kwargs[str(arg)] = value
continue
else:
raise TemplateSyntaxError("'%s' tag received a bad argument: "
"'%s'" % (tag, arg))
return ThumbnailNode(source_var, size_var, opts=opts,
context_name=context_name, **kwargs) | [
"def",
"thumbnail",
"(",
"parser",
",",
"token",
")",
":",
"args",
"=",
"token",
".",
"split_contents",
"(",
")",
"tag",
"=",
"args",
"[",
"0",
"]",
"# Check to see if we're setting to a context variable.",
"if",
"len",
"(",
"args",
")",
">",
"4",
"and",
"args",
"[",
"-",
"2",
"]",
"==",
"'as'",
":",
"context_name",
"=",
"args",
"[",
"-",
"1",
"]",
"args",
"=",
"args",
"[",
":",
"-",
"2",
"]",
"else",
":",
"context_name",
"=",
"None",
"if",
"len",
"(",
"args",
")",
"<",
"3",
":",
"raise",
"TemplateSyntaxError",
"(",
"\"Invalid syntax. Expected \"",
"\"'{%% %s source size [option1 option2 ...] %%}' or \"",
"\"'{%% %s source size [option1 option2 ...] as variable %%}'\"",
"%",
"(",
"tag",
",",
"tag",
")",
")",
"# Get the source image path and requested size.",
"source_var",
"=",
"args",
"[",
"1",
"]",
"# If the size argument was a correct static format, wrap it in quotes so",
"# that it is compiled correctly.",
"m",
"=",
"REGEXP_THUMB_SIZES",
".",
"match",
"(",
"args",
"[",
"2",
"]",
")",
"if",
"m",
":",
"args",
"[",
"2",
"]",
"=",
"'\"%s\"'",
"%",
"args",
"[",
"2",
"]",
"size_var",
"=",
"args",
"[",
"2",
"]",
"# Get the options.",
"args_list",
"=",
"split_args",
"(",
"args",
"[",
"3",
":",
"]",
")",
".",
"items",
"(",
")",
"# Check the options.",
"opts",
"=",
"{",
"}",
"kwargs",
"=",
"{",
"}",
"# key,values here override settings and defaults",
"for",
"arg",
",",
"value",
"in",
"args_list",
":",
"value",
"=",
"value",
"and",
"parser",
".",
"compile_filter",
"(",
"value",
")",
"if",
"arg",
"in",
"TAG_SETTINGS",
"and",
"value",
"is",
"not",
"None",
":",
"kwargs",
"[",
"str",
"(",
"arg",
")",
"]",
"=",
"value",
"continue",
"else",
":",
"raise",
"TemplateSyntaxError",
"(",
"\"'%s' tag received a bad argument: \"",
"\"'%s'\"",
"%",
"(",
"tag",
",",
"arg",
")",
")",
"return",
"ThumbnailNode",
"(",
"source_var",
",",
"size_var",
",",
"opts",
"=",
"opts",
",",
"context_name",
"=",
"context_name",
",",
"*",
"*",
"kwargs",
")"
] | Creates a thumbnail for an ImageField.
To just output the absolute url to the thumbnail::
{% thumbnail image 80x80 %}
After the image path and dimensions, you can put any options::
{% thumbnail image 80x80 force_ssl=True %}
To put the thumbnail URL on the context instead of just rendering
it, finish the tag with ``as [context_var_name]``::
{% thumbnail image 80x80 as thumb %}
<img src="{{thumb}}" /> | [
"Creates",
"a",
"thumbnail",
"of",
"for",
"an",
"ImageField",
"."
] | 69261ace0dff81e33156a54440874456a7b38dfb | https://github.com/gtaylor/django-athumb/blob/69261ace0dff81e33156a54440874456a7b38dfb/athumb/templatetags/thumbnail.py#L142-L201 | train |
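The thumbnail tag above decides whether to render the URL or store it on the context by checking for a trailing "as <variable>" pair in the token list. A tiny sketch of that detection logic, run on token lists like the ones token.split_contents() would produce; the tag arguments shown are hypothetical.

def parse_as_clause(args):
    # A trailing "as <name>" moves the result into a context variable instead of rendering it.
    if len(args) > 4 and args[-2] == 'as':
        return args[:-2], args[-1]
    return args, None

print(parse_as_clause(['thumbnail', 'image', '80x80', 'force_ssl=True', 'as', 'thumb']))
print(parse_as_clause(['thumbnail', 'image', '80x80']))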
spacetelescope/stsci.imagestats | stsci/imagestats/__init__.py | ImageStats.printStats | def printStats(self):
""" Print the requested statistics values for those fields specified on input. """
print("--- Imagestats Results ---")
if (self.fields.find('npix') != -1 ):
print("Number of pixels : ",self.npix)
if (self.fields.find('min') != -1 ):
print("Minimum value : ",self.min)
if (self.fields.find('max') != -1 ):
print("Maximum value : ",self.max)
if (self.fields.find('stddev') != -1 ):
print("Standard Deviation: ",self.stddev)
if (self.fields.find('mean') != -1 ):
print("Mean : ",self.mean)
if (self.fields.find('mode') != -1 ):
print("Mode : ",self.mode)
if (self.fields.find('median') != -1 ):
print("Median : ",self.median)
if (self.fields.find('midpt') != -1 ):
print("Midpt : ",self.midpt) | python | def printStats(self):
""" Print the requested statistics values for those fields specified on input. """
print("--- Imagestats Results ---")
if (self.fields.find('npix') != -1 ):
print("Number of pixels : ",self.npix)
if (self.fields.find('min') != -1 ):
print("Minimum value : ",self.min)
if (self.fields.find('max') != -1 ):
print("Maximum value : ",self.max)
if (self.fields.find('stddev') != -1 ):
print("Standard Deviation: ",self.stddev)
if (self.fields.find('mean') != -1 ):
print("Mean : ",self.mean)
if (self.fields.find('mode') != -1 ):
print("Mode : ",self.mode)
if (self.fields.find('median') != -1 ):
print("Median : ",self.median)
if (self.fields.find('midpt') != -1 ):
print("Midpt : ",self.midpt) | [
"def",
"printStats",
"(",
"self",
")",
":",
"print",
"(",
"\"--- Imagestats Results ---\"",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'npix'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Number of pixels : \"",
",",
"self",
".",
"npix",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'min'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Minimum value : \"",
",",
"self",
".",
"min",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'max'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Maximum value : \"",
",",
"self",
".",
"max",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'stddev'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Standard Deviation: \"",
",",
"self",
".",
"stddev",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'mean'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Mean : \"",
",",
"self",
".",
"mean",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'mode'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Mode : \"",
",",
"self",
".",
"mode",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'median'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Median : \"",
",",
"self",
".",
"median",
")",
"if",
"(",
"self",
".",
"fields",
".",
"find",
"(",
"'midpt'",
")",
"!=",
"-",
"1",
")",
":",
"print",
"(",
"\"Midpt : \"",
",",
"self",
".",
"midpt",
")"
] | Print the requested statistics values for those fields specified on input. | [
"Print",
"the",
"requested",
"statistics",
"values",
"for",
"those",
"fields",
"specified",
"on",
"input",
"."
] | d7fc9fe9783f7ed3dc9e4af47acd357a5ccd68e3 | https://github.com/spacetelescope/stsci.imagestats/blob/d7fc9fe9783f7ed3dc9e4af47acd357a5ccd68e3/stsci/imagestats/__init__.py#L332-L351 | train |
digidotcom/python-wvalib | wva/http_client.py | WVAHttpClient.raw_request | def raw_request(self, method, uri, **kwargs):
"""Perform a WVA web services request and return the raw response object
:param method: The HTTP method to use when making this request
:param uri: The path past /ws to request. That is, the path requested for
a relpath of `a/b/c` would be `/ws/a/b/c`.
:raises WVAHttpSocketError: if there was an error making the HTTP request. That is,
the request was unable to make it to the WVA for some reason.
"""
with warnings.catch_warnings(): # catch warning about certs not being verified
warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
warnings.simplefilter("ignore", urllib3.exceptions.InsecurePlatformWarning)
try:
response = self._get_session().request(method, self._get_ws_url(uri), **kwargs)
except requests.RequestException as e:
# e.g. raise new_exc from old_exc
six.raise_from(WVAHttpRequestError(e), e)
else:
return response | python | def raw_request(self, method, uri, **kwargs):
"""Perform a WVA web services request and return the raw response object
:param method: The HTTP method to use when making this request
:param uri: The path past /ws to request. That is, the path requested for
a relpath of `a/b/c` would be `/ws/a/b/c`.
:raises WVAHttpSocketError: if there was an error making the HTTP request. That is,
the request was unable to make it to the WVA for some reason.
"""
with warnings.catch_warnings(): # catch warning about certs not being verified
warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
warnings.simplefilter("ignore", urllib3.exceptions.InsecurePlatformWarning)
try:
response = self._get_session().request(method, self._get_ws_url(uri), **kwargs)
except requests.RequestException as e:
# e.g. raise new_exc from old_exc
six.raise_from(WVAHttpRequestError(e), e)
else:
return response | [
"def",
"raw_request",
"(",
"self",
",",
"method",
",",
"uri",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"warnings",
".",
"catch_warnings",
"(",
")",
":",
"# catch warning about certs not being verified",
"warnings",
".",
"simplefilter",
"(",
"\"ignore\"",
",",
"urllib3",
".",
"exceptions",
".",
"InsecureRequestWarning",
")",
"warnings",
".",
"simplefilter",
"(",
"\"ignore\"",
",",
"urllib3",
".",
"exceptions",
".",
"InsecurePlatformWarning",
")",
"try",
":",
"response",
"=",
"self",
".",
"_get_session",
"(",
")",
".",
"request",
"(",
"method",
",",
"self",
".",
"_get_ws_url",
"(",
"uri",
")",
",",
"*",
"*",
"kwargs",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"# e.g. raise new_exc from old_exc",
"six",
".",
"raise_from",
"(",
"WVAHttpRequestError",
"(",
"e",
")",
",",
"e",
")",
"else",
":",
"return",
"response"
] | Perform a WVA web services request and return the raw response object
:param method: The HTTP method to use when making this request
:param uri: The path past /ws to request. That is, the path requested for
a relpath of `a/b/c` would be `/ws/a/b/c`.
:raises WVAHttpSocketError: if there was an error making the HTTP request. That is,
the request was unable to make it to the WVA for some reason. | [
"Perform",
"a",
"WVA",
"web",
"services",
"request",
"and",
"return",
"the",
"raw",
"response",
"object"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L79-L97 | train |
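raw_request above wraps any requests.RequestException in a library-specific error while keeping the original exception as the cause. A minimal sketch of that translation pattern, assuming requests and six are installed; the URL and the WVAHttpRequestError class here are stand-ins, and on Python 3 alone six.raise_from(new, old) is equivalent to "raise new from old".

import requests
import six

class WVAHttpRequestError(Exception):
    """Stand-in for the library's transport-level error."""

def fetch(url):
    try:
        return requests.get(url, timeout=1)
    except requests.RequestException as e:
        # Re-raise as the library-style error type, keeping the original as __cause__.
        six.raise_from(WVAHttpRequestError(e), e)

try:
    fetch("http://192.0.2.1/ws/vehicle")   # TEST-NET address, expected to fail
except WVAHttpRequestError as err:
    print("translated:", type(err.__cause__).__name__)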
digidotcom/python-wvalib | wva/http_client.py | WVAHttpClient.request | def request(self, method, uri, **kwargs):
"""Perform a WVA web services request and return the decoded value if successful
:param method: The HTTP method to use when making this request
:param uri: The path past /ws to request. That is, the path requested for
a relpath of `a/b/c` would be `/ws/a/b/c`.
:raises WVAHttpError: if a response is received but the success is non-success
:raises WVAHttpSocketError: if there was an error making the HTTP request. That is,
the request was unable to make it to the WVA for some reason.
:return: If the response content type is JSON, it will be deserialized and a
python dictionary containing the information from the json document will
be returned. If not a JSON response, a unicode string of the response
text will be returned.
"""
response = self.raw_request(method, uri, **kwargs)
if response.status_code != 200:
exception_class = HTTP_STATUS_EXCEPTION_MAP.get(response.status_code, WVAHttpError)
raise exception_class(response)
if response.headers.get("content-type") == "application/json":
return json.loads(response.text)
else:
return response.text | python | def request(self, method, uri, **kwargs):
"""Perform a WVA web services request and return the decoded value if successful
:param method: The HTTP method to use when making this request
:param uri: The path past /ws to request. That is, the path requested for
a relpath of `a/b/c` would be `/ws/a/b/c`.
:raises WVAHttpError: if a response is received but the success is non-success
:raises WVAHttpSocketError: if there was an error making the HTTP request. That is,
the request was unable to make it to the WVA for some reason.
:return: If the response content type is JSON, it will be deserialized and a
python dictionary containing the information from the json document will
be returned. If not a JSON response, a unicode string of the response
text will be returned.
"""
response = self.raw_request(method, uri, **kwargs)
if response.status_code != 200:
exception_class = HTTP_STATUS_EXCEPTION_MAP.get(response.status_code, WVAHttpError)
raise exception_class(response)
if response.headers.get("content-type") == "application/json":
return json.loads(response.text)
else:
return response.text | [
"def",
"request",
"(",
"self",
",",
"method",
",",
"uri",
",",
"*",
"*",
"kwargs",
")",
":",
"response",
"=",
"self",
".",
"raw_request",
"(",
"method",
",",
"uri",
",",
"*",
"*",
"kwargs",
")",
"if",
"response",
".",
"status_code",
"!=",
"200",
":",
"exception_class",
"=",
"HTTP_STATUS_EXCEPTION_MAP",
".",
"get",
"(",
"response",
".",
"status_code",
",",
"WVAHttpError",
")",
"raise",
"exception_class",
"(",
"response",
")",
"if",
"response",
".",
"headers",
".",
"get",
"(",
"\"content-type\"",
")",
"==",
"\"application/json\"",
":",
"return",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
"else",
":",
"return",
"response",
".",
"text"
] | Perform a WVA web services request and return the decoded value if successful
:param method: The HTTP method to use when making this request
:param uri: The path past /ws to request. That is, the path requested for
a relpath of `a/b/c` would be `/ws/a/b/c`.
:raises WVAHttpError: if a response is received but the success is non-success
:raises WVAHttpSocketError: if there was an error making the HTTP request. That is,
the request was unable to make it to the WVA for some reason.
:return: If the response content type is JSON, it will be deserialized and a
python dictionary containing the information from the json document will
be returned. If not a JSON response, a unicode string of the response
text will be returned. | [
"Perform",
"a",
"WVA",
"web",
"services",
"request",
"and",
"return",
"the",
"decoded",
"value",
"if",
"successful"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L99-L121 | train |
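The request method above turns non-200 responses into typed exceptions by looking the status code up in a map and falling back to a generic error. A self-contained sketch of that dispatch; the exception classes and map contents are stand-ins, not the library's actual definitions.

class WVAHttpError(Exception): pass
class WVAHttpNotFoundError(WVAHttpError): pass
class WVAHttpServiceUnavailableError(WVAHttpError): pass

HTTP_STATUS_EXCEPTION_MAP = {404: WVAHttpNotFoundError, 503: WVAHttpServiceUnavailableError}

def raise_for_status(status_code, response_text):
    # Anything other than 200 maps to a specific exception class, or the generic one.
    if status_code != 200:
        raise HTTP_STATUS_EXCEPTION_MAP.get(status_code, WVAHttpError)(response_text)

try:
    raise_for_status(404, "not found")
except WVAHttpError as err:
    print(type(err).__name__, "->", err)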
digidotcom/python-wvalib | wva/http_client.py | WVAHttpClient.post | def post(self, uri, data, **kwargs):
"""POST the provided data to the specified path
See :meth:`request` for additional details. The `data` parameter here is
expected to be a string type.
"""
return self.request("POST", uri, data=data, **kwargs) | python | def post(self, uri, data, **kwargs):
"""POST the provided data to the specified path
See :meth:`request` for additional details. The `data` parameter here is
expected to be a string type.
"""
return self.request("POST", uri, data=data, **kwargs) | [
"def",
"post",
"(",
"self",
",",
"uri",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"request",
"(",
"\"POST\"",
",",
"uri",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | POST the provided data to the specified path
See :meth:`request` for additional details. The `data` parameter here is
expected to be a string type. | [
"POST",
"the",
"provided",
"data",
"to",
"the",
"specified",
"path"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L137-L143 | train |
digidotcom/python-wvalib | wva/http_client.py | WVAHttpClient.post_json | def post_json(self, uri, data, **kwargs):
"""POST the provided data as json to the specified path
See :meth:`request` for additional details.
"""
encoded_data = json.dumps(data)
kwargs.setdefault("headers", {}).update({
"Content-Type": "application/json", # tell server we are sending json
})
return self.post(uri, data=encoded_data, **kwargs) | python | def post_json(self, uri, data, **kwargs):
"""POST the provided data as json to the specified path
See :meth:`request` for additional details.
"""
encoded_data = json.dumps(data)
kwargs.setdefault("headers", {}).update({
"Content-Type": "application/json", # tell server we are sending json
})
return self.post(uri, data=encoded_data, **kwargs) | [
"def",
"post_json",
"(",
"self",
",",
"uri",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"encoded_data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
"kwargs",
".",
"setdefault",
"(",
"\"headers\"",
",",
"{",
"}",
")",
".",
"update",
"(",
"{",
"\"Content-Type\"",
":",
"\"application/json\"",
",",
"# tell server we are sending json",
"}",
")",
"return",
"self",
".",
"post",
"(",
"uri",
",",
"data",
"=",
"encoded_data",
",",
"*",
"*",
"kwargs",
")"
] | POST the provided data as json to the specified path
See :meth:`request` for additional details. | [
"POST",
"the",
"provided",
"data",
"as",
"json",
"to",
"the",
"specified",
"path"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L145-L154 | train |
digidotcom/python-wvalib | wva/http_client.py | WVAHttpClient.put | def put(self, uri, data, **kwargs):
"""PUT the provided data to the specified path
See :meth:`request` for additional details. The `data` parameter here is
expected to be a string type.
"""
return self.request("PUT", uri, data=data, **kwargs) | python | def put(self, uri, data, **kwargs):
"""PUT the provided data to the specified path
See :meth:`request` for additional details. The `data` parameter here is
expected to be a string type.
"""
return self.request("PUT", uri, data=data, **kwargs) | [
"def",
"put",
"(",
"self",
",",
"uri",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"request",
"(",
"\"PUT\"",
",",
"uri",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | PUT the provided data to the specified path
See :meth:`request` for additional details. The `data` parameter here is
expected to be a string type. | [
"PUT",
"the",
"provided",
"data",
"to",
"the",
"specified",
"path"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L156-L162 | train |
digidotcom/python-wvalib | wva/http_client.py | WVAHttpClient.put_json | def put_json(self, uri, data, **kwargs):
"""PUT the provided data as json to the specified path
See :meth:`request` for additional details.
"""
encoded_data = json.dumps(data)
kwargs.setdefault("headers", {}).update({
"Content-Type": "application/json", # tell server we are sending json
})
return self.put(uri, data=encoded_data, **kwargs) | python | def put_json(self, uri, data, **kwargs):
"""PUT the provided data as json to the specified path
See :meth:`request` for additional details.
"""
encoded_data = json.dumps(data)
kwargs.setdefault("headers", {}).update({
"Content-Type": "application/json", # tell server we are sending json
})
return self.put(uri, data=encoded_data, **kwargs) | [
"def",
"put_json",
"(",
"self",
",",
"uri",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"encoded_data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
"kwargs",
".",
"setdefault",
"(",
"\"headers\"",
",",
"{",
"}",
")",
".",
"update",
"(",
"{",
"\"Content-Type\"",
":",
"\"application/json\"",
",",
"# tell server we are sending json",
"}",
")",
"return",
"self",
".",
"put",
"(",
"uri",
",",
"data",
"=",
"encoded_data",
",",
"*",
"*",
"kwargs",
")"
] | PUT the provided data as json to the specified path
See :meth:`request` for additional details. | [
"PUT",
"the",
"provided",
"data",
"as",
"json",
"to",
"the",
"specified",
"path"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L164-L173 | train |
frawau/aiolifx | aiolifx/aiolifx.py | mac_to_ipv6_linklocal | def mac_to_ipv6_linklocal(mac,prefix="fe80::"):
""" Translate a MAC address into an IPv6 address in the prefixed network.
This function calculates the EUI (Extended Unique Identifier) from the given
    MAC address and prepends the needed prefix to come up with a valid IPv6 address.
    The default prefix is the link local prefix defined by RFC 4291.
:param mac: the mac address of the device
:type mac: str
:param prefix: the IPv6 network prefix
:type prefix: str
:returns: IPv6 address
:rtype: str
"""
# Remove the most common delimiters; dots, dashes, etc.
mac_value = int(mac.translate(str.maketrans(dict([(x,None) for x in [" ",".",":","-"]]))),16)
# Split out the bytes that slot into the IPv6 address
# XOR the most significant byte with 0x02, inverting the
# Universal / Local bit
high2 = mac_value >> 32 & 0xffff ^ 0x0200
high1 = mac_value >> 24 & 0xff
low1 = mac_value >> 16 & 0xff
low2 = mac_value & 0xffff
return prefix+':{:04x}:{:02x}ff:fe{:02x}:{:04x}'.format(
high2, high1, low1, low2) | python | def mac_to_ipv6_linklocal(mac,prefix="fe80::"):
""" Translate a MAC address into an IPv6 address in the prefixed network.
This function calculates the EUI (Extended Unique Identifier) from the given
    MAC address and prepends the needed prefix to come up with a valid IPv6 address.
    The default prefix is the link local prefix defined by RFC 4291.
:param mac: the mac address of the device
:type mac: str
:param prefix: the IPv6 network prefix
:type prefix: str
:returns: IPv6 address
:rtype: str
"""
# Remove the most common delimiters; dots, dashes, etc.
mac_value = int(mac.translate(str.maketrans(dict([(x,None) for x in [" ",".",":","-"]]))),16)
# Split out the bytes that slot into the IPv6 address
# XOR the most significant byte with 0x02, inverting the
# Universal / Local bit
high2 = mac_value >> 32 & 0xffff ^ 0x0200
high1 = mac_value >> 24 & 0xff
low1 = mac_value >> 16 & 0xff
low2 = mac_value & 0xffff
return prefix+':{:04x}:{:02x}ff:fe{:02x}:{:04x}'.format(
high2, high1, low1, low2) | [
"def",
"mac_to_ipv6_linklocal",
"(",
"mac",
",",
"prefix",
"=",
"\"fe80::\"",
")",
":",
"# Remove the most common delimiters; dots, dashes, etc.",
"mac_value",
"=",
"int",
"(",
"mac",
".",
"translate",
"(",
"str",
".",
"maketrans",
"(",
"dict",
"(",
"[",
"(",
"x",
",",
"None",
")",
"for",
"x",
"in",
"[",
"\" \"",
",",
"\".\"",
",",
"\":\"",
",",
"\"-\"",
"]",
"]",
")",
")",
")",
",",
"16",
")",
"# Split out the bytes that slot into the IPv6 address",
"# XOR the most significant byte with 0x02, inverting the",
"# Universal / Local bit",
"high2",
"=",
"mac_value",
">>",
"32",
"&",
"0xffff",
"^",
"0x0200",
"high1",
"=",
"mac_value",
">>",
"24",
"&",
"0xff",
"low1",
"=",
"mac_value",
">>",
"16",
"&",
"0xff",
"low2",
"=",
"mac_value",
"&",
"0xffff",
"return",
"prefix",
"+",
"':{:04x}:{:02x}ff:fe{:02x}:{:04x}'",
".",
"format",
"(",
"high2",
",",
"high1",
",",
"low1",
",",
"low2",
")"
] | Translate a MAC address into an IPv6 address in the prefixed network.
This function calculates the EUI (Extended Unique Identifier) from the given
    MAC address and prepends the needed prefix to come up with a valid IPv6 address.
    The default prefix is the link local prefix defined by RFC 4291.
:param mac: the mac address of the device
:type mac: str
:param prefix: the IPv6 network prefix
:type prefix: str
:returns: IPv6 address
:rtype: str | [
"Translate",
"a",
"MAC",
"address",
"into",
"an",
"IPv6",
"address",
"in",
"the",
"prefixed",
"network",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L41-L67 | train |
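To make the bit manipulation in mac_to_ipv6_linklocal above concrete, here is a self-contained walk-through on a made-up MAC address: strip the delimiters, flip the universal/local bit of the first byte (XOR with 0x02), and splice ff:fe into the middle to form the EUI-64 host part.

mac = "00:25:96:12:34:56"                 # illustrative MAC, not taken from the record
value = int(mac.replace(":", ""), 16)

high2 = value >> 32 & 0xffff ^ 0x0200     # 0x0025 -> 0x0225 (universal/local bit flipped)
high1 = value >> 24 & 0xff                # 0x96
low1 = value >> 16 & 0xff                 # 0x12
low2 = value & 0xffff                     # 0x3456

host = '{:04x}:{:02x}ff:fe{:02x}:{:04x}'.format(high2, high1, low1, low2)
print(host)                               # 0225:96ff:fe12:3456
# With the fe80:: link-local prefix this corresponds to fe80::225:96ff:fe12:3456.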
frawau/aiolifx | aiolifx/aiolifx.py | Device.datagram_received | def datagram_received(self, data, addr):
"""Method run when data is received from the device
This method will unpack the data according to the LIFX protocol.
If the message represents some state information, it will update
the device state. Following that it will execute the callback corresponding
to the message sequence number. If there is no sequence number, the
default callback will be called.
:param data: raw data
:type data: bytestring
:param addr: sender IP address 2-tuple for IPv4, 4-tuple for IPv6
:type addr: tuple
"""
self.register()
response = unpack_lifx_message(data)
self.lastmsg=datetime.datetime.now()
if response.seq_num in self.message:
response_type,myevent,callb = self.message[response.seq_num]
if type(response) == response_type:
if response.source_id == self.source_id:
if "State" in response.__class__.__name__:
setmethod="resp_set_"+response.__class__.__name__.replace("State","").lower()
if setmethod in dir(self) and callable(getattr(self,setmethod)):
getattr(self,setmethod)(response)
if callb:
callb(self,response)
myevent.set()
del(self.message[response.seq_num])
elif type(response) == Acknowledgement:
pass
else:
del(self.message[response.seq_num])
elif self.default_callb:
self.default_callb(response) | python | def datagram_received(self, data, addr):
"""Method run when data is received from the device
This method will unpack the data according to the LIFX protocol.
If the message represents some state information, it will update
the device state. Following that it will execute the callback corresponding
to the message sequence number. If there is no sequence number, the
default callback will be called.
:param data: raw data
:type data: bytestring
:param addr: sender IP address 2-tuple for IPv4, 4-tuple for IPv6
:type addr: tuple
"""
self.register()
response = unpack_lifx_message(data)
self.lastmsg=datetime.datetime.now()
if response.seq_num in self.message:
response_type,myevent,callb = self.message[response.seq_num]
if type(response) == response_type:
if response.source_id == self.source_id:
if "State" in response.__class__.__name__:
setmethod="resp_set_"+response.__class__.__name__.replace("State","").lower()
if setmethod in dir(self) and callable(getattr(self,setmethod)):
getattr(self,setmethod)(response)
if callb:
callb(self,response)
myevent.set()
del(self.message[response.seq_num])
elif type(response) == Acknowledgement:
pass
else:
del(self.message[response.seq_num])
elif self.default_callb:
self.default_callb(response) | [
"def",
"datagram_received",
"(",
"self",
",",
"data",
",",
"addr",
")",
":",
"self",
".",
"register",
"(",
")",
"response",
"=",
"unpack_lifx_message",
"(",
"data",
")",
"self",
".",
"lastmsg",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"if",
"response",
".",
"seq_num",
"in",
"self",
".",
"message",
":",
"response_type",
",",
"myevent",
",",
"callb",
"=",
"self",
".",
"message",
"[",
"response",
".",
"seq_num",
"]",
"if",
"type",
"(",
"response",
")",
"==",
"response_type",
":",
"if",
"response",
".",
"source_id",
"==",
"self",
".",
"source_id",
":",
"if",
"\"State\"",
"in",
"response",
".",
"__class__",
".",
"__name__",
":",
"setmethod",
"=",
"\"resp_set_\"",
"+",
"response",
".",
"__class__",
".",
"__name__",
".",
"replace",
"(",
"\"State\"",
",",
"\"\"",
")",
".",
"lower",
"(",
")",
"if",
"setmethod",
"in",
"dir",
"(",
"self",
")",
"and",
"callable",
"(",
"getattr",
"(",
"self",
",",
"setmethod",
")",
")",
":",
"getattr",
"(",
"self",
",",
"setmethod",
")",
"(",
"response",
")",
"if",
"callb",
":",
"callb",
"(",
"self",
",",
"response",
")",
"myevent",
".",
"set",
"(",
")",
"del",
"(",
"self",
".",
"message",
"[",
"response",
".",
"seq_num",
"]",
")",
"elif",
"type",
"(",
"response",
")",
"==",
"Acknowledgement",
":",
"pass",
"else",
":",
"del",
"(",
"self",
".",
"message",
"[",
"response",
".",
"seq_num",
"]",
")",
"elif",
"self",
".",
"default_callb",
":",
"self",
".",
"default_callb",
"(",
"response",
")"
] | Method run when data is received from the device
This method will unpack the data according to the LIFX protocol.
If the message represents some state information, it will update
the device state. Following that it will execute the callback corresponding
to the message sequence number. If there is no sequence number, the
default callback will be called.
:param data: raw data
:type data: bytestring
:param addr: sender IP address 2-tuple for IPv4, 4-tuple for IPv6
:type addr: tuple | [
"Method",
"run",
"when",
"data",
"is",
"received",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L146-L180 | train |
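datagram_received above works against a table of pending requests keyed by sequence number, each entry holding the expected response type, an event to wake the sender, and an optional callback. A minimal stand-alone sketch of that bookkeeping, with made-up classes standing in for LIFX messages:

import asyncio

class StateLabel:                          # stand-in for a LIFX State* response
    seq_num = 7
    label = "kitchen"

pending = {}                               # seq_num -> [expected_type, event, callback]

def track(seq_num, expected_type, callback=None):
    pending[seq_num] = [expected_type, asyncio.Event(), callback]

def on_datagram(response):
    entry = pending.get(response.seq_num)
    if entry and isinstance(response, entry[0]):
        expected_type, event, callback = entry
        if callback:
            callback(response)             # state-update hook runs before the waiter wakes
        event.set()
        del pending[response.seq_num]

track(7, StateLabel, callback=lambda r: print("label:", r.label))
on_datagram(StateLabel())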
frawau/aiolifx | aiolifx/aiolifx.py | Device.register | def register(self):
"""Proxy method to register the device with the parent.
"""
if not self.registered:
self.registered = True
if self.parent:
self.parent.register(self) | python | def register(self):
"""Proxy method to register the device with the parent.
"""
if not self.registered:
self.registered = True
if self.parent:
self.parent.register(self) | [
"def",
"register",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"registered",
":",
"self",
".",
"registered",
"=",
"True",
"if",
"self",
".",
"parent",
":",
"self",
".",
"parent",
".",
"register",
"(",
"self",
")"
] | Proxy method to register the device with the parent. | [
"Proxy",
"method",
"to",
"register",
"the",
"device",
"with",
"the",
"parent",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L182-L188 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.unregister | def unregister(self):
"""Proxy method to unregister the device with the parent.
"""
if self.registered:
#Only if we have not received any message recently.
if datetime.datetime.now()-datetime.timedelta(seconds=self.unregister_timeout) > self.lastmsg:
self.registered = False
if self.parent:
self.parent.unregister(self) | python | def unregister(self):
"""Proxy method to unregister the device with the parent.
"""
if self.registered:
#Only if we have not received any message recently.
if datetime.datetime.now()-datetime.timedelta(seconds=self.unregister_timeout) > self.lastmsg:
self.registered = False
if self.parent:
self.parent.unregister(self) | [
"def",
"unregister",
"(",
"self",
")",
":",
"if",
"self",
".",
"registered",
":",
"#Only if we have not received any message recently.",
"if",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"self",
".",
"unregister_timeout",
")",
">",
"self",
".",
"lastmsg",
":",
"self",
".",
"registered",
"=",
"False",
"if",
"self",
".",
"parent",
":",
"self",
".",
"parent",
".",
"unregister",
"(",
"self",
")"
] | Proxy method to unregister the device with the parent. | [
"Proxy",
"method",
"to",
"unregister",
"the",
"device",
"with",
"the",
"parent",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L190-L198 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.fire_sending | async def fire_sending(self,msg,num_repeats):
"""Coroutine used to send message to the device when no response is needed.
:param msg: Message to send
:type msg: aiolifx.
:param num_repeats: number of times the message is to be sent.
:returns: The coroutine that can be scheduled to run
:rtype: coroutine
"""
if num_repeats is None:
num_repeats = self.retry_count
sent_msg_count = 0
sleep_interval = 0.05
while(sent_msg_count < num_repeats):
if self.transport:
self.transport.sendto(msg.packed_message)
sent_msg_count += 1
await aio.sleep(sleep_interval) | python | async def fire_sending(self,msg,num_repeats):
"""Coroutine used to send message to the device when no response is needed.
:param msg: Message to send
:type msg: aiolifx.
:param num_repeats: number of times the message is to be sent.
:returns: The coroutine that can be scheduled to run
:rtype: coroutine
"""
if num_repeats is None:
num_repeats = self.retry_count
sent_msg_count = 0
sleep_interval = 0.05
while(sent_msg_count < num_repeats):
if self.transport:
self.transport.sendto(msg.packed_message)
sent_msg_count += 1
await aio.sleep(sleep_interval) | [
"async",
"def",
"fire_sending",
"(",
"self",
",",
"msg",
",",
"num_repeats",
")",
":",
"if",
"num_repeats",
"is",
"None",
":",
"num_repeats",
"=",
"self",
".",
"retry_count",
"sent_msg_count",
"=",
"0",
"sleep_interval",
"=",
"0.05",
"while",
"(",
"sent_msg_count",
"<",
"num_repeats",
")",
":",
"if",
"self",
".",
"transport",
":",
"self",
".",
"transport",
".",
"sendto",
"(",
"msg",
".",
"packed_message",
")",
"sent_msg_count",
"+=",
"1",
"await",
"aio",
".",
"sleep",
"(",
"sleep_interval",
")"
] | Coroutine used to send message to the device when no response is needed.
:param msg: Message to send
:type msg: aiolifx.
:param num_repeats: number of times the message is to be sent.
:returns: The coroutine that can be scheduled to run
:rtype: coroutine | [
"Coroutine",
"used",
"to",
"send",
"message",
"to",
"the",
"device",
"when",
"no",
"response",
"is",
"needed",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L214-L231 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.try_sending | async def try_sending(self,msg,timeout_secs, max_attempts):
"""Coroutine used to send message to the device when a response or ack is needed.
    This coroutine will try to send the message up to max_attempts times, waiting timeout_secs
for an answer. If no answer is received, it will consider that the device is no longer
accessible and will unregister it.
:param msg: The message to send
:type msg: aiolifx.Message
:param timeout_secs: Number of seconds to wait for a response or ack
:type timeout_secs: int
:param max_attempts: .
:type max_attempts: int
:returns: a coroutine to be scheduled
:rtype: coroutine
"""
if timeout_secs is None:
timeout_secs = self.timeout
if max_attempts is None:
max_attempts = self.retry_count
attempts = 0
while attempts < max_attempts:
if msg.seq_num not in self.message: return
event = aio.Event()
self.message[msg.seq_num][1]= event
attempts += 1
if self.transport:
self.transport.sendto(msg.packed_message)
try:
myresult = await aio.wait_for(event.wait(),timeout_secs)
break
except Exception as inst:
if attempts >= max_attempts:
if msg.seq_num in self.message:
callb = self.message[msg.seq_num][2]
if callb:
callb(self, None)
del(self.message[msg.seq_num])
#It's dead Jim
self.unregister() | python | async def try_sending(self,msg,timeout_secs, max_attempts):
"""Coroutine used to send message to the device when a response or ack is needed.
    This coroutine will try to send the message up to max_attempts times, waiting timeout_secs
for an answer. If no answer is received, it will consider that the device is no longer
accessible and will unregister it.
:param msg: The message to send
:type msg: aiolifx.Message
:param timeout_secs: Number of seconds to wait for a response or ack
:type timeout_secs: int
:param max_attempts: .
:type max_attempts: int
:returns: a coroutine to be scheduled
:rtype: coroutine
"""
if timeout_secs is None:
timeout_secs = self.timeout
if max_attempts is None:
max_attempts = self.retry_count
attempts = 0
while attempts < max_attempts:
if msg.seq_num not in self.message: return
event = aio.Event()
self.message[msg.seq_num][1]= event
attempts += 1
if self.transport:
self.transport.sendto(msg.packed_message)
try:
myresult = await aio.wait_for(event.wait(),timeout_secs)
break
except Exception as inst:
if attempts >= max_attempts:
if msg.seq_num in self.message:
callb = self.message[msg.seq_num][2]
if callb:
callb(self, None)
del(self.message[msg.seq_num])
#It's dead Jim
self.unregister() | [
"async",
"def",
"try_sending",
"(",
"self",
",",
"msg",
",",
"timeout_secs",
",",
"max_attempts",
")",
":",
"if",
"timeout_secs",
"is",
"None",
":",
"timeout_secs",
"=",
"self",
".",
"timeout",
"if",
"max_attempts",
"is",
"None",
":",
"max_attempts",
"=",
"self",
".",
"retry_count",
"attempts",
"=",
"0",
"while",
"attempts",
"<",
"max_attempts",
":",
"if",
"msg",
".",
"seq_num",
"not",
"in",
"self",
".",
"message",
":",
"return",
"event",
"=",
"aio",
".",
"Event",
"(",
")",
"self",
".",
"message",
"[",
"msg",
".",
"seq_num",
"]",
"[",
"1",
"]",
"=",
"event",
"attempts",
"+=",
"1",
"if",
"self",
".",
"transport",
":",
"self",
".",
"transport",
".",
"sendto",
"(",
"msg",
".",
"packed_message",
")",
"try",
":",
"myresult",
"=",
"await",
"aio",
".",
"wait_for",
"(",
"event",
".",
"wait",
"(",
")",
",",
"timeout_secs",
")",
"break",
"except",
"Exception",
"as",
"inst",
":",
"if",
"attempts",
">=",
"max_attempts",
":",
"if",
"msg",
".",
"seq_num",
"in",
"self",
".",
"message",
":",
"callb",
"=",
"self",
".",
"message",
"[",
"msg",
".",
"seq_num",
"]",
"[",
"2",
"]",
"if",
"callb",
":",
"callb",
"(",
"self",
",",
"None",
")",
"del",
"(",
"self",
".",
"message",
"[",
"msg",
".",
"seq_num",
"]",
")",
"#It's dead Jim",
"self",
".",
"unregister",
"(",
")"
] | Coroutine used to send message to the device when a response or ack is needed.
    This coroutine will try to send the message up to max_attempts times, waiting timeout_secs
for an answer. If no answer is received, it will consider that the device is no longer
accessible and will unregister it.
:param msg: The message to send
:type msg: aiolifx.Message
:param timeout_secs: Number of seconds to wait for a response or ack
:type timeout_secs: int
:param max_attempts: Maximum number of times the message is sent before giving up.
:type max_attempts: int
:returns: a coroutine to be scheduled
:rtype: coroutine | [
"Coroutine",
"used",
"to",
"send",
"message",
"to",
"the",
"device",
"when",
"a",
"response",
"or",
"ack",
"is",
"needed",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L253-L293 | train |
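The record above describes a bounded retry loop: re-send the packet, wait on an asyncio Event with a timeout, and give up after max_attempts. Below is a minimal standalone sketch of that same pattern; the function and argument names are illustrative only and are not part of aiolifx.

import asyncio as aio

async def send_with_retries(send_packet, ack_event, timeout_secs=0.5, max_attempts=3):
    # send_packet(): callable that fires one UDP datagram
    # ack_event: asyncio.Event set by the receive path when the ack/response arrives
    for _ in range(max_attempts):
        send_packet()
        try:
            await aio.wait_for(ack_event.wait(), timeout_secs)
            return True   # acknowledged within the timeout
        except aio.TimeoutError:
            continue      # re-send until the attempts are exhausted
    return False          # caller should treat the device as unreachable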
frawau/aiolifx | aiolifx/aiolifx.py | Device.req_with_ack | def req_with_ack(self, msg_type, payload, callb = None, timeout_secs=None, max_attempts=None):
"""Method to send a message expecting to receive an ACK.
:param msg_type: The type of the message to send, a subclass of aiolifx.Message
:type msg_type: class
:param payload: value to use when instantiating msg_type
:type payload: dict
:param callb: A callback that will be executed when the ACK is received in datagram_received
:type callb: callable
:param timeout_secs: Number of seconds to wait for an ack
:type timeout_secs: int
:param max_attempts: Maximum number of times the message is sent before giving up.
:type max_attempts: int
:returns: True
:rtype: bool
"""
msg = msg_type(self.mac_addr, self.source_id, seq_num=self.seq_next(), payload=payload, ack_requested=True, response_requested=False)
self.message[msg.seq_num]=[Acknowledgement,None,callb]
xx=self.loop.create_task(self.try_sending(msg,timeout_secs, max_attempts))
return True | python | def req_with_ack(self, msg_type, payload, callb = None, timeout_secs=None, max_attempts=None):
"""Method to send a message expecting to receive an ACK.
:param msg_type: The type of the message to send, a subclass of aiolifx.Message
:type msg_type: class
:param payload: value to use when instantiating msg_type
:type payload: dict
:param callb: A callback that will be executed when the ACK is received in datagram_received
:type callb: callable
:param timeout_secs: Number of seconds to wait for an ack
:type timeout_secs: int
:param max_attempts: Maximum number of times the message is sent before giving up.
:type max_attempts: int
:returns: True
:rtype: bool
"""
msg = msg_type(self.mac_addr, self.source_id, seq_num=self.seq_next(), payload=payload, ack_requested=True, response_requested=False)
self.message[msg.seq_num]=[Acknowledgement,None,callb]
xx=self.loop.create_task(self.try_sending(msg,timeout_secs, max_attempts))
return True | [
"def",
"req_with_ack",
"(",
"self",
",",
"msg_type",
",",
"payload",
",",
"callb",
"=",
"None",
",",
"timeout_secs",
"=",
"None",
",",
"max_attempts",
"=",
"None",
")",
":",
"msg",
"=",
"msg_type",
"(",
"self",
".",
"mac_addr",
",",
"self",
".",
"source_id",
",",
"seq_num",
"=",
"self",
".",
"seq_next",
"(",
")",
",",
"payload",
"=",
"payload",
",",
"ack_requested",
"=",
"True",
",",
"response_requested",
"=",
"False",
")",
"self",
".",
"message",
"[",
"msg",
".",
"seq_num",
"]",
"=",
"[",
"Acknowledgement",
",",
"None",
",",
"callb",
"]",
"xx",
"=",
"self",
".",
"loop",
".",
"create_task",
"(",
"self",
".",
"try_sending",
"(",
"msg",
",",
"timeout_secs",
",",
"max_attempts",
")",
")",
"return",
"True"
] | Method to send a message expecting to receive an ACK.
:param msg_type: The type of the message to send, a subclass of aiolifx.Message
:type msg_type: class
:param payload: value to use when instantiating msg_type
:type payload: dict
:param callb: A callback that will be executed when the ACK is received in datagram_received
:type callb: callable
:param timeout_secs: Number of seconds to wait for an ack
:type timeout_secs: int
:param max_attempts: Maximum number of times the message is sent before giving up.
:type max_attempts: int
:returns: True
:rtype: bool | [
"Method",
"to",
"send",
"a",
"message",
"expecting",
"to",
"receive",
"an",
"ACK",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L296-L315 | train |
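A hypothetical call site for req_with_ack, assuming bulb is a device obtained from discovery and that SetPower with a "power_level" payload is a valid message type for it (both are assumptions of this sketch, not guarantees of the record):

def on_ack(device, response):
    # response is an Acknowledgement instance, or None if every attempt timed out
    print("power change acknowledged:", response is not None)

# Queue the message; success or failure is reported through on_ack.
bulb.req_with_ack(SetPower, {"power_level": 65535}, callb=on_ack)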
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_label | def get_label(self,callb=None):
"""Convenience method to request the label from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.label is None:
mypartial=partial(self.resp_set_label)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetLabel, StateLabel, callb=mycallb )
return self.label | python | def get_label(self,callb=None):
"""Convenience method to request the label from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.label is None:
mypartial=partial(self.resp_set_label)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetLabel, StateLabel, callb=mycallb )
return self.label | [
"def",
"get_label",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"if",
"self",
".",
"label",
"is",
"None",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_label",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetLabel",
",",
"StateLabel",
",",
"callb",
"=",
"mycallb",
")",
"return",
"self",
".",
"label"
] | Convenience method to request the label from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str | [
"Convenience",
"method",
"to",
"request",
"the",
"label",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L367-L388 | train |
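A small sketch of the cache-or-fetch pattern the get_* helpers follow, assuming bulb is a previously discovered Light. The first call typically returns None, and the callback fires once the StateLabel response arrives:

def label_ready(device, resp):
    # The default callback has already cached the value on the device object.
    print("label:", device.label)

cached = bulb.get_label(callb=label_ready)
if cached is not None:
    print("already cached, no round-trip needed:", cached)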
frawau/aiolifx | aiolifx/aiolifx.py | Device.set_label | def set_label(self, value,callb=None):
"""Convenience method to set the label of the device
This method will send a SetLabel message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new label
:type value: str
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
if len(value) > 32:
value = value[:32]
mypartial=partial(self.resp_set_label,label=value)
if callb:
self.req_with_ack(SetLabel, {"label": value},lambda x,y:(mypartial(y),callb(x,y)) )
else:
self.req_with_ack(SetLabel, {"label": value},lambda x,y:mypartial(y) ) | python | def set_label(self, value,callb=None):
"""Convenience method to set the label of the device
This method will send a SetLabel message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new label
:type value: str
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
if len(value) > 32:
value = value[:32]
mypartial=partial(self.resp_set_label,label=value)
if callb:
self.req_with_ack(SetLabel, {"label": value},lambda x,y:(mypartial(y),callb(x,y)) )
else:
self.req_with_ack(SetLabel, {"label": value},lambda x,y:mypartial(y) ) | [
"def",
"set_label",
"(",
"self",
",",
"value",
",",
"callb",
"=",
"None",
")",
":",
"if",
"len",
"(",
"value",
")",
">",
"32",
":",
"value",
"=",
"value",
"[",
":",
"32",
"]",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_label",
",",
"label",
"=",
"value",
")",
"if",
"callb",
":",
"self",
".",
"req_with_ack",
"(",
"SetLabel",
",",
"{",
"\"label\"",
":",
"value",
"}",
",",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
")",
"else",
":",
"self",
".",
"req_with_ack",
"(",
"SetLabel",
",",
"{",
"\"label\"",
":",
"value",
"}",
",",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
")"
] | Convenience method to set the label of the device
This method will send a SetLabel message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new label
:type value: str
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"set",
"the",
"label",
"of",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L390-L410 | train |
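A short usage sketch for set_label (bulb assumed as above); the method itself truncates anything beyond 32 characters before sending SetLabel:

bulb.set_label("Living room floor lamp")  # short enough, sent as-is
bulb.set_label("An extremely long descriptive name for this particular bulb")  # silently cut to 32 characters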
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_location | def get_location(self,callb=None):
"""Convenience method to request the location from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.location is None:
mypartial=partial(self.resp_set_location)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetLocation, StateLocation,callb=mycallb )
return self.location | python | def get_location(self,callb=None):
"""Convenience method to request the location from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.location is None:
mypartial=partial(self.resp_set_location)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetLocation, StateLocation,callb=mycallb )
return self.location | [
"def",
"get_location",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"if",
"self",
".",
"location",
"is",
"None",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_location",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetLocation",
",",
"StateLocation",
",",
"callb",
"=",
"mycallb",
")",
"return",
"self",
".",
"location"
] | Convenience method to request the location from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str | [
"Convenience",
"method",
"to",
"request",
"the",
"location",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L420-L441 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_group | def get_group(self,callb=None):
"""Convenience method to request the group from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.group is None:
mypartial=partial(self.resp_set_group)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetGroup, StateGroup, callb=callb )
return self.group | python | def get_group(self,callb=None):
"""Convenience method to request the group from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.group is None:
mypartial=partial(self.resp_set_group)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetGroup, StateGroup, callb=callb )
return self.group | [
"def",
"get_group",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"if",
"self",
".",
"group",
"is",
"None",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_group",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetGroup",
",",
"StateGroup",
",",
"callb",
"=",
"callb",
")",
"return",
"self",
".",
"group"
] | Convenience method to request the group from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str | [
"Convenience",
"method",
"to",
"request",
"the",
"group",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L460-L481 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_wififirmware | def get_wififirmware(self,callb=None):
"""Convenience method to request the wifi firmware info from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value (version, timestamp)
:rtype: 2-tuple
"""
if self.wifi_firmware_version is None:
mypartial=partial(self.resp_set_wififirmware)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetWifiFirmware, StateWifiFirmware,mycallb )
return (self.wifi_firmware_version,self.wifi_firmware_build_timestamp) | python | def get_wififirmware(self,callb=None):
"""Convenience method to request the wifi firmware info from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value (version, timestamp)
:rtype: 2-tuple
"""
if self.wifi_firmware_version is None:
mypartial=partial(self.resp_set_wififirmware)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetWifiFirmware, StateWifiFirmware,mycallb )
return (self.wifi_firmware_version,self.wifi_firmware_build_timestamp) | [
"def",
"get_wififirmware",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"if",
"self",
".",
"wifi_firmware_version",
"is",
"None",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_wififirmware",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetWifiFirmware",
",",
"StateWifiFirmware",
",",
"mycallb",
")",
"return",
"(",
"self",
".",
"wifi_firmware_version",
",",
"self",
".",
"wifi_firmware_build_timestamp",
")"
] | Convenience method to request the wifi firmware info from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value (version, timestamp)
:rtype: 2-tuple | [
"Convenience",
"method",
"to",
"request",
"the",
"wifi",
"firmware",
"info",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L560-L581 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.resp_set_wififirmware | def resp_set_wififirmware(self, resp):
"""Default callback for get_wififirmware
"""
if resp:
self.wifi_firmware_version = float(str(str(resp.version >> 16) + "." + str(resp.version & 0xff)))
self.wifi_firmware_build_timestamp = resp.build | python | def resp_set_wififirmware(self, resp):
"""Default callback for get_wififirmware
"""
if resp:
self.wifi_firmware_version = float(str(str(resp.version >> 16) + "." + str(resp.version & 0xff)))
self.wifi_firmware_build_timestamp = resp.build | [
"def",
"resp_set_wififirmware",
"(",
"self",
",",
"resp",
")",
":",
"if",
"resp",
":",
"self",
".",
"wifi_firmware_version",
"=",
"float",
"(",
"str",
"(",
"str",
"(",
"resp",
".",
"version",
">>",
"16",
")",
"+",
"\".\"",
"+",
"str",
"(",
"resp",
".",
"version",
"&",
"0xff",
")",
")",
")",
"self",
".",
"wifi_firmware_build_timestamp",
"=",
"resp",
".",
"build"
] | Default callback for get_wififirmware | [
"Default",
"callback",
"for",
"get_wififirmware"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L583-L588 | train |
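A worked example of the decoding performed above. The 32-bit version field carries the major number in the high 16 bits; with the masking and string concatenation used in resp_set_wififirmware, a minor value of 0x50 ends up as the fractional part:

version = 0x00020050
major = version >> 16            # 2
minor = version & 0xff           # 0x50 == 80
decoded = float(str(major) + "." + str(minor))
print(decoded)                   # 2.8  (the trailing zero of "2.80" disappears in the float)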
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_wifiinfo | def get_wifiinfo(self,callb=None):
"""Convenience method to request the wifi info from the device
This will request the information from the device and request that callb be executed
when a response is received. There is no default callback.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
response = self.req_with_resp(GetWifiInfo, StateWifiInfo,callb=callb )
return None | python | def get_wifiinfo(self,callb=None):
"""Convenience method to request the wifi info from the device
This will request the information from the device and request that callb be executed
when a response is received. There is no default callback.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
response = self.req_with_resp(GetWifiInfo, StateWifiInfo,callb=callb )
return None | [
"def",
"get_wifiinfo",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetWifiInfo",
",",
"StateWifiInfo",
",",
"callb",
"=",
"callb",
")",
"return",
"None"
] | Convenience method to request the wifi info from the device
This will request the information from the device and request that callb be executed
when a response is received. There is no default callback.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"request",
"the",
"wifi",
"info",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L591-L604 | train |
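Because get_wifiinfo has no default callback, the caller must supply one to see the StateWifiInfo payload. The field names below (signal, tx, rx) are assumed from the LIFX protocol and are not guaranteed by this record:

def wifi_ready(device, resp):
    if resp:
        print("signal:", resp.signal, "tx bytes:", resp.tx, "rx bytes:", resp.rx)

bulb.get_wifiinfo(callb=wifi_ready)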
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_hostfirmware | def get_hostfirmware(self,callb=None):
"""Convenience method to request the device firmware info from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.host_firmware_version is None:
mypartial=partial(self.resp_set_hostfirmware)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetHostFirmware, StateHostFirmware,mycallb )
return (self.host_firmware_version,self.host_firmware_build_timestamp) | python | def get_hostfirmware(self,callb=None):
"""Convenience method to request the device firmware info from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.host_firmware_version is None:
mypartial=partial(self.resp_set_hostfirmware)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetHostFirmware, StateHostFirmware,mycallb )
return (self.host_firmware_version,self.host_firmware_build_timestamp) | [
"def",
"get_hostfirmware",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"if",
"self",
".",
"host_firmware_version",
"is",
"None",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_hostfirmware",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetHostFirmware",
",",
"StateHostFirmware",
",",
"mycallb",
")",
"return",
"(",
"self",
".",
"host_firmware_version",
",",
"self",
".",
"host_firmware_build_timestamp",
")"
] | Convenience method to request the device firmware info from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str | [
"Convenience",
"method",
"to",
"request",
"the",
"device",
"firmware",
"info",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L607-L628 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.resp_set_hostfirmware | def resp_set_hostfirmware(self, resp):
"""Default callback for get_hostfirmware
"""
if resp:
self.host_firmware_version = float(str(str(resp.version >> 16) + "." + str(resp.version & 0xff)))
self.host_firmware_build_timestamp = resp.build | python | def resp_set_hostfirmware(self, resp):
"""Default callback for get_hostfirmware
"""
if resp:
self.host_firmware_version = float(str(str(resp.version >> 16) + "." + str(resp.version & 0xff)))
self.host_firmware_build_timestamp = resp.build | [
"def",
"resp_set_hostfirmware",
"(",
"self",
",",
"resp",
")",
":",
"if",
"resp",
":",
"self",
".",
"host_firmware_version",
"=",
"float",
"(",
"str",
"(",
"str",
"(",
"resp",
".",
"version",
">>",
"16",
")",
"+",
"\".\"",
"+",
"str",
"(",
"resp",
".",
"version",
"&",
"0xff",
")",
")",
")",
"self",
".",
"host_firmware_build_timestamp",
"=",
"resp",
".",
"build"
] | Default callback for get_hostfirmware | [
"Default",
"callback",
"for",
"get_hostfirmware"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L630-L635 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_hostinfo | def get_hostinfo(self,callb=None):
"""Convenience method to request the device info from the device
This will request the information from the device and request that callb be executed
when a response is received. There is no default callback.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
response = self.req_with_resp(GetInfo, StateInfo,callb=callb )
return None | python | def get_hostinfo(self,callb=None):
"""Convenience method to request the device info from the device
This will request the information from the device and request that callb be executed
when a response is received. There is no default callback.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
response = self.req_with_resp(GetInfo, StateInfo,callb=callb )
return None | [
"def",
"get_hostinfo",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetInfo",
",",
"StateInfo",
",",
"callb",
"=",
"callb",
")",
"return",
"None"
] | Convenience method to request the device info from the device
This will request the information from the device and request that callb be executed
when a response is received. There is no default callback.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"request",
"the",
"device",
"info",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L638-L651 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.get_version | def get_version(self,callb=None):
"""Convenience method to request the version from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.vendor is None:
mypartial=partial(self.resp_set_version)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetVersion, StateVersion,callb=mycallb )
return (self.host_firmware_version,self.host_firmware_build_timestamp) | python | def get_version(self,callb=None):
"""Convenience method to request the version from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str
"""
if self.vendor is None:
mypartial=partial(self.resp_set_version)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
response = self.req_with_resp(GetVersion, StateVersion,callb=mycallb )
return (self.host_firmware_version,self.host_firmware_build_timestamp) | [
"def",
"get_version",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"if",
"self",
".",
"vendor",
"is",
"None",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_version",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"GetVersion",
",",
"StateVersion",
",",
"callb",
"=",
"mycallb",
")",
"return",
"(",
"self",
".",
"host_firmware_version",
",",
"self",
".",
"host_firmware_build_timestamp",
")"
] | Convenience method to request the version from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: str | [
"Convenience",
"method",
"to",
"request",
"the",
"version",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L653-L674 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Device.resp_set_version | def resp_set_version(self, resp):
"""Default callback for get_version
"""
if resp:
self.vendor = resp.vendor
self.product = resp.product
self.version = resp.version | python | def resp_set_version(self, resp):
"""Default callback for get_version
"""
if resp:
self.vendor = resp.vendor
self.product = resp.product
self.version = resp.version | [
"def",
"resp_set_version",
"(",
"self",
",",
"resp",
")",
":",
"if",
"resp",
":",
"self",
".",
"vendor",
"=",
"resp",
".",
"vendor",
"self",
".",
"product",
"=",
"resp",
".",
"product",
"self",
".",
"version",
"=",
"resp",
".",
"version"
] | Default callback for get_version | [
"Default",
"callback",
"for",
"get_version"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L676-L682 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Light.resp_set_lightpower | def resp_set_lightpower(self, resp, power_level=None):
"""Default callback for set_power
"""
if power_level is not None:
self.power_level=power_level
elif resp:
self.power_level=resp.power_level | python | def resp_set_lightpower(self, resp, power_level=None):
"""Default callback for set_power
"""
if power_level is not None:
self.power_level=power_level
elif resp:
self.power_level=resp.power_level | [
"def",
"resp_set_lightpower",
"(",
"self",
",",
"resp",
",",
"power_level",
"=",
"None",
")",
":",
"if",
"power_level",
"is",
"not",
"None",
":",
"self",
".",
"power_level",
"=",
"power_level",
"elif",
"resp",
":",
"self",
".",
"power_level",
"=",
"resp",
".",
"power_level"
] | Default callback for set_power | [
"Default",
"callback",
"for",
"set_power"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L834-L840 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Light.get_color | def get_color(self,callb=None):
"""Convenience method to request the colour status from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int
"""
response = self.req_with_resp(LightGet, LightState, callb=callb)
return self.color | python | def get_color(self,callb=None):
"""Convenience method to request the colour status from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int
"""
response = self.req_with_resp(LightGet, LightState, callb=callb)
return self.color | [
"def",
"get_color",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"LightGet",
",",
"LightState",
",",
"callb",
"=",
"callb",
")",
"return",
"self",
".",
"color"
] | Convenience method to request the colour status from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int | [
"Convenience",
"method",
"to",
"request",
"the",
"colour",
"status",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L843-L858 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Light.set_color | def set_color(self, value, callb=None, duration=0, rapid=False):
"""Convenience method to set the colour status of the device
This method will send a LightSetColor message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new state, a dictionary of int with 4 keys: Hue, Saturation, Brightness, Kelvin
:type value: dict
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None
"""
if len(value) == 4:
mypartial=partial(self.resp_set_light,color=value)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
#try:
if rapid:
self.fire_and_forget(LightSetColor, {"color": value, "duration": duration}, num_repeats=1)
self.resp_set_light(None,color=value)
if callb:
callb(self,None)
else:
self.req_with_ack(LightSetColor, {"color": value, "duration": duration},callb=mycallb) | python | def set_color(self, value, callb=None, duration=0, rapid=False):
"""Convenience method to set the colour status of the device
This method will send a LightSetColor message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new state, a dictionary of int with 4 keys: Hue, Saturation, Brightness, Kelvin
:type value: dict
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None
"""
if len(value) == 4:
mypartial=partial(self.resp_set_light,color=value)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
#try:
if rapid:
self.fire_and_forget(LightSetColor, {"color": value, "duration": duration}, num_repeats=1)
self.resp_set_light(None,color=value)
if callb:
callb(self,None)
else:
self.req_with_ack(LightSetColor, {"color": value, "duration": duration},callb=mycallb) | [
"def",
"set_color",
"(",
"self",
",",
"value",
",",
"callb",
"=",
"None",
",",
"duration",
"=",
"0",
",",
"rapid",
"=",
"False",
")",
":",
"if",
"len",
"(",
"value",
")",
"==",
"4",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_light",
",",
"color",
"=",
"value",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"#try:",
"if",
"rapid",
":",
"self",
".",
"fire_and_forget",
"(",
"LightSetColor",
",",
"{",
"\"color\"",
":",
"value",
",",
"\"duration\"",
":",
"duration",
"}",
",",
"num_repeats",
"=",
"1",
")",
"self",
".",
"resp_set_light",
"(",
"None",
",",
"color",
"=",
"value",
")",
"if",
"callb",
":",
"callb",
"(",
"self",
",",
"None",
")",
"else",
":",
"self",
".",
"req_with_ack",
"(",
"LightSetColor",
",",
"{",
"\"color\"",
":",
"value",
",",
"\"duration\"",
":",
"duration",
"}",
",",
"callb",
"=",
"mycallb",
")"
] | Convenience method to set the colour status of the device
This method will send a LightSetColor message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new state, a dictionary of int with 4 keys: Hue, Saturation, Brightness, Kelvin
:type value: dict
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"set",
"the",
"colour",
"status",
"of",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L861-L892 | train |
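A usage sketch for set_color, assuming bulb is a Light. The docstring above describes the value as a 4-entry HSBK mapping; in practice a 4-element sequence in Hue, Saturation, Brightness, Kelvin order also satisfies the len(value) == 4 check, and the 16-bit ranges used here are an assumption of this sketch:

warm_white = [0, 0, 45000, 3000]        # H, S, B as 16-bit ints, K as a colour temperature
bulb.set_color(warm_white, duration=1)  # transition duration as documented above

def done(device, resp):
    print("new colour cached:", device.color)  # the default callback already updated it

bulb.set_color([21845, 65535, 30000, 3500], callb=done)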
frawau/aiolifx | aiolifx/aiolifx.py | Light.resp_set_light | def resp_set_light(self, resp, color=None):
"""Default callback for set_color
"""
if color:
self.color=color
elif resp:
self.power_level = resp.power_level
self.color = resp.color
self.label = resp.label.decode().replace("\x00", "") | python | def resp_set_light(self, resp, color=None):
"""Default callback for set_color
"""
if color:
self.color=color
elif resp:
self.power_level = resp.power_level
self.color = resp.color
self.label = resp.label.decode().replace("\x00", "") | [
"def",
"resp_set_light",
"(",
"self",
",",
"resp",
",",
"color",
"=",
"None",
")",
":",
"if",
"color",
":",
"self",
".",
"color",
"=",
"color",
"elif",
"resp",
":",
"self",
".",
"power_level",
"=",
"resp",
".",
"power_level",
"self",
".",
"color",
"=",
"resp",
".",
"color",
"self",
".",
"label",
"=",
"resp",
".",
"label",
".",
"decode",
"(",
")",
".",
"replace",
"(",
"\"\\x00\"",
",",
"\"\"",
")"
] | Default callback for set_color | [
"Default",
"callback",
"for",
"set_color"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L897-L905 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Light.get_color_zones | def get_color_zones(self, start_index, end_index=None, callb=None):
"""Convenience method to request the state of colour by zones from the device
This method will request the information from the device and request that callb
be executed when a response is received.
:param start_index: Index of the start of the zone of interest
:type start_index: int
:param end_index: Index of the end of the zone of interest. By default start_index+7
:type end_index: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
if end_index is None:
end_index = start_index + 7
args = {
"start_index": start_index,
"end_index": end_index,
}
self.req_with_resp(MultiZoneGetColorZones, MultiZoneStateMultiZone, payload=args, callb=callb) | python | def get_color_zones(self, start_index, end_index=None, callb=None):
"""Convenience method to request the state of colour by zones from the device
This method will request the information from the device and request that callb
be executed when a response is received.
:param start_index: Index of the start of the zone of interest
:type start_index: int
:param end_index: Index of the end of the zone of interest. By default start_index+7
:type end_index: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
if end_index is None:
end_index = start_index + 7
args = {
"start_index": start_index,
"end_index": end_index,
}
self.req_with_resp(MultiZoneGetColorZones, MultiZoneStateMultiZone, payload=args, callb=callb) | [
"def",
"get_color_zones",
"(",
"self",
",",
"start_index",
",",
"end_index",
"=",
"None",
",",
"callb",
"=",
"None",
")",
":",
"if",
"end_index",
"is",
"None",
":",
"end_index",
"=",
"start_index",
"+",
"7",
"args",
"=",
"{",
"\"start_index\"",
":",
"start_index",
",",
"\"end_index\"",
":",
"end_index",
",",
"}",
"self",
".",
"req_with_resp",
"(",
"MultiZoneGetColorZones",
",",
"MultiZoneStateMultiZone",
",",
"payload",
"=",
"args",
",",
"callb",
"=",
"callb",
")"
] | Convenience method to request the state of colour by zones from the device
This method will request the information from the device and request that callb
be executed when a response is received.
:param start_index: Index of the start of the zone of interest
:type start_index: int
:param end_index: Index of the end of the zone of interest. By default start_index+7
:type end_index: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"request",
"the",
"state",
"of",
"colour",
"by",
"zones",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L908-L930 | train |
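A sketch for a multizone device (strip is assumed). Leaving end_index unset queries the default eight-zone window starting at start_index; the resp attributes used in the callback are assumptions about MultiZoneStateMultiZone, not guarantees of this record:

def zones_ready(device, resp):
    if resp:
        print("zones from index", resp.index, ":", resp.color)

strip.get_color_zones(0, callb=zones_ready)                 # zones 0..7
strip.get_color_zones(8, end_index=15, callb=zones_ready)   # zones 8..15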
frawau/aiolifx | aiolifx/aiolifx.py | Light.set_color_zones | def set_color_zones(self, start_index, end_index, color, duration=0, apply=1, callb=None, rapid=False):
"""Convenience method to set the colour status zone of the device
This method will send a MultiZoneSetColorZones message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param start_index: Index of the start of the zone of interest
:type start_index: int
:param end_index: Index of the end of the zone of interest. By default start_index+7
:type end_index: int
:param apply: Indicates if the colour change is to be applied or memorized. Default: 1
:type apply: int
:param value: The new state, a dictionary of int with 4 keys: Hue, Saturation, Brightness, Kelvin
:type value: dict
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None
"""
if len(color) == 4:
args = {
"start_index": start_index,
"end_index": end_index,
"color": color,
"duration": duration,
"apply": apply,
}
mypartial=partial(self.resp_set_multizonemultizone, args=args)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
if rapid:
self.fire_and_forget(MultiZoneSetColorZones, args, num_repeats=1)
mycallb(self, None)
else:
self.req_with_ack(MultiZoneSetColorZones, args, callb=mycallb) | python | def set_color_zones(self, start_index, end_index, color, duration=0, apply=1, callb=None, rapid=False):
"""Convenience method to set the colour status zone of the device
This method will send a MultiZoneSetColorZones message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param start_index: Index of the start of the zone of interest
:type start_index: int
:param end_index: Index of the end of the zone of interest. By default start_index+7
:type end_index: int
:param apply: Indicates if the colour change is to be applied or memorized. Default: 1
:type apply: int
:param value: The new state, a dictionary of int with 4 keys: Hue, Saturation, Brightness, Kelvin
:type value: dict
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None
"""
if len(color) == 4:
args = {
"start_index": start_index,
"end_index": end_index,
"color": color,
"duration": duration,
"apply": apply,
}
mypartial=partial(self.resp_set_multizonemultizone, args=args)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
if rapid:
self.fire_and_forget(MultiZoneSetColorZones, args, num_repeats=1)
mycallb(self, None)
else:
self.req_with_ack(MultiZoneSetColorZones, args, callb=mycallb) | [
"def",
"set_color_zones",
"(",
"self",
",",
"start_index",
",",
"end_index",
",",
"color",
",",
"duration",
"=",
"0",
",",
"apply",
"=",
"1",
",",
"callb",
"=",
"None",
",",
"rapid",
"=",
"False",
")",
":",
"if",
"len",
"(",
"color",
")",
"==",
"4",
":",
"args",
"=",
"{",
"\"start_index\"",
":",
"start_index",
",",
"\"end_index\"",
":",
"end_index",
",",
"\"color\"",
":",
"color",
",",
"\"duration\"",
":",
"duration",
",",
"\"apply\"",
":",
"apply",
",",
"}",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_multizonemultizone",
",",
"args",
"=",
"args",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"if",
"rapid",
":",
"self",
".",
"fire_and_forget",
"(",
"MultiZoneSetColorZones",
",",
"args",
",",
"num_repeats",
"=",
"1",
")",
"mycallb",
"(",
"self",
",",
"None",
")",
"else",
":",
"self",
".",
"req_with_ack",
"(",
"MultiZoneSetColorZones",
",",
"args",
",",
"callb",
"=",
"mycallb",
")"
] | Convenience method to set the colour status zone of the device
This method will send a MultiZoneSetColorZones message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param start_index: Index of the start of the zone of interest
:type start_index: int
:param end_index: Index of the end of the zone of interest. By default start_index+7
:type end_index: int
:param apply: Indicates if the colour change is to be applied or memorized. Default: 1
:type apply: int
:param value: The new state, a dictionary of int with 4 keys: Hue, Saturation, Brightness, Kelvin
:type value: dict
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"set",
"the",
"colour",
"status",
"zone",
"of",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L932-L975 | train |
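A sketch that paints two blocks of zones and applies both at once, assuming strip is a multizone Light and that colours are HSBK 4-element sequences as in the set_color sketch; apply=0 buffers a change and apply=1 applies everything, per the docstring above:

red = [0, 65535, 40000, 3500]
blue = [43690, 65535, 40000, 3500]

strip.set_color_zones(0, 7, red, apply=0)     # buffer the first half
strip.set_color_zones(8, 15, blue, apply=1)   # second half, then apply the whole strip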
frawau/aiolifx | aiolifx/aiolifx.py | Light.get_infrared | def get_infrared(self,callb=None):
"""Convenience method to request the infrared brightness from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int
"""
response = self.req_with_resp(LightGetInfrared, LightStateInfrared,callb=callb)
return self.infrared_brightness | python | def get_infrared(self,callb=None):
"""Convenience method to request the infrared brightness from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int
"""
response = self.req_with_resp(LightGetInfrared, LightStateInfrared,callb=callb)
return self.infrared_brightness | [
"def",
"get_infrared",
"(",
"self",
",",
"callb",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"req_with_resp",
"(",
"LightGetInfrared",
",",
"LightStateInfrared",
",",
"callb",
"=",
"callb",
")",
"return",
"self",
".",
"infrared_brightness"
] | Convenience method to request the infrared brightness from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int | [
"Convenience",
"method",
"to",
"request",
"the",
"infrared",
"brightness",
"from",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1045-L1060 | train |
frawau/aiolifx | aiolifx/aiolifx.py | Light.set_infrared | def set_infrared(self, infrared_brightness, callb=None, rapid=False):
"""Convenience method to set the infrared status of the device
This method will send a LightSetInfrared message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param infrared_brightness: The new state
:type infrared_brightness: int
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None
"""
mypartial=partial(self.resp_set_infrared,infrared_brightness=infrared_brightness)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
if rapid:
self.fire_and_forget(LightSetInfrared, {"infrared_brightness": infrared_brightness}, num_repeats=1)
self.resp_set_infrared(None,infrared_brightness=infrared_brightness)
if callb:
callb(self,None)
else:
self.req_with_ack(LightSetInfrared, {"infrared_brightness": infrared_brightness}, callb=mycallb) | python | def set_infrared(self, infrared_brightness, callb=None, rapid=False):
"""Convenience method to set the infrared status of the device
This method will send a LightSetInfrared message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param infrared_brightness: The new state
:type infrared_brightness: int
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None
"""
mypartial=partial(self.resp_set_infrared,infrared_brightness=infrared_brightness)
if callb:
mycallb=lambda x,y:(mypartial(y),callb(x,y))
else:
mycallb=lambda x,y:mypartial(y)
if rapid:
self.fire_and_forget(LightSetInfrared, {"infrared_brightness": infrared_brightness}, num_repeats=1)
self.resp_set_infrared(None,infrared_brightness=infrared_brightness)
if callb:
callb(self,None)
else:
self.req_with_ack(LightSetInfrared, {"infrared_brightness": infrared_brightness}, callb=mycallb) | [
"def",
"set_infrared",
"(",
"self",
",",
"infrared_brightness",
",",
"callb",
"=",
"None",
",",
"rapid",
"=",
"False",
")",
":",
"mypartial",
"=",
"partial",
"(",
"self",
".",
"resp_set_infrared",
",",
"infrared_brightness",
"=",
"infrared_brightness",
")",
"if",
"callb",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"(",
"mypartial",
"(",
"y",
")",
",",
"callb",
"(",
"x",
",",
"y",
")",
")",
"else",
":",
"mycallb",
"=",
"lambda",
"x",
",",
"y",
":",
"mypartial",
"(",
"y",
")",
"if",
"rapid",
":",
"self",
".",
"fire_and_forget",
"(",
"LightSetInfrared",
",",
"{",
"\"infrared_brightness\"",
":",
"infrared_brightness",
"}",
",",
"num_repeats",
"=",
"1",
")",
"self",
".",
"resp_set_infrared",
"(",
"None",
",",
"infrared_brightness",
"=",
"infrared_brightness",
")",
"if",
"callb",
":",
"callb",
"(",
"self",
",",
"None",
")",
"else",
":",
"self",
".",
"req_with_ack",
"(",
"LightSetInfrared",
",",
"{",
"\"infrared_brightness\"",
":",
"infrared_brightness",
"}",
",",
"callb",
"=",
"mycallb",
")"
] | Convenience method to set the infrared status of the device
This method will send a LightSetInfrared message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param infrared_brightness: The new state
:type infrared_brightness: int
:param duration: The duration, in seconds, of the power state transition.
:type duration: int
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:param rapid: Whether to ask for ack (False) or not (True). Default False
:type rapid: bool
:returns: None
:rtype: None | [
"Convenience",
"method",
"to",
"set",
"the",
"infrared",
"status",
"of",
"the",
"device"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1063-L1092 | train |
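A minimal usage sketch for the set_infrared record above: `bulb` is assumed to be an already-discovered aiolifx Light object, and the 0-65535 level range is an assumption based on LIFX uint16 fields rather than something stated in the record.

def set_infrared_example(bulb):
    # `bulb` is an assumed, already-discovered aiolifx Light instance.
    def on_infrared_set(device, response):
        # With rapid=False this fires when the ACK arrives; with rapid=True
        # it is called immediately and `response` is None.
        print("infrared level updated on", device.ip_addr)

    bulb.set_infrared(32768, callb=on_infrared_set)           # request an ACK
    bulb.set_infrared(0, callb=on_infrared_set, rapid=True)   # fire and forget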
frawau/aiolifx | aiolifx/aiolifx.py | LifxDiscovery.start | def start(self, listen_ip=LISTEN_IP, listen_port=0):
"""Start discovery task."""
coro = self.loop.create_datagram_endpoint(
lambda: self, local_addr=(listen_ip, listen_port))
self.task = self.loop.create_task(coro)
return self.task | python | def start(self, listen_ip=LISTEN_IP, listen_port=0):
"""Start discovery task."""
coro = self.loop.create_datagram_endpoint(
lambda: self, local_addr=(listen_ip, listen_port))
self.task = self.loop.create_task(coro)
return self.task | [
"def",
"start",
"(",
"self",
",",
"listen_ip",
"=",
"LISTEN_IP",
",",
"listen_port",
"=",
"0",
")",
":",
"coro",
"=",
"self",
".",
"loop",
".",
"create_datagram_endpoint",
"(",
"lambda",
":",
"self",
",",
"local_addr",
"=",
"(",
"listen_ip",
",",
"listen_port",
")",
")",
"self",
".",
"task",
"=",
"self",
".",
"loop",
".",
"create_task",
"(",
"coro",
")",
"return",
"self",
".",
"task"
] | Start discovery task. | [
"Start",
"discovery",
"task",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1150-L1156 | train |
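A hedged discovery sketch building on the start record above. The register()/unregister() callbacks on the parent object and the exact LifxDiscovery constructor arguments are assumptions inferred from how ScanManager is handed to LifxDiscovery in the scan record further below; they are not spelled out in this record.

import asyncio
from aiolifx.aiolifx import LifxDiscovery  # assumes aiolifx is installed

class MyBulbs:
    # Assumed parent contract: discovered Light objects are handed over via
    # register()/unregister().
    def __init__(self):
        self.bulbs = []
    def register(self, light):
        self.bulbs.append(light)
        print("registered", light.mac_addr, "at", light.ip_addr)
    def unregister(self, light):
        self.bulbs.remove(light)

loop = asyncio.new_event_loop()
bulbs = MyBulbs()
discovery = LifxDiscovery(loop, bulbs)
discovery.start(listen_ip="0.0.0.0")   # schedules the UDP listener task
loop.call_later(10, loop.stop)         # run the sketch for ten seconds
try:
    loop.run_forever()
finally:
    discovery.cleanup()
    loop.close()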
frawau/aiolifx | aiolifx/aiolifx.py | LifxDiscovery.connection_made | def connection_made(self, transport):
"""Method run when the UDP broadcast server is started
"""
#print('started')
self.transport = transport
sock = self.transport.get_extra_info("socket")
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
self.loop.call_soon(self.discover) | python | def connection_made(self, transport):
"""Method run when the UDP broadcast server is started
"""
#print('started')
self.transport = transport
sock = self.transport.get_extra_info("socket")
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
self.loop.call_soon(self.discover) | [
"def",
"connection_made",
"(",
"self",
",",
"transport",
")",
":",
"#print('started')",
"self",
".",
"transport",
"=",
"transport",
"sock",
"=",
"self",
".",
"transport",
".",
"get_extra_info",
"(",
"\"socket\"",
")",
"sock",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"sock",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_BROADCAST",
",",
"1",
")",
"self",
".",
"loop",
".",
"call_soon",
"(",
"self",
".",
"discover",
")"
] | Method run when the UDP broadcast server is started | [
"Method",
"run",
"when",
"the",
"UDP",
"broadcast",
"server",
"is",
"started"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1158-L1166 | train |
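The connection_made record above follows the standard asyncio datagram pattern: fetch the socket from the transport and enable UDP broadcast on it. Below is a self-contained stdlib sketch of that pattern, independent of aiolifx; the 56700 port value is an assumption (the record only references UDP_BROADCAST_PORT).

import asyncio
import socket

class BroadcastProtocol(asyncio.DatagramProtocol):
    def connection_made(self, transport):
        # Same idea as above: configure the underlying socket for broadcast.
        self.transport = transport
        sock = transport.get_extra_info("socket")
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)

    def datagram_received(self, data, addr):
        print("received", len(data), "bytes from", addr)

async def main():
    loop = asyncio.get_running_loop()
    transport, _ = await loop.create_datagram_endpoint(
        BroadcastProtocol, local_addr=("0.0.0.0", 0))
    # 56700 is assumed to be the LIFX LAN UDP port behind UDP_BROADCAST_PORT.
    transport.sendto(b"ping", ("255.255.255.255", 56700))
    await asyncio.sleep(1)
    transport.close()

asyncio.run(main())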
frawau/aiolifx | aiolifx/aiolifx.py | LifxDiscovery.datagram_received | def datagram_received(self, data, addr):
"""Method run when data is received from the devices
This method will unpack the data according to the LIFX protocol.
If a new device is found, the Light device will be created and started as
a DatagramProtocol and will be registered with the parent.
:param data: raw data
:type data: bytestring
:param addr: sender IP address 2-tuple for IPv4, 4-tuple for IPv6
:type addr: tuple
"""
response = unpack_lifx_message(data)
response.ip_addr = addr[0]
mac_addr = response.target_addr
if mac_addr == BROADCAST_MAC:
return
if type(response) == StateService and response.service == 1: # only look for UDP services
# discovered
remote_port = response.port
elif type(response) == LightState:
# looks like the lights are volunteering LightState after booting
remote_port = UDP_BROADCAST_PORT
else:
return
if self.ipv6prefix:
family = socket.AF_INET6
remote_ip = mac_to_ipv6_linklocal(mac_addr, self.ipv6prefix)
else:
family = socket.AF_INET
remote_ip = response.ip_addr
if mac_addr in self.lights:
# rediscovered
light = self.lights[mac_addr]
# nothing to do
if light.registered:
return
light.cleanup()
light.ip_addr = remote_ip
light.port = remote_port
else:
# newly discovered
light = Light(self.loop, mac_addr, remote_ip, remote_port, parent=self)
self.lights[mac_addr] = light
coro = self.loop.create_datagram_endpoint(
lambda: light, family=family, remote_addr=(remote_ip, remote_port))
light.task = self.loop.create_task(coro) | python | def datagram_received(self, data, addr):
"""Method run when data is received from the devices
This method will unpack the data according to the LIFX protocol.
If a new device is found, the Light device will be created and started as
a DatagramProtocol and will be registered with the parent.
:param data: raw data
:type data: bytestring
:param addr: sender IP address 2-tuple for IPv4, 4-tuple for IPv6
:type addr: tuple
"""
response = unpack_lifx_message(data)
response.ip_addr = addr[0]
mac_addr = response.target_addr
if mac_addr == BROADCAST_MAC:
return
if type(response) == StateService and response.service == 1: # only look for UDP services
# discovered
remote_port = response.port
elif type(response) == LightState:
# looks like the lights are volunteering LightState after booting
remote_port = UDP_BROADCAST_PORT
else:
return
if self.ipv6prefix:
family = socket.AF_INET6
remote_ip = mac_to_ipv6_linklocal(mac_addr, self.ipv6prefix)
else:
family = socket.AF_INET
remote_ip = response.ip_addr
if mac_addr in self.lights:
# rediscovered
light = self.lights[mac_addr]
# nothing to do
if light.registered:
return
light.cleanup()
light.ip_addr = remote_ip
light.port = remote_port
else:
# newly discovered
light = Light(self.loop, mac_addr, remote_ip, remote_port, parent=self)
self.lights[mac_addr] = light
coro = self.loop.create_datagram_endpoint(
lambda: light, family=family, remote_addr=(remote_ip, remote_port))
light.task = self.loop.create_task(coro) | [
"def",
"datagram_received",
"(",
"self",
",",
"data",
",",
"addr",
")",
":",
"response",
"=",
"unpack_lifx_message",
"(",
"data",
")",
"response",
".",
"ip_addr",
"=",
"addr",
"[",
"0",
"]",
"mac_addr",
"=",
"response",
".",
"target_addr",
"if",
"mac_addr",
"==",
"BROADCAST_MAC",
":",
"return",
"if",
"type",
"(",
"response",
")",
"==",
"StateService",
"and",
"response",
".",
"service",
"==",
"1",
":",
"# only look for UDP services",
"# discovered",
"remote_port",
"=",
"response",
".",
"port",
"elif",
"type",
"(",
"response",
")",
"==",
"LightState",
":",
"# looks like the lights are volunteering LigthState after booting",
"remote_port",
"=",
"UDP_BROADCAST_PORT",
"else",
":",
"return",
"if",
"self",
".",
"ipv6prefix",
":",
"family",
"=",
"socket",
".",
"AF_INET6",
"remote_ip",
"=",
"mac_to_ipv6_linklocal",
"(",
"mac_addr",
",",
"self",
".",
"ipv6prefix",
")",
"else",
":",
"family",
"=",
"socket",
".",
"AF_INET",
"remote_ip",
"=",
"response",
".",
"ip_addr",
"if",
"mac_addr",
"in",
"self",
".",
"lights",
":",
"# rediscovered",
"light",
"=",
"self",
".",
"lights",
"[",
"mac_addr",
"]",
"# nothing to do",
"if",
"light",
".",
"registered",
":",
"return",
"light",
".",
"cleanup",
"(",
")",
"light",
".",
"ip_addr",
"=",
"remote_ip",
"light",
".",
"port",
"=",
"remote_port",
"else",
":",
"# newly discovered",
"light",
"=",
"Light",
"(",
"self",
".",
"loop",
",",
"mac_addr",
",",
"remote_ip",
",",
"remote_port",
",",
"parent",
"=",
"self",
")",
"self",
".",
"lights",
"[",
"mac_addr",
"]",
"=",
"light",
"coro",
"=",
"self",
".",
"loop",
".",
"create_datagram_endpoint",
"(",
"lambda",
":",
"light",
",",
"family",
"=",
"family",
",",
"remote_addr",
"=",
"(",
"remote_ip",
",",
"remote_port",
")",
")",
"light",
".",
"task",
"=",
"self",
".",
"loop",
".",
"create_task",
"(",
"coro",
")"
] | Method run when data is received from the devices
This method will unpack the data according to the LIFX protocol.
If a new device is found, the Light device will be created and started as
a DatagramProtocol and will be registered with the parent.
:param data: raw data
:type data: bytestring
:param addr: sender IP address 2-tuple for IPv4, 4-tuple for IPv6
:type addr: tuple | [
"Method",
"run",
"when",
"data",
"is",
"received",
"from",
"the",
"devices"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1168-L1222 | train |
frawau/aiolifx | aiolifx/aiolifx.py | LifxDiscovery.discover | def discover(self):
"""Method to send a discovery message
"""
if self.transport:
if self.discovery_countdown <= 0:
self.discovery_countdown = self.discovery_interval
msg = GetService(BROADCAST_MAC, self.source_id, seq_num=0, payload={}, ack_requested=False, response_requested=True)
self.transport.sendto(msg.generate_packed_message(), (self.broadcast_ip, UDP_BROADCAST_PORT))
else:
self.discovery_countdown -= self.discovery_step
self.loop.call_later(self.discovery_step, self.discover) | python | def discover(self):
"""Method to send a discovery message
"""
if self.transport:
if self.discovery_countdown <= 0:
self.discovery_countdown = self.discovery_interval
msg = GetService(BROADCAST_MAC, self.source_id, seq_num=0, payload={}, ack_requested=False, response_requested=True)
self.transport.sendto(msg.generate_packed_message(), (self.broadcast_ip, UDP_BROADCAST_PORT))
else:
self.discovery_countdown -= self.discovery_step
self.loop.call_later(self.discovery_step, self.discover) | [
"def",
"discover",
"(",
"self",
")",
":",
"if",
"self",
".",
"transport",
":",
"if",
"self",
".",
"discovery_countdown",
"<=",
"0",
":",
"self",
".",
"discovery_countdown",
"=",
"self",
".",
"discovery_interval",
"msg",
"=",
"GetService",
"(",
"BROADCAST_MAC",
",",
"self",
".",
"source_id",
",",
"seq_num",
"=",
"0",
",",
"payload",
"=",
"{",
"}",
",",
"ack_requested",
"=",
"False",
",",
"response_requested",
"=",
"True",
")",
"self",
".",
"transport",
".",
"sendto",
"(",
"msg",
".",
"generate_packed_message",
"(",
")",
",",
"(",
"self",
".",
"broadcast_ip",
",",
"UDP_BROADCAST_PORT",
")",
")",
"else",
":",
"self",
".",
"discovery_countdown",
"-=",
"self",
".",
"discovery_step",
"self",
".",
"loop",
".",
"call_later",
"(",
"self",
".",
"discovery_step",
",",
"self",
".",
"discover",
")"
] | Method to send a discovery message | [
"Method",
"to",
"send",
"a",
"discovery",
"message"
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1224-L1234 | train |
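The discover record above reschedules itself with loop.call_later and only broadcasts once its countdown reaches zero. Here is a generic, aiolifx-independent sketch of that countdown pattern; the names and timing values are purely illustrative.

import asyncio

def schedule_countdown(loop, step, interval, action):
    # The callback re-arms itself every `step` seconds and runs `action`
    # only when the countdown has run out, then resets the countdown.
    state = {"countdown": 0}
    def tick():
        if state["countdown"] <= 0:
            state["countdown"] = interval
            action()
        else:
            state["countdown"] -= step
        loop.call_later(step, tick)
    tick()

loop = asyncio.new_event_loop()
schedule_countdown(loop, step=0.25, interval=1.0, action=lambda: print("discover"))
loop.call_later(3, loop.stop)   # let the sketch run for three seconds
loop.run_forever()
loop.close()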
frawau/aiolifx | aiolifx/aiolifx.py | LifxScan.scan | async def scan(self, timeout=1):
"""Return a list of local IP addresses on interfaces with LIFX bulbs."""
adapters = await self.loop.run_in_executor(None, ifaddr.get_adapters)
ips = [ip.ip for adapter in adapters for ip in adapter.ips if ip.is_IPv4]
if not ips:
return []
tasks = []
discoveries = []
for ip in ips:
manager = ScanManager(ip)
lifx_discovery = LifxDiscovery(self.loop, manager)
discoveries.append(lifx_discovery)
lifx_discovery.start(listen_ip=ip)
tasks.append(self.loop.create_task(manager.lifx_ip()))
(done, pending) = await aio.wait(tasks, timeout=timeout)
for discovery in discoveries:
discovery.cleanup()
for task in pending:
task.cancel()
return [task.result() for task in done] | python | async def scan(self, timeout=1):
"""Return a list of local IP addresses on interfaces with LIFX bulbs."""
adapters = await self.loop.run_in_executor(None, ifaddr.get_adapters)
ips = [ip.ip for adapter in adapters for ip in adapter.ips if ip.is_IPv4]
if not ips:
return []
tasks = []
discoveries = []
for ip in ips:
manager = ScanManager(ip)
lifx_discovery = LifxDiscovery(self.loop, manager)
discoveries.append(lifx_discovery)
lifx_discovery.start(listen_ip=ip)
tasks.append(self.loop.create_task(manager.lifx_ip()))
(done, pending) = await aio.wait(tasks, timeout=timeout)
for discovery in discoveries:
discovery.cleanup()
for task in pending:
task.cancel()
return [task.result() for task in done] | [
"async",
"def",
"scan",
"(",
"self",
",",
"timeout",
"=",
"1",
")",
":",
"adapters",
"=",
"await",
"self",
".",
"loop",
".",
"run_in_executor",
"(",
"None",
",",
"ifaddr",
".",
"get_adapters",
")",
"ips",
"=",
"[",
"ip",
".",
"ip",
"for",
"adapter",
"in",
"ifaddr",
".",
"get_adapters",
"(",
")",
"for",
"ip",
"in",
"adapter",
".",
"ips",
"if",
"ip",
".",
"is_IPv4",
"]",
"if",
"not",
"ips",
":",
"return",
"[",
"]",
"tasks",
"=",
"[",
"]",
"discoveries",
"=",
"[",
"]",
"for",
"ip",
"in",
"ips",
":",
"manager",
"=",
"ScanManager",
"(",
"ip",
")",
"lifx_discovery",
"=",
"LifxDiscovery",
"(",
"self",
".",
"loop",
",",
"manager",
")",
"discoveries",
".",
"append",
"(",
"lifx_discovery",
")",
"lifx_discovery",
".",
"start",
"(",
"listen_ip",
"=",
"ip",
")",
"tasks",
".",
"append",
"(",
"self",
".",
"loop",
".",
"create_task",
"(",
"manager",
".",
"lifx_ip",
"(",
")",
")",
")",
"(",
"done",
",",
"pending",
")",
"=",
"await",
"aio",
".",
"wait",
"(",
"tasks",
",",
"timeout",
"=",
"timeout",
")",
"for",
"discovery",
"in",
"discoveries",
":",
"discovery",
".",
"cleanup",
"(",
")",
"for",
"task",
"in",
"pending",
":",
"task",
".",
"cancel",
"(",
")",
"return",
"[",
"task",
".",
"result",
"(",
")",
"for",
"task",
"in",
"done",
"]"
] | Return a list of local IP addresses on interfaces with LIFX bulbs. | [
"Return",
"a",
"list",
"of",
"local",
"IP",
"addresses",
"on",
"interfaces",
"with",
"LIFX",
"bulbs",
"."
] | 9bd8c5e6d291f4c79314989402f7e2c6476d5851 | https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1269-L1294 | train |
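A usage sketch for the scan record above. The LifxScan(loop) constructor signature is inferred from the method's use of self.loop and is therefore an assumption; aiolifx and ifaddr must be installed for it to run.

import asyncio
from aiolifx.aiolifx import LifxScan  # assumes aiolifx (and ifaddr) are installed

loop = asyncio.new_event_loop()
ips = loop.run_until_complete(LifxScan(loop).scan(timeout=2))
print(ips or "no LIFX bulbs answered on any local IPv4 interface")
loop.close()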
geophysics-ubonn/reda | lib/reda/importers/eit_fzj.py | _get_file_version | def _get_file_version(filename):
"""High level import function that tries to determine the specific version
of the data format used.
Parameters
----------
filename: string
File path to a .mat matlab filename, as produced by the various
versions of the emmt_pp.exe postprocessing program.
Returns
-------
version: string
a sanitized version of the file format version
"""
mat = sio.loadmat(filename, squeeze_me=True)
version = mat['MP']['Version'].item()
del(mat)
return version | python | def _get_file_version(filename):
"""High level import function that tries to determine the specific version
of the data format used.
Parameters
----------
filename: string
File path to a .mat matlab filename, as produced by the various
versions of the emmt_pp.exe postprocessing program.
Returns
-------
version: string
a sanitized version of the file format version
"""
mat = sio.loadmat(filename, squeeze_me=True)
version = mat['MP']['Version'].item()
del(mat)
return version | [
"def",
"_get_file_version",
"(",
"filename",
")",
":",
"mat",
"=",
"sio",
".",
"loadmat",
"(",
"filename",
",",
"squeeze_me",
"=",
"True",
")",
"version",
"=",
"mat",
"[",
"'MP'",
"]",
"[",
"'Version'",
"]",
".",
"item",
"(",
")",
"del",
"(",
"mat",
")",
"return",
"version"
] | High level import function that tries to determine the specific version
of the data format used.
Parameters
----------
filename: string
File path to a .mat matlab filename, as produced by the various
versions of the emmt_pp.exe postprocessing program.
Returns
-------
version: string
a sanitized version of the file format version | [
"High",
"level",
"import",
"function",
"that",
"tries",
"to",
"determine",
"the",
"specific",
"version",
"of",
"the",
"data",
"format",
"used",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/importers/eit_fzj.py#L34-L54 | train |
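A small sketch of the lookup performed by _get_file_version above, on a hypothetical file name; only the 'MP'/'Version' structure shown in the record is relied upon.

import scipy.io as sio

mat = sio.loadmat("eit_measurement.mat", squeeze_me=True)  # hypothetical path
print(mat["MP"]["Version"].item())   # the emmt_pp.exe file format version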
geophysics-ubonn/reda | lib/reda/importers/eit_fzj.py | MD_ConfigsPermutate | def MD_ConfigsPermutate(df_md):
"""Given a MD DataFrame, return a Nx4 array which permutes the current
injection dipoles.
"""
g_current_injections = df_md.groupby(['a', 'b'])
ab = np.array(list(g_current_injections.groups.keys()))
config_mgr = ConfigManager(nr_of_electrodes=ab.max())
config_mgr.gen_configs_permutate(ab, silent=True)
return config_mgr.configs | python | def MD_ConfigsPermutate(df_md):
"""Given a MD DataFrame, return a Nx4 array which permutes the current
injection dipoles.
"""
g_current_injections = df_md.groupby(['a', 'b'])
ab = np.array(list(g_current_injections.groups.keys()))
config_mgr = ConfigManager(nr_of_electrodes=ab.max())
config_mgr.gen_configs_permutate(ab, silent=True)
return config_mgr.configs | [
"def",
"MD_ConfigsPermutate",
"(",
"df_md",
")",
":",
"g_current_injections",
"=",
"df_md",
".",
"groupby",
"(",
"[",
"'a'",
",",
"'b'",
"]",
")",
"ab",
"=",
"np",
".",
"array",
"(",
"list",
"(",
"g_current_injections",
".",
"groups",
".",
"keys",
"(",
")",
")",
")",
"config_mgr",
"=",
"ConfigManager",
"(",
"nr_of_electrodes",
"=",
"ab",
".",
"max",
"(",
")",
")",
"config_mgr",
".",
"gen_configs_permutate",
"(",
"ab",
",",
"silent",
"=",
"True",
")",
"return",
"config_mgr",
".",
"configs"
] | Given a MD DataFrame, return a Nx4 array which permutes the current
injection dipoles. | [
"Given",
"a",
"MD",
"DataFrame",
"return",
"a",
"Nx4",
"array",
"which",
"permutes",
"the",
"current",
"injection",
"dipoles",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/importers/eit_fzj.py#L57-L65 | train |
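A hedged sketch of the input expected by MD_ConfigsPermutate above: a DataFrame whose integer columns 'a' and 'b' hold the current-injection electrode pairs. The dipoles below are invented, and the commented call requires reda to be installed.

import pandas as pd

df_md = pd.DataFrame({"a": [1, 1, 2, 3], "b": [2, 2, 3, 4]})
unique_ab = sorted(df_md.groupby(["a", "b"]).groups.keys())
print(unique_ab)   # unique injection dipoles the permutation is built from
# from reda.importers.eit_fzj import MD_ConfigsPermutate
# configs = MD_ConfigsPermutate(df_md)   # -> Nx4 array of ABMN configurations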
geophysics-ubonn/reda | lib/reda/importers/eit_fzj.py | apply_correction_factors | def apply_correction_factors(df, correction_file):
"""Apply correction factors for a pseudo-2D measurement setup. See Weigand
and Kemna, 2017, Biogeosciences, for detailed information.
"""
if isinstance(correction_file, (list, tuple)):
corr_data_raw = np.vstack(
[np.loadtxt(x) for x in correction_file]
)
else:
corr_data_raw = np.loadtxt(correction_file)
if corr_data_raw.shape[1] == 3:
A = (corr_data_raw[:, 0] / 1e4).astype(int)
B = (corr_data_raw[:, 0] % 1e4).astype(int)
M = (corr_data_raw[:, 1] / 1e4).astype(int)
N = (corr_data_raw[:, 1] % 1e4).astype(int)
corr_data = np.vstack((A, B, M, N, corr_data_raw[:, 2])).T
elif corr_data_raw.shape[1] == 5:
corr_data = corr_data_raw
else:
raise Exception('error')
corr_data[:, 0:2] = np.sort(corr_data[:, 0:2], axis=1)
corr_data[:, 2:4] = np.sort(corr_data[:, 2:4], axis=1)
if 'frequency' not in df.columns:
raise Exception(
'No frequency data found. Are you sure this is a seit data set?'
)
df = df.reset_index()
gf = df.groupby(['a', 'b', 'm', 'n'])
for key, item in gf.indices.items():
# print('key', key)
# print(item)
item_norm = np.hstack((np.sort(key[0:2]), np.sort(key[2:4])))
# print(item_norm)
index = np.where(
(corr_data[:, 0] == item_norm[0]) &
(corr_data[:, 1] == item_norm[1]) &
(corr_data[:, 2] == item_norm[2]) &
(corr_data[:, 3] == item_norm[3])
)[0]
# print(index, corr_data[index])
if len(index) == 0:
print(key)
import IPython
IPython.embed()
raise Exception(
'No correction factor found for this configuration'
)
factor = corr_data[index, 4]
# if key == (1, 4, 2, 3):
# print(key)
# print(factor)
# print(df['R'])
# print(df['k'])
# import IPython
# IPython.embed()
# exit()
# apply correction factor
for col in ('r', 'Zt', 'Vmn', 'rho_a'):
if col in df.columns:
df.ix[item, col] *= factor
df.ix[item, 'corr_fac'] = factor
return df, corr_data | python | def apply_correction_factors(df, correction_file):
"""Apply correction factors for a pseudo-2D measurement setup. See Weigand
and Kemna, 2017, Biogeosciences, for detailed information.
"""
if isinstance(correction_file, (list, tuple)):
corr_data_raw = np.vstack(
[np.loadtxt(x) for x in correction_file]
)
else:
corr_data_raw = np.loadtxt(correction_file)
if corr_data_raw.shape[1] == 3:
A = (corr_data_raw[:, 0] / 1e4).astype(int)
B = (corr_data_raw[:, 0] % 1e4).astype(int)
M = (corr_data_raw[:, 1] / 1e4).astype(int)
N = (corr_data_raw[:, 1] % 1e4).astype(int)
corr_data = np.vstack((A, B, M, N, corr_data_raw[:, 2])).T
elif corr_data_raw.shape[1] == 5:
corr_data = corr_data_raw
else:
raise Exception('error')
corr_data[:, 0:2] = np.sort(corr_data[:, 0:2], axis=1)
corr_data[:, 2:4] = np.sort(corr_data[:, 2:4], axis=1)
if 'frequency' not in df.columns:
raise Exception(
'No frequency data found. Are you sure this is a seit data set?'
)
df = df.reset_index()
gf = df.groupby(['a', 'b', 'm', 'n'])
for key, item in gf.indices.items():
# print('key', key)
# print(item)
item_norm = np.hstack((np.sort(key[0:2]), np.sort(key[2:4])))
# print(item_norm)
index = np.where(
(corr_data[:, 0] == item_norm[0]) &
(corr_data[:, 1] == item_norm[1]) &
(corr_data[:, 2] == item_norm[2]) &
(corr_data[:, 3] == item_norm[3])
)[0]
# print(index, corr_data[index])
if len(index) == 0:
print(key)
import IPython
IPython.embed()
raise Exception(
'No correction factor found for this configuration'
)
factor = corr_data[index, 4]
# if key == (1, 4, 2, 3):
# print(key)
# print(factor)
# print(df['R'])
# print(df['k'])
# import IPython
# IPython.embed()
# exit()
# apply correction factor
for col in ('r', 'Zt', 'Vmn', 'rho_a'):
if col in df.columns:
df.ix[item, col] *= factor
df.ix[item, 'corr_fac'] = factor
return df, corr_data | [
"def",
"apply_correction_factors",
"(",
"df",
",",
"correction_file",
")",
":",
"if",
"isinstance",
"(",
"correction_file",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"corr_data_raw",
"=",
"np",
".",
"vstack",
"(",
"[",
"np",
".",
"loadtxt",
"(",
"x",
")",
"for",
"x",
"in",
"correction_file",
"]",
")",
"else",
":",
"corr_data_raw",
"=",
"np",
".",
"loadtxt",
"(",
"correction_file",
")",
"if",
"corr_data_raw",
".",
"shape",
"[",
"1",
"]",
"==",
"3",
":",
"A",
"=",
"(",
"corr_data_raw",
"[",
":",
",",
"0",
"]",
"/",
"1e4",
")",
".",
"astype",
"(",
"int",
")",
"B",
"=",
"(",
"corr_data_raw",
"[",
":",
",",
"0",
"]",
"%",
"1e4",
")",
".",
"astype",
"(",
"int",
")",
"M",
"=",
"(",
"corr_data_raw",
"[",
":",
",",
"1",
"]",
"/",
"1e4",
")",
".",
"astype",
"(",
"int",
")",
"N",
"=",
"(",
"corr_data_raw",
"[",
":",
",",
"1",
"]",
"%",
"1e4",
")",
".",
"astype",
"(",
"int",
")",
"corr_data",
"=",
"np",
".",
"vstack",
"(",
"(",
"A",
",",
"B",
",",
"M",
",",
"N",
",",
"corr_data_raw",
"[",
":",
",",
"2",
"]",
")",
")",
".",
"T",
"elif",
"corr_data_raw",
".",
"shape",
"[",
"1",
"]",
"==",
"5",
":",
"corr_data",
"=",
"corr_data_raw",
"else",
":",
"raise",
"Exception",
"(",
"'error'",
")",
"corr_data",
"[",
":",
",",
"0",
":",
"2",
"]",
"=",
"np",
".",
"sort",
"(",
"corr_data",
"[",
":",
",",
"0",
":",
"2",
"]",
",",
"axis",
"=",
"1",
")",
"corr_data",
"[",
":",
",",
"2",
":",
"4",
"]",
"=",
"np",
".",
"sort",
"(",
"corr_data",
"[",
":",
",",
"2",
":",
"4",
"]",
",",
"axis",
"=",
"1",
")",
"if",
"'frequency'",
"not",
"in",
"df",
".",
"columns",
":",
"raise",
"Exception",
"(",
"'No frequency data found. Are you sure this is a seit data set?'",
")",
"df",
"=",
"df",
".",
"reset_index",
"(",
")",
"gf",
"=",
"df",
".",
"groupby",
"(",
"[",
"'a'",
",",
"'b'",
",",
"'m'",
",",
"'n'",
"]",
")",
"for",
"key",
",",
"item",
"in",
"gf",
".",
"indices",
".",
"items",
"(",
")",
":",
"# print('key', key)",
"# print(item)",
"item_norm",
"=",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"sort",
"(",
"key",
"[",
"0",
":",
"2",
"]",
")",
",",
"np",
".",
"sort",
"(",
"key",
"[",
"2",
":",
"4",
"]",
")",
")",
")",
"# print(item_norm)",
"index",
"=",
"np",
".",
"where",
"(",
"(",
"corr_data",
"[",
":",
",",
"0",
"]",
"==",
"item_norm",
"[",
"0",
"]",
")",
"&",
"(",
"corr_data",
"[",
":",
",",
"1",
"]",
"==",
"item_norm",
"[",
"1",
"]",
")",
"&",
"(",
"corr_data",
"[",
":",
",",
"2",
"]",
"==",
"item_norm",
"[",
"2",
"]",
")",
"&",
"(",
"corr_data",
"[",
":",
",",
"3",
"]",
"==",
"item_norm",
"[",
"3",
"]",
")",
")",
"[",
"0",
"]",
"# print(index, corr_data[index])",
"if",
"len",
"(",
"index",
")",
"==",
"0",
":",
"print",
"(",
"key",
")",
"import",
"IPython",
"IPython",
".",
"embed",
"(",
")",
"raise",
"Exception",
"(",
"'No correction factor found for this configuration'",
")",
"factor",
"=",
"corr_data",
"[",
"index",
",",
"4",
"]",
"# if key == (1, 4, 2, 3):",
"# print(key)",
"# print(factor)",
"# print(df['R'])",
"# print(df['k'])",
"# import IPython",
"# IPython.embed()",
"# exit()",
"# apply correction factor",
"for",
"col",
"in",
"(",
"'r'",
",",
"'Zt'",
",",
"'Vmn'",
",",
"'rho_a'",
")",
":",
"if",
"col",
"in",
"df",
".",
"columns",
":",
"df",
".",
"ix",
"[",
"item",
",",
"col",
"]",
"*=",
"factor",
"df",
".",
"ix",
"[",
"item",
",",
"'corr_fac'",
"]",
"=",
"factor",
"return",
"df",
",",
"corr_data"
] | Apply correction factors for a pseudo-2D measurement setup. See Weigand
and Kemna, 2017, Biogeosciences, for detailed information. | [
"Apply",
"correction",
"factors",
"for",
"a",
"pseudo",
"-",
"2D",
"measurement",
"setup",
".",
"See",
"Weigand",
"and",
"Kemna",
"2017",
"Biogeosciences",
"for",
"detailed",
"information",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/importers/eit_fzj.py#L217-L283 | train |
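A numpy sketch of the three-column correction-file layout handled above, where each electrode pair is packed into a single number as first_electrode * 10000 + second_electrode; the numeric values are invented for illustration.

import numpy as np

packed = np.array([
    [10002, 30004, 0.98],   # A=1, B=2, M=3, N=4, correction factor 0.98
    [20003, 40005, 1.02],
])
A = (packed[:, 0] // 1e4).astype(int)
B = (packed[:, 0] % 1e4).astype(int)
M = (packed[:, 1] // 1e4).astype(int)
N = (packed[:, 1] % 1e4).astype(int)
corr_data = np.column_stack((A, B, M, N, packed[:, 2]))
print(corr_data)   # same five-column layout as the 5-column input variant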
evolbioinfo/pastml | pastml/ml.py | get_pij_method | def get_pij_method(model=F81, frequencies=None, kappa=None):
"""
Returns a function for calculation of probability matrix of substitutions i->j over time t.
:param kappa: kappa parameter for HKY model
:type kappa: float
:param frequencies: array of state frequencies \pi_i
:type frequencies: numpy.array
:param model: model of character evolution
:type model: str
:return: probability matrix
:rtype: function
"""
if is_f81_like(model):
mu = get_mu(frequencies)
return lambda t: get_f81_pij(t, frequencies, mu)
if JTT == model:
return get_jtt_pij
if HKY == model:
return lambda t: get_hky_pij(t, frequencies, kappa) | python | def get_pij_method(model=F81, frequencies=None, kappa=None):
"""
Returns a function for calculation of probability matrix of substitutions i->j over time t.
:param kappa: kappa parameter for HKY model
:type kappa: float
:param frequencies: array of state frequencies \pi_i
:type frequencies: numpy.array
:param model: model of character evolution
:type model: str
:return: probability matrix
:rtype: function
"""
if is_f81_like(model):
mu = get_mu(frequencies)
return lambda t: get_f81_pij(t, frequencies, mu)
if JTT == model:
return get_jtt_pij
if HKY == model:
return lambda t: get_hky_pij(t, frequencies, kappa) | [
"def",
"get_pij_method",
"(",
"model",
"=",
"F81",
",",
"frequencies",
"=",
"None",
",",
"kappa",
"=",
"None",
")",
":",
"if",
"is_f81_like",
"(",
"model",
")",
":",
"mu",
"=",
"get_mu",
"(",
"frequencies",
")",
"return",
"lambda",
"t",
":",
"get_f81_pij",
"(",
"t",
",",
"frequencies",
",",
"mu",
")",
"if",
"JTT",
"==",
"model",
":",
"return",
"get_jtt_pij",
"if",
"HKY",
"==",
"model",
":",
"return",
"lambda",
"t",
":",
"get_hky_pij",
"(",
"t",
",",
"frequencies",
",",
"kappa",
")"
] | Returns a function for calculation of probability matrix of substitutions i->j over time t.
:param kappa: kappa parameter for HKY model
:type kappa: float
:param frequencies: array of state frequencies \pi_i
:type frequencies: numpy.array
:param model: model of character evolution
:type model: str
:return: probability matrix
:rtype: function | [
"Returns",
"a",
"function",
"for",
"calculation",
"of",
"probability",
"matrix",
"of",
"substitutions",
"i",
"-",
">",
"j",
"over",
"time",
"t",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L87-L106 | train |
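For F81-like models, get_pij_method above returns a closure over the state frequencies and a normalisation factor mu. The helpers get_mu and get_f81_pij are not shown in the record, so the sketch below uses the textbook F81 transition probabilities p_ij(t) = exp(-mu*t)*delta_ij + (1 - exp(-mu*t))*pi_j with mu = 1 / (1 - sum_k pi_k^2); exact agreement with pastml's internals is an assumption.

import numpy as np

def f81_pij(t, freqs):
    # Textbook F81 matrix; mu normalises the expected rate to one
    # substitution per unit time.
    mu = 1.0 / (1.0 - freqs.dot(freqs))
    e = np.exp(-mu * t)
    return e * np.eye(len(freqs)) + (1.0 - e) * freqs

freqs = np.array([0.1, 0.2, 0.3, 0.4])
P = f81_pij(0.5, freqs)
print(P.sum(axis=1))   # every row sums to 1, as a transition matrix must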
evolbioinfo/pastml | pastml/ml.py | initialize_allowed_states | def initialize_allowed_states(tree, feature, states):
"""
Initializes the allowed state arrays for tips based on their states given by the feature.
:param tree: tree for which the tip likelihoods are to be initialized
:type tree: ete3.Tree
:param feature: feature in which the tip states are stored
(the value could be None for a missing state or list if multiple stated are possible)
:type feature: str
:param states: ordered array of states.
:type states: numpy.array
:return: void, adds the get_personalized_feature_name(feature, ALLOWED_STATES) feature to tree tips.
"""
allowed_states_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
state2index = dict(zip(states, range(len(states))))
for node in tree.traverse():
node_states = getattr(node, feature, set())
if not node_states:
allowed_states = np.ones(len(state2index), dtype=np.int)
else:
allowed_states = np.zeros(len(state2index), dtype=np.int)
for state in node_states:
allowed_states[state2index[state]] = 1
node.add_feature(allowed_states_feature, allowed_states) | python | def initialize_allowed_states(tree, feature, states):
"""
Initializes the allowed state arrays for tips based on their states given by the feature.
:param tree: tree for which the tip likelihoods are to be initialized
:type tree: ete3.Tree
:param feature: feature in which the tip states are stored
(the value could be None for a missing state or list if multiple states are possible)
:type feature: str
:param states: ordered array of states.
:type states: numpy.array
:return: void, adds the get_personalized_feature_name(feature, ALLOWED_STATES) feature to tree tips.
"""
allowed_states_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
state2index = dict(zip(states, range(len(states))))
for node in tree.traverse():
node_states = getattr(node, feature, set())
if not node_states:
allowed_states = np.ones(len(state2index), dtype=np.int)
else:
allowed_states = np.zeros(len(state2index), dtype=np.int)
for state in node_states:
allowed_states[state2index[state]] = 1
node.add_feature(allowed_states_feature, allowed_states) | [
"def",
"initialize_allowed_states",
"(",
"tree",
",",
"feature",
",",
"states",
")",
":",
"allowed_states_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"state2index",
"=",
"dict",
"(",
"zip",
"(",
"states",
",",
"range",
"(",
"len",
"(",
"states",
")",
")",
")",
")",
"for",
"node",
"in",
"tree",
".",
"traverse",
"(",
")",
":",
"node_states",
"=",
"getattr",
"(",
"node",
",",
"feature",
",",
"set",
"(",
")",
")",
"if",
"not",
"node_states",
":",
"allowed_states",
"=",
"np",
".",
"ones",
"(",
"len",
"(",
"state2index",
")",
",",
"dtype",
"=",
"np",
".",
"int",
")",
"else",
":",
"allowed_states",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"state2index",
")",
",",
"dtype",
"=",
"np",
".",
"int",
")",
"for",
"state",
"in",
"node_states",
":",
"allowed_states",
"[",
"state2index",
"[",
"state",
"]",
"]",
"=",
"1",
"node",
".",
"add_feature",
"(",
"allowed_states_feature",
",",
"allowed_states",
")"
] | Initializes the allowed state arrays for tips based on their states given by the feature.
:param tree: tree for which the tip likelihoods are to be initialized
:type tree: ete3.Tree
:param feature: feature in which the tip states are stored
(the value could be None for a missing state or list if multiple states are possible)
:type feature: str
:param states: ordered array of states.
:type states: numpy.array
:return: void, adds the get_personalized_feature_name(feature, ALLOWED_STATES) feature to tree tips. | [
"Initializes",
"the",
"allowed",
"state",
"arrays",
"for",
"tips",
"based",
"on",
"their",
"states",
"given",
"by",
"the",
"feature",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L309-L333 | train |
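A small, ete3-free sketch of the allowed-state encoding used by initialize_allowed_states above: a 0/1 vector over the ordered state array, all ones for an unannotated node and restricted to the observed states otherwise. The DNA state set below is illustrative.

import numpy as np

states = np.array(["A", "C", "G", "T"])
state2index = {s: i for i, s in enumerate(states)}

def allowed_vector(node_states):
    if not node_states:
        return np.ones(len(states), dtype=int)   # unknown: everything allowed
    vec = np.zeros(len(states), dtype=int)
    for s in node_states:
        vec[state2index[s]] = 1
    return vec

print(allowed_vector(set()))        # -> [1 1 1 1]
print(allowed_vector({"C", "T"}))   # -> [0 1 0 1]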
evolbioinfo/pastml | pastml/ml.py | alter_zero_tip_allowed_states | def alter_zero_tip_allowed_states(tree, feature):
"""
Alters the bottom-up likelihood arrays for zero-distance tips
to make sure they do not contradict with other zero-distance tip siblings.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood is altered
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of zero-distance tips.
"""
zero_parent2tips = defaultdict(list)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
for tip in tree:
if tip.dist == 0:
state = getattr(tip, feature, None)
if state is not None and state != '':
zero_parent2tips[tip.up].append(tip)
# adjust zero tips to contain all the zero tip options as states
for parent, zero_tips in zero_parent2tips.items():
# If there is a common state do nothing
counts = None
for tip in zero_tips:
if counts is None:
counts = getattr(tip, allowed_state_feature).copy()
else:
counts += getattr(tip, allowed_state_feature)
if counts.max() == len(zero_tips):
continue
# Otherwise set all tip states to state union
allowed_states = None
for tip in zero_tips:
if allowed_states is None:
allowed_states = getattr(tip, allowed_state_feature).copy()
else:
tip_allowed_states = getattr(tip, allowed_state_feature)
allowed_states[np.nonzero(tip_allowed_states)] = 1
tip.add_feature(allowed_state_feature, allowed_states) | python | def alter_zero_tip_allowed_states(tree, feature):
"""
Alters the bottom-up likelihood arrays for zero-distance tips
to make sure they do not contradict with other zero-distance tip siblings.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood is altered
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of zero-distance tips.
"""
zero_parent2tips = defaultdict(list)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
for tip in tree:
if tip.dist == 0:
state = getattr(tip, feature, None)
if state is not None and state != '':
zero_parent2tips[tip.up].append(tip)
# adjust zero tips to contain all the zero tip options as states
for parent, zero_tips in zero_parent2tips.items():
# If there is a common state do nothing
counts = None
for tip in zero_tips:
if counts is None:
counts = getattr(tip, allowed_state_feature).copy()
else:
counts += getattr(tip, allowed_state_feature)
if counts.max() == len(zero_tips):
continue
# Otherwise set all tip states to state union
allowed_states = None
for tip in zero_tips:
if allowed_states is None:
allowed_states = getattr(tip, allowed_state_feature).copy()
else:
tip_allowed_states = getattr(tip, allowed_state_feature)
allowed_states[np.nonzero(tip_allowed_states)] = 1
tip.add_feature(allowed_state_feature, allowed_states) | [
"def",
"alter_zero_tip_allowed_states",
"(",
"tree",
",",
"feature",
")",
":",
"zero_parent2tips",
"=",
"defaultdict",
"(",
"list",
")",
"allowed_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"for",
"tip",
"in",
"tree",
":",
"if",
"tip",
".",
"dist",
"==",
"0",
":",
"state",
"=",
"getattr",
"(",
"tip",
",",
"feature",
",",
"None",
")",
"if",
"state",
"is",
"not",
"None",
"and",
"state",
"!=",
"''",
":",
"zero_parent2tips",
"[",
"tip",
".",
"up",
"]",
".",
"append",
"(",
"tip",
")",
"# adjust zero tips to contain all the zero tip options as states",
"for",
"parent",
",",
"zero_tips",
"in",
"zero_parent2tips",
".",
"items",
"(",
")",
":",
"# If there is a common state do nothing",
"counts",
"=",
"None",
"for",
"tip",
"in",
"zero_tips",
":",
"if",
"counts",
"is",
"None",
":",
"counts",
"=",
"getattr",
"(",
"tip",
",",
"allowed_state_feature",
")",
".",
"copy",
"(",
")",
"else",
":",
"counts",
"+=",
"getattr",
"(",
"tip",
",",
"allowed_state_feature",
")",
"if",
"counts",
".",
"max",
"(",
")",
"==",
"len",
"(",
"zero_tips",
")",
":",
"continue",
"# Otherwise set all tip states to state union",
"allowed_states",
"=",
"None",
"for",
"tip",
"in",
"zero_tips",
":",
"if",
"allowed_states",
"is",
"None",
":",
"allowed_states",
"=",
"getattr",
"(",
"tip",
",",
"allowed_state_feature",
")",
".",
"copy",
"(",
")",
"else",
":",
"tip_allowed_states",
"=",
"getattr",
"(",
"tip",
",",
"allowed_state_feature",
")",
"allowed_states",
"[",
"np",
".",
"nonzero",
"(",
"tip_allowed_states",
")",
"]",
"=",
"1",
"tip",
".",
"add_feature",
"(",
"allowed_state_feature",
",",
"allowed_states",
")"
] | Alters the bottom-up likelihood arrays for zero-distance tips
to make sure they do not contradict with other zero-distance tip siblings.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood is altered
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of zero-distance tips. | [
"Alters",
"the",
"bottom",
"-",
"up",
"likelihood",
"arrays",
"for",
"zero",
"-",
"distance",
"tips",
"to",
"make",
"sure",
"they",
"do",
"not",
"contradict",
"with",
"other",
"zero",
"-",
"distance",
"tip",
"siblings",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L336-L375 | train |
evolbioinfo/pastml | pastml/ml.py | unalter_zero_tip_allowed_states | def unalter_zero_tip_allowed_states(tree, feature, state2index):
"""
Unalters the bottom-up likelihood arrays for zero-distance tips
to contain ones only in their states.
:param state2index: dict, mapping between states and their indices in the likelihood array
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood was altered
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of zero-distance tips.
"""
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
for tip in tree:
if tip.dist > 0:
continue
state = getattr(tip, feature, set())
if state:
initial_allowed_states = np.zeros(len(state2index), np.int)
for _ in state:
initial_allowed_states[state2index[_]] = 1
allowed_states = getattr(tip, allowed_state_feature) & initial_allowed_states
tip.add_feature(allowed_state_feature, (allowed_states
if np.any(allowed_states > 0) else initial_allowed_states)) | python | def unalter_zero_tip_allowed_states(tree, feature, state2index):
"""
Unalters the bottom-up likelihood arrays for zero-distance tips
to contain ones only in their states.
:param state2index: dict, mapping between states and their indices in the likelihood array
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood was altered
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of zero-distance tips.
"""
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
for tip in tree:
if tip.dist > 0:
continue
state = getattr(tip, feature, set())
if state:
initial_allowed_states = np.zeros(len(state2index), np.int)
for _ in state:
initial_allowed_states[state2index[_]] = 1
allowed_states = getattr(tip, allowed_state_feature) & initial_allowed_states
tip.add_feature(allowed_state_feature, (allowed_states
if np.any(allowed_states > 0) else initial_allowed_states)) | [
"def",
"unalter_zero_tip_allowed_states",
"(",
"tree",
",",
"feature",
",",
"state2index",
")",
":",
"allowed_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"for",
"tip",
"in",
"tree",
":",
"if",
"tip",
".",
"dist",
">",
"0",
":",
"continue",
"state",
"=",
"getattr",
"(",
"tip",
",",
"feature",
",",
"set",
"(",
")",
")",
"if",
"state",
":",
"initial_allowed_states",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"state2index",
")",
",",
"np",
".",
"int",
")",
"for",
"_",
"in",
"state",
":",
"initial_allowed_states",
"[",
"state2index",
"[",
"_",
"]",
"]",
"=",
"1",
"allowed_states",
"=",
"getattr",
"(",
"tip",
",",
"allowed_state_feature",
")",
"&",
"initial_allowed_states",
"tip",
".",
"add_feature",
"(",
"allowed_state_feature",
",",
"(",
"allowed_states",
"if",
"np",
".",
"any",
"(",
"allowed_states",
">",
"0",
")",
"else",
"initial_allowed_states",
")",
")"
] | Unalters the bottom-up likelihood arrays for zero-distance tips
to contain ones only in their states.
:param state2index: dict, mapping between states and their indices in the likelihood array
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood was altered
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of zero-distance tips. | [
"Unalters",
"the",
"bottom",
"-",
"up",
"likelihood",
"arrays",
"for",
"zero",
"-",
"distance",
"tips",
"to",
"contain",
"ones",
"only",
"in",
"their",
"states",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L378-L399 | train |
evolbioinfo/pastml | pastml/ml.py | unalter_zero_tip_joint_states | def unalter_zero_tip_joint_states(tree, feature, state2index):
"""
Unalters the joint tip states for zero-distance tips
to contain only their states.
:param state2index: dict, mapping between states and their indices in the joint state array
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood was altered
:return: void, modifies the get_personalized_feature_name(feature, BU_LH_JOINT_STATES) feature of zero-distance tips.
"""
lh_joint_state_feature = get_personalized_feature_name(feature, BU_LH_JOINT_STATES)
for tip in tree:
if tip.dist > 0:
continue
state = getattr(tip, feature, set())
if len(state) > 1:
allowed_indices = {state2index[_] for _ in state}
allowed_index = next(iter(allowed_indices))
joint_states = getattr(tip, lh_joint_state_feature)
for i in range(len(state2index)):
if joint_states[i] not in allowed_indices:
joint_states[i] = allowed_index
elif len(state) == 1:
tip.add_feature(lh_joint_state_feature, np.ones(len(state2index), np.int) * state2index[next(iter(state))]) | python | def unalter_zero_tip_joint_states(tree, feature, state2index):
"""
Unalters the joint tip states for zero-distance tips
to contain only their states.
:param state2index: dict, mapping between states and their indices in the joint state array
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood was altered
:return: void, modifies the get_personalized_feature_name(feature, BU_LH_JOINT_STATES) feature of zero-distance tips.
"""
lh_joint_state_feature = get_personalized_feature_name(feature, BU_LH_JOINT_STATES)
for tip in tree:
if tip.dist > 0:
continue
state = getattr(tip, feature, set())
if len(state) > 1:
allowed_indices = {state2index[_] for _ in state}
allowed_index = next(iter(allowed_indices))
joint_states = getattr(tip, lh_joint_state_feature)
for i in range(len(state2index)):
if joint_states[i] not in allowed_indices:
joint_states[i] = allowed_index
elif len(state) == 1:
tip.add_feature(lh_joint_state_feature, np.ones(len(state2index), np.int) * state2index[next(iter(state))]) | [
"def",
"unalter_zero_tip_joint_states",
"(",
"tree",
",",
"feature",
",",
"state2index",
")",
":",
"lh_joint_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"BU_LH_JOINT_STATES",
")",
"for",
"tip",
"in",
"tree",
":",
"if",
"tip",
".",
"dist",
">",
"0",
":",
"continue",
"state",
"=",
"getattr",
"(",
"tip",
",",
"feature",
",",
"set",
"(",
")",
")",
"if",
"len",
"(",
"state",
")",
">",
"1",
":",
"allowed_indices",
"=",
"{",
"state2index",
"[",
"_",
"]",
"for",
"_",
"in",
"state",
"}",
"allowed_index",
"=",
"next",
"(",
"iter",
"(",
"allowed_indices",
")",
")",
"joint_states",
"=",
"getattr",
"(",
"tip",
",",
"lh_joint_state_feature",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"state2index",
")",
")",
":",
"if",
"joint_states",
"[",
"i",
"]",
"not",
"in",
"allowed_indices",
":",
"joint_states",
"[",
"i",
"]",
"=",
"allowed_index",
"elif",
"len",
"(",
"state",
")",
"==",
"1",
":",
"tip",
".",
"add_feature",
"(",
"lh_joint_state_feature",
",",
"np",
".",
"ones",
"(",
"len",
"(",
"state2index",
")",
",",
"np",
".",
"int",
")",
"*",
"state2index",
"[",
"next",
"(",
"iter",
"(",
"state",
")",
")",
"]",
")"
] | Unalters the joint tip states for zero-distance tips
to contain only their states.
:param state2index: dict, mapping between states and their indices in the joint state array
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood was altered
:return: void, modifies the get_personalized_feature_name(feature, BU_LH_JOINT_STATES) feature of zero-distance tips. | [
"Unalters",
"the",
"joint",
"tip",
"states",
"for",
"zero",
"-",
"distance",
"tips",
"to",
"contain",
"only",
"their",
"states",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L402-L425 | train |
evolbioinfo/pastml | pastml/ml.py | calculate_marginal_likelihoods | def calculate_marginal_likelihoods(tree, feature, frequencies):
"""
Calculates marginal likelihoods for each tree node
by multiplying state frequencies with their bottom-up and top-down likelihoods.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood is calculated
:param frequencies: numpy array of state frequencies
:return: void, stores the node marginal likelihoods in the get_personalized_feature_name(feature, LH) feature.
"""
bu_lh_feature = get_personalized_feature_name(feature, BU_LH)
bu_lh_sf_feature = get_personalized_feature_name(feature, BU_LH_SF)
td_lh_feature = get_personalized_feature_name(feature, TD_LH)
td_lh_sf_feature = get_personalized_feature_name(feature, TD_LH_SF)
lh_feature = get_personalized_feature_name(feature, LH)
lh_sf_feature = get_personalized_feature_name(feature, LH_SF)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
for node in tree.traverse('preorder'):
likelihood = getattr(node, bu_lh_feature) * getattr(node, td_lh_feature) * frequencies \
* getattr(node, allowed_state_feature)
node.add_feature(lh_feature, likelihood)
node.add_feature(lh_sf_feature, getattr(node, td_lh_sf_feature) + getattr(node, bu_lh_sf_feature))
node.del_feature(bu_lh_feature)
node.del_feature(bu_lh_sf_feature)
node.del_feature(td_lh_feature)
node.del_feature(td_lh_sf_feature) | python | def calculate_marginal_likelihoods(tree, feature, frequencies):
"""
Calculates marginal likelihoods for each tree node
by multiplying state frequencies with their bottom-up and top-down likelihoods.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood is calculated
:param frequencies: numpy array of state frequencies
:return: void, stores the node marginal likelihoods in the get_personalized_feature_name(feature, LH) feature.
"""
bu_lh_feature = get_personalized_feature_name(feature, BU_LH)
bu_lh_sf_feature = get_personalized_feature_name(feature, BU_LH_SF)
td_lh_feature = get_personalized_feature_name(feature, TD_LH)
td_lh_sf_feature = get_personalized_feature_name(feature, TD_LH_SF)
lh_feature = get_personalized_feature_name(feature, LH)
lh_sf_feature = get_personalized_feature_name(feature, LH_SF)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
for node in tree.traverse('preorder'):
likelihood = getattr(node, bu_lh_feature) * getattr(node, td_lh_feature) * frequencies \
* getattr(node, allowed_state_feature)
node.add_feature(lh_feature, likelihood)
node.add_feature(lh_sf_feature, getattr(node, td_lh_sf_feature) + getattr(node, bu_lh_sf_feature))
node.del_feature(bu_lh_feature)
node.del_feature(bu_lh_sf_feature)
node.del_feature(td_lh_feature)
node.del_feature(td_lh_sf_feature) | [
"def",
"calculate_marginal_likelihoods",
"(",
"tree",
",",
"feature",
",",
"frequencies",
")",
":",
"bu_lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"BU_LH",
")",
"bu_lh_sf_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"BU_LH_SF",
")",
"td_lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"TD_LH",
")",
"td_lh_sf_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"TD_LH_SF",
")",
"lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"LH",
")",
"lh_sf_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"LH_SF",
")",
"allowed_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"for",
"node",
"in",
"tree",
".",
"traverse",
"(",
"'preorder'",
")",
":",
"likelihood",
"=",
"getattr",
"(",
"node",
",",
"bu_lh_feature",
")",
"*",
"getattr",
"(",
"node",
",",
"td_lh_feature",
")",
"*",
"frequencies",
"*",
"getattr",
"(",
"node",
",",
"allowed_state_feature",
")",
"node",
".",
"add_feature",
"(",
"lh_feature",
",",
"likelihood",
")",
"node",
".",
"add_feature",
"(",
"lh_sf_feature",
",",
"getattr",
"(",
"node",
",",
"td_lh_sf_feature",
")",
"+",
"getattr",
"(",
"node",
",",
"bu_lh_sf_feature",
")",
")",
"node",
".",
"del_feature",
"(",
"bu_lh_feature",
")",
"node",
".",
"del_feature",
"(",
"bu_lh_sf_feature",
")",
"node",
".",
"del_feature",
"(",
"td_lh_feature",
")",
"node",
".",
"del_feature",
"(",
"td_lh_sf_feature",
")"
] | Calculates marginal likelihoods for each tree node
by multiplying state frequencies with their bottom-up and top-down likelihoods.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the likelihood is calculated
:param frequencies: numpy array of state frequencies
:return: void, stores the node marginal likelihoods in the get_personalized_feature_name(feature, LH) feature. | [
"Calculates",
"marginal",
"likelihoods",
"for",
"each",
"tree",
"node",
"by",
"multiplying",
"state",
"frequencies",
"with",
"their",
"bottom",
"-",
"up",
"and",
"top",
"-",
"down",
"likelihoods",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L428-L455 | train |
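A numeric sketch of the combination step in calculate_marginal_likelihoods above: per state, the marginal likelihood is the product of the bottom-up likelihood, the top-down likelihood, the state frequency and the 0/1 allowed-state mask (the log scaling factors are simply added and kept separately). The numbers are invented; the final normalisation mirrors convert_likelihoods_to_probabilities in the next record.

import numpy as np

bottom_up = np.array([0.20, 0.05, 0.10])
top_down = np.array([0.30, 0.40, 0.10])
freqs = np.array([0.50, 0.25, 0.25])
allowed = np.array([1, 1, 0])

marginal = bottom_up * top_down * freqs * allowed
print(marginal)                     # unnormalised marginal likelihoods
print(marginal / marginal.sum())    # marginal posterior probabilities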
evolbioinfo/pastml | pastml/ml.py | convert_likelihoods_to_probabilities | def convert_likelihoods_to_probabilities(tree, feature, states):
"""
Normalizes each node marginal likelihoods to convert them to marginal probabilities.
:param states: numpy array of states in the order corresponding to the marginal likelihood arrays
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the probabilities are calculated
:return: pandas DataFrame that maps node names to their marginal probabilities.
"""
lh_feature = get_personalized_feature_name(feature, LH)
name2probs = {}
for node in tree.traverse():
lh = getattr(node, lh_feature)
name2probs[node.name] = lh / lh.sum()
return pd.DataFrame.from_dict(name2probs, orient='index', columns=states) | python | def convert_likelihoods_to_probabilities(tree, feature, states):
"""
Normalizes each node marginal likelihoods to convert them to marginal probabilities.
:param states: numpy array of states in the order corresponding to the marginal likelihood arrays
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the probabilities are calculated
:return: pandas DataFrame that maps node names to their marginal probabilities.
"""
lh_feature = get_personalized_feature_name(feature, LH)
name2probs = {}
for node in tree.traverse():
lh = getattr(node, lh_feature)
name2probs[node.name] = lh / lh.sum()
return pd.DataFrame.from_dict(name2probs, orient='index', columns=states) | [
"def",
"convert_likelihoods_to_probabilities",
"(",
"tree",
",",
"feature",
",",
"states",
")",
":",
"lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"LH",
")",
"name2probs",
"=",
"{",
"}",
"for",
"node",
"in",
"tree",
".",
"traverse",
"(",
")",
":",
"lh",
"=",
"getattr",
"(",
"node",
",",
"lh_feature",
")",
"name2probs",
"[",
"node",
".",
"name",
"]",
"=",
"lh",
"/",
"lh",
".",
"sum",
"(",
")",
"return",
"pd",
".",
"DataFrame",
".",
"from_dict",
"(",
"name2probs",
",",
"orient",
"=",
"'index'",
",",
"columns",
"=",
"states",
")"
] | Normalizes each node marginal likelihoods to convert them to marginal probabilities.
:param states: numpy array of states in the order corresponding to the marginal likelihood arrays
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the probabilities are calculated
:return: pandas DataFrame that maps node names to their marginal probabilities. | [
"Normalizes",
"each",
"node",
"marginal",
"likelihoods",
"to",
"convert",
"them",
"to",
"marginal",
"probabilities",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L476-L493 | train |
evolbioinfo/pastml | pastml/ml.py | choose_ancestral_states_mppa | def choose_ancestral_states_mppa(tree, feature, states, force_joint=True):
"""
Chooses node ancestral states based on their marginal probabilities using MPPA method.
:param force_joint: make sure that Joint state is chosen even if it has a low probability.
:type force_joint: bool
:param tree: tree of interest
:type tree: ete3.Tree
:param feature: character for which the ancestral states are to be chosen
:type feature: str
:param states: possible character states in order corresponding to the probabilities array
:type states: numpy.array
:return: number of ancestral scenarios selected,
calculated by multiplying the number of selected states for all nodes.
Also modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
:rtype: int
"""
lh_feature = get_personalized_feature_name(feature, LH)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
joint_state_feature = get_personalized_feature_name(feature, JOINT_STATE)
n = len(states)
_, state2array = get_state2allowed_states(states, False)
num_scenarios = 1
unresolved_nodes = 0
num_states = 0
# If force_joint == True,
# we make sure that the joint state is always chosen,
# for this we sort the marginal probabilities array as [lowest_non_joint_mp, ..., highest_non_joint_mp, joint_mp]
# select k in 1:n such as the correction between choosing 0, 0, ..., 1/k, ..., 1/k and our sorted array is min
# and return the corresponding states
for node in tree.traverse():
marginal_likelihoods = getattr(node, lh_feature)
marginal_probs = marginal_likelihoods / marginal_likelihoods.sum()
if force_joint:
joint_index = getattr(node, joint_state_feature)
joint_prob = marginal_probs[joint_index]
marginal_probs = np.hstack((np.sort(np.delete(marginal_probs, joint_index)), [joint_prob]))
else:
marginal_probs = np.sort(marginal_probs)
best_k = n
best_correstion = np.inf
for k in range(1, n + 1):
correction = np.hstack((np.zeros(n - k), np.ones(k) / k)) - marginal_probs
correction = correction.dot(correction)
if correction < best_correstion:
best_correstion = correction
best_k = k
num_scenarios *= best_k
num_states += best_k
if force_joint:
indices_selected = sorted(range(n),
key=lambda _: (0 if _ == joint_index else 1, -marginal_likelihoods[_]))[:best_k]
else:
indices_selected = sorted(range(n), key=lambda _: -marginal_likelihoods[_])[:best_k]
if best_k == 1:
allowed_states = state2array[indices_selected[0]]
else:
allowed_states = np.zeros(len(states), dtype=np.int)
allowed_states[indices_selected] = 1
unresolved_nodes += 1
node.add_feature(allowed_state_feature, allowed_states)
return num_scenarios, unresolved_nodes, num_states | python | def choose_ancestral_states_mppa(tree, feature, states, force_joint=True):
"""
Chooses node ancestral states based on their marginal probabilities using MPPA method.
:param force_joint: make sure that Joint state is chosen even if it has a low probability.
:type force_joint: bool
:param tree: tree of interest
:type tree: ete3.Tree
:param feature: character for which the ancestral states are to be chosen
:type feature: str
:param states: possible character states in order corresponding to the probabilities array
:type states: numpy.array
:return: number of ancestral scenarios selected,
calculated by multiplying the number of selected states for all nodes.
Also modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
:rtype: int
"""
lh_feature = get_personalized_feature_name(feature, LH)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
joint_state_feature = get_personalized_feature_name(feature, JOINT_STATE)
n = len(states)
_, state2array = get_state2allowed_states(states, False)
num_scenarios = 1
unresolved_nodes = 0
num_states = 0
# If force_joint == True,
# we make sure that the joint state is always chosen,
# for this we sort the marginal probabilities array as [lowest_non_joint_mp, ..., highest_non_joint_mp, joint_mp]
# select k in 1:n such as the correction between choosing 0, 0, ..., 1/k, ..., 1/k and our sorted array is min
# and return the corresponding states
for node in tree.traverse():
marginal_likelihoods = getattr(node, lh_feature)
marginal_probs = marginal_likelihoods / marginal_likelihoods.sum()
if force_joint:
joint_index = getattr(node, joint_state_feature)
joint_prob = marginal_probs[joint_index]
marginal_probs = np.hstack((np.sort(np.delete(marginal_probs, joint_index)), [joint_prob]))
else:
marginal_probs = np.sort(marginal_probs)
best_k = n
best_correstion = np.inf
for k in range(1, n + 1):
correction = np.hstack((np.zeros(n - k), np.ones(k) / k)) - marginal_probs
correction = correction.dot(correction)
if correction < best_correstion:
best_correstion = correction
best_k = k
num_scenarios *= best_k
num_states += best_k
if force_joint:
indices_selected = sorted(range(n),
key=lambda _: (0 if n == joint_index else 1, -marginal_likelihoods[_]))[:best_k]
else:
indices_selected = sorted(range(n), key=lambda _: -marginal_likelihoods[_])[:best_k]
if best_k == 1:
allowed_states = state2array[indices_selected[0]]
else:
allowed_states = np.zeros(len(states), dtype=np.int)
allowed_states[indices_selected] = 1
unresolved_nodes += 1
node.add_feature(allowed_state_feature, allowed_states)
return num_scenarios, unresolved_nodes, num_states | [
"def",
"choose_ancestral_states_mppa",
"(",
"tree",
",",
"feature",
",",
"states",
",",
"force_joint",
"=",
"True",
")",
":",
"lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"LH",
")",
"allowed_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"joint_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"JOINT_STATE",
")",
"n",
"=",
"len",
"(",
"states",
")",
"_",
",",
"state2array",
"=",
"get_state2allowed_states",
"(",
"states",
",",
"False",
")",
"num_scenarios",
"=",
"1",
"unresolved_nodes",
"=",
"0",
"num_states",
"=",
"0",
"# If force_joint == True,",
"# we make sure that the joint state is always chosen,",
"# for this we sort the marginal probabilities array as [lowest_non_joint_mp, ..., highest_non_joint_mp, joint_mp]",
"# select k in 1:n such as the correction between choosing 0, 0, ..., 1/k, ..., 1/k and our sorted array is min",
"# and return the corresponding states",
"for",
"node",
"in",
"tree",
".",
"traverse",
"(",
")",
":",
"marginal_likelihoods",
"=",
"getattr",
"(",
"node",
",",
"lh_feature",
")",
"marginal_probs",
"=",
"marginal_likelihoods",
"/",
"marginal_likelihoods",
".",
"sum",
"(",
")",
"if",
"force_joint",
":",
"joint_index",
"=",
"getattr",
"(",
"node",
",",
"joint_state_feature",
")",
"joint_prob",
"=",
"marginal_probs",
"[",
"joint_index",
"]",
"marginal_probs",
"=",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"sort",
"(",
"np",
".",
"delete",
"(",
"marginal_probs",
",",
"joint_index",
")",
")",
",",
"[",
"joint_prob",
"]",
")",
")",
"else",
":",
"marginal_probs",
"=",
"np",
".",
"sort",
"(",
"marginal_probs",
")",
"best_k",
"=",
"n",
"best_correstion",
"=",
"np",
".",
"inf",
"for",
"k",
"in",
"range",
"(",
"1",
",",
"n",
"+",
"1",
")",
":",
"correction",
"=",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"zeros",
"(",
"n",
"-",
"k",
")",
",",
"np",
".",
"ones",
"(",
"k",
")",
"/",
"k",
")",
")",
"-",
"marginal_probs",
"correction",
"=",
"correction",
".",
"dot",
"(",
"correction",
")",
"if",
"correction",
"<",
"best_correstion",
":",
"best_correstion",
"=",
"correction",
"best_k",
"=",
"k",
"num_scenarios",
"*=",
"best_k",
"num_states",
"+=",
"best_k",
"if",
"force_joint",
":",
"indices_selected",
"=",
"sorted",
"(",
"range",
"(",
"n",
")",
",",
"key",
"=",
"lambda",
"_",
":",
"(",
"0",
"if",
"n",
"==",
"joint_index",
"else",
"1",
",",
"-",
"marginal_likelihoods",
"[",
"_",
"]",
")",
")",
"[",
":",
"best_k",
"]",
"else",
":",
"indices_selected",
"=",
"sorted",
"(",
"range",
"(",
"n",
")",
",",
"key",
"=",
"lambda",
"_",
":",
"-",
"marginal_likelihoods",
"[",
"_",
"]",
")",
"[",
":",
"best_k",
"]",
"if",
"best_k",
"==",
"1",
":",
"allowed_states",
"=",
"state2array",
"[",
"indices_selected",
"[",
"0",
"]",
"]",
"else",
":",
"allowed_states",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"states",
")",
",",
"dtype",
"=",
"np",
".",
"int",
")",
"allowed_states",
"[",
"indices_selected",
"]",
"=",
"1",
"unresolved_nodes",
"+=",
"1",
"node",
".",
"add_feature",
"(",
"allowed_state_feature",
",",
"allowed_states",
")",
"return",
"num_scenarios",
",",
"unresolved_nodes",
",",
"num_states"
] | Chooses node ancestral states based on their marginal probabilities using MPPA method.
:param force_joint: make sure that Joint state is chosen even if it has a low probability.
:type force_joint: bool
:param tree: tree of interest
:type tree: ete3.Tree
:param feature: character for which the ancestral states are to be chosen
:type feature: str
:param states: possible character states in order corresponding to the probabilities array
:type states: numpy.array
:return: number of ancestral scenarios selected,
calculated by multiplying the number of selected states for all nodes.
Also modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
:rtype: int | [
"Chooses",
"node",
"ancestral",
"states",
"based",
"on",
"their",
"marginal",
"probabilities",
"using",
"MPPA",
"method",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L496-L563 | train |
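A note on the selection step in the MPPA record above: the number of states kept per node is the k that minimises the squared distance between the sorted marginal probabilities and the step vector (0, ..., 0, 1/k, ..., 1/k). The following minimal sketch reproduces just that criterion on a made-up probability vector (no tree and no ete3 features involved):

import numpy as np

# hypothetical, already normalised marginal probabilities for one node, sorted ascending
marginal_probs = np.array([0.05, 0.10, 0.25, 0.60])
n = len(marginal_probs)

best_k, best_score = n, np.inf
for k in range(1, n + 1):
    step = np.hstack((np.zeros(n - k), np.ones(k) / k))  # keep k states with equal weight 1/k
    diff = step - marginal_probs
    score = diff.dot(diff)                               # squared Euclidean distance
    if score < best_score:
        best_score, best_k = score, k

print(best_k)  # 2: the two most probable states (0.25 and 0.60) are kept for this node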
evolbioinfo/pastml | pastml/ml.py | choose_ancestral_states_map | def choose_ancestral_states_map(tree, feature, states):
"""
Chooses node ancestral states based on their marginal probabilities using MAP method.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the ancestral states are to be chosen
:param states: numpy.array of possible character states in order corresponding to the probabilities array
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
"""
lh_feature = get_personalized_feature_name(feature, LH)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
_, state2array = get_state2allowed_states(states, False)
for node in tree.traverse():
marginal_likelihoods = getattr(node, lh_feature)
node.add_feature(allowed_state_feature, state2array[marginal_likelihoods.argmax()]) | python | def choose_ancestral_states_map(tree, feature, states):
"""
Chooses node ancestral states based on their marginal probabilities using MAP method.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the ancestral states are to be chosen
:param states: numpy.array of possible character states in order corresponding to the probabilities array
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
"""
lh_feature = get_personalized_feature_name(feature, LH)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
_, state2array = get_state2allowed_states(states, False)
for node in tree.traverse():
marginal_likelihoods = getattr(node, lh_feature)
node.add_feature(allowed_state_feature, state2array[marginal_likelihoods.argmax()]) | [
"def",
"choose_ancestral_states_map",
"(",
"tree",
",",
"feature",
",",
"states",
")",
":",
"lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"LH",
")",
"allowed_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"_",
",",
"state2array",
"=",
"get_state2allowed_states",
"(",
"states",
",",
"False",
")",
"for",
"node",
"in",
"tree",
".",
"traverse",
"(",
")",
":",
"marginal_likelihoods",
"=",
"getattr",
"(",
"node",
",",
"lh_feature",
")",
"node",
".",
"add_feature",
"(",
"allowed_state_feature",
",",
"state2array",
"[",
"marginal_likelihoods",
".",
"argmax",
"(",
")",
"]",
")"
] | Chooses node ancestral states based on their marginal probabilities using MAP method.
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the ancestral states are to be chosen
:param states: numpy.array of possible character states in order corresponding to the probabilities array
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states. | [
"Chooses",
"node",
"ancestral",
"states",
"based",
"on",
"their",
"marginal",
"probabilities",
"using",
"MAP",
"method",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L566-L582 | train |
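By contrast, the MAP record above keeps exactly one state per node, the argmax of the marginal likelihoods. A toy illustration of that selection and of the one-hot allowed-states vector it produces (states and numbers are invented):

import numpy as np

states = np.array(['resistant', 'sensitive', 'unknown'])   # hypothetical character states
marginal_likelihoods = np.array([0.2, 0.7, 0.1])

allowed_states = np.zeros(len(states), dtype=int)          # what state2array would hold
allowed_states[marginal_likelihoods.argmax()] = 1

print(states[marginal_likelihoods.argmax()], allowed_states)   # sensitive [0 1 0]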
evolbioinfo/pastml | pastml/ml.py | choose_ancestral_states_joint | def choose_ancestral_states_joint(tree, feature, states, frequencies):
"""
Chooses node ancestral states based on their marginal probabilities using joint method.
:param frequencies: numpy array of state frequencies
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the ancestral states are to be chosen
:param states: numpy.array of possible character states in order corresponding to the probabilities array
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
"""
lh_feature = get_personalized_feature_name(feature, BU_LH)
lh_state_feature = get_personalized_feature_name(feature, BU_LH_JOINT_STATES)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
joint_state_feature = get_personalized_feature_name(feature, JOINT_STATE)
_, state2array = get_state2allowed_states(states, False)
def chose_consistent_state(node, state_index):
node.add_feature(joint_state_feature, state_index)
node.add_feature(allowed_state_feature, state2array[state_index])
for child in node.children:
chose_consistent_state(child, getattr(child, lh_state_feature)[state_index])
chose_consistent_state(tree, (getattr(tree, lh_feature) * frequencies).argmax()) | python | def choose_ancestral_states_joint(tree, feature, states, frequencies):
"""
Chooses node ancestral states based on their marginal probabilities using joint method.
:param frequencies: numpy array of state frequencies
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the ancestral states are to be chosen
:param states: numpy.array of possible character states in order corresponding to the probabilities array
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states.
"""
lh_feature = get_personalized_feature_name(feature, BU_LH)
lh_state_feature = get_personalized_feature_name(feature, BU_LH_JOINT_STATES)
allowed_state_feature = get_personalized_feature_name(feature, ALLOWED_STATES)
joint_state_feature = get_personalized_feature_name(feature, JOINT_STATE)
_, state2array = get_state2allowed_states(states, False)
def chose_consistent_state(node, state_index):
node.add_feature(joint_state_feature, state_index)
node.add_feature(allowed_state_feature, state2array[state_index])
for child in node.children:
chose_consistent_state(child, getattr(child, lh_state_feature)[state_index])
chose_consistent_state(tree, (getattr(tree, lh_feature) * frequencies).argmax()) | [
"def",
"choose_ancestral_states_joint",
"(",
"tree",
",",
"feature",
",",
"states",
",",
"frequencies",
")",
":",
"lh_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"BU_LH",
")",
"lh_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"BU_LH_JOINT_STATES",
")",
"allowed_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"ALLOWED_STATES",
")",
"joint_state_feature",
"=",
"get_personalized_feature_name",
"(",
"feature",
",",
"JOINT_STATE",
")",
"_",
",",
"state2array",
"=",
"get_state2allowed_states",
"(",
"states",
",",
"False",
")",
"def",
"chose_consistent_state",
"(",
"node",
",",
"state_index",
")",
":",
"node",
".",
"add_feature",
"(",
"joint_state_feature",
",",
"state_index",
")",
"node",
".",
"add_feature",
"(",
"allowed_state_feature",
",",
"state2array",
"[",
"state_index",
"]",
")",
"for",
"child",
"in",
"node",
".",
"children",
":",
"chose_consistent_state",
"(",
"child",
",",
"getattr",
"(",
"child",
",",
"lh_state_feature",
")",
"[",
"state_index",
"]",
")",
"chose_consistent_state",
"(",
"tree",
",",
"(",
"getattr",
"(",
"tree",
",",
"lh_feature",
")",
"*",
"frequencies",
")",
".",
"argmax",
"(",
")",
")"
] | Chooses node ancestral states based on their marginal probabilities using joint method.
:param frequencies: numpy array of state frequencies
:param tree: ete3.Tree, the tree of interest
:param feature: str, character for which the ancestral states are to be chosen
:param states: numpy.array of possible character states in order corresponding to the probabilities array
:return: void, modifies the get_personalized_feature_name(feature, ALLOWED_STATES) feature of each node
to only contain the selected states. | [
"Chooses",
"node",
"ancestral",
"states",
"based",
"on",
"their",
"marginal",
"probabilities",
"using",
"joint",
"method",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L585-L609 | train |
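The joint record above picks the root state that maximises the bottom-up likelihood times the state frequency, then follows each child's precomputed best-state index for the chosen parent state. A self-contained sketch of that downward pass, with plain dictionaries standing in for ete3 nodes and made-up numbers:

import numpy as np

leaf = {'name': 'leaf', 'children': [],
        'joint_states': np.array([1, 0])}    # best leaf state given parent state 0 or 1
root = {'name': 'root', 'children': [leaf],
        'bu_lh': np.array([0.3, 0.6])}       # bottom-up likelihoods at the root

frequencies = np.array([0.7, 0.3])           # hypothetical state frequencies

def chose_consistent_state(node, state_index, chosen):
    chosen[node['name']] = state_index
    for child in node['children']:
        chose_consistent_state(child, int(child['joint_states'][state_index]), chosen)

chosen = {}
chose_consistent_state(root, int((root['bu_lh'] * frequencies).argmax()), chosen)
print(chosen)   # {'root': 0, 'leaf': 1}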
evolbioinfo/pastml | pastml/__init__.py | col_name2cat | def col_name2cat(column):
"""
Reformats the column string to make sure it contains only numerical, letter characters or underscore.
:param column: column name to be reformatted
:type column: str
:return: column name with illegal characters removed
:rtype: str
"""
column_string = ''.join(s for s in column.replace(' ', '_') if s.isalnum() or '_' == s)
return column_string | python | def col_name2cat(column):
"""
Reformats the column string to make sure it contains only numerical, letter characters or underscore.
:param column: column name to be reformatted
:type column: str
:return: column name with illegal characters removed
:rtype: str
"""
column_string = ''.join(s for s in column.replace(' ', '_') if s.isalnum() or '_' == s)
return column_string | [
"def",
"col_name2cat",
"(",
"column",
")",
":",
"column_string",
"=",
"''",
".",
"join",
"(",
"s",
"for",
"s",
"in",
"column",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
"if",
"s",
".",
"isalnum",
"(",
")",
"or",
"'_'",
"==",
"s",
")",
"return",
"column_string"
] | Reformats the column string to make sure it contains only numerical, letter characters or underscore.
:param column: column name to be reformatted
:type column: str
:return: column name with illegal characters removed
:rtype: str | [
"Reformats",
"the",
"column",
"string",
"to",
"make",
"sure",
"it",
"contains",
"only",
"numerical",
"letter",
"characters",
"or",
"underscore",
"."
] | df8a375841525738383e59548eed3441b07dbd3e | https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/__init__.py#L14-L24 | train |
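A quick demonstration of the sanitisation rule in the record above: spaces become underscores and every remaining character that is neither alphanumeric nor an underscore is dropped (the column name is made up):

def col_name2cat(column):
    # same filtering rule as in the record above
    return ''.join(s for s in column.replace(' ', '_') if s.isalnum() or '_' == s)

print(col_name2cat('Drug resistance (2019)'))   # Drug_resistance_2019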
lambdalisue/notify | src/notify/conf.py | get_user_config_filename | def get_user_config_filename(appname='notify'):
"""
Get user config filename.
It will return operating system dependent config filename.
Parameters
----------
appname : string
An application name used for filename
Returns
-------
string
A filename of user configuration.
"""
import platform
system = platform.system()
if system == 'Windows':
rootname = os.path.join(os.environ['APPDATA'], appname)
filename = appname + ".cfg"
prefix = ''
elif system == 'Linux':
XDG_CONFIG_HOME = os.environ.get('XDG_CONFIG_HOME', None)
rootname = XDG_CONFIG_HOME or os.path.join('~', '.config')
rootname = os.path.expanduser(rootname)
# check if XDG_CONFIG_HOME exists
if not os.path.exists(rootname) and XDG_CONFIG_HOME is None:
# XDG_CONFIG_HOME is not used
rootname = os.path.expanduser('~')
filename = appname + ".cfg"
prefix = '.'
else:
rootname = os.path.join(rootname, appname)
filename = appname + ".cfg"
prefix = ''
elif system == 'Darwin':
rootname = os.path.expanduser('~')
filename = appname + ".cfg"
prefix = '.'
else:
# Unknown
rootname = os.path.expanduser('~')
filename = appname + ".cfg"
prefix = ''
return os.path.join(rootname, prefix + filename) | python | def get_user_config_filename(appname='notify'):
"""
Get user config filename.
It will return operating system dependent config filename.
Parameters
----------
appname : string
An application name used for filename
Returns
-------
string
A filename of user configuration.
"""
import platform
system = platform.system()
if system == 'Windows':
rootname = os.path.join(os.environ['APPDATA'], appname)
filename = appname + ".cfg"
prefix = ''
elif system == 'Linux':
XDG_CONFIG_HOME = os.environ.get('XDG_CONFIG_HOME', None)
rootname = XDG_CONFIG_HOME or os.path.join('~', '.config')
rootname = os.path.expanduser(rootname)
# check if XDG_CONFIG_HOME exists
if not os.path.exists(rootname) and XDG_CONFIG_HOME is None:
# XDG_CONFIG_HOME is not used
rootname = os.path.expanduser('~')
filename = appname + ".cfg"
prefix = '.'
else:
rootname = os.path.join(rootname, appname)
filename = appname + ".cfg"
prefix = ''
elif system == 'Darwin':
rootname = os.path.expanduser('~')
filename = appname + ".cfg"
prefix = '.'
else:
# Unknown
rootname = os.path.expanduser('~')
filename = appname + ".cfg"
prefix = ''
return os.path.join(rootname, prefix + filename) | [
"def",
"get_user_config_filename",
"(",
"appname",
"=",
"'notify'",
")",
":",
"import",
"platform",
"system",
"=",
"platform",
".",
"system",
"(",
")",
"if",
"system",
"==",
"'Windows'",
":",
"rootname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"environ",
"[",
"'APPDATA'",
"]",
",",
"appname",
")",
"filename",
"=",
"appname",
"+",
"\".cfg\"",
"prefix",
"=",
"''",
"elif",
"system",
"==",
"'Linux'",
":",
"XDG_CONFIG_HOME",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'XDG_CONFIG_HOME'",
",",
"None",
")",
"rootname",
"=",
"XDG_CONFIG_HOME",
"or",
"os",
".",
"path",
".",
"join",
"(",
"'~'",
",",
"'.config'",
")",
"rootname",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"rootname",
")",
"# check if XDG_CONFIG_HOME exists",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"rootname",
")",
"and",
"XDG_CONFIG_HOME",
"is",
"None",
":",
"# XDG_CONFIG_HOME is not used",
"rootname",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~'",
")",
"filename",
"=",
"appname",
"+",
"\".cfg\"",
"prefix",
"=",
"'.'",
"else",
":",
"rootname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"rootname",
",",
"appname",
")",
"filename",
"=",
"appname",
"+",
"\".cfg\"",
"prefix",
"=",
"''",
"elif",
"system",
"==",
"'Darwin'",
":",
"rootname",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~'",
")",
"filename",
"=",
"appname",
"+",
"\".cfg\"",
"prefix",
"=",
"'.'",
"else",
":",
"# Unknown",
"rootname",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~'",
")",
"filename",
"=",
"appname",
"+",
"\".cfg\"",
"prefix",
"=",
"''",
"return",
"os",
".",
"path",
".",
"join",
"(",
"rootname",
",",
"prefix",
"+",
"filename",
")"
] | Get user config filename.
It will return operating system dependent config filename.
Parameters
----------
appname : string
An application name used for filename
Returns
-------
string
A filename of user configuration. | [
"Get",
"user",
"config",
"filename",
"."
] | 1b6d7d1faa2cea13bfaa1f35130f279a0115e686 | https://github.com/lambdalisue/notify/blob/1b6d7d1faa2cea13bfaa1f35130f279a0115e686/src/notify/conf.py#L24-L70 | train |
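The platform branches in the record above reduce to a few predictable locations. A small sketch of just the Linux/XDG logic, kept separate so it can be tried without touching the real environment (the printed path is only an example):

import os

def linux_config_path(appname='notify'):
    # mirrors the Linux branch: $XDG_CONFIG_HOME/<app>/<app>.cfg when a config
    # directory is available, otherwise a hidden ~/.<app>.cfg file
    xdg = os.environ.get('XDG_CONFIG_HOME', None)
    root = os.path.expanduser(xdg or os.path.join('~', '.config'))
    if not os.path.exists(root) and xdg is None:
        return os.path.join(os.path.expanduser('~'), '.' + appname + '.cfg')
    return os.path.join(root, appname, appname + '.cfg')

print(linux_config_path())   # e.g. /home/user/.config/notify/notify.cfg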
lambdalisue/notify | src/notify/conf.py | config_to_options | def config_to_options(config):
"""
Convert ConfigParser instance to argparse.Namespace
Parameters
----------
config : object
A ConfigParser instance
Returns
-------
object
An argparse.Namespace instance
"""
class Options:
host=config.get('smtp', 'host', raw=True)
port=config.getint('smtp', 'port')
to_addr=config.get('mail', 'to_addr', raw=True)
from_addr=config.get('mail', 'from_addr', raw=True)
subject=config.get('mail', 'subject', raw=True)
encoding=config.get('mail', 'encoding', raw=True)
username=config.get('auth', 'username')
opts = Options()
# format
opts.from_addr % {'host': opts.host, 'prog': 'notify'}
opts.to_addr % {'host': opts.host, 'prog': 'notify'}
return opts | python | def config_to_options(config):
"""
Convert ConfigParser instance to argparse.Namespace
Parameters
----------
config : object
A ConfigParser instance
Returns
-------
object
An argparse.Namespace instance
"""
class Options:
host=config.get('smtp', 'host', raw=True)
port=config.getint('smtp', 'port')
to_addr=config.get('mail', 'to_addr', raw=True)
from_addr=config.get('mail', 'from_addr', raw=True)
subject=config.get('mail', 'subject', raw=True)
encoding=config.get('mail', 'encoding', raw=True)
username=config.get('auth', 'username')
opts = Options()
# format
opts.from_addr % {'host': opts.host, 'prog': 'notify'}
opts.to_addr % {'host': opts.host, 'prog': 'notify'}
return opts | [
"def",
"config_to_options",
"(",
"config",
")",
":",
"class",
"Options",
":",
"host",
"=",
"config",
".",
"get",
"(",
"'smtp'",
",",
"'host'",
",",
"raw",
"=",
"True",
")",
"port",
"=",
"config",
".",
"getint",
"(",
"'smtp'",
",",
"'port'",
")",
"to_addr",
"=",
"config",
".",
"get",
"(",
"'mail'",
",",
"'to_addr'",
",",
"raw",
"=",
"True",
")",
"from_addr",
"=",
"config",
".",
"get",
"(",
"'mail'",
",",
"'from_addr'",
",",
"raw",
"=",
"True",
")",
"subject",
"=",
"config",
".",
"get",
"(",
"'mail'",
",",
"'subject'",
",",
"raw",
"=",
"True",
")",
"encoding",
"=",
"config",
".",
"get",
"(",
"'mail'",
",",
"'encoding'",
",",
"raw",
"=",
"True",
")",
"username",
"=",
"config",
".",
"get",
"(",
"'auth'",
",",
"'username'",
")",
"opts",
"=",
"Options",
"(",
")",
"# format",
"opts",
".",
"from_addr",
"%",
"{",
"'host'",
":",
"opts",
".",
"host",
",",
"'prog'",
":",
"'notify'",
"}",
"opts",
".",
"to_addr",
"%",
"{",
"'host'",
":",
"opts",
".",
"host",
",",
"'prog'",
":",
"'notify'",
"}",
"return",
"opts"
] | Convert ConfigParser instance to argparse.Namespace
Parameters
----------
config : object
A ConfigParser instance
Returns
-------
object
An argparse.Namespace instance | [
"Convert",
"ConfigParser",
"instance",
"to",
"argparse",
".",
"Namespace"
] | 1b6d7d1faa2cea13bfaa1f35130f279a0115e686 | https://github.com/lambdalisue/notify/blob/1b6d7d1faa2cea13bfaa1f35130f279a0115e686/src/notify/conf.py#L73-L99 | train |
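A hedged usage sketch for the conversion in the record above: the parser only needs the smtp/mail/auth options that the getters ask for. The sample below uses Python 3's configparser instead of the SafeConfigParser seen in the record, and all values are placeholders; note that what actually comes back from config_to_options is the small attribute-holding Options class defined in the code:

from io import StringIO
from configparser import ConfigParser

SAMPLE = """
[smtp]
host = smtp.example.com
port = 587

[mail]
to_addr = admin@example.com
from_addr = %(prog)s@%(host)s
subject = notify report
encoding = utf-8

[auth]
username = admin
"""

config = ConfigParser()
config.read_file(StringIO(SAMPLE))                  # readfp() in the older API
print(config.getint('smtp', 'port'))                # 587
print(config.get('mail', 'from_addr', raw=True))    # %(prog)s@%(host)s, formatted later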
lambdalisue/notify | src/notify/conf.py | create_default_config | def create_default_config():
"""
Create default ConfigParser instance
"""
import codecs
config = ConfigParser.SafeConfigParser()
config.readfp(StringIO(DEFAULT_CONFIG))
# Load user settings
filename = get_user_config_filename()
if not os.path.exists(filename):
from wizard import setup_wizard
setup_wizard(config)
else:
try:
fi = codecs.open(filename, 'r', encoding='utf-8')
config.readfp(fi)
finally:
fi.close()
return config | python | def create_default_config():
"""
Create default ConfigParser instance
"""
import codecs
config = ConfigParser.SafeConfigParser()
config.readfp(StringIO(DEFAULT_CONFIG))
# Load user settings
filename = get_user_config_filename()
if not os.path.exists(filename):
from wizard import setup_wizard
setup_wizard(config)
else:
try:
fi = codecs.open(filename, 'r', encoding='utf-8')
config.readfp(fi)
finally:
fi.close()
return config | [
"def",
"create_default_config",
"(",
")",
":",
"import",
"codecs",
"config",
"=",
"ConfigParser",
".",
"SafeConfigParser",
"(",
")",
"config",
".",
"readfp",
"(",
"StringIO",
"(",
"DEFAULT_CONFIG",
")",
")",
"# Load user settings",
"filename",
"=",
"get_user_config_filename",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"from",
"wizard",
"import",
"setup_wizard",
"setup_wizard",
"(",
"config",
")",
"else",
":",
"try",
":",
"fi",
"=",
"codecs",
".",
"open",
"(",
"filename",
",",
"'r'",
",",
"encoding",
"=",
"'utf-8'",
")",
"config",
".",
"readfp",
"(",
"fi",
")",
"finally",
":",
"fi",
".",
"close",
"(",
")",
"return",
"config"
] | Create default ConfigParser instance | [
"Create",
"default",
"ConfigParser",
"instance"
] | 1b6d7d1faa2cea13bfaa1f35130f279a0115e686 | https://github.com/lambdalisue/notify/blob/1b6d7d1faa2cea13bfaa1f35130f279a0115e686/src/notify/conf.py#L102-L121 | train |
geophysics-ubonn/reda | lib/reda/utils/helper_functions.py | has_multiple_timesteps | def has_multiple_timesteps(data):
"""Return True if `data` container has multiple timesteps."""
if "timestep" in data.keys():
if len(np.unique(data["timestep"])) > 1:
return True
return False | python | def has_multiple_timesteps(data):
"""Return True if `data` container has multiple timesteps."""
if "timestep" in data.keys():
if len(np.unique(data["timestep"])) > 1:
return True
return False | [
"def",
"has_multiple_timesteps",
"(",
"data",
")",
":",
"if",
"\"timestep\"",
"in",
"data",
".",
"keys",
"(",
")",
":",
"if",
"len",
"(",
"np",
".",
"unique",
"(",
"data",
"[",
"\"timestep\"",
"]",
")",
")",
">",
"1",
":",
"return",
"True",
"return",
"False"
] | Return True if `data` container has multiple timesteps. | [
"Return",
"True",
"if",
"data",
"container",
"has",
"multiple",
"timesteps",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/utils/helper_functions.py#L83-L88 | train |
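A minimal check of the helper above against a small pandas DataFrame (column names and values are arbitrary, chosen only to exercise the timestep test):

import numpy as np
import pandas as pd

def has_multiple_timesteps(data):
    # same test as the record above
    return 'timestep' in data.keys() and len(np.unique(data['timestep'])) > 1

df = pd.DataFrame({'r': [10.0, 11.0, 10.5, 11.5], 'timestep': [0, 0, 1, 1]})
print(has_multiple_timesteps(df))   # True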
geophysics-ubonn/reda | lib/reda/utils/helper_functions.py | split_timesteps | def split_timesteps(data, consistent_abmn=False):
"""Split data into multiple timesteps."""
if has_multiple_timesteps(data):
grouped = data.groupby("timestep")
return [group[1] for group in grouped]
else:
return data | python | def split_timesteps(data, consistent_abmn=False):
"""Split data into multiple timesteps."""
if has_multiple_timesteps(data):
grouped = data.groupby("timestep")
return [group[1] for group in grouped]
else:
return data | [
"def",
"split_timesteps",
"(",
"data",
",",
"consistent_abmn",
"=",
"False",
")",
":",
"if",
"has_multiple_timesteps",
"(",
"data",
")",
":",
"grouped",
"=",
"data",
".",
"groupby",
"(",
"\"timestep\"",
")",
"return",
"[",
"group",
"[",
"1",
"]",
"for",
"group",
"in",
"grouped",
"]",
"else",
":",
"return",
"data"
] | Split data into multiple timesteps. | [
"Split",
"data",
"into",
"multiple",
"timesteps",
"."
] | 46a939729e40c7c4723315c03679c40761152e9e | https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/utils/helper_functions.py#L90-L96 | train |
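And the corresponding split for the same kind of frame: one sub-frame per unique timestep, which is exactly what the groupby in the record above yields (data again invented):

import pandas as pd

df = pd.DataFrame({'r': [10.0, 11.0, 10.5, 11.5], 'timestep': [0, 0, 1, 1]})
parts = [group for _, group in df.groupby('timestep')]   # mirrors split_timesteps
print(len(parts), list(parts[0]['r']))                   # 2 [10.0, 11.0]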
kaustavdm/pyAvroPhonetic | pyavrophonetic/avro.py | parse | def parse(text):
"""Parses input text, matches and replaces using avrodict
If a valid replacement is found, returns the replaced string. If
no replacement is found, returns the input text.
Usage:
::
from pyavrophonetic import avro
avro.parse("ami banglay gan gai")
"""
# Sanitize text case to meet phonetic comparison standards
fixed_text = validate.fix_string_case(utf(text))
# prepare output list
output = []
# cursor end point
cur_end = 0
# iterate through input text
for cur, i in enumerate(fixed_text):
# Trap characters with unicode encoding errors
try:
i.encode('utf-8')
except UnicodeDecodeError:
uni_pass = False
else:
uni_pass = True
# Default value for match
match = {'matched': False}
# Check cur is greater than or equals cur_end. If cursor is in
# a position that has already been processed/replaced, we don't
# process anything at all
if not uni_pass:
cur_end = cur + 1
output.append(i)
elif cur >= cur_end and uni_pass:
# Try looking in non rule patterns with current string portion
match = match_non_rule_patterns(fixed_text, cur)
# Check if non rule patterns have matched
if match["matched"]:
output.append(match["replaced"])
cur_end = cur + len(match["found"])
else:
# if non rule patterns have not matched, try rule patterns
match = match_rule_patterns(fixed_text, cur)
# Check if rule patterns have matched
if match["matched"]:
# Update cur_end as cursor + length of match found
cur_end = cur + len(match["found"])
# Process its rules
replaced = process_rules(rules = match["rules"],
fixed_text = fixed_text,
cur = cur, cur_end = cur_end)
# If any rules match, output replacement from the
# rule, else output it's default top-level/default
# replacement
if replaced is not None:
# Rule has matched
output.append(replaced)
else:
# No rules have matched
# output common match
output.append(match["replaced"])
# If none matched, append present cursor value
if not match["matched"]:
cur_end = cur + 1
output.append(i)
# End looping through input text and produce output
return ''.join(output) | python | def parse(text):
"""Parses input text, matches and replaces using avrodict
If a valid replacement is found, returns the replaced string. If
no replacement is found, returns the input text.
Usage:
::
from pyavrophonetic import avro
avro.parse("ami banglay gan gai")
"""
# Sanitize text case to meet phonetic comparison standards
fixed_text = validate.fix_string_case(utf(text))
# prepare output list
output = []
# cursor end point
cur_end = 0
# iterate through input text
for cur, i in enumerate(fixed_text):
# Trap characters with unicode encoding errors
try:
i.encode('utf-8')
except UnicodeDecodeError:
uni_pass = False
else:
uni_pass = True
# Default value for match
match = {'matched': False}
# Check cur is greater than or equals cur_end. If cursor is in
# a position that has already been processed/replaced, we don't
# process anything at all
if not uni_pass:
cur_end = cur + 1
output.append(i)
elif cur >= cur_end and uni_pass:
# Try looking in non rule patterns with current string portion
match = match_non_rule_patterns(fixed_text, cur)
# Check if non rule patterns have matched
if match["matched"]:
output.append(match["replaced"])
cur_end = cur + len(match["found"])
else:
# if non rule patterns have not matched, try rule patterns
match = match_rule_patterns(fixed_text, cur)
# Check if rule patterns have matched
if match["matched"]:
# Update cur_end as cursor + length of match found
cur_end = cur + len(match["found"])
# Process its rules
replaced = process_rules(rules = match["rules"],
fixed_text = fixed_text,
cur = cur, cur_end = cur_end)
# If any rules match, output replacement from the
# rule, else output it's default top-level/default
# replacement
if replaced is not None:
# Rule has matched
output.append(replaced)
else:
# No rules have matched
# output common match
output.append(match["replaced"])
# If none matched, append present cursor value
if not match["matched"]:
cur_end = cur + 1
output.append(i)
# End looping through input text and produce output
return ''.join(output) | [
"def",
"parse",
"(",
"text",
")",
":",
"# Sanitize text case to meet phonetic comparison standards",
"fixed_text",
"=",
"validate",
".",
"fix_string_case",
"(",
"utf",
"(",
"text",
")",
")",
"# prepare output list",
"output",
"=",
"[",
"]",
"# cursor end point",
"cur_end",
"=",
"0",
"# iterate through input text",
"for",
"cur",
",",
"i",
"in",
"enumerate",
"(",
"fixed_text",
")",
":",
"# Trap characters with unicode encoding errors",
"try",
":",
"i",
".",
"encode",
"(",
"'utf-8'",
")",
"except",
"UnicodeDecodeError",
":",
"uni_pass",
"=",
"False",
"else",
":",
"uni_pass",
"=",
"True",
"# Default value for match",
"match",
"=",
"{",
"'matched'",
":",
"False",
"}",
"# Check cur is greater than or equals cur_end. If cursor is in",
"# a position that has alread been processed/replaced, we don't",
"# process anything at all",
"if",
"not",
"uni_pass",
":",
"cur_end",
"=",
"cur",
"+",
"1",
"output",
".",
"append",
"(",
"i",
")",
"elif",
"cur",
">=",
"cur_end",
"and",
"uni_pass",
":",
"# Try looking in non rule patterns with current string portion",
"match",
"=",
"match_non_rule_patterns",
"(",
"fixed_text",
",",
"cur",
")",
"# Check if non rule patterns have matched",
"if",
"match",
"[",
"\"matched\"",
"]",
":",
"output",
".",
"append",
"(",
"match",
"[",
"\"replaced\"",
"]",
")",
"cur_end",
"=",
"cur",
"+",
"len",
"(",
"match",
"[",
"\"found\"",
"]",
")",
"else",
":",
"# if non rule patterns have not matched, try rule patterns",
"match",
"=",
"match_rule_patterns",
"(",
"fixed_text",
",",
"cur",
")",
"# Check if rule patterns have matched",
"if",
"match",
"[",
"\"matched\"",
"]",
":",
"# Update cur_end as cursor + length of match found",
"cur_end",
"=",
"cur",
"+",
"len",
"(",
"match",
"[",
"\"found\"",
"]",
")",
"# Process its rules",
"replaced",
"=",
"process_rules",
"(",
"rules",
"=",
"match",
"[",
"\"rules\"",
"]",
",",
"fixed_text",
"=",
"fixed_text",
",",
"cur",
"=",
"cur",
",",
"cur_end",
"=",
"cur_end",
")",
"# If any rules match, output replacement from the",
"# rule, else output it's default top-level/default",
"# replacement",
"if",
"replaced",
"is",
"not",
"None",
":",
"# Rule has matched",
"output",
".",
"append",
"(",
"replaced",
")",
"else",
":",
"# No rules have matched",
"# output common match",
"output",
".",
"append",
"(",
"match",
"[",
"\"replaced\"",
"]",
")",
"# If none matched, append present cursor value",
"if",
"not",
"match",
"[",
"\"matched\"",
"]",
":",
"cur_end",
"=",
"cur",
"+",
"1",
"output",
".",
"append",
"(",
"i",
")",
"# End looping through input text and produce output",
"return",
"''",
".",
"join",
"(",
"output",
")"
] | Parses input text, matches and replaces using avrodict
If a valid replacement is found, returns the replaced string. If
no replacement is found, returns the input text.
Usage:
::
from pyavrophonetic import avro
avro.parse("ami banglay gan gai") | [
"Parses",
"input",
"text",
"matches",
"and",
"replaces",
"using",
"avrodict"
] | 26b7d567d8db025f2cac4de817e716390d7ac337 | https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L38-L109 | train |
kaustavdm/pyAvroPhonetic | pyavrophonetic/avro.py | match_non_rule_patterns | def match_non_rule_patterns(fixed_text, cur=0):
"""Matches given text at cursor position with non rule patterns
Returns a dictionary of three elements:
- "matched" - Bool: depending on if match found
- "found" - string/None: Value of matched pattern's 'find' key or none
- "replaced": string Replaced string if match found else input string at
cursor
"""
pattern = exact_find_in_pattern(fixed_text, cur, NON_RULE_PATTERNS)
if len(pattern) > 0:
return {"matched": True, "found": pattern[0]['find'],
"replaced": pattern[0]['replace']}
else:
return {"matched": False, "found": None,
"replaced": fixed_text[cur]} | python | def match_non_rule_patterns(fixed_text, cur=0):
"""Matches given text at cursor position with non rule patterns
Returns a dictionary of three elements:
- "matched" - Bool: depending on if match found
- "found" - string/None: Value of matched pattern's 'find' key or none
- "replaced": string Replaced string if match found else input string at
cursor
"""
pattern = exact_find_in_pattern(fixed_text, cur, NON_RULE_PATTERNS)
if len(pattern) > 0:
return {"matched": True, "found": pattern[0]['find'],
"replaced": pattern[0]['replace']}
else:
return {"matched": False, "found": None,
"replaced": fixed_text[cur]} | [
"def",
"match_non_rule_patterns",
"(",
"fixed_text",
",",
"cur",
"=",
"0",
")",
":",
"pattern",
"=",
"exact_find_in_pattern",
"(",
"fixed_text",
",",
"cur",
",",
"NON_RULE_PATTERNS",
")",
"if",
"len",
"(",
"pattern",
")",
">",
"0",
":",
"return",
"{",
"\"matched\"",
":",
"True",
",",
"\"found\"",
":",
"pattern",
"[",
"0",
"]",
"[",
"'find'",
"]",
",",
"\"replaced\"",
":",
"pattern",
"[",
"0",
"]",
"[",
"'replace'",
"]",
"}",
"else",
":",
"return",
"{",
"\"matched\"",
":",
"False",
",",
"\"found\"",
":",
"None",
",",
"\"replaced\"",
":",
"fixed_text",
"[",
"cur",
"]",
"}"
] | Matches given text at cursor position with non rule patterns
Returns a dictionary of three elements:
- "matched" - Bool: depending on if match found
- "found" - string/None: Value of matched pattern's 'find' key or none
- "replaced": string Replaced string if match found else input string at
cursor | [
"Matches",
"given",
"text",
"at",
"cursor",
"position",
"with",
"non",
"rule",
"patterns"
] | 26b7d567d8db025f2cac4de817e716390d7ac337 | https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L111-L128 | train |
kaustavdm/pyAvroPhonetic | pyavrophonetic/avro.py | match_rule_patterns | def match_rule_patterns(fixed_text, cur=0):
"""Matches given text at cursor position with rule patterns
Returns a dictionary of four elements:
- "matched" - Bool: depending on if match found
- "found" - string/None: Value of matched pattern's 'find' key or none
- "replaced": string Replaced string if match found else input string at
cursor
- "rules": dict/None: A dict of rules or None if no match found
"""
pattern = exact_find_in_pattern(fixed_text, cur, RULE_PATTERNS)
# if len(pattern) == 1:
if len(pattern) > 0:
return {"matched": True, "found": pattern[0]['find'],
"replaced": pattern[0]['replace'], "rules": pattern[0]['rules']}
else:
return {"matched": False, "found": None,
"replaced": fixed_text[cur], "rules": None} | python | def match_rule_patterns(fixed_text, cur=0):
"""Matches given text at cursor position with rule patterns
Returns a dictionary of four elements:
- "matched" - Bool: depending on if match found
- "found" - string/None: Value of matched pattern's 'find' key or none
- "replaced": string Replaced string if match found else input string at
cursor
- "rules": dict/None: A dict of rules or None if no match found
"""
pattern = exact_find_in_pattern(fixed_text, cur, RULE_PATTERNS)
# if len(pattern) == 1:
if len(pattern) > 0:
return {"matched": True, "found": pattern[0]['find'],
"replaced": pattern[0]['replace'], "rules": pattern[0]['rules']}
else:
return {"matched": False, "found": None,
"replaced": fixed_text[cur], "rules": None} | [
"def",
"match_rule_patterns",
"(",
"fixed_text",
",",
"cur",
"=",
"0",
")",
":",
"pattern",
"=",
"exact_find_in_pattern",
"(",
"fixed_text",
",",
"cur",
",",
"RULE_PATTERNS",
")",
"# if len(pattern) == 1:",
"if",
"len",
"(",
"pattern",
")",
">",
"0",
":",
"return",
"{",
"\"matched\"",
":",
"True",
",",
"\"found\"",
":",
"pattern",
"[",
"0",
"]",
"[",
"'find'",
"]",
",",
"\"replaced\"",
":",
"pattern",
"[",
"0",
"]",
"[",
"'replace'",
"]",
",",
"\"rules\"",
":",
"pattern",
"[",
"0",
"]",
"[",
"'rules'",
"]",
"}",
"else",
":",
"return",
"{",
"\"matched\"",
":",
"False",
",",
"\"found\"",
":",
"None",
",",
"\"replaced\"",
":",
"fixed_text",
"[",
"cur",
"]",
",",
"\"rules\"",
":",
"None",
"}"
] | Matches given text at cursor position with rule patterns
Returns a dictionary of four elements:
- "matched" - Bool: depending on if match found
- "found" - string/None: Value of matched pattern's 'find' key or none
- "replaced": string Replaced string if match found else input string at
cursor
- "rules": dict/None: A dict of rules or None if no match found | [
"Matches",
"given",
"text",
"at",
"cursor",
"position",
"with",
"rule",
"patterns"
] | 26b7d567d8db025f2cac4de817e716390d7ac337 | https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L130-L149 | train |
kaustavdm/pyAvroPhonetic | pyavrophonetic/avro.py | exact_find_in_pattern | def exact_find_in_pattern(fixed_text, cur=0, patterns=PATTERNS):
"""Returns pattern items that match given text, cur position and pattern"""
return [x for x in patterns if (cur + len(x['find']) <= len(fixed_text))
and x['find'] == fixed_text[cur:(cur + len(x['find']))]] | python | def exact_find_in_pattern(fixed_text, cur=0, patterns=PATTERNS):
"""Returns pattern items that match given text, cur position and pattern"""
return [x for x in patterns if (cur + len(x['find']) <= len(fixed_text))
and x['find'] == fixed_text[cur:(cur + len(x['find']))]] | [
"def",
"exact_find_in_pattern",
"(",
"fixed_text",
",",
"cur",
"=",
"0",
",",
"patterns",
"=",
"PATTERNS",
")",
":",
"return",
"[",
"x",
"for",
"x",
"in",
"patterns",
"if",
"(",
"cur",
"+",
"len",
"(",
"x",
"[",
"'find'",
"]",
")",
"<=",
"len",
"(",
"fixed_text",
")",
")",
"and",
"x",
"[",
"'find'",
"]",
"==",
"fixed_text",
"[",
"cur",
":",
"(",
"cur",
"+",
"len",
"(",
"x",
"[",
"'find'",
"]",
")",
")",
"]",
"]"
] | Returns pattern items that match given text, cur position and pattern | [
"Returns",
"pattern",
"items",
"that",
"match",
"given",
"text",
"cur",
"position",
"and",
"pattern"
] | 26b7d567d8db025f2cac4de817e716390d7ac337 | https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L151-L154 | train |
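A toy run of the cursor matching in the record above. The two pattern entries here are invented for illustration rather than taken from the real avrodict; the caller (parse) then works with the first returned entry:

def exact_find_in_pattern(fixed_text, cur=0, patterns=()):
    # same comprehension as the record above
    return [x for x in patterns if (cur + len(x['find']) <= len(fixed_text))
            and x['find'] == fixed_text[cur:(cur + len(x['find']))]]

toy_patterns = [{'find': 'kh', 'replace': 'KH'}, {'find': 'k', 'replace': 'K'}]
print(exact_find_in_pattern('khela', 0, toy_patterns))
# both entries match at cursor 0: [{'find': 'kh', ...}, {'find': 'k', ...}]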
kaustavdm/pyAvroPhonetic | pyavrophonetic/avro.py | process_rules | def process_rules(rules, fixed_text, cur = 0, cur_end = 1):
"""Process rules matched in pattern and returns suitable replacement
If any rule's condition is satisfied, output the rules "replace",
else output None
"""
replaced = ''
# iterate through rules
for rule in rules:
matched = False
# iterate through matches
for match in rule['matches']:
matched = process_match(match, fixed_text, cur, cur_end)
# Break out of loop if we don't have a match. Here we are
# trusting avrodict to have listed matches sequentially
if not matched:
break
# If a match is found, stop looping through rules any further
if matched:
replaced = rule['replace']
break
# if any match has been found return replace value
if matched:
return replaced
else:
return None | python | def process_rules(rules, fixed_text, cur = 0, cur_end = 1):
"""Process rules matched in pattern and returns suitable replacement
If any rule's condition is satisfied, output the rules "replace",
else output None
"""
replaced = ''
# iterate through rules
for rule in rules:
matched = False
# iterate through matches
for match in rule['matches']:
matched = process_match(match, fixed_text, cur, cur_end)
# Break out of loop if we don't have a match. Here we are
# trusting avrodict to have listed matches sequentially
if not matched:
break
# If a match is found, stop looping through rules any further
if matched:
replaced = rule['replace']
break
# if any match has been found return replace value
if matched:
return replaced
else:
return None | [
"def",
"process_rules",
"(",
"rules",
",",
"fixed_text",
",",
"cur",
"=",
"0",
",",
"cur_end",
"=",
"1",
")",
":",
"replaced",
"=",
"''",
"# iterate through rules",
"for",
"rule",
"in",
"rules",
":",
"matched",
"=",
"False",
"# iterate through matches",
"for",
"match",
"in",
"rule",
"[",
"'matches'",
"]",
":",
"matched",
"=",
"process_match",
"(",
"match",
",",
"fixed_text",
",",
"cur",
",",
"cur_end",
")",
"# Break out of loop if we dont' have a match. Here we are",
"# trusting avrodict to have listed matches sequentially",
"if",
"not",
"matched",
":",
"break",
"# If a match is found, stop looping through rules any further",
"if",
"matched",
":",
"replaced",
"=",
"rule",
"[",
"'replace'",
"]",
"break",
"# if any match has been found return replace value",
"if",
"matched",
":",
"return",
"replaced",
"else",
":",
"return",
"None"
] | Process rules matched in pattern and returns suitable replacement
If any rule's condition is satisfied, output the rules "replace",
else output None | [
"Process",
"rules",
"matched",
"in",
"pattern",
"and",
"returns",
"suitable",
"replacement"
] | 26b7d567d8db025f2cac4de817e716390d7ac337 | https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L156-L183 | train |
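The rules iterated in the record above have a small fixed shape: each rule carries a 'replace' value and a list of 'matches', and every match must hold for the replacement to be used. A hypothetical entry showing the keys the code reads (all values are invented):

toy_rules = [
    {
        'replace': 'X',                                          # used only if all matches hold
        'matches': [
            {'type': 'suffix', 'scope': '!consonant'},           # next char must not be a consonant
            {'type': 'prefix', 'scope': 'exact', 'value': 'a'},  # literal 'a' must precede the match
        ],
    },
]
print(toy_rules[0]['replace'], len(toy_rules[0]['matches']))     # X 2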
kaustavdm/pyAvroPhonetic | pyavrophonetic/avro.py | process_match | def process_match(match, fixed_text, cur, cur_end):
"""Processes a single match in rules"""
# Set our tools
# -- Initial/default value for replace
replace = True
# -- Set check cursor depending on match['type']
if match['type'] == 'prefix':
chk = cur - 1
else:
# suffix
chk = cur_end
# -- Set scope based on whether scope is negative
if match['scope'].startswith('!'):
scope = match['scope'][1:]
negative = True
else:
scope = match['scope']
negative = False
# Let the matching begin
# -- Punctuations
if scope == 'punctuation':
# Conditions: XORd with negative
if (not ((chk < 0 and match['type'] == 'prefix') or
(chk >= len(fixed_text) and match['type'] == 'suffix') or
validate.is_punctuation(fixed_text[chk]))
^ negative):
replace = False
# -- Vowels -- Checks: 1. Cursor should not be at first character
# -- if prefix or last character if suffix, 2. Character at chk
# -- should be a vowel. 3. 'negative' will invert the value of 1
# -- AND 2
elif scope == 'vowel':
if (not (((chk >= 0 and match['type'] == 'prefix') or
(chk < len(fixed_text) and match['type'] == 'suffix'))
and validate.is_vowel(fixed_text[chk]))
^ negative):
replace = False
# -- Consonants -- Checks: 1. Cursor should not be at first
# -- character if prefix or last character if suffix, 2. Character
# -- at chk should be a consonant. 3. 'negative' will invert the
# -- value of 1 AND 2
elif scope == 'consonant':
if (not (((chk >= 0 and match['type'] == 'prefix') or
(chk < len(fixed_text) and match['type'] == 'suffix'))
and validate.is_consonant(fixed_text[chk]))
^ negative):
replace = False
# -- Exacts
elif scope == 'exact':
# Prepare cursor for exact search
if match['type'] == 'prefix':
exact_start = cur - len(match['value'])
exact_end = cur
else:
# suffix
exact_start = cur_end
exact_end = cur_end + len(match['value'])
# Validate exact find.
if not validate.is_exact(match['value'], fixed_text, exact_start,
exact_end, negative):
replace = False
# Return replace, which will be true if none of the checks above match
return replace | python | def process_match(match, fixed_text, cur, cur_end):
"""Processes a single match in rules"""
# Set our tools
# -- Initial/default value for replace
replace = True
# -- Set check cursor depending on match['type']
if match['type'] == 'prefix':
chk = cur - 1
else:
# suffix
chk = cur_end
# -- Set scope based on whether scope is negative
if match['scope'].startswith('!'):
scope = match['scope'][1:]
negative = True
else:
scope = match['scope']
negative = False
# Let the matching begin
# -- Punctuations
if scope == 'punctuation':
# Conditions: XORd with negative
if (not ((chk < 0 and match['type'] == 'prefix') or
(chk >= len(fixed_text) and match['type'] == 'suffix') or
validate.is_punctuation(fixed_text[chk]))
^ negative):
replace = False
# -- Vowels -- Checks: 1. Cursor should not be at first character
# -- if prefix or last character if suffix, 2. Character at chk
# -- should be a vowel. 3. 'negative' will invert the value of 1
# -- AND 2
elif scope == 'vowel':
if (not (((chk >= 0 and match['type'] == 'prefix') or
(chk < len(fixed_text) and match['type'] == 'suffix'))
and validate.is_vowel(fixed_text[chk]))
^ negative):
replace = False
# -- Consonants -- Checks: 1. Cursor should not be at first
# -- character if prefix or last character if suffix, 2. Character
# -- at chk should be a consonant. 3. 'negative' will invert the
# -- value of 1 AND 2
elif scope == 'consonant':
if (not (((chk >= 0 and match['type'] == 'prefix') or
(chk < len(fixed_text) and match['type'] == 'suffix'))
and validate.is_consonant(fixed_text[chk]))
^ negative):
replace = False
# -- Exacts
elif scope == 'exact':
# Prepare cursor for exact search
if match['type'] == 'prefix':
exact_start = cur - len(match['value'])
exact_end = cur
else:
# suffix
exact_start = cur_end
exact_end = cur_end + len(match['value'])
# Validate exact find.
if not validate.is_exact(match['value'], fixed_text, exact_start,
exact_end, negative):
replace = False
# Return replace, which will be true if none of the checks above match
return replace | [
"def",
"process_match",
"(",
"match",
",",
"fixed_text",
",",
"cur",
",",
"cur_end",
")",
":",
"# Set our tools",
"# -- Initial/default value for replace",
"replace",
"=",
"True",
"# -- Set check cursor depending on match['type']",
"if",
"match",
"[",
"'type'",
"]",
"==",
"'prefix'",
":",
"chk",
"=",
"cur",
"-",
"1",
"else",
":",
"# suffix",
"chk",
"=",
"cur_end",
"# -- Set scope based on whether scope is negative",
"if",
"match",
"[",
"'scope'",
"]",
".",
"startswith",
"(",
"'!'",
")",
":",
"scope",
"=",
"match",
"[",
"'scope'",
"]",
"[",
"1",
":",
"]",
"negative",
"=",
"True",
"else",
":",
"scope",
"=",
"match",
"[",
"'scope'",
"]",
"negative",
"=",
"False",
"# Let the matching begin",
"# -- Punctuations",
"if",
"scope",
"==",
"'punctuation'",
":",
"# Conditions: XORd with negative",
"if",
"(",
"not",
"(",
"(",
"chk",
"<",
"0",
"and",
"match",
"[",
"'type'",
"]",
"==",
"'prefix'",
")",
"or",
"(",
"chk",
">=",
"len",
"(",
"fixed_text",
")",
"and",
"match",
"[",
"'type'",
"]",
"==",
"'suffix'",
")",
"or",
"validate",
".",
"is_punctuation",
"(",
"fixed_text",
"[",
"chk",
"]",
")",
")",
"^",
"negative",
")",
":",
"replace",
"=",
"False",
"# -- Vowels -- Checks: 1. Cursor should not be at first character",
"# -- if prefix or last character if suffix, 2. Character at chk",
"# -- should be a vowel. 3. 'negative' will invert the value of 1",
"# -- AND 2",
"elif",
"scope",
"==",
"'vowel'",
":",
"if",
"(",
"not",
"(",
"(",
"(",
"chk",
">=",
"0",
"and",
"match",
"[",
"'type'",
"]",
"==",
"'prefix'",
")",
"or",
"(",
"chk",
"<",
"len",
"(",
"fixed_text",
")",
"and",
"match",
"[",
"'type'",
"]",
"==",
"'suffix'",
")",
")",
"and",
"validate",
".",
"is_vowel",
"(",
"fixed_text",
"[",
"chk",
"]",
")",
")",
"^",
"negative",
")",
":",
"replace",
"=",
"False",
"# -- Consonants -- Checks: 1. Cursor should not be at first",
"# -- character if prefix or last character if suffix, 2. Character",
"# -- at chk should be a consonant. 3. 'negative' will invert the",
"# -- value of 1 AND 2",
"elif",
"scope",
"==",
"'consonant'",
":",
"if",
"(",
"not",
"(",
"(",
"(",
"chk",
">=",
"0",
"and",
"match",
"[",
"'type'",
"]",
"==",
"'prefix'",
")",
"or",
"(",
"chk",
"<",
"len",
"(",
"fixed_text",
")",
"and",
"match",
"[",
"'type'",
"]",
"==",
"'suffix'",
")",
")",
"and",
"validate",
".",
"is_consonant",
"(",
"fixed_text",
"[",
"chk",
"]",
")",
")",
"^",
"negative",
")",
":",
"replace",
"=",
"False",
"# -- Exacts",
"elif",
"scope",
"==",
"'exact'",
":",
"# Prepare cursor for exact search",
"if",
"match",
"[",
"'type'",
"]",
"==",
"'prefix'",
":",
"exact_start",
"=",
"cur",
"-",
"len",
"(",
"match",
"[",
"'value'",
"]",
")",
"exact_end",
"=",
"cur",
"else",
":",
"# suffix",
"exact_start",
"=",
"cur_end",
"exact_end",
"=",
"cur_end",
"+",
"len",
"(",
"match",
"[",
"'value'",
"]",
")",
"# Validate exact find.",
"if",
"not",
"validate",
".",
"is_exact",
"(",
"match",
"[",
"'value'",
"]",
",",
"fixed_text",
",",
"exact_start",
",",
"exact_end",
",",
"negative",
")",
":",
"replace",
"=",
"False",
"# Return replace, which will be true if none of the checks above match",
"return",
"replace"
] | Processes a single match in rules | [
"Processes",
"a",
"single",
"match",
"in",
"rules"
] | 26b7d567d8db025f2cac4de817e716390d7ac337 | https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L185-L248 | train |
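A compact trace of the suffix/vowel branch in the record above, using a tiny stand-in for the validate.is_vowel helper it calls (the stand-in and the sample text are assumptions made for this sketch):

def is_vowel(ch):
    # simplified stand-in for the real validate.is_vowel
    return ch.lower() in 'aeiou'

fixed_text = 'kam'
cur, cur_end = 1, 2                               # the matched pattern covers index 1 only
match = {'type': 'suffix', 'scope': 'vowel'}      # hypothetical rule match

chk = cur_end                                     # suffix: look just after the matched span
negative = match['scope'].startswith('!')
condition = (chk < len(fixed_text)) and is_vowel(fixed_text[chk])
replace = not ((not condition) ^ negative)        # same XOR-with-negative logic as above
print(replace)   # False: the character after the match is 'm', not a vowel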
digidotcom/python-wvalib | wva/cli.py | cli | def cli(ctx, hostname, username, password, config_dir, https):
"""Command-line interface for interacting with a WVA device"""
ctx.is_root = True
ctx.user_values_entered = False
ctx.config_dir = os.path.abspath(os.path.expanduser(config_dir))
ctx.config = load_config(ctx)
ctx.hostname = hostname
ctx.username = username
ctx.password = password
ctx.https = https
# Creating the WVA object is deferred as some commands like clearconfig
# should not require a username/password to perform them
ctx.wva = None | python | def cli(ctx, hostname, username, password, config_dir, https):
"""Command-line interface for interacting with a WVA device"""
ctx.is_root = True
ctx.user_values_entered = False
ctx.config_dir = os.path.abspath(os.path.expanduser(config_dir))
ctx.config = load_config(ctx)
ctx.hostname = hostname
ctx.username = username
ctx.password = password
ctx.https = https
# Creating the WVA object is deferred as some commands like clearconfig
# should not require a username/password to perform them
ctx.wva = None | [
"def",
"cli",
"(",
"ctx",
",",
"hostname",
",",
"username",
",",
"password",
",",
"config_dir",
",",
"https",
")",
":",
"ctx",
".",
"is_root",
"=",
"True",
"ctx",
".",
"user_values_entered",
"=",
"False",
"ctx",
".",
"config_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"config_dir",
")",
")",
"ctx",
".",
"config",
"=",
"load_config",
"(",
"ctx",
")",
"ctx",
".",
"hostname",
"=",
"hostname",
"ctx",
".",
"username",
"=",
"username",
"ctx",
".",
"password",
"=",
"password",
"ctx",
".",
"https",
"=",
"https",
"# Creating the WVA object is deferred as some commands like clearconfig",
"# should not require a username/password to perform them",
"ctx",
".",
"wva",
"=",
"None"
] | Command-line interface for interacting with a WVA device | [
"Command",
"-",
"line",
"interface",
"for",
"interacting",
"with",
"a",
"WVA",
"device"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L110-L123 | train |
digidotcom/python-wvalib | wva/cli.py | get | def get(ctx, uri):
"""Perform an HTTP GET of the provided URI
The URI provided is relative to the /ws base to allow for easy navigation of
the resources exposed by the WVA. Example Usage::
\b
$ wva get /
{'ws': ['vehicle',
'hw',
'config',
'state',
'files',
'alarms',
'subscriptions',
'password']}
$ wva get /vehicle
{'vehicle': ['vehicle/ecus', 'vehicle/data', 'vehicle/dtc']}
$ wva get /vehicle/ecus
{'ecus': ['vehicle/ecus/can0ecu0', 'vehicle/ecus/can0ecu251']}
$ wva get /vehicle/ecus/can0ecu0
{'can0ecu0': ['vehicle/ecus/can0ecu0/name',
'vehicle/ecus/can0ecu0/address',
'vehicle/ecus/can0ecu0/function',
'vehicle/ecus/can0ecu0/bus',
'vehicle/ecus/can0ecu0/channel',
'vehicle/ecus/can0ecu0/make',
'vehicle/ecus/can0ecu0/model',
'vehicle/ecus/can0ecu0/serial_number',
'vehicle/ecus/can0ecu0/unit_number',
'vehicle/ecus/can0ecu0/VIN']}
$ wva get /vehicle/ecus/can0ecu0/bus
{'bus': 'J1939'}
"""
http_client = get_wva(ctx).get_http_client()
cli_pprint(http_client.get(uri)) | python | def get(ctx, uri):
"""Perform an HTTP GET of the provided URI
The URI provided is relative to the /ws base to allow for easy navigation of
the resources exposed by the WVA. Example Usage::
\b
$ wva get /
{'ws': ['vehicle',
'hw',
'config',
'state',
'files',
'alarms',
'subscriptions',
'password']}
$ wva get /vehicle
{'vehicle': ['vehicle/ecus', 'vehicle/data', 'vehicle/dtc']}
$ wva get /vehicle/ecus
{'ecus': ['vehicle/ecus/can0ecu0', 'vehicle/ecus/can0ecu251']}
$ wva get /vehicle/ecus/can0ecu0
{'can0ecu0': ['vehicle/ecus/can0ecu0/name',
'vehicle/ecus/can0ecu0/address',
'vehicle/ecus/can0ecu0/function',
'vehicle/ecus/can0ecu0/bus',
'vehicle/ecus/can0ecu0/channel',
'vehicle/ecus/can0ecu0/make',
'vehicle/ecus/can0ecu0/model',
'vehicle/ecus/can0ecu0/serial_number',
'vehicle/ecus/can0ecu0/unit_number',
'vehicle/ecus/can0ecu0/VIN']}
$ wva get /vehicle/ecus/can0ecu0/bus
{'bus': 'J1939'}
"""
http_client = get_wva(ctx).get_http_client()
cli_pprint(http_client.get(uri)) | [
"def",
"get",
"(",
"ctx",
",",
"uri",
")",
":",
"http_client",
"=",
"get_wva",
"(",
"ctx",
")",
".",
"get_http_client",
"(",
")",
"cli_pprint",
"(",
"http_client",
".",
"get",
"(",
"uri",
")",
")"
] | Perform an HTTP GET of the provided URI
The URI provided is relative to the /ws base to allow for easy navigation of
the resources exposed by the WVA. Example Usage::
\b
$ wva get /
{'ws': ['vehicle',
'hw',
'config',
'state',
'files',
'alarms',
'subscriptions',
'password']}
$ wva get /vehicle
{'vehicle': ['vehicle/ecus', 'vehicle/data', 'vehicle/dtc']}
$ wva get /vehicle/ecus
{'ecus': ['vehicle/ecus/can0ecu0', 'vehicle/ecus/can0ecu251']}
$ wva get /vehicle/ecus/can0ecu0
{'can0ecu0': ['vehicle/ecus/can0ecu0/name',
'vehicle/ecus/can0ecu0/address',
'vehicle/ecus/can0ecu0/function',
'vehicle/ecus/can0ecu0/bus',
'vehicle/ecus/can0ecu0/channel',
'vehicle/ecus/can0ecu0/make',
'vehicle/ecus/can0ecu0/model',
'vehicle/ecus/can0ecu0/serial_number',
'vehicle/ecus/can0ecu0/unit_number',
'vehicle/ecus/can0ecu0/VIN']}
$ wva get /vehicle/ecus/can0ecu0/bus
{'bus': 'J1939'} | [
"Perform",
"an",
"HTTP",
"GET",
"of",
"the",
"provided",
"URI"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L161-L196 | train |
digidotcom/python-wvalib | wva/cli.py | delete | def delete(ctx, uri):
"""DELETE the specified URI
Example:
\b
$ wva get files/userfs/WEB/python
{'file_list': ['files/userfs/WEB/python/.ssh',
'files/userfs/WEB/python/README.md']}
$ wva delete files/userfs/WEB/python/README.md
''
$ wva get files/userfs/WEB/python
{'file_list': ['files/userfs/WEB/python/.ssh']}
"""
http_client = get_wva(ctx).get_http_client()
cli_pprint(http_client.delete(uri)) | python | def delete(ctx, uri):
"""DELETE the specified URI
Example:
\b
$ wva get files/userfs/WEB/python
{'file_list': ['files/userfs/WEB/python/.ssh',
'files/userfs/WEB/python/README.md']}
$ wva delete files/userfs/WEB/python/README.md
''
$ wva get files/userfs/WEB/python
{'file_list': ['files/userfs/WEB/python/.ssh']}
"""
http_client = get_wva(ctx).get_http_client()
cli_pprint(http_client.delete(uri)) | [
"def",
"delete",
"(",
"ctx",
",",
"uri",
")",
":",
"http_client",
"=",
"get_wva",
"(",
"ctx",
")",
".",
"get_http_client",
"(",
")",
"cli_pprint",
"(",
"http_client",
".",
"delete",
"(",
"uri",
")",
")"
] | DELETE the specified URI
Example:
\b
$ wva get files/userfs/WEB/python
{'file_list': ['files/userfs/WEB/python/.ssh',
'files/userfs/WEB/python/README.md']}
$ wva delete files/userfs/WEB/python/README.md
''
$ wva get files/userfs/WEB/python
{'file_list': ['files/userfs/WEB/python/.ssh']} | [
"DELETE",
"the",
"specified",
"URI"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L202-L217 | train |
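The same DELETE sequence from the docstring, sketched against the library (device address, credentials, and file path are illustrative):

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials
http_client = wva.get_http_client()

# List the directory, delete one file, then list again to confirm removal,
# mirroring the `wva get` / `wva delete` session shown above.
print(http_client.get("files/userfs/WEB/python"))
http_client.delete("files/userfs/WEB/python/README.md")
print(http_client.get("files/userfs/WEB/python"))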
digidotcom/python-wvalib | wva/cli.py | post | def post(ctx, uri, input_file):
"""POST file data to a specific URI
Note that POST is not used for most web services URIs. Instead,
PUT is used for creating resources.
"""
http_client = get_wva(ctx).get_http_client()
cli_pprint(http_client.post(uri, input_file.read())) | python | def post(ctx, uri, input_file):
"""POST file data to a specific URI
Note that POST is not used for most web services URIs. Instead,
PUT is used for creating resources.
"""
http_client = get_wva(ctx).get_http_client()
cli_pprint(http_client.post(uri, input_file.read())) | [
"def",
"post",
"(",
"ctx",
",",
"uri",
",",
"input_file",
")",
":",
"http_client",
"=",
"get_wva",
"(",
"ctx",
")",
".",
"get_http_client",
"(",
")",
"cli_pprint",
"(",
"http_client",
".",
"post",
"(",
"uri",
",",
"input_file",
".",
"read",
"(",
")",
")",
")"
] | POST file data to a specific URI
Note that POST is not used for most web services URIs. Instead,
PUT is used for creating resources. | [
"POST",
"file",
"data",
"to",
"a",
"specific",
"URI"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L224-L231 | train |
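A sketch of the same POST from Python. The target URI and local file name are placeholders; as the docstring notes, most WVA resources are created with PUT, so POST only applies to endpoints that accept it.

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials
http_client = wva.get_http_client()

# Read a local file and POST its raw contents to a hypothetical URI,
# which is what `wva post URI INPUT_FILE` does with its file argument.
with open("payload.xml", "rb") as input_file:
    print(http_client.post("some/post_capable_uri", input_file.read()))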
digidotcom/python-wvalib | wva/cli.py | sample | def sample(ctx, element, timestamp, repeat, delay):
"""Sample the value of a vehicle data element
This command allows for the current value of a vehicle data element
to be sampled:
\b
$ wva vehicle sample VehicleSpeed
168.15329
Optionally, the value may be sampled multiple times:
\b
$ wva vehicle sample VehicleSpeed --repeat 10 --delay 1 --timestamp
148.076462 at Tue Mar 24 23:52:56 2015
145.564896 at Tue Mar 24 23:52:57 2015
143.057251 at Tue Mar 24 23:52:58 2015
138.03804 at Tue Mar 24 23:52:59 2015
135.526474 at Tue Mar 24 23:53:00 2015
133.018829 at Tue Mar 24 23:53:01 2015
130.507263 at Tue Mar 24 23:53:02 2015
127.999619 at Tue Mar 24 23:53:03 2015
125.48806 at Tue Mar 24 23:53:04 2015
122.976501 at Tue Mar 24 23:53:05 2015
For receiving large amounts of data on a periodic basis, use of subscriptions
and streams is encouraged as it will be significantly more efficient.
"""
element = get_wva(ctx).get_vehicle_data_element(element)
for i in xrange(repeat):
curval = element.sample()
if timestamp:
print("{} at {}".format(curval.value, curval.timestamp.ctime()))
else:
print("{}".format(curval.value))
if i + 1 < repeat: # do not delay on last iteration
time.sleep(delay) | python | def sample(ctx, element, timestamp, repeat, delay):
"""Sample the value of a vehicle data element
This command allows for the current value of a vehicle data element
to be sampled:
\b
$ wva vehicle sample VehicleSpeed
168.15329
Optionally, the value may be sampled multiple times:
\b
$ wva vehicle sample VehicleSpeed --repeat 10 --delay 1 --timestamp
148.076462 at Tue Mar 24 23:52:56 2015
145.564896 at Tue Mar 24 23:52:57 2015
143.057251 at Tue Mar 24 23:52:58 2015
138.03804 at Tue Mar 24 23:52:59 2015
135.526474 at Tue Mar 24 23:53:00 2015
133.018829 at Tue Mar 24 23:53:01 2015
130.507263 at Tue Mar 24 23:53:02 2015
127.999619 at Tue Mar 24 23:53:03 2015
125.48806 at Tue Mar 24 23:53:04 2015
122.976501 at Tue Mar 24 23:53:05 2015
For receiving large amounts of data on a periodic basis, use of subscriptions
and streams is encouraged as it will be significantly more efficient.
"""
element = get_wva(ctx).get_vehicle_data_element(element)
for i in xrange(repeat):
curval = element.sample()
if timestamp:
print("{} at {}".format(curval.value, curval.timestamp.ctime()))
else:
print("{}".format(curval.value))
if i + 1 < repeat: # do not delay on last iteration
time.sleep(delay) | [
"def",
"sample",
"(",
"ctx",
",",
"element",
",",
"timestamp",
",",
"repeat",
",",
"delay",
")",
":",
"element",
"=",
"get_wva",
"(",
"ctx",
")",
".",
"get_vehicle_data_element",
"(",
"element",
")",
"for",
"i",
"in",
"xrange",
"(",
"repeat",
")",
":",
"curval",
"=",
"element",
".",
"sample",
"(",
")",
"if",
"timestamp",
":",
"print",
"(",
"\"{} at {}\"",
".",
"format",
"(",
"curval",
".",
"value",
",",
"curval",
".",
"timestamp",
".",
"ctime",
"(",
")",
")",
")",
"else",
":",
"print",
"(",
"\"{}\"",
".",
"format",
"(",
"curval",
".",
"value",
")",
")",
"if",
"i",
"+",
"1",
"<",
"repeat",
":",
"# do not delay on last iteration",
"time",
".",
"sleep",
"(",
"delay",
")"
] | Sample the value of a vehicle data element
This command allows for the current value of a vehicle data element
to be sampled:
\b
$ wva vehicle sample VehicleSpeed
168.15329
Optionally, the value may be sampled multiple times:
\b
$ wva vehicle sample VehicleSpeed --repeat 10 --delay 1 --timestamp
148.076462 at Tue Mar 24 23:52:56 2015
145.564896 at Tue Mar 24 23:52:57 2015
143.057251 at Tue Mar 24 23:52:58 2015
138.03804 at Tue Mar 24 23:52:59 2015
135.526474 at Tue Mar 24 23:53:00 2015
133.018829 at Tue Mar 24 23:53:01 2015
130.507263 at Tue Mar 24 23:53:02 2015
127.999619 at Tue Mar 24 23:53:03 2015
125.48806 at Tue Mar 24 23:53:04 2015
122.976501 at Tue Mar 24 23:53:05 2015
For receiving large amounts of data on a periodic basis, use of subscriptions
and streams is encouraged as it will be significantly more efficient. | [
"Sample",
"the",
"value",
"of",
"a",
"vehicle",
"data",
"element"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L293-L330 | train |
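The polling loop can be written against the library directly. The sketch below uses only calls visible in the record (get_vehicle_data_element(), sample(), and the value/timestamp pair on the result); the constructor arguments are assumed.

import time
from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials
element = wva.get_vehicle_data_element("VehicleSpeed")

# Sample the element ten times, one second apart, printing value and timestamp.
for i in range(10):
    curval = element.sample()
    print("{} at {}".format(curval.value, curval.timestamp.ctime()))
    if i < 9:  # no delay after the final sample
        time.sleep(1)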
digidotcom/python-wvalib | wva/cli.py | list | def list(ctx):
"""List short name of all current subscriptions"""
wva = get_wva(ctx)
for subscription in wva.get_subscriptions():
print(subscription.short_name) | python | def list(ctx):
"""List short name of all current subscriptions"""
wva = get_wva(ctx)
for subscription in wva.get_subscriptions():
print(subscription.short_name) | [
"def",
"list",
"(",
"ctx",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"for",
"subscription",
"in",
"wva",
".",
"get_subscriptions",
"(",
")",
":",
"print",
"(",
"subscription",
".",
"short_name",
")"
] | List short name of all current subscriptions | [
"List",
"short",
"name",
"of",
"all",
"current",
"subscriptions"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L344-L348 | train |
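For completeness, the equivalent listing from Python (device details assumed):

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials

# get_subscriptions() yields subscription objects; short_name identifies each one.
for subscription in wva.get_subscriptions():
    print(subscription.short_name)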
digidotcom/python-wvalib | wva/cli.py | delete | def delete(ctx, short_name):
"""Delete a specific subscription by short name"""
wva = get_wva(ctx)
subscription = wva.get_subscription(short_name)
subscription.delete() | python | def delete(ctx, short_name):
"""Delete a specific subscription by short name"""
wva = get_wva(ctx)
subscription = wva.get_subscription(short_name)
subscription.delete() | [
"def",
"delete",
"(",
"ctx",
",",
"short_name",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"subscription",
"=",
"wva",
".",
"get_subscription",
"(",
"short_name",
")",
"subscription",
".",
"delete",
"(",
")"
] | Delete a specific subscription by short name | [
"Delete",
"a",
"specific",
"subscription",
"by",
"short",
"name"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L354-L358 | train |
digidotcom/python-wvalib | wva/cli.py | clear | def clear(ctx):
"""Remove all registered subscriptions
Example:
\b
$ wva subscriptions clear
Deleting engineload... Done
Deleting fuelrate... Done
Deleting throttle... Done
Deleting rpm... Done
Deleting speedy... Done
To remove a specific subscription, use 'wva subscriptions delete <name>' instead.
"""
wva = get_wva(ctx)
for subscription in wva.get_subscriptions():
sys.stdout.write("Deleting {}... ".format(subscription.short_name))
sys.stdout.flush()
subscription.delete()
print("Done") | python | def clear(ctx):
"""Remove all registered subscriptions
Example:
\b
$ wva subscriptions clear
Deleting engineload... Done
Deleting fuelrate... Done
Deleting throttle... Done
Deleting rpm... Done
Deleting speedy... Done
To remove a specific subscription, use 'wva subscriptions delete <name>' instead.
"""
wva = get_wva(ctx)
for subscription in wva.get_subscriptions():
sys.stdout.write("Deleting {}... ".format(subscription.short_name))
sys.stdout.flush()
subscription.delete()
print("Done") | [
"def",
"clear",
"(",
"ctx",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"for",
"subscription",
"in",
"wva",
".",
"get_subscriptions",
"(",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"Deleting {}... \"",
".",
"format",
"(",
"subscription",
".",
"short_name",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"subscription",
".",
"delete",
"(",
")",
"print",
"(",
"\"Done\"",
")"
] | Remove all registered subscriptions
Example:
\b
$ wva subscriptions clear
Deleting engineload... Done
Deleting fuelrate... Done
Deleting throttle... Done
Deleting rpm... Done
Deleting speedy... Done
To remove a specific subscription, use 'wva subscriptions delete <name>' instead. | [
"Remove",
"all",
"registered",
"subscriptions"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L363-L383 | train |
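The clear command is just iteration plus delete; a library-level sketch with assumed device details:

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials

# Delete every registered subscription, reporting each short name as we go.
for subscription in wva.get_subscriptions():
    print("Deleting {}...".format(subscription.short_name))
    subscription.delete()
print("Done")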
digidotcom/python-wvalib | wva/cli.py | show | def show(ctx, short_name):
"""Show metadata for a specific subscription
Example:
\b
$ wva subscriptions show speed
{'buffer': 'queue', 'interval': 5, 'uri': 'vehicle/data/VehicleSpeed'}
"""
wva = get_wva(ctx)
subscription = wva.get_subscription(short_name)
cli_pprint(subscription.get_metadata()) | python | def show(ctx, short_name):
"""Show metadata for a specific subscription
Example:
\b
$ wva subscriptions show speed
{'buffer': 'queue', 'interval': 5, 'uri': 'vehicle/data/VehicleSpeed'}
"""
wva = get_wva(ctx)
subscription = wva.get_subscription(short_name)
cli_pprint(subscription.get_metadata()) | [
"def",
"show",
"(",
"ctx",
",",
"short_name",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"subscription",
"=",
"wva",
".",
"get_subscription",
"(",
"short_name",
")",
"cli_pprint",
"(",
"subscription",
".",
"get_metadata",
"(",
")",
")"
] | Show metadata for a specific subscription
Example:
\b
$ wva subscriptions show speed
{'buffer': 'queue', 'interval': 5, 'uri': 'vehicle/data/VehicleSpeed'} | [
"Show",
"metadata",
"for",
"a",
"specific",
"subscription"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L389-L400 | train |
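Library equivalent of the show command, using the docstring's example name 'speed' (device details assumed):

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials

# Fetch the stored metadata (uri, interval, buffer) for one subscription.
print(wva.get_subscription("speed").get_metadata())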
digidotcom/python-wvalib | wva/cli.py | add | def add(ctx, short_name, uri, interval, buffer):
"""Add a subscription with a given short_name for a given uri
This command can be used to create subscriptions to receive new pieces
of vehicle data on the stream channel on a periodic basis. By default,
subscriptions are buffered and have a 5 second interval:
\b
$ wva subscriptions add speed vehicle/data/VehicleSpeed
$ wva subscriptions show speed
{'buffer': 'queue', 'interval': 5, 'uri': 'vehicle/data/VehicleSpeed'}
These parameters can be modified by the use of optional arguments:
$ wva subscriptions add rpm vehicle/data/EngineSpeed --interval 1 --buffer discard
$ wva subscriptions show rpm
{'buffer': 'discard', 'interval': 1, 'uri': 'vehicle/data/EngineSpeed'}
To view the data coming in as a result of these subscriptions, one can use
either 'wva subscriptions listen' or 'wva subscriptions graph <name>'.
"""
wva = get_wva(ctx)
subscription = wva.get_subscription(short_name)
subscription.create(uri, buffer, interval) | python | def add(ctx, short_name, uri, interval, buffer):
"""Add a subscription with a given short_name for a given uri
This command can be used to create subscriptions to receive new pieces
of vehicle data on the stream channel on a periodic basis. By default,
subscriptions are buffered and have a 5 second interval:
\b
$ wva subscriptions add speed vehicle/data/VehicleSpeed
$ wva subscriptions show speed
{'buffer': 'queue', 'interval': 5, 'uri': 'vehicle/data/VehicleSpeed'}
These parameters can be modified by the use of optional arguments:
$ wva subscriptions add rpm vehicle/data/EngineSpeed --interval 1 --buffer discard
$ wva subscriptions show rpm
{'buffer': 'discard', 'interval': 1, 'uri': 'vehicle/data/EngineSpeed'}
To view the data coming in as a result of these subscriptions, one can use
either 'wva subscriptions listen' or 'wva subscriptions graph <name>'.
"""
wva = get_wva(ctx)
subscription = wva.get_subscription(short_name)
subscription.create(uri, buffer, interval) | [
"def",
"add",
"(",
"ctx",
",",
"short_name",
",",
"uri",
",",
"interval",
",",
"buffer",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"subscription",
"=",
"wva",
".",
"get_subscription",
"(",
"short_name",
")",
"subscription",
".",
"create",
"(",
"uri",
",",
"buffer",
",",
"interval",
")"
] | Add a subscription with a given short_name for a given uri
This command can be used to create subscriptions to receive new pieces
of vehicle data on the stream channel on a periodic basis. By default,
subscriptions are buffered and have a 5 second interval:
\b
$ wva subscriptions add speed vehicle/data/VehicleSpeed
$ wva subscriptions show speed
{'buffer': 'queue', 'interval': 5, 'uri': 'vehicle/data/VehicleSpeed'}
These parameters can be modified by the use of optional arguments:
$ wva subscriptions add rpm vehicle/data/EngineSpeed --interval 1 --buffer discard
$ wva subscriptions show rpm
{'buffer': 'discard', 'interval': 1, 'uri': 'vehicle/data/EngineSpeed'}
To view the data coming in as a result of these subscriptions, one can use
either 'wva subscriptions listen' or 'wva subscriptions graph <name>'. | [
"Add",
"a",
"subscription",
"with",
"a",
"given",
"short_name",
"for",
"a",
"given",
"uri"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L409-L432 | train |
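Creating subscriptions from Python uses the same create(uri, buffer, interval) call the CLI wraps; a sketch with the docstring's example values (device details assumed):

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials

# A buffered subscription to vehicle speed with a 5 second interval...
wva.get_subscription("speed").create("vehicle/data/VehicleSpeed", "queue", 5)
# ...and a 1 second, non-buffered subscription to engine speed.
wva.get_subscription("rpm").create("vehicle/data/EngineSpeed", "discard", 1)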
digidotcom/python-wvalib | wva/cli.py | listen | def listen(ctx):
"""Output the contents of the WVA event stream
This command shows the data being received from the WVA event stream based on
the subscriptions that have been set up and the data on the WVA vehicle bus.
\b
$ wva subscriptions listen
{'data': {'VehicleSpeed': {'timestamp': '2015-03-25T00:11:53Z',
'value': 198.272461},
'sequence': 124,
'short_name': 'speed',
'timestamp': '2015-03-25T00:11:53Z',
'uri': 'vehicle/data/VehicleSpeed'}}
{'data': {'EngineSpeed': {'timestamp': '2015-03-25T00:11:54Z',
'value': 6425.5},
'sequence': 274,
'short_name': 'rpm',
'timestamp': '2015-03-25T00:11:54Z',
'uri': 'vehicle/data/EngineSpeed'}}
...
^C
Aborted!
This command can be useful for debugging subscriptions or getting a quick
glimpse at what data is coming in to a WVA device.
"""
wva = get_wva(ctx)
es = wva.get_event_stream()
def cb(event):
cli_pprint(event)
es.add_event_listener(cb)
es.enable()
while True:
time.sleep(5) | python | def listen(ctx):
"""Output the contents of the WVA event stream
This command shows the data being received from the WVA event stream based on
the subscriptions that have been set up and the data on the WVA vehicle bus.
\b
$ wva subscriptions listen
{'data': {'VehicleSpeed': {'timestamp': '2015-03-25T00:11:53Z',
'value': 198.272461},
'sequence': 124,
'short_name': 'speed',
'timestamp': '2015-03-25T00:11:53Z',
'uri': 'vehicle/data/VehicleSpeed'}}
{'data': {'EngineSpeed': {'timestamp': '2015-03-25T00:11:54Z',
'value': 6425.5},
'sequence': 274,
'short_name': 'rpm',
'timestamp': '2015-03-25T00:11:54Z',
'uri': 'vehicle/data/EngineSpeed'}}
...
^C
Aborted!
This command can be useful for debugging subscriptions or getting a quick
glimpse at what data is coming in to a WVA device.
"""
wva = get_wva(ctx)
es = wva.get_event_stream()
def cb(event):
cli_pprint(event)
es.add_event_listener(cb)
es.enable()
while True:
time.sleep(5) | [
"def",
"listen",
"(",
"ctx",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"es",
"=",
"wva",
".",
"get_event_stream",
"(",
")",
"def",
"cb",
"(",
"event",
")",
":",
"cli_pprint",
"(",
"event",
")",
"es",
".",
"add_event_listener",
"(",
"cb",
")",
"es",
".",
"enable",
"(",
")",
"while",
"True",
":",
"time",
".",
"sleep",
"(",
"5",
")"
] | Output the contents of the WVA event stream
This command shows the data being received from the WVA event stream based on
the subscriptions that have been set up and the data on the WVA vehicle bus.
\b
$ wva subscriptions listen
{'data': {'VehicleSpeed': {'timestamp': '2015-03-25T00:11:53Z',
'value': 198.272461},
'sequence': 124,
'short_name': 'speed',
'timestamp': '2015-03-25T00:11:53Z',
'uri': 'vehicle/data/VehicleSpeed'}}
{'data': {'EngineSpeed': {'timestamp': '2015-03-25T00:11:54Z',
'value': 6425.5},
'sequence': 274,
'short_name': 'rpm',
'timestamp': '2015-03-25T00:11:54Z',
'uri': 'vehicle/data/EngineSpeed'}}
...
^C
Aborted!
This command can be useful for debugging subscriptions or getting a quick
glimpse at what data is coming in to a WVA device. | [
"Output",
"the",
"contents",
"of",
"the",
"WVA",
"event",
"stream"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L437-L473 | train |
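The event stream can also be consumed programmatically with a callback; this sketch mirrors the command body (constructor assumed; the loop keeps the process alive while events arrive in the background):

import time
from pprint import pprint
from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials
es = wva.get_event_stream()

# Every event delivered on the stream channel is handed to this callback.
def on_event(event):
    pprint(event)

es.add_event_listener(on_event)
es.enable()  # start receiving events for existing subscriptions

while True:  # keep the main thread alive; Ctrl-C to stop
    time.sleep(5)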
digidotcom/python-wvalib | wva/cli.py | graph | def graph(ctx, items, seconds, ylim):
"""Present a live graph of the incoming streaming data
This command requires that matplotlib be installed and accessible
to the application in order to work. The application reads
data from the WVA event stream and plots all data for specified
parameters within some time window. Subscriptions must be
set up prior to running this command for it to work.
As an example, let's say that I want to show the last 3 minutes (180 seconds)
of speed and rpm data for my device. In that case, I work set up my
subscriptions and execute the following...
\b
$ wva subscriptions graph --seconds=180 VehicleSpeed EngineSpeed
"""
wva = get_wva(ctx)
es = wva.get_event_stream()
try:
from wva import grapher
except ImportError:
print("Unable to graph... you must have matplotlib installed")
else:
stream_grapher = grapher.WVAStreamGrapher(wva, items, seconds=seconds, ylim=ylim)
es.enable()
stream_grapher.run() | python | def graph(ctx, items, seconds, ylim):
"""Present a live graph of the incoming streaming data
This command requires that matplotlib be installed and accessible
to the application in order to work. The application reads
data from the WVA event stream and plots all data for specified
parameters within some time window. Subscriptions must be
set up prior to running this command for it to work.
As an example, let's say that I want to show the last 3 minutes (180 seconds)
of speed and rpm data for my device. In that case, I would set up my
subscriptions and execute the following...
\b
$ wva subscriptions graph --seconds=180 VehicleSpeed EngineSpeed
"""
wva = get_wva(ctx)
es = wva.get_event_stream()
try:
from wva import grapher
except ImportError:
print("Unable to graph... you must have matplotlib installed")
else:
stream_grapher = grapher.WVAStreamGrapher(wva, items, seconds=seconds, ylim=ylim)
es.enable()
stream_grapher.run() | [
"def",
"graph",
"(",
"ctx",
",",
"items",
",",
"seconds",
",",
"ylim",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"es",
"=",
"wva",
".",
"get_event_stream",
"(",
")",
"try",
":",
"from",
"wva",
"import",
"grapher",
"except",
"ImportError",
":",
"print",
"(",
"\"Unable to graph... you must have matplotlib installed\"",
")",
"else",
":",
"stream_grapher",
"=",
"grapher",
".",
"WVAStreamGrapher",
"(",
"wva",
",",
"items",
",",
"seconds",
"=",
"seconds",
",",
"ylim",
"=",
"ylim",
")",
"es",
".",
"enable",
"(",
")",
"stream_grapher",
".",
"run",
"(",
")"
] | Present a live graph of the incoming streaming data
This command requires that matplotlib be installed and accessible
to the application in order to work. The application reads
data from the WVA event stream and plots all data for specified
parameters within some time window. Subscriptions must be
set up prior to running this command for it to work.
As an example, let's say that I want to show the last 3 minutes (180 seconds)
of speed and rpm data for my device. In that case, I would set up my
subscriptions and execute the following...
\b
$ wva subscriptions graph --seconds=180 VehicleSpeed EngineSpeed | [
"Present",
"a",
"live",
"graph",
"of",
"the",
"incoming",
"streaming",
"data"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L481-L507 | train |
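Without the bundled grapher, a rough substitute is to poll values and plot them with matplotlib. This is an illustrative sketch only (matplotlib required, device details assumed); the actual command draws its data from the event stream rather than polling.

import time
from wva import WVA
import matplotlib.pyplot as plt

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials
element = wva.get_vehicle_data_element("VehicleSpeed")

timestamps, values = [], []
for _ in range(180):  # roughly three minutes of 1 Hz samples
    curval = element.sample()
    timestamps.append(curval.timestamp)
    values.append(curval.value)
    time.sleep(1)

plt.plot(timestamps, values)
plt.ylabel("VehicleSpeed")
plt.show()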
digidotcom/python-wvalib | wva/cli.py | authorize | def authorize(ctx, public_key, append):
"""Enable ssh login as the Python user for the current user
This command will create an authorized_keys file on the target device
containing the current user's public key. This will allow ssh to
the WVA from this machine.
"""
wva = get_wva(ctx)
http_client = wva.get_http_client()
authorized_keys_uri = "/files/userfs/WEB/python/.ssh/authorized_keys"
authorized_key_contents = public_key
if append:
try:
existing_contents = http_client.get(authorized_keys_uri)
authorized_key_contents = "{}\n{}".format(existing_contents, public_key)
except WVAHttpNotFoundError:
pass # file doesn't exist, just write the public key
http_client.put(authorized_keys_uri, authorized_key_contents)
print("Public key written to authorized_keys for python user.")
print("You should now be able to ssh to the device by doing the following:")
print("")
print(" $ ssh python@{}".format(get_root_ctx(ctx).hostname)) | python | def authorize(ctx, public_key, append):
"""Enable ssh login as the Python user for the current user
This command will create an authorized_keys file on the target device
containing the current user's public key. This will allow ssh to
the WVA from this machine.
"""
wva = get_wva(ctx)
http_client = wva.get_http_client()
authorized_keys_uri = "/files/userfs/WEB/python/.ssh/authorized_keys"
authorized_key_contents = public_key
if append:
try:
existing_contents = http_client.get(authorized_keys_uri)
authorized_key_contents = "{}\n{}".format(existing_contents, public_key)
except WVAHttpNotFoundError:
pass # file doesn't exist, just write the public key
http_client.put(authorized_keys_uri, authorized_key_contents)
print("Public key written to authorized_keys for python user.")
print("You should now be able to ssh to the device by doing the following:")
print("")
print(" $ ssh python@{}".format(get_root_ctx(ctx).hostname)) | [
"def",
"authorize",
"(",
"ctx",
",",
"public_key",
",",
"append",
")",
":",
"wva",
"=",
"get_wva",
"(",
"ctx",
")",
"http_client",
"=",
"wva",
".",
"get_http_client",
"(",
")",
"authorized_keys_uri",
"=",
"\"/files/userfs/WEB/python/.ssh/authorized_keys\"",
"authorized_key_contents",
"=",
"public_key",
"if",
"append",
":",
"try",
":",
"existing_contents",
"=",
"http_client",
".",
"get",
"(",
"authorized_keys_uri",
")",
"authorized_key_contents",
"=",
"\"{}\\n{}\"",
".",
"format",
"(",
"existing_contents",
",",
"public_key",
")",
"except",
"WVAHttpNotFoundError",
":",
"pass",
"# file doesn't exist, just write the public key",
"http_client",
".",
"put",
"(",
"authorized_keys_uri",
",",
"authorized_key_contents",
")",
"print",
"(",
"\"Public key written to authorized_keys for python user.\"",
")",
"print",
"(",
"\"You should now be able to ssh to the device by doing the following:\"",
")",
"print",
"(",
"\"\"",
")",
"print",
"(",
"\" $ ssh python@{}\"",
".",
"format",
"(",
"get_root_ctx",
"(",
"ctx",
")",
".",
"hostname",
")",
")"
] | Enable ssh login as the Python user for the current user
This command will create an authorized_keys file on the target device
containing the current user's public key. This will allow ssh to
the WVA from this machine. | [
"Enable",
"ssh",
"login",
"as",
"the",
"Python",
"user",
"for",
"the",
"current",
"user"
] | 4252735e2775f80ebaffd813fbe84046d26906b3 | https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L523-L546 | train |
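The key call here is the PUT of an authorized_keys file; below is a simplified sketch without the append handling (the local key path and device details are assumptions):

from wva import WVA

wva = WVA("192.168.100.1", "admin", "admin")  # assumed address and credentials
http_client = wva.get_http_client()

# Read the local public key and write it to the python user's authorized_keys,
# which is what enables `ssh python@<hostname>` afterwards.
with open("/home/me/.ssh/id_rsa.pub") as f:  # hypothetical local key path
    public_key = f.read()
http_client.put("/files/userfs/WEB/python/.ssh/authorized_keys", public_key)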