body | body_hash | docstring | path | name | repository_name | lang | body_without_docstring
---|---|---|---|---|---|---|---|
def __str__(self):
'String method for the class\n\n The method transforms the configuration\n file object into its string representation.\n\n Returns\n -------\n a string representation of the object\n '
rstring = (str(self.header) + '\n')
for key in self.gkeys:
rstring += str(key)
for beam in self._beams:
rstring += str(beam)
return rstring | -8,765,402,822,851,663,000 | String method for the class
The method transforms the configuration
file object into its string representation.
Returns
-------
a string representation of the object | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
rstring = (str(self.header) + '\n')
for key in self.gkeys:
rstring += str(key)
for beam in self._beams:
rstring += str(beam)
return rstring |
def _load_file(self, filename):
"Configuration file --> keyword list\n\n The method load a configuration file and\n extract all valid keyword-keyvalue-comment information\n from it. The keyword-keyvalue pairs are\n organized and returned as a list of\n configuration key objects.\n\n @param filename: name of the configuration file\n @type filename: String\n\n @return: list of ConfKey's\n @rtype: [ConfKey]\n "
keylist = []
fopen = open(filename, 'r')
for line in fopen:
str_line = line.strip()
if (len(str_line) and (str_line[0] != '#')):
keylist.append(self._key_from_line(str_line))
fopen.close()
return keylist | 2,015,909,464,358,111,200 | Configuration file --> keyword list
The method loads a configuration file and
extracts all valid keyword-keyvalue-comment information
from it. The keyword-keyvalue pairs are
organized and returned as a list of
configuration key objects.
@param filename: name of the configuration file
@type filename: String
@return: list of ConfKey objects
@rtype: [ConfKey] | pyaxe/axesrc/configfile.py | _load_file | sosey/pyaxe | python | def _load_file(self, filename):
"Configuration file --> keyword list\n\n The method load a configuration file and\n extract all valid keyword-keyvalue-comment information\n from it. The keyword-keyvalue pairs are\n organized and returned as a list of\n configuration key objects.\n\n @param filename: name of the configuration file\n @type filename: String\n\n @return: list of ConfKey's\n @rtype: [ConfKey]\n "
keylist = []
fopen = open(filename, 'r')
for line in fopen:
str_line = line.strip()
if (len(str_line) and (str_line[0] != '#')):
keylist.append(self._key_from_line(str_line))
fopen.close()
return keylist |
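The filtering rule above is simple: strip each line, then keep it only if it is non-empty and does not start with `#`. A minimal standalone sketch of that rule; the sample config text below is made up for illustration, not taken from a real aXe file:

```python
# Hypothetical config excerpt; real aXe configuration files ship with pyaxe.
sample = """# aXe configuration
INSTRUMENT WFC3 ; the instrument
CAMERA IR

SCIENCE_EXT SCI
"""

# Mirror the _load_file filtering: keep non-empty, non-comment lines.
kept = [ln.strip() for ln in sample.splitlines()
        if ln.strip() and ln.strip()[0] != '#']
print(kept)
# ['INSTRUMENT WFC3 ; the instrument', 'CAMERA IR', 'SCIENCE_EXT SCI']
```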
def _get_gkey_index(self, keyword):
'Retrieve the index of a global keyword\n\n The method searches for the index of\n a requested keyword in the list of global\n keywords. If the keyword does not exist,\n the index -1 is returned\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n index: int\n the index of the keyword\n '
kindex = (- 1)
for index in range(len(self.gkeys)):
if (self.gkeys[index].keyword == keyword):
return index
return kindex | -1,735,433,516,064,157,400 | Retrieve the index of a global keyword
The method searches for the index of
a requested keyword in the list of global
keywords. If the keyword does not exist,
the index -1 is returned
Parameters
----------
keyword: str
name of the requested keyword
Returns
-------
index: int
the index of the keyword | pyaxe/axesrc/configfile.py | _get_gkey_index | sosey/pyaxe | python | def _get_gkey_index(self, keyword):
kindex = (- 1)
for index in range(len(self.gkeys)):
if (self.gkeys[index].keyword == keyword):
return index
return kindex |
def _key_from_line(self, line):
'Creates a keyword from a line\n\n The method extracts the configuration keyword,\n the associated value and, if present,\n a comment from a line in the configuration file.\n A configuration key object representing the extracted\n keyword is created and returned.\n\n Parameters\n ----------\n line: str\n line to analyze\n\n Returns\n -------\n configuration key object\n '
items = line.split()
if (len(items) > 1):
keyword = items[0].strip()
cpos = line.rfind(';')
if (cpos < 0):
keyvalue = line[(line.find(keyword) + len(keyword)):].strip()
comment = None
else:
tmp_val = line[(line.find(keyword) + len(keyword)):].strip()
keyvalue = tmp_val.split(';', 1)[0].strip()
comment = tmp_val.split(';', 1)[1].strip()
else:
err_msg = (('Only one item in: ' + line) + ' !')
raise aXeError(err_msg)
return ConfKey(keyword, keyvalue, comment) | -1,848,617,931,775,020,300 | Creates a keyword from a line
The method extracts the configuration keyword,
the associated value and, if present,
a comment from a line in the configuration file.
A configuration key object representing the extracted
keyword is created and returned.
Parameters
----------
line: str
line to analyze
Returns
-------
configuration key object | pyaxe/axesrc/configfile.py | _key_from_line | sosey/pyaxe | python | def _key_from_line(self, line):
items = line.split()
if (len(items) > 1):
keyword = items[0].strip()
cpos = line.rfind(';')
if (cpos < 0):
keyvalue = line[(line.find(keyword) + len(keyword)):].strip()
comment = None
else:
tmp_val = line[(line.find(keyword) + len(keyword)):].strip()
keyvalue = tmp_val.split(';', 1)[0].strip()
comment = tmp_val.split(';', 1)[1].strip()
else:
err_msg = (('Only one item in: ' + line) + ' !')
raise aXeError(err_msg)
return ConfKey(keyword, keyvalue, comment) |
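To make the parsing concrete, here is the keyword/value/comment split performed by `_key_from_line`, restated as a standalone snippet; the sample line is hypothetical. Splitting with a maxsplit of 1 keeps any later ';' characters inside the comment:

```python
line = 'FFNAME WFC3.IR.G141.flat.fits ; flat-field cube'  # made-up example
keyword = line.split()[0]
rest = line[line.find(keyword) + len(keyword):].strip()
if ';' in rest:
    keyvalue, comment = (s.strip() for s in rest.split(';', 1))
else:
    keyvalue, comment = rest, None
print(keyword, '|', keyvalue, '|', comment)
# FFNAME | WFC3.IR.G141.flat.fits | flat-field cube
```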
def _find_gkeys(self, keylist):
'Finds and extracts the global keywords\n\n The method finds all the predefined global keywords in\n a keyword list. The list of global keywords is\n returned. Their counterparts in the input keyword list\n are deleted.\n\n Parameters\n ----------\n keylist: list\n list of keywords\n\n Returns\n -------\n keys: list\n global keywords\n '
gkeywords = ['INSTRUMENT', 'CAMERA', 'TELAREA', 'SCIENCE_EXT', 'ERRORS_EXT', 'DQ_EXT', 'OPTKEY1', 'OPTVAL1', 'FFNAME', 'DQMASK', 'DRZRESOLA', 'DRZSCALE', 'DRZLAMB0', 'DRZXINI', 'DRZROOT', 'EXPTIME', 'WEIGHT_EXT', 'DRZPFRAC', 'DRZPSCALE', 'DRZKERNEL', 'MODEL_EXT', 'VARIANCE_EXT', 'RDNOISE', 'PSFCOEFFS', 'PSFRANGE', 'IPIXFUNCTION', 'POBJSIZE', 'SMFACTOR']
gkeys = []
dindex = []
iindex = 0
for key in keylist:
if (key.keyword in gkeywords):
dindex.append(iindex)
gkeys.append(ConfKey(key.keyword, key.keyvalue, key.comment))
iindex += 1
dindex.sort()
dindex.reverse()
for index in dindex:
del keylist[index]
return gkeys | 8,598,817,602,959,492,000 | Finds and extracts the global keywords
The method finds all the predefined global keywords in
a keyword list. The list of global keywords is
returned. Their counterparts in the input keyword list
are deleted.
Parameters
----------
keylist: list
list of keywords
Returns
-------
keys: list
global keywords | pyaxe/axesrc/configfile.py | _find_gkeys | sosey/pyaxe | python | def _find_gkeys(self, keylist):
gkeywords = ['INSTRUMENT', 'CAMERA', 'TELAREA', 'SCIENCE_EXT', 'ERRORS_EXT', 'DQ_EXT', 'OPTKEY1', 'OPTVAL1', 'FFNAME', 'DQMASK', 'DRZRESOLA', 'DRZSCALE', 'DRZLAMB0', 'DRZXINI', 'DRZROOT', 'EXPTIME', 'WEIGHT_EXT', 'DRZPFRAC', 'DRZPSCALE', 'DRZKERNEL', 'MODEL_EXT', 'VARIANCE_EXT', 'RDNOISE', 'PSFCOEFFS', 'PSFRANGE', 'IPIXFUNCTION', 'POBJSIZE', 'SMFACTOR']
gkeys = []
dindex = []
iindex = 0
for key in keylist:
if (key.keyword in gkeywords):
dindex.append(iindex)
gkeys.append(ConfKey(key.keyword, key.keyvalue, key.comment))
iindex += 1
dindex.sort()
dindex.reverse()
for index in dindex:
del keylist[index]
return gkeys |
def _check_gfiles(self):
'Checks whether all files exist\n\n The method checks whether the files whose names\n are within the class data do exist or not.\n An error is reported in case that the files\n do not exist.\n '
fkeys = ['FFNAME']
for key in fkeys:
index = self._get_gkey_index(key)
if (index > (- 1)):
kvalue = self.gkeys[index].keyvalue
if ((kvalue.upper() != 'NONE') and (not os.path.isfile(config_util.getCONF(kvalue)))):
err_msg = 'The file: {0:s} does not exist!'.format(config_util.getCONF(kvalue))
raise aXeError(err_msg) | 362,706,535,126,800,800 | Checks whether all files exist
The method checks whether the files whose names
are within the class data do exist or not.
An error is reported in case that the files
do not exist. | pyaxe/axesrc/configfile.py | _check_gfiles | sosey/pyaxe | python | def _check_gfiles(self):
fkeys = ['FFNAME']
for key in fkeys:
index = self._get_gkey_index(key)
if (index > (- 1)):
kvalue = self.gkeys[index].keyvalue
if ((kvalue.upper() != 'NONE') and (not os.path.isfile(config_util.getCONF(kvalue)))):
err_msg = 'The file: {0:s} does not exist!'.format(config_util.getCONF(kvalue))
raise aXeError(err_msg) |
def get_gkey(self, keyword):
"Retrieve a requested global keyword\n\n The method searches the list of global keywords\n for a fitting keyword. In case that the requested\n keyword exists, it is returned.\n If not 'None' is returned\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n key: str or None\n the requested keyword or 'None'\n "
rkey = None
index = self._get_gkey_index(keyword)
if (index > (- 1)):
return self.gkeys[index]
else:
return rkey | 5,200,086,462,804,408,000 | Retrieve a requested global keyword
The method searches the list of global keywords
for a fitting keyword. In case that the requested
keyword exists, it is returned.
If not, 'None' is returned
Parameters
----------
keyword: str
name of the requested keyword
Returns
-------
key: str or None
the requested keyword or 'None' | pyaxe/axesrc/configfile.py | get_gkey | sosey/pyaxe | python | def get_gkey(self, keyword):
"Retrieve a requested global keyword\n\n The method searches the list of global keywords\n for a fitting keyword. In case that the requested\n keyword exists, it is returned.\n If not 'None' is returned\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n key: str or None\n the requested keyword or 'None'\n "
rkey = None
index = self._get_gkey_index(keyword)
if (index > (- 1)):
return self.gkeys[index]
else:
return rkey |
def add_gkey(self, keyword, keyvalue, comment=None):
'Add global keyword\n\n The method adds a keyword to the list of global\n keywords. In case that the keyword already exists,\n it is overwritten, otherwise it is appended\n to the global keyword list.\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n keyvalue: any\n value of the requested keyword\n comment: str\n comment for the keyword\n '
index = self._get_gkey_index(keyword)
if (index > (- 1)):
self.gkeys[index].keyvalue = keyvalue
self.gkeys[index].comment = comment
else:
self.gkeys.append(ConfKey(keyword, keyvalue, comment)) | -5,217,385,031,061,948,000 | Add global keyword
The method adds a keyword to the list of global
keywords. In case that the keyword already exists,
it is overwritten, otherwise it is appended
to the global keyword list.
Parameters
----------
keyword: str
name of the requested keyword
keyvalue: any
value of the requested keyword
comment: str
comment for the keyword | pyaxe/axesrc/configfile.py | add_gkey | sosey/pyaxe | python | def add_gkey(self, keyword, keyvalue, comment=None):
index = self._get_gkey_index(keyword)
if (index > (- 1)):
self.gkeys[index].keyvalue = keyvalue
self.gkeys[index].comment = comment
else:
self.gkeys.append(ConfKey(keyword, keyvalue, comment)) |
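A hedged sketch of the overwrite-or-append behaviour of `add_gkey`, using a minimal stand-in for `ConfKey` rather than the pyaxe class itself:

```python
class Key:  # minimal stand-in for ConfKey, for illustration only
    def __init__(self, keyword, keyvalue, comment=None):
        self.keyword, self.keyvalue, self.comment = keyword, keyvalue, comment

gkeys = [Key('EXPTIME', '900.0')]

def add_gkey(keyword, keyvalue, comment=None):
    for k in gkeys:
        if k.keyword == keyword:          # existing key: overwrite in place
            k.keyvalue, k.comment = keyvalue, comment
            return
    gkeys.append(Key(keyword, keyvalue, comment))  # new key: append

add_gkey('EXPTIME', '1200.0')
add_gkey('RDNOISE', '20.0', 'read noise')
print([(k.keyword, k.keyvalue) for k in gkeys])
# [('EXPTIME', '1200.0'), ('RDNOISE', '20.0')]
```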
def get_gvalue(self, keyword):
"Retrieve a requested global keyword value\n\n The method returns the value of the keyword\n which matches the requested value.\n If there is no matching keyword, 'None'\n is returned.\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n The keyword value\n "
rvalue = None
key = self.get_gkey(keyword)
if key:
rvalue = key.keyvalue
return rvalue | 1,171,936,061,796,204,500 | Retrieve a requested global keyword value
The method returns the value of the keyword
which matches the requested name.
If there is no matching keyword, 'None'
is returned.
Parameters
----------
keyword: str
name of the requested keyword
Returns
-------
The keyword value | pyaxe/axesrc/configfile.py | get_gvalue | sosey/pyaxe | python | def get_gvalue(self, keyword):
"Retrieve a requested global keyword value\n\n The method returns the value of the keyword\n which matches the requested value.\n If there is no matching keyword, 'None'\n is returned.\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n The keyword value\n "
rvalue = None
key = self.get_gkey(keyword)
if key:
rvalue = key.keyvalue
return rvalue |
def writeto(self, filename):
'Save the object to a file\n\n The method saves the object to a file\n with name specified in the input.\n\n Parameters\n ----------\n filename: str\n name of the file\n '
if os.path.isfile(filename):
os.unlink(filename)
ofile = open(filename, 'w')
ofile.write(str(self))
ofile.close() | 8,103,122,125,987,555,000 | Save the object to a file
The method saves the object to a file
with name specified in the input.
Parameters
----------
filename: str
name of the file | pyaxe/axesrc/configfile.py | writeto | sosey/pyaxe | python | def writeto(self, filename):
if os.path.isfile(filename):
os.unlink(filename)
ofile = open(filename, 'w')
ofile.write(str(self))
ofile.close() |
def flush(self):
'Save the object back to file\n\n The method saves the object back to a file\n with the identical filename it was read from.\n '
self.writeto(self.filename) | 4,265,907,186,187,180,000 | Save the object back to file
The method saves the object back to a file
with the identical filename it was read from. | pyaxe/axesrc/configfile.py | flush | sosey/pyaxe | python | def flush(self):
self.writeto(self.filename) |
def check_files(self, check_glob=True):
'Checks whether all files exist\n\n The method checks whether the files whose names\n are within the class data do exist or not.\n An error is reported in case that the files\n do not exist.\n '
n_sens = 0
if check_glob:
self._check_gfiles()
for bkey in self.beams.keys():
n_sens += self.beams[bkey].check_files()
return n_sens | 2,228,897,884,204,414,200 | Checks whether all files exist
The method checks whether the files whose names
are within the class data do exist or not.
An error is reported in case that the files
do not exist. | pyaxe/axesrc/configfile.py | check_files | sosey/pyaxe | python | def check_files(self, check_glob=True):
n_sens = 0
if check_glob:
self._check_gfiles()
for bkey in self.beams.keys():
n_sens += self.beams[bkey].check_files()
return n_sens |
def __init__(self, filename=None):
'\n Initializes the ConfigFile object either\n by reading in a configuration file\n or by creating a default configuration file\n\n Parameters\n ----------\n filename: str\n name of the configuration file\n '
_log.info(f'Initializing configfile with {filename}')
if (filename is None):
_log.info('No file given, can do nothing!!')
else:
self.filename = filename
keylist = self._load_file(filename)
header = ConfHeader(filename)
super(ConfigFile, self).__init__(keylist, header) | -1,057,489,752,674,759,800 | Initializes the ConfigFile object either
by reading in a configuration file
or by creating a default configuration file
Parameters
----------
filename: str
name of the configuration file | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, filename=None):
_log.info(f'Initializing configfile with {filename}')
if (filename is None):
_log.info('No file given, can do nothing!!')
else:
self.filename = filename
keylist = self._load_file(filename)
header = ConfHeader(filename)
super(ConfigFile, self).__init__(keylist, header) |
def _get_simul_name(self):
'Get the filename used in aXeSIM'
return (self.filename + '.simul') | -3,035,849,564,787,214,000 | Get the filename used in aXeSIM | pyaxe/axesrc/configfile.py | _get_simul_name | sosey/pyaxe | python | def _get_simul_name(self):
return (self.filename + '.simul') |
def confirm_extrkeys(self):
'Confirm that all keywords for the extraction exist'
extr_ready = 1
if (self['POBJSIZE'] is None):
extr_ready = 0
elif (float(self['POBJSIZE']) < 0.0):
extr_ready = 0
if (self['SMFACTOR'] is None):
extr_ready = 0
elif (float(self['SMFACTOR']) < 0.0):
extr_ready = 0
return extr_ready | -8,612,642,381,535,807,000 | Confirm that all keywords for the extraction exist | pyaxe/axesrc/configfile.py | confirm_extrkeys | sosey/pyaxe | python | def confirm_extrkeys(self):
extr_ready = 1
if (self['POBJSIZE'] is None):
extr_ready = 0
elif (float(self['POBJSIZE']) < 0.0):
extr_ready = 0
if (self['SMFACTOR'] is None):
extr_ready = 0
elif (float(self['SMFACTOR']) < 0.0):
extr_ready = 0
return extr_ready |
def confirm_lambda_psf(self):
"Check whether a 'lambda_psf' value is needed, provide one"
if ((self['PSFCOEFFS'] is not None) and (self['PSFRANGE'] is not None)):
psf_range = self['PSFRANGE'].split()
lambda_min = float(psf_range[0])
lambda_max = float(psf_range[1])
lambda_psf = (0.5 * (lambda_max + lambda_min))
else:
lambda_psf = None
return lambda_psf | 666,498,324,924,048,300 | Check whether a 'lambda_psf' value is needed, provide one | pyaxe/axesrc/configfile.py | confirm_lambda_psf | sosey/pyaxe | python | def confirm_lambda_psf(self):
if ((self['PSFCOEFFS'] is not None) and (self['PSFRANGE'] is not None)):
psf_range = self['PSFRANGE'].split()
lambda_min = float(psf_range[0])
lambda_max = float(psf_range[1])
lambda_psf = (0.5 * (lambda_max + lambda_min))
else:
lambda_psf = None
return lambda_psf |
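The midpoint rule above is simple arithmetic: `lambda_psf` is the mean of the two `PSFRANGE` endpoints. A worked example with made-up wavelength values:

```python
psf_range = '10000.0 17000.0'.split()   # hypothetical PSFRANGE value
lambda_min, lambda_max = float(psf_range[0]), float(psf_range[1])
lambda_psf = 0.5 * (lambda_max + lambda_min)
print(lambda_psf)  # 13500.0
```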
def axesim_prep(self):
'Removes or modifies some keywords'
new_name = self._get_simul_name()
if ((self['SCIENCE_EXT'] != 'SCI') and (self['SCIENCE_EXT'] != '2')):
index = self._get_gkey_index('SCIENCE_EXT')
if (index > (- 1)):
self.gkeys[index].keyvalue = 'SCI'
if (self['TELAREA'] is None):
self.add_gkey('TELAREA', 45238.93)
index = 1
while (self[('OPTKEY' + str(index))] is not None):
del self[('OPTKEY' + str(index))]
del self[('OPTVAL' + str(index))]
index += 1
self.add_gkey('ERRORS_EXT', 'ERR')
self.add_gkey('DQ_EXT', 'DQ')
self.writeto(new_name)
return os.path.basename(new_name) | 1,872,794,241,133,237,000 | Removes or modifies some keywords | pyaxe/axesrc/configfile.py | axesim_prep | sosey/pyaxe | python | def axesim_prep(self):
new_name = self._get_simul_name()
if ((self['SCIENCE_EXT'] != 'SCI') and (self['SCIENCE_EXT'] != '2')):
index = self._get_gkey_index('SCIENCE_EXT')
if (index > (- 1)):
self.gkeys[index].keyvalue = 'SCI'
if (self['TELAREA'] is None):
self.add_gkey('TELAREA', 45238.93)
index = 1
while (self[('OPTKEY' + str(index))] is not None):
del self[('OPTKEY' + str(index))]
del self[('OPTVAL' + str(index))]
index += 1
self.add_gkey('ERRORS_EXT', 'ERR')
self.add_gkey('DQ_EXT', 'DQ')
self.writeto(new_name)
return os.path.basename(new_name) |
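The simulation file name is just the original configuration name with '.simul' appended (see `_get_simul_name` above), and only its basename is returned. A small illustration with a hypothetical path:

```python
import os

filename = 'conf/WFC3.IR.G141.conf'   # hypothetical configuration path
new_name = filename + '.simul'
print(os.path.basename(new_name))     # WFC3.IR.G141.conf.simul
```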
def __init__(self, ident=None, keylist=None):
'\n A configuration beam object is initialized. This is done\n by either extracting the relevant keywords for a certain\n beam from a keyword list or creating a default beam.\n\n Parameters\n ----------\n ident: char\n beam identification\n keylist: list\n list of keywords\n '
if ((ident is None) or (keylist is None)):
_log.info('No ID or no keywords given, can do nothing!!')
else:
try:
self.ident = ident
self.beamkeys = self._find_beamkeys(ident, keylist)
self.trace = ConfigTrace(ident, keylist)
self.disp = ConfigDisp(ident, keylist)
except CKeyNotFound:
raise BeamNotFound(ident) | -1,743,147,716,586,530,000 | A configuration beam object is initialized. This is done
by either extracting the relevant keywords for a certain
beam from a keyword list or creating a default beam.
Parameters
----------
ident: char
beam identification
keylist: list
list of keywords | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, ident=None, keylist=None):
if ((ident is None) or (keylist is None)):
_log.info('No ID or no keywords given, can do nothing!!')
else:
try:
self.ident = ident
self.beamkeys = self._find_beamkeys(ident, keylist)
self.trace = ConfigTrace(ident, keylist)
self.disp = ConfigDisp(ident, keylist)
except CKeyNotFound:
raise BeamNotFound(ident) |
def __str__(self):
'String method for the class\n\n The method transforms the configuration\n beam object into its string representation.\n '
rstring = '\n#-----------\n#\n# Beam {0:s}:\n#\n#-----------\n'.format(str(self.ident))
for key in self.beamkeys:
rstring += str(key)
rstring += str(self.trace)
rstring += str(self.disp)
return rstring | 2,905,879,936,868,817,400 | String method for the class
The method transforms the configuration
beam object into its string representation. | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
rstring = '\n#-----------\n#\n# Beam {0:s}:\n#\n#-----------\n'.format(str(self.ident))
for key in self.beamkeys:
rstring += str(key)
rstring += str(self.trace)
rstring += str(self.disp)
return rstring |
def _find_beamkeys(self, ident, keylist):
'Load the global beam keywords\n\n The method extracts all global beam keywords\n from a keyword list. The extracted keywords are returned\n as a list. They are removed from the input list.\n\n Parameters\n ----------\n ident: char\n beam identification\n keylist: list\n list of keywords\n '
bkeys = ['BEAM', 'MMAG_EXTRACT_', 'MMAG_MARK_', 'XOFF_', 'YOFF_', 'SENSITIVITY_']
okeys = ['PSF_OFFSET_']
id_keys = []
for key in bkeys:
id_keys.append((key + ident))
opt_keys = []
for key in okeys:
opt_keys.append((key + ident))
opt_keys.append((('DLD1P_' + ident) + '_PRANGE'))
bkeys = []
dindex = []
iindex = 0
nfound = 0
for key in keylist:
if (key.keyword in id_keys):
dindex.append(iindex)
bkeys.append(ConfKey(key.keyword, key.keyvalue, key.comment))
nfound += 1
elif (key.keyword in opt_keys):
dindex.append(iindex)
bkeys.append(ConfKey(key.keyword, key.keyvalue, key.comment))
iindex += 1
if (nfound < len(id_keys)):
raise CKeyNotFound('general')
dindex.sort()
dindex.reverse()
for iindex in dindex:
del keylist[iindex]
return bkeys | 4,132,190,447,457,948,700 | Load the global beam keywords
The method extracts all global beam keywords
from a keyword list. The extracted keywords are returned
as a list. They are removed from the input list.
Parameters
----------
ident: char
beam identification
keylist: list
list of keywords | pyaxe/axesrc/configfile.py | _find_beamkeys | sosey/pyaxe | python | def _find_beamkeys(self, ident, keylist):
bkeys = ['BEAM', 'MMAG_EXTRACT_', 'MMAG_MARK_', 'XOFF_', 'YOFF_', 'SENSITIVITY_']
okeys = ['PSF_OFFSET_']
id_keys = []
for key in bkeys:
id_keys.append((key + ident))
opt_keys = []
for key in okeys:
opt_keys.append((key + ident))
opt_keys.append((('DLD1P_' + ident) + '_PRANGE'))
bkeys = []
dindex = []
iindex = 0
nfound = 0
for key in keylist:
if (key.keyword in id_keys):
dindex.append(iindex)
bkeys.append(ConfKey(key.keyword, key.keyvalue, key.comment))
nfound += 1
elif (key.keyword in opt_keys):
dindex.append(iindex)
bkeys.append(ConfKey(key.keyword, key.keyvalue, key.comment))
iindex += 1
if (nfound < len(id_keys)):
raise CKeyNotFound('general')
dindex.sort()
dindex.reverse()
for iindex in dindex:
del keylist[iindex]
return bkeys |
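The mandatory keyword names are built by appending the beam identifier to each prefix, so for beam 'A' the method requires `BEAMA`, `MMAG_EXTRACT_A`, and so on. A quick illustration of that name construction:

```python
bkeys = ['BEAM', 'MMAG_EXTRACT_', 'MMAG_MARK_', 'XOFF_', 'YOFF_', 'SENSITIVITY_']
ident = 'A'
print([k + ident for k in bkeys])
# ['BEAMA', 'MMAG_EXTRACT_A', 'MMAG_MARK_A', 'XOFF_A', 'YOFF_A', 'SENSITIVITY_A']
```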
def _get_bkey_index(self, keyword):
'Retrieve the index of a beam keyword\n\n The method searches for the index of\n a requested keyword in the list of beam\n keywords. If the keyword does not exist,\n the index -1 is returned\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n index: int\n the index of the keyword\n '
bindex = (- 1)
for index in range(len(self.beamkeys)):
if (self.beamkeys[index].keyword == keyword):
return index
return bindex | 1,481,653,504,283,164,400 | Retrieve the index of a beam keyword
The method searches for the index of
a requested keyword in the list of beam
keywords. If the keyword does not exist,
the index -1 is returned
Parameters
----------
keyword: str
name of the requested keyword
Returns
-------
index: int
the index of the keyword | pyaxe/axesrc/configfile.py | _get_bkey_index | sosey/pyaxe | python | def _get_bkey_index(self, keyword):
bindex = (- 1)
for index in range(len(self.beamkeys)):
if (self.beamkeys[index].keyword == keyword):
return index
return bindex |
def get_bkey(self, keyword):
"Retrieve a requested beam keyword\n\n The method searches the list of beam keywords\n for a fitting keyword. In case that the requested\n keyword exists, it is returned.\n If not 'None' is returned\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n key: str or None\n the requested keyword or 'None'\n "
rkey = None
index = self._get_bkey_index(keyword)
if (index > (- 1)):
return self.beamkeys[index]
else:
return rkey | 6,752,099,410,041,932,000 | Retrieve a requested beam keyword
The method searches the list of beam keywords
for a fitting keyword. In case that the requested
keyword exists, it is returned.
If not, 'None' is returned
Parameters
----------
keyword: str
name of the requested keyword
Returns
-------
key: str or None
the requested keyword or 'None' | pyaxe/axesrc/configfile.py | get_bkey | sosey/pyaxe | python | def get_bkey(self, keyword):
"Retrieve a requested beam keyword\n\n The method searches the list of beam keywords\n for a fitting keyword. In case that the requested\n keyword exists, it is returned.\n If not 'None' is returned\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n key: str or None\n the requested keyword or 'None'\n "
rkey = None
index = self._get_bkey_index(keyword)
if (index > (- 1)):
return self.beamkeys[index]
else:
return rkey |
def get_bvalue(self, keyword):
"Retrieve a requested beam-keyword value\n\n The method returns the value of the keyword\n which matches the requested value.\n If there is no matching keyword, 'None'\n is returned.\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n key: str or None\n the requested keyword or 'None'\n "
rvalue = None
key = self.get_bkey(keyword)
if key:
rvalue = key.keyvalue
return rvalue | -5,319,938,263,830,608,000 | Retrieve a requested beam-keyword value
The method returns the value of the keyword
which matches the requested name.
If there is no matching keyword, 'None'
is returned.
Parameters
----------
keyword: str
name of the requested keyword
Returns
-------
key: str or None
the requested keyword or 'None' | pyaxe/axesrc/configfile.py | get_bvalue | sosey/pyaxe | python | def get_bvalue(self, keyword):
"Retrieve a requested beam-keyword value\n\n The method returns the value of the keyword\n which matches the requested value.\n If there is no matching keyword, 'None'\n is returned.\n\n Parameters\n ----------\n keyword: str\n name of the requested keyword\n\n Returns\n -------\n key: str or None\n the requested keyword or 'None'\n "
rvalue = None
key = self.get_bkey(keyword)
if key:
rvalue = key.keyvalue
return rvalue |
def check_files(self):
'Checks whether all files exist\n\n The method checks whether the files whose names\n are within the class data do exist or not.\n An error is reported in case that the files\n do not exist.\n\n '
n_sens = 0
fkeys = ['SENSITIVITY_']
for key in fkeys:
full_keyword = (key + self.ident)
for bkey in self.beamkeys:
if ((bkey.keyword == full_keyword) and (bkey.keyvalue.upper() != 'NONE')):
if (not os.path.isfile(config_util.getCONF(bkey.keyvalue))):
err_msg = 'The file: {0:s} does not exist!'.format(config_util.getCONF(bkey.keyvalue))
raise aXeError(err_msg)
else:
n_sens += 1
return n_sens | -7,950,295,730,071,243,000 | Checks whether all files exist
The method checks whether the files whose names
are within the class data do exist or not.
An error is reported in case that the files
do not exist. | pyaxe/axesrc/configfile.py | check_files | sosey/pyaxe | python | def check_files(self):
n_sens = 0
fkeys = ['SENSITIVITY_']
for key in fkeys:
full_keyword = (key + self.ident)
for bkey in self.beamkeys:
if ((bkey.keyword == full_keyword) and (bkey.keyvalue.upper() != 'NONE')):
if (not os.path.isfile(config_util.getCONF(bkey.keyvalue))):
err_msg = 'The file: {0:s} does not exist!'.format(config_util.getCONF(bkey.keyvalue))
raise aXeError(err_msg)
else:
n_sens += 1
return n_sens |
def __str__(self):
'The method transforms the 2D polynomial object into its str\n representation.\n\n Returns\n -------\n object: str\n string representation of the object\n '
rstring = str(self.norder)
for key in self.twodkeys:
rstring += str(key)
return rstring | 4,770,609,429,702,378,000 | The method transforms the 2D polynomial object into its str
representation.
Returns
-------
object: str
string representation of the object | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
rstring = str(self.norder)
for key in self.twodkeys:
rstring += str(key)
return rstring |
def __getitem__(self, index):
'Getindex method for the class\n\n The operator method which is called\n when an index is requested on a\n class instance\n test = kkk[0]\n\n Parameters\n ----------\n index: int\n the index to address\n Returns\n -------\n key : ConfListKey\n the indexed object\n '
if (index > (len(self.twodkeys) - 1)):
err_msg = 'Index: {0:s} does not exist!'.format(str(index))
raise aXeError(err_msg)
return self.twodkeys[index] | -1,028,943,631,747,178,400 | Getindex method for the class
The operator method which is called
when an index is requested on a
class instance
test = kkk[0]
Parameters
----------
index: int
the index to address
Returns
-------
key : ConfListKey
the indexed object | pyaxe/axesrc/configfile.py | __getitem__ | sosey/pyaxe | python | def __getitem__(self, index):
if (index > (len(self.twodkeys) - 1)):
err_msg = 'Index: {0:s} does not exist!'.format(str(index))
raise aXeError(err_msg)
return self.twodkeys[index] |
def __setitem__(self, index, obj):
'Setindex method for the class\n\n The operator method which is called\n when the index of a class instance is\n set to a value.\n kkk[0] = test\n\n Parameters\n ----------\n index: int\n the index to address\n obj: ConfListKey\n description of the object content\n '
if (index > (len(self.twodkeys) - 1)):
err_msg = (('Index ' + str(index)) + ' does not exist!')
raise aXeError(err_msg)
elif (not isinstance(obj, type(self[0]))):
err_msg = 'Object: {0:s} has wrong type: {1:s}!'.format(str(obj), str(type(obj)))
raise aXeError(err_msg)
self.twodkeys[index] = obj | 8,941,293,243,994,340,000 | Setindex method for the class
The operator method which is called
when the index of a class instance is
set to a value.
kkk[0] = test
Parameters
----------
index: int
the index to address
obj: ConfListKey
description of the object content | pyaxe/axesrc/configfile.py | __setitem__ | sosey/pyaxe | python | def __setitem__(self, index, obj):
if (index > (len(self.twodkeys) - 1)):
err_msg = (('Index ' + str(index)) + ' does not exist!')
raise aXeError(err_msg)
elif (not isinstance(obj, type(self[0]))):
err_msg = 'Object: {0:s} has wrong type: {1:s}!'.format(str(obj), str(type(obj)))
raise aXeError(err_msg)
self.twodkeys[index] = obj |
def _find_order(self, prefix, ident, keylist):
'Find the keyword with the polynomial order\n\n The method finds and extracts the keyword\n indicating the polynomial degree from\n a keyword list. The keyword is returned.\n\n Parameters\n ----------\n prefix: str\n keyword prefix\n ident: char\n beam identification\n keylist: list\n list of keywords\n\n Returns\n -------\n keyword: str\n keyword with number of orders\n '
order_key = ((prefix + 'ORDER_') + ident)
return self._find_key(order_key, keylist) | -3,704,321,376,325,030,000 | Find the keyword with the polynomial order
The method finds and extracts the keyword
indicating the polynomial degree from
a keyword list. The keyword is returned.
Parameters
----------
prefix: str
keyword prefix
ident: char
beam identification
keylist: list
list of keywords
Returns
-------
keyword: str
keyword with number of orders | pyaxe/axesrc/configfile.py | _find_order | sosey/pyaxe | python | def _find_order(self, prefix, ident, keylist):
order_key = ((prefix + 'ORDER_') + ident)
return self._find_key(order_key, keylist) |
def _find_twodkeys(self, prefix, ident, keylist):
'Find all the 2D polynomial keywords\n\n Given a prefix and a beam identifier the method\n extracts all orders of the 2D polynomial which\n describes the trace or dispersion. The number\n of orders expected is taken from the object data.\n\n Parameters\n ----------\n prefix: str\n keyword prefix\n ident: char\n beam identification\n keylist: list\n list of keywords\n\n Returns\n -------\n keys: list\n list of keywords\n '
twodkeys = []
for ii in range((int(self.norder.keyvalue) + 1)):
twodkey = (((prefix + ident) + '_') + str(ii))
newkey = self._find_key(twodkey, keylist, 1)
if self._check_twodkey(newkey):
twodkeys.append(newkey)
else:
raise CKeyLengthWrong(ident, twodkey)
return twodkeys | -3,912,197,982,723,200,000 | Find all the 2D polynomial keywords
Given a prefix and a beam identifier the method
extracts all orders of the 2D polynomial which
describes the trace or dispersion. The number
of orders expected is taken from the object data.
Parameters
----------
prefix: str
keyword prefix
ident: char
beam identification
keylist: list
list of keywords
Returns
-------
keys: list
list of keywords | pyaxe/axesrc/configfile.py | _find_twodkeys | sosey/pyaxe | python | def _find_twodkeys(self, prefix, ident, keylist):
twodkeys = []
for ii in range((int(self.norder.keyvalue) + 1)):
twodkey = (((prefix + ident) + '_') + str(ii))
newkey = self._find_key(twodkey, keylist, 1)
if self._check_twodkey(newkey):
twodkeys.append(newkey)
else:
raise CKeyLengthWrong(ident, twodkey)
return twodkeys |
def _find_key(self, keyword, keylist, lkey=0):
'Extract a certain keyword from the list\n\n The method searches for a particular keyword\n in a keyword list. If found, the keyword is\n copied and removed from the input list.\n If not found, an exception is fired.\n\n Parameters\n ----------\n keyword: str\n the keyword name\n keylist: list\n list of keywords\n\n Returns\n -------\n keyword: str\n the extracted keyword\n '
iindex = 0
found = (- 1)
for key in keylist:
if (key.keyword == keyword):
if lkey:
nkey = ConfListKey(key.keyword, key.keyvalue, key.comment)
else:
nkey = ConfKey(key.keyword, key.keyvalue, key.comment)
found = iindex
iindex += 1
if (found < 0):
raise CKeyNotFound(keyword)
else:
del keylist[found]
return nkey | 7,262,245,317,334,940,000 | Extract a certain keyword from the list
The method searches for a particular keyword
in a keyword list. If found, the keyword is
copied and removed from the input list.
If not found, an exception is fired.
Parameters
----------
keyword: str
the keyword name
keylist: list
list of keywords
Returns
-------
keyword: str
the extracted keyword | pyaxe/axesrc/configfile.py | _find_key | sosey/pyaxe | python | def _find_key(self, keyword, keylist, lkey=0):
iindex = 0
found = (- 1)
for key in keylist:
if (key.keyword == keyword):
if lkey:
nkey = ConfListKey(key.keyword, key.keyvalue, key.comment)
else:
nkey = ConfKey(key.keyword, key.keyvalue, key.comment)
found = iindex
iindex += 1
if (found < 0):
raise CKeyNotFound(keyword)
else:
del keylist[found]
return nkey |
def _check_twodkey(self, inkey):
'Check the length of a field dependent keyword\n\n Field dependent keywords such as the polynomial\n coefficients in the trace description and dispersion\n solution must have a certain number of values,\n which is:\n n = m^2/2 + m/2\n The method checks whether the number of values\n is in agreement with this.\n\n @param inkey: the keyword name\n @type inkey: ConfListKey\n\n @return: 1/0\n @rtype: int\n '
n = float(len(inkey.kvallist))
m = (((- 1.0) + math.sqrt((1.0 + (8.0 * n)))) / 2.0)
if (math.fabs((m - int(m))) > 1e-16):
return 0
return 1 | 8,013,850,881,918,405,000 | Check the length of a field dependent keyword
Field dependent keywords such as the polynomial
coefficients in the trace description and dispersion
solution must have a certain number of values,
which is:
n = m^2/2 + m/2
The method checks whether the number of values
is in agreement with this.
@param inkey: the keyword name
@type inkey: ConfListKey
@return: 1/0
@rtype: int | pyaxe/axesrc/configfile.py | _check_twodkey | sosey/pyaxe | python | def _check_twodkey(self, inkey):
n = float(len(inkey.kvallist))
m = (((- 1.0) + math.sqrt((1.0 + (8.0 * n)))) / 2.0)
if (math.fabs((m - int(m))) > 1e-16):
return 0
return 1 |
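The length check accepts only triangular numbers: a 2D polynomial of order m carries n = m(m+1)/2 coefficients, so the inverse m = (-1 + sqrt(1 + 8n))/2 must come out as an integer. A standalone check of that condition:

```python
import math

def is_valid_length(n):
    # n must equal m*(m+1)/2 for some integer order m (see _check_twodkey)
    m = (-1.0 + math.sqrt(1.0 + 8.0 * n)) / 2.0
    return math.fabs(m - int(m)) <= 1e-16

print(is_valid_length(6))   # True  (m = 3)
print(is_valid_length(5))   # False (m is not an integer)
```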
def str_header(self, description):
'Create a header string\n\n The method offers to the subclasses the possibility\n to have a meaningful string header before the\n actual data string.\n\n @param description: description of the object content\n @type description: string\n @return: the header string\n @rtype: string\n '
rstring = '\n#\n# '
rstring += description
rstring += ':\n#\n'
return rstring | -1,473,350,045,184,219,600 | Create a header string
The method offers to the subclasses the possibility
to have a meaningful string header before the
actual data string.
@param description: description of the object content
@type description: string
@return: the header string
@rtype: string | pyaxe/axesrc/configfile.py | str_header | sosey/pyaxe | python | def str_header(self, description):
rstring = '\n#\n# '
rstring += description
rstring += ':\n#\n'
return rstring |
def __init__(self, ident=None, keylist=None):
'The method initializes a configuration beam\n\n object for a given beam identifier.\n All necessary keywords are extracted from\n an input keyword list.\n In case of missing keywords an exception\n is fired.\n\n Parameters\n ----------\n ident: char\n beam identification\n keylist: list\n list of keywords\n '
try:
self.ident = ident
self.norder = self._find_order('DYDX_', ident, keylist)
self.twodkeys = self._find_twodkeys('DYDX_', ident, keylist)
except CKeyNotFound as e:
raise TraceNotFound(ident, e.keyword)
except CKeyLengthWrong as e:
_log.info(('Field dependent keyword: ' + e.keyword)) | 6,891,045,205,303,271,000 | The method initializes a configuration beam
object for a given beam identifier.
All necessary keywords are extracted from
an input keyword list.
In case of missing keywords an exception
is fired.
Parameters
----------
ident: char
beam identification
keylist: list
list of keywords | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, ident=None, keylist=None):
try:
self.ident = ident
self.norder = self._find_order('DYDX_', ident, keylist)
self.twodkeys = self._find_twodkeys('DYDX_', ident, keylist)
except CKeyNotFound as e:
raise TraceNotFound(ident, e.keyword)
except CKeyLengthWrong as e:
_log.info(('Field dependent keyword: ' + e.keyword)) |
def __str__(self):
'Returns string representation of the object'
description = ('Trace description for Beam ' + str(self.ident))
rstring = super(ConfigTrace, self).str_header(description)
rstring += super(ConfigTrace, self).__str__()
return rstring | 445,771,185,114,878,600 | Returns string representation of the object | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
description = ('Trace description for Beam ' + str(self.ident))
rstring = super(ConfigTrace, self).str_header(description)
rstring += super(ConfigTrace, self).__str__()
return rstring |
def __init__(self, ident=None, keylist=None):
'The method initializes a configuration dispersion\n\n object for a given beam identifier.\n All necessary keywords are extracted from\n an input keyword list.\n In case of missing keywords an exception\n is fired.\n\n Parameters\n ----------\n ident: char\n beam identification\n keylist: list\n list of keywords\n '
try:
self.ident = ident
self.norder = self._find_order('DISP_', ident, keylist)
self.twodkeys = self._find_twodkeys('DLDP_', ident, keylist)
except CKeyNotFound as e:
try:
self.twodkeys = self._find_twodkeys('DLD1P_', ident, keylist)
except CKeyNotFound as e:
raise DispNotFound(ident, e.keyword)
except CKeyLengthWrong as e:
_log.info('\nField dependent keyword: {0:s} has wrong length!'.format(e.keyword))
raise DispNotFound(ident, e.keyword)
except CKeyLengthWrong as e:
_log.info('\nField dependent keyword: {0:s} has wrong length!'.format(e.keyword))
raise DispNotFound(ident, e.keyword) | 405,561,882,949,454,900 | The method initializes a configuration dispersion
object for a given beam identifier.
All necessary keywords are extracted from
an input keyword list.
In case of missing keywords an exception
is fired.
Parameters
----------
ident: char
beam identification
keylist: list
list of keywords | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, ident=None, keylist=None):
try:
self.ident = ident
self.norder = self._find_order('DISP_', ident, keylist)
self.twodkeys = self._find_twodkeys('DLDP_', ident, keylist)
except CKeyNotFound as e:
try:
self.twodkeys = self._find_twodkeys('DLD1P_', ident, keylist)
except CKeyNotFound as e:
raise DispNotFound(ident, e.keyword)
except CKeyLengthWrong as e:
_log.info('\nField dependent keyword: {0:s} has wrong length!'.format(e.keyword))
raise DispNotFound(ident, e.keyword)
except CKeyLengthWrong as e:
_log.info('\nField dependent keyword: {0:s} has wrong length!'.format(e.keyword))
raise DispNotFound(ident, e.keyword) |
def __str__(self):
'return string representation of the object'
description = ('Dispersion solution for Beam ' + str(self.ident))
rstring = super(ConfigDisp, self).str_header(description)
rstring += super(ConfigDisp, self).__str__()
return rstring | 3,565,906,571,381,236,000 | return string representation of the object | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
description = ('Dispersion solution for Beam ' + str(self.ident))
rstring = super(ConfigDisp, self).str_header(description)
rstring += super(ConfigDisp, self).__str__()
return rstring |
def __str__(self):
'returns string representation of the object'
rstring = ''
for line in self.header:
rstring += line
return rstring | -2,722,295,313,507,606,500 | returns string representation of the object | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
rstring = ''
for line in self.header:
rstring += line
return rstring |
def __init__(self, filename=None):
'Initializes the configuration header class\n\n The method extracts the header from a configuration\n file. If no filename is provided, a default\n header is created.\n\n Parameters\n ----------\n filename: str\n name of the configuration file\n '
if (filename is None):
super(ConfHeader, self).__init__()
else:
self.header = []
start = 1
fopen = open(filename, 'r')
for line in fopen:
if start:
str_line = line.strip()
if ((len(str_line) > 0) and (str_line[0] == '#')):
self.header.append((line.strip() + '\n'))
else:
start = 0
fopen.close()
The method extracts the header from a configuration
file. If no filename is provided, a default
header is created.
Parameters
----------
filename: str
name of the configuration file | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, filename=None):
if (filename is None):
super(ConfHeader, self).__init__()
else:
self.header = []
start = 1
fopen = open(filename, 'r')
for line in fopen:
if start:
str_line = line.strip()
if ((len(str_line) > 0) and (str_line[0] == '#')):
self.header.append((line.strip() + '\n'))
else:
start = 0
fopen.close()
def __init__(self, keyword, keyvalue, comment=None):
'Constructor for the keyword class\n\n The keyword instance is created using\n all input values.\n\n Parameters\n ----------\n keyword: str\n the keyword name\n keyvalue: str\n the keyword value\n comment: str\n the keyword comment\n '
self.keyword = keyword
self.keyvalue = keyvalue
self.comment = comment | -6,223,254,573,613,425,000 | Constructor for the keyword class
The keyword instance is created using
all input values.
Parameters
----------
keyword: str
the keyword name
keyvalue: str
the keyword value
comment: str
the keyword comment | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, keyword, keyvalue, comment=None):
self.keyword = keyword
self.keyvalue = keyvalue
self.comment = comment |
def __str__(self):
'String method for the class\n\n The method creates and returns\n the string representation of the\n keyword.\n\n Returns\n -------\n obj: str\n string representation of the object\n '
rstring = ((self.keyword + ' ') + str(self.keyvalue))
if (self.comment is not None):
rstring = ((rstring + ' ; ') + self.comment)
rstring += '\n'
return rstring | 4,730,034,521,895,430,000 | String method for the class
The method creates and returns
the string representation of the
keyword.
Returns
-------
obj: str
string representation of the object | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
rstring = ((self.keyword + ' ') + str(self.keyvalue))
if (self.comment is not None):
rstring = ((rstring + ' ; ') + self.comment)
rstring += '\n'
return rstring |
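The serialized form is simply 'KEYWORD VALUE', with an optional '; comment' suffix and a trailing newline. Mirrored on plain strings with made-up values:

```python
keyword, keyvalue, comment = 'CAMERA', 'IR', 'detector in use'  # made-up values
rstring = keyword + ' ' + str(keyvalue)
if comment is not None:
    rstring = rstring + ' ; ' + comment
print(rstring)  # CAMERA IR ; detector in use
```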
def __init__(self, keyword, keyvalue, comment=None):
'Constructor for the keyword list class\n\n Initializer for the keyword list class.\n The keyword instance is created using\n all input values.\n\n Parameters\n ----------\n keyword: str\n the keyword name\n keyvalue: str\n the keyword values\n comment: str\n the keyword comment\n '
self.kvallist = []
super(ConfListKey, self).__init__(keyword, keyvalue, comment)
vlist = self.keyvalue.split()
for value in vlist:
self.kvallist.append(float(value)) | -7,401,940,664,991,267,000 | Constructor for the keyword list class
Initializer for the keyword list class.
The keyword instance is created using
all input values.
Parameters
----------
keyword: str
the keword name
keyvalue: str
the keyword values
comment: str
the keyword comment | pyaxe/axesrc/configfile.py | __init__ | sosey/pyaxe | python | def __init__(self, keyword, keyvalue, comment=None):
self.kvallist = []
super(ConfListKey, self).__init__(keyword, keyvalue, comment)
vlist = self.keyvalue.split()
for value in vlist:
self.kvallist.append(float(value)) |
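The constructor splits the whitespace-separated value string and converts each token to a float, so a value like '1.0 2.5e-3 -4' becomes a numeric list:

```python
keyvalue = '1.0 2.5e-3 -4'   # example value string
kvallist = [float(v) for v in keyvalue.split()]
print(kvallist)  # [1.0, 0.0025, -4.0]
```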
def __getitem__(self, index):
'Getindex method for the class\n\n The operator method which is called\n when an index is requested on a\n class instance\n test = kkk[0]\n\n Parameters\n ----------\n index: int\n the index to address\n\n Returns\n -------\n obj: float\n the indexed object\n '
if (index > (len(self.kvallist) - 1)):
err_msg = (('Index: ' + str(index)) + ' does not exist!')
raise aXeError(err_msg)
return self.kvallist[index] | 2,011,000,079,791,154,000 | Getindex method for the class
The operator method which is called
when an index is requested on a
class instance
test = kkk[0]
Parameters
----------
index: int
the index to address
Returns
-------
obj: float
the indexed object | pyaxe/axesrc/configfile.py | __getitem__ | sosey/pyaxe | python | def __getitem__(self, index):
if (index > (len(self.kvallist) - 1)):
err_msg = (('Index: ' + str(index)) + ' does not exist!')
raise aXeError(err_msg)
return self.kvallist[index] |
def __setitem__(self, index, obj):
'Setindex method for the class\n\n The operator method which is called\n when the index of a class instance is\n set to a value.\n kkk[0] = test\n\n Parameters\n ----------\n index: int\n the index to address\n obj: list\n description of the object content\n '
if (index > (len(self.kvallist) - 1)):
err_msg = (('Index ' + str(index)) + ' does not exist!')
raise aXeError(err_msg)
elif (not isinstance(obj, type(self[0]))):
err_msg = 'Object: {0:s} has wrong type: {1:s}!'.format(str(obj), str(type(obj)))
raise aXeError(err_msg)
self.kvallist[index] = obj | -4,370,131,239,325,133,300 | Setindex method for the class
The operator method which is called
when the index of a class instance is
set to a value.
kkk[0] = test
Parameters
----------
index: int
the index to address
obj: list
description of the object content | pyaxe/axesrc/configfile.py | __setitem__ | sosey/pyaxe | python | def __setitem__(self, index, obj):
if (index > (len(self.kvallist) - 1)):
err_msg = (('Index ' + str(index)) + ' does not exist!')
raise aXeError(err_msg)
elif (not isinstance(obj, type(self[0]))):
err_msg = 'Object: {0:s} has wrong type: {1:s}!'.format(str(obj), str(type(obj)))
raise aXeError(err_msg)
self.kvallist[index] = obj |
def __str__(self):
'returns the string representation of the keyword.'
rstring = self.keyword
for value in self.kvallist:
rstring = (rstring + (' %12.6g' % value))
if (self.comment is not None):
rstring = ((rstring + ' ; ') + self.comment)
rstring += '\n'
return rstring | 6,901,117,975,011,775,000 | returns the string representation of the keyword. | pyaxe/axesrc/configfile.py | __str__ | sosey/pyaxe | python | def __str__(self):
rstring = self.keyword
for value in self.kvallist:
rstring = (rstring + (' %12.6g' % value))
if (self.comment is not None):
rstring = ((rstring + ' ; ') + self.comment)
rstring += '\n'
return rstring |
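A minimal usage sketch for the ConfListKey accessors above; the import path is taken from the record's path column and the keyword/value strings are made-up examples, so treat both as assumptions:
from pyaxe.axesrc.configfile import ConfListKey  # assumed import, per the path column
key = ConfListKey('DLDP_A_0', '3858.5 9283.2', comment='field dependence')
print(key[0])      # __getitem__ -> 3858.5
key[1] = 9300.0    # __setitem__ accepts a float, matching type(key[0])
print(str(key))    # __str__ renders the keyword, the values, then '; field dependence'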
def fix_queryselector(elems):
"Workaround for web components breaking querySelector.\n\n Because someone thought it was a good idea to just yeet the moral equivalent\n of iframes everywhere over a single page 🤦\n\n Shadow DOM was a terrible idea and everyone involved should feel professionally\n ashamed of themselves. Every problem it tried to solved could and should have\n been solved in better ways that don't break the DOM.\n "
selectors = '").shadowRoot.querySelector("'.join(elems)
return (('return document.querySelector("' + selectors) + '")') | 4,373,821,098,347,562,500 | Workaround for web components breaking querySelector.
Because someone thought it was a good idea to just yeet the moral equivalent
of iframes everywhere over a single page 🤦
Shadow DOM was a terrible idea and everyone involved should feel professionally
ashamed of themselves. Every problem it tried to solve could and should have
been solved in better ways that don't break the DOM. | tests/integration/test_charm.py | fix_queryselector | VariableDeclared/kubeflow-dashboard-operator | python | def fix_queryselector(elems):
"Workaround for web components breaking querySelector.\n\n Because someone thought it was a good idea to just yeet the moral equivalent\n of iframes everywhere over a single page 🤦\n\n Shadow DOM was a terrible idea and everyone involved should feel professionally\n ashamed of themselves. Every problem it tried to solved could and should have\n been solved in better ways that don't break the DOM.\n "
selectors = '").shadowRoot.querySelector("'.join(elems)
return (('return document.querySelector("' + selectors) + '")') |
def get_ids(num_subjects=None, short=True):
'\n num_subjects : number of subject IDs to get\n short : True or False, specifies whether to get short or long subject IDs\n\n return:\n subject_IDs : list of subject IDs (length num_subjects)\n '
if short:
subject_IDs = np.loadtxt(os.path.join(root_folder, 'subject_IDs.txt'), dtype=int)
subject_IDs = subject_IDs.astype(str)
else:
subject_IDs = np.loadtxt(os.path.join(root_folder, 'full_IDs.txt'), dtype=str)
if (num_subjects is not None):
subject_IDs = subject_IDs[:num_subjects]
return subject_IDs | -9,211,025,603,926,083,000 | num_subjects : number of subject IDs to get
short : True or False, specifies whether to get short or long subject IDs
return:
subject_IDs : list of subject IDs (length num_subjects) | lib/abide_utils.py | get_ids | HoganZhang/gcn_metric_learning | python | def get_ids(num_subjects=None, short=True):
'\n num_subjects : number of subject IDs to get\n short : True or False, specifies whether to get short or long subject IDs\n\n return:\n subject_IDs : list of subject IDs (length num_subjects)\n '
if short:
subject_IDs = np.loadtxt(os.path.join(root_folder, 'subject_IDs.txt'), dtype=int)
subject_IDs = subject_IDs.astype(str)
else:
subject_IDs = np.loadtxt(os.path.join(root_folder, 'full_IDs.txt'), dtype=str)
if (num_subjects is not None):
subject_IDs = subject_IDs[:num_subjects]
return subject_IDs |
def fetch_filenames(subject_list, file_type):
'\n subject_list : list of short subject IDs in string format\n file_type : must be one of the available file types\n\n returns:\n\n filenames : list of filenames (same length as subject_list)\n '
filemapping = {'func_preproc': '_func_preproc.nii.gz', 'rois_aal': '_rois_aal.1D', 'rois_cc200': '_rois_cc200.1D', 'rois_ho': '_rois_ho.1D'}
filenames = []
subject_IDs = get_ids(short=True)
subject_IDs = subject_IDs.tolist()
full_IDs = get_ids(short=False)
for s in subject_list:
try:
if (file_type in filemapping):
idx = subject_IDs.index(s)
pattern = (full_IDs[idx] + filemapping[file_type])
else:
pattern = (s + file_type)
filenames.append(os.path.join(root_folder, s, pattern))
except ValueError:
filenames.append('N/A')
return filenames | 4,503,734,664,731,265,500 | subject_list : list of short subject IDs in string format
file_type : must be one of the available file types
returns:
filenames : list of filenames (same length as subject_list)
'\n subject_list : list of short subject IDs in string format\n file_type : must be one of the available file types\n\n returns:\n\n filenames : list of filetypes (same length as subject_list)\n '
filemapping = {'func_preproc': '_func_preproc.nii.gz', 'rois_aal': '_rois_aal.1D', 'rois_cc200': '_rois_cc200.1D', 'rois_ho': '_rois_ho.1D'}
filenames = []
subject_IDs = get_ids(short=True)
subject_IDs = subject_IDs.tolist()
full_IDs = get_ids(short=False)
for s in subject_list:
try:
if (file_type in filemapping):
idx = subject_IDs.index(s)
pattern = (full_IDs[idx] + filemapping[file_type])
else:
pattern = (s + file_type)
filenames.append(os.path.join(root_folder, s, pattern))
except ValueError:
filenames.append('N/A')
return filenames |
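An illustrative call; the subject IDs below are placeholders, not real ABIDE subjects:
files = fetch_filenames(['50003', '50004'], 'rois_ho')
# Each entry is '<root_folder>/<short_ID>/<full_ID>_rois_ho.1D',
# or 'N/A' when the short ID is not found in subject_IDs.txt.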
def fetch_subject_files(subjectID):
'\n subjectID : short subject ID for which list of available files are fetched\n\n returns:\n\n onlyfiles : list of absolute paths for available subject files\n '
subject_IDs = get_ids(short=True)
subject_IDs = subject_IDs.tolist()
full_IDs = get_ids(short=False)
try:
idx = subject_IDs.index(subjectID)
subject_folder = os.path.join(root_folder, subjectID)
onlyfiles = [os.path.join(subject_folder, f) for f in os.listdir(subject_folder) if os.path.isfile(os.path.join(subject_folder, f))]
except ValueError:
onlyfiles = []
return onlyfiles | -7,182,902,865,471,219,000 | subjectID : short subject ID for which list of available files are fetched
returns:
onlyfiles : list of absolute paths for available subject files | lib/abide_utils.py | fetch_subject_files | HoganZhang/gcn_metric_learning | python | def fetch_subject_files(subjectID):
'\n subjectID : short subject ID for which list of available files are fetched\n\n returns:\n\n onlyfiles : list of absolute paths for available subject files\n '
subject_IDs = get_ids(short=True)
subject_IDs = subject_IDs.tolist()
full_IDs = get_ids(short=False)
try:
idx = subject_IDs.index(subjectID)
subject_folder = os.path.join(root_folder, subjectID)
onlyfiles = [os.path.join(subject_folder, f) for f in os.listdir(subject_folder) if os.path.isfile(os.path.join(subject_folder, f))]
except ValueError:
onlyfiles = []
return onlyfiles |
def fetch_conn_matrices(subject_list, atlas_name, kind):
'\n subject_list : list of short subject IDs in string format\n atlas_name : the atlas based on which the timeseries are generated e.g. aal, cc200\n kind : the kind of correlation used to estimate the matrices, e.g. lasso, partial correlation or correlation\n\n returns:\n connectivity : list of square connectivity matrices, one for each subject in subject_list\n '
conn_files = fetch_filenames(subject_list, (((('_' + atlas_name) + '_') + kind.replace(' ', '_')) + '.mat'))
conn_matrices = []
for fl in conn_files:
print(('Reading connectivity file %s' % fl))
try:
mat = sio.loadmat(fl)['connectivity']
conn_matrices.append(mat)
except IOError:
print(('File %s does not exist' % fl))
return conn_matrices | 5,459,296,719,312,350,000 | subject_list : list of short subject IDs in string format
atlas_name : the atlas based on which the timeseries are generated e.g. aal, cc200
kind : the kind of correlation used to estimate the matrices, e.g. lasso, partial correlation or correlation
returns:
connectivity : list of square connectivity matrices, one for each subject in subject_list | lib/abide_utils.py | fetch_conn_matrices | HoganZhang/gcn_metric_learning | python | def fetch_conn_matrices(subject_list, atlas_name, kind):
'\n subject_list : list of short subject IDs in string format\n atlas_name : the atlas based on which the timeseries are generated e.g. aal, cc200\n kind : the kind of correlation used to estimate the matrices, e.g. lasso, partial correlation or correlation\n\n returns:\n connectivity : list of square connectivity matrices, one for each subject in subject_list\n '
conn_files = fetch_filenames(subject_list, (((('_' + atlas_name) + '_') + kind.replace(' ', '_')) + '.mat'))
conn_matrices = []
for fl in conn_files:
print(('Reading connectivity file %s' % fl))
try:
mat = sio.loadmat(fl)['connectivity']
conn_matrices.append(mat)
except IOError:
print(('File %s does not exist' % fl))
return conn_matrices |
def get_timeseries(subject_list, atlas_name):
'\n subject_list : list of short subject IDs in string format\n atlas_name : the atlas based on which the timeseries are generated e.g. aal, cc200\n\n returns:\n ts : list of timeseries arrays, each of shape (timepoints x regions)\n '
ts_files = fetch_filenames(subject_list, ('rois_' + atlas_name))
ts = []
for fl in ts_files:
print(('Reading timeseries file %s' % fl))
ts.append(np.loadtxt(fl, skiprows=0))
return ts | 8,516,775,852,561,009,000 | subject_list : list of short subject IDs in string format
atlas_name : the atlas based on which the timeseries are generated e.g. aal, cc200
returns:
ts : list of timeseries arrays, each of shape (timepoints x regions) | lib/abide_utils.py | get_timeseries | HoganZhang/gcn_metric_learning | python | def get_timeseries(subject_list, atlas_name):
'\n subject_list : list of short subject IDs in string format\n atlas_name : the atlas based on which the timeseries are generated e.g. aal, cc200\n\n returns:\n ts : list of timeseries arrays, each of shape (timepoints x regions)\n '
ts_files = fetch_filenames(subject_list, ('rois_' + atlas_name))
ts = []
for fl in ts_files:
print(('Reading timeseries file %s' % fl))
ts.append(np.loadtxt(fl, skiprows=0))
return ts |
def norm_timeseries(ts_list):
'\n ts_list : list of timeseries arrays, each of shape (timepoints x regions)\n\n returns:\n norm_ts : list of normalised timeseries arrays, same shape as ts_list\n '
norm_ts = []
for ts in ts_list:
norm_ts.append(nilearn.signal.clean(ts, detrend=False))
return norm_ts | -4,589,535,447,200,558,600 | ts_list : list of timeseries arrays, each of shape (timepoints x regions)
returns:
norm_ts : list of normalised timeseries arrays, same shape as ts_list | lib/abide_utils.py | norm_timeseries | HoganZhang/gcn_metric_learning | python | def norm_timeseries(ts_list):
'\n ts_list : list of timeseries arrays, each of shape (timepoints x regions)\n\n returns:\n norm_ts : list of normalised timeseries arrays, same shape as ts_list\n '
norm_ts = []
for ts in ts_list:
norm_ts.append(nilearn.signal.clean(ts, detrend=False))
return norm_ts |
def subject_connectivity(timeseries, subject, atlas_name, kind, save=True, save_path=root_folder):
'\n timeseries : timeseries table for subject (timepoints x regions)\n subject : the subject short ID\n atlas_name : name of the atlas used\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n save : save the connectivity matrix to a file\n save_path : specify path to save the matrix if different from subject folder\n\n returns:\n connectivity : connectivity matrix (regions x regions)\n '
print(('Estimating %s matrix for subject %s' % (kind, subject)))
if (kind == 'lasso'):
covariance_estimator = GraphLassoCV(verbose=1)
covariance_estimator.fit(timeseries)
connectivity = covariance_estimator.covariance_
print('Covariance matrix has shape {0}.'.format(connectivity.shape))
elif (kind in ['tangent', 'partial correlation', 'correlation']):
conn_measure = connectome.ConnectivityMeasure(kind=kind)
connectivity = conn_measure.fit_transform([timeseries])[0]
if save:
subject_file = os.path.join(save_path, subject, (((((subject + '_') + atlas_name) + '_') + kind.replace(' ', '_')) + '.mat'))
sio.savemat(subject_file, {'connectivity': connectivity})
return connectivity | 2,734,517,113,801,625,600 | timeseries : timeseries table for subject (timepoints x regions)
subject : the subject short ID
atlas_name : name of the atlas used
kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
save : save the connectivity matrix to a file
save_path : specify path to save the matrix if different from subject folder
returns:
connectivity : connectivity matrix (regions x regions) | lib/abide_utils.py | subject_connectivity | HoganZhang/gcn_metric_learning | python | def subject_connectivity(timeseries, subject, atlas_name, kind, save=True, save_path=root_folder):
'\n timeseries : timeseries table for subject (timepoints x regions)\n subject : the subject short ID\n atlas_name : name of the atlas used\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n save : save the connectivity matrix to a file\n save_path : specify path to save the matrix if different from subject folder\n\n returns:\n connectivity : connectivity matrix (regions x regions)\n '
print(('Estimating %s matrix for subject %s' % (kind, subject)))
if (kind == 'lasso'):
covariance_estimator = GraphLassoCV(verbose=1)
covariance_estimator.fit(timeseries)
connectivity = covariance_estimator.covariance_
print('Covariance matrix has shape {0}.'.format(connectivity.shape))
elif (kind in ['tangent', 'partial correlation', 'correlation']):
conn_measure = connectome.ConnectivityMeasure(kind=kind)
connectivity = conn_measure.fit_transform([timeseries])[0]
if save:
subject_file = os.path.join(save_path, subject, (((((subject + '_') + atlas_name) + '_') + kind.replace(' ', '_')) + '.mat'))
sio.savemat(subject_file, {'connectivity': connectivity})
return connectivity |
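A sketch of estimating one subject's matrix by chaining the helpers above; the subject ID and atlas are assumptions:
ts = get_timeseries(['50003'], 'ho')[0]   # (timepoints x regions) array
conn = subject_connectivity(ts, '50003', 'ho', 'correlation', save=False)
# conn is a square (regions x regions) correlation matrix.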
def group_connectivity(timeseries, subject_list, atlas_name, kind, save=True, save_path=root_folder):
'\n timeseries : list of timeseries tables for subjects (timepoints x regions)\n subject_list : the subject short IDs list\n atlas_name : name of the atlas used\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n save : save the connectivity matrix to a file\n save_path : specify path to save the matrix if different from subject folder\n\n returns:\n connectivity : connectivity matrix (regions x regions)\n '
if (kind == 'lasso'):
covariance_estimator = GraphLassoCV(verbose=1)
connectivity_matrices = []
for (i, ts) in enumerate(timeseries):
covariance_estimator.fit(ts)
connectivity = covariance_estimator.covariance_
connectivity_matrices.append(connectivity)
print('Covariance matrix has shape {0}.'.format(connectivity.shape))
elif (kind in ['tangent', 'partial correlation', 'correlation']):
conn_measure = connectome.ConnectivityMeasure(kind=kind)
connectivity_matrices = conn_measure.fit_transform(timeseries)
if save:
for (i, subject) in enumerate(subject_list):
subject_file = os.path.join(save_path, subject_list[i], (((((subject_list[i] + '_') + atlas_name) + '_') + kind.replace(' ', '_')) + '.mat'))
sio.savemat(subject_file, {'connectivity': connectivity_matrices[i]})
print(('Saving connectivity matrix to %s' % subject_file))
return connectivity_matrices | -6,875,135,910,234,682,000 | timeseries : list of timeseries tables for subjects (timepoints x regions)
subject_list : the subject short IDs list
atlas_name : name of the atlas used
kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
save : save the connectivity matrix to a file
save_path : specify path to save the matrix if different from subject folder
returns:
connectivity : connectivity matrix (regions x regions) | lib/abide_utils.py | group_connectivity | HoganZhang/gcn_metric_learning | python | def group_connectivity(timeseries, subject_list, atlas_name, kind, save=True, save_path=root_folder):
'\n timeseries : list of timeseries tables for subjects (timepoints x regions)\n subject_list : the subject short IDs list\n atlas_name : name of the atlas used\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n save : save the connectivity matrix to a file\n save_path : specify path to save the matrix if different from subject folder\n\n returns:\n connectivity : connectivity matrix (regions x regions)\n '
if (kind == 'lasso'):
covariance_estimator = GraphLassoCV(verbose=1)
connectivity_matrices = []
for (i, ts) in enumerate(timeseries):
covariance_estimator.fit(ts)
connectivity = covariance_estimator.covariance_
connectivity_matrices.append(connectivity)
print('Covariance matrix has shape {0}.'.format(connectivity.shape))
elif (kind in ['tangent', 'partial correlation', 'correlation']):
conn_measure = connectome.ConnectivityMeasure(kind=kind)
connectivity_matrices = conn_measure.fit_transform(timeseries)
if save:
for (i, subject) in enumerate(subject_list):
subject_file = os.path.join(save_path, subject_list[i], (((((subject_list[i] + '_') + atlas_name) + '_') + kind.replace(' ', '_')) + '.mat'))
sio.savemat(subject_file, {'connectivity': connectivity_matrices[i]})
print(('Saving connectivity matrix to %s' % subject_file))
return connectivity_matrices |
def get_subject_label(subject_list, label_name):
'\n subject_list : the subject short IDs list\n label_name : name of the label to be retrieved\n\n returns:\n label : dictionary of subject labels\n '
label = {}
with open(os.path.join(save_path, 'ABIDE_pcp/Phenotypic_V1_0b_preprocessed1.csv')) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
if (row['subject'] in subject_list):
label[row['subject']] = row[label_name]
return label | -7,788,276,538,155,215,000 | subject_list : the subject short IDs list
label_name : name of the label to be retrieved
returns:
label : dictionary of subject labels | lib/abide_utils.py | get_subject_label | HoganZhang/gcn_metric_learning | python | def get_subject_label(subject_list, label_name):
'\n subject_list : the subject short IDs list\n label_name : name of the label to be retrieved\n\n returns:\n label : dictionary of subject labels\n '
label = {}
with open(os.path.join(save_path, 'ABIDE_pcp/Phenotypic_V1_0b_preprocessed1.csv')) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
if (row['subject'] in subject_list):
label[row['subject']] = row[label_name]
return label |
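An example lookup against the ABIDE phenotypic file; 'DX_GROUP' is the usual diagnosis column in that CSV and is used here as an assumed label name:
subject_list = get_ids(num_subjects=20).tolist()
labels = get_subject_label(subject_list, label_name='DX_GROUP')
# labels maps each short subject ID to its diagnosis-group string.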
def load_all_networks(subject_list, kind, atlas_name='aal'):
'\n subject_list : the subject short IDs list\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n atlas_name : name of the atlas used\n\n returns:\n all_networks : list of connectivity matrices (regions x regions)\n '
all_networks = []
for subject in subject_list:
fl = os.path.join(root_folder, subject, (((((subject + '_') + atlas_name) + '_') + kind) + '.mat'))
matrix = sio.loadmat(fl)['connectivity']
if (atlas_name == 'ho'):
matrix = np.delete(matrix, 82, axis=0)
matrix = np.delete(matrix, 82, axis=1)
all_networks.append(matrix)
return all_networks | -100,557,484,637,785,340 | subject_list : the subject short IDs list
kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
atlas_name : name of the atlas used
returns:
all_networks : list of connectivity matrices (regions x regions) | lib/abide_utils.py | load_all_networks | HoganZhang/gcn_metric_learning | python | def load_all_networks(subject_list, kind, atlas_name='aal'):
'\n subject_list : the subject short IDs list\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n atlas_name : name of the atlas used\n\n returns:\n all_networks : list of connectivity matrices (regions x regions)\n '
all_networks = []
for subject in subject_list:
fl = os.path.join(root_folder, subject, (((((subject + '_') + atlas_name) + '_') + kind) + '.mat'))
matrix = sio.loadmat(fl)['connectivity']
if (atlas_name == 'ho'):
matrix = np.delete(matrix, 82, axis=0)
matrix = np.delete(matrix, 82, axis=1)
all_networks.append(matrix)
return all_networks |
def get_net_vectors(subject_list, kind, atlas_name='aal'):
'\n subject_list : the subject short IDs list\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n atlas_name : name of the atlas used\n\n returns:\n matrix : matrix of connectivity vectors (num_subjects x num_connections)\n '
networks = load_all_networks(subject_list, kind, atlas_name=atlas_name)
norm_networks = [np.arctanh(mat) for mat in networks]
idx = np.triu_indices_from(norm_networks[0], 1)
vec_networks = [mat[idx] for mat in norm_networks]
matrix = np.vstack(vec_networks)
return matrix | 5,627,430,259,171,655,000 | subject_list : the subject short IDs list
kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
atlas_name : name of the atlas used
returns:
matrix : matrix of connectivity vectors (num_subjects x num_connections) | lib/abide_utils.py | get_net_vectors | HoganZhang/gcn_metric_learning | python | def get_net_vectors(subject_list, kind, atlas_name='aal'):
'\n subject_list : the subject short IDs list\n kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation\n atlas_name : name of the atlas used\n\n returns:\n matrix : matrix of connectivity vectors (num_subjects x num_connections)\n '
networks = load_all_networks(subject_list, kind, atlas_name=atlas_name)
norm_networks = [np.arctanh(mat) for mat in networks]
idx = np.triu_indices_from(norm_networks[0], 1)
vec_networks = [mat[idx] for mat in norm_networks]
matrix = np.vstack(vec_networks)
return matrix |
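The upper-triangle vectorisation used above, shown on a toy 3x3 matrix; applying arctanh only to the extracted off-diagonal values sidesteps the inf the unit diagonal would otherwise produce:
import numpy as np
mat = np.array([[1.0, 0.2, 0.4],
                [0.2, 1.0, 0.6],
                [0.4, 0.6, 1.0]])
idx = np.triu_indices_from(mat, 1)   # indices above the diagonal: (0,1), (0,2), (1,2)
vec = np.arctanh(mat[idx])           # Fisher z-transform -> 3 values per subject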
def get_atlas_coords(atlas_name='ho'):
'\n atlas_name : name of the atlas used\n\n returns:\n matrix : matrix of roi 3D coordinates in MNI space (num_rois x 3)\n '
coords_file = os.path.join(root_folder, (atlas_name + '_coords.csv'))
coords = np.loadtxt(coords_file, delimiter=',')
if (atlas_name == 'ho'):
coords = np.delete(coords, 82, axis=0)
return coords | -7,176,198,256,274,610,000 | atlas_name : name of the atlas used
returns:
matrix : matrix of roi 3D coordinates in MNI space (num_rois x 3) | lib/abide_utils.py | get_atlas_coords | HoganZhang/gcn_metric_learning | python | def get_atlas_coords(atlas_name='ho'):
'\n atlas_name : name of the atlas used\n\n returns:\n matrix : matrix of roi 3D coordinates in MNI space (num_rois x 3)\n '
coords_file = os.path.join(root_folder, (atlas_name + '_coords.csv'))
coords = np.loadtxt(coords_file, delimiter=',')
if (atlas_name == 'ho'):
coords = np.delete(coords, 82, axis=0)
return coords |
def mask_nan(arrays: List[np.ndarray]) -> List[np.ndarray]:
'\n Drop indices from equal-sized arrays if the element at that index is NaN in\n any of the input arrays.\n\n Parameters\n ----------\n arrays : List[np.ndarray]\n list of ndarrays containing NaNs, to be masked\n\n Returns\n -------\n List[np.ndarray]\n masked arrays (free of NaNs)\n\n Notes\n -----\n This function finds the indices where one or more elements is NaN in one or\n more of the input arrays, then drops those indices from all arrays.\n For example:\n >> a = np.array([0, 1, np.nan, 3])\n >> b = np.array([np.nan, 5, np.nan, 7])\n >> c = np.array([8, 9, 10, 11])\n >> mask_nan([a, b, c])\n [array([ 1., 3.]), array([ 5., 7.]), array([ 9, 11])]\n\n '
n = arrays[0].size
assert all(((a.size == n) for a in arrays[1:]))
mask = np.array(([False] * n))
for arr in arrays:
mask = np.logical_or(mask, np.isnan(arr))
return [arr[np.where((~ mask))[0]] for arr in arrays] | -4,966,370,888,865,612,000 | Drop indices from equal-sized arrays if the element at that index is NaN in
any of the input arrays.
Parameters
----------
arrays : List[np.ndarray]
list of ndarrays containing NaNs, to be masked
Returns
-------
List[np.ndarray]
masked arrays (free of NaNs)
Notes
-----
This function finds the indices where one or more elements is NaN in one or
more of the input arrays, then drops those indices from all arrays.
For example:
>> a = np.array([0, 1, np.nan, 3])
>> b = np.array([np.nan, 5, np.nan, 7])
>> c = np.array([8, 9, 10, 11])
>> mask_nan([a, b, c])
[array([ 1., 3.]), array([ 5., 7.]), array([ 9, 11])] | jburt/mask.py | mask_nan | jbburt/jburt | python | def mask_nan(arrays: List[np.ndarray]) -> List[np.ndarray]:
'\n Drop indices from equal-sized arrays if the element at that index is NaN in\n any of the input arrays.\n\n Parameters\n ----------\n arrays : List[np.ndarray]\n list of ndarrays containing NaNs, to be masked\n\n Returns\n -------\n List[np.ndarray]\n masked arrays (free of NaNs)\n\n Notes\n -----\n This function finds the indices where one or more elements is NaN in one or\n more of the input arrays, then drops those indices from all arrays.\n For example:\n >> a = np.array([0, 1, np.nan, 3])\n >> b = np.array([np.nan, 5, np.nan, 7])\n >> c = np.array([8, 9, 10, 11])\n >> mask_nan([a, b, c])\n [array([ 1., 3.]), array([ 5., 7.]), array([ 9, 11])]\n\n '
n = arrays[0].size
assert all(((a.size == n) for a in arrays[1:]))
mask = np.array(([False] * n))
for arr in arrays:
mask = np.logical_or(mask, np.isnan(arr))
return [arr[np.where((~ mask))[0]] for arr in arrays] |
def _bit_list_to_bytes(bit_list):
"Converts an iterable of 1's and 0's to bytes.\n\n Combines the list 8 at a time, treating each group of 8 bits\n as a single byte.\n "
num_bits = len(bit_list)
byte_vals = bytearray()
for start in six.moves.xrange(0, num_bits, 8):
curr_bits = bit_list[start:(start + 8)]
char_val = sum(((val * digit) for (val, digit) in zip(_POW2, curr_bits)))
byte_vals.append(char_val)
return bytes(byte_vals) | 1,053,106,055,171,523,000 | Converts an iterable of 1's and 0's to bytes.
Combines the list 8 at a time, treating each group of 8 bits
as a single byte. | test/lib/oauth2client/_pure_python_crypt.py | _bit_list_to_bytes | giangpvit/googledrivepythonsample | python | def _bit_list_to_bytes(bit_list):
"Converts an iterable of 1's and 0's to bytes.\n\n Combines the list 8 at a time, treating each group of 8 bits\n as a single byte.\n "
num_bits = len(bit_list)
byte_vals = bytearray()
for start in six.moves.xrange(0, num_bits, 8):
curr_bits = bit_list[start:(start + 8)]
char_val = sum(((val * digit) for (val, digit) in zip(_POW2, curr_bits)))
byte_vals.append(char_val)
return bytes(byte_vals) |
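A worked example, assuming _POW2 is (128, 64, 32, 16, 8, 4, 2, 1) as in oauth2client, so the first bit of each group of eight is the most significant:
_bit_list_to_bytes([0, 1, 0, 0, 0, 0, 0, 1])   # 0b01000001 == 65 -> b'A'
_bit_list_to_bytes([0, 1, 0, 0, 0, 0, 0, 1,
                    0, 1, 0, 0, 0, 0, 1, 0])   # two groups of 8 -> b'AB'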
def verify(self, message, signature):
'Verifies a message against a signature.\n\n Args:\n message: string or bytes, The message to verify. If string, will be\n encoded to bytes as utf-8.\n signature: string or bytes, The signature on the message. If\n string, will be encoded to bytes as utf-8.\n\n Returns:\n True if message was signed by the private key associated with the\n public key that this object was constructed with.\n '
message = _helpers._to_bytes(message, encoding='utf-8')
try:
return rsa.pkcs1.verify(message, signature, self._pubkey)
except (ValueError, rsa.pkcs1.VerificationError):
return False | 8,373,880,066,434,508,000 | Verifies a message against a signature.
Args:
message: string or bytes, The message to verify. If string, will be
encoded to bytes as utf-8.
signature: string or bytes, The signature on the message. If
string, will be encoded to bytes as utf-8.
Returns:
True if message was signed by the private key associated with the
public key that this object was constructed with. | test/lib/oauth2client/_pure_python_crypt.py | verify | giangpvit/googledrivepythonsample | python | def verify(self, message, signature):
'Verifies a message against a signature.\n\n Args:\n message: string or bytes, The message to verify. If string, will be\n encoded to bytes as utf-8.\n signature: string or bytes, The signature on the message. If\n string, will be encoded to bytes as utf-8.\n\n Returns:\n True if message was signed by the private key associated with the\n public key that this object was constructed with.\n '
message = _helpers._to_bytes(message, encoding='utf-8')
try:
return rsa.pkcs1.verify(message, signature, self._pubkey)
except (ValueError, rsa.pkcs1.VerificationError):
return False |
@classmethod
def from_string(cls, key_pem, is_x509_cert):
'Construct an RsaVerifier instance from a string.\n\n Args:\n key_pem: string, public key in PEM format.\n is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it\n is expected to be an RSA key in PEM format.\n\n Returns:\n RsaVerifier instance.\n\n Raises:\n ValueError: if the key_pem can\'t be parsed. In either case, error\n will begin with \'No PEM start marker\'. If\n ``is_x509_cert`` is True, will fail to find the\n "-----BEGIN CERTIFICATE-----" error, otherwise fails\n to find "-----BEGIN RSA PUBLIC KEY-----".\n '
key_pem = _helpers._to_bytes(key_pem)
if is_x509_cert:
der = rsa.pem.load_pem(key_pem, 'CERTIFICATE')
(asn1_cert, remaining) = decoder.decode(der, asn1Spec=Certificate())
if (remaining != b''):
raise ValueError('Unused bytes', remaining)
cert_info = asn1_cert['tbsCertificate']['subjectPublicKeyInfo']
key_bytes = _bit_list_to_bytes(cert_info['subjectPublicKey'])
pubkey = rsa.PublicKey.load_pkcs1(key_bytes, 'DER')
else:
pubkey = rsa.PublicKey.load_pkcs1(key_pem, 'PEM')
return cls(pubkey) | 1,120,892,723,722,349,200 | Construct an RsaVerifier instance from a string.
Args:
key_pem: string, public key in PEM format.
is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
is expected to be an RSA key in PEM format.
Returns:
RsaVerifier instance.
Raises:
ValueError: if the key_pem can't be parsed. In either case, error
will begin with 'No PEM start marker'. If
``is_x509_cert`` is True, will fail to find the
"-----BEGIN CERTIFICATE-----" error, otherwise fails
to find "-----BEGIN RSA PUBLIC KEY-----". | test/lib/oauth2client/_pure_python_crypt.py | from_string | giangpvit/googledrivepythonsample | python | @classmethod
def from_string(cls, key_pem, is_x509_cert):
'Construct an RsaVerifier instance from a string.\n\n Args:\n key_pem: string, public key in PEM format.\n is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it\n is expected to be an RSA key in PEM format.\n\n Returns:\n RsaVerifier instance.\n\n Raises:\n ValueError: if the key_pem can\'t be parsed. In either case, error\n will begin with \'No PEM start marker\'. If\n ``is_x509_cert`` is True, will fail to find the\n "-----BEGIN CERTIFICATE-----" error, otherwise fails\n to find "-----BEGIN RSA PUBLIC KEY-----".\n '
key_pem = _helpers._to_bytes(key_pem)
if is_x509_cert:
der = rsa.pem.load_pem(key_pem, 'CERTIFICATE')
(asn1_cert, remaining) = decoder.decode(der, asn1Spec=Certificate())
if (remaining != b''):
raise ValueError('Unused bytes', remaining)
cert_info = asn1_cert['tbsCertificate']['subjectPublicKeyInfo']
key_bytes = _bit_list_to_bytes(cert_info['subjectPublicKey'])
pubkey = rsa.PublicKey.load_pkcs1(key_bytes, 'DER')
else:
pubkey = rsa.PublicKey.load_pkcs1(key_pem, 'PEM')
return cls(pubkey) |
def sign(self, message):
'Signs a message.\n\n Args:\n message: bytes, Message to be signed.\n\n Returns:\n string, The signature of the message for the given key.\n '
message = _helpers._to_bytes(message, encoding='utf-8')
return rsa.pkcs1.sign(message, self._key, 'SHA-256') | 3,153,042,305,313,748 | Signs a message.
Args:
message: bytes, Message to be signed.
Returns:
string, The signature of the message for the given key. | test/lib/oauth2client/_pure_python_crypt.py | sign | giangpvit/googledrivepythonsample | python | def sign(self, message):
'Signs a message.\n\n Args:\n message: bytes, Message to be signed.\n\n Returns:\n string, The signature of the message for the given key.\n '
message = _helpers._to_bytes(message, encoding='utf-8')
return rsa.pkcs1.sign(message, self._key, 'SHA-256') |
@classmethod
def from_string(cls, key, password='notasecret'):
'Construct an RsaSigner instance from a string.\n\n Args:\n key: string, private key in PEM format.\n password: string, password for private key file. Unused for PEM\n files.\n\n Returns:\n RsaSigner instance.\n\n Raises:\n ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in\n PEM format.\n '
key = _helpers._from_bytes(key)
(marker_id, key_bytes) = pem.readPemBlocksFromFile(six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER)
if (marker_id == 0):
pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes, format='DER')
elif (marker_id == 1):
(key_info, remaining) = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
if (remaining != b''):
raise ValueError('Unused bytes', remaining)
pkey_info = key_info.getComponentByName('privateKey')
pkey = rsa.key.PrivateKey.load_pkcs1(pkey_info.asOctets(), format='DER')
else:
raise ValueError('No key could be detected.')
return cls(pkey) | -8,973,487,569,601,698,000 | Construct an RsaSigner instance from a string.
Args:
key: string, private key in PEM format.
password: string, password for private key file. Unused for PEM
files.
Returns:
RsaSigner instance.
Raises:
ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in
PEM format. | test/lib/oauth2client/_pure_python_crypt.py | from_string | giangpvit/googledrivepythonsample | python | @classmethod
def from_string(cls, key, password='notasecret'):
'Construct an RsaSigner instance from a string.\n\n Args:\n key: string, private key in PEM format.\n password: string, password for private key file. Unused for PEM\n files.\n\n Returns:\n RsaSigner instance.\n\n Raises:\n ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in\n PEM format.\n '
key = _helpers._from_bytes(key)
(marker_id, key_bytes) = pem.readPemBlocksFromFile(six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER)
if (marker_id == 0):
pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes, format='DER')
elif (marker_id == 1):
(key_info, remaining) = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
if (remaining != b''):
raise ValueError('Unused bytes', remaining)
pkey_info = key_info.getComponentByName('privateKey')
pkey = rsa.key.PrivateKey.load_pkcs1(pkey_info.asOctets(), format='DER')
else:
raise ValueError('No key could be detected.')
return cls(pkey) |
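An end-to-end sketch tying the signer and verifier together; pem_private and pem_public are placeholder PEM strings the caller must supply:
signer = RsaSigner.from_string(pem_private)          # PKCS#1 or PKCS#8 PEM
signature = signer.sign(b'a message')                # RSA + SHA-256
verifier = RsaVerifier.from_string(pem_public, is_x509_cert=False)
assert verifier.verify(b'a message', signature)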
def send_await_disconnect(self, message, timeout=30):
'Sends a message to the node and wait for disconnect.\n\n This is used when we want to send a message into the node that we expect\n will get us disconnected, eg an invalid block.'
self.send_message(message)
wait_until((lambda : (not self.connected)), timeout=timeout, lock=mininode_lock) | 4,941,045,123,737,797,000 | Sends a message to the node and wait for disconnect.
This is used when we want to send a message into the node that we expect
will get us disconnected, eg an invalid block. | test/functional/p2p_compactblocks.py | send_await_disconnect | RitoProject/Ravencoin | python | def send_await_disconnect(self, message, timeout=30):
'Sends a message to the node and wait for disconnect.\n\n This is used when we want to send a message into the node that we expect\n will get us disconnected, eg an invalid block.'
self.send_message(message)
wait_until((lambda : (not self.connected)), timeout=timeout, lock=mininode_lock) |
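Typical use in a functional test, where sending a known-invalid block is expected to get the peer disconnected; msg_block and bad_block are hypothetical stand-ins for the test framework's message wrapper and a deliberately invalid block:
test_node.send_await_disconnect(msg_block(bad_block), timeout=30)
# Returns only once the node drops the connection; otherwise wait_until times out.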
def setup_platform(hass, config, add_entities, discovery_info=None):
'Set up the QRCode image platform.'
add_entities([QRCodeCamera(hass, 'remote_access', 'remote_access')]) | -4,680,799,103,156,011,000 | Set up the QRCode image platform. | homeassistant/components/ais_qrcode/camera.py | setup_platform | DRubioBizcaino/AIS-home-assistant | python | def setup_platform(hass, config, add_entities, discovery_info=None):
add_entities([QRCodeCamera(hass, 'remote_access', 'remote_access')]) |
def __init__(self, hass, name, entity_ids):
'Initialize the QRCode entity.'
super().__init__()
self._hass = hass
self._name = name
self._entities = entity_ids
self._image = io.BytesIO()
self._refresh_() | 693,735,822,991,084,900 | Initialize the QRCode entity. | homeassistant/components/ais_qrcode/camera.py | __init__ | DRubioBizcaino/AIS-home-assistant | python | def __init__(self, hass, name, entity_ids):
super().__init__()
self._hass = hass
self._name = name
self._entities = entity_ids
self._image = io.BytesIO()
self._refresh_() |
async def async_added_to_hass(self):
'Register callbacks.'
@callback
def qr_state_listener(entity, old_state, new_state):
'Handle device state changes.'
self._refresh_()
@callback
def qr_sensor_startup(event):
'Update template on startup.'
async_track_state_change(self.hass, self._entities, qr_state_listener)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, qr_sensor_startup) | 423,380,977,470,289,540 | Register callbacks. | homeassistant/components/ais_qrcode/camera.py | async_added_to_hass | DRubioBizcaino/AIS-home-assistant | python | async def async_added_to_hass(self):
@callback
def qr_state_listener(entity, old_state, new_state):
'Handle device state changes.'
self._refresh_()
@callback
def qr_sensor_startup(event):
'Update template on startup.'
async_track_state_change(self.hass, self._entities, qr_state_listener)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, qr_sensor_startup) |
@property
def name(self):
'Return the name of the image processor.'
return self._name | -6,951,592,156,995,671,000 | Return the name of the image processor. | homeassistant/components/ais_qrcode/camera.py | name | DRubioBizcaino/AIS-home-assistant | python | @property
def name(self):
return self._name |
@property
def should_poll(self):
'Update the recording state periodically.'
return True | -4,912,315,392,805,007,000 | Update the recording state periodically. | homeassistant/components/ais_qrcode/camera.py | should_poll | DRubioBizcaino/AIS-home-assistant | python | @property
def should_poll(self):
return True |
def camera_image(self):
'Process the image.'
return self._image.getvalue() | 7,730,349,533,930,145,000 | Process the image. | homeassistant/components/ais_qrcode/camera.py | camera_image | DRubioBizcaino/AIS-home-assistant | python | def camera_image(self):
return self._image.getvalue() |
def turn_on(self):
'Turn on camera.'
self._refresh_() | 1,927,745,159,555,773,000 | Turn on camera. | homeassistant/components/ais_qrcode/camera.py | turn_on | DRubioBizcaino/AIS-home-assistant | python | def turn_on(self):
self._refresh_() |
@callback
def qr_state_listener(entity, old_state, new_state):
'Handle device state changes.'
self._refresh_() | -1,729,874,226,238,035,500 | Handle device state changes. | homeassistant/components/ais_qrcode/camera.py | qr_state_listener | DRubioBizcaino/AIS-home-assistant | python | @callback
def qr_state_listener(entity, old_state, new_state):
self._refresh_() |
@callback
def qr_sensor_startup(event):
'Update template on startup.'
async_track_state_change(self.hass, self._entities, qr_state_listener) | 5,025,705,161,518,244,000 | Update template on startup. | homeassistant/components/ais_qrcode/camera.py | qr_sensor_startup | DRubioBizcaino/AIS-home-assistant | python | @callback
def qr_sensor_startup(event):
async_track_state_change(self.hass, self._entities, qr_state_listener) |
def run():
'Run the mark_bad_channels command.'
from mne.commands.utils import get_optparser
parser = get_optparser(__file__, usage='usage: %prog options args', prog_prefix='mne_bids', version=mne_bids.__version__)
parser.add_option('--ch_name', dest='ch_names', action='append', default=[], help='The names of the bad channels. If multiple channels are bad, pass the --ch_name parameter multiple times.')
parser.add_option('--description', dest='descriptions', action='append', default=[], help='Descriptions as to why the channels are bad. Must match the number of bad channels provided. Pass multiple times to supply more than one value in that case.')
parser.add_option('--bids_root', dest='bids_root', help='The path of the folder containing the BIDS dataset')
parser.add_option('--subject_id', dest='subject', help='Subject name')
parser.add_option('--session_id', dest='session', help='Session name')
parser.add_option('--task', dest='task', help='Task name')
parser.add_option('--acq', dest='acquisition', help='Acquisition parameter')
parser.add_option('--run', dest='run', help='Run number')
parser.add_option('--proc', dest='processing', help='Processing label.')
parser.add_option('--rec', dest='recording', help='Recording name')
parser.add_option('--type', dest='datatype', help='Recording data type, e.g. meg, ieeg or eeg')
parser.add_option('--suffix', dest='suffix', help='The filename suffix, i.e. the last part before the extension')
parser.add_option('--ext', dest='extension', help='The filename extension, including the leading period, e.g. .fif')
parser.add_option('--overwrite', dest='overwrite', action='store_true', help='Replace existing channel status entries')
parser.add_option('--verbose', dest='verbose', action='store_true', help='Whether to generate additional diagnostic output')
(opt, args) = parser.parse_args()
if args:
parser.print_help()
parser.error(f'''Please do not specify arguments without flags. Got: {args}.
''')
if (opt.bids_root is None):
parser.print_help()
parser.error('You must specify bids_root')
if (opt.ch_names is None):
parser.print_help()
parser.error('You must specify some --ch_name parameters.')
ch_names = ([] if (opt.ch_names == ['']) else opt.ch_names)
bids_path = BIDSPath(subject=opt.subject, session=opt.session, task=opt.task, acquisition=opt.acquisition, run=opt.run, processing=opt.processing, recording=opt.recording, datatype=opt.datatype, suffix=opt.suffix, extension=opt.extension, root=opt.bids_root)
bids_paths = bids_path.match()
allowed_extensions = list(reader.keys())
bids_paths = [p for p in bids_paths if (p.extension in allowed_extensions)]
if (not bids_paths):
logger.info('No matching files found. Please consider using a less restrictive set of entities to broaden the search.')
return
logger.info(f"Marking channels {', '.join(ch_names)} as bad in {len(bids_paths)} recording(s) …")
for bids_path in bids_paths:
logger.info(f'Processing: {bids_path.basename}')
mark_bad_channels(ch_names=ch_names, descriptions=opt.descriptions, bids_path=bids_path, overwrite=opt.overwrite, verbose=opt.verbose) | -2,400,883,623,979,624,400 | Run the mark_bad_channels command. | mne_bids/commands/mne_bids_mark_bad_channels.py | run | adam2392/mne-bids | python | def run():
from mne.commands.utils import get_optparser
parser = get_optparser(__file__, usage='usage: %prog options args', prog_prefix='mne_bids', version=mne_bids.__version__)
parser.add_option('--ch_name', dest='ch_names', action='append', default=[], help='The names of the bad channels. If multiple channels are bad, pass the --ch_name parameter multiple times.')
parser.add_option('--description', dest='descriptions', action='append', default=[], help='Descriptions as to why the channels are bad. Must match the number of bad channels provided. Pass multiple times to supply more than one value in that case.')
parser.add_option('--bids_root', dest='bids_root', help='The path of the folder containing the BIDS dataset')
parser.add_option('--subject_id', dest='subject', help='Subject name')
parser.add_option('--session_id', dest='session', help='Session name')
parser.add_option('--task', dest='task', help='Task name')
parser.add_option('--acq', dest='acquisition', help='Acquisition parameter')
parser.add_option('--run', dest='run', help='Run number')
parser.add_option('--proc', dest='processing', help='Processing label.')
parser.add_option('--rec', dest='recording', help='Recording name')
parser.add_option('--type', dest='datatype', help='Recording data type, e.g. meg, ieeg or eeg')
parser.add_option('--suffix', dest='suffix', help='The filename suffix, i.e. the last part before the extension')
parser.add_option('--ext', dest='extension', help='The filename extension, including the leading period, e.g. .fif')
parser.add_option('--overwrite', dest='overwrite', action='store_true', help='Replace existing channel status entries')
parser.add_option('--verbose', dest='verbose', action='store_true', help='Whether to generate additional diagnostic output')
(opt, args) = parser.parse_args()
if args:
parser.print_help()
parser.error(f'''Please do not specify arguments without flags. Got: {args}.
''')
if (opt.bids_root is None):
parser.print_help()
parser.error('You must specify bids_root')
if (opt.ch_names is None):
parser.print_help()
parser.error('You must specify some --ch_name parameters.')
ch_names = ([] if (opt.ch_names == ['']) else opt.ch_names)
bids_path = BIDSPath(subject=opt.subject, session=opt.session, task=opt.task, acquisition=opt.acquisition, run=opt.run, processing=opt.processing, recording=opt.recording, datatype=opt.datatype, suffix=opt.suffix, extension=opt.extension, root=opt.bids_root)
bids_paths = bids_path.match()
allowed_extensions = list(reader.keys())
bids_paths = [p for p in bids_paths if (p.extension in allowed_extensions)]
if (not bids_paths):
logger.info('No matching files found. Please consider using a less restrictive set of entities to broaden the search.')
return
logger.info(f"Marking channels {', '.join(ch_names)} as bad in {len(bids_paths)} recording(s) …")
for bids_path in bids_paths:
logger.info(f'Processing: {bids_path.basename}')
mark_bad_channels(ch_names=ch_names, descriptions=opt.descriptions, bids_path=bids_path, overwrite=opt.overwrite, verbose=opt.verbose) |
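The direct API equivalent of the command-line wiring above, using only calls that appear in the function itself; the entity values are placeholders:
bids_path = BIDSPath(subject='01', session='01', task='audio',
                     datatype='meg', root='/data/bids')
mark_bad_channels(ch_names=['MEG 0112'], descriptions=['flat channel'],
                  bids_path=bids_path, overwrite=False, verbose=True)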
def __init__(self):
'\n :param api_key:\n :param secret:\n :param trade_type: the trading type; only future and spot are supported.\n '
self.http_client = BinanceSpotHttp(api_key=config.api_key, secret=config.api_secret, proxy_host=config.proxy_host, proxy_port=config.proxy_port)
self.buy_orders = []
self.sell_orders = [] | 8,054,059,871,128,726,000 | :param api_key:
:param secret:
:param trade_type: the trading type; only future and spot are supported. | trader/binance_trader.py | __init__ | xgy560501/binance_grid_trader | python | def __init__(self):
'\n :param api_key:\n :param secret:\n :param trade_type: the trading type; only future and spot are supported.\n '
self.http_client = BinanceSpotHttp(api_key=config.api_key, secret=config.api_secret, proxy_host=config.proxy_host, proxy_port=config.proxy_port)
self.buy_orders = []
self.sell_orders = [] |
def grid_trader(self):
'\n Execute the core logic of the grid-trading strategy.\n :return:\n '
(bid_price, ask_price) = self.get_bid_ask_price()
print(f'bid_price: {bid_price}, ask_price: {ask_price}')
quantity = round_to(float(config.quantity), float(config.min_qty))
self.buy_orders.sort(key=(lambda x: float(x['price'])), reverse=True)
self.sell_orders.sort(key=(lambda x: float(x['price'])), reverse=True)
print(f'buy orders: {self.buy_orders}')
print('------------------------------')
print(f'sell orders: {self.sell_orders}')
buy_delete_orders = []
sell_delete_orders = []
for buy_order in self.buy_orders:
check_order = self.http_client.get_order(buy_order.get('symbol', config.symbol), client_order_id=buy_order.get('clientOrderId'))
if check_order:
if (check_order.get('status') == OrderStatus.CANCELED.value):
buy_delete_orders.append(buy_order)
print(f"buy order status was canceled: {check_order.get('status')}")
elif (check_order.get('status') == OrderStatus.FILLED.value):
logging.info(f"买单成交时间: {datetime.now()}, 价格: {check_order.get('price')}, 数量: {check_order.get('origQty')}")
sell_price = round_to((float(check_order.get('price')) * (1 + float(config.gap_percent))), float(config.min_price))
if (0 < sell_price < ask_price):
sell_price = round_to(ask_price, float(config.min_price))
new_sell_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.SELL, order_type=OrderType.LIMIT, quantity=quantity, price=sell_price)
if new_sell_order:
buy_delete_orders.append(buy_order)
self.sell_orders.append(new_sell_order)
buy_price = round_to((float(check_order.get('price')) * (1 - float(config.gap_percent))), config.min_price)
if (buy_price > bid_price > 0):
buy_price = round_to(bid_price, float(config.min_price))
new_buy_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.BUY, order_type=OrderType.LIMIT, quantity=quantity, price=buy_price)
if new_buy_order:
self.buy_orders.append(new_buy_order)
elif (check_order.get('status') == OrderStatus.NEW.value):
print('buy order status is: New')
else:
print(f"buy order status is not above options: {check_order.get('status')}")
for delete_order in buy_delete_orders:
self.buy_orders.remove(delete_order)
for sell_order in self.sell_orders:
check_order = self.http_client.get_order(sell_order.get('symbol', config.symbol), client_order_id=sell_order.get('clientOrderId'))
if check_order:
if (check_order.get('status') == OrderStatus.CANCELED.value):
sell_delete_orders.append(sell_order)
print(f"sell order status was canceled: {check_order.get('status')}")
elif (check_order.get('status') == OrderStatus.FILLED.value):
logging.info(f"卖单成交时间: {datetime.now()}, 价格: {check_order.get('price')}, 数量: {check_order.get('origQty')}")
buy_price = round_to((float(check_order.get('price')) * (1 - float(config.gap_percent))), float(config.min_price))
if (buy_price > bid_price > 0):
buy_price = round_to(bid_price, float(config.min_price))
new_buy_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.BUY, order_type=OrderType.LIMIT, quantity=quantity, price=buy_price)
if new_buy_order:
sell_delete_orders.append(sell_order)
self.buy_orders.append(new_buy_order)
sell_price = round_to((float(check_order.get('price')) * (1 + float(config.gap_percent))), float(config.min_price))
if (0 < sell_price < ask_price):
sell_price = round_to(ask_price, float(config.min_price))
new_sell_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.SELL, order_type=OrderType.LIMIT, quantity=quantity, price=sell_price)
if new_sell_order:
self.sell_orders.append(new_sell_order)
elif (check_order.get('status') == OrderStatus.NEW.value):
print('sell order status is: New')
else:
print(f"sell order status is not in above options: {check_order.get('status')}")
for delete_order in sell_delete_orders:
self.sell_orders.remove(delete_order)
if (len(self.buy_orders) <= 0):
if (bid_price > 0):
price = round_to((bid_price * (1 - float(config.gap_percent))), float(config.min_price))
buy_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.BUY, order_type=OrderType.LIMIT, quantity=quantity, price=price)
if buy_order:
self.buy_orders.append(buy_order)
elif (len(self.buy_orders) > int(config.max_orders)):
self.buy_orders.sort(key=(lambda x: float(x['price'])), reverse=False)
delete_order = self.buy_orders[0]
order = self.http_client.cancel_order(delete_order.get('symbol'), client_order_id=delete_order.get('clientOrderId'))
if order:
self.buy_orders.remove(delete_order)
if (len(self.sell_orders) <= 0):
if (ask_price > 0):
price = round_to((ask_price * (1 + float(config.gap_percent))), float(config.min_price))
order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.SELL, order_type=OrderType.LIMIT, quantity=quantity, price=price)
if order:
self.sell_orders.append(order)
elif (len(self.sell_orders) > int(config.max_orders)):
self.sell_orders.sort(key=(lambda x: x['price']), reverse=True)
delete_order = self.sell_orders[0]
order = self.http_client.cancel_order(delete_order.get('symbol'), client_order_id=delete_order.get('clientOrderId'))
if order:
self.sell_orders.remove(delete_order) | -3,668,909,501,779,043,300 | Execute the core logic of the grid-trading strategy.
:return: | trader/binance_trader.py | grid_trader | xgy560501/binance_grid_trader | python | def grid_trader(self):
'\n Execute the core logic of the grid-trading strategy.\n :return:\n '
(bid_price, ask_price) = self.get_bid_ask_price()
print(f'bid_price: {bid_price}, ask_price: {ask_price}')
quantity = round_to(float(config.quantity), float(config.min_qty))
self.buy_orders.sort(key=(lambda x: float(x['price'])), reverse=True)
self.sell_orders.sort(key=(lambda x: float(x['price'])), reverse=True)
print(f'buy orders: {self.buy_orders}')
print('------------------------------')
print(f'sell orders: {self.sell_orders}')
buy_delete_orders = []
sell_delete_orders = []
for buy_order in self.buy_orders:
check_order = self.http_client.get_order(buy_order.get('symbol', config.symbol), client_order_id=buy_order.get('clientOrderId'))
if check_order:
if (check_order.get('status') == OrderStatus.CANCELED.value):
buy_delete_orders.append(buy_order)
print(f"buy order status was canceled: {check_order.get('status')}")
elif (check_order.get('status') == OrderStatus.FILLED.value):
logging.info(f"买单成交时间: {datetime.now()}, 价格: {check_order.get('price')}, 数量: {check_order.get('origQty')}")
sell_price = round_to((float(check_order.get('price')) * (1 + float(config.gap_percent))), float(config.min_price))
if (0 < sell_price < ask_price):
sell_price = round_to(ask_price, float(config.min_price))
new_sell_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.SELL, order_type=OrderType.LIMIT, quantity=quantity, price=sell_price)
if new_sell_order:
buy_delete_orders.append(buy_order)
self.sell_orders.append(new_sell_order)
buy_price = round_to((float(check_order.get('price')) * (1 - float(config.gap_percent))), config.min_price)
if (buy_price > bid_price > 0):
buy_price = round_to(bid_price, float(config.min_price))
new_buy_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.BUY, order_type=OrderType.LIMIT, quantity=quantity, price=buy_price)
if new_buy_order:
self.buy_orders.append(new_buy_order)
elif (check_order.get('status') == OrderStatus.NEW.value):
print('buy order status is: New')
else:
print(f"buy order status is not above options: {check_order.get('status')}")
for delete_order in buy_delete_orders:
self.buy_orders.remove(delete_order)
for sell_order in self.sell_orders:
check_order = self.http_client.get_order(sell_order.get('symbol', config.symbol), client_order_id=sell_order.get('clientOrderId'))
if check_order:
if (check_order.get('status') == OrderStatus.CANCELED.value):
sell_delete_orders.append(sell_order)
print(f"sell order status was canceled: {check_order.get('status')}")
elif (check_order.get('status') == OrderStatus.FILLED.value):
logging.info(f"卖单成交时间: {datetime.now()}, 价格: {check_order.get('price')}, 数量: {check_order.get('origQty')}")
buy_price = round_to((float(check_order.get('price')) * (1 - float(config.gap_percent))), float(config.min_price))
if (buy_price > bid_price > 0):
buy_price = round_to(bid_price, float(config.min_price))
new_buy_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.BUY, order_type=OrderType.LIMIT, quantity=quantity, price=buy_price)
if new_buy_order:
sell_delete_orders.append(sell_order)
self.buy_orders.append(new_buy_order)
sell_price = round_to((float(check_order.get('price')) * (1 + float(config.gap_percent))), float(config.min_price))
if (0 < sell_price < ask_price):
sell_price = round_to(ask_price, float(config.min_price))
new_sell_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.SELL, order_type=OrderType.LIMIT, quantity=quantity, price=sell_price)
if new_sell_order:
self.sell_orders.append(new_sell_order)
elif (check_order.get('status') == OrderStatus.NEW.value):
print('sell order status is: New')
else:
print(f"sell order status is not in above options: {check_order.get('status')}")
for delete_order in sell_delete_orders:
self.sell_orders.remove(delete_order)
if (len(self.buy_orders) <= 0):
if (bid_price > 0):
price = round_to((bid_price * (1 - float(config.gap_percent))), float(config.min_price))
buy_order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.BUY, order_type=OrderType.LIMIT, quantity=quantity, price=price)
if buy_order:
self.buy_orders.append(buy_order)
elif (len(self.buy_orders) > int(config.max_orders)):
self.buy_orders.sort(key=(lambda x: float(x['price'])), reverse=False)
delete_order = self.buy_orders[0]
order = self.http_client.cancel_order(delete_order.get('symbol'), client_order_id=delete_order.get('clientOrderId'))
if order:
self.buy_orders.remove(delete_order)
if (len(self.sell_orders) <= 0):
if (ask_price > 0):
price = round_to((ask_price * (1 + float(config.gap_percent))), float(config.min_price))
order = self.http_client.place_order(symbol=config.symbol, order_side=OrderSide.SELL, order_type=OrderType.LIMIT, quantity=quantity, price=price)
if order:
self.sell_orders.append(order)
elif (len(self.sell_orders) > int(config.max_orders)):
self.sell_orders.sort(key=(lambda x: x['price']), reverse=True)
delete_order = self.sell_orders[0]
order = self.http_client.cancel_order(delete_order.get('symbol'), client_order_id=delete_order.get('clientOrderId'))
if order:
self.sell_orders.remove(delete_order) |
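The price laddering at the heart of grid_trader, as a standalone sketch; the prices, gap and tick size below are illustrative, and round_to is the same helper the function uses:
gap_percent, min_price = 0.01, 0.01
bid_price = 27000.0
buy_price = round_to(bid_price * (1 - gap_percent), min_price)   # 26730.0, one grid step below
sell_price = round_to(bid_price * (1 + gap_percent), min_price)  # 27270.0, one grid step above
# A fill on either side re-seeds an order on the opposite side at the same relative gap.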
def AddCommonDaisyArgs(parser, add_log_location=True):
'Common arguments for Daisy builds.'
if add_log_location:
parser.add_argument('--log-location', help='Directory in Cloud Storage to hold build logs. If not set, ```gs://<project num>.cloudbuild-logs.googleusercontent.com/``` is created and used.')
parser.add_argument('--timeout', type=arg_parsers.Duration(), default='2h', help=' Maximum time a build can last before it fails as "TIMEOUT".\n For example, specifying `2h` fails the process after 2 hours.\n See $ gcloud topic datetimes for information about duration formats.\n ')
base.ASYNC_FLAG.AddToParser(parser) | -2,194,424,139,332,781,000 | Common arguments for Daisy builds. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | AddCommonDaisyArgs | bopopescu/cndw | python | def AddCommonDaisyArgs(parser, add_log_location=True):
if add_log_location:
parser.add_argument('--log-location', help='Directory in Cloud Storage to hold build logs. If not set, ```gs://<project num>.cloudbuild-logs.googleusercontent.com/``` is created and used.')
parser.add_argument('--timeout', type=arg_parsers.Duration(), default='2h', help=' Maximum time a build can last before it fails as "TIMEOUT".\n For example, specifying `2h` fails the process after 2 hours.\n See $ gcloud topic datetimes for information about duration formats.\n ')
base.ASYNC_FLAG.AddToParser(parser) |
def AddExtraCommonDaisyArgs(parser):
'Extra common arguments for Daisy builds.'
parser.add_argument('--docker-image-tag', default=_DEFAULT_BUILDER_VERSION, hidden=True, help=' Specify which docker image tag (of tools from compute-image-tools)\n should be used for this command. By default it\'s "release", while\n "latest" is supported as well. There may be more versions supported in\n the future.\n ') | 6,787,699,014,774,607,000 | Extra common arguments for Daisy builds. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | AddExtraCommonDaisyArgs | bopopescu/cndw | python | def AddExtraCommonDaisyArgs(parser):
parser.add_argument('--docker-image-tag', default=_DEFAULT_BUILDER_VERSION, hidden=True, help=' Specify which docker image tag (of tools from compute-image-tools)\n should be used for this command. By default it\'s "release", while\n "latest" is supported as well. There may be more versions supported in\n the future.\n ') |
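A hedged sketch of how the two helpers above are typically wired into a calliope command's Args hook; the Import class is illustrative, while the daisy_utils module path matches the path field of these rows:

from googlecloudsdk.api_lib.compute import daisy_utils
from googlecloudsdk.calliope import base

class Import(base.CreateCommand):
    """Illustrative command that reuses the shared Daisy flags."""

    @staticmethod
    def Args(parser):
        daisy_utils.AddCommonDaisyArgs(parser)       # --log-location, --timeout, --async
        daisy_utils.AddExtraCommonDaisyArgs(parser)  # hidden --docker-image-tag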
def _CheckIamPermissions(project_id):
'Check for needed IAM permissions and prompt to add if missing.\n\n Args:\n project_id: A string with the name of the project.\n '
project = projects_api.Get(project_id)
expected_services = ['cloudbuild.googleapis.com', 'logging.googleapis.com']
for service_name in expected_services:
if (not services_api.IsServiceEnabled(project.projectId, service_name)):
prompt_message = 'The "{0}" service is not enabled for this project. It is required for this operation.\n'.format(service_name)
console_io.PromptContinue(prompt_message, 'Would you like to enable this service?', throw_if_unattended=True, cancel_on_no=True)
services_api.EnableService(project.projectId, service_name)
service_account = 'serviceAccount:{0}@cloudbuild.gserviceaccount.com'.format(project.projectNumber)
expected_permissions = {'roles/compute.admin': service_account}
for role in SERVICE_ACCOUNT_ROLES:
expected_permissions[role] = service_account
permissions = projects_api.GetIamPolicy(project_id)
for binding in permissions.bindings:
if (expected_permissions.get(binding.role) in binding.members):
del expected_permissions[binding.role]
if expected_permissions:
ep_table = ['{0} {1}'.format(role, account) for (role, account) in expected_permissions.items()]
prompt_message = 'The following IAM permissions are needed for this operation:\n[{0}]\n'.format('\n'.join(ep_table))
console_io.PromptContinue(message=prompt_message, prompt_string='Would you like to add the permissions', throw_if_unattended=True, cancel_on_no=True)
for (role, account) in expected_permissions.items():
log.info('Adding [{0}] to [{1}]'.format(account, role))
projects_api.AddIamPolicyBinding(project_id, account, role) | 1,559,943,286,587,097,300 | Check for needed IAM permissions and prompt to add if missing.
Args:
project_id: A string with the name of the project. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | _CheckIamPermissions | bopopescu/cndw | python | def _CheckIamPermissions(project_id):
'Check for needed IAM permissions and prompt to add if missing.\n\n Args:\n project_id: A string with the name of the project.\n '
project = projects_api.Get(project_id)
expected_services = ['cloudbuild.googleapis.com', 'logging.googleapis.com']
for service_name in expected_services:
if (not services_api.IsServiceEnabled(project.projectId, service_name)):
prompt_message = 'The "{0}" service is not enabled for this project. It is required for this operation.\n'.format(service_name)
console_io.PromptContinue(prompt_message, 'Would you like to enable this service?', throw_if_unattended=True, cancel_on_no=True)
services_api.EnableService(project.projectId, service_name)
service_account = 'serviceAccount:{0}@cloudbuild.gserviceaccount.com'.format(project.projectNumber)
expected_permissions = {'roles/compute.admin': service_account}
for role in SERVICE_ACCOUNT_ROLES:
expected_permissions[role] = service_account
permissions = projects_api.GetIamPolicy(project_id)
for binding in permissions.bindings:
if (expected_permissions.get(binding.role) in binding.members):
del expected_permissions[binding.role]
if expected_permissions:
ep_table = ['{0} {1}'.format(role, account) for (role, account) in expected_permissions.items()]
prompt_message = 'The following IAM permissions are needed for this operation:\n[{0}]\n'.format('\n'.join(ep_table))
console_io.PromptContinue(message=prompt_message, prompt_string='Would you like to add the permissions', throw_if_unattended=True, cancel_on_no=True)
for (role, account) in expected_permissions.items():
log.info('Adding [{0}] to [{1}]'.format(account, role))
projects_api.AddIamPolicyBinding(project_id, account, role) |
def _CreateCloudBuild(build_config, client, messages):
'Create a build in cloud build.\n\n Args:\n build_config: A cloud build Build message.\n client: The cloud build api client.\n messages: The cloud build api messages module.\n\n Returns:\n Tuple containing a cloud build build object and the resource reference\n for that build.\n '
log.debug('submitting build: {0}'.format(repr(build_config)))
op = client.projects_builds.Create(messages.CloudbuildProjectsBuildsCreateRequest(build=build_config, projectId=properties.VALUES.core.project.Get()))
json = encoding.MessageToJson(op.metadata)
build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build
build_ref = resources.REGISTRY.Create(collection='cloudbuild.projects.builds', projectId=build.projectId, id=build.id)
log.CreatedResource(build_ref)
if build.logUrl:
log.status.Print('Logs are available at [{0}].'.format(build.logUrl))
else:
log.status.Print('Logs are available in the Cloud Console.')
return (build, build_ref) | -8,884,530,913,900,584,000 | Create a build in cloud build.
Args:
build_config: A cloud build Build message.
client: The cloud build api client.
messages: The cloud build api messages module.
Returns:
Tuple containing a cloud build build object and the resource reference
for that build. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | _CreateCloudBuild | bopopescu/cndw | python | def _CreateCloudBuild(build_config, client, messages):
'Create a build in cloud build.\n\n Args:\n build_config: A cloud build Build message.\n client: The cloud build api client.\n messages: The cloud build api messages module.\n\n Returns:\n Tuple containing a cloud build build object and the resource reference\n for that build.\n '
log.debug('submitting build: {0}'.format(repr(build_config)))
op = client.projects_builds.Create(messages.CloudbuildProjectsBuildsCreateRequest(build=build_config, projectId=properties.VALUES.core.project.Get()))
json = encoding.MessageToJson(op.metadata)
build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build
build_ref = resources.REGISTRY.Create(collection='cloudbuild.projects.builds', projectId=build.projectId, id=build.id)
log.CreatedResource(build_ref)
if build.logUrl:
log.status.Print('Logs are available at [{0}].'.format(build.logUrl))
else:
log.status.Print('Logs are available in the Cloud Console.')
return (build, build_ref) |
def GetDaisyBucketName(bucket_location=None):
'Determine bucket name for daisy.\n\n Args:\n bucket_location: str, specified bucket location.\n\n Returns:\n str, bucket name for daisy.\n '
project = properties.VALUES.core.project.GetOrFail()
safe_project = project.replace(':', '-')
safe_project = safe_project.replace('.', '-')
bucket_name = '{0}-daisy-bkt'.format(safe_project)
if bucket_location:
bucket_name = '{0}-{1}'.format(bucket_name, bucket_location).lower()
safe_bucket_name = _GetSafeBucketName(bucket_name)
return safe_bucket_name | -41,550,496,668,432,180 | Determine bucket name for daisy.
Args:
bucket_location: str, specified bucket location.
Returns:
str, bucket name for daisy. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | GetDaisyBucketName | bopopescu/cndw | python | def GetDaisyBucketName(bucket_location=None):
'Determine bucket name for daisy.\n\n Args:\n bucket_location: str, specified bucket location.\n\n Returns:\n str, bucket name for daisy.\n '
project = properties.VALUES.core.project.GetOrFail()
safe_project = project.replace(':', '-')
safe_project = safe_project.replace('.', '-')
bucket_name = '{0}-daisy-bkt'.format(safe_project)
if bucket_location:
bucket_name = '{0}-{1}'.format(bucket_name, bucket_location).lower()
safe_bucket_name = _GetSafeBucketName(bucket_name)
return safe_bucket_name |
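A worked example of the name derivation, assuming the core/project property resolves to example.com:my-project and that _GetSafeBucketName (not shown in these rows) leaves an already-valid name unchanged:

# project 'example.com:my-project' -> safe name 'example-com-my-project'
GetDaisyBucketName()               # -> 'example-com-my-project-daisy-bkt'
GetDaisyBucketName('US-CENTRAL1')  # -> 'example-com-my-project-daisy-bkt-us-central1'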
def GetSubnetRegion():
"Gets region from global properties/args that should be used for subnet arg.\n\n Returns:\n str, region\n Raises:\n SubnetException: if region couldn't be inferred.\n "
if properties.VALUES.compute.zone.Get():
return utils.ZoneNameToRegionName(properties.VALUES.compute.zone.Get())
elif properties.VALUES.compute.region.Get():
return properties.VALUES.compute.region.Get()
raise SubnetException('Region or zone should be specified.') | 1,669,325,022,092,909,300 | Gets region from global properties/args that should be used for subnet arg.
Returns:
str, region
Raises:
SubnetException: if region couldn't be inferred. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | GetSubnetRegion | bopopescu/cndw | python | def GetSubnetRegion():
"Gets region from global properties/args that should be used for subnet arg.\n\n Returns:\n str, region\n Raises:\n SubnetException: if region couldn't be inferred.\n "
if properties.VALUES.compute.zone.Get():
return utils.ZoneNameToRegionName(properties.VALUES.compute.zone.Get())
elif properties.VALUES.compute.region.Get():
return properties.VALUES.compute.region.Get()
raise SubnetException('Region or zone should be specified.') |
def AppendNetworkAndSubnetArgs(args, builder_args):
'Extracts network/subnet out of CLI args and appends them to the importer args.\n\n    Args:\n    args: list of str, CLI args that might contain network/subnet args.\n    builder_args: list of str, args for builder.\n    '
if args.subnet:
AppendArg(builder_args, 'subnet', args.subnet.lower())
if args.network:
AppendArg(builder_args, 'network', args.network.lower()) | 8,540,173,118,059,027,000 | Extracts network/subnet out of CLI args and appends them to the importer args.
Args:
args: list of str, CLI args that might contain network/subnet args.
builder_args: list of str, args for builder. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | AppendNetworkAndSubnetArgs | bopopescu/cndw | python | def AppendNetworkAndSubnetArgs(args, builder_args):
'Extracts network/subnet out of CLI args and appends them to the importer args.\n\n    Args:\n    args: list of str, CLI args that might contain network/subnet args.\n    builder_args: list of str, args for builder.\n    '
if args.subnet:
AppendArg(builder_args, 'subnet', args.subnet.lower())
if args.network:
AppendArg(builder_args, 'network', args.network.lower()) |
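AppendArg and AppendBoolArg are called throughout this module but not included in these rows. A minimal sketch consistent with the call sites, including the custom '-{0}={1}s' pattern passed for the timeout argument later on; the real helpers may differ:

def AppendArg(args, name, arg, format_pattern='-{0}={1}'):
    # Append '-name=value' to the builder args, skipping falsy values.
    if arg:
        args.append(format_pattern.format(name, arg))

def AppendBoolArg(args, name, arg=True):
    # Boolean flags are emitted as a bare '-name' when truthy.
    AppendArg(args, name, arg, '-{0}')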
def RunImageImport(args, import_args, tags, output_filter, docker_image_tag=_DEFAULT_BUILDER_VERSION):
"Run a build over gce_vm_image_import on Google Cloud Builder.\n\n Args:\n args: An argparse namespace. All the arguments that were provided to this\n command invocation.\n import_args: A list of key-value pairs to pass to importer.\n tags: A list of strings for adding tags to the Argo build.\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n docker_image_tag: Specified docker image tag.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
builder = _IMAGE_IMPORT_BUILDER.format(docker_image_tag)
return RunImageCloudBuild(args, builder, import_args, tags, output_filter) | 7,050,490,164,758,734,000 | Run a build over gce_vm_image_import on Google Cloud Builder.
Args:
args: An argparse namespace. All the arguments that were provided to this
command invocation.
import_args: A list of key-value pairs to pass to importer.
tags: A list of strings for adding tags to the Argo build.
output_filter: A list of strings indicating what lines from the log should
be output. Only lines that start with one of the strings in output_filter
will be displayed.
docker_image_tag: Specified docker image tag.
Returns:
A build object that either streams the output or is displayed as a
link to the build.
Raises:
FailedBuildException: If the build is completed and not 'SUCCESS'. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | RunImageImport | bopopescu/cndw | python | def RunImageImport(args, import_args, tags, output_filter, docker_image_tag=_DEFAULT_BUILDER_VERSION):
"Run a build over gce_vm_image_import on Google Cloud Builder.\n\n Args:\n args: An argparse namespace. All the arguments that were provided to this\n command invocation.\n import_args: A list of key-value pairs to pass to importer.\n tags: A list of strings for adding tags to the Argo build.\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n docker_image_tag: Specified docker image tag.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
builder = _IMAGE_IMPORT_BUILDER.format(docker_image_tag)
return RunImageCloudBuild(args, builder, import_args, tags, output_filter) |
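A hedged usage sketch; the importer flag names (image_name, source_file, os) are assumptions about the gce_vm_image_import builder interface, not confirmed by these rows:

import_args = []
AppendArg(import_args, 'image_name', 'my-imported-image')
AppendArg(import_args, 'source_file', 'gs://my-bucket/disk.vmdk')
AppendArg(import_args, 'os', 'ubuntu-1804')
build = RunImageImport(args, import_args,
                       tags=['gce-daisy-image-import'],
                       output_filter=['[import-image]'])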
def RunImageExport(args, export_args, tags, output_filter, docker_image_tag=_DEFAULT_BUILDER_VERSION):
"Run a build over gce_vm_image_export on Google Cloud Builder.\n\n Args:\n args: An argparse namespace. All the arguments that were provided to this\n command invocation.\n export_args: A list of key-value pairs to pass to exporter.\n tags: A list of strings for adding tags to the Argo build.\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n docker_image_tag: Specified docker image tag.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
builder = _IMAGE_EXPORT_BUILDER.format(docker_image_tag)
return RunImageCloudBuild(args, builder, export_args, tags, output_filter) | 6,988,291,480,681,775,000 | Run a build over gce_vm_image_export on Google Cloud Builder.
Args:
args: An argparse namespace. All the arguments that were provided to this
command invocation.
export_args: A list of key-value pairs to pass to exporter.
tags: A list of strings for adding tags to the Argo build.
output_filter: A list of strings indicating what lines from the log should
be output. Only lines that start with one of the strings in output_filter
will be displayed.
docker_image_tag: Specified docker image tag.
Returns:
A build object that either streams the output or is displayed as a
link to the build.
Raises:
FailedBuildException: If the build is completed and not 'SUCCESS'. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | RunImageExport | bopopescu/cndw | python | def RunImageExport(args, export_args, tags, output_filter, docker_image_tag=_DEFAULT_BUILDER_VERSION):
"Run a build over gce_vm_image_export on Google Cloud Builder.\n\n Args:\n args: An argparse namespace. All the arguments that were provided to this\n command invocation.\n export_args: A list of key-value pairs to pass to exporter.\n tags: A list of strings for adding tags to the Argo build.\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n docker_image_tag: Specified docker image tag.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
builder = _IMAGE_EXPORT_BUILDER.format(docker_image_tag)
return RunImageCloudBuild(args, builder, export_args, tags, output_filter) |
def RunImageCloudBuild(args, builder, builder_args, tags, output_filter):
"Run a build related to image on Google Cloud Builder.\n\n Args:\n args: An argparse namespace. All the arguments that were provided to this\n command invocation.\n builder: Path to builder image.\n builder_args: A list of key-value pairs to pass to builder.\n tags: A list of strings for adding tags to the Argo build.\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
project_id = projects_util.ParseProject(properties.VALUES.core.project.GetOrFail())
_CheckIamPermissions(project_id)
return _RunCloudBuild(args, builder, builder_args, (['gce-daisy'] + tags), output_filter, args.log_location) | 6,806,106,212,134,878,000 | Run a build related to image on Google Cloud Builder.
Args:
args: An argparse namespace. All the arguments that were provided to this
command invocation.
builder: Path to builder image.
builder_args: A list of key-value pairs to pass to builder.
tags: A list of strings for adding tags to the Argo build.
output_filter: A list of strings indicating what lines from the log should
be output. Only lines that start with one of the strings in output_filter
will be displayed.
Returns:
A build object that either streams the output or is displayed as a
link to the build.
Raises:
FailedBuildException: If the build is completed and not 'SUCCESS'. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | RunImageCloudBuild | bopopescu/cndw | python | def RunImageCloudBuild(args, builder, builder_args, tags, output_filter):
"Run a build related to image on Google Cloud Builder.\n\n Args:\n args: An argparse namespace. All the arguments that were provided to this\n command invocation.\n builder: Path to builder image.\n builder_args: A list of key-value pairs to pass to builder.\n tags: A list of strings for adding tags to the Argo build.\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
project_id = projects_util.ParseProject(properties.VALUES.core.project.GetOrFail())
_CheckIamPermissions(project_id)
return _RunCloudBuild(args, builder, builder_args, (['gce-daisy'] + tags), output_filter, args.log_location) |
def _RunCloudBuild(args, builder, build_args, build_tags=None, output_filter=None, log_location=None, backoff=(lambda elapsed: 1)):
"Run a build with a specific builder on Google Cloud Builder.\n\n Args:\n args: an argparse namespace. All the arguments that were provided to this\n command invocation.\n builder: path to builder image\n build_args: args to be sent to builder\n build_tags: tags to be attached to the build\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n log_location: GCS path to directory where logs will be stored.\n backoff: A function that takes the current elapsed time and returns\n the next sleep length. Both are in seconds.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
client = cloudbuild_util.GetClientInstance()
messages = cloudbuild_util.GetMessagesModule()
build_config = messages.Build(steps=[messages.BuildStep(name=builder, args=build_args)], tags=build_tags, timeout='{0}s'.format(args.timeout))
if log_location:
gcs_log_dir = resources.REGISTRY.Parse(args.log_location, collection='storage.objects')
build_config.logsBucket = 'gs://{0}/{1}'.format(gcs_log_dir.bucket, gcs_log_dir.object)
(build, build_ref) = _CreateCloudBuild(build_config, client, messages)
if args.async_:
return build
mash_handler = execution.MashHandler(execution.GetCancelBuildHandler(client, messages, build_ref))
with execution_utils.CtrlCSection(mash_handler):
build = CloudBuildClientWithFiltering(client, messages).StreamWithFilter(build_ref, backoff, output_filter=output_filter)
if (build.status == messages.Build.StatusValueValuesEnum.TIMEOUT):
log.status.Print('Your build timed out. Use the [--timeout=DURATION] flag to change the timeout threshold.')
if (build.status != messages.Build.StatusValueValuesEnum.SUCCESS):
raise FailedBuildException(build)
return build | -7,934,000,592,701,394,000 | Run a build with a specific builder on Google Cloud Builder.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
builder: path to builder image
build_args: args to be sent to builder
build_tags: tags to be attached to the build
output_filter: A list of strings indicating what lines from the log should
be output. Only lines that start with one of the strings in output_filter
will be displayed.
log_location: GCS path to directory where logs will be stored.
backoff: A function that takes the current elapsed time and returns
the next sleep length. Both are in seconds.
Returns:
A build object that either streams the output or is displayed as a
link to the build.
Raises:
FailedBuildException: If the build is completed and not 'SUCCESS'. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | _RunCloudBuild | bopopescu/cndw | python | def _RunCloudBuild(args, builder, build_args, build_tags=None, output_filter=None, log_location=None, backoff=(lambda elapsed: 1)):
"Run a build with a specific builder on Google Cloud Builder.\n\n Args:\n args: an argparse namespace. All the arguments that were provided to this\n command invocation.\n builder: path to builder image\n build_args: args to be sent to builder\n build_tags: tags to be attached to the build\n output_filter: A list of strings indicating what lines from the log should\n be output. Only lines that start with one of the strings in output_filter\n will be displayed.\n log_location: GCS path to directory where logs will be stored.\n backoff: A function that takes the current elapsed time and returns\n the next sleep length. Both are in seconds.\n\n Returns:\n A build object that either streams the output or is displayed as a\n link to the build.\n\n Raises:\n FailedBuildException: If the build is completed and not 'SUCCESS'.\n "
client = cloudbuild_util.GetClientInstance()
messages = cloudbuild_util.GetMessagesModule()
build_config = messages.Build(steps=[messages.BuildStep(name=builder, args=build_args)], tags=build_tags, timeout='{0}s'.format(args.timeout))
if log_location:
gcs_log_dir = resources.REGISTRY.Parse(args.log_location, collection='storage.objects')
build_config.logsBucket = 'gs://{0}/{1}'.format(gcs_log_dir.bucket, gcs_log_dir.object)
(build, build_ref) = _CreateCloudBuild(build_config, client, messages)
if args.async_:
return build
mash_handler = execution.MashHandler(execution.GetCancelBuildHandler(client, messages, build_ref))
with execution_utils.CtrlCSection(mash_handler):
build = CloudBuildClientWithFiltering(client, messages).StreamWithFilter(build_ref, backoff, output_filter=output_filter)
if (build.status == messages.Build.StatusValueValuesEnum.TIMEOUT):
log.status.Print('Your build timed out. Use the [--timeout=DURATION] flag to change the timeout threshold.')
if (build.status != messages.Build.StatusValueValuesEnum.SUCCESS):
raise FailedBuildException(build)
return build |
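The backoff parameter is a plain elapsed-seconds -> sleep-seconds function; two sketches matching the defaults that appear in this module:

constant_backoff = lambda elapsed: 1                     # _RunCloudBuild default: poll every second
ovf_backoff = lambda elapsed: 2 if elapsed < 30 else 15  # used by the OVF import path below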
def RunOVFImportBuild(args, compute_client, instance_name, source_uri, no_guest_environment, can_ip_forward, deletion_protection, description, labels, machine_type, network, network_tier, subnet, private_network_ip, no_restart_on_failure, os, tags, zone, project, output_filter, compute_release_track):
'Run an OVF import build on Google Cloud Builder.\n\n  Args:\n    args: an argparse namespace. All the arguments that were provided to this\n      command invocation.\n    compute_client: Google Compute Engine client.\n    instance_name: Name of the instance to be imported.\n    source_uri: A GCS path to OVA or OVF package.\n    no_guest_environment: If set to True, Google Guest Environment won\'t be\n      installed on the boot disk of the VM.\n    can_ip_forward: If set to True, allows the instances to send and receive\n      packets with non-matching destination or source IP addresses.\n    deletion_protection: Enables deletion protection for the instance.\n    description: Specifies a textual description of the instances.\n    labels: List of label KEY=VALUE pairs to add to the instance.\n    machine_type: Specifies the machine type used for the instances.\n    network: Specifies the network that the instances will be part of.\n    network_tier: Specifies the network tier of the interface. NETWORK_TIER must\n      be one of: PREMIUM, STANDARD.\n    subnet: Specifies the subnet that the instances will be part of.\n    private_network_ip: Specifies the RFC1918 IP to assign to the instance.\n    no_restart_on_failure: The instances will NOT be restarted if they are\n      terminated by Compute Engine.\n    os: Specifies the OS of the boot disk being imported.\n    tags: A list of strings for adding tags to the Argo build.\n    zone: The GCP zone to tell Daisy to do work in. If unspecified, defaults to\n      wherever the Argo runner happens to be.\n    project: The Google Cloud Platform project name to use for OVF import.\n    output_filter: A list of strings indicating what lines from the log should\n      be output. Only lines that start with one of the strings in output_filter\n      will be displayed.\n    compute_release_track: release track to be used for Compute API calls. One\n      of - "alpha", "beta" or ""\n\n  Returns:\n    A build object that either streams the output or is displayed as a\n    link to the build.\n\n  Raises:\n    FailedBuildException: If the build is completed and not \'SUCCESS\'.\n  '
project_id = projects_util.ParseProject(properties.VALUES.core.project.GetOrFail())
_CheckIamPermissions(project_id)
two_percent = int((args.timeout * 0.02))
ovf_import_timeout = (args.timeout - min(two_percent, 300))
ovf_importer_args = []
AppendArg(ovf_importer_args, 'instance-names', instance_name)
AppendArg(ovf_importer_args, 'client-id', 'gcloud')
AppendArg(ovf_importer_args, 'ovf-gcs-path', source_uri)
AppendBoolArg(ovf_importer_args, 'no-guest-environment', no_guest_environment)
AppendBoolArg(ovf_importer_args, 'can-ip-forward', can_ip_forward)
AppendBoolArg(ovf_importer_args, 'deletion-protection', deletion_protection)
AppendArg(ovf_importer_args, 'description', description)
if labels:
AppendArg(ovf_importer_args, 'labels', ','.join(['{}={}'.format(k, v) for (k, v) in labels.items()]))
AppendArg(ovf_importer_args, 'machine-type', machine_type)
AppendArg(ovf_importer_args, 'network', network)
AppendArg(ovf_importer_args, 'network-tier', network_tier)
AppendArg(ovf_importer_args, 'subnet', subnet)
AppendArg(ovf_importer_args, 'private-network-ip', private_network_ip)
AppendBoolArg(ovf_importer_args, 'no-restart-on-failure', no_restart_on_failure)
AppendArg(ovf_importer_args, 'os', os)
if tags:
AppendArg(ovf_importer_args, 'tags', ','.join(tags))
AppendArg(ovf_importer_args, 'zone', zone)
AppendArg(ovf_importer_args, 'timeout', ovf_import_timeout, '-{0}={1}s')
AppendArg(ovf_importer_args, 'project', project)
_AppendNodeAffinityLabelArgs(ovf_importer_args, args, compute_client.messages)
if compute_release_track:
AppendArg(ovf_importer_args, 'release-track', compute_release_track)
build_tags = ['gce-ovf-import']
backoff = (lambda elapsed: (2 if (elapsed < 30) else 15))
return _RunCloudBuild(args, _OVF_IMPORT_BUILDER.format(args.docker_image_tag), ovf_importer_args, build_tags, output_filter, backoff=backoff) | -6,585,953,439,309,196,000 | Run an OVF import build on Google Cloud Builder.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
compute_client: Google Compute Engine client.
instance_name: Name of the instance to be imported.
source_uri: A GCS path to OVA or OVF package.
no_guest_environment: If set to True, Google Guest Environment won't be
installed on the boot disk of the VM.
can_ip_forward: If set to True, allows the instances to send and receive
packets with non-matching destination or source IP addresses.
deletion_protection: Enables deletion protection for the instance.
description: Specifies a textual description of the instances.
labels: List of label KEY=VALUE pairs to add to the instance.
machine_type: Specifies the machine type used for the instances.
network: Specifies the network that the instances will be part of.
network_tier: Specifies the network tier of the interface. NETWORK_TIER must
be one of: PREMIUM, STANDARD.
subnet: Specifies the subnet that the instances will be part of.
private_network_ip: Specifies the RFC1918 IP to assign to the instance.
no_restart_on_failure: The instances will NOT be restarted if they are
terminated by Compute Engine.
os: Specifies the OS of the boot disk being imported.
tags: A list of strings for adding tags to the Argo build.
zone: The GCP zone to tell Daisy to do work in. If unspecified, defaults to
wherever the Argo runner happens to be.
project: The Google Cloud Platform project name to use for OVF import.
output_filter: A list of strings indicating what lines from the log should
be output. Only lines that start with one of the strings in output_filter
will be displayed.
compute_release_track: release track to be used for Compute API calls. One
of - "alpha", "beta" or ""
Returns:
A build object that either streams the output or is displayed as a
link to the build.
Raises:
FailedBuildException: If the build is completed and not 'SUCCESS'. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | RunOVFImportBuild | bopopescu/cndw | python | def RunOVFImportBuild(args, compute_client, instance_name, source_uri, no_guest_environment, can_ip_forward, deletion_protection, description, labels, machine_type, network, network_tier, subnet, private_network_ip, no_restart_on_failure, os, tags, zone, project, output_filter, compute_release_track):
'Run an OVF import build on Google Cloud Builder.\n\n  Args:\n    args: an argparse namespace. All the arguments that were provided to this\n      command invocation.\n    compute_client: Google Compute Engine client.\n    instance_name: Name of the instance to be imported.\n    source_uri: A GCS path to OVA or OVF package.\n    no_guest_environment: If set to True, Google Guest Environment won\'t be\n      installed on the boot disk of the VM.\n    can_ip_forward: If set to True, allows the instances to send and receive\n      packets with non-matching destination or source IP addresses.\n    deletion_protection: Enables deletion protection for the instance.\n    description: Specifies a textual description of the instances.\n    labels: List of label KEY=VALUE pairs to add to the instance.\n    machine_type: Specifies the machine type used for the instances.\n    network: Specifies the network that the instances will be part of.\n    network_tier: Specifies the network tier of the interface. NETWORK_TIER must\n      be one of: PREMIUM, STANDARD.\n    subnet: Specifies the subnet that the instances will be part of.\n    private_network_ip: Specifies the RFC1918 IP to assign to the instance.\n    no_restart_on_failure: The instances will NOT be restarted if they are\n      terminated by Compute Engine.\n    os: Specifies the OS of the boot disk being imported.\n    tags: A list of strings for adding tags to the Argo build.\n    zone: The GCP zone to tell Daisy to do work in. If unspecified, defaults to\n      wherever the Argo runner happens to be.\n    project: The Google Cloud Platform project name to use for OVF import.\n    output_filter: A list of strings indicating what lines from the log should\n      be output. Only lines that start with one of the strings in output_filter\n      will be displayed.\n    compute_release_track: release track to be used for Compute API calls. One\n      of - "alpha", "beta" or ""\n\n  Returns:\n    A build object that either streams the output or is displayed as a\n    link to the build.\n\n  Raises:\n    FailedBuildException: If the build is completed and not \'SUCCESS\'.\n  '
project_id = projects_util.ParseProject(properties.VALUES.core.project.GetOrFail())
_CheckIamPermissions(project_id)
two_percent = int((args.timeout * 0.02))
ovf_import_timeout = (args.timeout - min(two_percent, 300))
ovf_importer_args = []
AppendArg(ovf_importer_args, 'instance-names', instance_name)
AppendArg(ovf_importer_args, 'client-id', 'gcloud')
AppendArg(ovf_importer_args, 'ovf-gcs-path', source_uri)
AppendBoolArg(ovf_importer_args, 'no-guest-environment', no_guest_environment)
AppendBoolArg(ovf_importer_args, 'can-ip-forward', can_ip_forward)
AppendBoolArg(ovf_importer_args, 'deletion-protection', deletion_protection)
AppendArg(ovf_importer_args, 'description', description)
if labels:
AppendArg(ovf_importer_args, 'labels', ','.join(['{}={}'.format(k, v) for (k, v) in labels.items()]))
AppendArg(ovf_importer_args, 'machine-type', machine_type)
AppendArg(ovf_importer_args, 'network', network)
AppendArg(ovf_importer_args, 'network-tier', network_tier)
AppendArg(ovf_importer_args, 'subnet', subnet)
AppendArg(ovf_importer_args, 'private-network-ip', private_network_ip)
AppendBoolArg(ovf_importer_args, 'no-restart-on-failure', no_restart_on_failure)
AppendArg(ovf_importer_args, 'os', os)
if tags:
AppendArg(ovf_importer_args, 'tags', ','.join(tags))
AppendArg(ovf_importer_args, 'zone', zone)
AppendArg(ovf_importer_args, 'timeout', ovf_import_timeout, '-{0}={1}s')
AppendArg(ovf_importer_args, 'project', project)
_AppendNodeAffinityLabelArgs(ovf_importer_args, args, compute_client.messages)
if compute_release_track:
AppendArg(ovf_importer_args, 'release-track', compute_release_track)
build_tags = ['gce-ovf-import']
backoff = (lambda elapsed: (2 if (elapsed < 30) else 15))
return _RunCloudBuild(args, _OVF_IMPORT_BUILDER.format(args.docker_image_tag), ovf_importer_args, build_tags, output_filter, backoff=backoff) |
def MakeGcsObjectOrPathUri(uri):
'Creates Google Cloud Storage URI for an object or a path.\n\n Raises storage_util.InvalidObjectNameError if a path contains only bucket\n name.\n\n Args:\n uri: a string to a Google Cloud Storage object or a path. Can be a gs:// or\n an https:// variant.\n\n Returns:\n Google Cloud Storage URI for an object or a path.\n '
obj_ref = resources.REGISTRY.Parse(uri)
if hasattr(obj_ref, 'object'):
return 'gs://{0}/{1}'.format(obj_ref.bucket, obj_ref.object)
else:
raise storage_util.InvalidObjectNameError(uri, 'Missing object name') | -4,744,423,058,397,379,000 | Creates Google Cloud Storage URI for an object or a path.
Raises storage_util.InvalidObjectNameError if a path contains only bucket
name.
Args:
uri: a string to a Google Cloud Storage object or a path. Can be a gs:// or
an https:// variant.
Returns:
Google Cloud Storage URI for an object or a path. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | MakeGcsObjectOrPathUri | bopopescu/cndw | python | def MakeGcsObjectOrPathUri(uri):
'Creates Google Cloud Storage URI for an object or a path.\n\n Raises storage_util.InvalidObjectNameError if a path contains only bucket\n name.\n\n Args:\n uri: a string to a Google Cloud Storage object or a path. Can be a gs:// or\n an https:// variant.\n\n Returns:\n Google Cloud Storage URI for an object or a path.\n '
obj_ref = resources.REGISTRY.Parse(uri)
if hasattr(obj_ref, 'object'):
return 'gs://{0}/{1}'.format(obj_ref.bucket, obj_ref.object)
else:
raise storage_util.InvalidObjectNameError(uri, 'Missing object name') |
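Behavior sketch for the input shapes the function distinguishes (the https form is parsed to the same bucket/object pair by the resource registry):

MakeGcsObjectOrPathUri('gs://my-bucket/ovf/package.ova')
# -> 'gs://my-bucket/ovf/package.ova'
MakeGcsObjectOrPathUri('https://storage.googleapis.com/my-bucket/ovf/package.ova')
# -> 'gs://my-bucket/ovf/package.ova'
MakeGcsObjectOrPathUri('gs://my-bucket')
# raises storage_util.InvalidObjectNameError: 'Missing object name'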
def _PrintLogLine(self, text):
'Override PrintLogLine method to use self.filter.'
if self.filter:
output_lines = text.splitlines()
for line in output_lines:
for match in self.filter:
if line.startswith(match):
self.out.Print(line)
break
else:
self.out.Print(text) | 4,026,509,995,065,777,000 | Override PrintLogLine method to use self.filter. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | _PrintLogLine | bopopescu/cndw | python | def _PrintLogLine(self, text):
if self.filter:
output_lines = text.splitlines()
for line in output_lines:
for match in self.filter:
if line.startswith(match):
self.out.Print(line)
break
else:
self.out.Print(text) |
def StreamWithFilter(self, build_ref, backoff, output_filter=None):
'Stream the logs for a build using whitelist filter.\n\n Args:\n build_ref: Build reference, The build whose logs shall be streamed.\n backoff: A function that takes the current elapsed time\n and returns the next sleep length. Both are in seconds.\n output_filter: List of strings, The output will only be shown if the line\n starts with one of the strings in the list.\n\n Raises:\n NoLogsBucketException: If the build does not specify a logsBucket.\n\n Returns:\n Build message, The completed or terminated build as read for the final\n poll.\n '
build = self.GetBuild(build_ref)
log_tailer = FilteredLogTailer.FromBuild(build)
log_tailer.filter = output_filter
statuses = self.messages.Build.StatusValueValuesEnum
working_statuses = [statuses.QUEUED, statuses.WORKING]
seconds_between_poll = backoff(0)
seconds_elapsed = 0
while (build.status in working_statuses):
log_tailer.Poll()
time.sleep(seconds_between_poll)
build = self.GetBuild(build_ref)
seconds_elapsed += seconds_between_poll
seconds_between_poll = backoff(seconds_elapsed)
log_tailer.Poll(is_last=True)
return build | -6,379,679,588,292,934,000 | Stream the logs for a build using whitelist filter.
Args:
build_ref: Build reference, The build whose logs shall be streamed.
backoff: A function that takes the current elapsed time
and returns the next sleep length. Both are in seconds.
output_filter: List of strings, The output will only be shown if the line
starts with one of the strings in the list.
Raises:
NoLogsBucketException: If the build does not specify a logsBucket.
Returns:
Build message, The completed or terminated build as read for the final
poll. | mac/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/daisy_utils.py | StreamWithFilter | bopopescu/cndw | python | def StreamWithFilter(self, build_ref, backoff, output_filter=None):
'Stream the logs for a build using whitelist filter.\n\n Args:\n build_ref: Build reference, The build whose logs shall be streamed.\n backoff: A function that takes the current elapsed time\n and returns the next sleep length. Both are in seconds.\n output_filter: List of strings, The output will only be shown if the line\n starts with one of the strings in the list.\n\n Raises:\n NoLogsBucketException: If the build does not specify a logsBucket.\n\n Returns:\n Build message, The completed or terminated build as read for the final\n poll.\n '
build = self.GetBuild(build_ref)
log_tailer = FilteredLogTailer.FromBuild(build)
log_tailer.filter = output_filter
statuses = self.messages.Build.StatusValueValuesEnum
working_statuses = [statuses.QUEUED, statuses.WORKING]
seconds_between_poll = backoff(0)
seconds_elapsed = 0
while (build.status in working_statuses):
log_tailer.Poll()
time.sleep(seconds_between_poll)
build = self.GetBuild(build_ref)
seconds_elapsed += seconds_between_poll
seconds_between_poll = backoff(seconds_elapsed)
log_tailer.Poll(is_last=True)
return build |
def __init__(self, category: str, owner: str, ttl: Optional[float]=None):
'\n :param category: Lock category name\n :param owner: Lock owner id\n :param ttl: Default lock ttl in seconds\n '
super().__init__(category, owner, ttl=ttl)
self.collection = self.get_collection()
self.release_all() | -6,989,409,611,335,985,000 | :param category: Lock category name
:param owner: Lock owner id
:param ttl: Default lock ttl in seconds | core/lock/distributed.py | __init__ | sbworth/getnoc | python | def __init__(self, category: str, owner: str, ttl: Optional[float]=None):
'\n :param category: Lock category name\n :param owner: Lock owner id\n :param ttl: Default lock ttl in seconds\n '
super().__init__(category, owner, ttl=ttl)
self.collection = self.get_collection()
self.release_all() |
def release_all(self):
'\n Release all locks held by owner\n '
self.collection.delete_many({'owner': self.owner}) | 5,212,372,657,485,805,000 | Release all locks held by owner | core/lock/distributed.py | release_all | sbworth/getnoc | python | def release_all(self):
'\n \n '
self.collection.delete_many({'owner': self.owner}) |
def get_collection_name(self) -> str:
'\n Get name of the lock collection\n '
return f'locks.{self.category}' | -66,151,164,867,025,640 | Get name of the lock collection | core/lock/distributed.py | get_collection_name | sbworth/getnoc | python | def get_collection_name(self) -> str:
'\n \n '
return f'locks.{self.category}' |
def get_collection(self) -> Collection:
'\n        Ensure the collection exists and is indexed properly\n        '
coll = get_db()[self.get_collection_name()]
coll.create_index([('items', pymongo.ASCENDING)], unique=True)
coll.create_index([('expires', pymongo.ASCENDING)], expireAfterSeconds=0)
return coll | 4,952,093,121,502,522,000 | Ensure the collection exists and is indexed properly | core/lock/distributed.py | get_collection | sbworth/getnoc | python | def get_collection(self) -> Collection:
'\n \n '
coll = get_db()[self.get_collection_name()]
coll.create_index([('items', pymongo.ASCENDING)], unique=True)
coll.create_index([('expires', pymongo.ASCENDING)], expireAfterSeconds=0)
return coll |
def acquire_by_items(self, items: List[str], ttl: Optional[float]=None) -> str:
'\n Acquire lock by list of items\n '
lock_id = ObjectId()
ttl = (ttl or self.ttl or DEFAULT_TTL)
metrics[f'lock_{self.category}_requests'] += 1
logger.debug('[%s|%s] Acquiring lock for %s (%s seconds)', self.category, self.owner, ', '.join(items), ttl)
while True:
try:
self.collection.insert_one({'_id': lock_id, 'items': items, 'owner': self.owner, 'expires': (datetime.datetime.now() + datetime.timedelta(seconds=ttl))})
return str(lock_id)
except pymongo.errors.DuplicateKeyError:
metrics[f'lock_{self.category}_misses'] += 1
jitter = ((random.random() * DEFAULT_LOCK_WAIT_JITTER) * DEFAULT_LOCK_WAIT)
timeout = (DEFAULT_LOCK_WAIT + jitter)
logger.debug('[%s|%s] Cannot get lock. Waiting %s seconds', self.category, self.owner, timeout)
time.sleep(timeout) | -5,344,109,815,606,256,000 | Acquire lock by list of items | core/lock/distributed.py | acquire_by_items | sbworth/getnoc | python | def acquire_by_items(self, items: List[str], ttl: Optional[float]=None) -> str:
'\n \n '
lock_id = ObjectId()
ttl = (ttl or self.ttl or DEFAULT_TTL)
metrics[f'lock_{self.category}_requests'] += 1
logger.debug('[%s|%s] Acquiring lock for %s (%s seconds)', self.category, self.owner, ', '.join(items), ttl)
while True:
try:
self.collection.insert_one({'_id': lock_id, 'items': items, 'owner': self.owner, 'expires': (datetime.datetime.now() + datetime.timedelta(seconds=ttl))})
return str(lock_id)
except pymongo.errors.DuplicateKeyError:
metrics[f'lock_{self.category}_misses'] += 1
jitter = ((random.random() * DEFAULT_LOCK_WAIT_JITTER) * DEFAULT_LOCK_WAIT)
timeout = (DEFAULT_LOCK_WAIT + jitter)
logger.debug('[%s|%s] Cannot get lock. Waiting %s seconds', self.category, self.owner, timeout)
time.sleep(timeout) |
def release_by_lock_id(self, lock_id: str):
'\n Release lock by id\n '
self.collection.delete_one({'_id': ObjectId(lock_id)}) | -2,106,621,128,667,154,700 | Release lock by id | core/lock/distributed.py | release_by_lock_id | sbworth/getnoc | python | def release_by_lock_id(self, lock_id: str):
'\n \n '
self.collection.delete_one({'_id': ObjectId(lock_id)}) |
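Putting the lock methods together — a usage sketch assuming the enclosing class is named DistributedLock (the class header itself is not part of these rows):

lock = DistributedLock('discovery', owner='worker-1', ttl=60)
lock_id = lock.acquire_by_items(['mo:123', 'mo:456'], ttl=30)
try:
    ...  # critical section; the unique index on 'items' keeps lock sets disjoint
finally:
    lock.release_by_lock_id(lock_id)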
def totalvalue(comb):
' Totalise a particular combination of items'
totwt = totval = 0
for (item, wt, val) in comb:
totwt += wt
totval += val
return ((totval, (- totwt)) if (totwt <= 400) else (0, 0)) | 3,194,895,844,354,115,600 | Totalise a particular combination of items | lang/Python/knapsack-problem-0-1-2.py | totalvalue | ethansaxenian/RosettaDecode | python | def totalvalue(comb):
' '
totwt = totval = 0
for (item, wt, val) in comb:
totwt += wt
totval += val
return ((totval, (- totwt)) if (totwt <= 400) else (0, 0)) |
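totalvalue is the scoring key for a brute-force 0/1 knapsack search over item combinations; a driver sketch with a few sample (name, weight, value) items:

from itertools import combinations

items = [('map', 9, 150), ('compass', 13, 35), ('water', 153, 200), ('sandwich', 50, 160)]
bagged = max((comb for r in range(len(items) + 1) for comb in combinations(items, r)),
             key=totalvalue)
print(bagged, totalvalue(bagged))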
def get(self, resource_group_name, service_name, **kwargs):
'Get the Monitoring Setting and its properties.\n\n :param resource_group_name: The name of the resource group that contains the resource. You can\n obtain this value from the Azure Resource Manager API or the portal.\n :type resource_group_name: str\n :param service_name: The name of the Service resource.\n :type service_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: MonitoringSettingResource, or the result of cls(response)\n :rtype: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = '2020-11-01-preview'
accept = 'application/json'
url = self.get.metadata['url']
path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url('resource_group_name', resource_group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str')}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header('accept', accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized | 3,907,705,224,089,740,300 | Get the Monitoring Setting and its properties.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param service_name: The name of the Service resource.
:type service_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MonitoringSettingResource, or the result of cls(response)
:rtype: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource
:raises: ~azure.core.exceptions.HttpResponseError | sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2020_11_01_preview/operations/_monitoring_settings_operations.py | get | AriZavala2/azure-sdk-for-python | python | def get(self, resource_group_name, service_name, **kwargs):
'Get the Monitoring Setting and its properties.\n\n :param resource_group_name: The name of the resource group that contains the resource. You can\n obtain this value from the Azure Resource Manager API or the portal.\n :type resource_group_name: str\n :param service_name: The name of the Service resource.\n :type service_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: MonitoringSettingResource, or the result of cls(response)\n :rtype: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = '2020-11-01-preview'
accept = 'application/json'
url = self.get.metadata['url']
path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url('resource_group_name', resource_group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str')}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header('accept', accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized |
def begin_update_put(self, resource_group_name, service_name, monitoring_setting_resource, **kwargs):
"Update the Monitoring Setting.\n\n :param resource_group_name: The name of the resource group that contains the resource. You can\n obtain this value from the Azure Resource Manager API or the portal.\n :type resource_group_name: str\n :param service_name: The name of the Service resource.\n :type service_name: str\n :param monitoring_setting_resource: Parameters for the update operation.\n :type monitoring_setting_resource: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource\n :keyword callable cls: A custom type or function that will be passed the direct response\n :keyword str continuation_token: A continuation token to restart a poller from a saved state.\n :keyword polling: Pass in True if you'd like the ARMPolling polling method,\n False for no polling, or your own initialized polling object for a personal polling strategy.\n :paramtype polling: bool or ~azure.core.polling.PollingMethod\n :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.\n :return: An instance of LROPoller that returns either MonitoringSettingResource or the result of cls(response)\n :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource]\n :raises ~azure.core.exceptions.HttpResponseError:\n "
polling = kwargs.pop('polling', True)
cls = kwargs.pop('cls', None)
lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
cont_token = kwargs.pop('continuation_token', None)
if (cont_token is None):
raw_result = self._update_put_initial(resource_group_name=resource_group_name, service_name=service_name, monitoring_setting_resource=monitoring_setting_resource, cls=(lambda x, y, z: x), **kwargs)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url('resource_group_name', resource_group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str')}
if (polling is True):
polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif (polling is False):
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) | -7,195,068,083,915,552,000 | Update the Monitoring Setting.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param service_name: The name of the Service resource.
:type service_name: str
:param monitoring_setting_resource: Parameters for the update operation.
:type monitoring_setting_resource: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either MonitoringSettingResource or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource]
:raises ~azure.core.exceptions.HttpResponseError: | sdk/appplatform/azure-mgmt-appplatform/azure/mgmt/appplatform/v2020_11_01_preview/operations/_monitoring_settings_operations.py | begin_update_put | AriZavala2/azure-sdk-for-python | python | def begin_update_put(self, resource_group_name, service_name, monitoring_setting_resource, **kwargs):
"Update the Monitoring Setting.\n\n :param resource_group_name: The name of the resource group that contains the resource. You can\n obtain this value from the Azure Resource Manager API or the portal.\n :type resource_group_name: str\n :param service_name: The name of the Service resource.\n :type service_name: str\n :param monitoring_setting_resource: Parameters for the update operation.\n :type monitoring_setting_resource: ~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource\n :keyword callable cls: A custom type or function that will be passed the direct response\n :keyword str continuation_token: A continuation token to restart a poller from a saved state.\n :keyword polling: Pass in True if you'd like the ARMPolling polling method,\n False for no polling, or your own initialized polling object for a personal polling strategy.\n :paramtype polling: bool or ~azure.core.polling.PollingMethod\n :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.\n :return: An instance of LROPoller that returns either MonitoringSettingResource or the result of cls(response)\n :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2020_11_01_preview.models.MonitoringSettingResource]\n :raises ~azure.core.exceptions.HttpResponseError:\n "
polling = kwargs.pop('polling', True)
cls = kwargs.pop('cls', None)
lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
cont_token = kwargs.pop('continuation_token', None)
if (cont_token is None):
raw_result = self._update_put_initial(resource_group_name=resource_group_name, service_name=service_name, monitoring_setting_resource=monitoring_setting_resource, cls=(lambda x, y, z: x), **kwargs)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('MonitoringSettingResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url('resource_group_name', resource_group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str')}
if (polling is True):
polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif (polling is False):
polling_method = NoPolling()
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) |
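A hedged caller sketch; assumes an AppPlatformManagementClient instance exposing these operations as client.monitoring_settings:

poller = client.monitoring_settings.begin_update_put(
    resource_group_name='my-rg',
    service_name='my-spring-service',
    monitoring_setting_resource=monitoring_setting_resource,
)
monitoring_setting = poller.result()  # blocks until the long-running update completes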